UNPKG

1.11 MB · JavaScript · View Raw
/*! @name @videojs/http-streaming @version 2.16.0 @license Apache-2.0 */
2(function (global, factory) {
3 typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('video.js'), require('@xmldom/xmldom')) :
4 typeof define === 'function' && define.amd ? define(['exports', 'video.js', '@xmldom/xmldom'], factory) :
5 (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.httpStreaming = {}, global.videojs, global.window));
6})(this, (function (exports, videojs, xmldom) { 'use strict';
7
/**
 * Interop helper for default imports: if `e` already looks like a module
 * namespace object (an object carrying a `default` key) it is returned
 * unchanged; otherwise it is wrapped so the value becomes the `default`
 * export.
 *
 * @param {*} e the imported value
 * @return {Object} an object with a `default` property
 */
function _interopDefaultLegacy(e) {
  if (e && typeof e === 'object' && 'default' in e) {
    return e;
  }
  return { 'default': e };
}
9
// Interop-wrapped copy of the video.js import; the bundle reaches the
// library through videojs__default["default"].
var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);
11
/**
 * Run a CommonJS-style module factory inside this bundle and return its
 * exports. The factory receives a synthetic `module` object (with `path`,
 * `exports`, and a `require` shim) plus its `exports` alias.
 *
 * @param {Function} fn the module factory, called as fn(module, module.exports)
 * @param {string} [basedir] recorded as module.path, used as the default
 *        base for the require shim
 * @param {Object} [module] ignored; immediately replaced by the synthetic module
 * @return {*} the module's final exports value
 */
function createCommonjsModule(fn, basedir, module) {
  module = {
    path: basedir,
    exports: {},
    require: function (path, base) {
      var resolvedBase = (base === undefined || base === null) ? module.path : base;
      return commonjsRequire(path, resolvedBase);
    }
  };
  fn(module, module.exports);
  return module.exports;
}
21
/**
 * Stub backing the require shim handed to bundled CommonJS modules.
 * Dynamic requires cannot be resolved at runtime in this bundle.
 *
 * @throws {Error} always
 */
function commonjsRequire() {
  var message = 'Dynamic requires are not currently supported by @rollup/plugin-commonjs';
  throw new Error(message);
}
25
// Babel runtime helper (bundled via createCommonjsModule): guards transpiled
// subclasses against touching `this` before `super()` has run.
var assertThisInitialized = createCommonjsModule(function (module) {
  function _assertThisInitialized(self) {
    if (self !== void 0) {
      return self;
    }
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }

  module.exports = _assertThisInitialized;
  module.exports["default"] = module.exports;
  module.exports.__esModule = true;
});
38
// Babel runtime helper: sets an object's prototype. On first call it picks
// the real implementation (native Object.setPrototypeOf when available,
// otherwise a __proto__ assignment shim) and replaces both the local binding
// and the export so later calls skip the detection.
var setPrototypeOf = createCommonjsModule(function (module) {
  function _setPrototypeOf(o, p) {
    var impl = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
      o.__proto__ = p;
      return o;
    };

    _setPrototypeOf = impl;
    module.exports = impl;
    module.exports["default"] = module.exports;
    module.exports.__esModule = true;
    return _setPrototypeOf(o, p);
  }

  module.exports = _setPrototypeOf;
  module.exports["default"] = module.exports;
  module.exports.__esModule = true;
});
53
// Babel runtime helper implementing "loose" class inheritance: the subclass
// gets a fresh prototype chained to the superclass's, and its static side is
// linked to the superclass via setPrototypeOf.
var inheritsLoose = createCommonjsModule(function (module) {
  function _inheritsLoose(subClass, superClass) {
    var proto = Object.create(superClass.prototype);
    proto.constructor = subClass;
    subClass.prototype = proto;
    setPrototypeOf(subClass, superClass);
  }

  module.exports = _inheritsLoose;
  module.exports["default"] = module.exports;
  module.exports.__esModule = true;
});
64
// Bundled copy of the url-toolkit package: RFC 1808 relative-URL resolution.
var urlToolkit = createCommonjsModule(function (module, exports) {
  // see https://tools.ietf.org/html/rfc1808
  (function (root) {
    // Capture groups: 1=scheme (incl. ':'), 2=netLoc (incl. leading '//'),
    // 3=path, 4=params (';...'), 5=query ('?...'), 6=fragment ('#...').
    var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#[^]*)?$/;
    // 1=everything before the first '/', '?' or '#'; 2=the remainder.
    var FIRST_SEGMENT_REGEX = /^([^\/?#]*)([^]*)$/;
    // NOTE: the two regexes below are applied to a REVERSED path string (see
    // normalizePath), which lets a lookahead stand in for a lookbehind.
    var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
    var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
    var URLToolkit = {
      // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
      // E.g
      // With opts.alwaysNormalize = false (default, spec compliant)
      // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
      // With opts.alwaysNormalize = true (not spec compliant)
      // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
      buildAbsoluteURL: function buildAbsoluteURL(baseURL, relativeURL, opts) {
        opts = opts || {}; // remove any remaining space and CRLF

        baseURL = baseURL.trim();
        relativeURL = relativeURL.trim();

        if (!relativeURL) {
          // 2a) If the embedded URL is entirely empty, it inherits the
          // entire base URL (i.e., is set equal to the base URL)
          // and we are done.
          if (!opts.alwaysNormalize) {
            return baseURL;
          }

          var basePartsForNormalise = URLToolkit.parseURL(baseURL);

          if (!basePartsForNormalise) {
            throw new Error('Error trying to parse base URL.');
          }

          basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
          return URLToolkit.buildURLFromParts(basePartsForNormalise);
        }

        var relativeParts = URLToolkit.parseURL(relativeURL);

        if (!relativeParts) {
          throw new Error('Error trying to parse relative URL.');
        }

        if (relativeParts.scheme) {
          // 2b) If the embedded URL starts with a scheme name, it is
          // interpreted as an absolute URL and we are done.
          if (!opts.alwaysNormalize) {
            return relativeURL;
          }

          relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
          return URLToolkit.buildURLFromParts(relativeParts);
        }

        var baseParts = URLToolkit.parseURL(baseURL);

        if (!baseParts) {
          throw new Error('Error trying to parse base URL.');
        }

        if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
          // If netLoc missing and path doesn't start with '/', assume everthing before the first '/' is the netLoc
          // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
          var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
          baseParts.netLoc = pathParts[1];
          baseParts.path = pathParts[2];
        }

        if (baseParts.netLoc && !baseParts.path) {
          baseParts.path = '/';
        }

        var builtParts = {
          // 2c) Otherwise, the embedded URL inherits the scheme of
          // the base URL.
          scheme: baseParts.scheme,
          netLoc: relativeParts.netLoc,
          path: null,
          params: relativeParts.params,
          query: relativeParts.query,
          fragment: relativeParts.fragment
        };

        if (!relativeParts.netLoc) {
          // 3) If the embedded URL's <net_loc> is non-empty, we skip to
          // Step 7. Otherwise, the embedded URL inherits the <net_loc>
          // (if any) of the base URL.
          builtParts.netLoc = baseParts.netLoc; // 4) If the embedded URL path is preceded by a slash "/", the
          // path is not relative and we skip to Step 7.

          if (relativeParts.path[0] !== '/') {
            if (!relativeParts.path) {
              // 5) If the embedded URL path is empty (and not preceded by a
              // slash), then the embedded URL inherits the base URL path
              builtParts.path = baseParts.path; // 5a) if the embedded URL's <params> is non-empty, we skip to
              // step 7; otherwise, it inherits the <params> of the base
              // URL (if any) and

              if (!relativeParts.params) {
                builtParts.params = baseParts.params; // 5b) if the embedded URL's <query> is non-empty, we skip to
                // step 7; otherwise, it inherits the <query> of the base
                // URL (if any) and we skip to step 7.

                if (!relativeParts.query) {
                  builtParts.query = baseParts.query;
                }
              }
            } else {
              // 6) The last segment of the base URL's path (anything
              // following the rightmost slash "/", or the entire path if no
              // slash is present) is removed and the embedded URL's path is
              // appended in its place.
              var baseURLPath = baseParts.path;
              var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
              builtParts.path = URLToolkit.normalizePath(newPath);
            }
          }
        }

        if (builtParts.path === null) {
          builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
        }

        return URLToolkit.buildURLFromParts(builtParts);
      },
      // Split a URL string into its six RFC 1808 components; returns null
      // only if URL_REGEX fails to match (it matches nearly any string).
      parseURL: function parseURL(url) {
        var parts = URL_REGEX.exec(url);

        if (!parts) {
          return null;
        }

        return {
          scheme: parts[1] || '',
          netLoc: parts[2] || '',
          path: parts[3] || '',
          params: parts[4] || '',
          query: parts[5] || '',
          fragment: parts[6] || ''
        };
      },
      normalizePath: function normalizePath(path) {
        // The following operations are
        // then applied, in order, to the new path:
        // 6a) All occurrences of "./", where "." is a complete path
        // segment, are removed.
        // 6b) If the path ends with "." as a complete path segment,
        // that "." is removed.
        // The path is reversed so "<segment>/.." can be matched with a
        // lookahead instead of an (unsupported) lookbehind; it is reversed
        // back before returning.
        path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, ''); // 6c) All occurrences of "<segment>/../", where <segment> is a
        // complete path segment not equal to "..", are removed.
        // Removal of these path segments is performed iteratively,
        // removing the leftmost matching pattern on each iteration,
        // until no matching pattern remains.
        // 6d) If the path ends with "<segment>/..", where <segment> is a
        // complete path segment not equal to "..", that
        // "<segment>/.." is removed.

        while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {}

        return path.split('').reverse().join('');
      },
      // Reassemble a parts object (as produced by parseURL) into a string.
      buildURLFromParts: function buildURLFromParts(parts) {
        return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
      }
    };
    module.exports = URLToolkit;
  })();
});
234
// Fallback origin used when resolving without a real window.location (e.g.
// under node); it is sliced back off of the result below.
var DEFAULT_LOCATION = 'http://example.com';

/**
 * Resolve `relativeUrl` against `baseUrl`, preferring the native URL
 * constructor when available and falling back to url-toolkit otherwise.
 * Relies on the global `window` (assumed present even in shimmed node
 * environments — see the bundle's UMD header).
 *
 * @param {string} baseUrl the URL to resolve against
 * @param {string} relativeUrl the URL to resolve
 * @return {string} the resolved, absolute-ish URL
 */
var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
  // return early if we don't need to resolve
  if (/^[a-z]+:/i.test(relativeUrl)) {
    return relativeUrl;
  } // if baseUrl is a data URI, ignore it and resolve everything relative to window.location


  if (/^data:/.test(baseUrl)) {
    baseUrl = window.location && window.location.href || '';
  } // IE11 supports URL but not the URL constructor
  // feature detect the behavior we want


  var nativeURL = typeof window.URL === 'function';
  var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
  // and if baseUrl isn't an absolute url

  var removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location

  if (nativeURL) {
    baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
  } else if (!/\/\//i.test(baseUrl)) {
    baseUrl = urlToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
  }

  if (nativeURL) {
    var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
    // and if we're location-less, remove the location
    // otherwise, return the url unmodified

    if (removeLocation) {
      return newUrl.href.slice(DEFAULT_LOCATION.length);
    } else if (protocolLess) {
      return newUrl.href.slice(newUrl.protocol.length);
    }

    return newUrl.href;
  }

  return urlToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
};
278
/**
 * @file resolve-url.js - Handling how URLs are resolved and manipulated
 */
// Alias of resolveUrl$1 under the name the rest of this module uses.
var resolveUrl = resolveUrl$1;
/**
 * Checks whether an xhr request was redirected and returns the correct url
 * depending on the `handleManifestRedirects` option.
 *
 * The responseURL comes from the fetch/XHR redirect machinery:
 * - https://fetch.spec.whatwg.org/#concept-response-url
 * - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
 *
 * @api private
 *
 * @param {boolean} handleManifestRedirect whether redirects should be followed
 * @param {string} url - an url being requested
 * @param {XMLHttpRequest} req - xhr request result
 *
 * @return {string} the redirect target when one applies, otherwise `url`
 */
var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  if (!handleManifestRedirect) {
    return url;
  }

  var redirectedTo = req && req.responseURL;

  // Only swap in the responseURL when it exists and actually differs.
  if (redirectedTo && redirectedTo !== url) {
    return redirectedTo;
  }

  return url;
};
305
/**
 * Create a namespaced debug logger for VHS. When video.js exposes
 * `log.debug`, the returned function is `log.debug` bound with a 'VHS:'
 * prefix and the given source label; otherwise a no-op is returned.
 *
 * @param {string} source label identifying the component doing the logging
 * @return {Function} a logging function (possibly a no-op)
 */
var logger = function logger(source) {
  var vjs = videojs__default["default"];

  if (!vjs.log.debug) {
    return function () {};
  }

  return vjs.log.debug.bind(vjs, 'VHS:', source + " >");
};
313
// Babel runtime helper: shallow-merge objects. On first call it resolves to
// native Object.assign when available (otherwise an own-property copy loop),
// then replaces itself and the module export with that implementation so the
// detection only happens once.
var _extends_1 = createCommonjsModule(function (module) {
  function _extends() {
    var impl = Object.assign || function (target) {
      for (var i = 1; i < arguments.length; i++) {
        var source = arguments[i];

        for (var key in source) {
          if (Object.prototype.hasOwnProperty.call(source, key)) {
            target[key] = source[key];
          }
        }
      }

      return target;
    };

    module.exports = _extends = impl;
    module.exports["default"] = module.exports;
    module.exports.__esModule = true;
    return _extends.apply(this, arguments);
  }

  module.exports = _extends;
  module.exports["default"] = module.exports;
  module.exports.__esModule = true;
});
337
338 /**
339 * @file stream.js
340 */
341
/**
 * A lightweight readable stream implemention that handles event dispatching.
 *
 * @class Stream
 */
var Stream = /*#__PURE__*/function () {
  function Stream() {
    this.listeners = {};
  }

  var _proto = Stream.prototype;

  /**
   * Add a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener the callback to be invoked when an event of
   * the specified type occurs
   */
  _proto.on = function on(type, listener) {
    var registered = this.listeners[type];

    if (!registered) {
      registered = this.listeners[type] = [];
    }

    registered.push(listener);
  };

  /**
   * Remove a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener a function previously registered for this
   * type of event through `on`
   * @return {boolean} if we could turn it off or not
   */
  _proto.off = function off(type, listener) {
    var registered = this.listeners[type];

    if (!registered) {
      return false;
    }

    var index = registered.indexOf(listener);
    // Replace the array with a copy before removing, so an in-flight
    // trigger() keeps iterating its own (old) reference in order.
    this.listeners[type] = registered.slice(0);
    this.listeners[type].splice(index, 1);
    return index > -1;
  };

  /**
   * Trigger an event of the specified type on this stream. Any additional
   * arguments to this function are passed as parameters to event listeners.
   *
   * @param {string} type the event name
   */
  _proto.trigger = function trigger(type) {
    var callbacks = this.listeners[type];

    if (!callbacks) {
      return;
    }

    // Cache the count so listeners added during dispatch are not invoked,
    // and special-case the single-argument call to avoid building an
    // intermediate arguments array on the hot path.
    var total = callbacks.length;

    if (arguments.length === 2) {
      var payload = arguments[1];

      for (var i = 0; i < total; ++i) {
        callbacks[i].call(this, payload);
      }

      return;
    }

    var args = Array.prototype.slice.call(arguments, 1);

    for (var j = 0; j < total; ++j) {
      callbacks[j].apply(this, args);
    }
  };

  /**
   * Destroys the stream and cleans up.
   */
  _proto.dispose = function dispose() {
    this.listeners = {};
  };

  /**
   * Forwards all `data` events on this stream to the destination stream. The
   * destination stream should provide a method `push` to receive the data
   * events as they arrive.
   *
   * @param {Stream} destination the stream that will receive all `data` events
   * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
   */
  _proto.pipe = function pipe(destination) {
    this.on('data', function (data) {
      destination.push(data);
    });
  };

  return Stream;
}();
457
/**
 * Base64-decode `s` into a binary string, preferring the browser's
 * `window.atob` and falling back to Node's Buffer when it is absent.
 *
 * @param {string} s base64-encoded input
 * @return {string} the decoded binary string
 */
var atob = function atob(s) {
  if (window.atob) {
    return window.atob(s);
  }

  return Buffer.from(s, 'base64').toString('binary');
};
461
/**
 * Decode a base64 string into the raw bytes it encodes.
 *
 * @param {string} b64Text base64-encoded input
 * @return {Uint8Array} one byte per character of the decoded binary string
 */
function decodeB64ToUint8Array(b64Text) {
  var binary = atob(b64Text);
  var bytes = new Uint8Array(binary.length);

  for (var i = 0; i < binary.length; i++) {
    bytes[i] = binary.charCodeAt(i);
  }

  return bytes;
}
472
473 /*! @name m3u8-parser @version 4.8.0 @license Apache-2.0 */
/**
 * A stream that buffers string input and generates a `data` event for each
 * line.
 *
 * @class LineStream
 * @extends Stream
 */
var LineStream = /*#__PURE__*/function (_Stream) {
  inheritsLoose(LineStream, _Stream);

  function LineStream() {
    var _this = _Stream.call(this) || this;

    _this.buffer = '';
    return _this;
  }

  var _proto = LineStream.prototype;

  /**
   * Add new data to be parsed.
   *
   * @param {string} data the text to process
   */
  _proto.push = function push(data) {
    this.buffer += data;

    // Emit one `data` event per complete line; any trailing partial line
    // stays buffered until more input arrives.
    var newlineIndex = this.buffer.indexOf('\n');

    while (newlineIndex > -1) {
      this.trigger('data', this.buffer.substring(0, newlineIndex));
      this.buffer = this.buffer.substring(newlineIndex + 1);
      newlineIndex = this.buffer.indexOf('\n');
    }
  };

  return LineStream;
}(Stream);
514
// Literal horizontal-tab character, used below as a list separator.
var TAB = String.fromCharCode(0x09);

/**
 * Parse an HLS byterange string of the form "<length>@<offset>" into an
 * object. Either side of the `@` may be absent, in which case the
 * corresponding property is simply omitted.
 *
 * @param {string} byterangeString the raw byterange text (may be falsy)
 * @return {{length: number, offset: number}} parsed fields (both optional)
 */
var parseByterange = function parseByterange(byterangeString) {
  // Optionally capture digits before and after the `@` separator.
  var parts = /([0-9.]*)?@?([0-9.]*)?/.exec(byterangeString || '');
  var lengthText = parts[1];
  var offsetText = parts[2];
  var result = {};

  if (lengthText) {
    result.length = parseInt(lengthText, 10);
  }

  if (offsetText) {
    result.offset = parseInt(offsetText, 10);
  }

  return result;
};
/**
 * Build the "forgiving" attribute-list separator, per this pseudo-grammar:
 *   attributes -> keyvalue (',' keyvalue)*
 *   keyvalue   -> key '=' value
 *   key        -> [^=]*
 *   value      -> '"' [^"]* '"' | [^,]*
 *
 * @return {RegExp} a fresh regex capturing one `key=value` pair (quoted
 * values may contain commas)
 */
var attributeSeparator = function attributeSeparator() {
  var keyPattern = '[^=]*';
  var valuePattern = '"[^"]*"|[^,]*';
  var pairPattern = '(?:' + keyPattern + ')=(?:' + valuePattern + ')';
  return new RegExp('(?:^|,)(' + pairPattern + ')');
};
/**
 * Parse an M3U8 attribute list (e.g. `KEY=VALUE,OTHER="a,b"`) into a plain
 * object. Keys and values are whitespace-trimmed and one pair of wrapping
 * quotes is stripped from values.
 *
 * @param {string} attributes the attribute line to parse
 * @return {Object} map of attribute names to string values
 */
var parseAttributes$1 = function parseAttributes(attributes) {
  var result = {};
  // Splitting on the separator leaves the captured `key=value` chunks mixed
  // with empty strings for unmatched portions of the input; walk from the
  // end so that, for duplicate keys, the earliest occurrence wins.
  var chunks = attributes.split(attributeSeparator());

  for (var i = chunks.length - 1; i >= 0; i--) {
    if (chunks[i] === '') {
      continue;
    }

    var pair = /([^=]*)=(.*)/.exec(chunks[i]).slice(1);
    var key = pair[0].replace(/^\s+|\s+$/g, '');
    var value = pair[1].replace(/^\s+|\s+$/g, '').replace(/^['"](.*)['"]$/g, '$1');
    result[key] = value;
  }

  return result;
};
579 /**
580 * A line-level M3U8 parser event stream. It expects to receive input one
581 * line at a time and performs a context-free parse of its contents. A stream
582 * interpretation of a manifest can be useful if the manifest is expected to
583 * be too large to fit comfortably into memory or the entirety of the input
584 * is not immediately available. Otherwise, it's probably much easier to work
585 * with a regular `Parser` object.
586 *
587 * Produces `data` events with an object that captures the parser's
588 * interpretation of the input. That object has a property `tag` that is one
589 * of `uri`, `comment`, or `tag`. URIs only have a single additional
590 * property, `line`, which captures the entirety of the input without
591 * interpretation. Comments similarly have a single additional property
592 * `text` which is the input without the leading `#`.
593 *
594 * Tags always have a property `tagType` which is the lower-cased version of
595 * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
596 * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
597 * tags are given the tag type `unknown` and a single additional property
598 * `data` with the remainder of the input.
599 *
600 * @class ParseStream
601 * @extends Stream
602 */
603
604
605 var ParseStream = /*#__PURE__*/function (_Stream) {
606 inheritsLoose(ParseStream, _Stream);
607
608 function ParseStream() {
609 var _this;
610
611 _this = _Stream.call(this) || this;
612 _this.customParsers = [];
613 _this.tagMappers = [];
614 return _this;
615 }
616 /**
617 * Parses an additional line of input.
618 *
619 * @param {string} line a single line of an M3U8 file to parse
620 */
621
622
623 var _proto = ParseStream.prototype;
624
625 _proto.push = function push(line) {
626 var _this2 = this;
627
628 var match;
629 var event; // strip whitespace
630
631 line = line.trim();
632
633 if (line.length === 0) {
634 // ignore empty lines
635 return;
636 } // URIs
637
638
639 if (line[0] !== '#') {
640 this.trigger('data', {
641 type: 'uri',
642 uri: line
643 });
644 return;
645 } // map tags
646
647
648 var newLines = this.tagMappers.reduce(function (acc, mapper) {
649 var mappedLine = mapper(line); // skip if unchanged
650
651 if (mappedLine === line) {
652 return acc;
653 }
654
655 return acc.concat([mappedLine]);
656 }, [line]);
657 newLines.forEach(function (newLine) {
658 for (var i = 0; i < _this2.customParsers.length; i++) {
659 if (_this2.customParsers[i].call(_this2, newLine)) {
660 return;
661 }
662 } // Comments
663
664
665 if (newLine.indexOf('#EXT') !== 0) {
666 _this2.trigger('data', {
667 type: 'comment',
668 text: newLine.slice(1)
669 });
670
671 return;
672 } // strip off any carriage returns here so the regex matching
673 // doesn't have to account for them.
674
675
676 newLine = newLine.replace('\r', ''); // Tags
677
678 match = /^#EXTM3U/.exec(newLine);
679
680 if (match) {
681 _this2.trigger('data', {
682 type: 'tag',
683 tagType: 'm3u'
684 });
685
686 return;
687 }
688
689 match = /^#EXTINF:?([0-9\.]*)?,?(.*)?$/.exec(newLine);
690
691 if (match) {
692 event = {
693 type: 'tag',
694 tagType: 'inf'
695 };
696
697 if (match[1]) {
698 event.duration = parseFloat(match[1]);
699 }
700
701 if (match[2]) {
702 event.title = match[2];
703 }
704
705 _this2.trigger('data', event);
706
707 return;
708 }
709
710 match = /^#EXT-X-TARGETDURATION:?([0-9.]*)?/.exec(newLine);
711
712 if (match) {
713 event = {
714 type: 'tag',
715 tagType: 'targetduration'
716 };
717
718 if (match[1]) {
719 event.duration = parseInt(match[1], 10);
720 }
721
722 _this2.trigger('data', event);
723
724 return;
725 }
726
727 match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(newLine);
728
729 if (match) {
730 event = {
731 type: 'tag',
732 tagType: 'version'
733 };
734
735 if (match[1]) {
736 event.version = parseInt(match[1], 10);
737 }
738
739 _this2.trigger('data', event);
740
741 return;
742 }
743
744 match = /^#EXT-X-MEDIA-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);
745
746 if (match) {
747 event = {
748 type: 'tag',
749 tagType: 'media-sequence'
750 };
751
752 if (match[1]) {
753 event.number = parseInt(match[1], 10);
754 }
755
756 _this2.trigger('data', event);
757
758 return;
759 }
760
761 match = /^#EXT-X-DISCONTINUITY-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);
762
763 if (match) {
764 event = {
765 type: 'tag',
766 tagType: 'discontinuity-sequence'
767 };
768
769 if (match[1]) {
770 event.number = parseInt(match[1], 10);
771 }
772
773 _this2.trigger('data', event);
774
775 return;
776 }
777
778 match = /^#EXT-X-PLAYLIST-TYPE:?(.*)?$/.exec(newLine);
779
780 if (match) {
781 event = {
782 type: 'tag',
783 tagType: 'playlist-type'
784 };
785
786 if (match[1]) {
787 event.playlistType = match[1];
788 }
789
790 _this2.trigger('data', event);
791
792 return;
793 }
794
795 match = /^#EXT-X-BYTERANGE:?(.*)?$/.exec(newLine);
796
797 if (match) {
798 event = _extends_1(parseByterange(match[1]), {
799 type: 'tag',
800 tagType: 'byterange'
801 });
802
803 _this2.trigger('data', event);
804
805 return;
806 }
807
808 match = /^#EXT-X-ALLOW-CACHE:?(YES|NO)?/.exec(newLine);
809
810 if (match) {
811 event = {
812 type: 'tag',
813 tagType: 'allow-cache'
814 };
815
816 if (match[1]) {
817 event.allowed = !/NO/.test(match[1]);
818 }
819
820 _this2.trigger('data', event);
821
822 return;
823 }
824
825 match = /^#EXT-X-MAP:?(.*)$/.exec(newLine);
826
827 if (match) {
828 event = {
829 type: 'tag',
830 tagType: 'map'
831 };
832
833 if (match[1]) {
834 var attributes = parseAttributes$1(match[1]);
835
836 if (attributes.URI) {
837 event.uri = attributes.URI;
838 }
839
840 if (attributes.BYTERANGE) {
841 event.byterange = parseByterange(attributes.BYTERANGE);
842 }
843 }
844
845 _this2.trigger('data', event);
846
847 return;
848 }
849
850 match = /^#EXT-X-STREAM-INF:?(.*)$/.exec(newLine);
851
852 if (match) {
853 event = {
854 type: 'tag',
855 tagType: 'stream-inf'
856 };
857
858 if (match[1]) {
859 event.attributes = parseAttributes$1(match[1]);
860
861 if (event.attributes.RESOLUTION) {
862 var split = event.attributes.RESOLUTION.split('x');
863 var resolution = {};
864
865 if (split[0]) {
866 resolution.width = parseInt(split[0], 10);
867 }
868
869 if (split[1]) {
870 resolution.height = parseInt(split[1], 10);
871 }
872
873 event.attributes.RESOLUTION = resolution;
874 }
875
876 if (event.attributes.BANDWIDTH) {
877 event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
878 }
879
880 if (event.attributes['FRAME-RATE']) {
881 event.attributes['FRAME-RATE'] = parseFloat(event.attributes['FRAME-RATE']);
882 }
883
884 if (event.attributes['PROGRAM-ID']) {
885 event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
886 }
887 }
888
889 _this2.trigger('data', event);
890
891 return;
892 }
893
894 match = /^#EXT-X-MEDIA:?(.*)$/.exec(newLine);
895
896 if (match) {
897 event = {
898 type: 'tag',
899 tagType: 'media'
900 };
901
902 if (match[1]) {
903 event.attributes = parseAttributes$1(match[1]);
904 }
905
906 _this2.trigger('data', event);
907
908 return;
909 }
910
911 match = /^#EXT-X-ENDLIST/.exec(newLine);
912
913 if (match) {
914 _this2.trigger('data', {
915 type: 'tag',
916 tagType: 'endlist'
917 });
918
919 return;
920 }
921
922 match = /^#EXT-X-DISCONTINUITY/.exec(newLine);
923
924 if (match) {
925 _this2.trigger('data', {
926 type: 'tag',
927 tagType: 'discontinuity'
928 });
929
930 return;
931 }
932
933 match = /^#EXT-X-PROGRAM-DATE-TIME:?(.*)$/.exec(newLine);
934
935 if (match) {
936 event = {
937 type: 'tag',
938 tagType: 'program-date-time'
939 };
940
941 if (match[1]) {
942 event.dateTimeString = match[1];
943 event.dateTimeObject = new Date(match[1]);
944 }
945
946 _this2.trigger('data', event);
947
948 return;
949 }
950
951 match = /^#EXT-X-KEY:?(.*)$/.exec(newLine);
952
953 if (match) {
954 event = {
955 type: 'tag',
956 tagType: 'key'
957 };
958
959 if (match[1]) {
960 event.attributes = parseAttributes$1(match[1]); // parse the IV string into a Uint32Array
961
962 if (event.attributes.IV) {
963 if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
964 event.attributes.IV = event.attributes.IV.substring(2);
965 }
966
967 event.attributes.IV = event.attributes.IV.match(/.{8}/g);
968 event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
969 event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
970 event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
971 event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
972 event.attributes.IV = new Uint32Array(event.attributes.IV);
973 }
974 }
975
976 _this2.trigger('data', event);
977
978 return;
979 }
980
981 match = /^#EXT-X-START:?(.*)$/.exec(newLine);
982
983 if (match) {
984 event = {
985 type: 'tag',
986 tagType: 'start'
987 };
988
989 if (match[1]) {
990 event.attributes = parseAttributes$1(match[1]);
991 event.attributes['TIME-OFFSET'] = parseFloat(event.attributes['TIME-OFFSET']);
992 event.attributes.PRECISE = /YES/.test(event.attributes.PRECISE);
993 }
994
995 _this2.trigger('data', event);
996
997 return;
998 }
999
1000 match = /^#EXT-X-CUE-OUT-CONT:?(.*)?$/.exec(newLine);
1001
1002 if (match) {
1003 event = {
1004 type: 'tag',
1005 tagType: 'cue-out-cont'
1006 };
1007
1008 if (match[1]) {
1009 event.data = match[1];
1010 } else {
1011 event.data = '';
1012 }
1013
1014 _this2.trigger('data', event);
1015
1016 return;
1017 }
1018
1019 match = /^#EXT-X-CUE-OUT:?(.*)?$/.exec(newLine);
1020
1021 if (match) {
1022 event = {
1023 type: 'tag',
1024 tagType: 'cue-out'
1025 };
1026
1027 if (match[1]) {
1028 event.data = match[1];
1029 } else {
1030 event.data = '';
1031 }
1032
1033 _this2.trigger('data', event);
1034
1035 return;
1036 }
1037
1038 match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(newLine);
1039
1040 if (match) {
1041 event = {
1042 type: 'tag',
1043 tagType: 'cue-in'
1044 };
1045
1046 if (match[1]) {
1047 event.data = match[1];
1048 } else {
1049 event.data = '';
1050 }
1051
1052 _this2.trigger('data', event);
1053
1054 return;
1055 }
1056
1057 match = /^#EXT-X-SKIP:(.*)$/.exec(newLine);
1058
1059 if (match && match[1]) {
1060 event = {
1061 type: 'tag',
1062 tagType: 'skip'
1063 };
1064 event.attributes = parseAttributes$1(match[1]);
1065
1066 if (event.attributes.hasOwnProperty('SKIPPED-SEGMENTS')) {
1067 event.attributes['SKIPPED-SEGMENTS'] = parseInt(event.attributes['SKIPPED-SEGMENTS'], 10);
1068 }
1069
1070 if (event.attributes.hasOwnProperty('RECENTLY-REMOVED-DATERANGES')) {
1071 event.attributes['RECENTLY-REMOVED-DATERANGES'] = event.attributes['RECENTLY-REMOVED-DATERANGES'].split(TAB);
1072 }
1073
1074 _this2.trigger('data', event);
1075
1076 return;
1077 }
1078
1079 match = /^#EXT-X-PART:(.*)$/.exec(newLine);
1080
1081 if (match && match[1]) {
1082 event = {
1083 type: 'tag',
1084 tagType: 'part'
1085 };
1086 event.attributes = parseAttributes$1(match[1]);
1087 ['DURATION'].forEach(function (key) {
1088 if (event.attributes.hasOwnProperty(key)) {
1089 event.attributes[key] = parseFloat(event.attributes[key]);
1090 }
1091 });
1092 ['INDEPENDENT', 'GAP'].forEach(function (key) {
1093 if (event.attributes.hasOwnProperty(key)) {
1094 event.attributes[key] = /YES/.test(event.attributes[key]);
1095 }
1096 });
1097
1098 if (event.attributes.hasOwnProperty('BYTERANGE')) {
1099 event.attributes.byterange = parseByterange(event.attributes.BYTERANGE);
1100 }
1101
1102 _this2.trigger('data', event);
1103
1104 return;
1105 }
1106
1107 match = /^#EXT-X-SERVER-CONTROL:(.*)$/.exec(newLine);
1108
1109 if (match && match[1]) {
1110 event = {
1111 type: 'tag',
1112 tagType: 'server-control'
1113 };
1114 event.attributes = parseAttributes$1(match[1]);
1115 ['CAN-SKIP-UNTIL', 'PART-HOLD-BACK', 'HOLD-BACK'].forEach(function (key) {
1116 if (event.attributes.hasOwnProperty(key)) {
1117 event.attributes[key] = parseFloat(event.attributes[key]);
1118 }
1119 });
1120 ['CAN-SKIP-DATERANGES', 'CAN-BLOCK-RELOAD'].forEach(function (key) {
1121 if (event.attributes.hasOwnProperty(key)) {
1122 event.attributes[key] = /YES/.test(event.attributes[key]);
1123 }
1124 });
1125
1126 _this2.trigger('data', event);
1127
1128 return;
1129 }
1130
1131 match = /^#EXT-X-PART-INF:(.*)$/.exec(newLine);
1132
1133 if (match && match[1]) {
1134 event = {
1135 type: 'tag',
1136 tagType: 'part-inf'
1137 };
1138 event.attributes = parseAttributes$1(match[1]);
1139 ['PART-TARGET'].forEach(function (key) {
1140 if (event.attributes.hasOwnProperty(key)) {
1141 event.attributes[key] = parseFloat(event.attributes[key]);
1142 }
1143 });
1144
1145 _this2.trigger('data', event);
1146
1147 return;
1148 }
1149
1150 match = /^#EXT-X-PRELOAD-HINT:(.*)$/.exec(newLine);
1151
1152 if (match && match[1]) {
1153 event = {
1154 type: 'tag',
1155 tagType: 'preload-hint'
1156 };
1157 event.attributes = parseAttributes$1(match[1]);
1158 ['BYTERANGE-START', 'BYTERANGE-LENGTH'].forEach(function (key) {
1159 if (event.attributes.hasOwnProperty(key)) {
1160 event.attributes[key] = parseInt(event.attributes[key], 10);
1161 var subkey = key === 'BYTERANGE-LENGTH' ? 'length' : 'offset';
1162 event.attributes.byterange = event.attributes.byterange || {};
1163 event.attributes.byterange[subkey] = event.attributes[key]; // only keep the parsed byterange object.
1164
1165 delete event.attributes[key];
1166 }
1167 });
1168
1169 _this2.trigger('data', event);
1170
1171 return;
1172 }
1173
1174 match = /^#EXT-X-RENDITION-REPORT:(.*)$/.exec(newLine);
1175
1176 if (match && match[1]) {
1177 event = {
1178 type: 'tag',
1179 tagType: 'rendition-report'
1180 };
1181 event.attributes = parseAttributes$1(match[1]);
1182 ['LAST-MSN', 'LAST-PART'].forEach(function (key) {
1183 if (event.attributes.hasOwnProperty(key)) {
1184 event.attributes[key] = parseInt(event.attributes[key], 10);
1185 }
1186 });
1187
1188 _this2.trigger('data', event);
1189
1190 return;
1191 } // unknown tag type
1192
1193
1194 _this2.trigger('data', {
1195 type: 'tag',
1196 data: newLine.slice(4)
1197 });
1198 });
1199 }
1200 /**
1201 * Add a parser for custom headers
1202 *
1203 * @param {Object} options a map of options for the added parser
1204 * @param {RegExp} options.expression a regular expression to match the custom header
1205 * @param {string} options.customType the custom type to register to the output
1206 * @param {Function} [options.dataParser] function to parse the line into an object
1207 * @param {boolean} [options.segment] should tag data be attached to the segment object
1208 */
1209 ;
1210
_proto.addParser = function addParser(_ref) {
  var stream = this;

  var expression = _ref.expression,
      customType = _ref.customType,
      dataParser = _ref.dataParser,
      segment = _ref.segment;

  // fall back to an identity parser when no data parser was supplied
  var parseData = typeof dataParser === 'function' ? dataParser : function (line) {
    return line;
  };

  // register a matcher that emits a 'custom' data event for matching lines
  this.customParsers.push(function (line) {
    if (expression.exec(line)) {
      stream.trigger('data', {
        type: 'custom',
        data: parseData(line),
        customType: customType,
        segment: segment
      });

      return true;
    }
  });
}
1240 /**
1241 * Add a custom header mapper
1242 *
1243 * @param {Object} options
1244 * @param {RegExp} options.expression a regular expression to match the custom header
1245 * @param {Function} options.map function to translate tag into a different tag
1246 */
1247 ;
1248
_proto.addTagMapper = function addTagMapper(_ref2) {
  var expression = _ref2.expression,
      map = _ref2.map;

  // matching lines are translated through `map`; all others pass through untouched
  this.tagMappers.push(function (line) {
    return expression.test(line) ? map(line) : line;
  });
};
1263
1264 return ParseStream;
1265 }(Stream);
1266
var camelCase = function camelCase(str) {
  // lower-case everything, then turn each "-x" sequence into an upper-case "X"
  var lowered = str.toLowerCase();

  return lowered.replace(/-(\w)/g, function (dashMatch) {
    return dashMatch[1].toUpperCase();
  });
};
1272
var camelCaseKeys = function camelCaseKeys(attributes) {
  // build a new object whose keys are camelCased copies of the input's keys
  var converted = {};

  Object.keys(attributes).forEach(function (attribute) {
    converted[camelCase(attribute)] = attributes[attribute];
  });

  return converted;
}; // set SERVER-CONTROL hold back based upon targetDuration and partTargetDuration
1280 // we need this helper because defaults are based upon targetDuration and
1281 // partTargetDuration being set, but they may not be if SERVER-CONTROL appears before
1282 // target durations are set.
1283
1284
var setHoldBack = function setHoldBack(manifest) {
  var serverControl = manifest.serverControl,
      targetDuration = manifest.targetDuration,
      partTargetDuration = manifest.partTargetDuration;

  // nothing to default or clamp until an EXT-X-SERVER-CONTROL tag has been seen
  if (!serverControl) {
    return;
  }

  var tag = '#EXT-X-SERVER-CONTROL';
  var minHoldBack = targetDuration && targetDuration * 3;
  var minPartHoldBack = partTargetDuration && partTargetDuration * 2;

  // default HOLD-BACK to targetDuration * 3 when it was not provided
  if (targetDuration && !serverControl.hasOwnProperty('holdBack')) {
    serverControl.holdBack = minHoldBack;
    this.trigger('info', {
      message: tag + " defaulting HOLD-BACK to targetDuration * 3 (" + minHoldBack + ")."
    });
  }

  // clamp a HOLD-BACK that is below the minimum
  if (minHoldBack && serverControl.holdBack < minHoldBack) {
    this.trigger('warn', {
      message: tag + " clamping HOLD-BACK (" + serverControl.holdBack + ") to targetDuration * 3 (" + minHoldBack + ")"
    });
    serverControl.holdBack = minHoldBack;
  }

  // default PART-HOLD-BACK to partTargetDuration * 3 when it was not provided
  if (partTargetDuration && !serverControl.hasOwnProperty('partHoldBack')) {
    serverControl.partHoldBack = partTargetDuration * 3;
    this.trigger('info', {
      message: tag + " defaulting PART-HOLD-BACK to partTargetDuration * 3 (" + serverControl.partHoldBack + ")."
    });
  }

  // clamp a PART-HOLD-BACK that is below the minimum
  if (partTargetDuration && serverControl.partHoldBack < minPartHoldBack) {
    this.trigger('warn', {
      message: tag + " clamping PART-HOLD-BACK (" + serverControl.partHoldBack + ") to partTargetDuration * 2 (" + minPartHoldBack + ")."
    });
    serverControl.partHoldBack = minPartHoldBack;
  }
};
1330 /**
1331 * A parser for M3U8 files. The current interpretation of the input is
1332 * exposed as a property `manifest` on parser objects. It's just two lines to
1333 * create and parse a manifest once you have the contents available as a string:
1334 *
1335 * ```js
1336 * var parser = new m3u8.Parser();
1337 * parser.push(xhr.responseText);
1338 * ```
1339 *
1340 * New input can later be applied to update the manifest object by calling
1341 * `push` again.
1342 *
1343 * The parser attempts to create a usable manifest object even if the
1344 * underlying input is somewhat nonsensical. It emits `info` and `warning`
1345 * events during the parse if it encounters input that seems invalid or
1346 * requires some property of the manifest object to be defaulted.
1347 *
1348 * @class Parser
1349 * @extends Stream
1350 */
1351
1352
var Parser = /*#__PURE__*/function (_Stream) {
  inheritsLoose(Parser, _Stream);

  function Parser() {
    var _this;

    _this = _Stream.call(this) || this;
    _this.lineStream = new LineStream();
    _this.parseStream = new ParseStream();

    _this.lineStream.pipe(_this.parseStream);
    /* eslint-disable consistent-this */


    var self = assertThisInitialized(_this);
    /* eslint-enable consistent-this */


    var uris = [];
    var currentUri = {}; // if specified, the active EXT-X-MAP definition

    var currentMap; // if specified, the active decryption key

    var _key;

    var hasParts = false;

    var noop = function noop() {};

    var defaultMediaGroups = {
      'AUDIO': {},
      'VIDEO': {},
      'CLOSED-CAPTIONS': {},
      'SUBTITLES': {}
    }; // This is the Widevine UUID from DASH IF IOP. The same exact string is
    // used in MPDs with Widevine encrypted streams.

    var widevineUuid = 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'; // group segments into numbered timelines delineated by discontinuities

    var currentTimeline = 0; // the manifest is empty until the parse stream begins delivering data

    _this.manifest = {
      allowCache: true,
      discontinuityStarts: [],
      segments: []
    }; // keep track of the last seen segment's byte range end, as segments are not required
    // to provide the offset, in which case it defaults to the next byte after the
    // previous segment

    var lastByterangeEnd = 0; // keep track of the last seen part's byte range end.

    var lastPartByterangeEnd = 0;

    _this.on('end', function () {
      // only add preloadSegment if we don't yet have a uri for it.
      // and we actually have parts/preloadHints
      if (currentUri.uri || !currentUri.parts && !currentUri.preloadHints) {
        return;
      }

      if (!currentUri.map && currentMap) {
        currentUri.map = currentMap;
      }

      if (!currentUri.key && _key) {
        currentUri.key = _key;
      }

      if (!currentUri.timeline && typeof currentTimeline === 'number') {
        currentUri.timeline = currentTimeline;
      }

      _this.manifest.preloadSegment = currentUri;
    }); // update the manifest with the m3u8 entry from the parse stream


    _this.parseStream.on('data', function (entry) {
      var mediaGroup;
      var rendition;
      ({
        tag: function tag() {
          // switch based on the tag type
          (({
            version: function version() {
              if (entry.version) {
                this.manifest.version = entry.version;
              }
            },
            'allow-cache': function allowCache() {
              this.manifest.allowCache = entry.allowed;

              if (!('allowed' in entry)) {
                this.trigger('info', {
                  message: 'defaulting allowCache to YES'
                });
                this.manifest.allowCache = true;
              }
            },
            byterange: function byterange() {
              var byterange = {};

              if ('length' in entry) {
                currentUri.byterange = byterange;
                byterange.length = entry.length;

                if (!('offset' in entry)) {
                  /*
                   * From the latest spec (as of this writing):
                   * https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.2
                   *
                   * Same text since EXT-X-BYTERANGE's introduction in draft 7:
                   * https://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.1)
                   *
                   * "If o [offset] is not present, the sub-range begins at the next byte
                   * following the sub-range of the previous media segment."
                   */
                  entry.offset = lastByterangeEnd;
                }
              }

              if ('offset' in entry) {
                currentUri.byterange = byterange;
                byterange.offset = entry.offset;
              }

              lastByterangeEnd = byterange.offset + byterange.length;
            },
            endlist: function endlist() {
              this.manifest.endList = true;
            },
            inf: function inf() {
              if (!('mediaSequence' in this.manifest)) {
                this.manifest.mediaSequence = 0;
                this.trigger('info', {
                  message: 'defaulting media sequence to zero'
                });
              }

              if (!('discontinuitySequence' in this.manifest)) {
                this.manifest.discontinuitySequence = 0;
                this.trigger('info', {
                  message: 'defaulting discontinuity sequence to zero'
                });
              }

              if (entry.duration > 0) {
                currentUri.duration = entry.duration;
              }

              if (entry.duration === 0) {
                currentUri.duration = 0.01;
                this.trigger('info', {
                  message: 'updating zero segment duration to a small value'
                });
              }

              this.manifest.segments = uris;
            },
            key: function key() {
              if (!entry.attributes) {
                this.trigger('warn', {
                  message: 'ignoring key declaration without attribute list'
                });
                return;
              } // clear the active encryption key


              if (entry.attributes.METHOD === 'NONE') {
                _key = null;
                return;
              }

              if (!entry.attributes.URI) {
                this.trigger('warn', {
                  message: 'ignoring key declaration without URI'
                });
                return;
              }

              if (entry.attributes.KEYFORMAT === 'com.apple.streamingkeydelivery') {
                this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.

                this.manifest.contentProtection['com.apple.fps.1_0'] = {
                  attributes: entry.attributes
                };
                return;
              }

              if (entry.attributes.KEYFORMAT === 'com.microsoft.playready') {
                this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.

                this.manifest.contentProtection['com.microsoft.playready'] = {
                  uri: entry.attributes.URI
                };
                return;
              } // check if the content is encrypted for Widevine
              // Widevine/HLS spec: https://storage.googleapis.com/wvdocs/Widevine_DRM_HLS.pdf


              if (entry.attributes.KEYFORMAT === widevineUuid) {
                var VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR', 'SAMPLE-AES-CENC'];

                if (VALID_METHODS.indexOf(entry.attributes.METHOD) === -1) {
                  this.trigger('warn', {
                    message: 'invalid key method provided for Widevine'
                  });
                  return;
                }

                if (entry.attributes.METHOD === 'SAMPLE-AES-CENC') {
                  this.trigger('warn', {
                    message: 'SAMPLE-AES-CENC is deprecated, please use SAMPLE-AES-CTR instead'
                  });
                }

                if (entry.attributes.URI.substring(0, 23) !== 'data:text/plain;base64,') {
                  this.trigger('warn', {
                    message: 'invalid key URI provided for Widevine'
                  });
                  return;
                }

                if (!(entry.attributes.KEYID && entry.attributes.KEYID.substring(0, 2) === '0x')) {
                  this.trigger('warn', {
                    message: 'invalid key ID provided for Widevine'
                  });
                  return;
                } // if Widevine key attributes are valid, store them as `contentProtection`
                // on the manifest to emulate Widevine tag structure in a DASH mpd


                this.manifest.contentProtection = this.manifest.contentProtection || {};
                this.manifest.contentProtection['com.widevine.alpha'] = {
                  attributes: {
                    schemeIdUri: entry.attributes.KEYFORMAT,
                    // remove '0x' from the key id string
                    keyId: entry.attributes.KEYID.substring(2)
                  },
                  // decode the base64-encoded PSSH box
                  pssh: decodeB64ToUint8Array(entry.attributes.URI.split(',')[1])
                };
                return;
              }

              if (!entry.attributes.METHOD) {
                this.trigger('warn', {
                  message: 'defaulting key method to AES-128'
                });
              } // setup an encryption key for upcoming segments


              _key = {
                method: entry.attributes.METHOD || 'AES-128',
                uri: entry.attributes.URI
              };

              if (typeof entry.attributes.IV !== 'undefined') {
                _key.iv = entry.attributes.IV;
              }
            },
            'media-sequence': function mediaSequence() {
              if (!isFinite(entry.number)) {
                this.trigger('warn', {
                  message: 'ignoring invalid media sequence: ' + entry.number
                });
                return;
              }

              this.manifest.mediaSequence = entry.number;
            },
            'discontinuity-sequence': function discontinuitySequence() {
              if (!isFinite(entry.number)) {
                this.trigger('warn', {
                  message: 'ignoring invalid discontinuity sequence: ' + entry.number
                });
                return;
              }

              this.manifest.discontinuitySequence = entry.number;
              currentTimeline = entry.number;
            },
            'playlist-type': function playlistType() {
              if (!/VOD|EVENT/.test(entry.playlistType)) {
                this.trigger('warn', {
                  // FIX: the warning previously concatenated `entry.playlist`,
                  // which the parse stream never sets, so the message always
                  // read "... type: undefined"; use the parsed playlistType.
                  message: 'ignoring unknown playlist type: ' + entry.playlistType
                });
                return;
              }

              this.manifest.playlistType = entry.playlistType;
            },
            map: function map() {
              currentMap = {};

              if (entry.uri) {
                currentMap.uri = entry.uri;
              }

              if (entry.byterange) {
                currentMap.byterange = entry.byterange;
              }

              if (_key) {
                currentMap.key = _key;
              }
            },
            'stream-inf': function streamInf() {
              this.manifest.playlists = uris;
              this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;

              if (!entry.attributes) {
                this.trigger('warn', {
                  message: 'ignoring empty stream-inf attributes'
                });
                return;
              }

              if (!currentUri.attributes) {
                currentUri.attributes = {};
              }

              _extends_1(currentUri.attributes, entry.attributes);
            },
            media: function media() {
              this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;

              if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
                this.trigger('warn', {
                  message: 'ignoring incomplete or missing media group'
                });
                return;
              } // find the media group, creating defaults as necessary


              var mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
              mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
              mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']]; // collect the rendition metadata

              rendition = {
                default: /yes/i.test(entry.attributes.DEFAULT)
              };

              if (rendition.default) {
                rendition.autoselect = true;
              } else {
                rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
              }

              if (entry.attributes.LANGUAGE) {
                rendition.language = entry.attributes.LANGUAGE;
              }

              if (entry.attributes.URI) {
                rendition.uri = entry.attributes.URI;
              }

              if (entry.attributes['INSTREAM-ID']) {
                rendition.instreamId = entry.attributes['INSTREAM-ID'];
              }

              if (entry.attributes.CHARACTERISTICS) {
                rendition.characteristics = entry.attributes.CHARACTERISTICS;
              }

              if (entry.attributes.FORCED) {
                rendition.forced = /yes/i.test(entry.attributes.FORCED);
              } // insert the new rendition


              mediaGroup[entry.attributes.NAME] = rendition;
            },
            discontinuity: function discontinuity() {
              currentTimeline += 1;
              currentUri.discontinuity = true;
              this.manifest.discontinuityStarts.push(uris.length);
            },
            'program-date-time': function programDateTime() {
              if (typeof this.manifest.dateTimeString === 'undefined') {
                // PROGRAM-DATE-TIME is a media-segment tag, but for backwards
                // compatibility, we add the first occurence of the PROGRAM-DATE-TIME tag
                // to the manifest object
                // TODO: Consider removing this in future major version
                this.manifest.dateTimeString = entry.dateTimeString;
                this.manifest.dateTimeObject = entry.dateTimeObject;
              }

              currentUri.dateTimeString = entry.dateTimeString;
              currentUri.dateTimeObject = entry.dateTimeObject;
            },
            targetduration: function targetduration() {
              if (!isFinite(entry.duration) || entry.duration < 0) {
                this.trigger('warn', {
                  message: 'ignoring invalid target duration: ' + entry.duration
                });
                return;
              }

              this.manifest.targetDuration = entry.duration;
              setHoldBack.call(this, this.manifest);
            },
            start: function start() {
              if (!entry.attributes || isNaN(entry.attributes['TIME-OFFSET'])) {
                this.trigger('warn', {
                  message: 'ignoring start declaration without appropriate attribute list'
                });
                return;
              }

              this.manifest.start = {
                timeOffset: entry.attributes['TIME-OFFSET'],
                precise: entry.attributes.PRECISE
              };
            },
            'cue-out': function cueOut() {
              currentUri.cueOut = entry.data;
            },
            'cue-out-cont': function cueOutCont() {
              currentUri.cueOutCont = entry.data;
            },
            'cue-in': function cueIn() {
              currentUri.cueIn = entry.data;
            },
            'skip': function skip() {
              this.manifest.skip = camelCaseKeys(entry.attributes);
              this.warnOnMissingAttributes_('#EXT-X-SKIP', entry.attributes, ['SKIPPED-SEGMENTS']);
            },
            'part': function part() {
              var _this2 = this;

              hasParts = true; // parts are always specifed before a segment

              var segmentIndex = this.manifest.segments.length;
              var part = camelCaseKeys(entry.attributes);
              currentUri.parts = currentUri.parts || [];
              currentUri.parts.push(part);

              if (part.byterange) {
                if (!part.byterange.hasOwnProperty('offset')) {
                  part.byterange.offset = lastPartByterangeEnd;
                }

                lastPartByterangeEnd = part.byterange.offset + part.byterange.length;
              }

              var partIndex = currentUri.parts.length - 1;
              this.warnOnMissingAttributes_("#EXT-X-PART #" + partIndex + " for segment #" + segmentIndex, entry.attributes, ['URI', 'DURATION']);

              if (this.manifest.renditionReports) {
                this.manifest.renditionReports.forEach(function (r, i) {
                  if (!r.hasOwnProperty('lastPart')) {
                    _this2.trigger('warn', {
                      message: "#EXT-X-RENDITION-REPORT #" + i + " lacks required attribute(s): LAST-PART"
                    });
                  }
                });
              }
            },
            'server-control': function serverControl() {
              var attrs = this.manifest.serverControl = camelCaseKeys(entry.attributes);

              if (!attrs.hasOwnProperty('canBlockReload')) {
                attrs.canBlockReload = false;
                this.trigger('info', {
                  message: '#EXT-X-SERVER-CONTROL defaulting CAN-BLOCK-RELOAD to false'
                });
              }

              setHoldBack.call(this, this.manifest);

              if (attrs.canSkipDateranges && !attrs.hasOwnProperty('canSkipUntil')) {
                this.trigger('warn', {
                  message: '#EXT-X-SERVER-CONTROL lacks required attribute CAN-SKIP-UNTIL which is required when CAN-SKIP-DATERANGES is set'
                });
              }
            },
            'preload-hint': function preloadHint() {
              // parts are always specifed before a segment
              var segmentIndex = this.manifest.segments.length;
              var hint = camelCaseKeys(entry.attributes);
              var isPart = hint.type && hint.type === 'PART';
              currentUri.preloadHints = currentUri.preloadHints || [];
              currentUri.preloadHints.push(hint);

              if (hint.byterange) {
                if (!hint.byterange.hasOwnProperty('offset')) {
                  // use last part byterange end or zero if not a part.
                  hint.byterange.offset = isPart ? lastPartByterangeEnd : 0;

                  if (isPart) {
                    lastPartByterangeEnd = hint.byterange.offset + hint.byterange.length;
                  }
                }
              }

              var index = currentUri.preloadHints.length - 1;
              this.warnOnMissingAttributes_("#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex, entry.attributes, ['TYPE', 'URI']);

              if (!hint.type) {
                return;
              } // search through all preload hints except for the current one for
              // a duplicate type.


              for (var i = 0; i < currentUri.preloadHints.length - 1; i++) {
                var otherHint = currentUri.preloadHints[i];

                if (!otherHint.type) {
                  continue;
                }

                if (otherHint.type === hint.type) {
                  this.trigger('warn', {
                    message: "#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex + " has the same TYPE " + hint.type + " as preload hint #" + i
                  });
                }
              }
            },
            'rendition-report': function renditionReport() {
              var report = camelCaseKeys(entry.attributes);
              this.manifest.renditionReports = this.manifest.renditionReports || [];
              this.manifest.renditionReports.push(report);
              var index = this.manifest.renditionReports.length - 1;
              var required = ['LAST-MSN', 'URI'];

              if (hasParts) {
                required.push('LAST-PART');
              }

              this.warnOnMissingAttributes_("#EXT-X-RENDITION-REPORT #" + index, entry.attributes, required);
            },
            'part-inf': function partInf() {
              this.manifest.partInf = camelCaseKeys(entry.attributes);
              this.warnOnMissingAttributes_('#EXT-X-PART-INF', entry.attributes, ['PART-TARGET']);

              if (this.manifest.partInf.partTarget) {
                this.manifest.partTargetDuration = this.manifest.partInf.partTarget;
              }

              setHoldBack.call(this, this.manifest);
            }
          })[entry.tagType] || noop).call(self);
        },
        uri: function uri() {
          currentUri.uri = entry.uri;
          uris.push(currentUri); // if no explicit duration was declared, use the target duration

          if (this.manifest.targetDuration && !('duration' in currentUri)) {
            this.trigger('warn', {
              message: 'defaulting segment duration to the target duration'
            });
            currentUri.duration = this.manifest.targetDuration;
          } // annotate with encryption information, if necessary


          if (_key) {
            currentUri.key = _key;
          }

          currentUri.timeline = currentTimeline; // annotate with initialization segment information, if necessary

          if (currentMap) {
            currentUri.map = currentMap;
          } // reset the last byterange end as it needs to be 0 between parts


          lastPartByterangeEnd = 0; // prepare for the next URI

          currentUri = {};
        },
        comment: function comment() {// comments are not important for playback
        },
        custom: function custom() {
          // if this is segment-level data attach the output to the segment
          if (entry.segment) {
            currentUri.custom = currentUri.custom || {};
            currentUri.custom[entry.customType] = entry.data; // if this is manifest-level data attach to the top level manifest object
          } else {
            this.manifest.custom = this.manifest.custom || {};
            this.manifest.custom[entry.customType] = entry.data;
          }
        }
      })[entry.type].call(self);
    });

    return _this;
  }

  var _proto = Parser.prototype;

  /**
   * Trigger a 'warn' event listing any of the `required` attribute names
   * missing from `attributes`.
   *
   * @param {string} identifier tag name (and index) used to label the warning
   * @param {Object} attributes parsed attribute map to check
   * @param {string[]} required attribute names that must be present
   */
  _proto.warnOnMissingAttributes_ = function warnOnMissingAttributes_(identifier, attributes, required) {
    var missing = [];
    required.forEach(function (key) {
      if (!attributes.hasOwnProperty(key)) {
        missing.push(key);
      }
    });

    if (missing.length) {
      this.trigger('warn', {
        message: identifier + " lacks required attribute(s): " + missing.join(', ')
      });
    }
  }
  /**
   * Parse the input string and update the manifest object.
   *
   * @param {string} chunk a potentially incomplete portion of the manifest
   */
  ;

  _proto.push = function push(chunk) {
    this.lineStream.push(chunk);
  }
  /**
   * Flush any remaining input. This can be handy if the last line of an M3U8
   * manifest did not contain a trailing newline but the file has been
   * completely received.
   */
  ;

  _proto.end = function end() {
    // flush any buffered input
    this.lineStream.push('\n');
    this.trigger('end');
  }
  /**
   * Add an additional parser for non-standard tags
   *
   * @param {Object} options a map of options for the added parser
   * @param {RegExp} options.expression a regular expression to match the custom header
   * @param {string} options.type the type to register to the output
   * @param {Function} [options.dataParser] function to parse the line into an object
   * @param {boolean} [options.segment] should tag data be attached to the segment object
   */
  ;

  _proto.addParser = function addParser(options) {
    this.parseStream.addParser(options);
  }
  /**
   * Add a custom header mapper
   *
   * @param {Object} options
   * @param {RegExp} options.expression a regular expression to match the custom header
   * @param {Function} options.map function to translate tag into a different tag
   */
  ;

  _proto.addTagMapper = function addTagMapper(options) {
    this.parseStream.addTagMapper(options);
  };

  return Parser;
}(Stream);
2007
// regexes used to map codec strings to containers, media types, and
// mux.js support; keys are consumed by the codec helpers below
var regexs = {
  // to determine mime types
  mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
  webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
  ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
  // to determine if a codec is audio or video
  video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
  audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
  text: /^(stpp.ttml.im1t)/,
  // mux.js support regex
  muxerVideo: /^(avc0?1)/,
  muxerAudio: /^(mp4a)/,
  // match nothing as muxer does not support text right now.
  // there can never be a character before the start of a string,
  // so this regex matches nothing.
  muxerText: /a^/
};
// media type names as they appear in parsed codec info
var mediaTypes = ['video', 'audio', 'text'];
// capitalized variants used to build the "muxer" keys of `regexs` above
var upperMediaTypes = ['Video', 'Audio', 'Text'];
2027 /**
2028 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
2029 * `avc1.<hhhhhh>`
2030 *
2031 * @param {string} codec
2032 * Codec string to translate
2033 * @return {string}
2034 * The translated codec string
2035 */
2036
var translateLegacyCodec = function translateLegacyCodec(codec) {
  // falsy input (undefined, null, '') is returned untouched
  if (!codec) {
    return codec;
  }

  // zero-pad a decimal value into two lowercase hex digits
  var toHexByte = function (value) {
    return ('00' + Number(value).toString(16)).slice(-2);
  };

  // rewrite the legacy `avc1.<dd>.<dd>` form as `avc1.<hh>00<hh>`
  return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (match, profile, level) {
    return 'avc1.' + toHexByte(profile) + '00' + toHexByte(level);
  });
};
2048 /**
2049 * @typedef {Object} ParsedCodecInfo
2050 * @property {number} codecCount
2051 * Number of codecs parsed
2052 * @property {string} [videoCodec]
2053 * Parsed video codec (if found)
2054 * @property {string} [videoObjectTypeIndicator]
2055 * Video object type indicator (if found)
2056 * @property {string|null} audioProfile
2057 * Audio profile
2058 */
2059
2060 /**
2061 * Parses a codec string to retrieve the number of codecs specified, the video codec and
2062 * object type indicator, and the audio profile.
2063 *
2064 * @param {string} [codecString]
2065 * The codec string to parse
2066 * @return {ParsedCodecInfo}
2067 * Parsed codec info
2068 */
2069
var parseCodecs = function parseCodecs(codecString) {
  if (codecString === void 0) {
    codecString = '';
  }

  var parsed = [];

  codecString.split(',').forEach(function (rawCodec) {
    var codec = rawCodec.trim();
    var recognized = false;

    mediaTypes.forEach(function (mediaType) {
      var match = regexs[mediaType].exec(codec.toLowerCase());

      if (!match || match.length <= 1) {
        return;
      }

      recognized = true; // maintain codec case

      var type = codec.substring(0, match[1].length);

      parsed.push({
        type: type,
        details: codec.replace(type, ''),
        mediaType: mediaType
      });
    });

    // anything that matched none of the media type regexes is "unknown"
    if (!recognized) {
      parsed.push({
        type: codec,
        details: '',
        mediaType: 'unknown'
      });
    }
  });

  return parsed;
};
2108 /**
2109 * Returns a ParsedCodecInfo object for the default alternate audio playlist if there is
2110 * a default alternate audio playlist for the provided audio group.
2111 *
2112 * @param {Object} master
2113 * The master playlist
2114 * @param {string} audioGroupId
2115 * ID of the audio group for which to find the default codec info
2116 * @return {ParsedCodecInfo}
2117 * Parsed codec info
2118 */
2119
var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {
  // no AUDIO media groups or no group id means no default to look up
  if (!master.mediaGroups.AUDIO || !audioGroupId) {
    return null;
  }

  var audioGroup = master.mediaGroups.AUDIO[audioGroupId];

  if (!audioGroup) {
    return null;
  }

  var names = Object.keys(audioGroup);

  for (var i = 0; i < names.length; i++) {
    var audioType = audioGroup[names[i]];

    if (audioType.default && audioType.playlists) {
      // codec should be the same for all playlists within the audio type
      return parseCodecs(audioType.playlists[0].attributes.CODECS);
    }
  }

  return null;
};
var isAudioCodec = function isAudioCodec(codec) {
  if (codec === void 0) {
    codec = '';
  }

  // normalize before matching so casing and padding don't matter
  var normalized = codec.trim().toLowerCase();

  return regexs.audio.test(normalized);
};
var isTextCodec = function isTextCodec(codec) {
  if (codec === void 0) {
    codec = '';
  }

  // normalize before matching so casing and padding don't matter
  var normalized = codec.trim().toLowerCase();

  return regexs.text.test(normalized);
};
var getMimeForCodec = function getMimeForCodec(codecString) {
  if (!codecString || typeof codecString !== 'string') {
    return;
  }

  var codecs = codecString.toLowerCase().split(',').map(function (c) {
    return translateLegacyCodec(c.trim());
  });

  // default to video type; only a single audio (or text) codec changes it
  var type = 'video';

  if (codecs.length === 1 && isAudioCodec(codecs[0])) {
    type = 'audio';
  } else if (codecs.length === 1 && isTextCodec(codecs[0])) {
    // text uses application/<container> for now
    type = 'application';
  }

  // a container is only valid when every codec can go into it
  var allFit = function (container) {
    return codecs.every(function (c) {
      return regexs[container].test(c);
    });
  };

  // default the container to mp4
  var container = 'mp4';

  if (allFit('mp4')) {
    container = 'mp4';
  } else if (allFit('webm')) {
    container = 'webm';
  } else if (allFit('ogg')) {
    container = 'ogg';
  }

  return type + "/" + container + ";codecs=\"" + codecString + "\"";
};
2195 var browserSupportsCodec = function browserSupportsCodec(codecString) {
2196 if (codecString === void 0) {
2197 codecString = '';
2198 }
2199
2200 return window.MediaSource && window.MediaSource.isTypeSupported && window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
2201 };
2202 var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
2203 if (codecString === void 0) {
2204 codecString = '';
2205 }
2206
2207 return codecString.toLowerCase().split(',').every(function (codec) {
2208 codec = codec.trim(); // any match is supported.
2209
2210 for (var i = 0; i < upperMediaTypes.length; i++) {
2211 var type = upperMediaTypes[i];
2212
2213 if (regexs["muxer" + type].test(codec)) {
2214 return true;
2215 }
2216 }
2217
2218 return false;
2219 });
2220 };
  // Fallback codec identifiers used when a rendition declares no CODECS attribute.
  var DEFAULT_AUDIO_CODEC = 'mp4a.40.2'; // AAC-LC
  var DEFAULT_VIDEO_CODEC = 'avc1.4d400d'; // H.264 Main profile, level 1.3
2223
  /**
   * ranges
   *
   * Utilities for working with TimeRanges.
   *
   */

  // Tolerance (in seconds) used when comparing media times. Comparisons between
  // time values such as current time and the end of the buffered range can be
  // misleading because of precision differences or when the current media has
  // poorly aligned audio and video, which can cause values to be slightly off
  // from what you would expect. One frame at 30fps is treated as close enough.
  var TIME_FUDGE_FACTOR = 1 / 30;
  // A wider tolerance (three fudge factors) considered safe for comparisons
  // that need extra slack, e.g. deciding whether a time is inside a range.
  var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
2238
2239 var filterRanges = function filterRanges(timeRanges, predicate) {
2240 var results = [];
2241 var i;
2242
2243 if (timeRanges && timeRanges.length) {
2244 // Search for ranges that match the predicate
2245 for (i = 0; i < timeRanges.length; i++) {
2246 if (predicate(timeRanges.start(i), timeRanges.end(i))) {
2247 results.push([timeRanges.start(i), timeRanges.end(i)]);
2248 }
2249 }
2250 }
2251
2252 return videojs__default["default"].createTimeRanges(results);
2253 };
2254 /**
2255 * Attempts to find the buffered TimeRange that contains the specified
2256 * time.
2257 *
2258 * @param {TimeRanges} buffered - the TimeRanges object to query
2259 * @param {number} time - the time to filter on.
2260 * @return {TimeRanges} a new TimeRanges object
2261 */
2262
2263
2264 var findRange = function findRange(buffered, time) {
2265 return filterRanges(buffered, function (start, end) {
2266 return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
2267 });
2268 };
2269 /**
2270 * Returns the TimeRanges that begin later than the specified time.
2271 *
2272 * @param {TimeRanges} timeRanges - the TimeRanges object to query
2273 * @param {number} time - the time to filter on.
2274 * @return {TimeRanges} a new TimeRanges object.
2275 */
2276
2277 var findNextRange = function findNextRange(timeRanges, time) {
2278 return filterRanges(timeRanges, function (start) {
2279 return start - TIME_FUDGE_FACTOR >= time;
2280 });
2281 };
2282 /**
2283 * Returns gaps within a list of TimeRanges
2284 *
2285 * @param {TimeRanges} buffered - the TimeRanges object
2286 * @return {TimeRanges} a TimeRanges object of gaps
2287 */
2288
2289 var findGaps = function findGaps(buffered) {
2290 if (buffered.length < 2) {
2291 return videojs__default["default"].createTimeRanges();
2292 }
2293
2294 var ranges = [];
2295
2296 for (var i = 1; i < buffered.length; i++) {
2297 var start = buffered.end(i - 1);
2298 var end = buffered.start(i);
2299 ranges.push([start, end]);
2300 }
2301
2302 return videojs__default["default"].createTimeRanges(ranges);
2303 };
  /**
   * Calculate the intersection of two TimeRanges using a sweep over all
   * range boundaries: every start/end is collected, sorted by time, and an
   * overlap counter ("arity") tracks how many ranges cover the sweep point.
   * Whenever arity reaches 2 an overlapping interval begins; when it drops
   * back to 1 the interval ends and is recorded.
   *
   * @param {TimeRanges} bufferA
   * @param {TimeRanges} bufferB
   * @return {TimeRanges} The intersection of `bufferA` with `bufferB`;
   *         empty when either input is missing or empty
   */

  var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
    var start = null;
    var end = null;
    var arity = 0; // number of ranges covering the current sweep position
    var extents = [];
    var ranges = [];

    if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
      return videojs__default["default"].createTimeRange();
    } // Handle the case where we have both buffers and create an
    // intersection of the two

    var count = bufferA.length; // A) Gather up all start and end times

    while (count--) {
      extents.push({
        time: bufferA.start(count),
        type: 'start'
      });
      extents.push({
        time: bufferA.end(count),
        type: 'end'
      });
    }

    count = bufferB.length;

    while (count--) {
      extents.push({
        time: bufferB.start(count),
        type: 'start'
      });
      extents.push({
        time: bufferB.end(count),
        type: 'end'
      });
    } // B) Sort them by time


    extents.sort(function (a, b) {
      return a.time - b.time;
    }); // C) Go along one by one incrementing arity for start and decrementing
    // arity for ends

    for (count = 0; count < extents.length; count++) {
      if (extents[count].type === 'start') {
        arity++; // D) If arity is ever incremented to 2 we are entering an
        // overlapping range

        if (arity === 2) {
          start = extents[count].time;
        }
      } else if (extents[count].type === 'end') {
        arity--; // E) If arity is ever decremented to 1 we are leaving an
        // overlapping range

        if (arity === 1) {
          end = extents[count].time;
        }
      } // F) Record overlapping ranges once both endpoints are known


      if (start !== null && end !== null) {
        ranges.push([start, end]);
        start = null;
        end = null;
      }
    }

    return videojs__default["default"].createTimeRanges(ranges);
  };
2384 /**
2385 * Gets a human readable string for a TimeRange
2386 *
2387 * @param {TimeRange} range
2388 * @return {string} a human readable string
2389 */
2390
2391 var printableRange = function printableRange(range) {
2392 var strArr = [];
2393
2394 if (!range || !range.length) {
2395 return '';
2396 }
2397
2398 for (var i = 0; i < range.length; i++) {
2399 strArr.push(range.start(i) + ' => ' + range.end(i));
2400 }
2401
2402 return strArr.join(', ');
2403 };
2404 /**
2405 * Calculates the amount of time left in seconds until the player hits the end of the
2406 * buffer and causes a rebuffer
2407 *
2408 * @param {TimeRange} buffered
2409 * The state of the buffer
2410 * @param {Numnber} currentTime
2411 * The current time of the player
2412 * @param {number} playbackRate
2413 * The current playback rate of the player. Defaults to 1.
2414 * @return {number}
2415 * Time until the player has to start rebuffering in seconds.
2416 * @function timeUntilRebuffer
2417 */
2418
2419 var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
2420 if (playbackRate === void 0) {
2421 playbackRate = 1;
2422 }
2423
2424 var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
2425 return (bufferedEnd - currentTime) / playbackRate;
2426 };
2427 /**
2428 * Converts a TimeRanges object into an array representation
2429 *
2430 * @param {TimeRanges} timeRanges
2431 * @return {Array}
2432 */
2433
2434 var timeRangesToArray = function timeRangesToArray(timeRanges) {
2435 var timeRangesList = [];
2436
2437 for (var i = 0; i < timeRanges.length; i++) {
2438 timeRangesList.push({
2439 start: timeRanges.start(i),
2440 end: timeRanges.end(i)
2441 });
2442 }
2443
2444 return timeRangesList;
2445 };
2446 /**
2447 * Determines if two time range objects are different.
2448 *
2449 * @param {TimeRange} a
2450 * the first time range object to check
2451 *
2452 * @param {TimeRange} b
2453 * the second time range object to check
2454 *
2455 * @return {Boolean}
2456 * Whether the time range objects differ
2457 */
2458
2459 var isRangeDifferent = function isRangeDifferent(a, b) {
2460 // same object
2461 if (a === b) {
2462 return false;
2463 } // one or the other is undefined
2464
2465
2466 if (!a && b || !b && a) {
2467 return true;
2468 } // length is different
2469
2470
2471 if (a.length !== b.length) {
2472 return true;
2473 } // see if any start/end pair is different
2474
2475
2476 for (var i = 0; i < a.length; i++) {
2477 if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
2478 return true;
2479 }
2480 } // if the length and every pair is the same
2481 // this is the same time range
2482
2483
2484 return false;
2485 };
2486 var lastBufferedEnd = function lastBufferedEnd(a) {
2487 if (!a || !a.length || !a.end) {
2488 return;
2489 }
2490
2491 return a.end(a.length - 1);
2492 };
2493 /**
2494 * A utility function to add up the amount of time in a timeRange
2495 * after a specified startTime.
2496 * ie:[[0, 10], [20, 40], [50, 60]] with a startTime 0
2497 * would return 40 as there are 40s seconds after 0 in the timeRange
2498 *
2499 * @param {TimeRange} range
2500 * The range to check against
2501 * @param {number} startTime
2502 * The time in the time range that you should start counting from
2503 *
2504 * @return {number}
2505 * The number of seconds in the buffer passed the specified time.
2506 */
2507
2508 var timeAheadOf = function timeAheadOf(range, startTime) {
2509 var time = 0;
2510
2511 if (!range || !range.length) {
2512 return time;
2513 }
2514
2515 for (var i = 0; i < range.length; i++) {
2516 var start = range.start(i);
2517 var end = range.end(i); // startTime is after this range entirely
2518
2519 if (startTime > end) {
2520 continue;
2521 } // startTime is within this range
2522
2523
2524 if (startTime > start && startTime <= end) {
2525 time += end - startTime;
2526 continue;
2527 } // startTime is before this range.
2528
2529
2530 time += end - start;
2531 }
2532
2533 return time;
2534 };
2535
  /**
   * @file playlist.js
   *
   * Playlist related utilities.
   */
  // Local alias for video.js's createTimeRange factory.
  var createTimeRange = videojs__default["default"].createTimeRange;
2542 /**
2543 * Get the duration of a segment, with special cases for
2544 * llhls segments that do not have a duration yet.
2545 *
2546 * @param {Object} playlist
2547 * the playlist that the segment belongs to.
2548 * @param {Object} segment
2549 * the segment to get a duration for.
2550 *
2551 * @return {number}
2552 * the segment duration
2553 */
2554
2555 var segmentDurationWithParts = function segmentDurationWithParts(playlist, segment) {
2556 // if this isn't a preload segment
2557 // then we will have a segment duration that is accurate.
2558 if (!segment.preload) {
2559 return segment.duration;
2560 } // otherwise we have to add up parts and preload hints
2561 // to get an up to date duration.
2562
2563
2564 var result = 0;
2565 (segment.parts || []).forEach(function (p) {
2566 result += p.duration;
2567 }); // for preload hints we have to use partTargetDuration
2568 // as they won't even have a duration yet.
2569
2570 (segment.preloadHints || []).forEach(function (p) {
2571 if (p.type === 'PART') {
2572 result += playlist.partTargetDuration;
2573 }
2574 });
2575 return result;
2576 };
2577 /**
2578 * A function to get a combined list of parts and segments with durations
2579 * and indexes.
2580 *
2581 * @param {Playlist} playlist the playlist to get the list for.
2582 *
2583 * @return {Array} The part/segment list.
2584 */
2585
2586 var getPartsAndSegments = function getPartsAndSegments(playlist) {
2587 return (playlist.segments || []).reduce(function (acc, segment, si) {
2588 if (segment.parts) {
2589 segment.parts.forEach(function (part, pi) {
2590 acc.push({
2591 duration: part.duration,
2592 segmentIndex: si,
2593 partIndex: pi,
2594 part: part,
2595 segment: segment
2596 });
2597 });
2598 } else {
2599 acc.push({
2600 duration: segment.duration,
2601 segmentIndex: si,
2602 partIndex: null,
2603 segment: segment,
2604 part: null
2605 });
2606 }
2607
2608 return acc;
2609 }, []);
2610 };
2611 var getLastParts = function getLastParts(media) {
2612 var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
2613 return lastSegment && lastSegment.parts || [];
2614 };
2615 var getKnownPartCount = function getKnownPartCount(_ref) {
2616 var preloadSegment = _ref.preloadSegment;
2617
2618 if (!preloadSegment) {
2619 return;
2620 }
2621
2622 var parts = preloadSegment.parts,
2623 preloadHints = preloadSegment.preloadHints;
2624 var partCount = (preloadHints || []).reduce(function (count, hint) {
2625 return count + (hint.type === 'PART' ? 1 : 0);
2626 }, 0);
2627 partCount += parts && parts.length ? parts.length : 0;
2628 return partCount;
2629 };
2630 /**
2631 * Get the number of seconds to delay from the end of a
2632 * live playlist.
2633 *
2634 * @param {Playlist} master the master playlist
2635 * @param {Playlist} media the media playlist
2636 * @return {number} the hold back in seconds.
2637 */
2638
2639 var liveEdgeDelay = function liveEdgeDelay(master, media) {
2640 if (media.endList) {
2641 return 0;
2642 } // dash suggestedPresentationDelay trumps everything
2643
2644
2645 if (master && master.suggestedPresentationDelay) {
2646 return master.suggestedPresentationDelay;
2647 }
2648
2649 var hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls first
2650
2651 if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
2652 return media.serverControl.partHoldBack;
2653 } else if (hasParts && media.partTargetDuration) {
2654 return media.partTargetDuration * 3; // finally look for full segment delays
2655 } else if (media.serverControl && media.serverControl.holdBack) {
2656 return media.serverControl.holdBack;
2657 } else if (media.targetDuration) {
2658 return media.targetDuration * 3;
2659 }
2660
2661 return 0;
2662 };
2663 /**
2664 * walk backward until we find a duration we can use
2665 * or return a failure
2666 *
2667 * @param {Playlist} playlist the playlist to walk through
2668 * @param {Number} endSequence the mediaSequence to stop walking on
2669 */
2670
2671 var backwardDuration = function backwardDuration(playlist, endSequence) {
2672 var result = 0;
2673 var i = endSequence - playlist.mediaSequence; // if a start time is available for segment immediately following
2674 // the interval, use it
2675
2676 var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
2677 // information that is earlier than endSequence
2678
2679 if (segment) {
2680 if (typeof segment.start !== 'undefined') {
2681 return {
2682 result: segment.start,
2683 precise: true
2684 };
2685 }
2686
2687 if (typeof segment.end !== 'undefined') {
2688 return {
2689 result: segment.end - segment.duration,
2690 precise: true
2691 };
2692 }
2693 }
2694
2695 while (i--) {
2696 segment = playlist.segments[i];
2697
2698 if (typeof segment.end !== 'undefined') {
2699 return {
2700 result: result + segment.end,
2701 precise: true
2702 };
2703 }
2704
2705 result += segmentDurationWithParts(playlist, segment);
2706
2707 if (typeof segment.start !== 'undefined') {
2708 return {
2709 result: result + segment.start,
2710 precise: true
2711 };
2712 }
2713 }
2714
2715 return {
2716 result: result,
2717 precise: false
2718 };
2719 };
2720 /**
2721 * walk forward until we find a duration we can use
2722 * or return a failure
2723 *
2724 * @param {Playlist} playlist the playlist to walk through
2725 * @param {number} endSequence the mediaSequence to stop walking on
2726 */
2727
2728
2729 var forwardDuration = function forwardDuration(playlist, endSequence) {
2730 var result = 0;
2731 var segment;
2732 var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
2733 // information
2734
2735 for (; i < playlist.segments.length; i++) {
2736 segment = playlist.segments[i];
2737
2738 if (typeof segment.start !== 'undefined') {
2739 return {
2740 result: segment.start - result,
2741 precise: true
2742 };
2743 }
2744
2745 result += segmentDurationWithParts(playlist, segment);
2746
2747 if (typeof segment.end !== 'undefined') {
2748 return {
2749 result: segment.end - result,
2750 precise: true
2751 };
2752 }
2753 } // indicate we didn't find a useful duration estimate
2754
2755
2756 return {
2757 result: -1,
2758 precise: false
2759 };
2760 };
2761 /**
2762 * Calculate the media duration from the segments associated with a
2763 * playlist. The duration of a subinterval of the available segments
2764 * may be calculated by specifying an end index.
2765 *
2766 * @param {Object} playlist a media playlist object
2767 * @param {number=} endSequence an exclusive upper boundary
2768 * for the playlist. Defaults to playlist length.
2769 * @param {number} expired the amount of time that has dropped
2770 * off the front of the playlist in a live scenario
2771 * @return {number} the duration between the first available segment
2772 * and end index.
2773 */
2774
2775
2776 var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
2777 if (typeof endSequence === 'undefined') {
2778 endSequence = playlist.mediaSequence + playlist.segments.length;
2779 }
2780
2781 if (endSequence < playlist.mediaSequence) {
2782 return 0;
2783 } // do a backward walk to estimate the duration
2784
2785
2786 var backward = backwardDuration(playlist, endSequence);
2787
2788 if (backward.precise) {
2789 // if we were able to base our duration estimate on timing
2790 // information provided directly from the Media Source, return
2791 // it
2792 return backward.result;
2793 } // walk forward to see if a precise duration estimate can be made
2794 // that way
2795
2796
2797 var forward = forwardDuration(playlist, endSequence);
2798
2799 if (forward.precise) {
2800 // we found a segment that has been buffered and so it's
2801 // position is known precisely
2802 return forward.result;
2803 } // return the less-precise, playlist-based duration estimate
2804
2805
2806 return backward.result + expired;
2807 };
2808 /**
2809 * Calculates the duration of a playlist. If a start and end index
2810 * are specified, the duration will be for the subset of the media
2811 * timeline between those two indices. The total duration for live
2812 * playlists is always Infinity.
2813 *
2814 * @param {Object} playlist a media playlist object
2815 * @param {number=} endSequence an exclusive upper
2816 * boundary for the playlist. Defaults to the playlist media
2817 * sequence number plus its length.
2818 * @param {number=} expired the amount of time that has
2819 * dropped off the front of the playlist in a live scenario
2820 * @return {number} the duration between the start index and end
2821 * index.
2822 */
2823
2824
2825 var duration = function duration(playlist, endSequence, expired) {
2826 if (!playlist) {
2827 return 0;
2828 }
2829
2830 if (typeof expired !== 'number') {
2831 expired = 0;
2832 } // if a slice of the total duration is not requested, use
2833 // playlist-level duration indicators when they're present
2834
2835
2836 if (typeof endSequence === 'undefined') {
2837 // if present, use the duration specified in the playlist
2838 if (playlist.totalDuration) {
2839 return playlist.totalDuration;
2840 } // duration should be Infinity for live playlists
2841
2842
2843 if (!playlist.endList) {
2844 return window.Infinity;
2845 }
2846 } // calculate the total duration based on the segment durations
2847
2848
2849 return intervalDuration(playlist, endSequence, expired);
2850 };
2851 /**
2852 * Calculate the time between two indexes in the current playlist
2853 * neight the start- nor the end-index need to be within the current
2854 * playlist in which case, the targetDuration of the playlist is used
2855 * to approximate the durations of the segments
2856 *
2857 * @param {Array} options.durationList list to iterate over for durations.
2858 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
2859 * @param {number} options.startIndex partsAndSegments index to start
2860 * @param {number} options.endIndex partsAndSegments index to end.
2861 * @return {number} the number of seconds between startIndex and endIndex
2862 */
2863
2864 var sumDurations = function sumDurations(_ref2) {
2865 var defaultDuration = _ref2.defaultDuration,
2866 durationList = _ref2.durationList,
2867 startIndex = _ref2.startIndex,
2868 endIndex = _ref2.endIndex;
2869 var durations = 0;
2870
2871 if (startIndex > endIndex) {
2872 var _ref3 = [endIndex, startIndex];
2873 startIndex = _ref3[0];
2874 endIndex = _ref3[1];
2875 }
2876
2877 if (startIndex < 0) {
2878 for (var i = startIndex; i < Math.min(0, endIndex); i++) {
2879 durations += defaultDuration;
2880 }
2881
2882 startIndex = 0;
2883 }
2884
2885 for (var _i = startIndex; _i < endIndex; _i++) {
2886 durations += durationList[_i].duration;
2887 }
2888
2889 return durations;
2890 };
2891 /**
2892 * Calculates the playlist end time
2893 *
2894 * @param {Object} playlist a media playlist object
2895 * @param {number=} expired the amount of time that has
2896 * dropped off the front of the playlist in a live scenario
2897 * @param {boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
2898 * playlist end calculation should consider the safe live end
2899 * (truncate the playlist end by three segments). This is normally
2900 * used for calculating the end of the playlist's seekable range.
2901 * This takes into account the value of liveEdgePadding.
2902 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
2903 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
2904 * If this is provided, it is used in the safe live end calculation.
2905 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
2906 * Corresponds to suggestedPresentationDelay in DASH manifests.
2907 * @return {number} the end time of playlist
2908 * @function playlistEnd
2909 */
2910
2911 var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
2912 if (!playlist || !playlist.segments) {
2913 return null;
2914 }
2915
2916 if (playlist.endList) {
2917 return duration(playlist);
2918 }
2919
2920 if (expired === null) {
2921 return null;
2922 }
2923
2924 expired = expired || 0;
2925 var lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);
2926
2927 if (useSafeLiveEnd) {
2928 liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
2929 lastSegmentEndTime -= liveEdgePadding;
2930 } // don't return a time less than zero
2931
2932
2933 return Math.max(0, lastSegmentEndTime);
2934 };
2935 /**
2936 * Calculates the interval of time that is currently seekable in a
2937 * playlist. The returned time ranges are relative to the earliest
2938 * moment in the specified playlist that is still available. A full
2939 * seekable implementation for live streams would need to offset
2940 * these values by the duration of content that has expired from the
2941 * stream.
2942 *
2943 * @param {Object} playlist a media playlist object
2944 * dropped off the front of the playlist in a live scenario
2945 * @param {number=} expired the amount of time that has
2946 * dropped off the front of the playlist in a live scenario
2947 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
2948 * Corresponds to suggestedPresentationDelay in DASH manifests.
2949 * @return {TimeRanges} the periods of time that are valid targets
2950 * for seeking
2951 */
2952
2953 var seekable = function seekable(playlist, expired, liveEdgePadding) {
2954 var useSafeLiveEnd = true;
2955 var seekableStart = expired || 0;
2956 var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
2957
2958 if (seekableEnd === null) {
2959 return createTimeRange();
2960 }
2961
2962 return createTimeRange(seekableStart, seekableEnd);
2963 };
  /**
   * Determine the index and estimated starting time of the segment (or part)
   * that contains a specified playback position in a media playlist.
   *
   * The search is anchored at (startingSegmentIndex, startingPartIndex) with
   * a known startTime, then walks backward or forward through the combined
   * part/segment list accumulating durations until the requested currentTime
   * is covered. Without exact manifest timings, TIME_FUDGE_FACTOR of slack
   * is allowed when deciding whether a segment contains the time.
   *
   * @param {Object} options
   * @param {Object} options.playlist the media playlist to query
   * @param {number} options.currentTime The number of seconds since the earliest
   *        possible position to determine the containing segment for
   * @param {number} options.startTime the time when the segment/part starts
   * @param {number} options.startingSegmentIndex the segment index to start looking at.
   * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
   * @param {boolean} [options.experimentalExactManifestTimings] when true, compare
   *        times exactly instead of allowing TIME_FUDGE_FACTOR of slack
   *
   * @return {Object} an object with partIndex, segmentIndex, and startTime.
   */

  var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
    var playlist = _ref4.playlist,
        currentTime = _ref4.currentTime,
        startingSegmentIndex = _ref4.startingSegmentIndex,
        startingPartIndex = _ref4.startingPartIndex,
        startTime = _ref4.startTime,
        experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
    // offset of the requested time from the anchor's start time;
    // negative means the target is before the anchor
    var time = currentTime - startTime;
    var partsAndSegments = getPartsAndSegments(playlist);
    var startIndex = 0;

    // locate the anchor entry in the flattened part/segment list
    for (var i = 0; i < partsAndSegments.length; i++) {
      var partAndSegment = partsAndSegments[i];

      if (startingSegmentIndex !== partAndSegment.segmentIndex) {
        continue;
      } // skip this if part index does not match.


      if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
        continue;
      }

      startIndex = i;
      break;
    }

    if (time < 0) {
      // Walk backward from startIndex in the playlist, adding durations
      // until we find a segment that contains `time` and return it
      if (startIndex > 0) {
        for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
          var _partAndSegment = partsAndSegments[_i2];
          time += _partAndSegment.duration;

          if (experimentalExactManifestTimings) {
            if (time < 0) {
              continue;
            }
          } else if (time + TIME_FUDGE_FACTOR <= 0) {
            continue;
          }

          return {
            partIndex: _partAndSegment.partIndex,
            segmentIndex: _partAndSegment.segmentIndex,
            startTime: startTime - sumDurations({
              defaultDuration: playlist.targetDuration,
              durationList: partsAndSegments,
              startIndex: startIndex,
              endIndex: _i2
            })
          };
        }
      } // We were unable to find a good segment within the playlist
      // so select the first segment


      return {
        partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
        segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
        startTime: currentTime
      };
    } // When startIndex is negative, we first walk forward to first segment
    // adding target durations. If we "run out of time" before getting to
    // the first segment, return the first segment


    if (startIndex < 0) {
      for (var _i3 = startIndex; _i3 < 0; _i3++) {
        time -= playlist.targetDuration;

        if (time < 0) {
          return {
            partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
            segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
            startTime: currentTime
          };
        }
      }

      startIndex = 0;
    } // Walk forward from startIndex in the playlist, subtracting durations
    // until we find a segment that contains `time` and return it


    for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
      var _partAndSegment2 = partsAndSegments[_i4];
      time -= _partAndSegment2.duration;

      if (experimentalExactManifestTimings) {
        if (time > 0) {
          continue;
        }
      } else if (time - TIME_FUDGE_FACTOR >= 0) {
        continue;
      }

      return {
        partIndex: _partAndSegment2.partIndex,
        segmentIndex: _partAndSegment2.segmentIndex,
        startTime: startTime + sumDurations({
          defaultDuration: playlist.targetDuration,
          durationList: partsAndSegments,
          startIndex: startIndex,
          endIndex: _i4
        })
      };
    } // We are out of possible candidates so load the last one...


    return {
      segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
      partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
      startTime: currentTime
    };
  };
3095 /**
3096 * Check whether the playlist is blacklisted or not.
3097 *
3098 * @param {Object} playlist the media playlist object
3099 * @return {boolean} whether the playlist is blacklisted or not
3100 * @function isBlacklisted
3101 */
3102
3103 var isBlacklisted = function isBlacklisted(playlist) {
3104 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
3105 };
3106 /**
3107 * Check whether the playlist is compatible with current playback configuration or has
3108 * been blacklisted permanently for being incompatible.
3109 *
3110 * @param {Object} playlist the media playlist object
3111 * @return {boolean} whether the playlist is incompatible or not
3112 * @function isIncompatible
3113 */
3114
3115 var isIncompatible = function isIncompatible(playlist) {
3116 return playlist.excludeUntil && playlist.excludeUntil === Infinity;
3117 };
3118 /**
3119 * Check whether the playlist is enabled or not.
3120 *
3121 * @param {Object} playlist the media playlist object
3122 * @return {boolean} whether the playlist is enabled or not
3123 * @function isEnabled
3124 */
3125
3126 var isEnabled = function isEnabled(playlist) {
3127 var blacklisted = isBlacklisted(playlist);
3128 return !playlist.disabled && !blacklisted;
3129 };
3130 /**
3131 * Check whether the playlist has been manually disabled through the representations api.
3132 *
3133 * @param {Object} playlist the media playlist object
3134 * @return {boolean} whether the playlist is disabled manually or not
3135 * @function isDisabled
3136 */
3137
3138 var isDisabled = function isDisabled(playlist) {
3139 return playlist.disabled;
3140 };
3141 /**
3142 * Returns whether the current playlist is an AES encrypted HLS stream
3143 *
3144 * @return {boolean} true if it's an AES encrypted HLS stream
3145 */
3146
3147 var isAes = function isAes(media) {
3148 for (var i = 0; i < media.segments.length; i++) {
3149 if (media.segments[i].key) {
3150 return true;
3151 }
3152 }
3153
3154 return false;
3155 };
3156 /**
3157 * Checks if the playlist has a value for the specified attribute
3158 *
3159 * @param {string} attr
3160 * Attribute to check for
3161 * @param {Object} playlist
3162 * The media playlist object
3163 * @return {boolean}
3164 * Whether the playlist contains a value for the attribute or not
3165 * @function hasAttribute
3166 */
3167
3168 var hasAttribute = function hasAttribute(attr, playlist) {
3169 return playlist.attributes && playlist.attributes[attr];
3170 };
3171 /**
3172 * Estimates the time required to complete a segment download from the specified playlist
3173 *
3174 * @param {number} segmentDuration
3175 * Duration of requested segment
3176 * @param {number} bandwidth
3177 * Current measured bandwidth of the player
3178 * @param {Object} playlist
3179 * The media playlist object
3180 * @param {number=} bytesReceived
3181 * Number of bytes already received for the request. Defaults to 0
3182 * @return {number|NaN}
3183 * The estimated time to request the segment. NaN if bandwidth information for
3184 * the given playlist is unavailable
3185 * @function estimateSegmentRequestTime
3186 */
3187
3188 var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
3189 if (bytesReceived === void 0) {
3190 bytesReceived = 0;
3191 }
3192
3193 if (!hasAttribute('BANDWIDTH', playlist)) {
3194 return NaN;
3195 }
3196
3197 var size = segmentDuration * playlist.attributes.BANDWIDTH;
3198 return (size - bytesReceived * 8) / bandwidth;
3199 };
3200 /*
3201 * Returns whether the current playlist is the lowest rendition
3202 *
3203 * @return {Boolean} true if on lowest rendition
3204 */
3205
3206 var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
3207 if (master.playlists.length === 1) {
3208 return true;
3209 }
3210
3211 var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
3212 return master.playlists.filter(function (playlist) {
3213 if (!isEnabled(playlist)) {
3214 return false;
3215 }
3216
3217 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
3218 }).length === 0;
3219 };
3220 var playlistMatch = function playlistMatch(a, b) {
3221 // both playlits are null
3222 // or only one playlist is non-null
3223 // no match
3224 if (!a && !b || !a && b || a && !b) {
3225 return false;
3226 } // playlist objects are the same, match
3227
3228
3229 if (a === b) {
3230 return true;
3231 } // first try to use id as it should be the most
3232 // accurate
3233
3234
3235 if (a.id && b.id && a.id === b.id) {
3236 return true;
3237 } // next try to use reslovedUri as it should be the
3238 // second most accurate.
3239
3240
3241 if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
3242 return true;
3243 } // finally try to use uri as it should be accurate
3244 // but might miss a few cases for relative uris
3245
3246
3247 if (a.uri && b.uri && a.uri === b.uri) {
3248 return true;
3249 }
3250
3251 return false;
3252 };
3253
3254 var someAudioVariant = function someAudioVariant(master, callback) {
3255 var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
3256 var found = false;
3257
3258 for (var groupName in AUDIO) {
3259 for (var label in AUDIO[groupName]) {
3260 found = callback(AUDIO[groupName][label]);
3261
3262 if (found) {
3263 break;
3264 }
3265 }
3266
3267 if (found) {
3268 break;
3269 }
3270 }
3271
3272 return !!found;
3273 };
3274
3275 var isAudioOnly = function isAudioOnly(master) {
3276 // we are audio only if we have no main playlists but do
3277 // have media group playlists.
3278 if (!master || !master.playlists || !master.playlists.length) {
3279 // without audio variants or playlists this
3280 // is not an audio only master.
3281 var found = someAudioVariant(master, function (variant) {
3282 return variant.playlists && variant.playlists.length || variant.uri;
3283 });
3284 return found;
3285 } // if every playlist has only an audio codec it is audio only
3286
3287
3288 var _loop = function _loop(i) {
3289 var playlist = master.playlists[i];
3290 var CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.
3291
3292 if (CODECS && CODECS.split(',').every(function (c) {
3293 return isAudioCodec(c);
3294 })) {
3295 return "continue";
3296 } // playlist is in an audio group it is audio only
3297
3298
3299 var found = someAudioVariant(master, function (variant) {
3300 return playlistMatch(playlist, variant);
3301 });
3302
3303 if (found) {
3304 return "continue";
3305 } // if we make it here this playlist isn't audio and we
3306 // are not audio only
3307
3308
3309 return {
3310 v: false
3311 };
3312 };
3313
3314 for (var i = 0; i < master.playlists.length; i++) {
3315 var _ret = _loop(i);
3316
3317 if (_ret === "continue") continue;
3318 if (typeof _ret === "object") return _ret.v;
3319 } // if we make it past every playlist without returning, then
3320 // this is an audio only playlist.
3321
3322
3323 return true;
3324 }; // exports
3325
  // Namespace object collecting the playlist helper functions defined above;
  // exposed publicly as `videojs.Vhs.Playlist`. All helpers are pure
  // functions over parsed master/media playlist objects.
  var Playlist = {
    liveEdgeDelay: liveEdgeDelay,
    duration: duration,
    seekable: seekable,
    getMediaInfoForTime: getMediaInfoForTime,
    isEnabled: isEnabled,
    isDisabled: isDisabled,
    isBlacklisted: isBlacklisted,
    isIncompatible: isIncompatible,
    playlistEnd: playlistEnd,
    isAes: isAes,
    hasAttribute: hasAttribute,
    estimateSegmentRequestTime: estimateSegmentRequestTime,
    isLowestEnabledRendition: isLowestEnabledRendition,
    isAudioOnly: isAudioOnly,
    playlistMatch: playlistMatch,
    segmentDurationWithParts: segmentDurationWithParts
  };
3344
  // video.js logger used for manifest parsing warnings/errors below
  var log = videojs__default["default"].log;
3346 var createPlaylistID = function createPlaylistID(index, uri) {
3347 return index + "-" + uri;
3348 };
3349 /**
3350 * Parses a given m3u8 playlist
3351 *
3352 * @param {Function} [onwarn]
3353 * a function to call when the parser triggers a warning event.
3354 * @param {Function} [oninfo]
3355 * a function to call when the parser triggers an info event.
3356 * @param {string} manifestString
3357 * The downloaded manifest string
3358 * @param {Object[]} [customTagParsers]
3359 * An array of custom tag parsers for the m3u8-parser instance
3360 * @param {Object[]} [customTagMappers]
3361 * An array of custom tag mappers for the m3u8-parser instance
3362 * @param {boolean} [experimentalLLHLS=false]
3363 * Whether to keep ll-hls features in the manifest after parsing.
3364 * @return {Object}
3365 * The manifest object
3366 */
3367
3368 var parseManifest = function parseManifest(_ref) {
3369 var onwarn = _ref.onwarn,
3370 oninfo = _ref.oninfo,
3371 manifestString = _ref.manifestString,
3372 _ref$customTagParsers = _ref.customTagParsers,
3373 customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
3374 _ref$customTagMappers = _ref.customTagMappers,
3375 customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
3376 experimentalLLHLS = _ref.experimentalLLHLS;
3377 var parser = new Parser();
3378
3379 if (onwarn) {
3380 parser.on('warn', onwarn);
3381 }
3382
3383 if (oninfo) {
3384 parser.on('info', oninfo);
3385 }
3386
3387 customTagParsers.forEach(function (customParser) {
3388 return parser.addParser(customParser);
3389 });
3390 customTagMappers.forEach(function (mapper) {
3391 return parser.addTagMapper(mapper);
3392 });
3393 parser.push(manifestString);
3394 parser.end();
3395 var manifest = parser.manifest; // remove llhls features from the parsed manifest
3396 // if we don't want llhls support.
3397
3398 if (!experimentalLLHLS) {
3399 ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
3400 if (manifest.hasOwnProperty(k)) {
3401 delete manifest[k];
3402 }
3403 });
3404
3405 if (manifest.segments) {
3406 manifest.segments.forEach(function (segment) {
3407 ['parts', 'preloadHints'].forEach(function (k) {
3408 if (segment.hasOwnProperty(k)) {
3409 delete segment[k];
3410 }
3411 });
3412 });
3413 }
3414 }
3415
3416 if (!manifest.targetDuration) {
3417 var targetDuration = 10;
3418
3419 if (manifest.segments && manifest.segments.length) {
3420 targetDuration = manifest.segments.reduce(function (acc, s) {
3421 return Math.max(acc, s.duration);
3422 }, 0);
3423 }
3424
3425 if (onwarn) {
3426 onwarn("manifest has no targetDuration defaulting to " + targetDuration);
3427 }
3428
3429 manifest.targetDuration = targetDuration;
3430 }
3431
3432 var parts = getLastParts(manifest);
3433
3434 if (parts.length && !manifest.partTargetDuration) {
3435 var partTargetDuration = parts.reduce(function (acc, p) {
3436 return Math.max(acc, p.duration);
3437 }, 0);
3438
3439 if (onwarn) {
3440 onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
3441 log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
3442 }
3443
3444 manifest.partTargetDuration = partTargetDuration;
3445 }
3446
3447 return manifest;
3448 };
3449 /**
3450 * Loops through all supported media groups in master and calls the provided
3451 * callback for each group
3452 *
3453 * @param {Object} master
3454 * The parsed master manifest object
3455 * @param {Function} callback
3456 * Callback to call for each media group
3457 */
3458
3459 var forEachMediaGroup$1 = function forEachMediaGroup(master, callback) {
3460 if (!master.mediaGroups) {
3461 return;
3462 }
3463
3464 ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
3465 if (!master.mediaGroups[mediaType]) {
3466 return;
3467 }
3468
3469 for (var groupKey in master.mediaGroups[mediaType]) {
3470 for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
3471 var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
3472 callback(mediaProperties, mediaType, groupKey, labelKey);
3473 }
3474 }
3475 });
3476 };
3477 /**
3478 * Adds properties and attributes to the playlist to keep consistent functionality for
3479 * playlists throughout VHS.
3480 *
3481 * @param {Object} config
3482 * Arguments object
3483 * @param {Object} config.playlist
3484 * The media playlist
3485 * @param {string} [config.uri]
3486 * The uri to the media playlist (if media playlist is not from within a master
3487 * playlist)
3488 * @param {string} id
3489 * ID to use for the playlist
3490 */
3491
3492 var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
3493 var playlist = _ref2.playlist,
3494 uri = _ref2.uri,
3495 id = _ref2.id;
3496 playlist.id = id;
3497 playlist.playlistErrors_ = 0;
3498
3499 if (uri) {
3500 // For media playlists, m3u8-parser does not have access to a URI, as HLS media
3501 // playlists do not contain their own source URI, but one is needed for consistency in
3502 // VHS.
3503 playlist.uri = uri;
3504 } // For HLS master playlists, even though certain attributes MUST be defined, the
3505 // stream may still be played without them.
3506 // For HLS media playlists, m3u8-parser does not attach an attributes object to the
3507 // manifest.
3508 //
3509 // To avoid undefined reference errors through the project, and make the code easier
3510 // to write/read, add an empty attributes object for these cases.
3511
3512
3513 playlist.attributes = playlist.attributes || {};
3514 };
3515 /**
3516 * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
3517 * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
3518 * playlist references to the playlists array.
3519 *
3520 * @param {Object} master
3521 * The master playlist
3522 */
3523
3524 var setupMediaPlaylists = function setupMediaPlaylists(master) {
3525 var i = master.playlists.length;
3526
3527 while (i--) {
3528 var playlist = master.playlists[i];
3529 setupMediaPlaylist({
3530 playlist: playlist,
3531 id: createPlaylistID(i, playlist.uri)
3532 });
3533 playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
3534 master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility
3535
3536 master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
3537 // the stream can be played without it. Although an attributes property may have been
3538 // added to the playlist to prevent undefined references, issue a warning to fix the
3539 // manifest.
3540
3541 if (!playlist.attributes.BANDWIDTH) {
3542 log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
3543 }
3544 }
3545 };
3546 /**
3547 * Adds resolvedUri properties to each media group.
3548 *
3549 * @param {Object} master
3550 * The master playlist
3551 */
3552
3553 var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
3554 forEachMediaGroup$1(master, function (properties) {
3555 if (properties.uri) {
3556 properties.resolvedUri = resolveUrl(master.uri, properties.uri);
3557 }
3558 });
3559 };
3560 /**
3561 * Creates a master playlist wrapper to insert a sole media playlist into.
3562 *
3563 * @param {Object} media
3564 * Media playlist
3565 * @param {string} uri
3566 * The media URI
3567 *
3568 * @return {Object}
3569 * Master playlist
3570 */
3571
3572 var masterForMedia = function masterForMedia(media, uri) {
3573 var id = createPlaylistID(0, uri);
3574 var master = {
3575 mediaGroups: {
3576 'AUDIO': {},
3577 'VIDEO': {},
3578 'CLOSED-CAPTIONS': {},
3579 'SUBTITLES': {}
3580 },
3581 uri: window.location.href,
3582 resolvedUri: window.location.href,
3583 playlists: [{
3584 uri: uri,
3585 id: id,
3586 resolvedUri: uri,
3587 // m3u8-parser does not attach an attributes property to media playlists so make
3588 // sure that the property is attached to avoid undefined reference errors
3589 attributes: {}
3590 }]
3591 }; // set up ID reference
3592
3593 master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility
3594
3595 master.playlists[uri] = master.playlists[0];
3596 return master;
3597 };
3598 /**
3599 * Does an in-place update of the master manifest to add updated playlist URI references
3600 * as well as other properties needed by VHS that aren't included by the parser.
3601 *
3602 * @param {Object} master
3603 * Master manifest object
3604 * @param {string} uri
3605 * The source URI
3606 */
3607
3608 var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
3609 master.uri = uri;
3610
3611 for (var i = 0; i < master.playlists.length; i++) {
3612 if (!master.playlists[i].uri) {
3613 // Set up phony URIs for the playlists since playlists are referenced by their URIs
3614 // throughout VHS, but some formats (e.g., DASH) don't have external URIs
3615 // TODO: consider adding dummy URIs in mpd-parser
3616 var phonyUri = "placeholder-uri-" + i;
3617 master.playlists[i].uri = phonyUri;
3618 }
3619 }
3620
3621 var audioOnlyMaster = isAudioOnly(master);
3622 forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
3623 var groupId = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey; // add a playlist array under properties
3624
3625 if (!properties.playlists || !properties.playlists.length) {
3626 // If the manifest is audio only and this media group does not have a uri, check
3627 // if the media group is located in the main list of playlists. If it is, don't add
3628 // placeholder properties as it shouldn't be considered an alternate audio track.
3629 if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
3630 for (var _i = 0; _i < master.playlists.length; _i++) {
3631 var p = master.playlists[_i];
3632
3633 if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
3634 return;
3635 }
3636 }
3637 }
3638
3639 properties.playlists = [_extends_1({}, properties)];
3640 }
3641
3642 properties.playlists.forEach(function (p, i) {
3643 var id = createPlaylistID(i, groupId);
3644
3645 if (p.uri) {
3646 p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
3647 } else {
3648 // DEPRECATED, this has been added to prevent a breaking change.
3649 // previously we only ever had a single media group playlist, so
3650 // we mark the first playlist uri without prepending the index as we used to
3651 // ideally we would do all of the playlists the same way.
3652 p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
3653 // the placeholder again
3654
3655 p.resolvedUri = p.uri;
3656 }
3657
3658 p.id = p.id || id; // add an empty attributes object, all playlists are
3659 // expected to have this.
3660
3661 p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)
3662
3663 master.playlists[p.id] = p;
3664 master.playlists[p.uri] = p;
3665 });
3666 });
3667 setupMediaPlaylists(master);
3668 resolveMediaGroupUris(master);
3669 };
3670
  // video.js utilities used by the playlist loader below: deep option
  // merging and the EventTarget base class
  var mergeOptions$2 = videojs__default["default"].mergeOptions,
      EventTarget$1 = videojs__default["default"].EventTarget;
3673
3674 var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
3675 if (media.endList || !media.serverControl) {
3676 return uri;
3677 }
3678
3679 var parameters = {};
3680
3681 if (media.serverControl.canBlockReload) {
3682 var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.
3683
3684 var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
3685 // that we are going to request a part of that preload segment.
3686 // the logic below is used to determine that.
3687
3688 if (preloadSegment) {
3689 var parts = preloadSegment.parts || []; // _HLS_part is a zero based index
3690
3691 var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
3692 // length of parts, then we know we had part preload hints
3693 // and we need to add the _HLS_part= query
3694
3695 if (nextPart > -1 && nextPart !== parts.length - 1) {
3696 // add existing parts to our preload hints
3697 // eslint-disable-next-line
3698 parameters._HLS_part = nextPart;
3699 } // this if statement makes sure that we request the msn
3700 // of the preload segment if:
3701 // 1. the preload segment had parts (and was not yet a full segment)
3702 // but was added to our segments array
3703 // 2. the preload segment had preload hints for parts that are not in
3704 // the manifest yet.
3705 // in all other cases we want the segment after the preload segment
3706 // which will be given by using media.segments.length because it is 1 based
3707 // rather than 0 based.
3708
3709
3710 if (nextPart > -1 || parts.length) {
3711 nextMSN--;
3712 }
3713 } // add _HLS_msn= in front of any _HLS_part query
3714 // eslint-disable-next-line
3715
3716
3717 parameters._HLS_msn = nextMSN;
3718 }
3719
3720 if (media.serverControl && media.serverControl.canSkipUntil) {
3721 // add _HLS_skip= infront of all other queries.
3722 // eslint-disable-next-line
3723 parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
3724 }
3725
3726 if (Object.keys(parameters).length) {
3727 var parsedUri = new window.URL(uri);
3728 ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
3729 if (!parameters.hasOwnProperty(name)) {
3730 return;
3731 }
3732
3733 parsedUri.searchParams.set(name, parameters[name]);
3734 });
3735 uri = parsedUri.toString();
3736 }
3737
3738 return uri;
3739 };
3740 /**
3741 * Returns a new segment object with properties and
3742 * the parts array merged.
3743 *
3744 * @param {Object} a the old segment
3745 * @param {Object} b the new segment
3746 *
3747 * @return {Object} the merged segment
3748 */
3749
3750
3751 var updateSegment = function updateSegment(a, b) {
3752 if (!a) {
3753 return b;
3754 }
3755
3756 var result = mergeOptions$2(a, b); // if only the old segment has preload hints
3757 // and the new one does not, remove preload hints.
3758
3759 if (a.preloadHints && !b.preloadHints) {
3760 delete result.preloadHints;
3761 } // if only the old segment has parts
3762 // then the parts are no longer valid
3763
3764
3765 if (a.parts && !b.parts) {
3766 delete result.parts; // if both segments have parts
3767 // copy part propeties from the old segment
3768 // to the new one.
3769 } else if (a.parts && b.parts) {
3770 for (var i = 0; i < b.parts.length; i++) {
3771 if (a.parts && a.parts[i]) {
3772 result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
3773 }
3774 }
3775 } // set skipped to false for segments that have
3776 // have had information merged from the old segment.
3777
3778
3779 if (!a.skipped && b.skipped) {
3780 result.skipped = false;
3781 } // set preload to false for segments that have
3782 // had information added in the new segment.
3783
3784
3785 if (a.preload && !b.preload) {
3786 result.preload = false;
3787 }
3788
3789 return result;
3790 };
3791 /**
3792 * Returns a new array of segments that is the result of merging
3793 * properties from an older list of segments onto an updated
3794 * list. No properties on the updated playlist will be ovewritten.
3795 *
3796 * @param {Array} original the outdated list of segments
3797 * @param {Array} update the updated list of segments
3798 * @param {number=} offset the index of the first update
3799 * segment in the original segment list. For non-live playlists,
3800 * this should always be zero and does not need to be
3801 * specified. For live playlists, it should be the difference
3802 * between the media sequence numbers in the original and updated
3803 * playlists.
3804 * @return {Array} a list of merged segment objects
3805 */
3806
3807 var updateSegments = function updateSegments(original, update, offset) {
3808 var oldSegments = original.slice();
3809 var newSegments = update.slice();
3810 offset = offset || 0;
3811 var result = [];
3812 var currentMap;
3813
3814 for (var newIndex = 0; newIndex < newSegments.length; newIndex++) {
3815 var oldSegment = oldSegments[newIndex + offset];
3816 var newSegment = newSegments[newIndex];
3817
3818 if (oldSegment) {
3819 currentMap = oldSegment.map || currentMap;
3820 result.push(updateSegment(oldSegment, newSegment));
3821 } else {
3822 // carry over map to new segment if it is missing
3823 if (currentMap && !newSegment.map) {
3824 newSegment.map = currentMap;
3825 }
3826
3827 result.push(newSegment);
3828 }
3829 }
3830
3831 return result;
3832 };
3833 var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
3834 // preloadSegment will not have a uri at all
3835 // as the segment isn't actually in the manifest yet, only parts
3836 if (!segment.resolvedUri && segment.uri) {
3837 segment.resolvedUri = resolveUrl(baseUri, segment.uri);
3838 }
3839
3840 if (segment.key && !segment.key.resolvedUri) {
3841 segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
3842 }
3843
3844 if (segment.map && !segment.map.resolvedUri) {
3845 segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
3846 }
3847
3848 if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
3849 segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
3850 }
3851
3852 if (segment.parts && segment.parts.length) {
3853 segment.parts.forEach(function (p) {
3854 if (p.resolvedUri) {
3855 return;
3856 }
3857
3858 p.resolvedUri = resolveUrl(baseUri, p.uri);
3859 });
3860 }
3861
3862 if (segment.preloadHints && segment.preloadHints.length) {
3863 segment.preloadHints.forEach(function (p) {
3864 if (p.resolvedUri) {
3865 return;
3866 }
3867
3868 p.resolvedUri = resolveUrl(baseUri, p.uri);
3869 });
3870 }
3871 };
3872
3873 var getAllSegments = function getAllSegments(media) {
3874 var segments = media.segments || [];
3875 var preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently
3876 // a usable segment, only include a preloadSegment that has
3877 // parts.
3878
3879 if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
3880 // if preloadHints has a MAP that means that the
3881 // init segment is going to change. We cannot use any of the parts
3882 // from this preload segment.
3883 if (preloadSegment.preloadHints) {
3884 for (var i = 0; i < preloadSegment.preloadHints.length; i++) {
3885 if (preloadSegment.preloadHints[i].type === 'MAP') {
3886 return segments;
3887 }
3888 }
3889 } // set the duration for our preload segment to target duration.
3890
3891
3892 preloadSegment.duration = media.targetDuration;
3893 preloadSegment.preload = true;
3894 segments.push(preloadSegment);
3895 }
3896
3897 return segments;
3898 }; // consider the playlist unchanged if the playlist object is the same or
3899 // the number of segments is equal, the media sequence number is unchanged,
3900 // and this playlist hasn't become the end of the playlist
3901
3902
3903 var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
3904 return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;
3905 };
3906 /**
3907 * Returns a new master playlist that is the result of merging an
3908 * updated media playlist into the original version. If the
3909 * updated media playlist does not match any of the playlist
3910 * entries in the original master playlist, null is returned.
3911 *
3912 * @param {Object} master a parsed master M3U8 object
3913 * @param {Object} media a parsed media M3U8 object
3914 * @return {Object} a new object that represents the original
3915 * master playlist with the updated media playlist merged in, or
3916 * null if the merge produced no change.
3917 */
3918
3919 var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
3920 if (unchangedCheck === void 0) {
3921 unchangedCheck = isPlaylistUnchanged;
3922 }
3923
3924 var result = mergeOptions$2(master, {});
3925 var oldMedia = result.playlists[newMedia.id];
3926
3927 if (!oldMedia) {
3928 return null;
3929 }
3930
3931 if (unchangedCheck(oldMedia, newMedia)) {
3932 return null;
3933 }
3934
3935 newMedia.segments = getAllSegments(newMedia);
3936 var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment
3937
3938 if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
3939 delete mergedPlaylist.preloadSegment;
3940 } // if the update could overlap existing segment information, merge the two segment lists
3941
3942
3943 if (oldMedia.segments) {
3944 if (newMedia.skip) {
3945 newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
3946 // old properties into the new segments
3947
3948 for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
3949 newMedia.segments.unshift({
3950 skipped: true
3951 });
3952 }
3953 }
3954
3955 mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
3956 } // resolve any segment URIs to prevent us from having to do it later
3957
3958
3959 mergedPlaylist.segments.forEach(function (segment) {
3960 resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
3961 }); // TODO Right now in the playlists array there are two references to each playlist, one
3962 // that is referenced by index, and one by URI. The index reference may no longer be
3963 // necessary.
3964
3965 for (var _i = 0; _i < result.playlists.length; _i++) {
3966 if (result.playlists[_i].id === newMedia.id) {
3967 result.playlists[_i] = mergedPlaylist;
3968 }
3969 }
3970
3971 result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility
3972
3973 result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.
3974
3975 forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
3976 if (!properties.playlists) {
3977 return;
3978 }
3979
3980 for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
3981 if (newMedia.id === properties.playlists[_i2].id) {
3982 properties.playlists[_i2] = mergedPlaylist;
3983 }
3984 }
3985 });
3986 return result;
3987 };
3988 /**
3989 * Calculates the time to wait before refreshing a live playlist
3990 *
3991 * @param {Object} media
3992 * The current media
3993 * @param {boolean} update
3994 * True if there were any updates from the last refresh, false otherwise
3995 * @return {number}
3996 * The time in ms to wait before refreshing the live playlist
3997 */
3998
3999 var refreshDelay = function refreshDelay(media, update) {
4000 var segments = media.segments || [];
4001 var lastSegment = segments[segments.length - 1];
4002 var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
4003 var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
4004
4005 if (update && lastDuration) {
4006 return lastDuration * 1000;
4007 } // if the playlist is unchanged since the last reload or last segment duration
4008 // cannot be determined, try again after half the target duration
4009
4010
4011 return (media.partTargetDuration || media.targetDuration || 10) * 500;
4012 };
4013 /**
4014 * Load a playlist from a remote location
4015 *
   * @class PlaylistLoader
   * @extends Stream
   * @param {string|Object} src url or object of manifest
   * @param {Object} vhs the VHS instance, providing `xhr` and `options_`
   * @param {Object} [options] loader options, including `withCredentials` and `handleManifestRedirects`
4021 */
4022
4023 var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
4024 inheritsLoose(PlaylistLoader, _EventTarget);
4025
    /**
     * Create a PlaylistLoader.
     *
     * @param {string|Object} src playlist URL or manifest object (required)
     * @param {Object} vhs the VHS instance; `vhs.options_` supplies custom tag
     *        parsers/mappers and the experimentalLLHLS flag
     * @param {Object} [options] loader options
     * @param {boolean} [options.withCredentials=false] send credentials with
     *        playlist requests
     * @param {boolean} [options.handleManifestRedirects=false] follow
     *        manifest redirects when resolving URIs
     */
    function PlaylistLoader(src, vhs, options) {
      var _this;

      if (options === void 0) {
        options = {};
      }

      _this = _EventTarget.call(this) || this;

      if (!src) {
        throw new Error('A non-empty playlist URL or object is required');
      }

      _this.logger_ = logger('PlaylistLoader');
      var _options = options,
          _options$withCredenti = _options.withCredentials,
          withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
          _options$handleManife = _options.handleManifestRedirects,
          handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
      _this.src = src;
      _this.vhs_ = vhs;
      _this.withCredentials = withCredentials;
      _this.handleManifestRedirects = handleManifestRedirects;
      var vhsOptions = vhs.options_;
      _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
      _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
      _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // LL-HLS is not supported on IE 11, so disable it there

      if (videojs__default["default"].browser.IE_VERSION) {
        _this.experimentalLLHLS = false;
      } // initialize the loader state


      _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout

      _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(assertThisInitialized(_this));

      _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);

      return _this;
    }
4067
4068 var _proto = PlaylistLoader.prototype;
4069
4070 _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
4071 var _this2 = this;
4072
4073 if (this.state !== 'HAVE_METADATA') {
4074 // only refresh the media playlist if no other activity is going on
4075 return;
4076 }
4077
4078 var media = this.media();
4079 var uri = resolveUrl(this.master.uri, media.uri);
4080
4081 if (this.experimentalLLHLS) {
4082 uri = addLLHLSQueryDirectives(uri, media);
4083 }
4084
4085 this.state = 'HAVE_CURRENT_METADATA';
4086 this.request = this.vhs_.xhr({
4087 uri: uri,
4088 withCredentials: this.withCredentials
4089 }, function (error, req) {
4090 // disposed
4091 if (!_this2.request) {
4092 return;
4093 }
4094
4095 if (error) {
4096 return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
4097 }
4098
4099 _this2.haveMetadata({
4100 playlistString: _this2.request.responseText,
4101 url: _this2.media().uri,
4102 id: _this2.media().id
4103 });
4104 });
4105 };
4106
4107 _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
4108 var uri = playlist.uri,
4109 id = playlist.id; // any in-flight request is now finished
4110
4111 this.request = null;
4112
4113 if (startingState) {
4114 this.state = startingState;
4115 }
4116
4117 this.error = {
4118 playlist: this.master.playlists[id],
4119 status: xhr.status,
4120 message: "HLS playlist request error at URL: " + uri + ".",
4121 responseText: xhr.responseText,
4122 code: xhr.status >= 500 ? 4 : 2
4123 };
4124 this.trigger('error');
4125 };
4126
4127 _proto.parseManifest_ = function parseManifest_(_ref) {
4128 var _this3 = this;
4129
4130 var url = _ref.url,
4131 manifestString = _ref.manifestString;
4132 return parseManifest({
4133 onwarn: function onwarn(_ref2) {
4134 var message = _ref2.message;
4135 return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
4136 },
4137 oninfo: function oninfo(_ref3) {
4138 var message = _ref3.message;
4139 return _this3.logger_("m3u8-parser info for " + url + ": " + message);
4140 },
4141 manifestString: manifestString,
4142 customTagParsers: this.customTagParsers,
4143 customTagMappers: this.customTagMappers,
4144 experimentalLLHLS: this.experimentalLLHLS
4145 });
4146 }
4147 /**
4148 * Update the playlist loader's state in response to a new or updated playlist.
4149 *
4150 * @param {string} [playlistString]
4151 * Playlist string (if playlistObject is not provided)
4152 * @param {Object} [playlistObject]
4153 * Playlist object (if playlistString is not provided)
4154 * @param {string} url
4155 * URL of playlist
4156 * @param {string} id
4157 * ID to use for playlist
4158 */
4159 ;
4160
4161 _proto.haveMetadata = function haveMetadata(_ref4) {
4162 var playlistString = _ref4.playlistString,
4163 playlistObject = _ref4.playlistObject,
4164 url = _ref4.url,
4165 id = _ref4.id;
4166 // any in-flight request is now finished
4167 this.request = null;
4168 this.state = 'HAVE_METADATA';
4169 var playlist = playlistObject || this.parseManifest_({
4170 url: url,
4171 manifestString: playlistString
4172 });
4173 playlist.lastRequest = Date.now();
4174 setupMediaPlaylist({
4175 playlist: playlist,
4176 uri: url,
4177 id: id
4178 }); // merge this playlist into the master
4179
4180 var update = updateMaster$1(this.master, playlist);
4181 this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
4182 this.pendingMedia_ = null;
4183
4184 if (update) {
4185 this.master = update;
4186 this.media_ = this.master.playlists[id];
4187 } else {
4188 this.trigger('playlistunchanged');
4189 }
4190
4191 this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
4192 this.trigger('loadedplaylist');
4193 }
4194 /**
4195 * Abort any outstanding work and clean up.
4196 */
4197 ;
4198
4199 _proto.dispose = function dispose() {
4200 this.trigger('dispose');
4201 this.stopRequest();
4202 window.clearTimeout(this.mediaUpdateTimeout);
4203 window.clearTimeout(this.finalRenditionTimeout);
4204 this.off();
4205 };
4206
4207 _proto.stopRequest = function stopRequest() {
4208 if (this.request) {
4209 var oldRequest = this.request;
4210 this.request = null;
4211 oldRequest.onreadystatechange = null;
4212 oldRequest.abort();
4213 }
4214 }
4215 /**
4216 * When called without any arguments, returns the currently
4217 * active media playlist. When called with a single argument,
4218 * triggers the playlist loader to asynchronously switch to the
4219 * specified media playlist. Calling this method while the
4220 * loader is in the HAVE_NOTHING causes an error to be emitted
4221 * but otherwise has no effect.
4222 *
4223 * @param {Object=} playlist the parsed media playlist
4224 * object to switch to
4225 * @param {boolean=} shouldDelay whether we should delay the request by half target duration
4226 *
4227 * @return {Playlist} the current loaded media
4228 */
4229 ;
4230
  _proto.media = function media(playlist, shouldDelay) {
    var _this4 = this;

    // getter
    if (!playlist) {
      return this.media_;
    } // setter


    if (this.state === 'HAVE_NOTHING') {
      throw new Error('Cannot switch media playlist from ' + this.state);
    } // find the playlist object if the target playlist has been
    // specified by URI


    if (typeof playlist === 'string') {
      if (!this.master.playlists[playlist]) {
        throw new Error('Unknown playlist URI: ' + playlist);
      }

      playlist = this.master.playlists[playlist];
    }

    window.clearTimeout(this.finalRenditionTimeout);

    if (shouldDelay) {
      // delay the switch by half the (part) target duration, falling back to
      // 5 seconds when the playlist declares no target duration
      var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
      this.finalRenditionTimeout = window.setTimeout(this.media.bind(this, playlist, false), delay);
      return;
    }

    var startingState = this.state;
    var mediaChange = !this.media_ || playlist.id !== this.media_.id;
    var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately

    if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
    // media playlist or, for the case of demuxed audio, a resolved audio media group)
    playlist.endList && playlist.segments.length) {
      // abort outstanding playlist requests
      if (this.request) {
        this.request.onreadystatechange = null;
        this.request.abort();
        this.request = null;
      }

      this.state = 'HAVE_METADATA';
      this.media_ = playlist; // trigger media change if the active media has been updated

      if (mediaChange) {
        this.trigger('mediachanging');

        if (startingState === 'HAVE_MASTER') {
          // The initial playlist was a master manifest, and the first media selected was
          // also provided (in the form of a resolved playlist object) as part of the
          // source object (rather than just a URL). Therefore, since the media playlist
          // doesn't need to be requested, loadedmetadata won't trigger as part of the
          // normal flow, and needs an explicit trigger here.
          this.trigger('loadedmetadata');
        } else {
          this.trigger('mediachange');
        }
      }

      return;
    } // We update/set the timeout here so that live playlists
    // that are not a media change will "start" the loader as expected.
    // We expect that this function will start the media update timeout
    // cycle again. This also prevents a playlist switch failure from
    // causing us to stall during live.


    this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op

    if (!mediaChange) {
      return;
    }

    this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request

    if (this.request) {
      if (playlist.resolvedUri === this.request.url) {
        // requesting to switch to the same playlist multiple times
        // has no effect after the first
        return;
      }

      this.request.onreadystatechange = null;
      this.request.abort();
      this.request = null;
    } // request the new playlist


    if (this.media_) {
      this.trigger('mediachanging');
    }

    this.pendingMedia_ = playlist;
    this.request = this.vhs_.xhr({
      uri: playlist.resolvedUri,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this4.request) {
        return;
      }

      playlist.lastRequest = Date.now();
      // the request may have been redirected; record the final URI
      playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);

      if (error) {
        return _this4.playlistRequestError(_this4.request, playlist, startingState);
      }

      _this4.haveMetadata({
        playlistString: req.responseText,
        url: playlist.uri,
        id: playlist.id
      }); // fire loadedmetadata the first time a media playlist is loaded


      if (startingState === 'HAVE_MASTER') {
        _this4.trigger('loadedmetadata');
      } else {
        _this4.trigger('mediachange');
      }
    });
  }
4358 /**
4359 * pause loading of the playlist
4360 */
4361 ;
4362
4363 _proto.pause = function pause() {
4364 if (this.mediaUpdateTimeout) {
4365 window.clearTimeout(this.mediaUpdateTimeout);
4366 this.mediaUpdateTimeout = null;
4367 }
4368
4369 this.stopRequest();
4370
4371 if (this.state === 'HAVE_NOTHING') {
4372 // If we pause the loader before any data has been retrieved, its as if we never
4373 // started, so reset to an unstarted state.
4374 this.started = false;
4375 } // Need to restore state now that no activity is happening
4376
4377
4378 if (this.state === 'SWITCHING_MEDIA') {
4379 // if the loader was in the process of switching media, it should either return to
4380 // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
4381 // playlist yet. This is determined by the existence of loader.media_
4382 if (this.media_) {
4383 this.state = 'HAVE_METADATA';
4384 } else {
4385 this.state = 'HAVE_MASTER';
4386 }
4387 } else if (this.state === 'HAVE_CURRENT_METADATA') {
4388 this.state = 'HAVE_METADATA';
4389 }
4390 }
4391 /**
4392 * start loading of the playlist
4393 */
4394 ;
4395
4396 _proto.load = function load(shouldDelay) {
4397 var _this5 = this;
4398
4399 if (this.mediaUpdateTimeout) {
4400 window.clearTimeout(this.mediaUpdateTimeout);
4401 this.mediaUpdateTimeout = null;
4402 }
4403
4404 var media = this.media();
4405
4406 if (shouldDelay) {
4407 var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
4408 this.mediaUpdateTimeout = window.setTimeout(function () {
4409 _this5.mediaUpdateTimeout = null;
4410
4411 _this5.load();
4412 }, delay);
4413 return;
4414 }
4415
4416 if (!this.started) {
4417 this.start();
4418 return;
4419 }
4420
4421 if (media && !media.endList) {
4422 this.trigger('mediaupdatetimeout');
4423 } else {
4424 this.trigger('loadedplaylist');
4425 }
4426 };
4427
4428 _proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
4429 var _this6 = this;
4430
4431 if (this.mediaUpdateTimeout) {
4432 window.clearTimeout(this.mediaUpdateTimeout);
4433 this.mediaUpdateTimeout = null;
4434 } // we only have use mediaupdatetimeout for live playlists.
4435
4436
4437 if (!this.media() || this.media().endList) {
4438 return;
4439 }
4440
4441 this.mediaUpdateTimeout = window.setTimeout(function () {
4442 _this6.mediaUpdateTimeout = null;
4443
4444 _this6.trigger('mediaupdatetimeout');
4445
4446 _this6.updateMediaUpdateTimeout_(delay);
4447 }, delay);
4448 }
4449 /**
4450 * start loading of the playlist
4451 */
4452 ;
4453
  _proto.start = function start() {
    var _this7 = this;

    this.started = true;

    if (typeof this.src === 'object') {
      // in the case of an entirely constructed manifest object (meaning there's no actual
      // manifest on a server), default the uri to the page's href
      if (!this.src.uri) {
        this.src.uri = window.location.href;
      } // resolvedUri is added on internally after the initial request. Since there's no
      // request for pre-resolved manifests, add on resolvedUri here.


      this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
      // request can be skipped (since the top level of the manifest, at a minimum, is
      // already available as a parsed manifest object). However, if the manifest object
      // represents a master playlist, some media playlists may need to be resolved before
      // the starting segment list is available. Therefore, go directly to setup of the
      // initial playlist, and let the normal flow continue from there.
      //
      // Note that the call to setup is asynchronous, as other sections of VHS may assume
      // that the first request is asynchronous.

      setTimeout(function () {
        _this7.setupInitialPlaylist(_this7.src);
      }, 0);
      return;
    } // request the specified URL


    this.request = this.vhs_.xhr({
      uri: this.src,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this7.request) {
        return;
      } // clear the loader's request reference


      _this7.request = null;

      if (error) {
        _this7.error = {
          status: req.status,
          message: "HLS playlist request error at URL: " + _this7.src + ".",
          responseText: req.responseText,
          // MEDIA_ERR_NETWORK
          code: 2
        };

        // an error before anything loaded means the loader never started
        if (_this7.state === 'HAVE_NOTHING') {
          _this7.started = false;
        }

        return _this7.trigger('error');
      }

      // the request may have been redirected; track the final URL as the src
      _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);

      var manifest = _this7.parseManifest_({
        manifestString: req.responseText,
        url: _this7.src
      });

      _this7.setupInitialPlaylist(manifest);
    });
  };
4523
4524 _proto.srcUri = function srcUri() {
4525 return typeof this.src === 'string' ? this.src : this.src.uri;
4526 }
4527 /**
4528 * Given a manifest object that's either a master or media playlist, trigger the proper
4529 * events and set the state of the playlist loader.
4530 *
4531 * If the manifest object represents a master playlist, `loadedplaylist` will be
4532 * triggered to allow listeners to select a playlist. If none is selected, the loader
4533 * will default to the first one in the playlists array.
4534 *
4535 * If the manifest object represents a media playlist, `loadedplaylist` will be
4536 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
4537 *
4538 * In the case of a media playlist, a master playlist object wrapper with one playlist
4539 * will be created so that all logic can handle playlists in the same fashion (as an
4540 * assumed manifest object schema).
4541 *
4542 * @param {Object} manifest
4543 * The parsed manifest object
4544 */
4545 ;
4546
  _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
    // Seed the loader from a freshly parsed manifest: either a master
    // playlist (has a playlists array) or a lone media playlist (wrapped in
    // a synthetic single-playlist master below).
    this.state = 'HAVE_MASTER';

    if (manifest.playlists) {
      this.master = manifest;
      addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
      // then resolve URIs in advance, as they are usually done after a playlist request,
      // which may not happen if the playlist is resolved.

      manifest.playlists.forEach(function (playlist) {
        playlist.segments = getAllSegments(playlist);
        playlist.segments.forEach(function (segment) {
          resolveSegmentUris(segment, playlist.resolvedUri);
        });
      });
      this.trigger('loadedplaylist');

      if (!this.request) {
        // no media playlist was specifically selected so start
        // from the first listed one
        this.media(this.master.playlists[0]);
      }

      return;
    } // In order to support media playlists passed in as vhs-json, the case where the uri
    // is not provided as part of the manifest should be considered, and an appropriate
    // default used.


    var uri = this.srcUri() || window.location.href;
    this.master = masterForMedia(manifest, uri);
    this.haveMetadata({
      playlistObject: manifest,
      url: uri,
      id: this.master.playlists[0].id
    });
    this.trigger('loadedmetadata');
  };
4585
4586 return PlaylistLoader;
4587 }(EventTarget$1);
4588
4589 /**
4590 * @file xhr.js
4591 */
4592 var videojsXHR = videojs__default["default"].xhr,
4593 mergeOptions$1 = videojs__default["default"].mergeOptions;
4594
4595 var callbackWrapper = function callbackWrapper(request, error, response, callback) {
4596 var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
4597
4598 if (!error && reqResponse) {
4599 request.responseTime = Date.now();
4600 request.roundTripTime = request.responseTime - request.requestTime;
4601 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
4602
4603 if (!request.bandwidth) {
4604 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
4605 }
4606 }
4607
4608 if (response.headers) {
4609 request.responseHeaders = response.headers;
4610 } // videojs.xhr now uses a specific code on the error
4611 // object to signal that a request has timed out instead
4612 // of setting a boolean on the request object
4613
4614
4615 if (error && error.code === 'ETIMEDOUT') {
4616 request.timedout = true;
4617 } // videojs.xhr no longer considers status codes outside of 200 and 0
4618 // (for file uris) to be errors, but the old XHR did, so emulate that
4619 // behavior. Status 206 may be used in response to byterange requests.
4620
4621
4622 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
4623 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
4624 }
4625
4626 callback(error, request);
4627 };
4628
  var xhrFactory = function xhrFactory() {
    // Build the VHS xhr wrapper around videojs.xhr (or a user override on
    // videojs.Vhs.xhr), tagging every request with uri/requestTime and
    // funneling completions through callbackWrapper.
    var xhr = function XhrFunction(options, callback) {
      // Add a default timeout
      options = mergeOptions$1({
        timeout: 45e3
      }, options); // Allow an optional user-specified function to modify the option
      // object before we construct the xhr request

      var beforeRequest = XhrFunction.beforeRequest || videojs__default["default"].Vhs.xhr.beforeRequest;

      if (beforeRequest && typeof beforeRequest === 'function') {
        var newOptions = beforeRequest(options);

        if (newOptions) {
          options = newOptions;
        }
      } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overriden
      // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11


      var xhrMethod = videojs__default["default"].Vhs.xhr.original === true ? videojsXHR : videojs__default["default"].Vhs.xhr;
      // `request` is closed over by the completion handler so callbackWrapper
      // can annotate the same request object that is returned to the caller
      var request = xhrMethod(options, function (error, response) {
        return callbackWrapper(request, error, response, callback);
      });
      var originalAbort = request.abort;

      request.abort = function () {
        // flag aborts so callbackWrapper doesn't treat them as status errors
        request.aborted = true;
        return originalAbort.apply(request, arguments);
      };

      request.uri = options.uri;
      request.requestTime = Date.now();
      return request;
    };

    // marks this as the unwrapped default, checked via `.original` above
    xhr.original = true;
    return xhr;
  };
4668 /**
4669 * Turns segment byterange into a string suitable for use in
4670 * HTTP Range requests
4671 *
4672 * @param {Object} byterange - an object with two values defining the start and end
4673 * of a byte-range
4674 */
4675
4676
4677 var byterangeStr = function byterangeStr(byterange) {
4678 // `byterangeEnd` is one less than `offset + length` because the HTTP range
4679 // header uses inclusive ranges
4680 var byterangeEnd;
4681 var byterangeStart = byterange.offset;
4682
4683 if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
4684 byterangeEnd = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);
4685 } else {
4686 byterangeEnd = byterange.offset + byterange.length - 1;
4687 }
4688
4689 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
4690 };
4691 /**
4692 * Defines headers for use in the xhr request for a particular segment.
4693 *
4694 * @param {Object} segment - a simplified copy of the segmentInfo object
4695 * from SegmentLoader
4696 */
4697
4698 var segmentXhrHeaders = function segmentXhrHeaders(segment) {
4699 var headers = {};
4700
4701 if (segment.byterange) {
4702 headers.Range = byterangeStr(segment.byterange);
4703 }
4704
4705 return headers;
4706 };
4707
4708 var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
4709 var DASH_REGEX = /^application\/dash\+xml/i;
4710 /**
4711 * Returns a string that describes the type of source based on a video source object's
4712 * media type.
4713 *
4714 * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
4715 *
4716 * @param {string} type
4717 * Video source object media type
4718 * @return {('hls'|'dash'|'vhs-json'|null)}
4719 * VHS source type string
4720 */
4721
4722 var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
4723 if (MPEGURL_REGEX.test(type)) {
4724 return 'hls';
4725 }
4726
4727 if (DASH_REGEX.test(type)) {
4728 return 'dash';
4729 } // Denotes the special case of a manifest object passed to http-streaming instead of a
4730 // source URL.
4731 //
4732 // See https://en.wikipedia.org/wiki/Media_type for details on specifying media types.
4733 //
4734 // In this case, vnd stands for vendor, video.js for the organization, VHS for this
4735 // project, and the +json suffix identifies the structure of the media type.
4736
4737
4738 if (type === 'application/vnd.videojs.vhs+json') {
4739 return 'vhs-json';
4740 }
4741
4742 return null;
4743 };
4744
4745 // const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
4746 // we used to do this with log2 but BigInt does not support builtin math
4747 // Math.ceil(log2(x));
4748
4749
4750 var countBits = function countBits(x) {
4751 return x.toString(2).length;
4752 }; // count the number of whole bytes it would take to represent a number
4753
4754 var countBytes = function countBytes(x) {
4755 return Math.ceil(countBits(x) / 8);
4756 };
4757 var isArrayBufferView = function isArrayBufferView(obj) {
4758 if (ArrayBuffer.isView === 'function') {
4759 return ArrayBuffer.isView(obj);
4760 }
4761
4762 return obj && obj.buffer instanceof ArrayBuffer;
4763 };
4764 var isTypedArray = function isTypedArray(obj) {
4765 return isArrayBufferView(obj);
4766 };
4767 var toUint8 = function toUint8(bytes) {
4768 if (bytes instanceof Uint8Array) {
4769 return bytes;
4770 }
4771
4772 if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
4773 // any non-number or NaN leads to empty uint8array
4774 // eslint-disable-next-line
4775 if (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {
4776 bytes = 0;
4777 } else {
4778 bytes = [bytes];
4779 }
4780 }
4781
4782 return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);
4783 };
  // BigInt where supported, otherwise Number (precision is then limited to
  // 2^53 - 1). Note this deliberately shadows the global BigInt below.
  var BigInt = window.BigInt || Number;
  // Powers of 2^8: BYTE_TABLE[i] is the place value of byte i (little-endian
  // index) when converting between byte arrays and numbers.
  var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
  (function () {
    // Endianness probe: write 0xFFCC as a 16-bit value and inspect the first
    // byte. NOTE(review): the return value is discarded here — the bundler
    // appears to have dropped the variable this originally initialized,
    // leaving only a side-effect-free call.
    var a = new Uint16Array([0xFFCC]);
    var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);

    if (b[0] === 0xFF) {
      return 'big';
    }

    if (b[0] === 0xCC) {
      return 'little';
    }

    return 'unknown';
  })();
  // Convert a byte sequence into a Number. `le` selects little-endian byte
  // order; `signed` interprets the result as a two's-complement-style signed
  // value. Uses BigInt arithmetic (via the shim above) to avoid precision
  // loss during accumulation, converting back to Number at the end.
  var bytesToNumber = function bytesToNumber(bytes, _temp) {
    var _ref = _temp === void 0 ? {} : _temp,
        _ref$signed = _ref.signed,
        signed = _ref$signed === void 0 ? false : _ref$signed,
        _ref$le = _ref.le,
        le = _ref$le === void 0 ? false : _ref$le;

    bytes = toUint8(bytes);
    // accumulate left-to-right for little-endian, right-to-left otherwise
    var fn = le ? 'reduce' : 'reduceRight';
    // fall back to Array.prototype when the typed array lacks reduce/reduceRight
    var obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
    var number = obj.call(bytes, function (total, byte, i) {
      // exponent is the byte's little-endian position in the sequence
      var exponent = le ? i : Math.abs(i + 1 - bytes.length);
      return total + BigInt(byte) * BYTE_TABLE[exponent];
    }, BigInt(0));

    if (signed) {
      // values above the signed maximum wrap around to negative
      var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
      number = BigInt(number);

      if (number > max) {
        number -= max;
        number -= max;
        number -= BigInt(2);
      }
    }

    return Number(number);
  };
  // Convert a Number or BigInt into its minimal byte representation.
  // `le` selects little-endian byte order.
  var numberToBytes = function numberToBytes(number, _temp2) {
    var _ref2 = _temp2 === void 0 ? {} : _temp2,
        _ref2$le = _ref2.le,
        le = _ref2$le === void 0 ? false : _ref2$le; // eslint-disable-next-line


    // non-numeric input and NaN are treated as zero
    if (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {
      number = 0;
    }

    number = BigInt(number);
    var byteCount = countBytes(number);
    var bytes = new Uint8Array(new ArrayBuffer(byteCount));

    for (var i = 0; i < byteCount; i++) {
      // write byte i at the front for little-endian, at the back otherwise
      var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
      bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));

      if (number < 0) {
        // NOTE(review): negative values are adjusted byte-by-byte here —
        // presumably toward a two's-complement-like encoding; verify against
        // the upstream byte-helpers tests before relying on exact output.
        bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
        bytes[byteIndex] -= i === 0 ? 1 : 2;
      }
    }

    return bytes;
  };
4854 var stringToBytes = function stringToBytes(string, stringIsBytes) {
4855 if (typeof string !== 'string' && string && typeof string.toString === 'function') {
4856 string = string.toString();
4857 }
4858
4859 if (typeof string !== 'string') {
4860 return new Uint8Array();
4861 } // If the string already is bytes, we don't have to do this
4862 // otherwise we do this so that we split multi length characters
4863 // into individual bytes
4864
4865
4866 if (!stringIsBytes) {
4867 string = unescape(encodeURIComponent(string));
4868 }
4869
4870 var view = new Uint8Array(string.length);
4871
4872 for (var i = 0; i < string.length; i++) {
4873 view[i] = string.charCodeAt(i);
4874 }
4875
4876 return view;
4877 };
4878 var concatTypedArrays = function concatTypedArrays() {
4879 for (var _len = arguments.length, buffers = new Array(_len), _key = 0; _key < _len; _key++) {
4880 buffers[_key] = arguments[_key];
4881 }
4882
4883 buffers = buffers.filter(function (b) {
4884 return b && (b.byteLength || b.length) && typeof b !== 'string';
4885 });
4886
4887 if (buffers.length <= 1) {
4888 // for 0 length we will return empty uint8
4889 // for 1 length we return the first uint8
4890 return toUint8(buffers[0]);
4891 }
4892
4893 var totalLen = buffers.reduce(function (total, buf, i) {
4894 return total + (buf.byteLength || buf.length);
4895 }, 0);
4896 var tempBuffer = new Uint8Array(totalLen);
4897 var offset = 0;
4898 buffers.forEach(function (buf) {
4899 buf = toUint8(buf);
4900 tempBuffer.set(buf, offset);
4901 offset += buf.byteLength;
4902 });
4903 return tempBuffer;
4904 };
4905 /**
4906 * Check if the bytes "b" are contained within bytes "a".
4907 *
4908 * @param {Uint8Array|Array} a
4909 * Bytes to check in
4910 *
4911 * @param {Uint8Array|Array} b
4912 * Bytes to check for
4913 *
4914 * @param {Object} options
4915 * options
4916 *
4917 * @param {Array|Uint8Array} [offset=0]
4918 * offset to use when looking at bytes in a
4919 *
4920 * @param {Array|Uint8Array} [mask=[]]
4921 * mask to use on bytes before comparison.
4922 *
4923 * @return {boolean}
4924 * If all bytes in b are inside of a, taking into account
4925 * bit masks.
4926 */
4927
4928 var bytesMatch = function bytesMatch(a, b, _temp3) {
4929 var _ref3 = _temp3 === void 0 ? {} : _temp3,
4930 _ref3$offset = _ref3.offset,
4931 offset = _ref3$offset === void 0 ? 0 : _ref3$offset,
4932 _ref3$mask = _ref3.mask,
4933 mask = _ref3$mask === void 0 ? [] : _ref3$mask;
4934
4935 a = toUint8(a);
4936 b = toUint8(b); // ie 11 does not support uint8 every
4937
4938 var fn = b.every ? b.every : Array.prototype.every;
4939 return b.length && a.length - offset >= b.length && // ie 11 doesn't support every on uin8
4940 fn.call(b, function (bByte, i) {
4941 var aByte = mask[i] ? mask[i] & a[offset + i] : a[offset + i];
4942 return bByte === aByte;
4943 });
4944 };
4945
4946 /**
4947 * @file bin-utils.js
4948 */
4949
4950 /**
4951 * convert a TimeRange to text
4952 *
4953 * @param {TimeRange} range the timerange to use for conversion
4954 * @param {number} i the iterator on the range to convert
4955 * @return {string} the range in string format
4956 */
4957
4958 var textRange = function textRange(range, i) {
4959 return range.start(i) + '-' + range.end(i);
4960 };
4961 /**
4962 * format a number as hex string
4963 *
4964 * @param {number} e The number
4965 * @param {number} i the iterator
4966 * @return {string} the hex formatted number as a string
4967 */
4968
4969
4970 var formatHexString = function formatHexString(e, i) {
4971 var value = e.toString(16);
4972 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
4973 };
4974
4975 var formatAsciiString = function formatAsciiString(e) {
4976 if (e >= 0x20 && e < 0x7e) {
4977 return String.fromCharCode(e);
4978 }
4979
4980 return '.';
4981 };
4982 /**
4983 * Creates an object for sending to a web worker modifying properties that are TypedArrays
4984 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
4985 *
4986 * @param {Object} message
4987 * Object of properties and values to send to the web worker
4988 * @return {Object}
4989 * Modified message with TypedArray values expanded
4990 * @function createTransferableMessage
4991 */
4992
4993
4994 var createTransferableMessage = function createTransferableMessage(message) {
4995 var transferable = {};
4996 Object.keys(message).forEach(function (key) {
4997 var value = message[key];
4998
4999 if (isArrayBufferView(value)) {
5000 transferable[key] = {
5001 bytes: value.buffer,
5002 byteOffset: value.byteOffset,
5003 byteLength: value.byteLength
5004 };
5005 } else {
5006 transferable[key] = value;
5007 }
5008 });
5009 return transferable;
5010 };
5011 /**
5012 * Returns a unique string identifier for a media initialization
5013 * segment.
5014 *
5015 * @param {Object} initSegment
5016 * the init segment object.
5017 *
5018 * @return {string} the generated init segment id
5019 */
5020
5021 var initSegmentId = function initSegmentId(initSegment) {
5022 var byterange = initSegment.byterange || {
5023 length: Infinity,
5024 offset: 0
5025 };
5026 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
5027 };
5028 /**
5029 * Returns a unique string identifier for a media segment key.
5030 *
5031 * @param {Object} key the encryption key
5032 * @return {string} the unique id for the media segment key.
5033 */
5034
5035 var segmentKeyId = function segmentKeyId(key) {
5036 return key.resolvedUri;
5037 };
5038 /**
5039 * utils to help dump binary data to the console
5040 *
5041 * @param {Array|TypedArray} data
5042 * data to dump to a string
5043 *
5044 * @return {string} the data as a hex string.
5045 */
5046
5047 var hexDump = function hexDump(data) {
5048 var bytes = Array.prototype.slice.call(data);
5049 var step = 16;
5050 var result = '';
5051 var hex;
5052 var ascii;
5053
5054 for (var j = 0; j < bytes.length / step; j++) {
5055 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
5056 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
5057 result += hex + ' ' + ascii + '\n';
5058 }
5059
5060 return result;
5061 };
5062 var tagDump = function tagDump(_ref) {
5063 var bytes = _ref.bytes;
5064 return hexDump(bytes);
5065 };
5066 var textRanges = function textRanges(ranges) {
5067 var result = '';
5068 var i;
5069
5070 for (i = 0; i < ranges.length; i++) {
5071 result += textRange(ranges, i) + ' ';
5072 }
5073
5074 return result;
5075 };
5076
  // Bundler-generated frozen namespace object re-exporting the helper
  // functions above as a single `utils` "module".
  var utils = /*#__PURE__*/Object.freeze({
    __proto__: null,
    createTransferableMessage: createTransferableMessage,
    initSegmentId: initSegmentId,
    segmentKeyId: segmentKeyId,
    hexDump: hexDump,
    tagDump: tagDump,
    textRanges: textRanges
  });
5086
5087 // TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
5088 // 25% was arbitrarily chosen, and may need to be refined over time.
5089
5090 var SEGMENT_END_FUDGE_PERCENT = 0.25;
5091 /**
5092 * Converts a player time (any time that can be gotten/set from player.currentTime(),
5093 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
5094 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
5095 *
5096 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
5097 * point" (a point where we have a mapping from program time to player time, with player
5098 * time being the post transmux start of the segment).
5099 *
5100 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
5101 *
5102 * @param {number} playerTime the player time
5103 * @param {Object} segment the segment which contains the player time
5104 * @return {Date} program time
5105 */
5106
5107 var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
5108 if (!segment.dateTimeObject) {
5109 // Can't convert without an "anchor point" for the program time (i.e., a time that can
5110 // be used to map the start of a segment with a real world time).
5111 return null;
5112 }
5113
5114 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
5115 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
5116
5117 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
5118 var offsetFromSegmentStart = playerTime - startOfSegment;
5119 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
5120 };
var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
  // Duration of the segment's own content: the full transmuxed span minus the
  // content the transmuxer prepended from earlier segments.
  var transmuxedSpan = videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart;

  return transmuxedSpan - videoTimingInfo.transmuxerPrependedSeconds;
};
5124 /**
5125 * Finds a segment that contains the time requested given as an ISO-8601 string. The
5126 * returned segment might be an estimate or an accurate match.
5127 *
5128 * @param {string} programTime The ISO-8601 programTime to find a match for
5129 * @param {Object} playlist A playlist object to search within
5130 */
5131
/**
 * Finds the segment containing the given ISO-8601 program time.
 *
 * @param {string} programTime The ISO-8601 programTime to find a match for
 * @param {Object} playlist A playlist object to search within
 * @return {Object|null} match with `segment`, `estimatedStart` and
 *         `type` ('accurate'|'estimate'), or null when no match exists
 */
var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
  // Assumptions:
  // - verifyProgramDateTimeTags has already been run
  // - live streams have been started
  var dateTimeObject;

  try {
    dateTimeObject = new Date(programTime);
  } catch (e) {
    return null;
  }

  // `new Date(string)` does not throw on malformed input — it returns an
  // Invalid Date whose comparisons all evaluate to false. Without this check
  // an unparseable programTime would fall through every guard below and
  // return an arbitrary segment instead of null.
  if (isNaN(dateTimeObject.getTime())) {
    return null;
  }

  if (!playlist || !playlist.segments || playlist.segments.length === 0) {
    return null;
  }

  var segment = playlist.segments[0];

  if (dateTimeObject < segment.dateTimeObject) {
    // Requested time is before stream start.
    return null;
  }

  // Walk forward until the next segment starts after the requested time.
  for (var i = 0; i < playlist.segments.length - 1; i++) {
    segment = playlist.segments[i];
    var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;

    if (dateTimeObject < nextSegmentStart) {
      break;
    }
  }

  var lastSegment = playlist.segments[playlist.segments.length - 1];
  var lastSegmentStart = lastSegment.dateTimeObject;
  // Without transmux timing info, pad the manifest-declared duration by the
  // fudge factor when guessing where the last segment ends.
  var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
  var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);

  if (dateTimeObject > lastSegmentEnd) {
    // Beyond the end of the stream, or our best guess of the end of the stream.
    return null;
  }

  if (dateTimeObject > lastSegmentStart) {
    segment = lastSegment;
  }

  return {
    segment: segment,
    estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
    // Although, given that all segments have accurate date time objects, the segment
    // selected should be accurate, unless the video has been transmuxed at some point
    // (determined by the presence of the videoTimingInfo object), the segment's "player
    // time" (the start time in the player) can't be considered accurate.
    type: segment.videoTimingInfo ? 'accurate' : 'estimate'
  };
};
5188 /**
5189 * Finds a segment that contains the given player time(in seconds).
5190 *
5191 * @param {number} time The player time to find a match for
5192 * @param {Object} playlist A playlist object to search within
5193 */
5194
/**
 * Finds the segment containing the given player time (in seconds).
 *
 * @param {number} time The player time to find a match for
 * @param {Object} playlist A playlist object to search within
 * @return {Object|null} match with `segment`, `estimatedStart` and
 *         `type` ('accurate'|'estimate'), or null when no match exists
 */
var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
  // Assumptions:
  // - there will always be a segment.duration
  // - we can start from zero
  // - segments are in time order
  if (!playlist || !playlist.segments || playlist.segments.length === 0) {
    return null;
  }

  var segments = playlist.segments;
  var segmentEnd = 0;
  var segment;

  for (var i = 0; i < segments.length; i++) {
    segment = segments[i];

    // videoTimingInfo is set after the segment is downloaded and transmuxed
    // and holds the most accurate player times we have. Prefer its end value;
    // otherwise accumulate the (inaccurate) manifest-declared durations.
    if (segment.videoTimingInfo) {
      segmentEnd = segment.videoTimingInfo.transmuxedPresentationEnd;
    } else {
      segmentEnd += segment.duration;
    }

    if (time <= segmentEnd) {
      break;
    }
  }

  var lastSegment = segments[segments.length - 1];

  if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
    // The time requested is beyond the stream end.
    return null;
  }

  if (time > segmentEnd) {
    // The requested time is past every computed segment end, so it can only
    // fall within (a fudged estimate of) the last segment.
    if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
      // The duration is only an estimate, so the time may still exist in the
      // last segment, but there isn't enough information for a reasonable guess.
      return null;
    }

    segment = lastSegment;
  }

  return {
    segment: segment,
    estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
    // videoTimingInfo is only set after transmux, so it is the only source of
    // accurate timing values.
    type: segment.videoTimingInfo ? 'accurate' : 'estimate'
  };
};
5251 /**
5252 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
5253 * If the offset returned is positive, the programTime occurs after the
5254 * comparisonTimestamp.
5255 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
5256 *
5257 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
5258 * @param {string} programTime The programTime as an ISO-8601 string
5259 * @return {number} offset
5260 */
5261
var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
  // `new Date(string)` never throws; malformed input produces an Invalid Date
  // whose getTime() is NaN, so the offset for unparseable input is NaN. This
  // matches the previous behavior, where the surrounding try/catch was dead
  // code (its "TODO handle error" branch could never run).
  var segmentTimeEpoch = new Date(comparisonTimeStamp).getTime();
  var programTimeEpoch = new Date(programTime).getTime();

  return (programTimeEpoch - segmentTimeEpoch) / 1000;
};
5276 /**
5277 * Checks that all segments in this playlist have programDateTime tags.
5278 *
5279 * @param {Object} playlist A playlist object
5280 */
5281
var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
  // Program-time lookups require every segment to carry an
  // EXT-X-PROGRAM-DATE-TIME anchor (stored as segment.dateTimeObject).
  if (!playlist.segments || playlist.segments.length === 0) {
    return false;
  }

  return playlist.segments.every(function (segment) {
    return Boolean(segment.dateTimeObject);
  });
};
5297 /**
5298 * Returns the programTime of the media given a playlist and a playerTime.
5299 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
5300 * If the segments containing the time requested have not been buffered yet, an estimate
5301 * may be returned to the callback.
5302 *
5303 * @param {Object} args
5304 * @param {Object} args.playlist A playlist object to search within
5305 * @param {number} time A playerTime in seconds
5306 * @param {Function} callback(err, programTime)
5307 * @return {string} err.message A detailed error message
5308 * @return {Object} programTime
5309 * @return {number} programTime.mediaSeconds The streamTime in seconds
5310 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
5311 */
5312
var getProgramTime = function getProgramTime(_ref) {
  var playlist = _ref.playlist;
  var time = _ref.time;
  var callback = _ref.callback;

  // The callback is the only channel for results and errors.
  if (!callback) {
    throw new Error('getProgramTime: callback must be provided');
  }

  if (!playlist || time === undefined) {
    return callback({
      message: 'getProgramTime: playlist and time must be provided'
    });
  }

  var matchedSegment = findSegmentForPlayerTime(time, playlist);

  if (!matchedSegment) {
    return callback({
      message: 'valid programTime was not found'
    });
  }

  // An estimated match means the segment has not been transmuxed yet, so an
  // accurate program time cannot be produced; ask the caller to seek first.
  if (matchedSegment.type === 'estimate') {
    return callback({
      message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
      seekTime: matchedSegment.estimatedStart
    });
  }

  var result = {
    mediaSeconds: time
  };
  var programDate = playerTimeToProgramTime(time, matchedSegment.segment);

  if (programDate) {
    result.programDateTime = programDate.toISOString();
  }

  return callback(null, result);
};
5355 /**
5356 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
5357 *
5358 * @param {Object} args
5359 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
5360 * @param {Object} args.playlist A playlist to look within
5361 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
5362 * @param {Function} args.seekTo A method to perform a seek
5363 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
5364 * @param {Object} args.tech The tech to seek on
5365 * @param {Function} args.callback(err, newTime) A callback to return the new time to
5366 * @return {string} err.message A detailed error message
5367 * @return {number} newTime The exact time that was seeked to in seconds
5368 */
5369
var seekToProgramTime = function seekToProgramTime(_ref2) {
  var programTime = _ref2.programTime,
      playlist = _ref2.playlist,
      _ref2$retryCount = _ref2.retryCount,
      retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
      seekTo = _ref2.seekTo,
      _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
      pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
      tech = _ref2.tech,
      callback = _ref2.callback;

  // The callback is the only channel for results and errors; without one the
  // call is meaningless, so throw rather than fail silently.
  if (!callback) {
    throw new Error('seekToProgramTime: callback must be provided');
  }

  if (typeof programTime === 'undefined' || !playlist || !seekTo) {
    return callback({
      message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
    });
  }

  // A live playlist (no endList) only gains buffered content after playback
  // has started on the tech.
  if (!playlist.endList && !tech.hasStarted_) {
    return callback({
      message: 'player must be playing a live stream to start buffering'
    });
  }

  // Every segment needs an EXT-X-PROGRAM-DATE-TIME anchor for the lookup.
  if (!verifyProgramDateTimeTags(playlist)) {
    return callback({
      message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
    });
  }

  var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match

  if (!matchedSegment) {
    return callback({
      message: programTime + " was not found in the stream"
    });
  }

  var segment = matchedSegment.segment;
  // Seconds between the segment's program-date-time anchor and the requested
  // program time.
  var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);

  if (matchedSegment.type === 'estimate') {
    // we've run out of retries
    if (retryCount === 0) {
      return callback({
        message: programTime + " is not buffered yet. Try again"
      });
    }

    // Seek near the estimated position so the segment gets downloaded and
    // transmuxed, then retry after the seek completes — the next pass should
    // find an 'accurate' match via videoTimingInfo.
    seekTo(matchedSegment.estimatedStart + mediaOffset);
    tech.one('seeked', function () {
      seekToProgramTime({
        programTime: programTime,
        playlist: playlist,
        retryCount: retryCount - 1,
        seekTo: seekTo,
        pauseAfterSeek: pauseAfterSeek,
        tech: tech,
        callback: callback
      });
    });
    return;
  } // Since the segment.start value is determined from the buffered end or ending time
  // of the prior segment, the seekToTime doesn't need to account for any transmuxer
  // modifications.


  var seekToTime = segment.start + mediaOffset;

  var seekedCallback = function seekedCallback() {
    return callback(null, tech.currentTime());
  }; // listen for seeked event


  tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state

  if (pauseAfterSeek) {
    tech.pause();
  }

  seekTo(seekToTime);
};
5455
5456 /**
5457 * Loops through all supported media groups in master and calls the provided
5458 * callback for each group
5459 *
5460 * @param {Object} master
5461 * The parsed master manifest object
5462 * @param {string[]} groups
5463 * The media groups to call the callback for
5464 * @param {Function} callback
5465 * Callback to call for each media group
5466 */
var forEachMediaGroup = function forEachMediaGroup(master, groups, callback) {
  groups.forEach(function (mediaType) {
    var groupsOfType = master.mediaGroups[mediaType];

    // Iterate every group, then every labelled variant within the group.
    for (var groupKey in groupsOfType) {
      for (var labelKey in groupsOfType[groupKey]) {
        callback(groupsOfType[groupKey][labelKey], mediaType, groupKey, labelKey);
      }
    }
  });
};
5477
5478 /*! @name mpd-parser @version 0.22.1 @license Apache-2.0 */
5479
var isObject = function isObject(obj) {
  // True for plain objects AND arrays; false for null and all primitives.
  return obj !== null && obj !== undefined && typeof obj === 'object';
};
5483
/**
 * Deep-merges any number of objects left-to-right into a fresh object.
 * Arrays are concatenated, nested objects are merged recursively, and any
 * other value is overwritten by later sources.
 */
var merge = function merge() {
  for (var _len = arguments.length, objects = new Array(_len), _key = 0; _key < _len; _key++) {
    objects[_key] = arguments[_key];
  }

  // Local null-safe object check: `typeof null === 'object'`, so the previous
  // `typeof source !== 'object'` guard let null through and Object.keys(null)
  // then threw a TypeError. Null/primitive sources are now skipped.
  var isMergeable = function isMergeable(value) {
    return !!value && typeof value === 'object';
  };

  return objects.reduce(function (result, source) {
    if (!isMergeable(source)) {
      return result;
    }

    Object.keys(source).forEach(function (key) {
      if (Array.isArray(result[key]) && Array.isArray(source[key])) {
        result[key] = result[key].concat(source[key]);
      } else if (isMergeable(result[key]) && isMergeable(source[key])) {
        result[key] = merge(result[key], source[key]);
      } else {
        result[key] = source[key];
      }
    });
    return result;
  }, {});
};
5506
var values = function values(o) {
  // Own-enumerable property values, in key order (IE11-safe Object.values).
  var result = [];

  for (var key in o) {
    if (Object.prototype.hasOwnProperty.call(o, key)) {
      result.push(o[key]);
    }
  }

  return result;
};
5512
var range = function range(start, end) {
  // Half-open integer range [start, end).
  var result = [];
  var current = start;

  while (current < end) {
    result.push(current);
    current++;
  }

  return result;
};
5522
var flatten = function flatten(lists) {
  // Flattens exactly one level (like Array#flat() without ES2019).
  var result = [];

  for (var i = 0; i < lists.length; i++) {
    result = result.concat(lists[i]);
  }

  return result;
};
5528
var from = function from(list) {
  // Shallow copy of an array-like into a real array (IE11-safe Array.from).
  if (!list.length) {
    return [];
  }

  var result = [];
  var i = 0;

  while (i < list.length) {
    result.push(list[i]);
    i += 1;
  }

  return result;
};
5542
var findIndexes = function findIndexes(l, key) {
  // Indexes of the elements whose `key` property is truthy.
  var indexes = [];

  for (var i = 0; i < l.length; i++) {
    if (l[i][key]) {
      indexes.push(i);
    }
  }

  return indexes;
};
5552 /**
5553 * Returns the first index that satisfies the matching function, or -1 if not found.
5554 *
5555 * Only necessary because of IE11 support.
5556 *
5557 * @param {Array} list - the list to search through
5558 * @param {Function} matchingFunction - the matching function
5559 *
5560 * @return {number} the matching index or -1 if not found
5561 */
5562
5563
var findIndex = function findIndex(list, matchingFunction) {
  // First matching index, or -1 (IE11-safe Array#findIndex).
  var index = -1;

  for (var i = 0; i < list.length; i++) {
    if (matchingFunction(list[i])) {
      index = i;
      break;
    }
  }

  return index;
};
5573 /**
5574 * Returns a union of the included lists provided each element can be identified by a key.
5575 *
5576 * @param {Array} list - list of lists to get the union of
5577 * @param {Function} keyFunction - the function to use as a key for each element
5578 *
5579 * @return {Array} the union of the arrays
5580 */
5581
5582
var union = function union(lists, keyFunction) {
  // Keep one element per key — later occurrences overwrite earlier ones —
  // then return the surviving elements.
  var byKey = {};

  lists.forEach(function (list) {
    list.forEach(function (el) {
      byKey[keyFunction(el)] = el;
    });
  });

  return Object.keys(byKey).map(function (k) {
    return byKey[k];
  });
};
5591
// Error codes thrown/surfaced by the embedded mpd-parser when a DASH manifest
// cannot be handled (see e.g. segmentsFromBase, which throws NO_BASE_URL).
var errors = {
  INVALID_NUMBER_OF_PERIOD: 'INVALID_NUMBER_OF_PERIOD',
  DASH_EMPTY_MANIFEST: 'DASH_EMPTY_MANIFEST',
  DASH_INVALID_XML: 'DASH_INVALID_XML',
  NO_BASE_URL: 'NO_BASE_URL',
  MISSING_SEGMENT_INFORMATION: 'MISSING_SEGMENT_INFORMATION',
  SEGMENT_TIME_UNSPECIFIED: 'SEGMENT_TIME_UNSPECIFIED',
  UNSUPPORTED_UTC_TIMING_SCHEME: 'UNSUPPORTED_UTC_TIMING_SCHEME'
};
5601 /**
5602 * @typedef {Object} SingleUri
5603 * @property {string} uri - relative location of segment
5604 * @property {string} resolvedUri - resolved location of segment
5605 * @property {Object} byterange - Object containing information on how to make byte range
5606 * requests following byte-range-spec per RFC2616.
5607 * @property {String} byterange.length - length of range request
5608 * @property {String} byterange.offset - byte offset of range request
5609 *
5610 * @see https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1
5611 */
5612
5613 /**
5614 * Converts a URLType node (5.3.9.2.3 Table 13) to a segment object
5615 * that conforms to how m3u8-parser is structured
5616 *
5617 * @see https://github.com/videojs/m3u8-parser
5618 *
5619 * @param {string} baseUrl - baseUrl provided by <BaseUrl> nodes
5620 * @param {string} source - source url for segment
5621 * @param {string} range - optional range used for range calls,
5622 * follows RFC 2616, Clause 14.35.1
5623 * @return {SingleUri} full segment information transformed into a format similar
5624 * to m3u8-parser
5625 */
5626
var urlTypeToSegment = function urlTypeToSegment(_ref) {
  var _ref$baseUrl = _ref.baseUrl,
      baseUrl = _ref$baseUrl === void 0 ? '' : _ref$baseUrl,
      _ref$source = _ref.source,
      source = _ref$source === void 0 ? '' : _ref$source,
      _ref$range = _ref.range,
      range = _ref$range === void 0 ? '' : _ref$range,
      _ref$indexRange = _ref.indexRange,
      indexRange = _ref$indexRange === void 0 ? '' : _ref$indexRange;
  var segment = {
    uri: source,
    resolvedUri: resolveUrl$1(baseUrl || '', source)
  };

  if (range || indexRange) {
    // `range` takes precedence over `indexRange` when both are given.
    var rangeStr = range ? range : indexRange;
    var ranges = rangeStr.split('-'); // default to parsing this as a BigInt if possible

    var startRange = window.BigInt ? window.BigInt(ranges[0]) : parseInt(ranges[0], 10);
    var endRange = window.BigInt ? window.BigInt(ranges[1]) : parseInt(ranges[1], 10); // convert back to a number if less than MAX_SAFE_INTEGER

    // Note: mixed BigInt/Number `<` comparisons are valid JS; the typeof
    // check ensures Number() is only applied to actual BigInts.
    if (startRange < Number.MAX_SAFE_INTEGER && typeof startRange === 'bigint') {
      startRange = Number(startRange);
    }

    if (endRange < Number.MAX_SAFE_INTEGER && typeof endRange === 'bigint') {
      endRange = Number(endRange);
    }

    var length;

    // If either bound is still a BigInt, the arithmetic must be done entirely
    // in BigInt (mixing BigInt and Number operands in `-`/`+` throws).
    if (typeof endRange === 'bigint' || typeof startRange === 'bigint') {
      length = window.BigInt(endRange) - window.BigInt(startRange) + window.BigInt(1);
    } else {
      length = endRange - startRange + 1;
    }

    if (typeof length === 'bigint' && length < Number.MAX_SAFE_INTEGER) {
      length = Number(length);
    } // byterange should be inclusive according to
    // RFC 2616, Clause 14.35.1


    segment.byterange = {
      length: length,
      offset: startRange
    };
  }

  return segment;
};
5678
var byteRangeToString = function byteRangeToString(byterange) {
  // HTTP Range headers are inclusive, so the last byte position is
  // offset + length - 1. BigInt arithmetic is used whenever either field is a
  // BigInt, since mixing BigInt and Number operands would throw.
  var lastByte;

  if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
    lastByte = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);
  } else {
    lastByte = byterange.offset + byterange.length - 1;
  }

  return byterange.offset + "-" + lastByte;
};
5692 /**
 * parse the end number attribute that can be a string
5694 * number, or undefined.
5695 *
5696 * @param {string|number|undefined} endNumber
5697 * The end number attribute.
5698 *
5699 * @return {number|null}
5700 * The result of parsing the end number.
5701 */
5702
5703
/**
 * Parses the end number attribute, which may be a string, a number, or
 * undefined.
 *
 * @param {string|number|undefined} endNumber
 *        The end number attribute.
 * @return {number|null}
 *         The parsed number, or null when the input is not a valid number.
 */
var parseEndNumber = function parseEndNumber(endNumber) {
  // Coerce any non-number input (typically a string attribute from the MPD)
  // to an integer. The previous `endNumber && ...` truthiness guard skipped
  // parsing for falsy values, so '' and false were returned as-is instead of
  // null, violating the documented number|null contract.
  if (typeof endNumber !== 'number') {
    endNumber = parseInt(endNumber, 10);
  }

  if (isNaN(endNumber)) {
    return null;
  }

  return endNumber;
};
5715 /**
5716 * Functions for calculating the range of available segments in static and dynamic
5717 * manifests.
5718 */
5719
5720
var segmentRange = {
  /**
   * Returns the entire range of available segments for a static MPD
   *
   * @param {Object} attributes
   *        Inherited MPD attributes
   * @return {{ start: number, end: number }}
   *         The start and end numbers for available segments
   */
  static: function _static(attributes) {
    var duration = attributes.duration,
        _attributes$timescale = attributes.timescale,
        timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
        sourceDuration = attributes.sourceDuration,
        periodDuration = attributes.periodDuration;
    var endNumber = parseEndNumber(attributes.endNumber);
    // Segment duration in seconds (@duration is expressed in timescale units).
    var segmentDuration = duration / timescale;

    // An explicit @endNumber takes precedence over any duration-derived count.
    if (typeof endNumber === 'number') {
      return {
        start: 0,
        end: endNumber
      };
    }

    // Prefer the period's own duration; fall back to the full source duration.
    if (typeof periodDuration === 'number') {
      return {
        start: 0,
        end: periodDuration / segmentDuration
      };
    }

    return {
      start: 0,
      end: sourceDuration / segmentDuration
    };
  },

  /**
   * Returns the current live window range of available segments for a dynamic MPD
   *
   * @param {Object} attributes
   *        Inherited MPD attributes
   * @return {{ start: number, end: number }}
   *         The start and end numbers for available segments
   */
  dynamic: function dynamic(attributes) {
    var NOW = attributes.NOW,
        clientOffset = attributes.clientOffset,
        availabilityStartTime = attributes.availabilityStartTime,
        _attributes$timescale2 = attributes.timescale,
        timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
        duration = attributes.duration,
        _attributes$periodSta = attributes.periodStart,
        periodStart = _attributes$periodSta === void 0 ? 0 : _attributes$periodSta,
        _attributes$minimumUp = attributes.minimumUpdatePeriod,
        minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp,
        _attributes$timeShift = attributes.timeShiftBufferDepth,
        timeShiftBufferDepth = _attributes$timeShift === void 0 ? Infinity : _attributes$timeShift;
    var endNumber = parseEndNumber(attributes.endNumber); // clientOffset is passed in at the top level of mpd-parser and is an offset calculated
    // after retrieving UTC server time.

    var now = (NOW + clientOffset) / 1000; // WC stands for Wall Clock.
    // Convert the period start time to EPOCH.

    var periodStartWC = availabilityStartTime + periodStart; // Period end in EPOCH is manifest's retrieval time + time until next update.

    var periodEndWC = now + minimumUpdatePeriod;
    var periodDuration = periodEndWC - periodStartWC;
    // Total number of segments the period could hold.
    var segmentCount = Math.ceil(periodDuration * timescale / duration);
    // Oldest segment still inside the time-shift (DVR) window.
    var availableStart = Math.floor((now - periodStartWC - timeShiftBufferDepth) * timescale / duration);
    // Newest segment whose start has been reached by the wall clock.
    var availableEnd = Math.floor((now - periodStartWC) * timescale / duration);
    return {
      start: Math.max(0, availableStart),
      // An explicit @endNumber overrides the computed live window end.
      end: typeof endNumber === 'number' ? endNumber : Math.min(segmentCount, availableEnd)
    };
  }
};
5799 /**
5800 * Maps a range of numbers to objects with information needed to build the corresponding
5801 * segment list
5802 *
5803 * @name toSegmentsCallback
5804 * @function
5805 * @param {number} number
5806 * Number of the segment
5807 * @param {number} index
5808 * Index of the number in the range list
5809 * @return {{ number: Number, duration: Number, timeline: Number, time: Number }}
5810 * Object with segment timing and duration info
5811 */
5812
5813 /**
5814 * Returns a callback for Array.prototype.map for mapping a range of numbers to
5815 * information needed to build the segment list.
5816 *
5817 * @param {Object} attributes
5818 * Inherited MPD attributes
5819 * @return {toSegmentsCallback}
5820 * Callback map function
5821 */
5822
var toSegments = function toSegments(attributes) {
  // Returns an Array#map callback that turns a segment number (offset within
  // the available range) into an object with timing and duration info.
  return function (number) {
    var timescale = attributes.timescale === undefined ? 1 : attributes.timescale;
    var startNumber = attributes.startNumber === undefined ? 1 : attributes.startNumber;

    return {
      number: startNumber + number,
      // @duration is in timescale units; convert to seconds.
      duration: attributes.duration / timescale,
      timeline: attributes.periodStart,
      // Presentation time offset, still in timescale units.
      time: number * attributes.duration
    };
  };
};
5839 /**
5840 * Returns a list of objects containing segment timing and duration info used for
5841 * building the list of segments. This uses the @duration attribute specified
5842 * in the MPD manifest to derive the range of segments.
5843 *
5844 * @param {Object} attributes
5845 * Inherited MPD attributes
5846 * @return {{number: number, duration: number, time: number, timeline: number}[]}
5847 * List of Objects with segment timing and duration info
5848 */
5849
5850
var parseByDuration = function parseByDuration(attributes) {
  var timescale = attributes.timescale === undefined ? 1 : attributes.timescale;

  // Determine which segment numbers are currently available for this
  // manifest type, then map each number to its timing/duration info.
  var numberRange = segmentRange[attributes.type](attributes);
  var segments = range(numberRange.start, numberRange.end).map(toSegments(attributes));

  if (attributes.type === 'static') {
    var lastIndex = segments.length - 1;
    // The section is either a period or the full source.
    var sectionDuration = typeof attributes.periodDuration === 'number' ? attributes.periodDuration : attributes.sourceDuration;

    // The final segment may be shorter than a full segment duration.
    segments[lastIndex].duration = sectionDuration - attributes.duration / timescale * lastIndex;
  }

  return segments;
};
5875 /**
5876 * Translates SegmentBase into a set of segments.
5877 * (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
5878 * node should be translated into segment.
5879 *
5880 * @param {Object} attributes
5881 * Object containing all inherited attributes from parent elements with attribute
5882 * names as keys
5883 * @return {Object.<Array>} list of segments
5884 */
5885
5886
var segmentsFromBase = function segmentsFromBase(attributes) {
  var baseUrl = attributes.baseUrl;
  var initialization = attributes.initialization === undefined ? {} : attributes.initialization;
  var indexRange = attributes.indexRange === undefined ? '' : attributes.indexRange;
  var number = attributes.number === undefined ? 0 : attributes.number;

  // A base url is required for SegmentBase to work, per spec (Section 5.3.9.2.1).
  if (!baseUrl) {
    throw new Error(errors.NO_BASE_URL);
  }

  // The media segment is the base url itself, optionally narrowed by a
  // byterange; its init segment comes from the <Initialization> node.
  var segment = urlTypeToSegment({
    baseUrl: baseUrl,
    source: baseUrl,
    indexRange: indexRange
  });

  segment.map = urlTypeToSegment({
    baseUrl: baseUrl,
    source: initialization.sourceURL,
    range: initialization.range
  });

  // If there is a duration, use it; otherwise use the given duration of the
  // source (since SegmentBase is only for one total segment).
  if (attributes.duration) {
    var segmentTimeInfo = parseByDuration(attributes);

    if (segmentTimeInfo.length) {
      segment.duration = segmentTimeInfo[0].duration;
      segment.timeline = segmentTimeInfo[0].timeline;
    }
  } else if (attributes.sourceDuration) {
    segment.duration = attributes.sourceDuration;
    segment.timeline = attributes.periodStart;
  }

  // If a presentation time is provided, these segments are being generated by
  // SIDX references and should use the time provided. In the general
  // SegmentBase case there is only one segment in the period, so its
  // presentation time equals the period start.
  segment.presentationTime = attributes.presentationTime || attributes.periodStart;
  segment.number = number;

  return [segment];
};
5937 /**
5938 * Given a playlist, a sidx box, and a baseUrl, update the segment list of the playlist
5939 * according to the sidx information given.
5940 *
 * playlist.sidx has metadata about the sidx, whereas the sidx param
5942 * is the parsed sidx box itself.
5943 *
5944 * @param {Object} playlist the playlist to update the sidx information for
5945 * @param {Object} sidx the parsed sidx box
5946 * @return {Object} the playlist object with the updated sidx information
5947 */
5948
5949
var addSidxSegmentsToPlaylist$1 = function addSidxSegmentsToPlaylist(playlist, sidx, baseUrl) {
  // Retain init segment information
  var initSegment = playlist.sidx.map ? playlist.sidx.map : null; // Retain source duration from initial main manifest parsing

  var sourceDuration = playlist.sidx.duration; // Retain source timeline

  var timeline = playlist.timeline || 0;
  var sidxByteRange = playlist.sidx.byterange;
  var sidxEnd = sidxByteRange.offset + sidxByteRange.length; // Retain timescale of the parsed sidx

  var timescale = sidx.timescale; // referenceType 1 refers to other sidx boxes

  var mediaReferences = sidx.references.filter(function (r) {
    return r.referenceType !== 1;
  });
  var segments = [];
  var type = playlist.endList ? 'static' : 'dynamic';
  var periodStart = playlist.sidx.timeline;
  var presentationTime = periodStart;
  var number = playlist.mediaSequence || 0; // firstOffset is the offset from the end of the sidx box

  var startIndex; // eslint-disable-next-line

  if (typeof sidx.firstOffset === 'bigint') {
    startIndex = window.BigInt(sidxEnd) + sidx.firstOffset;
  } else {
    startIndex = sidxEnd + sidx.firstOffset;
  }

  for (var i = 0; i < mediaReferences.length; i++) {
    // Index into the FILTERED list: indexing sidx.references here would pick
    // the wrong entry whenever a referenceType-1 (sidx) reference precedes a
    // media reference, shifting every later segment's size/duration by one.
    var reference = mediaReferences[i]; // size of the referenced (sub)segment

    var size = reference.referencedSize; // duration of the referenced (sub)segment, in the timescale
    // this will be converted to seconds when generating segments

    var duration = reference.subsegmentDuration; // should be an inclusive range

    var endIndex = void 0; // eslint-disable-next-line

    if (typeof startIndex === 'bigint') {
      endIndex = startIndex + window.BigInt(size) - window.BigInt(1);
    } else {
      endIndex = startIndex + size - 1;
    }

    var indexRange = startIndex + "-" + endIndex;
    var attributes = {
      baseUrl: baseUrl,
      timescale: timescale,
      timeline: timeline,
      periodStart: periodStart,
      presentationTime: presentationTime,
      number: number,
      duration: duration,
      sourceDuration: sourceDuration,
      indexRange: indexRange,
      type: type
    };
    var segment = segmentsFromBase(attributes)[0];

    if (initSegment) {
      segment.map = initSegment;
    }

    segments.push(segment);

    if (typeof startIndex === 'bigint') {
      startIndex += window.BigInt(size);
    } else {
      startIndex += size;
    }

    presentationTime += duration / timescale;
    number++;
  }

  playlist.segments = segments;
  return playlist;
};
6029
// Media group types iterated by forEachMediaGroup (e.g. in getMediaGroupPlaylists).
var SUPPORTED_MEDIA_TYPES = ['AUDIO', 'SUBTITLES']; // allow one 60fps frame as leniency (arbitrarily chosen)

var TIME_FUDGE = 1 / 60;
6033 /**
6034 * Given a list of timelineStarts, combines, dedupes, and sorts them.
6035 *
6036 * @param {TimelineStart[]} timelineStarts - list of timeline starts
6037 *
6038 * @return {TimelineStart[]} the combined and deduped timeline starts
6039 */
6040
6041 var getUniqueTimelineStarts = function getUniqueTimelineStarts(timelineStarts) {
6042 return union(timelineStarts, function (_ref) {
6043 var timeline = _ref.timeline;
6044 return timeline;
6045 }).sort(function (a, b) {
6046 return a.timeline > b.timeline ? 1 : -1;
6047 });
6048 };
6049 /**
6050 * Finds the playlist with the matching NAME attribute.
6051 *
6052 * @param {Array} playlists - playlists to search through
6053 * @param {string} name - the NAME attribute to search for
6054 *
6055 * @return {Object|null} the matching playlist object, or null
6056 */
6057
6058
6059 var findPlaylistWithName = function findPlaylistWithName(playlists, name) {
6060 for (var i = 0; i < playlists.length; i++) {
6061 if (playlists[i].attributes.NAME === name) {
6062 return playlists[i];
6063 }
6064 }
6065
6066 return null;
6067 };
6068 /**
6069 * Gets a flattened array of media group playlists.
6070 *
6071 * @param {Object} manifest - the main manifest object
6072 *
6073 * @return {Array} the media group playlists
6074 */
6075
6076
6077 var getMediaGroupPlaylists = function getMediaGroupPlaylists(manifest) {
6078 var mediaGroupPlaylists = [];
6079 forEachMediaGroup(manifest, SUPPORTED_MEDIA_TYPES, function (properties, type, group, label) {
6080 mediaGroupPlaylists = mediaGroupPlaylists.concat(properties.playlists || []);
6081 });
6082 return mediaGroupPlaylists;
6083 };
6084 /**
6085 * Updates the playlist's media sequence numbers.
6086 *
6087 * @param {Object} config - options object
6088 * @param {Object} config.playlist - the playlist to update
6089 * @param {number} config.mediaSequence - the mediaSequence number to start with
6090 */
6091
6092
6093 var updateMediaSequenceForPlaylist = function updateMediaSequenceForPlaylist(_ref2) {
6094 var playlist = _ref2.playlist,
6095 mediaSequence = _ref2.mediaSequence;
6096 playlist.mediaSequence = mediaSequence;
6097 playlist.segments.forEach(function (segment, index) {
6098 segment.number = playlist.mediaSequence + index;
6099 });
6100 };
6101 /**
6102 * Updates the media and discontinuity sequence numbers of newPlaylists given oldPlaylists
6103 * and a complete list of timeline starts.
6104 *
6105 * If no matching playlist is found, only the discontinuity sequence number of the playlist
6106 * will be updated.
6107 *
6108 * Since early available timelines are not supported, at least one segment must be present.
6109 *
6110 * @param {Object} config - options object
6111 * @param {Object[]} oldPlaylists - the old playlists to use as a reference
6112 * @param {Object[]} newPlaylists - the new playlists to update
6113 * @param {Object} timelineStarts - all timelineStarts seen in the stream to this point
6114 */
6115
6116
  var updateSequenceNumbers = function updateSequenceNumbers(_ref3) {
    var oldPlaylists = _ref3.oldPlaylists,
        newPlaylists = _ref3.newPlaylists,
        timelineStarts = _ref3.timelineStarts;
    newPlaylists.forEach(function (playlist) {
      // the discontinuity sequence is the index of this playlist's timeline within
      // all seen timeline starts
      playlist.discontinuitySequence = findIndex(timelineStarts, function (_ref4) {
        var timeline = _ref4.timeline;
        return timeline === playlist.timeline;
      }); // Playlists NAMEs come from DASH Representation IDs, which are mandatory
      // (see ISO_23009-1-2012 5.3.5.2).
      //
      // If the same Representation existed in a prior Period, it will retain the same NAME.

      var oldPlaylist = findPlaylistWithName(oldPlaylists, playlist.attributes.NAME);

      if (!oldPlaylist) {
        // Since this is a new playlist, the media sequence values can start from 0 without
        // consequence.
        return;
      } // TODO better support for live SIDX
      //
      // As of this writing, mpd-parser does not support multiperiod SIDX (in live or VOD).
      // This is evident by a playlist only having a single SIDX reference. In a multiperiod
      // playlist there would need to be multiple SIDX references. In addition, live SIDX is
      // not supported when the SIDX properties change on refreshes.
      //
      // In the future, if support needs to be added, the merging logic here can be called
      // after SIDX references are resolved. For now, exit early to prevent exceptions being
      // thrown due to undefined references.


      if (playlist.sidx) {
        return;
      } // Since we don't yet support early available timelines, we don't need to support
      // playlists with no segments.


      var firstNewSegment = playlist.segments[0];
      // match on presentation time (within one 60fps frame of tolerance) to find
      // where the new playlist overlaps the old one
      var oldMatchingSegmentIndex = findIndex(oldPlaylist.segments, function (oldSegment) {
        return Math.abs(oldSegment.presentationTime - firstNewSegment.presentationTime) < TIME_FUDGE;
      }); // No matching segment from the old playlist means the entire playlist was refreshed.
      // In this case the media sequence should account for this update, and the new segments
      // should be marked as discontinuous from the prior content, since the last prior
      // timeline was removed.

      if (oldMatchingSegmentIndex === -1) {
        updateMediaSequenceForPlaylist({
          playlist: playlist,
          mediaSequence: oldPlaylist.mediaSequence + oldPlaylist.segments.length
        });
        playlist.segments[0].discontinuity = true;
        playlist.discontinuityStarts.unshift(0); // No matching segment does not necessarily mean there's missing content.
        //
        // If the new playlist's timeline is the same as the last seen segment's timeline,
        // then a discontinuity can be added to identify that there's potentially missing
        // content. If there's no missing content, the discontinuity should still be rather
        // harmless. It's possible that if segment durations are accurate enough, that the
        // existence of a gap can be determined using the presentation times and durations,
        // but if the segment timing info is off, it may introduce more problems than simply
        // adding the discontinuity.
        //
        // If the new playlist's timeline is different from the last seen segment's timeline,
        // then a discontinuity can be added to identify that this is the first seen segment
        // of a new timeline. However, the logic at the start of this function that
        // determined the discontinuity sequence by timeline index is now off by one (the
        // discontinuity of the newest timeline hasn't yet fallen off the manifest...since
        // we added it), so the discontinuity sequence must be decremented.
        //
        // A period may also have a duration of zero, so the case of no segments is handled
        // here even though we don't yet support early available periods.

        if (!oldPlaylist.segments.length && playlist.timeline > oldPlaylist.timeline || oldPlaylist.segments.length && playlist.timeline > oldPlaylist.segments[oldPlaylist.segments.length - 1].timeline) {
          playlist.discontinuitySequence--;
        }

        return;
      } // If the first segment matched with a prior segment on a discontinuity (it's matching
      // on the first segment of a period), then the discontinuitySequence shouldn't be the
      // timeline's matching one, but instead should be the one prior, and the first segment
      // of the new manifest should be marked with a discontinuity.
      //
      // The reason for this special case is that discontinuity sequence shows how many
      // discontinuities have fallen off of the playlist, and discontinuities are marked on
      // the first segment of a new "timeline." Because of this, while DASH will retain that
      // Period while the "timeline" exists, HLS keeps track of it via the discontinuity
      // sequence, and that first segment is an indicator, but can be removed before that
      // timeline is gone.


      var oldMatchingSegment = oldPlaylist.segments[oldMatchingSegmentIndex];

      if (oldMatchingSegment.discontinuity && !firstNewSegment.discontinuity) {
        firstNewSegment.discontinuity = true;
        playlist.discontinuityStarts.unshift(0);
        playlist.discontinuitySequence--;
      }

      // resume the old playlist's segment numbering from the matched segment
      updateMediaSequenceForPlaylist({
        playlist: playlist,
        mediaSequence: oldPlaylist.segments[oldMatchingSegmentIndex].number
      });
    });
  };
6220 /**
6221 * Given an old parsed manifest object and a new parsed manifest object, updates the
6222 * sequence and timing values within the new manifest to ensure that it lines up with the
6223 * old.
6224 *
6225 * @param {Array} oldManifest - the old main manifest object
6226 * @param {Array} newManifest - the new main manifest object
6227 *
6228 * @return {Object} the updated new manifest object
6229 */
6230
6231
  var positionManifestOnTimeline = function positionManifestOnTimeline(_ref5) {
    var oldManifest = _ref5.oldManifest,
        newManifest = _ref5.newManifest; // Starting from v4.1.2 of the IOP, section 4.4.3.3 states:
    //
    // "MPD@availabilityStartTime and Period@start shall not be changed over MPD updates."
    //
    // This was added from https://github.com/Dash-Industry-Forum/DASH-IF-IOP/issues/160
    //
    // Because of this change, and the difficulty of supporting periods with changing start
    // times, periods with changing start times are not supported. This makes the logic much
    // simpler, since periods with the same start time can be considered the same period
    // across refreshes.
    //
    // To give an example as to the difficulty of handling periods where the start time may
    // change, if a single period manifest is refreshed with another manifest with a single
    // period, and both the start and end times are increased, then the only way to determine
    // if it's a new period or an old one that has changed is to look through the segments of
    // each playlist and determine the presentation time bounds to find a match. In addition,
    // if the period start changed to exceed the old period end, then there would be no
    // match, and it would not be possible to determine whether the refreshed period is a new
    // one or the old one.

    // include media group (audio/subtitle) playlists so all playlists are
    // positioned, not just the main video playlists
    var oldPlaylists = oldManifest.playlists.concat(getMediaGroupPlaylists(oldManifest));
    var newPlaylists = newManifest.playlists.concat(getMediaGroupPlaylists(newManifest)); // Save all seen timelineStarts to the new manifest. Although this potentially means that
    // there's a "memory leak" in that it will never stop growing, in reality, only a couple
    // of properties are saved for each seen Period. Even long running live streams won't
    // generate too many Periods, unless the stream is watched for decades. In the future,
    // this can be optimized by mapping to discontinuity sequence numbers for each timeline,
    // but it may not become an issue, and the additional info can be useful for debugging.

    newManifest.timelineStarts = getUniqueTimelineStarts([oldManifest.timelineStarts, newManifest.timelineStarts]);
    updateSequenceNumbers({
      oldPlaylists: oldPlaylists,
      newPlaylists: newPlaylists,
      timelineStarts: newManifest.timelineStarts
    });
    return newManifest;
  };
6270
6271 var generateSidxKey = function generateSidxKey(sidx) {
6272 return sidx && sidx.uri + '-' + byteRangeToString(sidx.byterange);
6273 };
6274
6275 var mergeDiscontiguousPlaylists = function mergeDiscontiguousPlaylists(playlists) {
6276 var mergedPlaylists = values(playlists.reduce(function (acc, playlist) {
6277 // assuming playlist IDs are the same across periods
6278 // TODO: handle multiperiod where representation sets are not the same
6279 // across periods
6280 var name = playlist.attributes.id + (playlist.attributes.lang || '');
6281
6282 if (!acc[name]) {
6283 // First Period
6284 acc[name] = playlist;
6285 acc[name].attributes.timelineStarts = [];
6286 } else {
6287 // Subsequent Periods
6288 if (playlist.segments) {
6289 var _acc$name$segments; // first segment of subsequent periods signal a discontinuity
6290
6291
6292 if (playlist.segments[0]) {
6293 playlist.segments[0].discontinuity = true;
6294 }
6295
6296 (_acc$name$segments = acc[name].segments).push.apply(_acc$name$segments, playlist.segments);
6297 } // bubble up contentProtection, this assumes all DRM content
6298 // has the same contentProtection
6299
6300
6301 if (playlist.attributes.contentProtection) {
6302 acc[name].attributes.contentProtection = playlist.attributes.contentProtection;
6303 }
6304 }
6305
6306 acc[name].attributes.timelineStarts.push({
6307 // Although they represent the same number, it's important to have both to make it
6308 // compatible with HLS potentially having a similar attribute.
6309 start: playlist.attributes.periodStart,
6310 timeline: playlist.attributes.periodStart
6311 });
6312 return acc;
6313 }, {}));
6314 return mergedPlaylists.map(function (playlist) {
6315 playlist.discontinuityStarts = findIndexes(playlist.segments || [], 'discontinuity');
6316 return playlist;
6317 });
6318 };
6319
6320 var addSidxSegmentsToPlaylist = function addSidxSegmentsToPlaylist(playlist, sidxMapping) {
6321 var sidxKey = generateSidxKey(playlist.sidx);
6322 var sidxMatch = sidxKey && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx;
6323
6324 if (sidxMatch) {
6325 addSidxSegmentsToPlaylist$1(playlist, sidxMatch, playlist.sidx.resolvedUri);
6326 }
6327
6328 return playlist;
6329 };
6330
6331 var addSidxSegmentsToPlaylists = function addSidxSegmentsToPlaylists(playlists, sidxMapping) {
6332 if (sidxMapping === void 0) {
6333 sidxMapping = {};
6334 }
6335
6336 if (!Object.keys(sidxMapping).length) {
6337 return playlists;
6338 }
6339
6340 for (var i in playlists) {
6341 playlists[i] = addSidxSegmentsToPlaylist(playlists[i], sidxMapping);
6342 }
6343
6344 return playlists;
6345 };
6346
6347 var formatAudioPlaylist = function formatAudioPlaylist(_ref, isAudioOnly) {
6348 var _attributes;
6349
6350 var attributes = _ref.attributes,
6351 segments = _ref.segments,
6352 sidx = _ref.sidx,
6353 mediaSequence = _ref.mediaSequence,
6354 discontinuitySequence = _ref.discontinuitySequence,
6355 discontinuityStarts = _ref.discontinuityStarts;
6356 var playlist = {
6357 attributes: (_attributes = {
6358 NAME: attributes.id,
6359 BANDWIDTH: attributes.bandwidth,
6360 CODECS: attributes.codecs
6361 }, _attributes['PROGRAM-ID'] = 1, _attributes),
6362 uri: '',
6363 endList: attributes.type === 'static',
6364 timeline: attributes.periodStart,
6365 resolvedUri: '',
6366 targetDuration: attributes.duration,
6367 discontinuitySequence: discontinuitySequence,
6368 discontinuityStarts: discontinuityStarts,
6369 timelineStarts: attributes.timelineStarts,
6370 mediaSequence: mediaSequence,
6371 segments: segments
6372 };
6373
6374 if (attributes.contentProtection) {
6375 playlist.contentProtection = attributes.contentProtection;
6376 }
6377
6378 if (sidx) {
6379 playlist.sidx = sidx;
6380 }
6381
6382 if (isAudioOnly) {
6383 playlist.attributes.AUDIO = 'audio';
6384 playlist.attributes.SUBTITLES = 'subs';
6385 }
6386
6387 return playlist;
6388 };
6389
6390 var formatVttPlaylist = function formatVttPlaylist(_ref2) {
6391 var _m3u8Attributes;
6392
6393 var attributes = _ref2.attributes,
6394 segments = _ref2.segments,
6395 mediaSequence = _ref2.mediaSequence,
6396 discontinuityStarts = _ref2.discontinuityStarts,
6397 discontinuitySequence = _ref2.discontinuitySequence;
6398
6399 if (typeof segments === 'undefined') {
6400 // vtt tracks may use single file in BaseURL
6401 segments = [{
6402 uri: attributes.baseUrl,
6403 timeline: attributes.periodStart,
6404 resolvedUri: attributes.baseUrl || '',
6405 duration: attributes.sourceDuration,
6406 number: 0
6407 }]; // targetDuration should be the same duration as the only segment
6408
6409 attributes.duration = attributes.sourceDuration;
6410 }
6411
6412 var m3u8Attributes = (_m3u8Attributes = {
6413 NAME: attributes.id,
6414 BANDWIDTH: attributes.bandwidth
6415 }, _m3u8Attributes['PROGRAM-ID'] = 1, _m3u8Attributes);
6416
6417 if (attributes.codecs) {
6418 m3u8Attributes.CODECS = attributes.codecs;
6419 }
6420
6421 return {
6422 attributes: m3u8Attributes,
6423 uri: '',
6424 endList: attributes.type === 'static',
6425 timeline: attributes.periodStart,
6426 resolvedUri: attributes.baseUrl || '',
6427 targetDuration: attributes.duration,
6428 timelineStarts: attributes.timelineStarts,
6429 discontinuityStarts: discontinuityStarts,
6430 discontinuitySequence: discontinuitySequence,
6431 mediaSequence: mediaSequence,
6432 segments: segments
6433 };
6434 };
6435
6436 var organizeAudioPlaylists = function organizeAudioPlaylists(playlists, sidxMapping, isAudioOnly) {
6437 if (sidxMapping === void 0) {
6438 sidxMapping = {};
6439 }
6440
6441 if (isAudioOnly === void 0) {
6442 isAudioOnly = false;
6443 }
6444
6445 var mainPlaylist;
6446 var formattedPlaylists = playlists.reduce(function (a, playlist) {
6447 var role = playlist.attributes.role && playlist.attributes.role.value || '';
6448 var language = playlist.attributes.lang || '';
6449 var label = playlist.attributes.label || 'main';
6450
6451 if (language && !playlist.attributes.label) {
6452 var roleLabel = role ? " (" + role + ")" : '';
6453 label = "" + playlist.attributes.lang + roleLabel;
6454 }
6455
6456 if (!a[label]) {
6457 a[label] = {
6458 language: language,
6459 autoselect: true,
6460 default: role === 'main',
6461 playlists: [],
6462 uri: ''
6463 };
6464 }
6465
6466 var formatted = addSidxSegmentsToPlaylist(formatAudioPlaylist(playlist, isAudioOnly), sidxMapping);
6467 a[label].playlists.push(formatted);
6468
6469 if (typeof mainPlaylist === 'undefined' && role === 'main') {
6470 mainPlaylist = playlist;
6471 mainPlaylist.default = true;
6472 }
6473
6474 return a;
6475 }, {}); // if no playlists have role "main", mark the first as main
6476
6477 if (!mainPlaylist) {
6478 var firstLabel = Object.keys(formattedPlaylists)[0];
6479 formattedPlaylists[firstLabel].default = true;
6480 }
6481
6482 return formattedPlaylists;
6483 };
6484
6485 var organizeVttPlaylists = function organizeVttPlaylists(playlists, sidxMapping) {
6486 if (sidxMapping === void 0) {
6487 sidxMapping = {};
6488 }
6489
6490 return playlists.reduce(function (a, playlist) {
6491 var label = playlist.attributes.lang || 'text';
6492
6493 if (!a[label]) {
6494 a[label] = {
6495 language: label,
6496 default: false,
6497 autoselect: false,
6498 playlists: [],
6499 uri: ''
6500 };
6501 }
6502
6503 a[label].playlists.push(addSidxSegmentsToPlaylist(formatVttPlaylist(playlist), sidxMapping));
6504 return a;
6505 }, {});
6506 };
6507
6508 var organizeCaptionServices = function organizeCaptionServices(captionServices) {
6509 return captionServices.reduce(function (svcObj, svc) {
6510 if (!svc) {
6511 return svcObj;
6512 }
6513
6514 svc.forEach(function (service) {
6515 var channel = service.channel,
6516 language = service.language;
6517 svcObj[language] = {
6518 autoselect: false,
6519 default: false,
6520 instreamId: channel,
6521 language: language
6522 };
6523
6524 if (service.hasOwnProperty('aspectRatio')) {
6525 svcObj[language].aspectRatio = service.aspectRatio;
6526 }
6527
6528 if (service.hasOwnProperty('easyReader')) {
6529 svcObj[language].easyReader = service.easyReader;
6530 }
6531
6532 if (service.hasOwnProperty('3D')) {
6533 svcObj[language]['3D'] = service['3D'];
6534 }
6535 });
6536 return svcObj;
6537 }, {});
6538 };
6539
6540 var formatVideoPlaylist = function formatVideoPlaylist(_ref3) {
6541 var _attributes2;
6542
6543 var attributes = _ref3.attributes,
6544 segments = _ref3.segments,
6545 sidx = _ref3.sidx,
6546 discontinuityStarts = _ref3.discontinuityStarts;
6547 var playlist = {
6548 attributes: (_attributes2 = {
6549 NAME: attributes.id,
6550 AUDIO: 'audio',
6551 SUBTITLES: 'subs',
6552 RESOLUTION: {
6553 width: attributes.width,
6554 height: attributes.height
6555 },
6556 CODECS: attributes.codecs,
6557 BANDWIDTH: attributes.bandwidth
6558 }, _attributes2['PROGRAM-ID'] = 1, _attributes2),
6559 uri: '',
6560 endList: attributes.type === 'static',
6561 timeline: attributes.periodStart,
6562 resolvedUri: '',
6563 targetDuration: attributes.duration,
6564 discontinuityStarts: discontinuityStarts,
6565 timelineStarts: attributes.timelineStarts,
6566 segments: segments
6567 };
6568
6569 if (attributes.frameRate) {
6570 playlist.attributes['FRAME-RATE'] = attributes.frameRate;
6571 }
6572
6573 if (attributes.contentProtection) {
6574 playlist.contentProtection = attributes.contentProtection;
6575 }
6576
6577 if (sidx) {
6578 playlist.sidx = sidx;
6579 }
6580
6581 return playlist;
6582 };
6583
6584 var videoOnly = function videoOnly(_ref4) {
6585 var attributes = _ref4.attributes;
6586 return attributes.mimeType === 'video/mp4' || attributes.mimeType === 'video/webm' || attributes.contentType === 'video';
6587 };
6588
6589 var audioOnly = function audioOnly(_ref5) {
6590 var attributes = _ref5.attributes;
6591 return attributes.mimeType === 'audio/mp4' || attributes.mimeType === 'audio/webm' || attributes.contentType === 'audio';
6592 };
6593
6594 var vttOnly = function vttOnly(_ref6) {
6595 var attributes = _ref6.attributes;
6596 return attributes.mimeType === 'text/vtt' || attributes.contentType === 'text';
6597 };
6598 /**
6599 * Contains start and timeline properties denoting a timeline start. For DASH, these will
6600 * be the same number.
6601 *
6602 * @typedef {Object} TimelineStart
6603 * @property {number} start - the start time of the timeline
6604 * @property {number} timeline - the timeline number
6605 */
6606
6607 /**
6608 * Adds appropriate media and discontinuity sequence values to the segments and playlists.
6609 *
6610 * Throughout mpd-parser, the `number` attribute is used in relation to `startNumber`, a
6611 * DASH specific attribute used in constructing segment URI's from templates. However, from
6612 * an HLS perspective, the `number` attribute on a segment would be its `mediaSequence`
6613 * value, which should start at the original media sequence value (or 0) and increment by 1
6614 * for each segment thereafter. Since DASH's `startNumber` values are independent per
6615 * period, it doesn't make sense to use it for `number`. Instead, assume everything starts
6616 * from a 0 mediaSequence value and increment from there.
6617 *
6618 * Note that VHS currently doesn't use the `number` property, but it can be helpful for
6619 * debugging and making sense of the manifest.
6620 *
6621 * For live playlists, to account for values increasing in manifests when periods are
6622 * removed on refreshes, merging logic should be used to update the numbers to their
6623 * appropriate values (to ensure they're sequential and increasing).
6624 *
6625 * @param {Object[]} playlists - the playlists to update
6626 * @param {TimelineStart[]} timelineStarts - the timeline starts for the manifest
6627 */
6628
6629
6630 var addMediaSequenceValues = function addMediaSequenceValues(playlists, timelineStarts) {
6631 // increment all segments sequentially
6632 playlists.forEach(function (playlist) {
6633 playlist.mediaSequence = 0;
6634 playlist.discontinuitySequence = findIndex(timelineStarts, function (_ref7) {
6635 var timeline = _ref7.timeline;
6636 return timeline === playlist.timeline;
6637 });
6638
6639 if (!playlist.segments) {
6640 return;
6641 }
6642
6643 playlist.segments.forEach(function (segment, index) {
6644 segment.number = index;
6645 });
6646 });
6647 };
6648 /**
6649 * Given a media group object, flattens all playlists within the media group into a single
6650 * array.
6651 *
6652 * @param {Object} mediaGroupObject - the media group object
6653 *
6654 * @return {Object[]}
6655 * The media group playlists
6656 */
6657
6658
6659 var flattenMediaGroupPlaylists = function flattenMediaGroupPlaylists(mediaGroupObject) {
6660 if (!mediaGroupObject) {
6661 return [];
6662 }
6663
6664 return Object.keys(mediaGroupObject).reduce(function (acc, label) {
6665 var labelContents = mediaGroupObject[label];
6666 return acc.concat(labelContents.playlists);
6667 }, []);
6668 };
6669
  // Converts parsed DASH playlists into an HLS-style (m3u8-shaped) main manifest
  // object, grouping audio/subtitle/caption renditions into mediaGroups. When a
  // previousManifest is supplied (live refresh), the new manifest is positioned
  // on the old manifest's timeline so sequence values stay continuous.
  var toM3u8 = function toM3u8(_ref8) {
    var _mediaGroups;

    var dashPlaylists = _ref8.dashPlaylists,
        locations = _ref8.locations,
        _ref8$sidxMapping = _ref8.sidxMapping,
        sidxMapping = _ref8$sidxMapping === void 0 ? {} : _ref8$sidxMapping,
        previousManifest = _ref8.previousManifest;

    if (!dashPlaylists.length) {
      return {};
    } // grab all main manifest attributes


    var _dashPlaylists$0$attr = dashPlaylists[0].attributes,
        duration = _dashPlaylists$0$attr.sourceDuration,
        type = _dashPlaylists$0$attr.type,
        suggestedPresentationDelay = _dashPlaylists$0$attr.suggestedPresentationDelay,
        minimumUpdatePeriod = _dashPlaylists$0$attr.minimumUpdatePeriod;
    // partition playlists by media type and merge each representation's
    // per-Period playlists into one multi-Period playlist
    var videoPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(videoOnly)).map(formatVideoPlaylist);
    var audioPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(audioOnly));
    var vttPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(vttOnly));
    var captions = dashPlaylists.map(function (playlist) {
      return playlist.attributes.captionServices;
    }).filter(Boolean);
    var manifest = {
      allowCache: true,
      discontinuityStarts: [],
      segments: [],
      endList: true,
      mediaGroups: (_mediaGroups = {
        AUDIO: {},
        VIDEO: {}
      }, _mediaGroups['CLOSED-CAPTIONS'] = {}, _mediaGroups.SUBTITLES = {}, _mediaGroups),
      uri: '',
      duration: duration,
      playlists: addSidxSegmentsToPlaylists(videoPlaylists, sidxMapping)
    };

    if (minimumUpdatePeriod >= 0) {
      // DASH expresses minimumUpdatePeriod in seconds; convert to milliseconds
      manifest.minimumUpdatePeriod = minimumUpdatePeriod * 1000;
    }

    if (locations) {
      manifest.locations = locations;
    }

    if (type === 'dynamic') {
      manifest.suggestedPresentationDelay = suggestedPresentationDelay;
    }

    // with no video playlists, the audio renditions act as the main playlists
    var isAudioOnly = manifest.playlists.length === 0;
    var organizedAudioGroup = audioPlaylists.length ? organizeAudioPlaylists(audioPlaylists, sidxMapping, isAudioOnly) : null;
    var organizedVttGroup = vttPlaylists.length ? organizeVttPlaylists(vttPlaylists, sidxMapping) : null;
    var formattedPlaylists = videoPlaylists.concat(flattenMediaGroupPlaylists(organizedAudioGroup), flattenMediaGroupPlaylists(organizedVttGroup));
    var playlistTimelineStarts = formattedPlaylists.map(function (_ref9) {
      var timelineStarts = _ref9.timelineStarts;
      return timelineStarts;
    });
    manifest.timelineStarts = getUniqueTimelineStarts(playlistTimelineStarts);
    addMediaSequenceValues(formattedPlaylists, manifest.timelineStarts);

    if (organizedAudioGroup) {
      manifest.mediaGroups.AUDIO.audio = organizedAudioGroup;
    }

    if (organizedVttGroup) {
      manifest.mediaGroups.SUBTITLES.subs = organizedVttGroup;
    }

    if (captions.length) {
      manifest.mediaGroups['CLOSED-CAPTIONS'].cc = organizeCaptionServices(captions);
    }

    // on a live refresh, line the new manifest up with the previous one so
    // media/discontinuity sequence values remain continuous
    if (previousManifest) {
      return positionManifestOnTimeline({
        oldManifest: previousManifest,
        newManifest: manifest
      });
    }

    return manifest;
  };
6753 /**
6754 * Calculates the R (repetition) value for a live stream (for the final segment
6755 * in a manifest where the r value is negative 1)
6756 *
6757 * @param {Object} attributes
6758 * Object containing all inherited attributes from parent elements with attribute
6759 * names as keys
6760 * @param {number} time
6761 * current time (typically the total time up until the final segment)
6762 * @param {number} duration
6763 * duration property for the given <S />
6764 *
6765 * @return {number}
6766 * R value to reach the end of the given period
6767 */
6768
6769
6770 var getLiveRValue = function getLiveRValue(attributes, time, duration) {
6771 var NOW = attributes.NOW,
6772 clientOffset = attributes.clientOffset,
6773 availabilityStartTime = attributes.availabilityStartTime,
6774 _attributes$timescale = attributes.timescale,
6775 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
6776 _attributes$periodSta = attributes.periodStart,
6777 periodStart = _attributes$periodSta === void 0 ? 0 : _attributes$periodSta,
6778 _attributes$minimumUp = attributes.minimumUpdatePeriod,
6779 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp;
6780 var now = (NOW + clientOffset) / 1000;
6781 var periodStartWC = availabilityStartTime + periodStart;
6782 var periodEndWC = now + minimumUpdatePeriod;
6783 var periodDuration = periodEndWC - periodStartWC;
6784 return Math.ceil((periodDuration * timescale - time) / duration);
6785 };
6786 /**
6787 * Uses information provided by SegmentTemplate.SegmentTimeline to determine segment
6788 * timing and duration
6789 *
6790 * @param {Object} attributes
6791 * Object containing all inherited attributes from parent elements with attribute
6792 * names as keys
6793 * @param {Object[]} segmentTimeline
6794 * List of objects representing the attributes of each S element contained within
6795 *
6796 * @return {{number: number, duration: number, time: number, timeline: number}[]}
6797 * List of Objects with segment timing and duration info
6798 */
6799
6800
  var parseByTimeline = function parseByTimeline(attributes, segmentTimeline) {
    var type = attributes.type,
        _attributes$minimumUp2 = attributes.minimumUpdatePeriod,
        minimumUpdatePeriod = _attributes$minimumUp2 === void 0 ? 0 : _attributes$minimumUp2,
        _attributes$media = attributes.media,
        media = _attributes$media === void 0 ? '' : _attributes$media,
        sourceDuration = attributes.sourceDuration,
        _attributes$timescale2 = attributes.timescale,
        timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
        _attributes$startNumb = attributes.startNumber,
        startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb,
        timeline = attributes.periodStart;
    var segments = [];
    var time = -1; // sentinel: no <S> element has been processed yet

    for (var sIndex = 0; sIndex < segmentTimeline.length; sIndex++) {
      var S = segmentTimeline[sIndex];
      var duration = S.d; // S@d: segment duration, in timescale units
      var repeat = S.r || 0; // S@r: number of additional repeats of this duration
      var segmentTime = S.t || 0; // S@t: explicit start time, in timescale units

      if (time < 0) {
        // first segment
        time = segmentTime;
      }

      if (segmentTime && segmentTime > time) {
        // discontinuity
        // TODO: How to handle this type of discontinuity
        // timeline++ here would treat it like HLS discontinuity and content would
        // get appended without gap
        // E.G.
        //  <S t="0" d="1" />
        //  <S d="1" />
        //  <S d="1" />
        //  <S t="5" d="1" />
        // would have $Time$ values of [0, 1, 2, 5]
        // should this be appended at time positions [0, 1, 2, 3],(#EXT-X-DISCONTINUITY)
        // or [0, 1, 2, gap, gap, 5]? (#EXT-X-GAP)
        // does the value of sourceDuration consider this when calculating arbitrary
        // negative @r repeat value?
        // E.G. Same elements as above with this added at the end
        //  <S d="1" r="-1" />
        //  with a sourceDuration of 10
        // Would the 2 gaps be included in the time duration calculations resulting in
        // 8 segments with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9] or 10 segments
        // with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9, 10, 11] ?
        time = segmentTime;
      }

      var count = void 0;

      if (repeat < 0) {
        // a negative @r means "repeat until the start of the next <S>", or for
        // the final <S>, until the end of the Period/refresh window
        var nextS = sIndex + 1;

        if (nextS === segmentTimeline.length) {
          // last segment
          if (type === 'dynamic' && minimumUpdatePeriod > 0 && media.indexOf('$Number$') > 0) {
            // live $Number$ templates repeat until the next manifest refresh
            count = getLiveRValue(attributes, time, duration);
          } else {
            // TODO: This may be incorrect depending on conclusion of TODO above
            count = (sourceDuration * timescale - time) / duration;
          }
        } else {
          count = (segmentTimeline[nextS].t - time) / duration;
        }
      } else {
        count = repeat + 1;
      }

      // emit `count` segments, numbering them sequentially from startNumber
      var end = startNumber + segments.length + count;
      var number = startNumber + segments.length;

      while (number < end) {
        segments.push({
          number: number,
          duration: duration / timescale, // converted to seconds
          time: time,
          timeline: timeline
        });
        time += duration;
        number++;
      }
    }

    return segments;
  };
6888
// Matches DASH template identifiers such as $RepresentationID$, $Number$,
// $Time%05d$ and the $$ escape sequence.
// Note: the identifier class is [A-Za-z]. The previous [A-z] range was a
// character-class bug: it also matched the non-letter characters between
// 'Z' and 'a' in ASCII ('[', '\', ']', '^', '_', '`').
var identifierPattern = /\$([A-Za-z]*)(?:(%0)([0-9]+)d)?\$/g;
6890 /**
6891 * Replaces template identifiers with corresponding values. To be used as the callback
6892 * for String.prototype.replace
6893 *
6894 * @name replaceCallback
6895 * @function
6896 * @param {string} match
6897 * Entire match of identifier
6898 * @param {string} identifier
6899 * Name of matched identifier
6900 * @param {string} format
6901 * Format tag string. Its presence indicates that padding is expected
6902 * @param {string} width
6903 * Desired length of the replaced value. Values less than this width shall be left
6904 * zero padded
6905 * @return {string}
6906 * Replacement for the matched identifier
6907 */
6908
6909 /**
6910 * Returns a function to be used as a callback for String.prototype.replace to replace
6911 * template identifiers
6912 *
6913 * @param {Obect} values
6914 * Object containing values that shall be used to replace known identifiers
6915 * @param {number} values.RepresentationID
6916 * Value of the Representation@id attribute
6917 * @param {number} values.Number
6918 * Number of the corresponding segment
6919 * @param {number} values.Bandwidth
6920 * Value of the Representation@bandwidth attribute.
6921 * @param {number} values.Time
6922 * Timestamp value of the corresponding segment
6923 * @return {replaceCallback}
6924 * Callback to be used with String.prototype.replace to replace identifiers
6925 */
6926
var identifierReplacement = function identifierReplacement(values) {
  // Returns a callback for String.prototype.replace that swaps template
  // identifiers (e.g. $Number$, $Time%05d$) for their values from `values`,
  // honoring the optional %0<width>d zero-padding format tag.
  return function (match, identifier, format, width) {
    // '$$' is the escape sequence for a literal '$'
    if (match === '$$') {
      return '$';
    }

    // Unknown identifiers are left untouched in the url
    if (typeof values[identifier] === 'undefined') {
      return match;
    }

    var value = '' + values[identifier];

    // Per spec, the format tag shall not be present with $RepresentationID$,
    // so it is returned unpadded.
    if (identifier === 'RepresentationID') {
      return value;
    }

    // Without a format tag the effective width is 1 (i.e. no padding).
    var padWidth = format ? parseInt(width, 10) : 1;

    if (value.length >= padWidth) {
      return value;
    }

    // Left-pad with zeros up to the requested width.
    var zeros = new Array(padWidth - value.length + 1).join('0');
    return zeros + value;
  };
};
6958 /**
6959 * Constructs a segment url from a template string
6960 *
6961 * @param {string} url
6962 * Template string to construct url from
6963 * @param {Obect} values
6964 * Object containing values that shall be used to replace known identifiers
6965 * @param {number} values.RepresentationID
6966 * Value of the Representation@id attribute
6967 * @param {number} values.Number
6968 * Number of the corresponding segment
6969 * @param {number} values.Bandwidth
6970 * Value of the Representation@bandwidth attribute.
6971 * @param {number} values.Time
6972 * Timestamp value of the corresponding segment
6973 * @return {string}
6974 * Segment url with identifiers replaced
6975 */
6976
6977
var constructTemplateUrl = function constructTemplateUrl(url, values) {
  // Build the identifier replacement callback once, then expand every
  // identifier occurrence in the template string.
  var replaceIdentifier = identifierReplacement(values);
  return url.replace(identifierPattern, replaceIdentifier);
};
6981 /**
6982 * Generates a list of objects containing timing and duration information about each
6983 * segment needed to generate segment uris and the complete segment object
6984 *
6985 * @param {Object} attributes
6986 * Object containing all inherited attributes from parent elements with attribute
6987 * names as keys
6988 * @param {Object[]|undefined} segmentTimeline
6989 * List of objects representing the attributes of each S element contained within
6990 * the SegmentTimeline element
6991 * @return {{number: number, duration: number, time: number, timeline: number}[]}
6992 * List of Objects with segment timing and duration info
6993 */
6994
6995
var parseTemplateInfo = function parseTemplateInfo(attributes, segmentTimeline) {
  // With an @duration attribute, segment timing is derived arithmetically.
  if (attributes.duration) {
    return parseByDuration(attributes);
  }

  // With a SegmentTimeline, timing comes from the S elements.
  if (segmentTimeline) {
    return parseByTimeline(attributes, segmentTimeline);
  }

  // Neither @duration nor SegmentTimeline is present: per spec there shall be
  // exactly one media segment spanning the whole source duration.
  return [{
    number: attributes.startNumber || 1,
    duration: attributes.sourceDuration,
    time: 0,
    timeline: attributes.periodStart
  }];
};
7014 /**
7015 * Generates a list of segments using information provided by the SegmentTemplate element
7016 *
7017 * @param {Object} attributes
7018 * Object containing all inherited attributes from parent elements with attribute
7019 * names as keys
7020 * @param {Object[]|undefined} segmentTimeline
7021 * List of objects representing the attributes of each S element contained within
7022 * the SegmentTimeline element
7023 * @return {Object[]}
7024 * List of segment objects
7025 */
7026
7027
var segmentsFromTemplate = function segmentsFromTemplate(attributes, segmentTimeline) {
  // Values substituted into $...$ identifiers; Number/Time are filled in
  // per-segment below.
  var templateValues = {
    RepresentationID: attributes.id,
    Bandwidth: attributes.bandwidth || 0
  };

  var initialization = attributes.initialization === void 0 ? {
    sourceURL: '',
    range: ''
  } : attributes.initialization;

  // Initialization (map) segment shared by every media segment generated below.
  var mapSegment = urlTypeToSegment({
    baseUrl: attributes.baseUrl,
    source: constructTemplateUrl(initialization.sourceURL, templateValues),
    range: initialization.range
  });

  var segments = parseTemplateInfo(attributes, segmentTimeline);

  return segments.map(function (segment) {
    templateValues.Number = segment.number;
    templateValues.Time = segment.time;
    var uri = constructTemplateUrl(attributes.media || '', templateValues);

    // See DASH spec section 5.3.9.2.2:
    // - if timescale isn't present on any level, default to 1.
    var timescale = attributes.timescale || 1;
    // - if presentationTimeOffset isn't present on any level, default to 0.
    var presentationTimeOffset = attributes.presentationTimeOffset || 0;

    // Even if the @t attribute is not specified for the segment, segment.time
    // was computed by parseTemplateInfo above, so it's always available.
    var presentationTime = attributes.periodStart + (segment.time - presentationTimeOffset) / timescale;

    return {
      uri: uri,
      timeline: segment.timeline,
      duration: segment.duration,
      resolvedUri: resolveUrl$1(attributes.baseUrl || '', uri),
      map: mapSegment,
      number: segment.number,
      presentationTime: presentationTime
    };
  });
};
7068 /**
7069 * Converts a <SegmentUrl> (of type URLType from the DASH spec 5.3.9.2 Table 14)
7070 * to an object that matches the output of a segment in videojs/mpd-parser
7071 *
7072 * @param {Object} attributes
7073 * Object containing all inherited attributes from parent elements with attribute
7074 * names as keys
7075 * @param {Object} segmentUrl
7076 * <SegmentURL> node to translate into a segment object
7077 * @return {Object} translated segment object
7078 */
7079
7080
var SegmentURLToSegmentObject = function SegmentURLToSegmentObject(attributes, segmentUrl) {
  var baseUrl = attributes.baseUrl;
  var initialization = attributes.initialization === void 0 ? {} : attributes.initialization;

  // Initialization segment described by the inherited attributes.
  var initSegment = urlTypeToSegment({
    baseUrl: baseUrl,
    source: initialization.sourceURL,
    range: initialization.range
  });

  // Media segment described by the <SegmentURL> node itself; the init
  // segment is attached to it as its `map`.
  var segment = urlTypeToSegment({
    baseUrl: baseUrl,
    source: segmentUrl.media,
    range: segmentUrl.mediaRange
  });

  segment.map = initSegment;
  return segment;
};
7098 /**
7099 * Generates a list of segments using information provided by the SegmentList element
7100 * SegmentList (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
7101 * node should be translated into segment.
7102 *
7103 * @param {Object} attributes
7104 * Object containing all inherited attributes from parent elements with attribute
7105 * names as keys
7106 * @param {Object[]|undefined} segmentTimeline
7107 * List of objects representing the attributes of each S element contained within
7108 * the SegmentTimeline element
7109 * @return {Object.<Array>} list of segments
7110 */
7111
7112
var segmentsFromList = function segmentsFromList(attributes, segmentTimeline) {
  var duration = attributes.duration;
  var segmentUrls = attributes.segmentUrls === void 0 ? [] : attributes.segmentUrls;
  var periodStart = attributes.periodStart;

  // Per spec (5.3.9.2.1) exactly one of @duration and SegmentTimeline must be
  // present; with neither there is no way to determine segment duration, and
  // with both the manifest is outside of spec.
  var hasDuration = Boolean(duration);
  var hasTimeline = Boolean(segmentTimeline);

  if (hasDuration === hasTimeline) {
    throw new Error(errors.SEGMENT_TIME_UNSPECIFIED);
  }

  var segmentUrlMap = segmentUrls.map(function (segmentUrlObject) {
    return SegmentURLToSegmentObject(attributes, segmentUrlObject);
  });

  var segmentTimeInfo = hasDuration ? parseByDuration(attributes) : parseByTimeline(attributes, segmentTimeline);

  var segments = segmentTimeInfo.map(function (segmentTime, index) {
    var segment = segmentUrlMap[index];

    // The SegmentTimeline may describe more entries than we have SegmentURLs
    // for; those positions map to undefined and are filtered out below.
    if (!segment) {
      return;
    }

    // See DASH spec section 5.3.9.2.2:
    // - if timescale isn't present on any level, default to 1.
    var timescale = attributes.timescale || 1;
    // - if presentationTimeOffset isn't present on any level, default to 0.
    var presentationTimeOffset = attributes.presentationTimeOffset || 0;

    segment.timeline = segmentTime.timeline;
    segment.duration = segmentTime.duration;
    segment.number = segmentTime.number;
    segment.presentationTime = periodStart + (segmentTime.time - presentationTimeOffset) / timescale;
    return segment;
  }).filter(function (segment) {
    return segment;
  });

  return segments;
};
7159
var generateSegments = function generateSegments(_ref) {
  var attributes = _ref.attributes;
  var segmentInfo = _ref.segmentInfo;

  // Pick the generator matching whichever segment information type is present
  // (template takes precedence over base, which takes precedence over list).
  var segmentsFn;
  var segmentAttributes;

  if (segmentInfo.template) {
    segmentsFn = segmentsFromTemplate;
    segmentAttributes = merge(attributes, segmentInfo.template);
  } else if (segmentInfo.base) {
    segmentsFn = segmentsFromBase;
    segmentAttributes = merge(attributes, segmentInfo.base);
  } else if (segmentInfo.list) {
    segmentsFn = segmentsFromList;
    segmentAttributes = merge(attributes, segmentInfo.list);
  }

  var segmentsInfo = {
    attributes: attributes
  };

  // No recognized segment information: return the inherited attributes only.
  if (!segmentsFn) {
    return segmentsInfo;
  }

  var segments = segmentsFn(segmentAttributes, segmentInfo.segmentTimeline);

  // The @duration attribute will be used to determine the playlist's
  // targetDuration, which must be in seconds. Since we've generated the
  // segment list, @duration no longer needs to be in @timescale units, so
  // convert it here.
  if (segmentAttributes.duration) {
    var timescale = segmentAttributes.timescale === void 0 ? 1 : segmentAttributes.timescale;
    segmentAttributes.duration = segmentAttributes.duration / timescale;
  } else if (segments.length) {
    // Without @duration, fall back to the largest (rounded-up) segment
    // duration as the target duration.
    segmentAttributes.duration = segments.reduce(function (max, segment) {
      return Math.max(max, Math.ceil(segment.duration));
    }, 0);
  } else {
    segmentAttributes.duration = 0;
  }

  segmentsInfo.attributes = segmentAttributes;
  segmentsInfo.segments = segments;

  // A SegmentBase with an @indexRange is a sidx box without actual segment
  // information; surface the sidx and clear the segment list.
  if (segmentInfo.base && segmentAttributes.indexRange) {
    segmentsInfo.sidx = segments[0];
    segmentsInfo.segments = [];
  }

  return segmentsInfo;
};
7215
var toPlaylists = function toPlaylists(representations) {
  // Each representation yields one playlist's worth of segment information.
  return representations.map(function (representation) {
    return generateSegments(representation);
  });
};
7219
var findChildren = function findChildren(element, name) {
  // Only direct children whose tagName matches exactly are returned.
  return from(element.childNodes).filter(function (child) {
    return child.tagName === name;
  });
};
7226
var getContent = function getContent(element) {
  // Trimmed text content of the node.
  var textContent = element.textContent;
  return textContent.trim();
};
7230 /**
7231 * Converts the provided string that may contain a division operation to a number.
7232 *
7233 * @param {string} value - the provided string value
7234 *
7235 * @return {number} the parsed string value
7236 */
7237
7238
var parseDivisionValue = function parseDivisionValue(value) {
  // Fold "a/b/c" left-to-right as successive divisions (the first string
  // element is coerced to a number by the '/' operator). A plain numeric
  // string passes through reduce untouched and is parsed directly.
  var result = value.split('/').reduce(function (quotient, divisor) {
    return quotient / divisor;
  });
  return parseFloat(result);
};
7244
var parseDuration = function parseDuration(str) {
  // Seconds per unit, ordered to line up with the regex capture groups:
  // years, months (30-day), days, hours, minutes, seconds.
  var UNIT_SECONDS = [365 * 24 * 60 * 60, 30 * 24 * 60 * 60, 24 * 60 * 60, 60 * 60, 60, 1];

  // ISO 8601 duration, e.g. P10Y10M10DT10H10M10.1S
  var durationRegex = /P(?:(\d*)Y)?(?:(\d*)M)?(?:(\d*)D)?(?:T(?:(\d*)H)?(?:(\d*)M)?(?:([\d.]*)S)?)?/;
  var match = durationRegex.exec(str);

  // Unparseable strings are treated as a zero duration.
  if (!match) {
    return 0;
  }

  return match.slice(1).reduce(function (total, group, index) {
    // Missing groups are undefined and contribute zero.
    return total + parseFloat(group || 0) * UNIT_SECONDS[index];
  }, 0);
};
7269
var parseDate = function parseDate(str) {
  // ISO 8601 date-time with no timezone designator: YYYY-MM-DDThh:mm:ss(.s+)
  var dateRegex = /^\d+-\d+-\d+T\d+:\d+:\d+(\.\d+)?$/;

  // If the date string does not specify a timezone, it must be interpreted
  // as UTC here, which is expressed by appending 'Z'.
  var normalized = dateRegex.test(str) ? str + 'Z' : str;
  return Date.parse(normalized);
};
7282
/**
 * Attribute parsers keyed by MPD attribute name. ISO 8601 durations resolve
 * to seconds, dates to epoch values, numeric attributes to numbers; anything
 * without a dedicated parser falls through to DEFAULT, which returns the raw
 * string.
 */
var parsers = (function () {
  // Shared parser for attributes that are plain base-10 integers.
  var toInteger = function toInteger(value) {
    return parseInt(value, 10);
  };

  return {
    // Duration of the entire Media Presentation (ISO 8601 duration -> seconds).
    mediaPresentationDuration: parseDuration,

    // Segment availability anchor (ISO 8601 date -> seconds from unix epoch).
    availabilityStartTime: function availabilityStartTime(value) {
      return parseDate(value) / 1000;
    },

    // Smallest period between potential MPD changes (ISO 8601 duration -> seconds).
    minimumUpdatePeriod: parseDuration,

    // Suggested presentation delay (ISO 8601 duration -> seconds).
    suggestedPresentationDelay: parseDuration,

    // Type of mpd ("static" or "dynamic"); passed through unchanged.
    type: function type(value) {
      return value;
    },

    // Smallest time shifting buffer across Representations (duration -> seconds).
    timeShiftBufferDepth: parseDuration,

    // PeriodStart relative to availabilityStartTime (duration -> seconds).
    start: parseDuration,

    // Width of the visual presentation, in pixels.
    width: toInteger,

    // Height of the visual presentation, in pixels.
    height: toInteger,

    // Bitrate of the representation.
    bandwidth: toInteger,

    // Frame rate, possibly expressed as a division such as "30000/1001".
    frameRate: parseDivisionValue,

    // Number of the first Media Segment in this Representation in the Period.
    startNumber: toInteger,

    // Timescale in units per second.
    timescale: toInteger,

    // Presentation time offset, in @timescale units.
    presentationTimeOffset: toInteger,

    // Constant approximate Segment duration, in @timescale units.
    // NOTE: The <Period> element also carries an @duration attribute (an ISO
    // 8601 duration for the Period itself). That usage is not otherwise
    // supported by the parser, but we fall back to parseDuration for it to
    // prevent errors.
    duration: function duration(value) {
      var parsedValue = parseInt(value, 10);
      return isNaN(parsedValue) ? parseDuration(value) : parsedValue;
    },

    // S@d: Segment duration, in @timescale units.
    d: toInteger,

    // S@t: MPD start time of the first Segment in the series, in @timescale
    // units, relative to the beginning of the Period.
    t: toInteger,

    // S@r: repeat count of following contiguous Segments with the same @d.
    r: toInteger,

    // Fallback for all other attributes: no-op, returns the raw string.
    DEFAULT: function DEFAULT(value) {
      return value;
    }
  };
})();
7534 /**
7535 * Gets all the attributes and values of the provided node, parses attributes with known
7536 * types, and returns an object with attribute names mapped to values.
7537 *
7538 * @param {Node} el
7539 * The node to parse attributes from
7540 * @return {Object}
7541 * Object with all attributes of el parsed
7542 */
7543
var parseAttributes = function parseAttributes(el) {
  // Nodes without attributes (including null/undefined nodes) parse to an
  // empty map.
  if (!el || !el.attributes) {
    return {};
  }

  return from(el.attributes).reduce(function (parsed, attribute) {
    var parseFn = parsers[attribute.name] || parsers.DEFAULT;
    parsed[attribute.name] = parseFn(attribute.value);
    return parsed;
  }, {});
};
7555
// Maps DASH ContentProtection @schemeIdUri UUID urns to key-system strings.
// The UUID keys are hardcoded lowercase; input urns are lowercased before
// lookup (see generateKeySystemInformation below).
var keySystemsMap = {
  'urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b': 'org.w3.clearkey',
  'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed': 'com.widevine.alpha',
  'urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95': 'com.microsoft.playready',
  'urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb': 'com.adobe.primetime'
};
7562 /**
7563 * Builds a list of urls that is the product of the reference urls and BaseURL values
7564 *
7565 * @param {string[]} referenceUrls
7566 * List of reference urls to resolve to
7567 * @param {Node[]} baseUrlElements
7568 * List of BaseURL nodes from the mpd
7569 * @return {string[]}
7570 * List of resolved urls
7571 */
7572
var buildBaseUrls = function buildBaseUrls(referenceUrls, baseUrlElements) {
  // No <BaseURL> children: the reference urls pass through unchanged.
  if (!baseUrlElements.length) {
    return referenceUrls;
  }

  // Cartesian product: every reference resolved against every BaseURL value.
  var resolved = referenceUrls.map(function (reference) {
    return baseUrlElements.map(function (baseUrlElement) {
      return resolveUrl$1(reference, getContent(baseUrlElement));
    });
  });

  return flatten(resolved);
};
7584 /**
7585 * Contains all Segment information for its containing AdaptationSet
7586 *
7587 * @typedef {Object} SegmentInformation
7588 * @property {Object|undefined} template
7589 * Contains the attributes for the SegmentTemplate node
7590 * @property {Object[]|undefined} segmentTimeline
7591 * Contains a list of atrributes for each S node within the SegmentTimeline node
7592 * @property {Object|undefined} list
7593 * Contains the attributes for the SegmentList node
7594 * @property {Object|undefined} base
7595 * Contains the attributes for the SegmentBase node
7596 */
7597
7598 /**
7599 * Returns all available Segment information contained within the AdaptationSet node
7600 *
7601 * @param {Node} adaptationSet
7602 * The AdaptationSet node to get Segment information from
7603 * @return {SegmentInformation}
7604 * The Segment information contained within the provided AdaptationSet
7605 */
7606
7607
var getSegmentInformation = function getSegmentInformation(adaptationSet) {
  var segmentTemplate = findChildren(adaptationSet, 'SegmentTemplate')[0];
  var segmentList = findChildren(adaptationSet, 'SegmentList')[0];
  // Each <SegmentURL> is tagged so downstream consumers can tell where the
  // attributes came from.
  var segmentUrls = segmentList && findChildren(segmentList, 'SegmentURL').map(function (s) {
    return merge({
      tag: 'SegmentURL'
    }, parseAttributes(s));
  });
  var segmentBase = findChildren(adaptationSet, 'SegmentBase')[0];
  // A <SegmentTimeline> may appear under either <SegmentList> or <SegmentTemplate>.
  var segmentTimelineParentNode = segmentList || segmentTemplate;
  var segmentTimeline = segmentTimelineParentNode && findChildren(segmentTimelineParentNode, 'SegmentTimeline')[0];
  // An <Initialization> may appear under <SegmentList>, <SegmentBase> or <SegmentTemplate>.
  var segmentInitializationParentNode = segmentList || segmentBase || segmentTemplate;
  var segmentInitialization = segmentInitializationParentNode && findChildren(segmentInitializationParentNode, 'Initialization')[0]; // SegmentTemplate is handled slightly differently, since it can have both
  // @initialization and an <Initialization> node. @initialization can be templated,
  // while the node can have a url and range specified. If the <SegmentTemplate> has
  // both @initialization and an <Initialization> subelement we opt to override with
  // the node, as this interaction is not defined in the spec.

  var template = segmentTemplate && parseAttributes(segmentTemplate);

  if (template && segmentInitialization) {
    template.initialization = segmentInitialization && parseAttributes(segmentInitialization);
  } else if (template && template.initialization) {
    // If it is @initialization we convert it to an object since this is the format that
    // later functions will rely on for the initialization segment. This is only valid
    // for <SegmentTemplate>
    template.initialization = {
      sourceURL: template.initialization
    };
  }

  var segmentInfo = {
    template: template,
    segmentTimeline: segmentTimeline && findChildren(segmentTimeline, 'S').map(function (s) {
      return parseAttributes(s);
    }),
    // parseAttributes tolerates an undefined node and returns {} in that case,
    // so it is safe to call even when no <Initialization> was found.
    list: segmentList && merge(parseAttributes(segmentList), {
      segmentUrls: segmentUrls,
      initialization: parseAttributes(segmentInitialization)
    }),
    base: segmentBase && merge(parseAttributes(segmentBase), {
      initialization: parseAttributes(segmentInitialization)
    })
  };
  // Drop absent segment information types so callers can key off presence alone.
  Object.keys(segmentInfo).forEach(function (key) {
    if (!segmentInfo[key]) {
      delete segmentInfo[key];
    }
  });
  return segmentInfo;
};
7659 /**
7660 * Contains Segment information and attributes needed to construct a Playlist object
7661 * from a Representation
7662 *
7663 * @typedef {Object} RepresentationInformation
7664 * @property {SegmentInformation} segmentInfo
7665 * Segment information for this Representation
7666 * @property {Object} attributes
7667 * Inherited attributes for this Representation
7668 */
7669
7670 /**
7671 * Maps a Representation node to an object containing Segment information and attributes
7672 *
7673 * @name inheritBaseUrlsCallback
7674 * @function
7675 * @param {Node} representation
7676 * Representation node from the mpd
7677 * @return {RepresentationInformation}
7678 * Representation information needed to construct a Playlist object
7679 */
7680
7681 /**
7682 * Returns a callback for Array.prototype.map for mapping Representation nodes to
7683 * Segment information and attributes using inherited BaseURL nodes.
7684 *
7685 * @param {Object} adaptationSetAttributes
7686 * Contains attributes inherited by the AdaptationSet
7687 * @param {string[]} adaptationSetBaseUrls
7688 * Contains list of resolved base urls inherited by the AdaptationSet
7689 * @param {SegmentInformation} adaptationSetSegmentInfo
7690 * Contains Segment information for the AdaptationSet
7691 * @return {inheritBaseUrlsCallback}
7692 * Callback map function
7693 */
7694
7695
var inheritBaseUrls = function inheritBaseUrls(adaptationSetAttributes, adaptationSetBaseUrls, adaptationSetSegmentInfo) {
  return function (representation) {
    var repBaseUrlElements = findChildren(representation, 'BaseURL');
    var repBaseUrls = buildBaseUrls(adaptationSetBaseUrls, repBaseUrlElements);
    // Representation attributes win over inherited AdaptationSet attributes.
    var attributes = merge(adaptationSetAttributes, parseAttributes(representation));
    var representationSegmentInfo = getSegmentInformation(representation);

    // One entry per resolved base url, each carrying the merged segment info.
    return repBaseUrls.map(function (baseUrl) {
      var segmentInfo = merge(adaptationSetSegmentInfo, representationSegmentInfo);
      var attributesWithBaseUrl = merge(attributes, {
        baseUrl: baseUrl
      });
      return {
        segmentInfo: segmentInfo,
        attributes: attributesWithBaseUrl
      };
    });
  };
};
7712 /**
7713 * Tranforms a series of content protection nodes to
7714 * an object containing pssh data by key system
7715 *
7716 * @param {Node[]} contentProtectionNodes
7717 * Content protection nodes
7718 * @return {Object}
7719 * Object containing pssh data by key system
7720 */
7721
7722
var generateKeySystemInformation = function generateKeySystemInformation(contentProtectionNodes) {
  return contentProtectionNodes.reduce(function (acc, node) {
    var attributes = parseAttributes(node);

    // Although it could be argued that according to the UUID RFC spec the UUID string (a-f chars) should be generated
    // as a lowercase string it also mentions it should be treated as case-insensitive on input. Since the key system
    // UUIDs in the keySystemsMap are hardcoded as lowercase in the codebase there isn't any reason not to do
    // .toLowerCase() on the input UUID string from the manifest (at least I could not think of one).
    if (attributes.schemeIdUri) {
      attributes.schemeIdUri = attributes.schemeIdUri.toLowerCase();
    }

    var keySystem = keySystemsMap[attributes.schemeIdUri];

    // Unknown scheme urns are skipped entirely.
    if (!keySystem) {
      return acc;
    }

    acc[keySystem] = {
      attributes: attributes
    };

    // Optional base64 pssh initialization data carried in a <cenc:pssh> child.
    var psshNode = findChildren(node, 'cenc:pssh')[0];

    if (psshNode) {
      var pssh = getContent(psshNode);
      acc[keySystem].pssh = pssh && decodeB64ToUint8Array(pssh);
    }

    return acc;
  }, {});
};
7751
7752
var parseCaptionServiceMetadata = function parseCaptionServiceMetadata(service) {
  // 608 captions (defined in ANSI_SCTE 214-1 2016)
  if (service.schemeIdUri === 'urn:scte:dash:cc:cea-608:2015') {
    var values = typeof service.value !== 'string' ? [] : service.value.split(';');

    return values.map(function (value) {
      var channel;
      // Default the language to the raw value
      var language = value;

      if (/^CC\d=/.test(value)) {
        // "CC1=eng" form: channel and language separated by '='
        var parts = value.split('=');
        channel = parts[0];
        language = parts[1];
      } else if (/^CC\d$/.test(value)) {
        // Bare channel, e.g. "CC1"
        channel = value;
      }

      return {
        channel: channel,
        language: language
      };
    });
  }

  // 708 captions
  if (service.schemeIdUri === 'urn:scte:dash:cc:cea-708:2015') {
    var _values = typeof service.value !== 'string' ? [] : service.value.split(';');

    return _values.map(function (value) {
      var flags = {
        // service or channel number 1-63
        'channel': undefined,
        // language is a 3ALPHA per ISO 639.2/B
        // field is required
        'language': undefined,
        // BIT 1/0 or ?
        // default value is 1, meaning 16:9 aspect ratio, 0 is 4:3, ? is unknown
        'aspectRatio': 1,
        // BIT 1/0
        // easy reader flag indicated the text is tailed to the needs of beginning readers
        // default 0, or off
        'easyReader': 0,
        // BIT 1/0
        // If 3d metadata is present (CEA-708.1) then 1
        // default 0
        '3D': 0
      };

      if (!/=/.test(value)) {
        // No options present: the whole value is taken as the language
        flags.language = value;
      } else {
        var _parts = value.split('=');
        var channel = _parts[0];
        var opts = _parts[1] === void 0 ? '' : _parts[1];

        flags.channel = channel;
        // Fallback until (and unless) a lang option overrides it below
        flags.language = value;

        opts.split(',').forEach(function (opt) {
          var optPair = opt.split(':');
          var name = optPair[0];
          var val = optPair[1];

          if (name === 'lang') {
            flags.language = val;
          } else if (name === 'er') {
            // er: easy reader
            flags.easyReader = Number(val);
          } else if (name === 'war') {
            // war: wide aspect ratio
            flags.aspectRatio = Number(val);
          } else if (name === '3D') {
            flags['3D'] = Number(val);
          }
        });
      }

      if (flags.channel) {
        flags.channel = 'SERVICE' + flags.channel;
      }

      return flags;
    });
  }
};
7835 /**
7836 * Maps an AdaptationSet node to a list of Representation information objects
7837 *
7838 * @name toRepresentationsCallback
7839 * @function
7840 * @param {Node} adaptationSet
7841 * AdaptationSet node from the mpd
7842 * @return {RepresentationInformation[]}
   * List of objects containing Representation information
7844 */
7845
7846 /**
7847 * Returns a callback for Array.prototype.map for mapping AdaptationSet nodes to a list of
7848 * Representation information objects
7849 *
7850 * @param {Object} periodAttributes
7851 * Contains attributes inherited by the Period
7852 * @param {string[]} periodBaseUrls
7853 * Contains list of resolved base urls inherited by the Period
7854 * @param {string[]} periodSegmentInfo
7855 * Contains Segment Information at the period level
7856 * @return {toRepresentationsCallback}
7857 * Callback map function
7858 */
7859
7860
7861 var toRepresentations = function toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo) {
7862 return function (adaptationSet) {
7863 var adaptationSetAttributes = parseAttributes(adaptationSet);
7864 var adaptationSetBaseUrls = buildBaseUrls(periodBaseUrls, findChildren(adaptationSet, 'BaseURL'));
7865 var role = findChildren(adaptationSet, 'Role')[0];
7866 var roleAttributes = {
7867 role: parseAttributes(role)
7868 };
7869 var attrs = merge(periodAttributes, adaptationSetAttributes, roleAttributes);
7870 var accessibility = findChildren(adaptationSet, 'Accessibility')[0];
7871 var captionServices = parseCaptionServiceMetadata(parseAttributes(accessibility));
7872
7873 if (captionServices) {
7874 attrs = merge(attrs, {
7875 captionServices: captionServices
7876 });
7877 }
7878
7879 var label = findChildren(adaptationSet, 'Label')[0];
7880
7881 if (label && label.childNodes.length) {
7882 var labelVal = label.childNodes[0].nodeValue.trim();
7883 attrs = merge(attrs, {
7884 label: labelVal
7885 });
7886 }
7887
7888 var contentProtection = generateKeySystemInformation(findChildren(adaptationSet, 'ContentProtection'));
7889
7890 if (Object.keys(contentProtection).length) {
7891 attrs = merge(attrs, {
7892 contentProtection: contentProtection
7893 });
7894 }
7895
7896 var segmentInfo = getSegmentInformation(adaptationSet);
7897 var representations = findChildren(adaptationSet, 'Representation');
7898 var adaptationSetSegmentInfo = merge(periodSegmentInfo, segmentInfo);
7899 return flatten(representations.map(inheritBaseUrls(attrs, adaptationSetBaseUrls, adaptationSetSegmentInfo)));
7900 };
7901 };
7902 /**
7903 * Contains all period information for mapping nodes onto adaptation sets.
7904 *
7905 * @typedef {Object} PeriodInformation
7906 * @property {Node} period.node
7907 * Period node from the mpd
7908 * @property {Object} period.attributes
7909 * Parsed period attributes from node plus any added
7910 */
7911
7912 /**
7913 * Maps a PeriodInformation object to a list of Representation information objects for all
7914 * AdaptationSet nodes contained within the Period.
7915 *
7916 * @name toAdaptationSetsCallback
7917 * @function
7918 * @param {PeriodInformation} period
7919 * Period object containing necessary period information
7920 * @param {number} periodStart
7921 * Start time of the Period within the mpd
7922 * @return {RepresentationInformation[]}
   * List of objects containing Representation information
7924 */
7925
7926 /**
7927 * Returns a callback for Array.prototype.map for mapping Period nodes to a list of
7928 * Representation information objects
7929 *
7930 * @param {Object} mpdAttributes
7931 * Contains attributes inherited by the mpd
7932 * @param {string[]} mpdBaseUrls
7933 * Contains list of resolved base urls inherited by the mpd
7934 * @return {toAdaptationSetsCallback}
7935 * Callback map function
7936 */
7937
7938
7939 var toAdaptationSets = function toAdaptationSets(mpdAttributes, mpdBaseUrls) {
7940 return function (period, index) {
7941 var periodBaseUrls = buildBaseUrls(mpdBaseUrls, findChildren(period.node, 'BaseURL'));
7942 var periodAttributes = merge(mpdAttributes, {
7943 periodStart: period.attributes.start
7944 });
7945
7946 if (typeof period.attributes.duration === 'number') {
7947 periodAttributes.periodDuration = period.attributes.duration;
7948 }
7949
7950 var adaptationSets = findChildren(period.node, 'AdaptationSet');
7951 var periodSegmentInfo = getSegmentInformation(period.node);
7952 return flatten(adaptationSets.map(toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo)));
7953 };
7954 };
7955 /**
7956 * Gets Period@start property for a given period.
7957 *
7958 * @param {Object} options
7959 * Options object
7960 * @param {Object} options.attributes
7961 * Period attributes
7962 * @param {Object} [options.priorPeriodAttributes]
7963 * Prior period attributes (if prior period is available)
7964 * @param {string} options.mpdType
7965 * The MPD@type these periods came from
7966 * @return {number|null}
7967 * The period start, or null if it's an early available period or error
7968 */
7969
7970
7971 var getPeriodStart = function getPeriodStart(_ref) {
7972 var attributes = _ref.attributes,
7973 priorPeriodAttributes = _ref.priorPeriodAttributes,
7974 mpdType = _ref.mpdType; // Summary of period start time calculation from DASH spec section 5.3.2.1
7975 //
7976 // A period's start is the first period's start + time elapsed after playing all
7977 // prior periods to this one. Periods continue one after the other in time (without
7978 // gaps) until the end of the presentation.
7979 //
7980 // The value of Period@start should be:
7981 // 1. if Period@start is present: value of Period@start
7982 // 2. if previous period exists and it has @duration: previous Period@start +
7983 // previous Period@duration
7984 // 3. if this is first period and MPD@type is 'static': 0
7985 // 4. in all other cases, consider the period an "early available period" (note: not
7986 // currently supported)
7987 // (1)
7988
7989 if (typeof attributes.start === 'number') {
7990 return attributes.start;
7991 } // (2)
7992
7993
7994 if (priorPeriodAttributes && typeof priorPeriodAttributes.start === 'number' && typeof priorPeriodAttributes.duration === 'number') {
7995 return priorPeriodAttributes.start + priorPeriodAttributes.duration;
7996 } // (3)
7997
7998
7999 if (!priorPeriodAttributes && mpdType === 'static') {
8000 return 0;
8001 } // (4)
8002 // There is currently no logic for calculating the Period@start value if there is
8003 // no Period@start or prior Period@start and Period@duration available. This is not made
8004 // explicit by the DASH interop guidelines or the DASH spec, however, since there's
8005 // nothing about any other resolution strategies, it's implied. Thus, this case should
8006 // be considered an early available period, or error, and null should suffice for both
8007 // of those cases.
8008
8009
8010 return null;
8011 };
8012 /**
8013 * Traverses the mpd xml tree to generate a list of Representation information objects
8014 * that have inherited attributes from parent nodes
8015 *
8016 * @param {Node} mpd
8017 * The root node of the mpd
8018 * @param {Object} options
8019 * Available options for inheritAttributes
8020 * @param {string} options.manifestUri
8021 * The uri source of the mpd
8022 * @param {number} options.NOW
8023 * Current time per DASH IOP. Default is current time in ms since epoch
8024 * @param {number} options.clientOffset
8025 * Client time difference from NOW (in milliseconds)
8026 * @return {RepresentationInformation[]}
8027 * List of objects containing Representation information
8028 */
8029
8030
  var inheritAttributes = function inheritAttributes(mpd, options) {
    if (options === void 0) {
      options = {};
    }

    var _options = options,
        _options$manifestUri = _options.manifestUri,
        manifestUri = _options$manifestUri === void 0 ? '' : _options$manifestUri,
        _options$NOW = _options.NOW,
        NOW = _options$NOW === void 0 ? Date.now() : _options$NOW,
        _options$clientOffset = _options.clientOffset,
        clientOffset = _options$clientOffset === void 0 ? 0 : _options$clientOffset;
    var periodNodes = findChildren(mpd, 'Period');

    // an MPD without at least one Period cannot describe any media
    if (!periodNodes.length) {
      throw new Error(errors.INVALID_NUMBER_OF_PERIOD);
    }

    var locations = findChildren(mpd, 'Location');
    var mpdAttributes = parseAttributes(mpd);
    var mpdBaseUrls = buildBaseUrls([manifestUri], findChildren(mpd, 'BaseURL')); // See DASH spec section 5.3.1.2, Semantics of MPD element. Default type to 'static'.

    mpdAttributes.type = mpdAttributes.type || 'static';
    mpdAttributes.sourceDuration = mpdAttributes.mediaPresentationDuration || 0;
    mpdAttributes.NOW = NOW;
    mpdAttributes.clientOffset = clientOffset;

    // Location elements give alternate urls where the manifest can be refreshed
    if (locations.length) {
      mpdAttributes.locations = locations.map(getContent);
    }

    var periods = []; // Since toAdaptationSets acts on individual periods right now, the simplest approach to
    // adding properties that require looking at prior periods is to parse attributes and add
    // missing ones before toAdaptationSets is called. If more such properties are added, it
    // may be better to refactor toAdaptationSets.

    periodNodes.forEach(function (node, index) {
      var attributes = parseAttributes(node); // Use the last modified prior period, as it may contain added information necessary
      // for this period.

      var priorPeriod = periods[index - 1];
      // resolve Period@start (explicit, derived from the prior period, zero for
      // the first period of a static MPD, or null for early available periods)
      attributes.start = getPeriodStart({
        attributes: attributes,
        priorPeriodAttributes: priorPeriod ? priorPeriod.attributes : null,
        mpdType: mpdAttributes.type
      });
      periods.push({
        node: node,
        attributes: attributes
      });
    });
    return {
      locations: mpdAttributes.locations,
      representationInfo: flatten(periods.map(toAdaptationSets(mpdAttributes, mpdBaseUrls)))
    };
  };
8087
8088 var stringToMpdXml = function stringToMpdXml(manifestString) {
8089 if (manifestString === '') {
8090 throw new Error(errors.DASH_EMPTY_MANIFEST);
8091 }
8092
8093 var parser = new xmldom.DOMParser();
8094 var xml;
8095 var mpd;
8096
8097 try {
8098 xml = parser.parseFromString(manifestString, 'application/xml');
8099 mpd = xml && xml.documentElement.tagName === 'MPD' ? xml.documentElement : null;
8100 } catch (e) {// ie 11 throwsw on invalid xml
8101 }
8102
8103 if (!mpd || mpd && mpd.getElementsByTagName('parsererror').length > 0) {
8104 throw new Error(errors.DASH_INVALID_XML);
8105 }
8106
8107 return mpd;
8108 };
8109 /**
8110 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
8111 *
8112 * @param {string} mpd
8113 * XML string of the MPD manifest
8114 * @return {Object|null}
8115 * Attributes of UTCTiming node specified in the manifest. Null if none found
8116 */
8117
8118
8119 var parseUTCTimingScheme = function parseUTCTimingScheme(mpd) {
8120 var UTCTimingNode = findChildren(mpd, 'UTCTiming')[0];
8121
8122 if (!UTCTimingNode) {
8123 return null;
8124 }
8125
8126 var attributes = parseAttributes(UTCTimingNode);
8127
8128 switch (attributes.schemeIdUri) {
8129 case 'urn:mpeg:dash:utc:http-head:2014':
8130 case 'urn:mpeg:dash:utc:http-head:2012':
8131 attributes.method = 'HEAD';
8132 break;
8133
8134 case 'urn:mpeg:dash:utc:http-xsdate:2014':
8135 case 'urn:mpeg:dash:utc:http-iso:2014':
8136 case 'urn:mpeg:dash:utc:http-xsdate:2012':
8137 case 'urn:mpeg:dash:utc:http-iso:2012':
8138 attributes.method = 'GET';
8139 break;
8140
8141 case 'urn:mpeg:dash:utc:direct:2014':
8142 case 'urn:mpeg:dash:utc:direct:2012':
8143 attributes.method = 'DIRECT';
8144 attributes.value = Date.parse(attributes.value);
8145 break;
8146
8147 case 'urn:mpeg:dash:utc:http-ntp:2014':
8148 case 'urn:mpeg:dash:utc:ntp:2014':
8149 case 'urn:mpeg:dash:utc:sntp:2014':
8150 default:
8151 throw new Error(errors.UNSUPPORTED_UTC_TIMING_SCHEME);
8152 }
8153
8154 return attributes;
8155 };
8156 /*
8157 * Given a DASH manifest string and options, parses the DASH manifest into an object in the
8158 * form outputed by m3u8-parser and accepted by videojs/http-streaming.
8159 *
8160 * For live DASH manifests, if `previousManifest` is provided in options, then the newly
8161 * parsed DASH manifest will have its media sequence and discontinuity sequence values
8162 * updated to reflect its position relative to the prior manifest.
8163 *
8164 * @param {string} manifestString - the DASH manifest as a string
8165 * @param {options} [options] - any options
8166 *
8167 * @return {Object} the manifest object
8168 */
8169
8170 var parse = function parse(manifestString, options) {
8171 if (options === void 0) {
8172 options = {};
8173 }
8174
8175 var parsedManifestInfo = inheritAttributes(stringToMpdXml(manifestString), options);
8176 var playlists = toPlaylists(parsedManifestInfo.representationInfo);
8177 return toM3u8({
8178 dashPlaylists: playlists,
8179 locations: parsedManifestInfo.locations,
8180 sidxMapping: options.sidxMapping,
8181 previousManifest: options.previousManifest
8182 });
8183 };
8184 /**
8185 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
8186 *
8187 * @param {string} manifestString
8188 * XML string of the MPD manifest
8189 * @return {Object|null}
8190 * Attributes of UTCTiming node specified in the manifest. Null if none found
8191 */
8192
8193
8194 var parseUTCTiming = function parseUTCTiming(manifestString) {
8195 return parseUTCTimingScheme(stringToMpdXml(manifestString));
8196 };
8197
8198 var MAX_UINT32 = Math.pow(2, 32);
8199
8200 var getUint64$1 = function getUint64(uint8) {
8201 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
8202 var value;
8203
8204 if (dv.getBigUint64) {
8205 value = dv.getBigUint64(0);
8206
8207 if (value < Number.MAX_SAFE_INTEGER) {
8208 return Number(value);
8209 }
8210
8211 return value;
8212 }
8213
8214 return dv.getUint32(0) * MAX_UINT32 + dv.getUint32(4);
8215 };
8216
8217 var numbers = {
8218 getUint64: getUint64$1,
8219 MAX_UINT32: MAX_UINT32
8220 };
8221
8222 var getUint64 = numbers.getUint64;
8223
8224 var parseSidx = function parseSidx(data) {
8225 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
8226 result = {
8227 version: data[0],
8228 flags: new Uint8Array(data.subarray(1, 4)),
8229 references: [],
8230 referenceId: view.getUint32(4),
8231 timescale: view.getUint32(8)
8232 },
8233 i = 12;
8234
8235 if (result.version === 0) {
8236 result.earliestPresentationTime = view.getUint32(i);
8237 result.firstOffset = view.getUint32(i + 4);
8238 i += 8;
8239 } else {
8240 // read 64 bits
8241 result.earliestPresentationTime = getUint64(data.subarray(i));
8242 result.firstOffset = getUint64(data.subarray(i + 8));
8243 i += 16;
8244 }
8245
8246 i += 2; // reserved
8247
8248 var referenceCount = view.getUint16(i);
8249 i += 2; // start of references
8250
8251 for (; referenceCount > 0; i += 12, referenceCount--) {
8252 result.references.push({
8253 referenceType: (data[i] & 0x80) >>> 7,
8254 referencedSize: view.getUint32(i) & 0x7FFFFFFF,
8255 subsegmentDuration: view.getUint32(i + 4),
8256 startsWithSap: !!(data[i + 8] & 0x80),
8257 sapType: (data[i + 8] & 0x70) >>> 4,
8258 sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
8259 });
8260 }
8261
8262 return result;
8263 };
8264
8265 var parseSidx_1 = parseSidx;
8266
8267 var ID3 = toUint8([0x49, 0x44, 0x33]);
8268 var getId3Size = function getId3Size(bytes, offset) {
8269 if (offset === void 0) {
8270 offset = 0;
8271 }
8272
8273 bytes = toUint8(bytes);
8274 var flags = bytes[offset + 5];
8275 var returnSize = bytes[offset + 6] << 21 | bytes[offset + 7] << 14 | bytes[offset + 8] << 7 | bytes[offset + 9];
8276 var footerPresent = (flags & 16) >> 4;
8277
8278 if (footerPresent) {
8279 return returnSize + 20;
8280 }
8281
8282 return returnSize + 10;
8283 };
8284 var getId3Offset = function getId3Offset(bytes, offset) {
8285 if (offset === void 0) {
8286 offset = 0;
8287 }
8288
8289 bytes = toUint8(bytes);
8290
8291 if (bytes.length - offset < 10 || !bytesMatch(bytes, ID3, {
8292 offset: offset
8293 })) {
8294 return offset;
8295 }
8296
8297 offset += getId3Size(bytes, offset); // recursive check for id3 tags as some files
8298 // have multiple ID3 tag sections even though
8299 // they should not.
8300
8301 return getId3Offset(bytes, offset);
8302 };
8303
8304 var normalizePath$1 = function normalizePath(path) {
8305 if (typeof path === 'string') {
8306 return stringToBytes(path);
8307 }
8308
8309 if (typeof path === 'number') {
8310 return path;
8311 }
8312
8313 return path;
8314 };
8315
8316 var normalizePaths$1 = function normalizePaths(paths) {
8317 if (!Array.isArray(paths)) {
8318 return [normalizePath$1(paths)];
8319 }
8320
8321 return paths.map(function (p) {
8322 return normalizePath$1(p);
8323 });
8324 };
  /**
   * find any number of boxes by name given a path to it in an iso bmff
   * such as mp4.
   *
   * @param {TypedArray} bytes
   *        bytes for the iso bmff to search for boxes in
   *
   * @param {Uint8Array[]|string[]|string|Uint8Array} paths
   *        An array of paths or a single path representing the name
   *        of boxes to search through in bytes. Paths may be
   *        uint8 (character codes) or strings.
   *
   * @param {boolean} [complete=false]
   *        Should we search only for complete boxes on the final path.
   *        This is very useful when you do not want to get back partial boxes
   *        in the case of streaming files.
   *
   * @return {Uint8Array[]}
   *         An array of the end paths that we found.
   */

  var findBox = function findBox(bytes, paths, complete) {
    if (complete === void 0) {
      complete = false;
    }

    paths = normalizePaths$1(paths);
    bytes = toUint8(bytes);
    var results = [];

    if (!paths.length) {
      // short-circuit the search for empty paths
      return results;
    }

    var i = 0;

    while (i < bytes.length) {
      // box layout: 32-bit big-endian size, 4-byte type, then payload
      var size = (bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]) >>> 0;
      var type = bytes.subarray(i + 4, i + 8); // invalid box format.

      if (size === 0) {
        break;
      }

      var end = i + size;

      if (end > bytes.length) {
        // this box is bigger than the number of bytes we have
        // and complete is set, we cannot find any more boxes.
        if (complete) {
          break;
        }

        end = bytes.length;
      }

      var data = bytes.subarray(i + 8, end);

      if (bytesMatch(type, paths[0])) {
        if (paths.length === 1) {
          // this is the end of the path and we've found the box we were
          // looking for
          results.push(data);
        } else {
          // recursively search for the next box along the path
          results.push.apply(results, findBox(data, paths.slice(1), complete));
        }
      }

      i = end;
    } // we've finished searching all of bytes


    return results;
  };
8401
8402 // https://matroska-org.github.io/libebml/specs.html
8403 // https://www.matroska.org/technical/elements.html
8404 // https://www.webmproject.org/docs/container/
8405
  // EBML element IDs used when probing and walking webm/mkv containers
  var EBML_TAGS = {
    EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),
    DocType: toUint8([0x42, 0x82]),
    Segment: toUint8([0x18, 0x53, 0x80, 0x67]),
    SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),
    Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),
    Track: toUint8([0xAE]),
    TrackNumber: toUint8([0xd7]),
    DefaultDuration: toUint8([0x23, 0xe3, 0x83]),
    TrackEntry: toUint8([0xAE]),
    TrackType: toUint8([0x83]),
    FlagDefault: toUint8([0x88]),
    CodecID: toUint8([0x86]),
    CodecPrivate: toUint8([0x63, 0xA2]),
    VideoTrack: toUint8([0xe0]),
    AudioTrack: toUint8([0xe1]),
    // Not used yet, but will be used for live webm/mkv
    // see https://www.matroska.org/technical/basics.html#block-structure
    // see https://www.matroska.org/technical/basics.html#simpleblock-structure
    Cluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),
    Timestamp: toUint8([0xE7]),
    TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),
    BlockGroup: toUint8([0xA0]),
    BlockDuration: toUint8([0x9B]),
    Block: toUint8([0xA1]),
    SimpleBlock: toUint8([0xA3])
  };
8433 /**
8434 * This is a simple table to determine the length
8435 * of things in ebml. The length is one based (starts at 1,
8436 * rather than zero) and for every zero bit before a one bit
8437 * we add one to length. We also need this table because in some
8438 * case we have to xor all the length bits from another value.
8439 */
8440
8441 var LENGTH_TABLE = [128, 64, 32, 16, 8, 4, 2, 1];
8442
8443 var getLength = function getLength(byte) {
8444 var len = 1;
8445
8446 for (var i = 0; i < LENGTH_TABLE.length; i++) {
8447 if (byte & LENGTH_TABLE[i]) {
8448 break;
8449 }
8450
8451 len++;
8452 }
8453
8454 return len;
8455 }; // length in ebml is stored in the first 4 to 8 bits
8456 // of the first byte. 4 for the id length and 8 for the
8457 // data size length. Length is measured by converting the number to binary
8458 // then 1 + the number of zeros before a 1 is encountered starting
8459 // from the left.
8460
8461
  // Reads a variable-length integer ("vint") from bytes at offset. Returns its
  // encoded length, numeric value, and the raw bytes. When removeLength is
  // true (the default) the length-marker bits are stripped from the first byte
  // before the value is computed, as required for data-size fields; ID fields
  // keep their length bits (removeLength = false).
  var getvint = function getvint(bytes, offset, removeLength, signed) {
    if (removeLength === void 0) {
      removeLength = true;
    }

    if (signed === void 0) {
      signed = false;
    }

    var length = getLength(bytes[offset]);
    var valueBytes = bytes.subarray(offset, offset + length); // NOTE that we do **not** subarray here because we need to copy these bytes
    // as they will be modified below to remove the dataSizeLen bits and we do not
    // want to modify the original data. normally we could just call slice on
    // uint8array but ie 11 does not support that...

    if (removeLength) {
      valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
      // xor away the single marker bit that encodes the length
      valueBytes[0] ^= LENGTH_TABLE[length - 1];
    }

    return {
      length: length,
      value: bytesToNumber(valueBytes, {
        signed: signed
      }),
      bytes: valueBytes
    };
  };
8490
  // Converts an ebml path component to bytes; numbers go through
  // numberToBytes, byte arrays pass through untouched.
  // NOTE(review): the string branch splits into two-character chunks and then
  // recurses on each chunk, which is itself a string — this looks like it
  // would recurse forever for string input. All callers in this file pass
  // Uint8Array paths (EBML_TAGS), so the branch appears to be dead; confirm
  // before relying on string paths.
  var normalizePath = function normalizePath(path) {
    if (typeof path === 'string') {
      return path.match(/.{1,2}/g).map(function (p) {
        return normalizePath(p);
      });
    }

    if (typeof path === 'number') {
      return numberToBytes(path);
    }

    return path;
  };

  // Ensures the paths argument is always an array of normalized components.
  var normalizePaths = function normalizePaths(paths) {
    if (!Array.isArray(paths)) {
      return [normalizePath(paths)];
    }

    return paths.map(function (p) {
      return normalizePath(p);
    });
  };
8514
  // Walks sibling ebml tags starting at `offset` until another occurrence of
  // tag `id` (or the end of `bytes`) is found, returning that position. Used
  // by findEbml to recover a concrete end for tags that declare an "unknown"
  // data size (live streams).
  var getInfinityDataSize = function getInfinityDataSize(id, bytes, offset) {
    if (offset >= bytes.length) {
      return bytes.length;
    }

    var innerid = getvint(bytes, offset, false);

    // another tag with the same id marks the end of the unknown-size region
    if (bytesMatch(id.bytes, innerid.bytes)) {
      return offset;
    }

    // otherwise skip this entire child tag (id + size header + data)
    var dataHeader = getvint(bytes, offset + innerid.length);
    return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
  };
  /**
   * Notes on the EBML format.
   *
   * EBML uses "vints" tags. Every vint tag contains
   * two parts
   *
   * 1. The length from the first byte. You get this by
   *    converting the byte to binary and counting the zeros
   *    before a 1. Then you add 1 to that. Examples
   *    00011111 = length 4 because there are 3 zeros before a 1.
   *    00100000 = length 3 because there are 2 zeros before a 1.
   *    00000011 = length 7 because there are 6 zeros before a 1.
   *
   * 2. The bits used for length are removed from the first byte
   *    Then all the bytes are merged into a value. NOTE: this
   *    is not the case for id ebml tags as their id includes
   *    length bits.
   *
   */


  // Searches an ebml byte stream for tags along `paths` (analogous to findBox
  // for iso-bmff) and returns the data payload of every match.
  var findEbml = function findEbml(bytes, paths) {
    paths = normalizePaths(paths);
    bytes = toUint8(bytes);
    var results = [];

    if (!paths.length) {
      return results;
    }

    var i = 0;

    while (i < bytes.length) {
      var id = getvint(bytes, i, false);
      var dataHeader = getvint(bytes, i + id.length);
      var dataStart = i + id.length + dataHeader.length; // dataSize is unknown or this is a live stream

      if (dataHeader.value === 0x7f) {
        // scan forward for the next occurrence of this tag to bound the data
        dataHeader.value = getInfinityDataSize(id, bytes, dataStart);

        if (dataHeader.value !== bytes.length) {
          dataHeader.value -= dataStart;
        }
      }

      // clamp the data end to the bytes we actually have
      var dataEnd = dataStart + dataHeader.value > bytes.length ? bytes.length : dataStart + dataHeader.value;
      var data = bytes.subarray(dataStart, dataEnd);

      if (bytesMatch(paths[0], id.bytes)) {
        if (paths.length === 1) {
          // this is the end of the paths and we've found the tag we were
          // looking for
          results.push(data);
        } else {
          // recursively search for the next tag inside of the data
          // of this one
          results = results.concat(findEbml(data, paths.slice(1)));
        }
      }

      var totalLength = id.length + dataHeader.length + data.length; // move past this tag entirely, we are not looking for it

      i += totalLength;
    }

    return results;
  }; // see https://www.matroska.org/technical/basics.html#block-structure
8596
  // Annex B NAL unit start codes (4-byte and 3-byte forms) and the
  // three-byte prefix of an emulation prevention sequence
  var NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);
  var NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);
  var EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);
  /**
   * Expunge any "Emulation Prevention" bytes from a "Raw Byte
   * Sequence Payload"
   *
   * @param {Uint8Array} bytes
   *        the bytes of a RBSP from a NAL unit
   * @return {Uint8Array}
   *         the RBSP without any Emulation Prevention Bytes
   */

  var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(bytes) {
    var positions = [];
    var i = 1; // Find all `Emulation Prevention Bytes`

    while (i < bytes.length - 2) {
      if (bytesMatch(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {
        // record the index of the 0x03 byte itself
        positions.push(i + 2);
        i++;
      }

      i++;
    } // If no Emulation Prevention Bytes were found just return the original
    // array


    if (positions.length === 0) {
      return bytes;
    } // Create a new array to hold the NAL unit data


    var newLength = bytes.length - positions.length;
    var newData = new Uint8Array(newLength);
    var sourceIndex = 0;

    // copy everything except the recorded 0x03 positions
    for (i = 0; i < newLength; sourceIndex++, i++) {
      if (sourceIndex === positions[0]) {
        // Skip this byte
        sourceIndex++; // Remove this position index

        positions.shift();
      }

      newData[i] = bytes[sourceIndex];
    }

    return newData;
  };
  // Scans an Annex B byte stream for the first NAL unit whose type is in
  // `types` (checking at most nalLimit start codes) and returns its payload
  // with emulation prevention bytes removed; returns an empty subarray when
  // no matching NAL unit is found.
  var findNal = function findNal(bytes, dataType, types, nalLimit) {
    if (nalLimit === void 0) {
      nalLimit = Infinity;
    }

    bytes = toUint8(bytes);
    types = [].concat(types);
    var i = 0;
    var nalStart;
    var nalsFound = 0; // keep searching until:
    // we reach the end of bytes
    // we reach the maximum number of nals they want to search
    // NOTE: that we disregard nalLimit when we have found the start
    // of the nal we want so that we can find the end of the nal we want.

    while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
      var nalOffset = void 0;

      if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {
        nalOffset = 4;
      } else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {
        nalOffset = 3;
      } // we are unsynced,
      // find the next nal unit


      if (!nalOffset) {
        i++;
        continue;
      }

      nalsFound++;

      // the next start code terminates the NAL unit we were capturing
      if (nalStart) {
        return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
      }

      var nalType = void 0;

      if (dataType === 'h264') {
        nalType = bytes[i + nalOffset] & 0x1f;
      } else if (dataType === 'h265') {
        nalType = bytes[i + nalOffset] >> 1 & 0x3f;
      }

      if (types.indexOf(nalType) !== -1) {
        nalStart = i + nalOffset;
      } // nal header is 1 length for h264, and 2 for h265


      i += nalOffset + (dataType === 'h264' ? 1 : 2);
    }

    // nothing matched; an empty view signals "not found"
    return bytes.subarray(0, 0);
  };
  // convenience wrappers binding the codec-specific NAL header layout
  var findH264Nal = function findH264Nal(bytes, type, nalLimit) {
    return findNal(bytes, 'h264', type, nalLimit);
  };
  var findH265Nal = function findH265Nal(bytes, type, nalLimit) {
    return findNal(bytes, 'h265', type, nalLimit);
  };
8708
  // magic-byte signatures used to sniff container formats
  var CONSTANTS = {
    // "webm" string literal in hex
    'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),
    // "matroska" string literal in hex
    'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
    // "fLaC" string literal in hex
    'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),
    // "OggS" string literal in hex
    'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),
    // ac-3 sync byte, also works for ec-3 as that is simply a codec
    // of ac-3
    'ac3': toUint8([0x0b, 0x77]),
    // "RIFF" string literal in hex used for wav and avi
    'riff': toUint8([0x52, 0x49, 0x46, 0x46]),
    // "AVI" string literal in hex
    'avi': toUint8([0x41, 0x56, 0x49]),
    // "WAVE" string literal in hex
    'wav': toUint8([0x57, 0x41, 0x56, 0x45]),
    // "ftyp3g" string literal in hex
    '3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
    // "ftyp" string literal in hex
    'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),
    // "styp" string literal in hex
    'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),
    // "ftypqt" string literal in hex
    'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74]),
    // moov string literal in hex
    'moov': toUint8([0x6D, 0x6F, 0x6F, 0x76]),
    // moof string literal in hex
    'moof': toUint8([0x6D, 0x6F, 0x6F, 0x66])
  };
  // per-format probes: each takes the leading bytes of a file and returns a
  // truthy value when the bytes look like that container/codec
  var _isLikely = {
    aac: function aac(bytes) {
      // skip any leading ID3 tags before checking the ADTS sync pattern
      var offset = getId3Offset(bytes);
      return bytesMatch(bytes, [0xFF, 0x10], {
        offset: offset,
        mask: [0xFF, 0x16]
      });
    },
    mp3: function mp3(bytes) {
      // skip any leading ID3 tags before checking the MPEG frame sync
      var offset = getId3Offset(bytes);
      return bytesMatch(bytes, [0xFF, 0x02], {
        offset: offset,
        mask: [0xFF, 0x06]
      });
    },
    webm: function webm(bytes) {
      var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is webm

      return bytesMatch(docType, CONSTANTS.webm);
    },
    mkv: function mkv(bytes) {
      var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is matroska

      return bytesMatch(docType, CONSTANTS.matroska);
    },
    mp4: function mp4(bytes) {
      // if this file is another base media file format, it is not mp4
      if (_isLikely['3gp'](bytes) || _isLikely.mov(bytes)) {
        return false;
      } // if this file starts with a ftyp or styp box its mp4


      if (bytesMatch(bytes, CONSTANTS.mp4, {
        offset: 4
      }) || bytesMatch(bytes, CONSTANTS.fmp4, {
        offset: 4
      })) {
        return true;
      } // if this file starts with a moof/moov box its mp4


      if (bytesMatch(bytes, CONSTANTS.moof, {
        offset: 4
      }) || bytesMatch(bytes, CONSTANTS.moov, {
        offset: 4
      })) {
        return true;
      }
      // note: implicitly returns undefined (falsy) when nothing matched
    },
    mov: function mov(bytes) {
      return bytesMatch(bytes, CONSTANTS.mov, {
        offset: 4
      });
    },
    '3gp': function gp(bytes) {
      return bytesMatch(bytes, CONSTANTS['3gp'], {
        offset: 4
      });
    },
    ac3: function ac3(bytes) {
      // skip any leading ID3 tags before checking the ac-3 sync bytes
      var offset = getId3Offset(bytes);
      return bytesMatch(bytes, CONSTANTS.ac3, {
        offset: offset
      });
    },
    ts: function ts(bytes) {
      // fewer bytes than two packets: settle for a single sync byte
      if (bytes.length < 189 && bytes.length >= 1) {
        return bytes[0] === 0x47;
      }

      var i = 0; // check the first 376 bytes for two matching sync bytes

      while (i + 188 < bytes.length && i < 188) {
        if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
          return true;
        }

        i += 1;
      }

      return false;
    },
    flac: function flac(bytes) {
      // skip any leading ID3 tags before checking the fLaC marker
      var offset = getId3Offset(bytes);
      return bytesMatch(bytes, CONSTANTS.flac, {
        offset: offset
      });
    },
    ogg: function ogg(bytes) {
      return bytesMatch(bytes, CONSTANTS.ogg);
    },
    avi: function avi(bytes) {
      return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.avi, {
        offset: 8
      });
    },
    wav: function wav(bytes) {
      return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.wav, {
        offset: 8
      });
    },
    'h264': function h264(bytes) {
      // find seq_parameter_set_rbsp
      return findH264Nal(bytes, 7, 3).length;
    },
    'h265': function h265(bytes) {
      // find video_parameter_set_rbsp or seq_parameter_set_rbsp
      return findH265Nal(bytes, [32, 33], 3).length;
    }
  }; // get all the isLikely functions
// but make sure 'ts' is above h264 and h265
// but below everything else as it is the least specific

// Ordered list of detector names: the least specific detectors ('ts',
// 'h264', 'h265') are moved to the end so more specific containers win.
var isLikelyTypes = Object.keys(_isLikely).filter(function (name) {
  return name !== 'ts' && name !== 'h264' && name !== 'h265';
}).concat(['ts', 'h264', 'h265']);

// Wrap every detector so callers may pass any byte-like input;
// toUint8 normalizes it before the real check runs.
isLikelyTypes.forEach(function (name) {
  var detect = _isLikely[name];

  _isLikely[name] = function (bytes) {
    return detect(toUint8(bytes));
  };
});

// export after wrapping
var isLikely = _isLikely; // A useful list of file signatures can be found here
// https://en.wikipedia.org/wiki/List_of_file_signatures

/**
 * Run the detectors in priority order and return the name of the first
 * container that matches, or '' when nothing matches.
 *
 * @param {Uint8Array|Array} bytes - leading bytes of the file/segment
 * @return {string} detected container type (e.g. 'mp4', 'ts') or ''
 */
var detectContainerForBytes = function detectContainerForBytes(bytes) {
  var data = toUint8(bytes);
  var match = '';

  // some() stops at the first detector that reports a match
  isLikelyTypes.some(function (type) {
    if (isLikely[type](data)) {
      match = type;
      return true;
    }

    return false;
  });

  return match;
}; // fmp4 is not a container
8883
// true when the bytes contain at least one 'moof' box, i.e. they look
// like a fragmented-mp4 media segment rather than an init segment.
var isLikelyFmp4MediaSegment = function isLikelyFmp4MediaSegment(bytes) {
  var moofBoxes = findBox(bytes, ['moof']);
  return moofBoxes.length > 0;
};
8887
// which will only happen if the request is complete.

var callbackOnCompleted = function callbackOnCompleted(request, cb) {
  // XMLHttpRequest.readyState 4 === DONE; anything earlier means the
  // response is still in flight, so do nothing.
  if (request.readyState !== 4) {
    return;
  }

  return cb();
};
8897
/**
 * Issue an xhr for `uri` and call back as soon as enough bytes have been
 * downloaded to identify the container type, aborting the request early
 * when possible.
 *
 * @param {string} uri - URL to request
 * @param {Function} xhr - xhr factory (videojs-style)
 * @param {Function} cb - called as cb(err, request, containerType, bytes)
 * @return {Object} the in-flight request (so callers can abort it)
 */
var containerRequest = function containerRequest(uri, xhr, cb) {
  var bytes = [];
  var id3Offset;
  var finished = false;

  // abort the request and report the result exactly once
  var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
    req.abort();
    finished = true;
    return cb(err, req, type, _bytes);
  };

  // invoked on every progress event (and on final completion via the xhr
  // callback below); accumulates bytes until a type can be determined
  var progressListener = function progressListener(error, request) {
    if (finished) {
      return;
    }

    if (error) {
      return endRequestAndCallback(error, request, '', bytes);
    } // grab the new part of content that was just downloaded


    var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes

    bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
    id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type
    // or we need at least two bytes after an id3Offset

    if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
      // not enough data yet: only report now if the request has already
      // completed (no more bytes will ever arrive)
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    }

    var type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
    // to see the second sync byte, wait until we have enough data
    // before declaring it ts

    if (type === 'ts' && bytes.length < 188) {
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    } // this may be an unsynced ts segment
    // wait for 376 bytes before detecting no container


    if (!type && bytes.length < 376) {
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    }

    return endRequestAndCallback(null, request, type, bytes);
  };

  var options = {
    uri: uri,
    beforeSend: function beforeSend(request) {
      // this forces the browser to pass the bytes to us unprocessed
      request.overrideMimeType('text/plain; charset=x-user-defined');
      request.addEventListener('progress', function (_ref) {
        // transpiled destructuring residue; total/loaded are unused
        _ref.total;
        _ref.loaded;
        return callbackWrapper(request, null, {
          statusCode: request.status
        }, progressListener);
      });
    }
  };
  // final completion also funnels through progressListener so a short
  // response still gets a (possibly empty) type reported
  var request = xhr(options, function (error, response) {
    return callbackWrapper(request, error, response, progressListener);
  });
  return request;
};
8971
// pieces of the video.js public API this loader relies on
var EventTarget = videojs__default["default"].EventTarget;
var mergeOptions = videojs__default["default"].mergeOptions;
8974
// Returns true when two parsed DASH playlists are equivalent. The generic
// isPlaylistUnchanged check is not sufficient on its own for DASH, because
// mediaSequence is not a DASH concept (it is often just 1), so playlists with
// the same segment count can pass that check while their underlying sidx
// info or segment byteranges actually changed.
var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
  if (!isPlaylistUnchanged(a, b)) {
    return false;
  }

  // a sidx with differing offset/length means the playlists differ
  if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
    return false;
  }

  // sidx present on only one side means the playlists differ
  if (Boolean(a.sidx) !== Boolean(b.sidx)) {
    return false;
  }

  // segments present on only one side: there was a change
  if (Boolean(a.segments) !== Boolean(b.segments)) {
    return false;
  }

  // neither has segments: nothing changed
  if (!a.segments && !b.segments) {
    return true;
  }

  // compare the segments themselves
  for (var idx = 0; idx < a.segments.length; idx++) {
    var segA = a.segments[idx];
    var segB = b.segments[idx];

    // differing uris between segments means a change
    if (segA.uri !== segB.uri) {
      return false;
    }

    var rangeA = segA.byterange;
    var rangeB = segB.byterange;

    // no byterange on either side: no byterange change possible
    if (!rangeA && !rangeB) {
      continue;
    }

    // byterange on only one side is a change
    if (Boolean(rangeA) !== Boolean(rangeB)) {
      return false;
    }

    // both have byteranges: differing offsets/lengths mean a change
    if (rangeA.offset !== rangeB.offset || rangeA.length !== rangeB.length) {
      return false;
    }
  }

  // everything matched: this is the same playlist
  return true;
};
/**
 * Parses the master XML string and updates playlist URI references.
 *
 * @param {Object} config
 *        Object of arguments
 * @param {string} config.masterXml
 *        The mpd XML
 * @param {string} config.srcUrl
 *        The mpd URL
 * @param {Date} config.clientOffset
 *        A time difference between server and client
 * @param {Object} config.sidxMapping
 *        SIDX mappings for moof/mdat URIs and byte ranges
 * @param {Object} config.previousManifest
 *        The previously parsed manifest (if any), used by the parser to
 *        carry state between refreshes
 * @return {Object}
 *        The parsed mpd manifest object
 */

var parseMasterXml = function parseMasterXml(_ref) {
  var manifest = parse(_ref.masterXml, {
    manifestUri: _ref.srcUrl,
    clientOffset: _ref.clientOffset,
    sidxMapping: _ref.sidxMapping,
    previousManifest: _ref.previousManifest
  });

  // attach resolvedUri/attribute properties the rest of VHS expects
  addPropertiesToMaster(manifest, _ref.srcUrl);
  return manifest;
};
/**
 * Returns a new master manifest that is the result of merging an updated master manifest
 * into the original version.
 *
 * @param {Object} oldMaster
 *        The old parsed mpd object
 * @param {Object} newMaster
 *        The updated parsed mpd object
 * @param {Object} sidxMapping
 *        SIDX mappings keyed by generateSidxKey, used to restore already
 *        requested sidx segments onto updated playlists
 * @return {Object}
 *        A new object representing the original master manifest with the updated media
 *        playlists merged in, or null when nothing changed
 */

var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
  var changed = false;
  // These are top level properties that can be updated
  var update = mergeOptions(oldMaster, {
    duration: newMaster.duration,
    minimumUpdatePeriod: newMaster.minimumUpdatePeriod,
    timelineStarts: newMaster.timelineStarts
  });

  // First update the playlists in the playlist list
  newMaster.playlists.forEach(function (playlist) {
    if (playlist.sidx) {
      var sidxKey = generateSidxKey(playlist.sidx);

      // add sidx segments to the playlist if we have all the sidx info already
      if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
        addSidxSegmentsToPlaylist$1(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
      }
    }

    var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);

    if (playlistUpdate) {
      update = playlistUpdate;
      changed = true;
    }
  });

  // Then update media group playlists
  forEachMediaGroup$1(newMaster, function (properties, type, group, label) {
    if (!properties.playlists || !properties.playlists.length) {
      return;
    }

    var id = properties.playlists[0].id;
    var playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);

    if (playlistUpdate) {
      update = playlistUpdate;
      // update the playlist reference within media groups
      update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
      changed = true;
    }
  });

  if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
    changed = true;
  }

  return changed ? update : null;
}; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
9133 // If the SIDXs have maps, the two maps should match,
9134 // both `a` and `b` missing SIDXs is considered matching.
9135 // If `a` or `b` but not both have a map, they aren't matching.
9136
var equivalentSidx = function equivalentSidx(a, b) {
  var mapsMatch;

  if (!a.map && !b.map) {
    // neither references an init-segment map: maps trivially match
    mapsMatch = true;
  } else if (a.map && b.map) {
    // both have maps: their byteranges must be identical
    mapsMatch = a.map.byterange.offset === b.map.byterange.offset &&
      a.map.byterange.length === b.map.byterange.length;
  } else {
    // only one side has a map
    mapsMatch = false;
  }

  return mapsMatch &&
    a.uri === b.uri &&
    a.byterange.offset === b.byterange.offset &&
    a.byterange.length === b.byterange.length;
}; // exported for testing
9142
9143
// Build the subset of oldSidxMapping whose entries are still equivalent to
// the sidx info on the given playlists (entries that changed are dropped so
// they get re-requested).
var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
  var retained = {};

  for (var id in playlists) {
    var sidxInfo = playlists[id].sidx;

    if (!sidxInfo) {
      continue;
    }

    var key = generateSidxKey(sidxInfo);

    // NOTE(review): a key missing from oldSidxMapping aborts the whole loop
    // (`break`, not `continue`) — preserved from the original implementation.
    if (!oldSidxMapping[key]) {
      break;
    }

    var savedSidxInfo = oldSidxMapping[key].sidxInfo;

    if (equivalentSidx(savedSidxInfo, sidxInfo)) {
      retained[key] = oldSidxMapping[key];
    }
  }

  return retained;
};
/**
 * A function that filters out changed items as they need to be requested separately.
 *
 * The method is exported for testing
 *
 * @param {Object} master the parsed mpd XML returned via mpd-parser
 * @param {Object} oldSidxMapping the SIDX to compare against
 * @return {Object} the surviving (unchanged) sidx mapping entries
 */

var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
  // start with the main (video) playlists...
  var unchanged = compareSidxEntry(master.playlists, oldSidxMapping);

  // ...then fold in unchanged entries from every media-group playlist
  forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
    if (properties.playlists && properties.playlists.length) {
      unchanged = mergeOptions(unchanged, compareSidxEntry(properties.playlists, oldSidxMapping));
    }
  });

  return unchanged;
};
9188
9189 var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
9190 inheritsLoose(DashPlaylistLoader, _EventTarget);
9191
9192 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
9193 // playlist loader setups from media groups will expect to be able to pass a playlist
9194 // (since there aren't external URLs to media playlists with DASH)
/**
 * @param {string|Object} srcUrlOrPlaylist - mpd URL (master loader) or a
 *        parsed playlist object (child loader for a media group)
 * @param {Object} vhs - the vhs instance (provides xhr)
 * @param {Object} [options] - withCredentials / handleManifestRedirects flags
 * @param {DashPlaylistLoader} [masterPlaylistLoader] - present only for
 *        child loaders; absent means this instance IS the master loader
 */
function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
  var _this;

  if (options === void 0) {
    options = {};
  }

  _this = _EventTarget.call(this) || this;
  // a master loader points at itself
  _this.masterPlaylistLoader_ = masterPlaylistLoader || assertThisInitialized(_this);

  if (!masterPlaylistLoader) {
    _this.isMaster_ = true;
  }

  var _options = options,
      _options$withCredenti = _options.withCredentials,
      withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
      _options$handleManife = _options.handleManifestRedirects,
      handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
  _this.vhs_ = vhs;
  _this.withCredentials = withCredentials;
  _this.handleManifestRedirects = handleManifestRedirects;

  if (!srcUrlOrPlaylist) {
    throw new Error('A non-empty playlist URL or object is required');
  } // event naming?


  // re-request the mpd when its validity window expires
  _this.on('minimumUpdatePeriod', function () {
    _this.refreshXml_();
  }); // live playlist staleness timeout


  _this.on('mediaupdatetimeout', function () {
    _this.refreshMedia_(_this.media().id);
  });

  _this.state = 'HAVE_NOTHING';
  _this.loadedPlaylists_ = {};
  _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
  // The masterPlaylistLoader will be created with a string

  if (_this.isMaster_) {
    _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
    // once multi-period is refactored

    _this.masterPlaylistLoader_.sidxMapping_ = {};
  } else {
    _this.childPlaylist_ = srcUrlOrPlaylist;
  }

  return _this;
}
9248
var _proto = DashPlaylistLoader.prototype;

/**
 * Clear the pending request and, when `err` is set, record it on
 * `this.error`, optionally restore `startingState`, and fire 'error'.
 * Returns true when the caller should stop (disposed loader or error),
 * undefined otherwise.
 */
_proto.requestErrored_ = function requestErrored_(err, request, startingState) {
  // disposed
  if (!this.request) {
    return true;
  } // pending request is cleared


  this.request = null;

  if (err) {
    // use the provided error object or create one
    // based on the request/response
    this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
      status: request.status,
      message: 'DASH request error at URL: ' + request.uri,
      response: request.response,
      // MEDIA_ERR_NETWORK
      code: 2
    };

    if (startingState) {
      this.state = startingState;
    }

    this.trigger('error');
    return true;
  }
}
/**
 * Verify that the container of the sidx segment can be parsed
 * and if it can, get and parse that segment.
 */
;
9284
/**
 * Ensure the given playlist has its sidx-derived segments. Requests the
 * sidx bytes when needed, verifying the container is mp4 first.
 *
 * @param {Object} playlist - playlist that may carry a `sidx` reference
 * @param {string} startingState - state to restore on request error
 * @param {Function} cb - called cb(sidxChanged: boolean) when done
 */
_proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
  var _this2 = this;

  var sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.

  if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
    // keep this function async
    this.mediaRequest_ = window.setTimeout(function () {
      return cb(false);
    }, 0);
    return;
  } // resolve the segment URL relative to the playlist


  var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);

  // shared completion handler: parse the sidx bytes, cache them in
  // sidxMapping_ and expand the playlist's segments
  var fin = function fin(err, request) {
    if (_this2.requestErrored_(err, request, startingState)) {
      return;
    }

    var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
    var sidx;

    try {
      // skip the 8-byte box header before parsing
      sidx = parseSidx_1(toUint8(request.response).subarray(8));
    } catch (e) {
      // sidx parsing failed.
      _this2.requestErrored_(e, request, startingState);

      return;
    }

    sidxMapping[sidxKey] = {
      sidxInfo: playlist.sidx,
      sidx: sidx
    };
    addSidxSegmentsToPlaylist$1(playlist, sidx, playlist.sidx.resolvedUri);
    return cb(true);
  };

  // probe the container type while downloading
  this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
    if (err) {
      return fin(err, request);
    }

    if (!container || container !== 'mp4') {
      return fin({
        status: request.status,
        message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
        // response is just bytes in this case
        // but we really don't want to return that.
        response: '',
        playlist: playlist,
        internal: true,
        blacklistDuration: Infinity,
        // MEDIA_ERR_NETWORK
        code: 2
      }, request);
    } // if we already downloaded the sidx bytes in the container request, use them


    var _playlist$sidx$bytera = playlist.sidx.byterange,
        offset = _playlist$sidx$bytera.offset,
        length = _playlist$sidx$bytera.length;

    if (bytes.length >= length + offset) {
      return fin(err, {
        response: bytes.subarray(offset, offset + length),
        status: request.status,
        uri: request.uri
      });
    } // otherwise request sidx bytes


    _this2.request = _this2.vhs_.xhr({
      uri: uri,
      responseType: 'arraybuffer',
      headers: segmentXhrHeaders({
        byterange: playlist.sidx.byterange
      })
    }, fin);
  });
};
9369
/**
 * Tear down the loader: abort requests, clear all timers and listeners,
 * and fire 'dispose' so consumers can clean up.
 */
_proto.dispose = function dispose() {
  this.trigger('dispose');
  this.stopRequest();
  this.loadedPlaylists_ = {};
  window.clearTimeout(this.minimumUpdatePeriodTimeout_);
  window.clearTimeout(this.mediaRequest_);
  window.clearTimeout(this.mediaUpdateTimeout);
  this.mediaUpdateTimeout = null;
  this.mediaRequest_ = null;
  this.minimumUpdatePeriodTimeout_ = null;

  // cancel a deferred minimumUpdatePeriod timer creation, if scheduled
  if (this.masterPlaylistLoader_.createMupOnMedia_) {
    this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
    this.masterPlaylistLoader_.createMupOnMedia_ = null;
  }

  this.off();
};
9388
// Truthy when either an xhr (this.request) or an async media task
// (this.mediaRequest_, a timeout id) is outstanding. Note: returns the
// truthy value itself, not a boolean — callers rely only on truthiness.
_proto.hasPendingRequest = function hasPendingRequest() {
  return this.request || this.mediaRequest_;
};
9392
_proto.stopRequest = function stopRequest() {
  // Abort any in-flight request, detaching its handler first so the abort
  // does not fire a spurious readystatechange callback.
  var inflight = this.request;

  if (!inflight) {
    return;
  }

  this.request = null;
  inflight.onreadystatechange = null;
  inflight.abort();
};
9401
/**
 * Getter/setter for the active media playlist.
 *
 * Called with no argument: returns the current media playlist. Called with
 * a playlist object or playlist URI string: switches to that playlist
 * (loading sidx segments first when needed).
 *
 * @param {Object|string} [playlist] - target playlist or its URI
 * @return {Object|undefined} current media playlist in getter mode
 * @throws {Error} when switching before any manifest has loaded, or when
 *         the URI is unknown
 */
_proto.media = function media(playlist) {
  var _this3 = this;

  // getter
  if (!playlist) {
    return this.media_;
  } // setter


  if (this.state === 'HAVE_NOTHING') {
    throw new Error('Cannot switch media playlist from ' + this.state);
  }

  var startingState = this.state; // find the playlist object if the target playlist has been specified by URI

  if (typeof playlist === 'string') {
    if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
      throw new Error('Unknown playlist URI: ' + playlist);
    }

    playlist = this.masterPlaylistLoader_.master.playlists[playlist];
  }

  var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately

  if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
    this.state = 'HAVE_METADATA';
    this.media_ = playlist; // trigger media change if the active media has been updated

    if (mediaChange) {
      this.trigger('mediachanging');
      this.trigger('mediachange');
    }

    return;
  } // switching to the active playlist is a no-op


  if (!mediaChange) {
    return;
  } // switching from an already loaded playlist


  if (this.media_) {
    this.trigger('mediachanging');
  }

  // fetch sidx segments (async) before declaring metadata available
  this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
    // everything is ready just continue to haveMetadata
    _this3.haveMetadata({
      startingState: startingState,
      playlist: playlist
    });
  });
};
9457
// Record the newly selected playlist, refresh it (which fires
// 'loadedplaylist'), and announce either 'loadedmetadata' (first media
// playlist after HAVE_MASTER) or 'mediachange'.
_proto.haveMetadata = function haveMetadata(_ref2) {
  var startingState = _ref2.startingState;
  var playlist = _ref2.playlist;

  this.state = 'HAVE_METADATA';
  this.loadedPlaylists_[playlist.id] = playlist;
  this.mediaRequest_ = null;

  // This will trigger loadedplaylist
  this.refreshMedia_(playlist.id);

  if (startingState === 'HAVE_MASTER') {
    // fire loadedmetadata the first time a media playlist is loaded
    // to resolve setup of media groups
    this.trigger('loadedmetadata');
    return;
  }

  // trigger media change if the active media has been updated
  this.trigger('mediachange');
};
9475
/**
 * Stop refreshing: abort requests and clear refresh timers. The master
 * loader also clears its minimumUpdatePeriod timer.
 */
_proto.pause = function pause() {
  // cancel a deferred minimumUpdatePeriod timer creation, if scheduled
  if (this.masterPlaylistLoader_.createMupOnMedia_) {
    this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
    this.masterPlaylistLoader_.createMupOnMedia_ = null;
  }

  this.stopRequest();
  window.clearTimeout(this.mediaUpdateTimeout);
  this.mediaUpdateTimeout = null;

  if (this.isMaster_) {
    window.clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
    this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
  }

  if (this.state === 'HAVE_NOTHING') {
    // If we pause the loader before any data has been retrieved, its as if we never
    // started, so reset to an unstarted state.
    this.started = false;
  }
};
9497
/**
 * (Re)start loading. With `isFinalRendition` set, just schedules a delayed
 * retry of load(). Otherwise starts the loader if unstarted, or triggers
 * the appropriate refresh event for a live/vod playlist.
 *
 * @param {boolean} [isFinalRendition] - retry later instead of loading now
 */
_proto.load = function load(isFinalRendition) {
  var _this4 = this;

  window.clearTimeout(this.mediaUpdateTimeout);
  this.mediaUpdateTimeout = null;
  var media = this.media();

  if (isFinalRendition) {
    // back off: half a target duration, or 5s when no media is selected yet
    var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
    this.mediaUpdateTimeout = window.setTimeout(function () {
      return _this4.load();
    }, delay);
    return;
  } // because the playlists are internal to the manifest, load should either load the
  // main manifest, or do nothing but trigger an event


  if (!this.started) {
    this.start();
    return;
  }

  if (media && !media.endList) {
    // Check to see if this is the master loader and the MUP was cleared (this happens
    // when the loader was paused). `media` should be set at this point since one is always
    // set during `start()`.
    if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
      // Trigger minimumUpdatePeriod to refresh the master manifest
      this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated

      this.updateMinimumUpdatePeriodTimeout_();
    }

    this.trigger('mediaupdatetimeout');
  } else {
    this.trigger('loadedplaylist');
  }
};
9536
/**
 * Begin loading. A child loader already has the parsed master, so it only
 * needs to (asynchronously) move to HAVE_MASTER; the master loader
 * requests and parses the mpd first, then selects an initial playlist.
 */
_proto.start = function start() {
  var _this5 = this;

  this.started = true; // We don't need to request the master manifest again
  // Call this asynchronously to match the xhr request behavior below

  if (!this.isMaster_) {
    this.mediaRequest_ = window.setTimeout(function () {
      return _this5.haveMaster_();
    }, 0);
    return;
  }

  this.requestMaster_(function (req, masterChanged) {
    _this5.haveMaster_();

    // select the first playlist unless something else already started a
    // request or made a selection
    if (!_this5.hasPendingRequest() && !_this5.media_) {
      _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
    }
  });
};
9558
/**
 * Request the mpd, record when it was loaded (preferring the response Date
 * header), handle redirects, and re-parse it when its text changed.
 *
 * @param {Function} cb - called cb(req, masterChanged) on success; after
 *        clock sync when the manifest changed
 */
_proto.requestMaster_ = function requestMaster_(cb) {
  var _this6 = this;

  this.request = this.vhs_.xhr({
    uri: this.masterPlaylistLoader_.srcUrl,
    withCredentials: this.withCredentials
  }, function (error, req) {
    if (_this6.requestErrored_(error, req)) {
      // a failed initial request leaves the loader restartable
      if (_this6.state === 'HAVE_NOTHING') {
        _this6.started = false;
      }

      return;
    }

    // compare raw text to detect manifest changes cheaply
    var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
    _this6.masterPlaylistLoader_.masterXml_ = req.responseText;

    if (req.responseHeaders && req.responseHeaders.date) {
      _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
    } else {
      _this6.masterLoaded_ = Date.now();
    }

    _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);

    if (masterChanged) {
      _this6.handleMaster_();

      _this6.syncClientServerClock_(function () {
        return cb(req, masterChanged);
      });

      return;
    }

    return cb(req, masterChanged);
  });
}
/**
 * Parses the master xml for UTCTiming node to sync the client clock to the server
 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
 *
 * @param {Function} done
 *        Function to call when clock sync has completed
 */
;
9606
_proto.syncClientServerClock_ = function syncClientServerClock_(done) {
  var _this7 = this;

  var utcTiming = parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
  // server clock

  if (utcTiming === null) {
    this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
    return done();
  }

  // DIRECT scheme: the mpd itself carries the server time; no request needed
  if (utcTiming.method === 'DIRECT') {
    this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
    return done();
  }

  // HEAD or GET scheme: fetch the server time from the timing URL
  this.request = this.vhs_.xhr({
    uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
    method: utcTiming.method,
    withCredentials: this.withCredentials
  }, function (error, req) {
    // disposed
    if (!_this7.request) {
      return;
    }

    if (error) {
      // sync request failed, fall back to using date header from mpd
      // TODO: log warning
      _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
      return done();
    }

    var serverTime;

    if (utcTiming.method === 'HEAD') {
      if (!req.responseHeaders || !req.responseHeaders.date) {
        // expected date header not preset, fall back to using date header from mpd
        // TODO: log warning
        serverTime = _this7.masterLoaded_;
      } else {
        serverTime = Date.parse(req.responseHeaders.date);
      }
    } else {
      serverTime = Date.parse(req.responseText);
    }

    _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
    done();
  });
};
9658
// Transition to HAVE_MASTER. The master loader announces the playlist list;
// a child loader falls back to the playlist it was constructed with.
_proto.haveMaster_ = function haveMaster_() {
  this.state = 'HAVE_MASTER';

  if (!this.isMaster_) {
    if (!this.media_) {
      // no media playlist was specifically selected so select
      // the one the child playlist loader was created with
      this.media(this.childPlaylist_);
    }

    return;
  }

  // We have the master playlist at this point, so
  // trigger this to allow MasterPlaylistController
  // to make an initial playlist selection
  this.trigger('loadedplaylist');
};
9673
/**
 * Re-parse the cached mpd text, merge it into the existing master (when
 * one exists), honor a <Location> redirect, and reschedule the
 * minimumUpdatePeriod timer when it changed.
 *
 * @return {boolean} true when the master actually changed
 */
_proto.handleMaster_ = function handleMaster_() {
  // clear media request
  this.mediaRequest_ = null;
  var oldMaster = this.masterPlaylistLoader_.master;
  var newMaster = parseMasterXml({
    masterXml: this.masterPlaylistLoader_.masterXml_,
    srcUrl: this.masterPlaylistLoader_.srcUrl,
    clientOffset: this.masterPlaylistLoader_.clientOffset_,
    sidxMapping: this.masterPlaylistLoader_.sidxMapping_,
    previousManifest: oldMaster
  }); // if we have an old master to compare the new master against

  if (oldMaster) {
    // updateMaster returns null when nothing changed
    newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
  } // only update master if we have a new master


  this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
  // follow a Location element for subsequent refreshes
  var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];

  if (location && location !== this.masterPlaylistLoader_.srcUrl) {
    this.masterPlaylistLoader_.srcUrl = location;
  }

  if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
    this.updateMinimumUpdatePeriodTimeout_();
  }

  return Boolean(newMaster);
};
9704
/**
 * (Re)compute and schedule the minimumUpdatePeriod refresh timer on the
 * master loader, clearing any timer or deferred creation already pending.
 */
_proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
  var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
  // a new one will be added if needed.

  if (mpl.createMupOnMedia_) {
    mpl.off('loadedmetadata', mpl.createMupOnMedia_);
    mpl.createMupOnMedia_ = null;
  } // clear any pending timeouts


  if (mpl.minimumUpdatePeriodTimeout_) {
    window.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
    mpl.minimumUpdatePeriodTimeout_ = null;
  }

  var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
  // MPD has no future validity, so a new one will need to be acquired when new
  // media segments are to be made available. Thus, we use the target duration
  // in this case

  if (mup === 0) {
    if (mpl.media()) {
      mup = mpl.media().targetDuration * 1000;
    } else {
      // no media selected yet: defer timer creation until 'loadedmetadata'
      mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
      mpl.one('loadedmetadata', mpl.createMupOnMedia_);
    }
  } // if minimumUpdatePeriod is invalid or <= zero, which
  // can happen when a live video becomes VOD. skip timeout
  // creation.


  if (typeof mup !== 'number' || mup <= 0) {
    if (mup < 0) {
      this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
    }

    return;
  }

  this.createMUPTimeout_(mup);
};
9747
/**
 * Schedule a self-rearming timer on the master loader that fires
 * 'minimumUpdatePeriod' every `mup` milliseconds.
 *
 * @param {number} mup - minimumUpdatePeriod interval in ms
 */
_proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
  var mpl = this.masterPlaylistLoader_;
  mpl.minimumUpdatePeriodTimeout_ = window.setTimeout(function () {
    mpl.minimumUpdatePeriodTimeout_ = null;
    mpl.trigger('minimumUpdatePeriod');
    mpl.createMUPTimeout_(mup);
  }, mup);
}
/**
 * Sends request to refresh the master xml and updates the parsed master manifest
 */
;
9760
  _proto.refreshXml_ = function refreshXml_() {
    var _this8 = this;

    // re-request the master manifest; the callback only runs work when the
    // returned manifest actually differs from the cached one
    this.requestMaster_(function (req, masterChanged) {
      if (!masterChanged) {
        return;
      }

      // re-resolve the currently selected playlist object against the freshly
      // parsed master so we don't hold a stale reference
      if (_this8.media_) {
        _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
      } // This will filter out updated sidx info from the mapping


      _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);

      _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
        // TODO: do we need to reload the current playlist?
        _this8.refreshMedia_(_this8.media().id);
      });
    });
  }
  /**
   * Refreshes the media playlist by re-parsing the master xml and updating playlist
   * references. If this is an alternate loader, the updated parsed manifest is retrieved
   * from the master loader.
   */
  ;
9788
  // @param {string} mediaID - id of the playlist (in master.playlists) to
  // select/refresh; required. Triggers 'playlistunchanged' or, always at the
  // end, 'loadedplaylist'.
  _proto.refreshMedia_ = function refreshMedia_(mediaID) {
    var _this9 = this;

    if (!mediaID) {
      throw new Error('refreshMedia_ must take a media id');
    } // for master we have to reparse the master xml
    // to re-create segments based on current timing values
    // which may change media. We only skip updating master
    // if this is the first time this.media_ is being set.
    // as master was just parsed in that case.


    if (this.media_ && this.isMaster_) {
      this.handleMaster_();
    }

    var playlists = this.masterPlaylistLoader_.master.playlists;
    // identity comparison: handleMaster_ replaces playlist objects when they
    // change, so a different object for the same id means an update occurred
    var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];

    if (mediaChanged) {
      this.media_ = playlists[mediaID];
    } else {
      this.trigger('playlistunchanged');
    }

    // only install the refresh loop once; it re-arms itself after every
    // 'mediaupdatetimeout' until the playlist becomes VOD (endList)
    if (!this.mediaUpdateTimeout) {
      var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
        if (_this9.media().endList) {
          return;
        }

        _this9.mediaUpdateTimeout = window.setTimeout(function () {
          _this9.trigger('mediaupdatetimeout');

          createMediaUpdateTimeout();
        }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
      };

      createMediaUpdateTimeout();
    }

    this.trigger('loadedplaylist');
  };
9832
9833 return DashPlaylistLoader;
9834 }(EventTarget);
9835
  // Tunable buffering / adaptive-bitrate defaults.
  // NOTE(review): units are not visible at this point in the file — the
  // *_LENGTH / *_LINE values appear to be seconds and INITIAL_BANDWIDTH
  // bits/sec; confirm against the call sites before relying on this.
  var Config = {
    GOAL_BUFFER_LENGTH: 30,
    MAX_GOAL_BUFFER_LENGTH: 60,
    BACK_BUFFER_LENGTH: 30,
    GOAL_BUFFER_LENGTH_RATE: 1,
    // 0.5 MB/s
    INITIAL_BANDWIDTH: 4194304,
    // A fudge factor to apply to advertised playlist bitrates to account for
    // temporary fluctuations in client bandwidth
    BANDWIDTH_VARIANCE: 1.2,
    // How much of the buffer must be filled before we consider upswitching
    BUFFER_LOW_WATER_LINE: 0,
    MAX_BUFFER_LOW_WATER_LINE: 30,
    // TODO: Remove this when experimentalBufferBasedABR is removed
    EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
    BUFFER_LOW_WATER_LINE_RATE: 1,
    // If the buffer is greater than the high water line, we won't switch down
    BUFFER_HIGH_WATER_LINE: 30
  };
9855
9856 var stringToArrayBuffer = function stringToArrayBuffer(string) {
9857 var view = new Uint8Array(new ArrayBuffer(string.length));
9858
9859 for (var i = 0; i < string.length; i++) {
9860 view[i] = string.charCodeAt(i);
9861 }
9862
9863 return view.buffer;
9864 };
9865
9866 var MockWorker = /*#__PURE__*/function () {
9867 function MockWorker() {
9868 this.listeners_ = [];
9869 this.onmessage = null;
9870 this.remote_ = null;
9871 }
9872
9873 var _proto = MockWorker.prototype;
9874
9875 _proto.addEventListener = function addEventListener(type, fn) {
9876 if (type !== 'message') {
9877 return;
9878 }
9879
9880 this.listeners_.push(fn);
9881 };
9882
9883 _proto.removeEventListener = function removeEventListener(type, fn) {
9884 if (type !== 'message') {
9885 return;
9886 }
9887
9888 var i = this.listeners_.indexOf(fn);
9889
9890 if (i === -1) {
9891 return;
9892 }
9893
9894 this.listeners_.splice(i, 1);
9895 };
9896
9897 _proto.dispatchEvent = function dispatchEvent(event) {
9898 if (!event || event.type !== 'message') {
9899 return;
9900 }
9901
9902 if (this.onmessage) {
9903 this.onmessage(event);
9904 }
9905
9906 this.listeners_.forEach(function (fn) {
9907 fn(event);
9908 });
9909 };
9910
9911 _proto.postMessage = function postMessage(data) {
9912 if (this.remote_) {
9913 this.remote_.recv_(data);
9914 }
9915 };
9916
9917 _proto.recv_ = function recv_(data) {
9918 // the browser puts the actual message under
9919 var message = {
9920 data: data
9921 };
9922
9923 if (this.onmessage) {
9924 this.onmessage(message);
9925 }
9926
9927 this.listeners_.forEach(function (fn) {
9928 fn(message);
9929 });
9930 };
9931
9932 _proto.terminate = function terminate() {
9933 if (this.remote_) {
9934 this.remote_.remote_ = null;
9935 this.remote_.terminate();
9936 this.remote_ = null;
9937 }
9938
9939 this.onmessage = null;
9940 this.listeners_.length = 0;
9941 };
9942
9943 return MockWorker;
9944 }();
9945
9946 MockWorker.prototype.on = MockWorker.prototype.addEventListener;
9947 MockWorker.prototype.off = MockWorker.prototype.removeEventListener;
9948 var factory = function factory(fn) {
9949 return function () {
9950 var client = new MockWorker();
9951 var worker = new MockWorker();
9952 client.type_ = 'window api';
9953 client.remote_ = worker;
9954 worker.remote_ = client;
9955 worker.type_ = 'web worker';
9956 fn(worker);
9957 return client;
9958 };
9959 };
9960 var transform = function transform(fn) {
9961 // eslint-disable-next-line
9962 return fn;
9963 };
9964
9965 /* rollup-plugin-worker-factory start for worker!/Users/ddashkevich/projects/vhs-release/src/transmuxer-worker.js */
9966 var workerCode$1 = transform(function (self) {
9967 /**
9968 * mux.js
9969 *
9970 * Copyright (c) Brightcove
9971 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9972 *
9973 * A lightweight readable stream implemention that handles event dispatching.
9974 * Objects that inherit from streams should call init in their constructors.
9975 */
9976
9977 var Stream = function Stream() {
9978 this.init = function () {
9979 var listeners = {};
9980 /**
9981 * Add a listener for a specified event type.
9982 * @param type {string} the event name
9983 * @param listener {function} the callback to be invoked when an event of
9984 * the specified type occurs
9985 */
9986
9987 this.on = function (type, listener) {
9988 if (!listeners[type]) {
9989 listeners[type] = [];
9990 }
9991
9992 listeners[type] = listeners[type].concat(listener);
9993 };
9994 /**
9995 * Remove a listener for a specified event type.
9996 * @param type {string} the event name
9997 * @param listener {function} a function previously registered for this
9998 * type of event through `on`
9999 */
10000
10001
10002 this.off = function (type, listener) {
10003 var index;
10004
10005 if (!listeners[type]) {
10006 return false;
10007 }
10008
10009 index = listeners[type].indexOf(listener);
10010 listeners[type] = listeners[type].slice();
10011 listeners[type].splice(index, 1);
10012 return index > -1;
10013 };
10014 /**
10015 * Trigger an event of the specified type on this stream. Any additional
10016 * arguments to this function are passed as parameters to event listeners.
10017 * @param type {string} the event name
10018 */
10019
10020
10021 this.trigger = function (type) {
10022 var callbacks, i, length, args;
10023 callbacks = listeners[type];
10024
10025 if (!callbacks) {
10026 return;
10027 } // Slicing the arguments on every invocation of this method
10028 // can add a significant amount of overhead. Avoid the
10029 // intermediate object creation for the common case of a
10030 // single callback argument
10031
10032
10033 if (arguments.length === 2) {
10034 length = callbacks.length;
10035
10036 for (i = 0; i < length; ++i) {
10037 callbacks[i].call(this, arguments[1]);
10038 }
10039 } else {
10040 args = [];
10041 i = arguments.length;
10042
10043 for (i = 1; i < arguments.length; ++i) {
10044 args.push(arguments[i]);
10045 }
10046
10047 length = callbacks.length;
10048
10049 for (i = 0; i < length; ++i) {
10050 callbacks[i].apply(this, args);
10051 }
10052 }
10053 };
10054 /**
10055 * Destroys the stream and cleans up.
10056 */
10057
10058
10059 this.dispose = function () {
10060 listeners = {};
10061 };
10062 };
10063 };
10064 /**
10065 * Forwards all `data` events on this stream to the destination stream. The
10066 * destination stream should provide a method `push` to receive the data
10067 * events as they arrive.
10068 * @param destination {stream} the stream that will receive all `data` events
10069 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
10070 * when the current stream emits a 'done' event
10071 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
10072 */
10073
10074
  // Wire every lifecycle event of this stream to the corresponding method on
  // `destination`, then return `destination` so pipes can be chained.
  Stream.prototype.pipe = function (destination) {
    this.on('data', function (data) {
      destination.push(data);
    });
    this.on('done', function (flushSource) {
      destination.flush(flushSource);
    });
    this.on('partialdone', function (flushSource) {
      destination.partialFlush(flushSource);
    });
    this.on('endedtimeline', function (flushSource) {
      destination.endTimeline(flushSource);
    });
    this.on('reset', function (flushSource) {
      destination.reset(flushSource);
    });
    return destination;
  }; // Default stream functions that are expected to be overridden to perform
  // actual work. These are provided by the prototype as a sort of no-op
  // implementation so that we don't have to check for their existence in the
  // `pipe` function above.


  Stream.prototype.push = function (data) {
    this.trigger('data', data);
  };

  Stream.prototype.flush = function (flushSource) {
    this.trigger('done', flushSource);
  };

  Stream.prototype.partialFlush = function (flushSource) {
    this.trigger('partialdone', flushSource);
  };

  Stream.prototype.endTimeline = function (flushSource) {
    this.trigger('endedtimeline', flushSource);
  };

  Stream.prototype.reset = function (flushSource) {
    this.trigger('reset', flushSource);
  };

  // module-style alias plus the 2^32 constant used for 64-bit splitting below
  var stream = Stream;
  var MAX_UINT32$1 = Math.pow(2, 32);
10120
10121 var getUint64$2 = function getUint64(uint8) {
10122 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
10123 var value;
10124
10125 if (dv.getBigUint64) {
10126 value = dv.getBigUint64(0);
10127
10128 if (value < Number.MAX_SAFE_INTEGER) {
10129 return Number(value);
10130 }
10131
10132 return value;
10133 }
10134
10135 return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);
10136 };
10137
  // Grouped 64-bit helpers, mirroring the mux.js `numbers` module export.
  var numbers = {
    getUint64: getUint64$2,
    MAX_UINT32: MAX_UINT32$1
  };
  var MAX_UINT32 = numbers.MAX_UINT32;
  // Forward declarations for the MP4 box generators and the shared constant
  // byte tables; the IIFE that follows fills in the constants once.
  var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
10144
  // One-time set-up: turn each four-letter box name into its ASCII code array
  // and build the constant payloads (handler descriptors, data references,
  // empty sample tables) shared by every generated MP4 box.
  (function () {
    var i;
    types = {
      avc1: [],
      // codingname
      avcC: [],
      btrt: [],
      dinf: [],
      dref: [],
      esds: [],
      ftyp: [],
      hdlr: [],
      mdat: [],
      mdhd: [],
      mdia: [],
      mfhd: [],
      minf: [],
      moof: [],
      moov: [],
      mp4a: [],
      // codingname
      mvex: [],
      mvhd: [],
      pasp: [],
      sdtp: [],
      smhd: [],
      stbl: [],
      stco: [],
      stsc: [],
      stsd: [],
      stsz: [],
      stts: [],
      styp: [],
      tfdt: [],
      tfhd: [],
      traf: [],
      trak: [],
      trun: [],
      trex: [],
      tkhd: [],
      vmhd: []
    }; // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
    // don't throw an error

    if (typeof Uint8Array === 'undefined') {
      return;
    }

    for (i in types) {
      if (types.hasOwnProperty(i)) {
        // each entry becomes the ASCII codes of its own four-letter name
        types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
      }
    }

    MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
    AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
    MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
    VIDEO_HDLR = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00, // pre_defined
    0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
    ]);
    AUDIO_HDLR = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00, // pre_defined
    0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
    ]);
    HDLR_TYPES = {
      video: VIDEO_HDLR,
      audio: AUDIO_HDLR
    };
    DREF = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x01, // entry_count
    0x00, 0x00, 0x00, 0x0c, // entry_size
    0x75, 0x72, 0x6c, 0x20, // 'url' type
    0x00, // version 0
    0x00, 0x00, 0x01 // entry_flags
    ]);
    SMHD = new Uint8Array([0x00, // version
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, // balance, 0 means centered
    0x00, 0x00 // reserved
    ]);
    STCO = new Uint8Array([0x00, // version
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00 // entry_count
    ]);
    STSC = STCO;
    STSZ = new Uint8Array([0x00, // version
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00, // sample_size
    0x00, 0x00, 0x00, 0x00 // sample_count
    ]);
    STTS = STCO;
    VMHD = new Uint8Array([0x00, // version
    0x00, 0x00, 0x01, // flags
    0x00, 0x00, // graphicsmode
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
    ]);
  })();
10254
10255 box = function box(type) {
10256 var payload = [],
10257 size = 0,
10258 i,
10259 result,
10260 view;
10261
10262 for (i = 1; i < arguments.length; i++) {
10263 payload.push(arguments[i]);
10264 }
10265
10266 i = payload.length; // calculate the total size we need to allocate
10267
10268 while (i--) {
10269 size += payload[i].byteLength;
10270 }
10271
10272 result = new Uint8Array(size + 8);
10273 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
10274 view.setUint32(0, result.byteLength);
10275 result.set(type, 4); // copy the payload into the result
10276
10277 for (i = 0, size = 8; i < payload.length; i++) {
10278 result.set(payload[i], size);
10279 size += payload[i].byteLength;
10280 }
10281
10282 return result;
10283 };
10284
  // dinf: data information box — a single self-contained 'url ' data
  // reference (all media lives in this file).
  dinf = function dinf() {
    return box(types.dinf, box(types.dref, DREF));
  };

  // esds: elementary stream descriptor for AAC audio; the final bytes encode
  // the AudioSpecificConfig from the track's parsed ADTS parameters.
  esds = function esds(track) {
    return box(types.esds, new Uint8Array([0x00, // version
    0x00, 0x00, 0x00, // flags
    // ES_Descriptor
    0x03, // tag, ES_DescrTag
    0x19, // length
    0x00, 0x00, // ES_ID
    0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
    // DecoderConfigDescriptor
    0x04, // tag, DecoderConfigDescrTag
    0x11, // length
    0x40, // object type
    0x15, // streamType
    0x00, 0x06, 0x00, // bufferSizeDB
    0x00, 0x00, 0xda, 0xc0, // maxBitrate
    0x00, 0x00, 0xda, 0xc0, // avgBitrate
    // DecoderSpecificInfo
    0x05, // tag, DecoderSpecificInfoTag
    0x02, // length
    // ISO/IEC 14496-3, AudioSpecificConfig
    // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
    track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
    ]));
  };

  // ftyp: file type box — 'isom' major brand with 'avc1' compatible brand.
  ftyp = function ftyp() {
    return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
  };

  // hdlr: handler box; payload is the precomputed 'vide' or 'soun' table.
  hdlr = function hdlr(type) {
    return box(types.hdlr, HDLR_TYPES[type]);
  };

  // mdat: media data box wrapping the raw sample bytes.
  mdat = function mdat(data) {
    return box(types.mdat, data);
  };
10325
  // mdhd: media header box — fixed 90kHz timescale, big-endian track
  // duration, 'und' language; the timescale is overwritten with the real
  // sample rate for audio tracks that provide one.
  mdhd = function mdhd(track) {
    var result = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x02, // creation_time
    0x00, 0x00, 0x00, 0x03, // modification_time
    0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
    track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
    0x55, 0xc4, // 'und' language (undetermined)
    0x00, 0x00]); // Use the sample rate from the track metadata, when it is
    // defined. The sample rate can be parsed out of an ADTS header, for
    // instance.

    if (track.samplerate) {
      // bytes 12-15 are the timescale field built above
      result[12] = track.samplerate >>> 24 & 0xFF;
      result[13] = track.samplerate >>> 16 & 0xFF;
      result[14] = track.samplerate >>> 8 & 0xFF;
      result[15] = track.samplerate & 0xFF;
    }

    return box(types.mdhd, result);
  };

  // mdia: media box composed of the media header, handler and media info.
  mdia = function mdia(track) {
    return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
  };

  // mfhd: movie fragment header carrying the big-endian sequence number.
  mfhd = function mfhd(sequenceNumber) {
    return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
    (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
    ]));
  };

  // minf: media information box — vmhd for video / smhd for audio, plus the
  // data information and sample table boxes.
  minf = function minf(track) {
    return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
  };
10361
  // moof: movie fragment box — the fragment header followed by one traf box
  // per track.
  moof = function moof(sequenceNumber, tracks) {
    var trackFragments = [],
        i = tracks.length; // build traf boxes for each track fragment

    while (i--) {
      trackFragments[i] = traf(tracks[i]);
    }

    return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
  };
  /**
   * Returns a movie box.
   * @param tracks {array} the tracks associated with this movie
   * @see ISO/IEC 14496-12:2012(E), section 8.2.1
   */


  moov = function moov(tracks) {
    var i = tracks.length,
        boxes = [];

    while (i--) {
      boxes[i] = trak(tracks[i]);
    }

    // 0xffffffff duration: unknown/unbounded, as used for fragmented MP4
    return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
  };

  // mvex: movie extends box — one trex (track extends) entry per track,
  // flagging the movie as fragmented.
  mvex = function mvex(tracks) {
    var i = tracks.length,
        boxes = [];

    while (i--) {
      boxes[i] = trex(tracks[i]);
    }

    return box.apply(null, [types.mvex].concat(boxes));
  };

  // mvhd: movie header box — 90kHz timescale, unity transform matrix,
  // big-endian duration, next_track_ID pinned to 0xffffffff.
  mvhd = function mvhd(duration) {
    var bytes = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x01, // creation_time
    0x00, 0x00, 0x00, 0x02, // modification_time
    0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
    (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
    0x00, 0x01, 0x00, 0x00, // 1.0 rate
    0x01, 0x00, // 1.0 volume
    0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
    0xff, 0xff, 0xff, 0xff // next_track_ID
    ]);
    return box(types.mvhd, bytes);
  };
10419
  // sdtp: independent-and-disposable-samples box — one byte per sample
  // packing dependsOn / isDependedOn / hasRedundancy flags.
  sdtp = function sdtp(track) {
    var samples = track.samples || [],
        bytes = new Uint8Array(4 + samples.length),
        flags,
        i; // leave the full box header (4 bytes) all zero
    // write the sample table

    for (i = 0; i < samples.length; i++) {
      flags = samples[i].flags;
      bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
    }

    return box(types.sdtp, bytes);
  };

  // stbl: sample table box — stsd plus the empty stts/stsc/stsz/stco tables
  // (sample data lives in movie fragments, not here).
  stbl = function stbl(track) {
    return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
  };
10438
  // Build stsd (sample description) and its per-codec entry generators:
  // avc1 (+avcC/btrt/pasp) for video, mp4a (+esds) for audio.
  (function () {
    var videoSample, audioSample;

    stsd = function stsd(track) {
      return box(types.stsd, new Uint8Array([0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
    };

    // avc1 sample entry: dimensions plus an avcC record embedding the
    // track's raw SPS/PPS NAL payloads with 16-bit length prefixes.
    videoSample = function videoSample(track) {
      var sps = track.sps || [],
          pps = track.pps || [],
          sequenceParameterSets = [],
          pictureParameterSets = [],
          i,
          avc1Box; // assemble the SPSs

      for (i = 0; i < sps.length; i++) {
        sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
        sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength

        sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
      } // assemble the PPSs


      for (i = 0; i < pps.length; i++) {
        pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
        pictureParameterSets.push(pps[i].byteLength & 0xFF);
        pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
      }

      avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // data_reference_index
      0x00, 0x00, // pre_defined
      0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
      (track.width & 0xff00) >> 8, track.width & 0xff, // width
      (track.height & 0xff00) >> 8, track.height & 0xff, // height
      0x00, 0x48, 0x00, 0x00, // horizresolution
      0x00, 0x48, 0x00, 0x00, // vertresolution
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // frame_count
      0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
      0x00, 0x18, // depth = 24
      0x11, 0x11 // pre_defined = -1
      ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
      track.profileIdc, // AVCProfileIndication
      track.profileCompatibility, // profile_compatibility
      track.levelIdc, // AVCLevelIndication
      0xff // lengthSizeMinusOne, hard-coded to 4 bytes
      ].concat([sps.length], // numOfSequenceParameterSets
      sequenceParameterSets, // "SPS"
      [pps.length], // numOfPictureParameterSets
      pictureParameterSets // "PPS"
      ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
      0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
      0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
      ]))];

      // optional pasp box carrying the pixel/sample aspect ratio
      if (track.sarRatio) {
        var hSpacing = track.sarRatio[0],
            vSpacing = track.sarRatio[1];
        avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
      }

      return box.apply(null, avc1Box);
    };

    // mp4a sample entry: channel count / sample size / sample rate from the
    // track metadata, followed by the esds decoder configuration.
    audioSample = function audioSample(track) {
      return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
      0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // data_reference_index
      // AudioSampleEntry, ISO/IEC 14496-12
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
      (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
      0x00, 0x00, // pre_defined
      0x00, 0x00, // reserved
      (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
      // MP4AudioSampleEntry, ISO/IEC 14496-14
      ]), esds(track));
    };
  })();
10523
  // tkhd: track header box — track id, duration, display width/height and a
  // unity transform; flags 0x7 mark the track enabled / in movie / in preview.
  tkhd = function tkhd(track) {
    var result = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x07, // flags
    0x00, 0x00, 0x00, 0x00, // creation_time
    0x00, 0x00, 0x00, 0x00, // modification_time
    (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
    0x00, 0x00, 0x00, 0x00, // reserved
    (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, // layer
    0x00, 0x00, // alternate_group
    0x01, 0x00, // non-audio track volume
    0x00, 0x00, // reserved
    0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
    (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
    (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
    ]);
    return box(types.tkhd, result);
  };
  /**
   * Generate a track fragment (traf) box. A traf box collects metadata
   * about tracks in a movie fragment (moof) box.
   */


  traf = function traf(track) {
    var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
    trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x3a, // flags
    (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
    0x00, 0x00, 0x00, 0x01, // sample_description_index
    0x00, 0x00, 0x00, 0x00, // default_sample_duration
    0x00, 0x00, 0x00, 0x00, // default_sample_size
    0x00, 0x00, 0x00, 0x00 // default_sample_flags
    ]));
    // split the 64-bit baseMediaDecodeTime into two 32-bit halves for the
    // version-1 tfdt layout
    upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);
    lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);
    trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
    0x00, 0x00, 0x00, // flags
    // baseMediaDecodeTime
    upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
    // the containing moof to the first payload byte of the associated
    // mdat

    dataOffset = 32 + // tfhd
    20 + // tfdt
    8 + // traf header
    16 + // mfhd
    8 + // moof header
    8; // mdat header
    // audio tracks require less metadata

    if (track.type === 'audio') {
      trackFragmentRun = trun$1(track, dataOffset);
      return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
    } // video tracks should contain an independent and disposable samples
    // box (sdtp)
    // generate one and adjust offsets to match


    sampleDependencyTable = sdtp(track);
    trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
    return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
  };
10588 /**
10589 * Generate a track box.
10590 * @param track {object} a track definition
10591 * @return {Uint8Array} the track box
10592 */
10593
10594
  // trak: track box — header plus media box; duration defaults to the
  // "unknown" sentinel used for fragmented MP4.
  trak = function trak(track) {
    track.duration = track.duration || 0xffffffff;
    return box(types.trak, tkhd(track), mdia(track));
  };

  // trex: track extends box — per-track defaults for fragment runs.
  trex = function trex(track) {
    var result = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
    0x00, 0x00, 0x00, 0x01, // default_sample_description_index
    0x00, 0x00, 0x00, 0x00, // default_sample_duration
    0x00, 0x00, 0x00, 0x00, // default_sample_size
    0x00, 0x01, 0x00, 0x01 // default_sample_flags
    ]); // the last two bytes of default_sample_flags is the sample
    // degradation priority, a hint about the importance of this sample
    // relative to others. Lower the degradation priority for all sample
    // types other than video.

    if (track.type !== 'video') {
      result[result.length - 1] = 0x00;
    }

    return box(types.trex, result);
  };
10619
10620 (function () {
10621 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
10622 // duration is present for the first sample, it will be present for
10623 // all subsequent samples.
10624 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
10625
10626 trunHeader = function trunHeader(samples, offset) {
10627 var durationPresent = 0,
10628 sizePresent = 0,
10629 flagsPresent = 0,
10630 compositionTimeOffset = 0; // trun flag constants
10631
10632 if (samples.length) {
10633 if (samples[0].duration !== undefined) {
10634 durationPresent = 0x1;
10635 }
10636
10637 if (samples[0].size !== undefined) {
10638 sizePresent = 0x2;
10639 }
10640
10641 if (samples[0].flags !== undefined) {
10642 flagsPresent = 0x4;
10643 }
10644
10645 if (samples[0].compositionTimeOffset !== undefined) {
10646 compositionTimeOffset = 0x8;
10647 }
10648 }
10649
10650 return [0x00, // version 0
10651 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
10652 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
10653 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
10654 ];
10655 };
10656
    // trun (video flavor): the shared trun header followed by one 16-byte
    // record per sample — duration, size, packed sample flags, and
    // composition time offset, all big-endian.
    videoTrun = function videoTrun(track, offset) {
      var bytesOffest, bytes, header, samples, sample, i;
      samples = track.samples || [];
      // grow the mdat data offset by this box's own size: header (8) +
      // trun fields (12) + 16 bytes per sample
      offset += 8 + 12 + 16 * samples.length;
      header = trunHeader(samples, offset);
      bytes = new Uint8Array(header.length + samples.length * 16);
      bytes.set(header);
      bytesOffest = header.length;

      for (i = 0; i < samples.length; i++) {
        sample = samples[i];
        bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
        bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
        bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
        bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration

        bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
        bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
        bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
        bytes[bytesOffest++] = sample.size & 0xFF; // sample_size

        bytes[bytesOffest++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
        bytes[bytesOffest++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
        bytes[bytesOffest++] = sample.flags.degradationPriority & 0xF0 << 8;
        bytes[bytesOffest++] = sample.flags.degradationPriority & 0x0F; // sample_flags

        bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
        bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
        bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
        bytes[bytesOffest++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
      }

      return box(types.trun, bytes);
    };

    // trun (audio flavor): 8 bytes per sample — duration and size only.
    audioTrun = function audioTrun(track, offset) {
      var bytes, bytesOffest, header, samples, sample, i;
      samples = track.samples || [];
      // header (8) + trun fields (12) + 8 bytes per sample
      offset += 8 + 12 + 8 * samples.length;
      header = trunHeader(samples, offset);
      bytes = new Uint8Array(header.length + samples.length * 8);
      bytes.set(header);
      bytesOffest = header.length;

      for (i = 0; i < samples.length; i++) {
        sample = samples[i];
        bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
        bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
        bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
        bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration

        bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
        bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
        bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
        bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
      }

      return box(types.trun, bytes);
    };
10716
10717 trun$1 = function trun(track, offset) {
10718 if (track.type === 'audio') {
10719 return audioTrun(track, offset);
10720 }
10721
10722 return videoTrun(track, offset);
10723 };
10724 })();
10725
10726 var mp4Generator = {
10727 ftyp: ftyp,
10728 mdat: mdat,
10729 moof: moof,
10730 moov: moov,
10731 initSegment: function initSegment(tracks) {
10732 var fileType = ftyp(),
10733 movie = moov(tracks),
10734 result;
10735 result = new Uint8Array(fileType.byteLength + movie.byteLength);
10736 result.set(fileType);
10737 result.set(movie, fileType.byteLength);
10738 return result;
10739 }
10740 };
10741 /**
10742 * mux.js
10743 *
10744 * Copyright (c) Brightcove
10745 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10746 */
10747 // Convert an array of nal units into an array of frames with each frame being
10748 // composed of the nal units that make up that frame
// Also keep track of cumulative data about the frame from the NAL units, such
// as the frame duration, starting pts, etc.
10751
10752 var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
10753 var i,
10754 currentNal,
10755 currentFrame = [],
10756 frames = []; // TODO added for LHLS, make sure this is OK
10757
10758 frames.byteLength = 0;
10759 frames.nalCount = 0;
10760 frames.duration = 0;
10761 currentFrame.byteLength = 0;
10762
10763 for (i = 0; i < nalUnits.length; i++) {
10764 currentNal = nalUnits[i]; // Split on 'aud'-type nal units
10765
10766 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
10767 // Since the very first nal unit is expected to be an AUD
10768 // only push to the frames array when currentFrame is not empty
10769 if (currentFrame.length) {
10770 currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
10771
10772 frames.byteLength += currentFrame.byteLength;
10773 frames.nalCount += currentFrame.length;
10774 frames.duration += currentFrame.duration;
10775 frames.push(currentFrame);
10776 }
10777
10778 currentFrame = [currentNal];
10779 currentFrame.byteLength = currentNal.data.byteLength;
10780 currentFrame.pts = currentNal.pts;
10781 currentFrame.dts = currentNal.dts;
10782 } else {
10783 // Specifically flag key frames for ease of use later
10784 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
10785 currentFrame.keyFrame = true;
10786 }
10787
10788 currentFrame.duration = currentNal.dts - currentFrame.dts;
10789 currentFrame.byteLength += currentNal.data.byteLength;
10790 currentFrame.push(currentNal);
10791 }
10792 } // For the last frame, use the duration of the previous frame if we
10793 // have nothing better to go on
10794
10795
10796 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
10797 currentFrame.duration = frames[frames.length - 1].duration;
10798 } // Push the final frame
10799 // TODO added for LHLS, make sure this is OK
10800
10801
10802 frames.byteLength += currentFrame.byteLength;
10803 frames.nalCount += currentFrame.length;
10804 frames.duration += currentFrame.duration;
10805 frames.push(currentFrame);
10806 return frames;
10807 }; // Convert an array of frames into an array of Gop with each Gop being composed
10808 // of the frames that make up that Gop
// Also keep track of cumulative data about the Gop from the frames, such as the
// Gop duration, starting pts, etc.
10811
10812
10813 var groupFramesIntoGops = function groupFramesIntoGops(frames) {
10814 var i,
10815 currentFrame,
10816 currentGop = [],
10817 gops = []; // We must pre-set some of the values on the Gop since we
10818 // keep running totals of these values
10819
10820 currentGop.byteLength = 0;
10821 currentGop.nalCount = 0;
10822 currentGop.duration = 0;
10823 currentGop.pts = frames[0].pts;
10824 currentGop.dts = frames[0].dts; // store some metadata about all the Gops
10825
10826 gops.byteLength = 0;
10827 gops.nalCount = 0;
10828 gops.duration = 0;
10829 gops.pts = frames[0].pts;
10830 gops.dts = frames[0].dts;
10831
10832 for (i = 0; i < frames.length; i++) {
10833 currentFrame = frames[i];
10834
10835 if (currentFrame.keyFrame) {
10836 // Since the very first frame is expected to be an keyframe
10837 // only push to the gops array when currentGop is not empty
10838 if (currentGop.length) {
10839 gops.push(currentGop);
10840 gops.byteLength += currentGop.byteLength;
10841 gops.nalCount += currentGop.nalCount;
10842 gops.duration += currentGop.duration;
10843 }
10844
10845 currentGop = [currentFrame];
10846 currentGop.nalCount = currentFrame.length;
10847 currentGop.byteLength = currentFrame.byteLength;
10848 currentGop.pts = currentFrame.pts;
10849 currentGop.dts = currentFrame.dts;
10850 currentGop.duration = currentFrame.duration;
10851 } else {
10852 currentGop.duration += currentFrame.duration;
10853 currentGop.nalCount += currentFrame.length;
10854 currentGop.byteLength += currentFrame.byteLength;
10855 currentGop.push(currentFrame);
10856 }
10857 }
10858
10859 if (gops.length && currentGop.duration <= 0) {
10860 currentGop.duration = gops[gops.length - 1].duration;
10861 }
10862
10863 gops.byteLength += currentGop.byteLength;
10864 gops.nalCount += currentGop.nalCount;
10865 gops.duration += currentGop.duration; // push the final Gop
10866
10867 gops.push(currentGop);
10868 return gops;
10869 };
10870 /*
10871 * Search for the first keyframe in the GOPs and throw away all frames
10872 * until that keyframe. Then extend the duration of the pulled keyframe
10873 * and pull the PTS and DTS of the keyframe so that it covers the time
10874 * range of the frames that were disposed.
10875 *
10876 * @param {Array} gops video GOPs
10877 * @returns {Array} modified video GOPs
10878 */
10879
10880
10881 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
10882 var currentGop;
10883
10884 if (!gops[0][0].keyFrame && gops.length > 1) {
10885 // Remove the first GOP
10886 currentGop = gops.shift();
10887 gops.byteLength -= currentGop.byteLength;
10888 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
10889 // first gop to cover the time period of the
10890 // frames we just removed
10891
10892 gops[0][0].dts = currentGop.dts;
10893 gops[0][0].pts = currentGop.pts;
10894 gops[0][0].duration += currentGop.duration;
10895 }
10896
10897 return gops;
10898 };
10899 /**
10900 * Default sample object
10901 * see ISO/IEC 14496-12:2012, section 8.6.4.3
10902 */
10903
10904
10905 var createDefaultSample = function createDefaultSample() {
10906 return {
10907 size: 0,
10908 flags: {
10909 isLeading: 0,
10910 dependsOn: 1,
10911 isDependedOn: 0,
10912 hasRedundancy: 0,
10913 degradationPriority: 0,
10914 isNonSyncSample: 1
10915 }
10916 };
10917 };
10918 /*
10919 * Collates information from a video frame into an object for eventual
10920 * entry into an MP4 sample table.
10921 *
10922 * @param {Object} frame the video frame
10923 * @param {Number} dataOffset the byte offset to position the sample
10924 * @return {Object} object containing sample table info for a frame
10925 */
10926
10927
10928 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
10929 var sample = createDefaultSample();
10930 sample.dataOffset = dataOffset;
10931 sample.compositionTimeOffset = frame.pts - frame.dts;
10932 sample.duration = frame.duration;
10933 sample.size = 4 * frame.length; // Space for nal unit size
10934
10935 sample.size += frame.byteLength;
10936
10937 if (frame.keyFrame) {
10938 sample.flags.dependsOn = 2;
10939 sample.flags.isNonSyncSample = 0;
10940 }
10941
10942 return sample;
10943 }; // generate the track's sample table from an array of gops
10944
10945
10946 var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
10947 var h,
10948 i,
10949 sample,
10950 currentGop,
10951 currentFrame,
10952 dataOffset = baseDataOffset || 0,
10953 samples = [];
10954
10955 for (h = 0; h < gops.length; h++) {
10956 currentGop = gops[h];
10957
10958 for (i = 0; i < currentGop.length; i++) {
10959 currentFrame = currentGop[i];
10960 sample = sampleForFrame(currentFrame, dataOffset);
10961 dataOffset += sample.size;
10962 samples.push(sample);
10963 }
10964 }
10965
10966 return samples;
10967 }; // generate the track's raw mdat data from an array of gops
10968
10969
10970 var concatenateNalData = function concatenateNalData(gops) {
10971 var h,
10972 i,
10973 j,
10974 currentGop,
10975 currentFrame,
10976 currentNal,
10977 dataOffset = 0,
10978 nalsByteLength = gops.byteLength,
10979 numberOfNals = gops.nalCount,
10980 totalByteLength = nalsByteLength + 4 * numberOfNals,
10981 data = new Uint8Array(totalByteLength),
10982 view = new DataView(data.buffer); // For each Gop..
10983
10984 for (h = 0; h < gops.length; h++) {
10985 currentGop = gops[h]; // For each Frame..
10986
10987 for (i = 0; i < currentGop.length; i++) {
10988 currentFrame = currentGop[i]; // For each NAL..
10989
10990 for (j = 0; j < currentFrame.length; j++) {
10991 currentNal = currentFrame[j];
10992 view.setUint32(dataOffset, currentNal.data.byteLength);
10993 dataOffset += 4;
10994 data.set(currentNal.data, dataOffset);
10995 dataOffset += currentNal.data.byteLength;
10996 }
10997 }
10998 }
10999
11000 return data;
11001 }; // generate the track's sample table from a frame
11002
11003
11004 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
11005 var sample,
11006 dataOffset = baseDataOffset || 0,
11007 samples = [];
11008 sample = sampleForFrame(frame, dataOffset);
11009 samples.push(sample);
11010 return samples;
11011 }; // generate the track's raw mdat data from a frame
11012
11013
11014 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
11015 var i,
11016 currentNal,
11017 dataOffset = 0,
11018 nalsByteLength = frame.byteLength,
11019 numberOfNals = frame.length,
11020 totalByteLength = nalsByteLength + 4 * numberOfNals,
11021 data = new Uint8Array(totalByteLength),
11022 view = new DataView(data.buffer); // For each NAL..
11023
11024 for (i = 0; i < frame.length; i++) {
11025 currentNal = frame[i];
11026 view.setUint32(dataOffset, currentNal.data.byteLength);
11027 dataOffset += 4;
11028 data.set(currentNal.data, dataOffset);
11029 dataOffset += currentNal.data.byteLength;
11030 }
11031
11032 return data;
11033 };
11034
  // Aggregate export of the H.264 frame/GOP helpers defined above: grouping
  // NAL units into frames and GOPs, and flattening them into MP4 sample
  // tables and raw mdat payloads.
  var frameUtils = {
    groupNalsIntoFrames: groupNalsIntoFrames,
    groupFramesIntoGops: groupFramesIntoGops,
    extendFirstKeyFrame: extendFirstKeyFrame,
    generateSampleTable: generateSampleTable$1,
    concatenateNalData: concatenateNalData,
    generateSampleTableForFrame: generateSampleTableForFrame,
    concatenateNalDataForFrame: concatenateNalDataForFrame
  };
11044 /**
11045 * mux.js
11046 *
11047 * Copyright (c) Brightcove
11048 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11049 */
11050
11051 var highPrefix = [33, 16, 5, 32, 164, 27];
11052 var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
11053
11054 var zeroFill = function zeroFill(count) {
11055 var a = [];
11056
11057 while (count--) {
11058 a.push(0);
11059 }
11060
11061 return a;
11062 };
11063
11064 var makeTable = function makeTable(metaTable) {
11065 return Object.keys(metaTable).reduce(function (obj, key) {
11066 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
11067 return arr.concat(part);
11068 }, []));
11069 return obj;
11070 }, {});
11071 };
11072
  // Lazily-built table of pregenerated silent AAC frames, keyed by sample rate.
  var silence;

  // Return (building on first use) the table of silent AAC frames used to
  // fill gaps in the audio timeline. Keys are sample rates in Hz; values
  // are complete AAC frames of silence as Uint8Arrays.
  var silence_1 = function silence_1() {
    if (!silence) {
      // Frames-of-silence to use for filling in missing AAC frames
      var coneOfSilence = {
        96000: [highPrefix, [227, 64], zeroFill(154), [56]],
        88200: [highPrefix, [231], zeroFill(170), [56]],
        64000: [highPrefix, [248, 192], zeroFill(240), [56]],
        48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
        44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
        32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
        24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
        16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
        12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
        11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
        8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
      };
      silence = makeTable(coneOfSilence);
    }

    return silence;
  };
11096 /**
11097 * mux.js
11098 *
11099 * Copyright (c) Brightcove
11100 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11101 */
11102
11103
11104 var ONE_SECOND_IN_TS$4 = 90000,
11105 // 90kHz clock
11106 secondsToVideoTs,
11107 secondsToAudioTs,
11108 videoTsToSeconds,
11109 audioTsToSeconds,
11110 audioTsToVideoTs,
11111 videoTsToAudioTs,
11112 metadataTsToSeconds;
11113
11114 secondsToVideoTs = function secondsToVideoTs(seconds) {
11115 return seconds * ONE_SECOND_IN_TS$4;
11116 };
11117
11118 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
11119 return seconds * sampleRate;
11120 };
11121
11122 videoTsToSeconds = function videoTsToSeconds(timestamp) {
11123 return timestamp / ONE_SECOND_IN_TS$4;
11124 };
11125
11126 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
11127 return timestamp / sampleRate;
11128 };
11129
11130 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
11131 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
11132 };
11133
11134 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
11135 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
11136 };
11137 /**
11138 * Adjust ID3 tag or caption timing information by the timeline pts values
11139 * (if keepOriginalTimestamps is false) and convert to seconds
11140 */
11141
11142
11143 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
11144 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
11145 };
11146
11147 var clock = {
11148 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
11149 secondsToVideoTs: secondsToVideoTs,
11150 secondsToAudioTs: secondsToAudioTs,
11151 videoTsToSeconds: videoTsToSeconds,
11152 audioTsToSeconds: audioTsToSeconds,
11153 audioTsToVideoTs: audioTsToVideoTs,
11154 videoTsToAudioTs: videoTsToAudioTs,
11155 metadataTsToSeconds: metadataTsToSeconds
11156 };
11157 /**
11158 * mux.js
11159 *
11160 * Copyright (c) Brightcove
11161 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11162 */
11163
11164 /**
11165 * Sum the `byteLength` properties of the data in each AAC frame
11166 */
11167
11168 var sumFrameByteLengths = function sumFrameByteLengths(array) {
11169 var i,
11170 currentObj,
11171 sum = 0; // sum the byteLength's all each nal unit in the frame
11172
11173 for (i = 0; i < array.length; i++) {
11174 currentObj = array[i];
11175 sum += currentObj.data.byteLength;
11176 }
11177
11178 return sum;
11179 }; // Possibly pad (prefix) the audio track with silence if appending this track
11180 // would lead to the introduction of a gap in the audio buffer
11181
11182
  // Prefix the audio frame list with frames of silence when appending this
  // track would otherwise introduce a gap in the audio buffer. Mutates
  // `frames` (unshifting silent frames) and reduces track.baseMediaDecodeTime
  // by the inserted amount. Returns the inserted fill duration in 90kHz
  // ticks, or undefined when no fill was performed.
  var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
    var baseMediaDecodeTimeTs,
        frameDuration = 0,
        audioGapDuration = 0,
        audioFillFrameCount = 0,
        audioFillDuration = 0,
        silentFrame,
        i,
        firstFrame;

    // nothing to pad when there are no frames at all
    if (!frames.length) {
      return;
    }

    baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills

    // each AAC frame decodes 1024 samples, converted here to 90kHz ticks
    frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));

    if (audioAppendStartTs && videoBaseMediaDecodeTime) {
      // insert the shortest possible amount (audio gap or audio to video gap)
      audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap

      audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
      audioFillDuration = audioFillFrameCount * frameDuration;
    } // don't attempt to fill gaps smaller than a single frame or larger
    // than a half second


    if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
      return;
    }

    // look up a pregenerated silent frame matching this sample rate
    silentFrame = silence_1()[track.samplerate];

    if (!silentFrame) {
      // we don't have a silent frame pregenerated for the sample rate, so use a frame
      // from the content instead
      silentFrame = frames[0].data;
    }

    // prepend silent frames, each ending where the current first frame begins
    for (i = 0; i < audioFillFrameCount; i++) {
      firstFrame = frames[0];
      frames.splice(0, 0, {
        data: silentFrame,
        dts: firstFrame.dts - frameDuration,
        pts: firstFrame.pts - frameDuration
      });
    }

    // the track now starts earlier by the inserted amount, converted from
    // the 90kHz clock back into the audio sample clock
    track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
    return audioFillDuration;
  }; // If the audio segment extends before the earliest allowed dts
11235 // value, remove AAC frames until starts at or after the earliest
11236 // allowed DTS so that we don't end up with a negative baseMedia-
11237 // DecodeTime for the audio track
11238
11239
11240 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
11241 if (track.minSegmentDts >= earliestAllowedDts) {
11242 return adtsFrames;
11243 } // We will need to recalculate the earliest segment Dts
11244
11245
11246 track.minSegmentDts = Infinity;
11247 return adtsFrames.filter(function (currentFrame) {
11248 // If this is an allowed frame, keep it and record it's Dts
11249 if (currentFrame.dts >= earliestAllowedDts) {
11250 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
11251 track.minSegmentPts = track.minSegmentDts;
11252 return true;
11253 } // Otherwise, discard it
11254
11255
11256 return false;
11257 });
11258 }; // generate the track's raw mdat data from an array of frames
11259
11260
11261 var generateSampleTable = function generateSampleTable(frames) {
11262 var i,
11263 currentFrame,
11264 samples = [];
11265
11266 for (i = 0; i < frames.length; i++) {
11267 currentFrame = frames[i];
11268 samples.push({
11269 size: currentFrame.data.byteLength,
11270 duration: 1024 // For AAC audio, all samples contain 1024 samples
11271
11272 });
11273 }
11274
11275 return samples;
11276 }; // generate the track's sample table from an array of frames
11277
11278
11279 var concatenateFrameData = function concatenateFrameData(frames) {
11280 var i,
11281 currentFrame,
11282 dataOffset = 0,
11283 data = new Uint8Array(sumFrameByteLengths(frames));
11284
11285 for (i = 0; i < frames.length; i++) {
11286 currentFrame = frames[i];
11287 data.set(currentFrame.data, dataOffset);
11288 dataOffset += currentFrame.data.byteLength;
11289 }
11290
11291 return data;
11292 };
11293
  // Aggregate export of the AAC frame helpers defined above: gap filling
  // with silence, trimming early frames, and sample-table/mdat generation.
  var audioFrameUtils = {
    prefixWithSilence: prefixWithSilence,
    trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
    generateSampleTable: generateSampleTable,
    concatenateFrameData: concatenateFrameData
  };
11300 /**
11301 * mux.js
11302 *
11303 * Copyright (c) Brightcove
11304 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11305 */
11306
11307 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
11308 /**
11309 * Store information about the start and end of the track and the
11310 * duration for each frame/sample we process in order to calculate
11311 * the baseMediaDecodeTime
11312 */
11313
11314 var collectDtsInfo = function collectDtsInfo(track, data) {
11315 if (typeof data.pts === 'number') {
11316 if (track.timelineStartInfo.pts === undefined) {
11317 track.timelineStartInfo.pts = data.pts;
11318 }
11319
11320 if (track.minSegmentPts === undefined) {
11321 track.minSegmentPts = data.pts;
11322 } else {
11323 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
11324 }
11325
11326 if (track.maxSegmentPts === undefined) {
11327 track.maxSegmentPts = data.pts;
11328 } else {
11329 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
11330 }
11331 }
11332
11333 if (typeof data.dts === 'number') {
11334 if (track.timelineStartInfo.dts === undefined) {
11335 track.timelineStartInfo.dts = data.dts;
11336 }
11337
11338 if (track.minSegmentDts === undefined) {
11339 track.minSegmentDts = data.dts;
11340 } else {
11341 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
11342 }
11343
11344 if (track.maxSegmentDts === undefined) {
11345 track.maxSegmentDts = data.dts;
11346 } else {
11347 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
11348 }
11349 }
11350 };
11351 /**
11352 * Clear values used to calculate the baseMediaDecodeTime between
11353 * tracks
11354 */
11355
11356
11357 var clearDtsInfo = function clearDtsInfo(track) {
11358 delete track.minSegmentDts;
11359 delete track.maxSegmentDts;
11360 delete track.minSegmentPts;
11361 delete track.maxSegmentPts;
11362 };
11363 /**
11364 * Calculate the track's baseMediaDecodeTime based on the earliest
11365 * DTS the transmuxer has ever seen and the minimum DTS for the
11366 * current track
11367 * @param track {object} track metadata configuration
11368 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
11369 * in the source; false to adjust the first segment to start at 0.
11370 */
11371
11372
11373 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
11374 var baseMediaDecodeTime,
11375 scale,
11376 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
11377
11378 if (!keepOriginalTimestamps) {
11379 minSegmentDts -= track.timelineStartInfo.dts;
11380 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
11381 // we want the start of the first segment to be placed
11382
11383
11384 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
11385
11386 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
11387
11388 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
11389
11390 if (track.type === 'audio') {
11391 // Audio has a different clock equal to the sampling_rate so we need to
11392 // scale the PTS values into the clock rate of the track
11393 scale = track.samplerate / ONE_SECOND_IN_TS$3;
11394 baseMediaDecodeTime *= scale;
11395 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
11396 }
11397
11398 return baseMediaDecodeTime;
11399 };
11400
  // Aggregate export of the DTS/PTS bookkeeping helpers defined above.
  var trackDecodeInfo = {
    clearDtsInfo: clearDtsInfo,
    calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
    collectDtsInfo: collectDtsInfo
  };
11406 /**
11407 * mux.js
11408 *
11409 * Copyright (c) Brightcove
11410 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11411 *
11412 * Reads in-band caption information from a video elementary
11413 * stream. Captions must follow the CEA-708 standard for injection
11414 * into an MPEG-2 transport streams.
11415 * @see https://en.wikipedia.org/wiki/CEA-708
11416 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
11417 */
// SEI messages carry a payload type field to indicate how they are to be
// interpreted. CEA-708 caption content is always transmitted with
// payload type 0x04.
11421
11422 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
11423 RBSP_TRAILING_BITS = 128;
11424 /**
11425 * Parse a supplemental enhancement information (SEI) NAL unit.
11426 * Stops parsing once a message of type ITU T T35 has been found.
11427 *
11428 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
11429 * @return {object} the parsed SEI payload
11430 * @see Rec. ITU-T H.264, 7.3.2.3.1
11431 */
11432
11433 var parseSei = function parseSei(bytes) {
11434 var i = 0,
11435 result = {
11436 payloadType: -1,
11437 payloadSize: 0
11438 },
11439 payloadType = 0,
11440 payloadSize = 0; // go through the sei_rbsp parsing each each individual sei_message
11441
11442 while (i < bytes.byteLength) {
11443 // stop once we have hit the end of the sei_rbsp
11444 if (bytes[i] === RBSP_TRAILING_BITS) {
11445 break;
11446 } // Parse payload type
11447
11448
11449 while (bytes[i] === 0xFF) {
11450 payloadType += 255;
11451 i++;
11452 }
11453
11454 payloadType += bytes[i++]; // Parse payload size
11455
11456 while (bytes[i] === 0xFF) {
11457 payloadSize += 255;
11458 i++;
11459 }
11460
11461 payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
11462 // there can only ever be one caption message in a frame's sei
11463
11464 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
11465 var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
11466
11467 if (userIdentifier === 'GA94') {
11468 result.payloadType = payloadType;
11469 result.payloadSize = payloadSize;
11470 result.payload = bytes.subarray(i, i + payloadSize);
11471 break;
11472 } else {
11473 result.payload = void 0;
11474 }
11475 } // skip the payload and parse the next message
11476
11477
11478 i += payloadSize;
11479 payloadType = 0;
11480 payloadSize = 0;
11481 }
11482
11483 return result;
11484 }; // see ANSI/SCTE 128-1 (2013), section 8.1
11485
11486
11487 var parseUserData = function parseUserData(sei) {
11488 // itu_t_t35_contry_code must be 181 (United States) for
11489 // captions
11490 if (sei.payload[0] !== 181) {
11491 return null;
11492 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
11493
11494
11495 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
11496 return null;
11497 } // the user_identifier should be "GA94" to indicate ATSC1 data
11498
11499
11500 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
11501 return null;
11502 } // finally, user_data_type_code should be 0x03 for caption data
11503
11504
11505 if (sei.payload[7] !== 0x03) {
11506 return null;
11507 } // return the user_data_type_structure and strip the trailing
11508 // marker bits
11509
11510
11511 return sei.payload.subarray(8, sei.payload.length - 1);
11512 }; // see CEA-708-D, section 4.4
11513
11514
11515 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
11516 var results = [],
11517 i,
11518 count,
11519 offset,
11520 data; // if this is just filler, return immediately
11521
11522 if (!(userData[0] & 0x40)) {
11523 return results;
11524 } // parse out the cc_data_1 and cc_data_2 fields
11525
11526
11527 count = userData[0] & 0x1f;
11528
11529 for (i = 0; i < count; i++) {
11530 offset = i * 3;
11531 data = {
11532 type: userData[offset + 2] & 0x03,
11533 pts: pts
11534 }; // capture cc data when cc_valid is 1
11535
11536 if (userData[offset + 2] & 0x04) {
11537 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
11538 results.push(data);
11539 }
11540 }
11541
11542 return results;
11543 };
11544
11545 var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
11546 var length = data.byteLength,
11547 emulationPreventionBytesPositions = [],
11548 i = 1,
11549 newLength,
11550 newData; // Find all `Emulation Prevention Bytes`
11551
11552 while (i < length - 2) {
11553 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
11554 emulationPreventionBytesPositions.push(i + 2);
11555 i += 2;
11556 } else {
11557 i++;
11558 }
11559 } // If no Emulation Prevention Bytes were found just return the original
11560 // array
11561
11562
11563 if (emulationPreventionBytesPositions.length === 0) {
11564 return data;
11565 } // Create a new array to hold the NAL unit data
11566
11567
11568 newLength = length - emulationPreventionBytesPositions.length;
11569 newData = new Uint8Array(newLength);
11570 var sourceIndex = 0;
11571
11572 for (i = 0; i < newLength; sourceIndex++, i++) {
11573 if (sourceIndex === emulationPreventionBytesPositions[0]) {
11574 // Skip this byte
11575 sourceIndex++; // Remove this position index
11576
11577 emulationPreventionBytesPositions.shift();
11578 }
11579
11580 newData[i] = data[sourceIndex];
11581 }
11582
11583 return newData;
11584 }; // exports
11585
11586
  // Aggregate export of the CEA-608/708 caption packet parsing helpers
  // defined above.
  var captionPacketParser = {
    parseSei: parseSei,
    parseUserData: parseUserData,
    parseCaptionPackets: parseCaptionPackets,
    discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
    USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
  }; // Link To Transport
11594 // -----------------
11595
  /**
   * Stream that collects caption packets extracted from SEI NAL units and
   * routes them to four per-field/channel CEA-608 streams plus, when
   * enabled, a CEA-708 stream. Data/done events from the child streams are
   * re-emitted on this stream.
   *
   * @param {Object} [options]
   * @param {boolean} [options.parse708captions=true] whether to construct
   *        and wire up a Cea708Stream
   * @param {Object} [options.captionServices] service configuration passed
   *        through to the Cea708Stream
   */
  var CaptionStream$1 = function CaptionStream(options) {
    options = options || {};
    CaptionStream.prototype.init.call(this); // parse708captions flag, default to true

    this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
    this.captionPackets_ = [];
    // one CEA-608 stream for each field (0/1) and channel (0/1) combination
    this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
    new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
    new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
    new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
    ];

    if (this.parse708captions_) {
      this.cc708Stream_ = new Cea708Stream({
        captionServices: options.captionServices
      }); // eslint-disable-line no-use-before-define
    }

    this.reset(); // forward data and done events from CCs to this CaptionStream

    this.ccStreams_.forEach(function (cc) {
      cc.on('data', this.trigger.bind(this, 'data'));
      cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
      cc.on('done', this.trigger.bind(this, 'done'));
    }, this);

    if (this.parse708captions_) {
      this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
      this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
      this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
    }
  };
11628
11629 CaptionStream$1.prototype = new stream();
11630
/**
 * Examine a NAL-unit event, extract CEA-608/708 caption byte pairs from its
 * SEI payload, and buffer them (this.captionPackets_) for the next flush.
 * Data from re-downloaded segments is detected via dts tracking and dropped.
 */
CaptionStream$1.prototype.push = function (event) {
  var sei, userData, newCaptionPackets; // only examine SEI NALs

  if (event.nalUnitType !== 'sei_rbsp') {
    return;
  } // parse the sei

  sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip

  if (!sei.payload) {
    return;
  } // ignore everything but user_data_registered_itu_t_t35

  if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
    return;
  } // parse out the user data payload

  userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData

  if (!userData) {
    return;
  } // Sometimes, the same segment # will be downloaded twice. To stop the
  // caption data from being processed twice, we track the latest dts we've
  // received and ignore everything with a dts before that. However, since
  // data for a specific dts can be split across packets on either side of
  // a segment boundary, we need to make sure we *don't* ignore the packets
  // from the *next* segment that have dts === this.latestDts_. By constantly
  // tracking the number of packets received with dts === this.latestDts_, we
  // know how many should be ignored once we start receiving duplicates.

  if (event.dts < this.latestDts_) {
    // We've started getting older data, so set the flag.
    this.ignoreNextEqualDts_ = true;
    return;
  } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
    this.numSameDts_--;

    if (!this.numSameDts_) {
      // We've received the last duplicate packet, time to start processing again
      this.ignoreNextEqualDts_ = false;
    }

    return;
  } // parse out CC data packets and save them for later

  newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
  this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);

  // Count how many packets share the current dts so duplicates can be
  // skipped exactly once each on a re-download.
  if (this.latestDts_ !== event.dts) {
    this.numSameDts_ = 0;
  }

  this.numSameDts_++;
  this.latestDts_ = event.dts;
};
11691
CaptionStream$1.prototype.flushCCStreams = function (flushType) {
  // A full 'flush' drains each CEA-608 stream; anything else is partial.
  this.ccStreams_.forEach(function (cc) {
    if (flushType === 'flush') {
      return cc.flush();
    }

    return cc.partialFlush();
  }, this);
};
11697
/**
 * Sort all buffered caption packets by pts (stably), dispatch each to the
 * appropriate CEA-608 or CEA-708 stream, then flush the child streams.
 */
CaptionStream$1.prototype.flushStream = function (flushType) {
  // make sure we actually parsed captions before proceeding
  if (!this.captionPackets_.length) {
    this.flushCCStreams(flushType);
    return;
  } // In Chrome, the Array#sort function is not stable so add a
  // presortIndex that we can use to ensure we get a stable-sort

  this.captionPackets_.forEach(function (elem, idx) {
    elem.presortIndex = idx;
  }); // sort caption byte-pairs based on their PTS values

  this.captionPackets_.sort(function (a, b) {
    if (a.pts === b.pts) {
      // Equal pts: fall back to original order (stable sort)
      return a.presortIndex - b.presortIndex;
    }

    return a.pts - b.pts;
  });

  this.captionPackets_.forEach(function (packet) {
    if (packet.type < 2) {
      // Dispatch packet to the right Cea608Stream (types 0/1 are 608 fields)
      this.dispatchCea608Packet(packet);
    } else {
      // Dispatch packet to the Cea708Stream
      this.dispatchCea708Packet(packet);
    }
  }, this);

  // Everything dispatched; empty the buffer and drain the child streams.
  this.captionPackets_.length = 0;
  this.flushCCStreams(flushType);
};
11730
// Fully drain all buffered caption data.
CaptionStream$1.prototype.flush = function () {
  return this.flushStream('flush');
};

// Only called if handling partial data.
CaptionStream$1.prototype.partialFlush = function () {
  return this.flushStream('partialFlush');
};
11739
CaptionStream$1.prototype.reset = function () {
  // Clear duplicate-segment tracking, deactivate both field channels,
  // and reset every CEA-608 stream.
  this.latestDts_ = null;
  this.ignoreNextEqualDts_ = false;
  this.numSameDts_ = 0;
  this.activeCea608Channel_ = [null, null];

  this.ccStreams_.forEach(function (ccStream) {
    return ccStream.reset();
  });
}; // From the CEA-608 spec:
11749
11750 /*
11751 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
11752 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
11753 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
11754 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
11755 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
11756 * to switch to captioning or Text.
11757 */
11758 // With that in mind, we ignore any data between an XDS control code and a
11759 // subsequent closed-captioning control code.
11760
11761
/**
 * Route a CEA-608 byte pair to the stream for its field/channel, tracking
 * the active channel per field and discarding Text/XDS data.
 */
CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
  // NOTE: packet.type is the CEA608 field
  if (this.setsTextOrXDSActive(packet)) {
    // Text/XDS control code: deactivate the channel for this field
    this.activeCea608Channel_[packet.type] = null;
  } else if (this.setsChannel1Active(packet)) {
    this.activeCea608Channel_[packet.type] = 0;
  } else if (this.setsChannel2Active(packet)) {
    this.activeCea608Channel_[packet.type] = 1;
  }

  if (this.activeCea608Channel_[packet.type] === null) {
    // If we haven't received anything to set the active channel, or the
    // packets are Text/XDS data, discard the data; we don't want jumbled
    // captions
    return;
  }

  // ccStreams_ is laid out as [(f0,c0), (f0,c1), (f1,c0), (f1,c1)], so the
  // target index is (field << 1) + channel.
  this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
};
11781
// True when the byte pair is a control code selecting data channel 1.
CaptionStream$1.prototype.setsChannel1Active = function (packet) {
  var cc = packet.ccData;
  return (cc & 0x7800) === 0x1000;
};

// True when the byte pair is a control code selecting data channel 2.
CaptionStream$1.prototype.setsChannel2Active = function (packet) {
  var cc = packet.ccData;
  return (cc & 0x7800) === 0x1800;
};

// True when the byte pair switches the field into Text or XDS mode.
CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
  var cc = packet.ccData;
  return (cc & 0x7100) === 0x0100 || (cc & 0x78fe) === 0x102a || (cc & 0x78fe) === 0x182a;
};
11793
CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
  // 708 parsing may be disabled via the parse708captions option.
  if (!this.parse708captions_) {
    return;
  }

  this.cc708Stream_.push(packet);
}; // ----------------------
11799 // Session to Application
11800 // ----------------------
11801 // This hash maps special and extended character codes to their
11802 // proper Unicode equivalent. The first one-byte key is just a
11803 // non-standard character code. The two-byte keys that follow are
11804 // the extended CEA708 character codes, along with the preceding
11805 // 0x10 extended character byte to distinguish these codes from
11806 // non-extended character codes. Every CEA708 character code that
11807 // is not in this object maps directly to a standard unicode
11808 // character code.
11809 // The transparent space and non-breaking transparent space are
11810 // technically not fully supported since there is no code to
11811 // make them transparent, so they have normal non-transparent
11812 // stand-ins.
11813 // The special closed caption (CC) character isn't a standard
11814 // unicode character, so a fairly similar unicode character was
11815 // chosen in it's place.
11816
11817
var CHARACTER_TRANSLATION_708 = {
  0x7f: 0x266a, // ♪
  0x1020: 0x20, // Transparent Space
  0x1021: 0xa0, // Non-breaking Transparent Space
  0x1025: 0x2026, // …
  0x102a: 0x0160, // Š
  0x102c: 0x0152, // Œ
  0x1030: 0x2588, // █
  0x1031: 0x2018, // ‘
  0x1032: 0x2019, // ’
  0x1033: 0x201c, // “
  0x1034: 0x201d, // ”
  0x1035: 0x2022, // •
  0x1039: 0x2122, // ™
  0x103a: 0x0161, // š
  0x103c: 0x0153, // œ
  0x103d: 0x2120, // ℠
  0x103f: 0x0178, // Ÿ
  0x1076: 0x215b, // ⅛
  0x1077: 0x215c, // ⅜
  0x1078: 0x215d, // ⅝
  0x1079: 0x215e, // ⅞
  0x107a: 0x23d0, // ⏐
  0x107b: 0x23a4, // ⎤
  0x107c: 0x23a3, // ⎣
  0x107d: 0x23af, // ⎯
  0x107e: 0x23a6, // ⎦
  0x107f: 0x23a1, // ⎡
  0x10a0: 0x3138 // ㄸ (CC char)
};

// Translate a (possibly 0x10-extended) CEA-708 character code to a one-char
// string. Extended codes with no table entry are invalid and map to ''.
var get708CharFromCode = function get708CharFromCode(code) {
  var translated = CHARACTER_TRANSLATION_708[code] || code;

  if (code & 0x1000 && code === translated) {
    // Invalid extended code
    return '';
  }

  return String.fromCharCode(translated);
};

// Printable CEA-708 text byte: G0 (0x20-0x7f) or G1 (0xa0-0xff).
var within708TextBlock = function within708TextBlock(b) {
  return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
};
11891
// A single CEA-708 caption window; starts in its default (reset) state.
var Cea708Window = function Cea708Window(windowNum) {
  this.windowNum = windowNum;
  this.reset();
};
11896
Cea708Window.prototype.reset = function () {
  // Wipe text and all attribute state.
  this.clearText();
  this.pendingNewLine = false;
  this.winAttr = {};
  this.penAttr = {};
  this.penLoc = {};
  this.penColor = {};

  // These default values are arbitrary; defineWindow will usually override them.
  var defaults = {
    visible: 0,
    rowLock: 0,
    columnLock: 0,
    priority: 0,
    relativePositioning: 0,
    anchorVertical: 0,
    anchorHorizontal: 0,
    anchorPoint: 0,
    rowCount: 1,
    columnCount: 41,
    windowStyle: 0,
    penStyle: 0
  };

  for (var key in defaults) {
    this[key] = defaults[key];
  }

  this.virtualRowCount = this.rowCount + 1;
};
11920
// One string per row, joined with newlines.
Cea708Window.prototype.getText = function () {
  return this.rows.join('\n');
};

// A cleared window holds exactly one empty row.
Cea708Window.prototype.clearText = function () {
  this.rows = [''];
  this.rowIdx = 0;
};
11929
Cea708Window.prototype.newLine = function (pts) {
  var atCapacity = this.rows.length >= this.virtualRowCount;

  // Give the owner a chance to flush before a row is scrolled away.
  if (atCapacity && typeof this.beforeRowOverflow === 'function') {
    this.beforeRowOverflow(pts);
  }

  if (this.rows.length > 0) {
    this.rows.push('');
    this.rowIdx++;
  }

  // Drop rows from the top until we fit the virtual row budget; everything
  // left stays visible since there's no real scrolling.
  while (this.rows.length > this.virtualRowCount) {
    this.rows.shift();
    this.rowIdx--;
  }
};
11946
Cea708Window.prototype.isEmpty = function () {
  if (this.rows.length === 0) {
    return true;
  }

  // A single empty row also counts as empty.
  return this.rows.length === 1 && this.rows[0] === '';
};
11956
// Append text to the current row.
Cea708Window.prototype.addText = function (text) {
  this.rows[this.rowIdx] += text;
};

// Drop the last character of the current row, if any.
Cea708Window.prototype.backspace = function () {
  if (this.isEmpty()) {
    return;
  }

  var currentRow = this.rows[this.rowIdx];
  this.rows[this.rowIdx] = currentRow.substr(0, currentRow.length - 1);
};
11967
// A single CEA-708 caption service: eight windows plus accumulated text.
var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
  this.serviceNum = serviceNum;
  this.text = '';
  this.currentWindow = new Cea708Window(-1);
  this.windows = [];
  this.stream = stream;

  // Only attempt to set up a TextDecoder when an encoding was provided.
  if (typeof encoding === 'string') {
    this.createTextDecoder(encoding);
  }
};
/**
 * Initialize the eight service windows.
 * Must be run before the service is used.
 *
 * @param {Integer} pts PTS value
 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
 */
Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
  var hasOverflowHandler = typeof beforeRowOverflow === 'function';
  this.startPts = pts;

  for (var windowNum = 0; windowNum < 8; windowNum++) {
    this.windows[windowNum] = new Cea708Window(windowNum);

    if (hasOverflowHandler) {
      this.windows[windowNum].beforeRowOverflow = beforeRowOverflow;
    }
  }
};
/**
 * Select which window subsequent commands affect.
 *
 * @param {Integer} windowNum Window number
 */
Cea708Service.prototype.setCurrentWindow = function (windowNum) {
  this.currentWindow = this.windows[windowNum];
};
/**
 * Try to create a TextDecoder if it is natively supported; logs a warning
 * through the owning stream when unavailable or when construction fails.
 */
Cea708Service.prototype.createTextDecoder = function (encoding) {
  if (typeof TextDecoder === 'undefined') {
    this.stream.trigger('log', {
      level: 'warn',
      message: 'The `encoding` option is unsupported without TextDecoder support'
    });
    return;
  }

  try {
    this.textDecoder_ = new TextDecoder(encoding);
  } catch (error) {
    this.stream.trigger('log', {
      level: 'warn',
      message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
    });
  }
};
12031
var Cea708Stream = function Cea708Stream(options) {
  options = options || {};
  Cea708Stream.prototype.init.call(this);
  var self = this;
  var captionServices = options.captionServices || {};
  var captionServiceEncodings = {};

  // Collect declared encodings from the captionServices option block,
  // keyed by their SERVICE* name.
  Object.keys(captionServices).forEach(function (serviceName) {
    var serviceProps = captionServices[serviceName];

    if (/^SERVICE/.test(serviceName)) {
      captionServiceEncodings[serviceName] = serviceProps.encoding;
    }
  });

  this.serviceEncodings = captionServiceEncodings;
  this.current708Packet = null;
  this.services = {};

  this.push = function (packet) {
    if (packet.type === 3) {
      // 708 packet start: flush the previous packet and begin a new one.
      self.new708Packet();
      self.add708Bytes(packet);
      return;
    }

    if (self.current708Packet === null) {
      // This should only happen at the start of a file if there's no packet start.
      self.new708Packet();
    }

    self.add708Bytes(packet);
  };
};

Cea708Stream.prototype = new stream();
/**
 * Push the current 708 packet downstream (if one exists), then start a
 * fresh, empty packet.
 */
Cea708Stream.prototype.new708Packet = function () {
  if (this.current708Packet !== null) {
    this.push708Packet();
  }

  this.current708Packet = {
    data: [],
    ptsVals: []
  };
};
/**
 * Add pts and both bytes from packet into current 708 packet.
 */
Cea708Stream.prototype.add708Bytes = function (packet) {
  var ccData = packet.ccData;
  var highByte = ccData >>> 8;
  var lowByte = ccData & 0xff;

  // Bytes are stored individually (rather than as byte pairs) because the
  // spec doesn't guarantee service blocks line up with byte pairs.
  this.current708Packet.ptsVals.push(packet.pts);
  this.current708Packet.data.push(highByte);
  this.current708Packet.data.push(lowByte);
};
/**
 * Parse completed 708 packet into service blocks and push each service block.
 */
Cea708Stream.prototype.push708Packet = function () {
  var packet708 = this.current708Packet;
  var packetData = packet708.data;
  var serviceNum = null;
  var blockSize = null;
  var i = 0;
  var b = packetData[i++]; // first byte is the packet header

  packet708.seq = b >> 6; // sequence number: top 2 bits
  packet708.sizeCode = b & 0x3f; // 0b00111111;

  // NOTE: i advances both via the for-loop increment and the i++ reads below,
  // so each iteration consumes one service-block header plus its payload.
  for (; i < packetData.length; i++) {
    b = packetData[i++]; // service-block header byte

    serviceNum = b >> 5; // top 3 bits
    blockSize = b & 0x1f; // 0b00011111

    if (serviceNum === 7 && blockSize > 0) {
      // Extended service num: real service number is in the next byte
      b = packetData[i++];
      serviceNum = b;
    }

    this.pushServiceBlock(serviceNum, i, blockSize);

    if (blockSize > 0) {
      // Skip the block payload (the for-loop's i++ covers the final byte)
      i += blockSize - 1;
    }
  }
};
/**
 * Parse service block, execute commands, read text.
 *
 * Note: While many of these commands serve important purposes,
 * many others just parse out the parameters or attributes, but
 * nothing is done with them because this is not a full and complete
 * implementation of the entire 708 spec.
 *
 * @param {Integer} serviceNum Service number
 * @param {Integer} start Start index of the 708 packet data
 * @param {Integer} size Block size
 */
Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
  var b;
  var i = start;
  var packetData = this.current708Packet.data;
  var service = this.services[serviceNum];

  if (!service) {
    // Lazily create the service on first use
    service = this.initService(serviceNum, i);
  }

  for (; i < start + size && i < packetData.length; i++) {
    b = packetData[i];

    if (within708TextBlock(b)) {
      // Printable text byte
      i = this.handleText(i, service);
    } else if (b === 0x18) {
      i = this.multiByteCharacter(i, service);
    } else if (b === 0x10) {
      // Extended command/character prefix
      i = this.extendedCommands(i, service);
    } else if (0x80 <= b && b <= 0x87) {
      // CW#: set current window
      i = this.setCurrentWindow(i, service);
    } else if (0x98 <= b && b <= 0x9f) {
      // DF#: define window
      i = this.defineWindow(i, service);
    } else if (b === 0x88) {
      // CLW: clear windows
      i = this.clearWindows(i, service);
    } else if (b === 0x8c) {
      // DLW: delete windows
      i = this.deleteWindows(i, service);
    } else if (b === 0x89) {
      // DSW: display windows
      i = this.displayWindows(i, service);
    } else if (b === 0x8a) {
      // HDW: hide windows
      i = this.hideWindows(i, service);
    } else if (b === 0x8b) {
      // TGW: toggle windows
      i = this.toggleWindows(i, service);
    } else if (b === 0x97) {
      // SWA: set window attributes
      i = this.setWindowAttributes(i, service);
    } else if (b === 0x90) {
      // SPA: set pen attributes
      i = this.setPenAttributes(i, service);
    } else if (b === 0x91) {
      // SPC: set pen color
      i = this.setPenColor(i, service);
    } else if (b === 0x92) {
      // SPL: set pen location
      i = this.setPenLocation(i, service);
    } else if (b === 0x8f) {
      service = this.reset(i, service);
    } else if (b === 0x08) {
      // BS: Backspace
      service.currentWindow.backspace();
    } else if (b === 0x0c) {
      // FF: Form feed
      service.currentWindow.clearText();
    } else if (b === 0x0d) {
      // CR: Carriage return
      service.currentWindow.pendingNewLine = true;
    } else if (b === 0x0e) {
      // HCR: Horizontal carriage return
      service.currentWindow.clearText();
    } else if (b === 0x8d) {
      // DLY: Delay, nothing to do
      i++;
    }
    // All other codes are unsupported and silently ignored.
    // (The original code ended with a dangling empty `else ;` statement,
    // removed here as it had no effect.)
  }
};
/**
 * Execute an extended command
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.extendedCommands = function (i, service) {
  var packetData = this.current708Packet.data;
  var nextByte = packetData[++i];

  // Only extended-character text is handled; other extended commands
  // fall through without consuming additional bytes.
  if (within708TextBlock(nextByte)) {
    i = this.handleText(i, service, {
      isExtended: true
    });
  }

  return i;
};
/**
 * Get PTS value of a given byte index
 *
 * @param {Integer} byteIndex Index of the byte
 * @return {Integer} PTS
 */
Cea708Stream.prototype.getPts = function (byteIndex) {
  // Each byte pair shares a single pts value.
  var pairIndex = Math.floor(byteIndex / 2);
  return this.current708Packet.ptsVals[pairIndex];
};
/**
 * Initializes a service
 *
 * @param {Integer} serviceNum Service number
 * @param {Integer} i Current index in the 708 packet (used for the start pts)
 * @return {Service} Initialized service object
 */
Cea708Stream.prototype.initService = function (serviceNum, i) {
  // Fix: the original declared `var serviceName;` a second time after it was
  // already initialized; the redundant redeclaration is removed here.
  var serviceName = 'SERVICE' + serviceNum;
  var self = this;
  var encoding;

  // Look up a configured text encoding for this service, if any.
  if (serviceName in this.serviceEncodings) {
    encoding = this.serviceEncodings[serviceName];
  }

  this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);

  // On row overflow, flush this service's displayed windows as a caption.
  this.services[serviceNum].init(this.getPts(i), function (pts) {
    self.flushDisplayed(pts, self.services[serviceNum]);
  });

  return this.services[serviceNum];
};
/**
 * Execute text writing to current window
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @param {Object} [options] Optional isExtended / isMultiByte flags
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.handleText = function (i, service, options) {
  var isExtended = options && options.isExtended;
  var isMultiByte = options && options.isMultiByte;
  var packetData = this.current708Packet.data;
  var extended = isExtended ? 0x1000 : 0x0000; // extended codes are keyed with a 0x1000 offset in the translation table

  var currentByte = packetData[i];
  var nextByte = packetData[i + 1];
  var win = service.currentWindow;
  var char;
  var charCodeArray; // Use the TextDecoder if one was created for this service

  if (service.textDecoder_ && !isExtended) {
    if (isMultiByte) {
      // Two-byte character: consume the second byte as well
      charCodeArray = [currentByte, nextByte];
      i++;
    } else {
      charCodeArray = [currentByte];
    }

    char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
  } else {
    char = get708CharFromCode(extended | currentByte);
  }

  // A pending carriage return is only applied once more text arrives
  if (win.pendingNewLine && !win.isEmpty()) {
    win.newLine(this.getPts(i));
  }

  win.pendingNewLine = false;
  win.addText(char);
  return i;
};
/**
 * Handle decoding of multibyte character
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.multiByteCharacter = function (i, service) {
  var packetData = this.current708Packet.data;
  var firstByte = packetData[i + 1];
  var secondByte = packetData[i + 2];

  // Only decode when both bytes are printable text; handleText consumes the
  // pair starting at the first byte (hence the ++i) and advances i itself.
  if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
    i = this.handleText(++i, service, {
      isMultiByte: true
    });
  }

  return i;
};
/**
 * Parse and execute the CW# command.
 *
 * Set the current window.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.setCurrentWindow = function (i, service) {
  // The window number is in the low 3 bits of the command byte.
  var windowNum = this.current708Packet.data[i] & 0x07;
  service.setCurrentWindow(windowNum);
  return i;
};
/**
 * Parse and execute the DF# command.
 *
 * Define a window and set it as the current window.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.defineWindow = function (i, service) {
  var packetData = this.current708Packet.data;
  var b = packetData[i]; // low 3 bits of the command byte select the window

  var windowNum = b & 0x07;
  service.setCurrentWindow(windowNum);
  var win = service.currentWindow;

  // Parameter byte 1
  b = packetData[++i];
  win.visible = (b & 0x20) >> 5; // v

  win.rowLock = (b & 0x10) >> 4; // rl

  win.columnLock = (b & 0x08) >> 3; // cl

  win.priority = b & 0x07; // p

  // Parameter byte 2
  b = packetData[++i];
  win.relativePositioning = (b & 0x80) >> 7; // rp

  win.anchorVertical = b & 0x7f; // av

  // Parameter byte 3
  b = packetData[++i];
  win.anchorHorizontal = b; // ah

  // Parameter byte 4
  b = packetData[++i];
  win.anchorPoint = (b & 0xf0) >> 4; // ap

  win.rowCount = b & 0x0f; // rc

  // Parameter byte 5
  b = packetData[++i];
  win.columnCount = b & 0x3f; // cc

  // Parameter byte 6
  b = packetData[++i];
  win.windowStyle = (b & 0x38) >> 3; // ws

  win.penStyle = b & 0x07; // ps
  // The spec says there are (rowCount+1) "virtual rows"

  win.virtualRowCount = win.rowCount + 1;
  return i;
};
/**
 * Parse and execute the SWA command.
 *
 * Set attributes of the current window.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.setWindowAttributes = function (i, service) {
  var packetData = this.current708Packet.data;
  var b = packetData[i]; // command byte itself; overwritten immediately below

  var winAttr = service.currentWindow.winAttr;

  // Parameter byte 1
  b = packetData[++i];
  winAttr.fillOpacity = (b & 0xc0) >> 6; // fo

  winAttr.fillRed = (b & 0x30) >> 4; // fr

  winAttr.fillGreen = (b & 0x0c) >> 2; // fg

  winAttr.fillBlue = b & 0x03; // fb

  // Parameter byte 2
  b = packetData[++i];
  winAttr.borderType = (b & 0xc0) >> 6; // bt

  winAttr.borderRed = (b & 0x30) >> 4; // br

  winAttr.borderGreen = (b & 0x0c) >> 2; // bg

  winAttr.borderBlue = b & 0x03; // bb

  // Parameter byte 3
  b = packetData[++i];
  winAttr.borderType += (b & 0x80) >> 5; // bt (high bit extends the 2-bit border type)

  winAttr.wordWrap = (b & 0x40) >> 6; // ww

  winAttr.printDirection = (b & 0x30) >> 4; // pd

  winAttr.scrollDirection = (b & 0x0c) >> 2; // sd

  winAttr.justify = b & 0x03; // j

  // Parameter byte 4
  b = packetData[++i];
  winAttr.effectSpeed = (b & 0xf0) >> 4; // es

  winAttr.effectDirection = (b & 0x0c) >> 2; // ed

  winAttr.displayEffect = b & 0x03; // de

  return i;
};
/**
 * Gather text from all displayed windows and push a caption to output.
 *
 * @param {Integer} pts PTS value for the end of the caption
 * @param {Service} service The service object to be affected
 */
Cea708Stream.prototype.flushDisplayed = function (pts, service) {
  // TODO: Positioning not supported, displaying multiple windows will not necessarily
  // display text in the correct order, but sample files so far have not shown any issue.
  var displayedText = [];

  for (var winId = 0; winId < 8; winId++) {
    var win = service.windows[winId];

    if (win.visible && !win.isEmpty()) {
      displayedText.push(win.getText());
    }
  }

  service.endPts = pts;
  service.text = displayedText.join('\n\n');
  this.pushCaption(service);

  // The next caption starts where this one ended.
  service.startPts = pts;
};
/**
 * Push a caption to output if the caption contains text.
 *
 * @param {Service} service The service object to be affected
 */
Cea708Stream.prototype.pushCaption = function (service) {
  if (service.text === '') {
    return;
  }

  this.trigger('data', {
    startPts: service.startPts,
    endPts: service.endPts,
    text: service.text,
    stream: 'cc708_' + service.serviceNum
  });

  // Clear the emitted text and roll the start time forward.
  service.text = '';
  service.startPts = service.endPts;
};
/**
 * Parse and execute the DSW command.
 *
 * Set visible property of windows based on the parsed bitmask.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.displayWindows = function (i, service) {
  var b = this.current708Packet.data[++i];

  // Flush what's currently displayed before visibility changes.
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (b >> winId & 0x01) {
      service.windows[winId].visible = 1;
    }
  }

  return i;
};
/**
 * Parse and execute the HDW command.
 *
 * Set visible property of windows based on the parsed bitmask.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.hideWindows = function (i, service) {
  var b = this.current708Packet.data[++i];

  // Flush what's currently displayed before visibility changes.
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (b >> winId & 0x01) {
      service.windows[winId].visible = 0;
    }
  }

  return i;
};
/**
 * Parse and execute the TGW command.
 *
 * Toggle visible property of windows based on the parsed bitmask.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.toggleWindows = function (i, service) {
  var b = this.current708Packet.data[++i];

  // Flush what's currently displayed before visibility changes.
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (b >> winId & 0x01) {
      service.windows[winId].visible ^= 1;
    }
  }

  return i;
};
/**
 * Parse and execute the CLW command.
 *
 * Clear text of windows based on the parsed bitmask.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.clearWindows = function (i, service) {
  var b = this.current708Packet.data[++i];

  // Flush what's currently displayed before the text is cleared.
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (b >> winId & 0x01) {
      service.windows[winId].clearText();
    }
  }

  return i;
};
/**
 * Parse and execute the DLW command.
 *
 * Re-initialize windows based on the parsed bitmask.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.deleteWindows = function (i, service) {
  var b = this.current708Packet.data[++i];

  // Flush what's currently displayed before the windows are reset.
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (b >> winId & 0x01) {
      service.windows[winId].reset();
    }
  }

  return i;
};
/**
 * Parse and execute the SPA command.
 *
 * Set pen attributes of the current window.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */
Cea708Stream.prototype.setPenAttributes = function (i, service) {
  var packetData = this.current708Packet.data;
  var b = packetData[i]; // command byte itself; overwritten immediately below

  var penAttr = service.currentWindow.penAttr;

  // Parameter byte 1
  b = packetData[++i];
  penAttr.textTag = (b & 0xf0) >> 4; // tt

  penAttr.offset = (b & 0x0c) >> 2; // o

  penAttr.penSize = b & 0x03; // s

  // Parameter byte 2
  b = packetData[++i];
  penAttr.italics = (b & 0x80) >> 7; // i

  penAttr.underline = (b & 0x40) >> 6; // u

  penAttr.edgeType = (b & 0x38) >> 3; // et

  penAttr.fontStyle = b & 0x07; // fs

  return i;
};
/**
 * Parse and execute the SPC command.
 *
 * Set pen color of the current window.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */


Cea708Stream.prototype.setPenColor = function (i, service) {
  var packetData = this.current708Packet.data;
  var penColor = service.currentWindow.penColor;

  // Foreground byte. (A dead read of packetData[i] before the first
  // increment was removed; its value was always overwritten.)
  var b = packetData[++i];
  penColor.fgOpacity = (b & 0xc0) >> 6; // fo
  penColor.fgRed = (b & 0x30) >> 4; // fr
  penColor.fgGreen = (b & 0x0c) >> 2; // fg
  penColor.fgBlue = b & 0x03; // fb

  // Background byte
  b = packetData[++i];
  penColor.bgOpacity = (b & 0xc0) >> 6; // bo
  penColor.bgRed = (b & 0x30) >> 4; // br
  penColor.bgGreen = (b & 0x0c) >> 2; // bg
  penColor.bgBlue = b & 0x03; // bb

  // Edge color byte (no opacity field in this byte)
  b = packetData[++i];
  penColor.edgeRed = (b & 0x30) >> 4; // er
  penColor.edgeGreen = (b & 0x0c) >> 2; // eg
  penColor.edgeBlue = b & 0x03; // eb

  return i;
};
/**
 * Parse and execute the SPL command.
 *
 * Set pen location of the current window.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */


Cea708Stream.prototype.setPenLocation = function (i, service) {
  var packetData = this.current708Packet.data;
  var penLoc = service.currentWindow.penLoc;

  // Positioning isn't really supported at the moment, so this essentially
  // just inserts a linebreak.
  service.currentWindow.pendingNewLine = true;

  // Row byte. (A dead read of packetData[i] before the first increment
  // was removed; its value was always overwritten.)
  var b = packetData[++i];
  penLoc.row = b & 0x0f; // r

  b = packetData[++i];
  penLoc.column = b & 0x3f; // c

  return i;
};
/**
 * Execute the RST command.
 *
 * Reset service to a clean slate. Re-initialize.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Service} Re-initialized service
 */


Cea708Stream.prototype.reset = function (i, service) {
  var pts = this.getPts(i);
  // Emit anything currently on screen before wiping the service state.
  this.flushDisplayed(pts, service);
  return this.initService(service.serviceNum, i);
}; // This hash maps non-ASCII, special, and extended character codes to their
// proper Unicode equivalent. The first keys that are only a single byte
// are the non-standard ASCII characters, which simply map the CEA608 byte
// to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
// character codes, but have their MSB bitmasked with 0x03 so that a lookup
// can be performed regardless of the field and data channel on which the
// character code was received.
// CEA-608 byte (or 0x03-masked two-byte code) -> Unicode code point.
var CHARACTER_TRANSLATION = {
  0x2a: 0xe1, // á
  0x5c: 0xe9, // é
  0x5e: 0xed, // í
  0x5f: 0xf3, // ó
  0x60: 0xfa, // ú
  0x7b: 0xe7, // ç
  0x7c: 0xf7, // ÷
  0x7d: 0xd1, // Ñ
  0x7e: 0xf1, // ñ
  0x7f: 0x2588, // █
  0x0130: 0xae, // ®
  0x0131: 0xb0, // °
  0x0132: 0xbd, // ½
  0x0133: 0xbf, // ¿
  0x0134: 0x2122, // ™
  0x0135: 0xa2, // ¢
  0x0136: 0xa3, // £
  0x0137: 0x266a, // ♪
  0x0138: 0xe0, // à
  0x0139: 0xa0, // non-breaking space
  0x013a: 0xe8, // è
  0x013b: 0xe2, // â
  0x013c: 0xea, // ê
  0x013d: 0xee, // î
  0x013e: 0xf4, // ô
  0x013f: 0xfb, // û
  0x0220: 0xc1, // Á
  0x0221: 0xc9, // É
  0x0222: 0xd3, // Ó
  0x0223: 0xda, // Ú
  0x0224: 0xdc, // Ü
  0x0225: 0xfc, // ü
  0x0226: 0x2018, // ‘
  0x0227: 0xa1, // ¡
  0x0228: 0x2a, // *
  0x0229: 0x27, // '
  0x022a: 0x2014, // —
  0x022b: 0xa9, // ©
  0x022c: 0x2120, // ℠
  0x022d: 0x2022, // •
  0x022e: 0x201c, // “
  0x022f: 0x201d, // ”
  0x0230: 0xc0, // À
  0x0231: 0xc2, // Â
  0x0232: 0xc7, // Ç
  0x0233: 0xc8, // È
  0x0234: 0xca, // Ê
  0x0235: 0xcb, // Ë
  0x0236: 0xeb, // ë
  0x0237: 0xce, // Î
  0x0238: 0xcf, // Ï
  0x0239: 0xef, // ï
  0x023a: 0xd4, // Ô
  0x023b: 0xd9, // Ù
  0x023c: 0xf9, // ù
  0x023d: 0xdb, // Û
  0x023e: 0xab, // «
  0x023f: 0xbb, // »
  0x0320: 0xc3, // Ã
  0x0321: 0xe3, // ã
  0x0322: 0xcd, // Í
  0x0323: 0xcc, // Ì
  0x0324: 0xec, // ì
  0x0325: 0xd2, // Ò
  0x0326: 0xf2, // ò
  0x0327: 0xd5, // Õ
  0x0328: 0xf5, // õ
  0x0329: 0x7b, // {
  0x032a: 0x7d, // }
  0x032b: 0x5c, // \
  0x032c: 0x5e, // ^
  0x032d: 0x5f, // _
  0x032e: 0x7c, // |
  0x032f: 0x7e, // ~
  0x0330: 0xc4, // Ä
  0x0331: 0xe4, // ä
  0x0332: 0xd6, // Ö
  0x0333: 0xf6, // ö
  0x0334: 0xdf, // ß
  0x0335: 0xa5, // ¥
  0x0336: 0xa4, // ¤
  0x0337: 0x2502, // │
  0x0338: 0xc5, // Å
  0x0339: 0xe5, // å
  0x033a: 0xd8, // Ø
  0x033b: 0xf8, // ø
  0x033c: 0x250c, // ┌
  0x033d: 0x2510, // ┐
  0x033e: 0x2514, // └
  0x033f: 0x2518 // ┘
};
12920
// Translate a CEA-608 character code into a printable string. `null`
// (used to mark an absent second byte) maps to the empty string; codes
// present in CHARACTER_TRANSLATION are remapped, everything else is
// treated as a plain char code.
var getCharFromCode = function getCharFromCode(code) {
  if (code === null) {
    return '';
  }

  var translated = CHARACTER_TRANSLATION[code] || code;
  return String.fromCharCode(translated);
}; // the index of the last row in a CEA-608 display buffer


var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
// getting it through bit logic.

var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
// cells. The "bottom" row is the last element in the outer array.

// Build a fresh 15-row display buffer of empty strings.
var createDisplayBuffer = function createDisplayBuffer() {
  var rows = [];

  for (var r = 0; r <= BOTTOM_ROW; r++) {
    rows.push('');
  }

  return rows;
};
12947
// A CEA-608 caption decoder for one (field, dataChannel) pair. Incoming
// two-byte caption packets are interpreted as control codes, special or
// extended characters, PACs, or plain text; completed cues are emitted as
// 'data' events by flushDisplayed.
var Cea608Stream = function Cea608Stream(field, dataChannel) {
  Cea608Stream.prototype.init.call(this);
  this.field_ = field || 0;
  this.dataChannel_ = dataChannel || 0;
  // Derives CC1-CC4 from (field, dataChannel): (0,0)->CC1 ... (1,1)->CC4.
  this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
  this.setConstants();
  this.reset();

  // Decode a single two-byte caption packet ({ pts, ccData }).
  this.push = function (packet) {
    var data, swap, char0, char1, text; // remove the parity bits

    data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice

    if (data === this.lastControlCode_) {
      this.lastControlCode_ = null;
      return;
    } // Store control codes


    if ((data & 0xf000) === 0x1000) {
      this.lastControlCode_ = data;
    } else if (data !== this.PADDING_) {
      this.lastControlCode_ = null;
    }

    char0 = data >>> 8;
    char1 = data & 0xff;

    if (data === this.PADDING_) {
      return;
    } else if (data === this.RESUME_CAPTION_LOADING_) {
      this.mode_ = 'popOn';
    } else if (data === this.END_OF_CAPTION_) {
      // If an EOC is received while in paint-on mode, the displayed caption
      // text should be swapped to non-displayed memory as if it was a pop-on
      // caption. Because of that, we should explicitly switch back to pop-on
      // mode
      this.mode_ = 'popOn';
      this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now

      this.flushDisplayed(packet.pts); // flip memory

      swap = this.displayed_;
      this.displayed_ = this.nonDisplayed_;
      this.nonDisplayed_ = swap; // start measuring the time to display the caption

      this.startPts_ = packet.pts;
    } else if (data === this.ROLL_UP_2_ROWS_) {
      this.rollUpRows_ = 2;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_3_ROWS_) {
      this.rollUpRows_ = 3;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_4_ROWS_) {
      this.rollUpRows_ = 4;
      this.setRollUp(packet.pts);
    } else if (data === this.CARRIAGE_RETURN_) {
      this.clearFormatting(packet.pts);
      this.flushDisplayed(packet.pts);
      this.shiftRowsUp_();
      this.startPts_ = packet.pts;
    } else if (data === this.BACKSPACE_) {
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      }
    } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
      this.flushDisplayed(packet.pts);
      this.displayed_ = createDisplayBuffer();
    } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
      this.nonDisplayed_ = createDisplayBuffer();
    } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
      if (this.mode_ !== 'paintOn') {
        // NOTE: This should be removed when proper caption positioning is
        // implemented
        this.flushDisplayed(packet.pts);
        this.displayed_ = createDisplayBuffer();
      }

      this.mode_ = 'paintOn';
      this.startPts_ = packet.pts; // Append special characters to caption text
    } else if (this.isSpecialCharacter(char0, char1)) {
      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++; // Append extended characters to caption text
    } else if (this.isExtCharacter(char0, char1)) {
      // Extended characters always follow their "non-extended" equivalents.
      // IE if a "è" is desired, you'll always receive "eè"; non-compliant
      // decoders are supposed to drop the "è", while compliant decoders
      // backspace the "e" and insert "è".
      // Delete the previous character
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      } // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.


      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++; // Process mid-row codes
    } else if (this.isMidRowCode(char0, char1)) {
      // Attributes are not additive, so clear all formatting
      this.clearFormatting(packet.pts); // According to the standard, mid-row codes
      // should be replaced with spaces, so add one now

      this[this.mode_](packet.pts, ' ');
      this.column_++;

      if ((char1 & 0xe) === 0xe) {
        this.addFormatting(packet.pts, ['i']);
      }

      if ((char1 & 0x1) === 0x1) {
        this.addFormatting(packet.pts, ['u']);
      } // Detect offset control codes and adjust cursor

    } else if (this.isOffsetControlCode(char0, char1)) {
      // Cursor position is set by indent PAC (see below) in 4-column
      // increments, with an additional offset code of 1-3 to reach any
      // of the 32 columns specified by CEA-608. So all we need to do
      // here is increment the column cursor by the given offset.
      this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
    } else if (this.isPAC(char0, char1)) {
      // There's no logic for PAC -> row mapping, so we have to just
      // find the row code in an array and use its index :(
      var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode

      if (this.mode_ === 'rollUp') {
        // This implies that the base row is incorrectly set.
        // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
        // of roll-up rows set.
        if (row - this.rollUpRows_ + 1 < 0) {
          row = this.rollUpRows_ - 1;
        }

        this.setRollUp(packet.pts, row);
      }

      if (row !== this.row_) {
        // formatting is only persistent for current row
        this.clearFormatting(packet.pts);
        this.row_ = row;
      } // All PACs can apply underline, so detect and apply
      // (All odd-numbered second bytes set underline)


      if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
        this.addFormatting(packet.pts, ['u']);
      }

      if ((data & 0x10) === 0x10) {
        // We've got an indent level code. Each successive even number
        // increments the column cursor by 4, so we can get the desired
        // column position by bit-shifting to the right (to get n/2)
        // and multiplying by 4.
        this.column_ = ((data & 0xe) >> 1) * 4;
      }

      if (this.isColorPAC(char1)) {
        // it's a color code, though we only support white, which
        // can be either normal or italicized. white italics can be
        // either 0x4e or 0x6e depending on the row, so we just
        // bitwise-and with 0xe to see if italics should be turned on
        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }
      } // We have a normal character in char0, and possibly one in char1

    } else if (this.isNormalChar(char0)) {
      if (char1 === 0x00) {
        char1 = null;
      }

      text = getCharFromCode(char0);
      text += getCharFromCode(char1);
      this[this.mode_](packet.pts, text);
      this.column_ += text.length;
    } // finish data processing

  };
};
13140
Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
// display buffer

// Emit a 'data' event containing the trimmed, newline-joined contents of
// the display buffer, spanning startPts_ to the supplied pts. Empty
// buffers emit nothing.
Cea608Stream.prototype.flushDisplayed = function (pts) {
  var content = this.displayed_ // remove spaces from the start and end of the string
  .map(function (row, index) {
    try {
      return row.trim();
    } catch (e) {
      // Ordinarily, this shouldn't happen. However, caption
      // parsing errors should not throw exceptions and
      // break playback.
      this.trigger('log', {
        level: 'warn',
        message: 'Skipping a malformed 608 caption at index ' + index + '.'
      });
      return '';
    }
  }, this) // combine all text rows to display in one cue
  .join('\n') // and remove blank rows from the start and end, but not the middle
  .replace(/^\n+|\n+$/g, '');

  if (content.length) {
    this.trigger('data', {
      startPts: this.startPts_,
      endPts: pts,
      text: content,
      stream: this.name_
    });
  }
};
/**
 * Zero out the data, used for startup and on seek
 */


Cea608Stream.prototype.reset = function () {
  this.mode_ = 'popOn';

  // Roll-up state: rows with an index below topRow_ are cleared as
  // captions shift upward.
  this.topRow_ = 0;
  this.rollUpRows_ = 2;

  // Timing and display memories
  this.startPts_ = 0;
  this.displayed_ = createDisplayBuffer();
  this.nonDisplayed_ = createDisplayBuffer();
  this.lastControlCode_ = null;

  // Cursor position, tracked for proper line-breaking and spacing
  this.row_ = BOTTOM_ROW;
  this.column_ = 0;

  // Currently-applied formatting tags (e.g. 'i', 'u')
  this.formatting_ = [];
};
/**
 * Sets up control code and related constants for this instance
 */


Cea608Stream.prototype.setConstants = function () {
  // The following attributes have these uses:
  // ext_ : char0 for mid-row codes, and the base for extended
  // chars (ext_+0, ext_+1, and ext_+2 are char0s for
  // extended codes)
  // control_: char0 for control codes, except byte-shifted to the
  // left so that we can do this.control_ | CONTROL_CODE
  // offset_: char0 for tab offset codes
  //
  // It's also worth noting that control codes, and _only_ control codes,
  // differ between field 1 and field2. Field 2 control codes are always
  // their field 1 value plus 1. That's why there's the "| field" on the
  // control value.
  if (this.dataChannel_ === 0) {
    this.BASE_ = 0x10;
    this.EXT_ = 0x11;
    this.CONTROL_ = (0x14 | this.field_) << 8;
    this.OFFSET_ = 0x17;
  } else if (this.dataChannel_ === 1) {
    this.BASE_ = 0x18;
    this.EXT_ = 0x19;
    this.CONTROL_ = (0x1c | this.field_) << 8;
    this.OFFSET_ = 0x1f;
  } // Constants for the LSByte command codes recognized by Cea608Stream. This
  // list is not exhaustive. For a more comprehensive listing and semantics see
  // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
  // Padding


  this.PADDING_ = 0x0000; // Pop-on Mode

  this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
  this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode

  this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
  this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
  this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
  this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode

  this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure

  this.BACKSPACE_ = this.CONTROL_ | 0x21;
  this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
  this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
};
/**
 * Detects if the 2-byte packet data is a special character
 *
 * Special characters have a second byte in the range 0x30 to 0x3f,
 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
 * data channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are a special character
 */


Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
  var secondByteInRange = char1 >= 0x30 && char1 <= 0x3f;
  return secondByteInRange && char0 === this.EXT_;
};
/**
 * Detects if the 2-byte packet data is an extended character
 *
 * Extended characters have a second byte in the range 0x20 to 0x3f,
 * with the first byte being 0x12 or 0x13 (for data channel 1) or
 * 0x1a or 0x1b (for data channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are an extended character
 */


Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
  var firstByteMatches = char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2;
  var secondByteInRange = char1 >= 0x20 && char1 <= 0x3f;
  return firstByteMatches && secondByteInRange;
};
/**
 * Detects if the 2-byte packet is a mid-row code
 *
 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
 * channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are a mid-row code
 */


Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
  var secondByteInRange = char1 >= 0x20 && char1 <= 0x2f;
  return secondByteInRange && char0 === this.EXT_;
};
/**
 * Detects if the 2-byte packet is an offset control code
 *
 * Offset control codes have a second byte in the range 0x21 to 0x23,
 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
 * data channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are an offset control code
 */


Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
  var offsetInRange = char1 >= 0x21 && char1 <= 0x23;
  return offsetInRange && char0 === this.OFFSET_;
};
/**
 * Detects if the 2-byte packet is a Preamble Address Code
 *
 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
 * range 0x40 to 0x7f.
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are a PAC
 */


Cea608Stream.prototype.isPAC = function (char0, char1) {
  var firstByteInRange = char0 >= this.BASE_ && char0 < this.BASE_ + 8;
  var secondByteInRange = char1 >= 0x40 && char1 <= 0x7f;
  return firstByteInRange && secondByteInRange;
};
/**
 * Detects if a packet's second byte is in the range of a PAC color code
 *
 * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
 * 0x60 to 0x7f.
 *
 * NOTE(review): the upper bound actually checked is 0x7f; an earlier
 * version of this comment said 0x6f, which did not match the code.
 *
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the byte is a color PAC
 */


Cea608Stream.prototype.isColorPAC = function (char1) {
  return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
};
/**
 * Detects if a single byte is in the range of a normal character
 *
 * Normal text bytes are in the range 0x20 to 0x7f.
 *
 * @param {Integer} char The byte
 * @return {Boolean} Whether the byte is a normal character
 */


Cea608Stream.prototype.isNormalChar = function (char) {
  var isPrintableByte = char >= 0x20 && char <= 0x7f;
  return isPrintableByte;
};
/**
 * Configures roll-up
 *
 * @param {Integer} pts Current PTS
 * @param {Integer} newBaseRow Used by PACs to slide the current window to
 *                             a new position
 */


Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
  // Reset the base row to the bottom row when switching modes
  if (this.mode_ !== 'rollUp') {
    this.row_ = BOTTOM_ROW;
    this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up

    this.flushDisplayed(pts);
    this.nonDisplayed_ = createDisplayBuffer();
    this.displayed_ = createDisplayBuffer();
  }

  if (newBaseRow !== undefined && newBaseRow !== this.row_) {
    // move currently displayed captions (up or down) to the new base row
    for (var i = 0; i < this.rollUpRows_; i++) {
      this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
      this.displayed_[this.row_ - i] = '';
    }
  }

  if (newBaseRow === undefined) {
    newBaseRow = this.row_;
  }

  // topRow_ is the highest row that may still hold visible roll-up text.
  this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
}; // Adds the opening HTML tag for the passed character to the caption text,
// and keeps track of it for later closing
13388
// Append opening tags (e.g. <i>, <u>) for each format to the caption text
// and remember them so clearFormatting can close them later.
Cea608Stream.prototype.addFormatting = function (pts, format) {
  this.formatting_ = this.formatting_.concat(format);

  var openTags = '';

  for (var k = 0; k < format.length; k++) {
    openTags += '<' + format[k] + '>';
  }

  this[this.mode_](pts, openTags);
}; // Adds HTML closing tags for current formatting to caption text and
// clears remembered formatting
// Close any open formatting tags in reverse order of opening, then forget
// them. No-op when nothing is open.
Cea608Stream.prototype.clearFormatting = function (pts) {
  if (!this.formatting_.length) {
    return;
  }

  var closeTags = '';

  for (var k = this.formatting_.length - 1; k >= 0; k--) {
    closeTags += '</' + this.formatting_[k] + '>';
  }

  this.formatting_ = [];
  this[this.mode_](pts, closeTags);
}; // Mode Implementations
// Pop-on mode: text accumulates off-screen in the non-displayed buffer
// until an EOC command flips the memories.
Cea608Stream.prototype.popOn = function (pts, text) {
  this.nonDisplayed_[this.row_] += text;
};
13418
// Roll-up mode: text is appended directly to the visible base row.
Cea608Stream.prototype.rollUp = function (pts, text) {
  this.displayed_[this.row_] += text;
};
13424
// Scroll the roll-up window: wipe rows outside [topRow_, row_], move each
// visible row up by one, then clear the base row for incoming text.
Cea608Stream.prototype.shiftRowsUp_ = function () {
  var i; // clear out inactive rows

  for (i = 0; i < this.topRow_; i++) {
    this.displayed_[i] = '';
  }

  for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
    this.displayed_[i] = '';
  } // shift displayed rows up


  for (i = this.topRow_; i < this.row_; i++) {
    this.displayed_[i] = this.displayed_[i + 1];
  } // clear out the bottom row


  this.displayed_[this.row_] = '';
};
13444
// Paint-on mode: text is written straight into the display buffer.
Cea608Stream.prototype.paintOn = function (pts, text) {
  this.displayed_[this.row_] += text;
}; // exports
13451
// Public export surface for the CEA-608/708 caption parsing pipeline.
var captionStream = {
  CaptionStream: CaptionStream$1,
  Cea608Stream: Cea608Stream,
  Cea708Stream: Cea708Stream
};
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 */

// MP2T stream_type values recognized by the demuxer.
var streamTypes = {
  H264_STREAM_TYPE: 0x1B,
  ADTS_STREAM_TYPE: 0x0F,
  METADATA_STREAM_TYPE: 0x15
};
var MAX_TS = 8589934592; // 2^33: MPEG-TS timestamps wrap at 33 bits
var RO_THRESH = 4294967296; // 2^32: a gap larger than this is treated as a rollover
var TYPE_SHARED = 'shared';

// Shift `value` by whole 33-bit rollover periods until it lands within
// RO_THRESH of `reference`, and return the adjusted timestamp.
var handleRollover$1 = function handleRollover(value, reference) {
  // If the current timestamp value is greater than our reference timestamp and we detect a
  // timestamp rollover, this means the roll over is happening in the opposite direction.
  // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
  // point will be set to a small number, e.g. 1. The user then seeks backwards over the
  // rollover point. In loading this segment, the timestamp values will be very large,
  // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
  // the time stamp to be `value - 2^33`.
  var step = value > reference ? -MAX_TS : MAX_TS;

  // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
  // cause an incorrect adjustment.
  while (Math.abs(reference - value) > RO_THRESH) {
    value += step;
  }

  return value;
};
13495
// Normalizes 33-bit MPEG-TS timestamps that have rolled over, for packets
// matching `type`. The rollover anchor (referenceDTS) and last seen DTS
// live in the closure and persist across pushes until flush/reset.
var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
  var lastDTS, referenceDTS;
  TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
  // video and audio. We could use `undefined` here, but having a string
  // makes debugging a little clearer.

  this.type_ = type || TYPE_SHARED;

  // Adjust data.dts/data.pts for rollover relative to the first DTS seen,
  // then re-emit the packet as a 'data' event.
  this.push = function (data) {
    // Any "shared" rollover streams will accept _all_ data. Otherwise,
    // streams will only accept data that matches their type.
    if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
      return;
    }

    if (referenceDTS === undefined) {
      referenceDTS = data.dts;
    }

    data.dts = handleRollover$1(data.dts, referenceDTS);
    data.pts = handleRollover$1(data.pts, referenceDTS);
    lastDTS = data.dts;
    this.trigger('data', data);
  };

  // Carry the last DTS forward as the next reference point.
  this.flush = function () {
    referenceDTS = lastDTS;
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  // Forget the anchor so the next push re-establishes the reference.
  this.discontinuity = function () {
    referenceDTS = void 0;
    lastDTS = void 0;
  };

  this.reset = function () {
    this.discontinuity();
    this.trigger('reset');
  };
};

TimestampRolloverStream$1.prototype = new stream();
// Module export shape mirrors mux.js: the stream class plus the bare helper.
var timestampRolloverStream = {
  TimestampRolloverStream: TimestampRolloverStream$1,
  handleRollover: handleRollover$1
};
13547
// Percent-encode the byte range [start, end) so it can be decoded by the
// URI helpers below.
var percentEncode$1 = function percentEncode(bytes, start, end) {
  var encoded = '';

  for (var idx = start; idx < end; idx++) {
    encoded += '%' + ('00' + bytes[idx].toString(16)).slice(-2);
  }

  return encoded;
};

// return the string representation of the specified byte range,
// interpreted as UTF-8.
var parseUtf8 = function parseUtf8(bytes, start, end) {
  return decodeURIComponent(percentEncode$1(bytes, start, end));
};

// return the string representation of the specified byte range,
// interpreted as ISO-8859-1.
var parseIso88591$1 = function parseIso88591(bytes, start, end) {
  return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
};

// Decode a 28-bit ID3 "syncsafe" integer (the MSB of each byte is dropped).
var parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
  return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
};

// Frame-specific ID3 tag parsers, keyed by frame ID. Each mutates the tag
// object in place.
var tagParsers = {
  TXXX: function TXXX(tag) {
    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings
      return;
    }

    for (var idx = 1; idx < tag.data.length; idx++) {
      if (tag.data[idx] === 0) {
        // parse the text fields
        tag.description = parseUtf8(tag.data, 1, idx);
        // do not include the null terminator in the tag value
        tag.value = parseUtf8(tag.data, idx + 1, tag.data.length).replace(/\0*$/, '');
        break;
      }
    }

    tag.data = tag.value;
  },
  WXXX: function WXXX(tag) {
    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings
      return;
    }

    for (var idx = 1; idx < tag.data.length; idx++) {
      if (tag.data[idx] === 0) {
        // parse the description and URL fields
        tag.description = parseUtf8(tag.data, 1, idx);
        tag.url = parseUtf8(tag.data, idx + 1, tag.data.length);
        break;
      }
    }
  },
  PRIV: function PRIV(tag) {
    var idx;

    for (idx = 0; idx < tag.data.length; idx++) {
      if (tag.data[idx] === 0) {
        // parse the description and URL fields
        tag.owner = parseIso88591$1(tag.data, 0, idx);
        break;
      }
    }

    tag.privateData = tag.data.subarray(idx + 1);
    tag.data = tag.privateData;
  }
};

var _MetadataStream;
13625
/**
 * Parses ID3 timed metadata out of 'timed-metadata' PES packets and emits
 * each complete tag as a 'data' event with its decoded frames attached.
 * Also computes the in-band metadata track dispatch type from the
 * program-level descriptor.
 * @param {Object} [options]
 * @param {Uint8Array} [options.descriptor] bytes of the program-level
 * descriptor field in MP2T, appended (hex-encoded) to the dispatch type
 */
_MetadataStream = function MetadataStream(options) {
  var settings = {
    // the bytes of the program-level descriptor field in MP2T
    // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
    // program element descriptors"
    descriptor: options && options.descriptor
  },
      // the total size in bytes of the ID3 tag being parsed
      tagSize = 0,
      // tag data that is not complete enough to be parsed
      buffer = [],
      // the total number of bytes currently in the buffer
      bufferSize = 0,
      i;

  _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
  // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track


  this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);

  if (settings.descriptor) {
    for (i = 0; i < settings.descriptor.length; i++) {
      // each descriptor byte is appended as two lowercase hex digits
      this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
    }
  }

  /**
   * Buffer timed-metadata PES payloads until a whole ID3 tag has arrived,
   * then parse its frames and emit the tag as a 'data' event.
   * @param {Object} chunk a parsed PES packet; ignored unless
   * chunk.type === 'timed-metadata'
   */
  this.push = function (chunk) {
    var tag, frameStart, frameSize, frame, i, frameHeader;

    if (chunk.type !== 'timed-metadata') {
      return;
    } // if data_alignment_indicator is set in the PES header,
    // we must have the start of a new ID3 tag. Assume anything
    // remaining in the buffer was malformed and throw it out


    if (chunk.dataAlignmentIndicator) {
      bufferSize = 0;
      buffer.length = 0;
    } // ignore events that don't look like ID3 data
    // (a fresh buffer must start with the "ID3" magic bytes)


    if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
      this.trigger('log', {
        level: 'warn',
        message: 'Skipping unrecognized metadata packet'
      });
      return;
    } // add this chunk to the data we've collected so far


    buffer.push(chunk);
    bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header

    if (buffer.length === 1) {
      // the frame size is transmitted as a 28-bit integer in the
      // last four bytes of the ID3 header.
      // The most significant bit of each byte is dropped and the
      // results concatenated to recover the actual value.
      tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
      // convenient for our comparisons to include it

      tagSize += 10;
    } // if the entire frame has not arrived, wait for more data


    if (bufferSize < tagSize) {
      return;
    } // collect the entire frame so it can be parsed


    tag = {
      data: new Uint8Array(tagSize),
      frames: [],
      pts: buffer[0].pts,
      dts: buffer[0].dts
    };

    // drain buffered chunks into tag.data; any bytes beyond tagSize in
    // the final chunk are intentionally discarded along with it
    for (i = 0; i < tagSize;) {
      tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
      i += buffer[0].data.byteLength;
      bufferSize -= buffer[0].data.byteLength;
      buffer.shift();
    } // find the start of the first frame and the end of the tag


    frameStart = 10;

    // bit 0x40 of byte 5 signals an ID3v2 extended header
    if (tag.data[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field

      frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end

      tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
    } // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview


    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));

      if (frameSize < 1) {
        this.trigger('log', {
          level: 'warn',
          message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
        });
        return;
      }

      // the four-character frame id, e.g. 'TXXX', 'WXXX', 'PRIV'
      frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
      frame = {
        id: frameHeader,
        data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
      };
      frame.key = frame.id;

      // delegate frame-type-specific decoding (TXXX/WXXX/PRIV) when available
      if (tagParsers[frame.id]) {
        tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
        // time for raw AAC data

        if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
          // reassemble the 33-bit MPEG-2 timestamp: 31 high bits via
          // bitwise ops, then *4 and add the low 2 bits to stay within
          // JavaScript's safe integer math
          var d = frame.data,
              size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
          size *= 4;
          size += d[7] & 0x03;
          frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
          // on the value of this frame
          // we couldn't have known the appropriate pts and dts before
          // parsing this ID3 tag so set those values now

          if (tag.pts === undefined && tag.dts === undefined) {
            tag.pts = frame.timeStamp;
            tag.dts = frame.timeStamp;
          }

          this.trigger('timestamp', frame);
        }
      }

      tag.frames.push(frame);
      frameStart += 10; // advance past the frame header

      frameStart += frameSize; // advance past the frame body
    } while (frameStart < tagSize);

    this.trigger('data', tag);
  };
};
13777
// inherit the shared stream base (event emitter / pipe plumbing)
_MetadataStream.prototype = new stream();
var metadataStream = _MetadataStream;
var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types

var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants


var MP2T_PACKET_LENGTH$1 = 188,
    // bytes
    // every MP2T packet begins with this sync byte (ASCII 'G')
    SYNC_BYTE$1 = 0x47;
/**
 * Splits an incoming stream of binary data into MPEG-2 Transport
 * Stream packets.
 */
13792
_TransportPacketStream = function TransportPacketStream() {
  // bytes carried over from a previous push that did not form a whole packet
  var leftover = new Uint8Array(MP2T_PACKET_LENGTH$1),
      leftoverBytes = 0;

  _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.

  /**
   * Split a stream of data into M2TS packets
   **/
  this.push = function (bytes) {
    var data, start, end;

    // splice any bytes left over from the previous segment onto the
    // front of the bytes that were just pushed in
    if (leftoverBytes) {
      data = new Uint8Array(leftoverBytes + bytes.byteLength);
      data.set(leftover.subarray(0, leftoverBytes));
      data.set(bytes, leftoverBytes);
      leftoverBytes = 0;
    } else {
      data = bytes;
    }

    start = 0;
    end = MP2T_PACKET_LENGTH$1;

    // scan while at least one whole packet's worth of data remains
    while (end < data.byteLength) {
      if (data[start] !== SYNC_BYTE$1 || data[end] !== SYNC_BYTE$1) {
        // de-synchronized: step forward one byte at a time until a pair
        // of sync bytes exactly one packet apart is found
        start++;
        end++;
        continue;
      }

      // a packet was found; emit it and jump a whole packet forward
      this.trigger('data', data.subarray(start, end));
      start += MP2T_PACKET_LENGTH$1;
      end += MP2T_PACKET_LENGTH$1;
    }

    // keep any trailing partial packet around — it may be the start of a
    // packet that continues in the next segment
    if (start < data.byteLength) {
      leftover.set(data.subarray(start), 0);
      leftoverBytes = data.byteLength - start;
    }
  };
  /**
   * Passes identified M2TS packets to the TransportParseStream to be parsed
   **/
  this.flush = function () {
    // emit the buffer only when it holds exactly one complete packet;
    // otherwise retain the bytes because they may matter for decoding
    // the next segment
    if (leftoverBytes === MP2T_PACKET_LENGTH$1 && leftover[0] === SYNC_BYTE$1) {
      this.trigger('data', leftover);
      leftoverBytes = 0;
    }

    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  this.reset = function () {
    leftoverBytes = 0;
    this.trigger('reset');
  };
};
13873
// inherit the shared stream base (event emitter / pipe plumbing)
_TransportPacketStream.prototype = new stream();
/**
 * Accepts an MP2T TransportPacketStream and emits data events with parsed
 * forms of the individual transport stream packets.
 */
13879
_TransportParseStream = function TransportParseStream() {
  var parsePsi, parsePat, parsePmt, self;

  _TransportParseStream.prototype.init.call(this);

  self = this;
  // PES packets seen before the first PMT; replayed once a PMT arrives
  this.packetsWaitingForPmt = [];
  // undefined until the first PMT has been parsed
  this.programMapTable = undefined;

  /**
   * Dispatch a program-specific-information payload to the PAT or PMT
   * parser, skipping the pointer field when a new section starts here.
   */
  parsePsi = function parsePsi(payload, psi) {
    var offset = 0; // PSI packets may be split into multiple sections and those
    // sections may be split into multiple packets. If a PSI
    // section starts in this packet, the payload_unit_start_indicator
    // will be true and the first byte of the payload will indicate
    // the offset from the current position to the start of the
    // section.

    if (psi.payloadUnitStartIndicator) {
      offset += payload[offset] + 1;
    }

    if (psi.type === 'pat') {
      parsePat(payload.subarray(offset), psi);
    } else {
      parsePmt(payload.subarray(offset), psi);
    }
  };

  /**
   * Extract the PMT PID from a Program Association Table.
   * NOTE(review): only the first PMT entry is parsed — multi-program
   * streams beyond the first program are not handled here.
   */
  parsePat = function parsePat(payload, pat) {
    pat.section_number = payload[7]; // eslint-disable-line camelcase

    pat.last_section_number = payload[8]; // eslint-disable-line camelcase
    // skip the PSI header and parse the first PMT entry

    self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
    pat.pmtPid = self.pmtPid;
  };
  /**
   * Parse out the relevant fields of a Program Map Table (PMT).
   * @param payload {Uint8Array} the PMT-specific portion of an MP2T
   * packet. The first byte in this array should be the table_id
   * field.
   * @param pmt {object} the object that should be decorated with
   * fields parsed from the PMT.
   */


  parsePmt = function parsePmt(payload, pmt) {
    var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
    // take effect. We don't believe this should ever be the case
    // for HLS but we'll ignore "forward" PMT declarations if we see
    // them. Future PMT declarations have the current_next_indicator
    // set to zero.

    if (!(payload[5] & 0x01)) {
      return;
    } // overwrite any existing program map table


    self.programMapTable = {
      video: null,
      audio: null,
      'timed-metadata': {}
    }; // the mapping table ends at the end of the current section

    sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
    tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
    // long the program info descriptors are

    programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table

    offset = 12 + programInfoLength;

    while (offset < tableEnd) {
      var streamType = payload[offset];
      var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
      // TODO: should this be done for metadata too? for now maintain behavior of
      // multiple metadata streams

      if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
        self.programMapTable.video = pid;
      } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
        self.programMapTable.audio = pid;
      } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
        // map pid to stream type for metadata streams
        self.programMapTable['timed-metadata'][pid] = streamType;
      } // move to the next table entry
      // skip past the elementary stream descriptors, if present


      offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
    } // record the map on the packet as well


    pmt.programMapTable = self.programMapTable;
  };
  /**
   * Deliver a new MP2T packet to the next stream in the pipeline.
   */


  this.push = function (packet) {
    var result = {},
        offset = 4;
    result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]

    result.pid = packet[1] & 0x1f;
    result.pid <<= 8;
    result.pid |= packet[2]; // if an adaption field is present, its length is specified by the
    // fifth byte of the TS packet header. The adaptation field is
    // used to add stuffing to PES packets that don't fill a complete
    // TS packet, and to specify some forms of timing and control data
    // that we do not currently use.

    if ((packet[3] & 0x30) >>> 4 > 0x01) {
      offset += packet[offset] + 1;
    } // parse the rest of the packet based on the type


    if (result.pid === 0) {
      // PID 0 is always the Program Association Table
      result.type = 'pat';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);
    } else if (result.pid === this.pmtPid) {
      result.type = 'pmt';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now

      while (this.packetsWaitingForPmt.length) {
        this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
      }
    } else if (this.programMapTable === undefined) {
      // When we have not seen a PMT yet, defer further processing of
      // PES packets until one has been parsed
      this.packetsWaitingForPmt.push([packet, offset, result]);
    } else {
      this.processPes_(packet, offset, result);
    }
  };

  /**
   * Annotate a deferred or live PES packet with its stream type (looked
   * up in the program map table) and emit it as a 'data' event.
   */
  this.processPes_ = function (packet, offset, result) {
    // set the appropriate stream type
    if (result.pid === this.programMapTable.video) {
      result.streamType = streamTypes.H264_STREAM_TYPE;
    } else if (result.pid === this.programMapTable.audio) {
      result.streamType = streamTypes.ADTS_STREAM_TYPE;
    } else {
      // if not video or audio, it is timed-metadata or unknown
      // if unknown, streamType will be undefined
      result.streamType = this.programMapTable['timed-metadata'][result.pid];
    }

    result.type = 'pes';
    result.data = packet.subarray(offset);
    this.trigger('data', result);
  };
};
14037
// inherit the shared stream base (event emitter / pipe plumbing)
_TransportParseStream.prototype = new stream();
_TransportParseStream.STREAM_TYPES = {
  h264: 0x1b,
  adts: 0x0f
};
/**
 * Reconstitutes program elementary stream (PES) packets from parsed
 * transport stream packets. That is, if you pipe an
 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
 * events will be events which capture the bytes for individual PES
 * packets plus relevant metadata that has been extracted from the
 * container.
 */
14051
_ElementaryStream = function ElementaryStream() {
  var self = this,
      // true once a PMT has been emitted for the current segment
      segmentHadPmt = false,
      // PES packet fragments
      video = {
    data: [],
    size: 0
  },
      audio = {
    data: [],
    size: 0
  },
      timedMetadata = {
    data: [],
    size: 0
  },
      programMapTable,
      /**
       * Decode the PES header of an assembled payload, decorating `pes`
       * with packetLength, dataAlignmentIndicator, pts/dts (when present)
       * and the payload bytes in pes.data.
       */
      parsePes = function parsePes(payload, pes) {
    var ptsDtsFlags;
    var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array

    pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has ts packets
    // that are frame data that is continuing from the previous fragment. This
    // is to check that the pes data is the start of a new pes payload

    if (startPrefix !== 1) {
      return;
    } // get the packet length, this will be 0 for video


    pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packets starts a new keyframe

    pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
    // and a DTS value. Determine what combination of values is
    // available to work with.

    ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
    // performs all bitwise operations on 32-bit integers but javascript
    // supports a much greater range (52-bits) of integer using standard
    // mathematical operations.
    // We construct a 31-bit value using bitwise operators over the 31
    // most significant bits and then multiply by 4 (equal to a left-shift
    // of 2) before we add the final 2 least significant bits of the
    // timestamp (equal to an OR.)

    if (ptsDtsFlags & 0xC0) {
      // the PTS and DTS are not written out directly. For information
      // on how they are encoded, see
      // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
      pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
      pes.pts *= 4; // Left shift by 2

      pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs

      pes.dts = pes.pts;

      // bit 0x40 means a distinct DTS follows the PTS
      if (ptsDtsFlags & 0x40) {
        pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
        pes.dts *= 4; // Left shift by 2

        pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
      }
    } // the data section starts immediately after the PES header.
    // pes_header_data_length specifies the number of header bytes
    // that follow the last byte of the field.


    pes.data = payload.subarray(9 + payload[8]);
  },

  /**
   * Pass completely parsed PES packets to the next stream in the pipeline
   **/
  flushStream = function flushStream(stream, type, forceFlush) {
    var packetData = new Uint8Array(stream.size),
        event = {
      type: type
    },
        i = 0,
        offset = 0,
        packetFlushable = false,
        fragment; // do nothing if there is not enough buffered data for a complete
    // PES header

    if (!stream.data.length || stream.size < 9) {
      return;
    }

    event.trackId = stream.data[0].pid; // reassemble the packet

    for (i = 0; i < stream.data.length; i++) {
      fragment = stream.data[i];
      packetData.set(fragment.data, offset);
      offset += fragment.data.byteLength;
    } // parse assembled packet's PES header


    parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
    // check that there is enough stream data to fill the packet

    packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right

    if (forceFlush || packetFlushable) {
      stream.size = 0;
      stream.data.length = 0;
    } // only emit packets that are complete. this is to avoid assembling
    // incomplete PES packets due to poor segmentation


    if (packetFlushable) {
      self.trigger('data', event);
    }
  };

  _ElementaryStream.prototype.init.call(this);
  /**
   * Identifies M2TS packet types and parses PES packets using metadata
   * parsed from the PMT
   **/


  this.push = function (data) {
    // dispatch on data.type via an object literal of handlers
    ({
      pat: function pat() {// we have to wait for the PMT to arrive as well before we
        // have any meaningful metadata
      },
      pes: function pes() {
        var stream, streamType;

        switch (data.streamType) {
          case streamTypes.H264_STREAM_TYPE:
            stream = video;
            streamType = 'video';
            break;

          case streamTypes.ADTS_STREAM_TYPE:
            stream = audio;
            streamType = 'audio';
            break;

          case streamTypes.METADATA_STREAM_TYPE:
            stream = timedMetadata;
            streamType = 'timed-metadata';
            break;

          default:
            // ignore unknown stream types
            return;
        } // if a new packet is starting, we can flush the completed
        // packet


        if (data.payloadUnitStartIndicator) {
          flushStream(stream, streamType, true);
        } // buffer this fragment until we are sure we've received the
        // complete payload


        stream.data.push(data);
        stream.size += data.data.byteLength;
      },
      pmt: function pmt() {
        var event = {
          type: 'metadata',
          tracks: []
        };
        programMapTable = data.programMapTable; // translate audio and video streams to tracks

        if (programMapTable.video !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.video,
            codec: 'avc',
            type: 'video'
          });
        }

        if (programMapTable.audio !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.audio,
            codec: 'adts',
            type: 'audio'
          });
        }

        segmentHadPmt = true;
        self.trigger('data', event);
      }
    })[data.type]();
  };

  // NOTE(review): timedMetadata buffers are intentionally not cleared
  // here, matching upstream behavior — confirm before "fixing"
  this.reset = function () {
    video.size = 0;
    video.data.length = 0;
    audio.size = 0;
    audio.data.length = 0;
    this.trigger('reset');
  };
  /**
   * Flush any remaining input. Video PES packets may be of variable
   * length. Normally, the start of a new video packet can trigger the
   * finalization of the previous packet. That is not possible if no
   * more video is forthcoming, however. In that case, some other
   * mechanism (like the end of the file) has to be employed. When it is
   * clear that no additional data is forthcoming, calling this method
   * will flush the buffered packets.
   */


  this.flushStreams_ = function () {
    // !!THIS ORDER IS IMPORTANT!!
    // video first then audio
    flushStream(video, 'video');
    flushStream(audio, 'audio');
    flushStream(timedMetadata, 'timed-metadata');
  };

  this.flush = function () {
    // if on flush we haven't had a pmt emitted
    // and we have a pmt to emit. emit the pmt
    // so that we trigger a trackinfo downstream.
    if (!segmentHadPmt && programMapTable) {
      var pmt = {
        type: 'metadata',
        tracks: []
      }; // translate audio and video streams to tracks

      if (programMapTable.video !== null) {
        pmt.tracks.push({
          timelineStartInfo: {
            baseMediaDecodeTime: 0
          },
          id: +programMapTable.video,
          codec: 'avc',
          type: 'video'
        });
      }

      if (programMapTable.audio !== null) {
        pmt.tracks.push({
          timelineStartInfo: {
            baseMediaDecodeTime: 0
          },
          id: +programMapTable.audio,
          codec: 'adts',
          type: 'audio'
        });
      }

      self.trigger('data', pmt);
    }

    segmentHadPmt = false;
    this.flushStreams_();
    this.trigger('done');
  };
};
14314
// inherit the shared stream base (event emitter / pipe plumbing)
_ElementaryStream.prototype = new stream();
// public surface of the m2ts sub-module
var m2ts = {
  PAT_PID: 0x0000,
  MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
  TransportPacketStream: _TransportPacketStream,
  TransportParseStream: _TransportParseStream,
  ElementaryStream: _ElementaryStream,
  TimestampRolloverStream: TimestampRolloverStream,
  CaptionStream: captionStream.CaptionStream,
  Cea608Stream: captionStream.Cea608Stream,
  Cea708Stream: captionStream.Cea708Stream,
  MetadataStream: metadataStream
};

// re-export the stream-type constants on the m2ts namespace
for (var type in streamTypes) {
  if (streamTypes.hasOwnProperty(type)) {
    m2ts[type] = streamTypes[type];
  }
}

var m2ts_1 = m2ts;
var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;

var _AdtsStream;

// sample rates indexed by the 4-bit sampling_frequency_index of an ADTS header
var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
/*
 * Accepts a ElementaryStream and emits data events with parsed
 * AAC Audio Frames of the individual packets. Input audio in ADTS
 * format is unpacked and re-emitted as AAC frames.
 *
 * @see http://wiki.multimedia.cx/index.php?title=ADTS
 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
 */
14349
/**
 * Unpacks ADTS-framed AAC out of 'audio' PES packets: each complete ADTS
 * frame is stripped of its header and re-emitted as a 'data' event with
 * timing and format metadata.
 * @param {boolean} handlePartialSegments when falsy, the frame counter is
 * reset on every push; when truthy it persists so timestamps stay
 * continuous across partial-segment pushes
 */
_AdtsStream = function AdtsStream(handlePartialSegments) {
  var buffer,
      frameNum = 0;

  _AdtsStream.prototype.init.call(this);

  // warn about bytes that had to be skipped while hunting for a syncword
  this.skipWarn_ = function (start, end) {
    this.trigger('log', {
      level: 'warn',
      // fixed typo: "skiping" -> "skipping"
      message: "adts skipping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
    });
  };

  /**
   * Buffer audio PES payloads and emit every complete ADTS frame found.
   * @param {Object} packet a parsed PES packet; ignored unless
   * packet.type === 'audio'
   */
  this.push = function (packet) {
    var i = 0,
        frameLength,
        protectionSkipBytes,
        oldBuffer,
        sampleCount,
        adtsFrameDuration;

    if (!handlePartialSegments) {
      frameNum = 0;
    }

    if (packet.type !== 'audio') {
      // ignore non-audio data
      return;
    } // Prepend any data in the buffer to the input data so that we can parse
    // aac frames the cross a PES packet boundary


    if (buffer && buffer.length) {
      oldBuffer = buffer;
      buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
      buffer.set(oldBuffer);
      buffer.set(packet.data, oldBuffer.byteLength);
    } else {
      buffer = packet.data;
    } // unpack any ADTS frames which have been fully received
    // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS


    var skip; // We use i + 7 here because we want to be able to parse the entire header.
    // If we don't have enough bytes to do that, then we definitely won't have a full frame.

    while (i + 7 < buffer.length) {
      // Look for the start of an ADTS header..
      if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
        if (typeof skip !== 'number') {
          skip = i;
        } // If a valid header was not found, jump one forward and attempt to
        // find a valid ADTS header starting at the next byte


        i++;
        continue;
      }

      if (typeof skip === 'number') {
        this.skipWarn_(skip, i);
        skip = null;
      } // The protection skip bit tells us if we have 2 bytes of CRC data at the
      // end of the ADTS header


      protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
      // end of the sync sequence
      // NOTE: frame length includes the size of the header

      frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
      sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
      adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
      // then we have to wait for more data

      if (buffer.byteLength - i < frameLength) {
        break;
      } // Otherwise, deliver the complete AAC frame


      this.trigger('data', {
        pts: packet.pts + frameNum * adtsFrameDuration,
        dts: packet.dts + frameNum * adtsFrameDuration,
        sampleCount: sampleCount,
        audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
        channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
        samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
        samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
        // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
        samplesize: 16,
        // data is the frame without it's header
        data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
      });
      frameNum++;
      i += frameLength;
    }

    // warn once more if trailing bytes were skipped without a new syncword
    if (typeof skip === 'number') {
      this.skipWarn_(skip, i);
      skip = null;
    } // remove processed bytes from the buffer.


    buffer = buffer.subarray(i);
  };

  this.flush = function () {
    frameNum = 0;
    this.trigger('done');
  };

  this.reset = function () {
    buffer = void 0;
    this.trigger('reset');
  };

  this.endTimeline = function () {
    buffer = void 0;
    this.trigger('endedtimeline');
  };
};
14471
// inherit the shared stream base (event emitter / pipe plumbing)
_AdtsStream.prototype = new stream();
var adts = _AdtsStream;
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 */
14480
14481 var ExpGolomb;
14482 /**
14483 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
14484 * scheme used by h264.
14485 */
14486
14487 ExpGolomb = function ExpGolomb(workingData) {
14488 var // the number of bytes left to examine in workingData
14489 workingBytesAvailable = workingData.byteLength,
14490 // the current word being examined
14491 workingWord = 0,
14492 // :uint
14493 // the number of bits left to examine in the current word
14494 workingBitsAvailable = 0; // :uint;
14495 // ():uint
14496
14497 this.length = function () {
14498 return 8 * workingBytesAvailable;
14499 }; // ():uint
14500
14501
14502 this.bitsAvailable = function () {
14503 return 8 * workingBytesAvailable + workingBitsAvailable;
14504 }; // ():void
14505
14506
14507 this.loadWord = function () {
14508 var position = workingData.byteLength - workingBytesAvailable,
14509 workingBytes = new Uint8Array(4),
14510 availableBytes = Math.min(4, workingBytesAvailable);
14511
14512 if (availableBytes === 0) {
14513 throw new Error('no bytes available');
14514 }
14515
14516 workingBytes.set(workingData.subarray(position, position + availableBytes));
14517 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
14518
14519 workingBitsAvailable = availableBytes * 8;
14520 workingBytesAvailable -= availableBytes;
14521 }; // (count:int):void
14522
14523
14524 this.skipBits = function (count) {
14525 var skipBytes; // :int
14526
14527 if (workingBitsAvailable > count) {
14528 workingWord <<= count;
14529 workingBitsAvailable -= count;
14530 } else {
14531 count -= workingBitsAvailable;
14532 skipBytes = Math.floor(count / 8);
14533 count -= skipBytes * 8;
14534 workingBytesAvailable -= skipBytes;
14535 this.loadWord();
14536 workingWord <<= count;
14537 workingBitsAvailable -= count;
14538 }
14539 }; // (size:int):uint
14540
14541
14542 this.readBits = function (size) {
14543 var bits = Math.min(workingBitsAvailable, size),
14544 // :uint
14545 valu = workingWord >>> 32 - bits; // :uint
14546 // if size > 31, handle error
14547
14548 workingBitsAvailable -= bits;
14549
14550 if (workingBitsAvailable > 0) {
14551 workingWord <<= bits;
14552 } else if (workingBytesAvailable > 0) {
14553 this.loadWord();
14554 }
14555
14556 bits = size - bits;
14557
14558 if (bits > 0) {
14559 return valu << bits | this.readBits(bits);
14560 }
14561
14562 return valu;
14563 }; // ():uint
14564
14565
// Count and consume the run of leading zero bits before the next 1 bit
// (the prefix of an Exp-Golomb codeword). Recurses across word reloads.
this.skipLeadingZeros = function () {
  var leadingZeroCount; // :uint

  for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
    if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
      // the first bit of working word is 1
      workingWord <<= leadingZeroCount;
      workingBitsAvailable -= leadingZeroCount;
      return leadingZeroCount;
    }
  } // we exhausted workingWord and still have not found a 1

  this.loadWord();
  return leadingZeroCount + this.skipLeadingZeros();
}; // ():void
14582
14583
// Discard one unsigned Exp-Golomb codeword: the zero prefix plus the
// equally long suffix (prefix length + 1 bits).
this.skipUnsignedExpGolomb = function () {
  this.skipBits(1 + this.skipLeadingZeros());
}; // ():void
14587
14588
// Discard one signed Exp-Golomb codeword (same bit layout as the
// unsigned form, so the skip logic is identical).
this.skipExpGolomb = function () {
  this.skipBits(1 + this.skipLeadingZeros());
}; // ():uint
14592
14593
// Decode one unsigned Exp-Golomb codeword, ue(v):
// value = 2^clz + (suffix bits) - 1, where clz is the leading-zero count.
this.readUnsignedExpGolomb = function () {
  var clz = this.skipLeadingZeros(); // :uint

  return this.readBits(clz + 1) - 1;
}; // ():int
14599
14600
// Decode one signed Exp-Golomb codeword, se(v): odd code numbers map to
// positive values, even code numbers to negative values.
this.readExpGolomb = function () {
  var valu = this.readUnsignedExpGolomb(); // :int

  if (0x01 & valu) {
    // the number is odd if the low order bit is set
    return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
  }

  return -1 * (valu >>> 1); // divide by two then make it negative
}; // Some convenience functions
// :Boolean
14611 // :Boolean
14612
14613
// Read a single bit as a boolean flag.
this.readBoolean = function () {
  return this.readBits(1) === 1;
}; // ():int
14617
14618
// Read 8 bits as an unsigned byte value.
this.readUnsignedByte = function () {
  return this.readBits(8);
};
14622
14623 this.loadWord();
14624 };
14625
// Module-local alias for the Exp-Golomb bit reader defined above; used by
// the SPS parser in H264Stream.
var expGolomb = ExpGolomb;

var _H264Stream, _NalByteStream;

// Populated below: profile_idc values whose SPS carries extra fields.
var PROFILES_WITH_OPTIONAL_SPS_DATA;
/**
 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
 */
14634
_NalByteStream = function NalByteStream() {
  var syncPoint = 0,
      i,
      buffer;

  _NalByteStream.prototype.init.call(this);
  /*
   * Scans a byte stream and triggers a data event with the NAL units found.
   * @param {Object} data Event received from H264Stream
   * @param {Uint8Array} data.data The h264 byte stream to be scanned
   *
   * @see H264Stream.push
   */

  this.push = function (data) {
    var swapBuffer;

    // append the incoming bytes to any leftover from the previous push
    if (!buffer) {
      buffer = data.data;
    } else {
      swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
      swapBuffer.set(buffer);
      swapBuffer.set(data.data, buffer.byteLength);
      buffer = swapBuffer;
    }

    var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
    // scan for NAL unit boundaries
    // a match looks like this:
    // 0 0 1 .. NAL .. 0 0 1
    // ^ sync point ^ i
    // or this:
    // 0 0 1 .. NAL .. 0 0 0
    // ^ sync point ^ i
    // advance the sync point to a NAL start, if necessary

    for (; syncPoint < len - 3; syncPoint++) {
      if (buffer[syncPoint + 2] === 1) {
        // the sync point is properly aligned
        i = syncPoint + 5;
        break;
      }
    }

    // walk forward three bytes at a time looking at the byte values to
    // classify possible start-code positions
    while (i < len) {
      // look at the current byte to determine if we've hit the end of
      // a NAL unit boundary
      switch (buffer[i]) {
        case 0:
          // skip past non-sync sequences
          if (buffer[i - 1] !== 0) {
            i += 2;
            break;
          } else if (buffer[i - 2] !== 0) {
            i++;
            break;
          } // deliver the NAL unit if it isn't empty

          if (syncPoint + 3 !== i - 2) {
            this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
          } // drop trailing zeroes

          do {
            i++;
          } while (buffer[i] !== 1 && i < len);

          syncPoint = i - 2;
          i += 3;
          break;

        case 1:
          // skip past non-sync sequences
          if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
            i += 3;
            break;
          } // deliver the NAL unit

          this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
          syncPoint = i - 2;
          i += 3;
          break;

        default:
          // the current byte isn't a one or zero, so it cannot be part
          // of a sync sequence
          i += 3;
          break;
      }
    } // filter out the NAL units that were delivered

    // keep only the unprocessed tail for the next push
    buffer = buffer.subarray(syncPoint);
    i -= syncPoint;
    syncPoint = 0;
  };

  this.reset = function () {
    buffer = null;
    syncPoint = 0;
    this.trigger('reset');
  };

  this.flush = function () {
    // deliver the last buffered NAL unit
    if (buffer && buffer.byteLength > 3) {
      this.trigger('data', buffer.subarray(syncPoint + 3));
    } // reset the stream state

    buffer = null;
    syncPoint = 0;
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };
};
14758
_NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
// see Recommendation ITU-T H.264 (4/2013),
// 7.3.2.1.1 Sequence parameter set data syntax

PROFILES_WITH_OPTIONAL_SPS_DATA = {
  100: true,
  110: true,
  122: true,
  244: true,
  44: true,
  83: true,
  86: true,
  118: true,
  128: true,
  // TODO: the three profiles below don't
  // appear to have sps data in the specification anymore?
  138: true,
  139: true,
  134: true
};
14779 /**
14780 * Accepts input from a ElementaryStream and produces H.264 NAL unit data
14781 * events.
14782 */
14783
/**
 * Accepts packets from an ElementaryStream, reassembles NAL units via a
 * NalByteStream, classifies each NAL unit by type, and re-emits it as a
 * 'data' event carrying the trackId/pts/dts captured from the source
 * packet. SPS NAL units are additionally parsed into a `config` object
 * (dimensions, profile/level, sample aspect ratio).
 */
_H264Stream = function H264Stream() {
  var nalByteStream = new _NalByteStream(),
      self,
      trackId,
      currentPts,
      currentDts,
      discardEmulationPreventionBytes,
      readSequenceParameterSet,
      skipScalingList;

  _H264Stream.prototype.init.call(this);

  self = this;
  /*
   * Pushes a packet from a stream onto the NalByteStream
   *
   * @param {Object} packet - A packet received from a stream
   * @param {Uint8Array} packet.data - The raw bytes of the packet
   * @param {Number} packet.dts - Decode timestamp of the packet
   * @param {Number} packet.pts - Presentation timestamp of the packet
   * @param {Number} packet.trackId - The id of the h264 track this packet came from
   * @param {('video'|'audio')} packet.type - The type of packet
   *
   */

  this.push = function (packet) {
    if (packet.type !== 'video') {
      return;
    }

    // remember the timing/track info so the async 'data' handler below can
    // stamp it onto the NAL units recovered from this packet
    trackId = packet.trackId;
    currentPts = packet.pts;
    currentDts = packet.dts;
    nalByteStream.push(packet);
  };
  /*
   * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
   * for the NALUs to the next stream component.
   * Also, preprocess caption and sequence parameter NALUs.
   *
   * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
   * @see NalByteStream.push
   */

  nalByteStream.on('data', function (data) {
    var event = {
      trackId: trackId,
      pts: currentPts,
      dts: currentDts,
      data: data,
      nalUnitTypeCode: data[0] & 0x1f
    };

    switch (event.nalUnitTypeCode) {
      case 0x05:
        event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
        break;

      case 0x06:
        event.nalUnitType = 'sei_rbsp';
        event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
        break;

      case 0x07:
        event.nalUnitType = 'seq_parameter_set_rbsp';
        event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
        event.config = readSequenceParameterSet(event.escapedRBSP);
        break;

      case 0x08:
        event.nalUnitType = 'pic_parameter_set_rbsp';
        break;

      case 0x09:
        event.nalUnitType = 'access_unit_delimiter_rbsp';
        break;
    } // This triggers data on the H264Stream

    self.trigger('data', event);
  });
  nalByteStream.on('done', function () {
    self.trigger('done');
  });
  nalByteStream.on('partialdone', function () {
    self.trigger('partialdone');
  });
  nalByteStream.on('reset', function () {
    self.trigger('reset');
  });
  nalByteStream.on('endedtimeline', function () {
    self.trigger('endedtimeline');
  });

  this.flush = function () {
    nalByteStream.flush();
  };

  this.partialFlush = function () {
    nalByteStream.partialFlush();
  };

  this.reset = function () {
    nalByteStream.reset();
  };

  this.endTimeline = function () {
    nalByteStream.endTimeline();
  };
  /**
   * Advance the ExpGolomb decoder past a scaling list. The scaling
   * list is optionally transmitted as part of a sequence parameter
   * set and is not relevant to transmuxing.
   * @param count {number} the number of entries in this scaling list
   * @param expGolombDecoder {object} an ExpGolomb pointed to the
   * start of a scaling list
   * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
   */

  skipScalingList = function skipScalingList(count, expGolombDecoder) {
    var lastScale = 8,
        nextScale = 8,
        j,
        deltaScale;

    for (j = 0; j < count; j++) {
      if (nextScale !== 0) {
        deltaScale = expGolombDecoder.readExpGolomb();
        nextScale = (lastScale + deltaScale + 256) % 256;
      }

      lastScale = nextScale === 0 ? lastScale : nextScale;
    }
  };
  /**
   * Expunge any "Emulation Prevention" bytes from a "Raw Byte
   * Sequence Payload"
   * @param data {Uint8Array} the bytes of a RBSP from a NAL
   * unit
   * @return {Uint8Array} the RBSP without any Emulation
   * Prevention Bytes
   */

  discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
    var length = data.byteLength,
        emulationPreventionBytesPositions = [],
        i = 1,
        newLength,
        newData; // Find all `Emulation Prevention Bytes`

    while (i < length - 2) {
      if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
        emulationPreventionBytesPositions.push(i + 2);
        i += 2;
      } else {
        i++;
      }
    } // If no Emulation Prevention Bytes were found just return the original
    // array

    if (emulationPreventionBytesPositions.length === 0) {
      return data;
    } // Create a new array to hold the NAL unit data

    newLength = length - emulationPreventionBytesPositions.length;
    newData = new Uint8Array(newLength);
    var sourceIndex = 0;

    for (i = 0; i < newLength; sourceIndex++, i++) {
      if (sourceIndex === emulationPreventionBytesPositions[0]) {
        // Skip this byte
        sourceIndex++; // Remove this position index

        emulationPreventionBytesPositions.shift();
      }

      newData[i] = data[sourceIndex];
    }

    return newData;
  };
  /**
   * Read a sequence parameter set and return some interesting video
   * properties. A sequence parameter set is the H264 metadata that
   * describes the properties of upcoming video frames.
   * @param data {Uint8Array} the bytes of a sequence parameter set
   * @return {object} an object with configuration parsed from the
   * sequence parameter set, including the dimensions of the
   * associated video frames.
   */

  readSequenceParameterSet = function readSequenceParameterSet(data) {
    var frameCropLeftOffset = 0,
        frameCropRightOffset = 0,
        frameCropTopOffset = 0,
        frameCropBottomOffset = 0,
        expGolombDecoder,
        profileIdc,
        levelIdc,
        profileCompatibility,
        chromaFormatIdc,
        picOrderCntType,
        numRefFramesInPicOrderCntCycle,
        picWidthInMbsMinus1,
        picHeightInMapUnitsMinus1,
        frameMbsOnlyFlag,
        scalingListCount,
        sarRatio = [1, 1],
        aspectRatioIdc,
        i;
    expGolombDecoder = new expGolomb(data);
    profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc

    profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag

    levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)

    expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
    // some profiles have more optional data we don't need

    if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
      chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();

      if (chromaFormatIdc === 3) {
        expGolombDecoder.skipBits(1); // separate_colour_plane_flag
      }

      expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8

      expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8

      expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag

      if (expGolombDecoder.readBoolean()) {
        // seq_scaling_matrix_present_flag
        scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;

        for (i = 0; i < scalingListCount; i++) {
          if (expGolombDecoder.readBoolean()) {
            // seq_scaling_list_present_flag[ i ]
            if (i < 6) {
              skipScalingList(16, expGolombDecoder);
            } else {
              skipScalingList(64, expGolombDecoder);
            }
          }
        }
      }
    }

    expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4

    picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();

    if (picOrderCntType === 0) {
      expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
    } else if (picOrderCntType === 1) {
      expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag

      expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic

      expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field

      numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();

      for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
        expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
      }
    }

    expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames

    expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag

    picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
    picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
    frameMbsOnlyFlag = expGolombDecoder.readBits(1);

    if (frameMbsOnlyFlag === 0) {
      expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
    }

    expGolombDecoder.skipBits(1); // direct_8x8_inference_flag

    if (expGolombDecoder.readBoolean()) {
      // frame_cropping_flag
      frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
    }

    if (expGolombDecoder.readBoolean()) {
      // vui_parameters_present_flag
      if (expGolombDecoder.readBoolean()) {
        // aspect_ratio_info_present_flag
        // table E-1 of the spec: aspect_ratio_idc -> sample aspect ratio
        aspectRatioIdc = expGolombDecoder.readUnsignedByte();

        switch (aspectRatioIdc) {
          case 1:
            sarRatio = [1, 1];
            break;

          case 2:
            sarRatio = [12, 11];
            break;

          case 3:
            sarRatio = [10, 11];
            break;

          case 4:
            sarRatio = [16, 11];
            break;

          case 5:
            sarRatio = [40, 33];
            break;

          case 6:
            sarRatio = [24, 11];
            break;

          case 7:
            sarRatio = [20, 11];
            break;

          case 8:
            sarRatio = [32, 11];
            break;

          case 9:
            sarRatio = [80, 33];
            break;

          case 10:
            sarRatio = [18, 11];
            break;

          case 11:
            sarRatio = [15, 11];
            break;

          case 12:
            sarRatio = [64, 33];
            break;

          case 13:
            sarRatio = [160, 99];
            break;

          case 14:
            sarRatio = [4, 3];
            break;

          case 15:
            sarRatio = [3, 2];
            break;

          case 16:
            sarRatio = [2, 1];
            break;

          case 255:
            {
              // Extended_SAR: width and height are each transmitted as
              // two bytes (big-endian 16-bit values)
              sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
              break;
            }
        }
        // NOTE: a transpiled leftover no-op expression
        // (`sarRatio[0] / sarRatio[1];`) was removed here; it computed a
        // value and discarded it.
      }
    }

    return {
      profileIdc: profileIdc,
      levelIdc: levelIdc,
      profileCompatibility: profileCompatibility,
      width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
      height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
      // sar is sample aspect ratio
      sarRatio: sarRatio
    };
  };
};
15177
_H264Stream.prototype = new stream();
// Public surface of the H264 parsing pipeline.
var h264 = {
  H264Stream: _H264Stream,
  NalByteStream: _NalByteStream
};
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 *
 * Utilities to detect basic properties and metadata about Aac data.
 */

// Maps the 4-bit ADTS sampling_frequency_index to a sample rate in Hz.
var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
15193
/**
 * Compute the total byte length of an ID3v2 tag starting at `byteIndex`:
 * the 28-bit syncsafe payload size plus the 10-byte header, plus another
 * 10 bytes when the footer flag (bit 4 of the flags byte) is set.
 */
var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
  var flags = header[byteIndex + 5];
  var footerPresent = (flags & 16) >> 4;
  var returnSize =
    header[byteIndex + 6] << 21 |
    header[byteIndex + 7] << 14 |
    header[byteIndex + 8] << 7 |
    header[byteIndex + 9];

  // clamp a negative size to zero
  if (returnSize < 0) {
    returnSize = 0;
  }

  // 10-byte header always; optional 10-byte footer
  return footerPresent ? returnSize + 20 : returnSize + 10;
};
15207
/**
 * Return the offset of the first byte after any ID3v2 tags that begin at
 * `offset`. Consecutive tags are walked one after another; if no tag
 * starts at `offset` (or fewer than 10 bytes remain), `offset` is
 * returned unchanged.
 */
var getId3Offset = function getId3Offset(data, offset) {
  while (
    data.length - offset >= 10 &&
    data[offset] === 'I'.charCodeAt(0) &&
    data[offset + 1] === 'D'.charCodeAt(0) &&
    data[offset + 2] === '3'.charCodeAt(0)
  ) {
    offset += parseId3TagSize(data, offset);
  }

  return offset;
}; // TODO: use vhs-utils
15216
15217
/**
 * Heuristic check for raw AAC: skip any leading ID3 tags, then look for
 * an ADTS syncword (12 set bits) with the layer bits clear — the clear
 * layer bits distinguish AAC from MP3.
 */
var isLikelyAacData$1 = function isLikelyAacData(data) {
  var offset = getId3Offset(data, 0);

  if (data.length < offset + 2) {
    return false;
  }

  var hasSyncword = (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0;
  return hasSyncword && (data[offset + 1] & 0x16) === 0x10;
};
15224
/**
 * Decode a 4-byte ID3 "syncsafe" integer: four groups of 7 bits each,
 * most significant first.
 */
var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
  var value = 0;

  for (var k = 0; k < 4; k++) {
    value = value << 7 | data[k];
  }

  return value;
}; // return a percent-encoded representation of the specified byte range
// @see http://en.wikipedia.org/wiki/Percent-encoding
15229
15230
/**
 * Percent-encode bytes[start, end) as '%xx' pairs (two lowercase hex
 * digits per byte, zero-padded).
 */
var percentEncode = function percentEncode(bytes, start, end) {
  var pieces = [];

  for (var idx = start; idx < end; idx++) {
    pieces.push('%' + ('00' + bytes[idx].toString(16)).slice(-2));
  }

  return pieces.join('');
}; // return the string representation of the specified byte range,
// interpreted as ISO-8859-1.
15242
15243
/**
 * Decode bytes[start, end) as an ISO-8859-1 string by percent-encoding
 * the bytes and letting `unescape` map each %xx back to a code point.
 */
var parseIso88591 = function parseIso88591(bytes, start, end) {
  var encoded = percentEncode(bytes, start, end);
  return unescape(encoded); // jshint ignore:line
};
15247
/**
 * Compute the frame_length (total frame size in bytes) of the ADTS frame
 * whose header starts at `byteIndex`. The 13-bit field spans the low 2
 * bits of header byte 3, all of byte 4, and the top 3 bits of byte 5.
 *
 * BUGFIX: the original computed `header[byteIndex + 3] & 0x3 << 11`;
 * since `<<` binds tighter than `&`, that masked the byte with 0x1800 and
 * always produced 0, dropping the top 2 bits for frames >= 2048 bytes.
 */
var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
  var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
      middle = header[byteIndex + 4] << 3,
      highTwo = (header[byteIndex + 3] & 0x3) << 11;
  return highTwo | middle | lowThree;
};
15254
/**
 * Classify the packet starting at `byteIndex` as 'timed-metadata' (an
 * ID3v2 tag), 'audio' (an ADTS frame), or null.
 *
 * BUGFIX: the original audio test read `header[byteIndex] & 0xff === 0xff`;
 * `===` binds tighter than `&`, so it evaluated `header[byteIndex] & 1`
 * and accepted any odd byte as a syncword. Parenthesized to match the
 * correct form used elsewhere in this file (see isLikelyAacData).
 */
var parseType$2 = function parseType(header, byteIndex) {
  if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
    return 'timed-metadata';
  } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
    return 'audio';
  }

  return null;
};
15264
/**
 * Scan for the first valid ADTS header in `packet` and return its sample
 * rate in Hz (via the sampling_frequency_index lookup table), or null if
 * no header is found.
 */
var parseSampleRate = function parseSampleRate(packet) {
  for (var offset = 0; offset + 5 < packet.length; offset++) {
    // ADTS header: 0xFF then 0xF0 in the masked second byte
    // (0xF6 also checks the layer bits)
    if (packet[offset] !== 0xFF || (packet[offset + 1] & 0xF6) !== 0xF0) {
      // not a header start; resync one byte forward
      continue;
    }

    return ADTS_SAMPLING_FREQUENCIES[(packet[offset + 2] & 0x3c) >>> 2];
  }

  return null;
};
15281
// Extract the Apple HLS timestamp from an ID3v2 tag: finds the PRIV frame
// owned by 'com.apple.streaming.transportStreamTimestamp' and decodes its
// 33-bit MPEG-TS (90kHz) timestamp. Returns null if absent/invalid.
var parseAacTimestamp = function parseAacTimestamp(packet) {
  var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag

  frameStart = 10;

  if (packet[5] & 0x40) {
    // advance the frame start past the extended header
    frameStart += 4; // header size field

    frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
  } // parse one or more ID3 frames
  // http://id3.org/id3v2.3.0#ID3v2_frame_overview

  do {
    // determine the number of bytes in this frame
    frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));

    if (frameSize < 1) {
      return null;
    }

    frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);

    if (frameHeader === 'PRIV') {
      frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);

      // the owner identifier is the null-terminated prefix of the body
      for (var i = 0; i < frame.byteLength; i++) {
        if (frame[i] === 0) {
          var owner = parseIso88591(frame, 0, i);

          if (owner === 'com.apple.streaming.transportStreamTimestamp') {
            var d = frame.subarray(i + 1);
            // reassemble the 33-bit PTS: the top 31 bits via bitwise ops,
            // then the final multiply/add to avoid 32-bit overflow
            var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
            size *= 4;
            size += d[7] & 0x03;
            return size;
          }

          break;
        }
      }
    }

    frameStart += 10; // advance past the frame header

    frameStart += frameSize; // advance past the frame body
  } while (frameStart < packet.byteLength);

  return null;
};
15333
// AAC/ID3 helper functions shared by the AAC pipeline below.
var utils = {
  isLikelyAacData: isLikelyAacData$1,
  parseId3TagSize: parseId3TagSize,
  parseAdtsSize: parseAdtsSize,
  parseType: parseType$2,
  parseSampleRate: parseSampleRate,
  parseAacTimestamp: parseAacTimestamp
};

var _AacStream;
/**
 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
 */
15348
_AacStream = function AacStream() {
  var everything = new Uint8Array(),
      timeStamp = 0;

  _AacStream.prototype.init.call(this);

  // Record the timestamp to stamp onto the audio packets emitted below.
  this.setTimestamp = function (timestamp) {
    timeStamp = timestamp;
  };

  /**
   * Accept a chunk of bytes and emit one 'data' event per complete ID3
   * tag ('timed-metadata') or ADTS frame ('audio') found. Incomplete
   * trailing data is buffered until the next push.
   */
  this.push = function (bytes) {
    var frameSize = 0,
        byteIndex = 0,
        bytesLeft,
        chunk,
        packet,
        leftover; // If there are bytes remaining from the last segment, prepend them to the
    // bytes that were pushed in

    if (everything.length) {
      // BUGFIX: keep a reference to the old buffer *before* reallocating.
      // The previous code copied `everything.subarray(0, tempLength)` from
      // the newly created (zero-filled) array, silently replacing the
      // buffered bytes with zeros.
      leftover = everything;
      everything = new Uint8Array(bytes.byteLength + leftover.byteLength);
      everything.set(leftover);
      everything.set(bytes, leftover.byteLength);
    } else {
      everything = bytes;
    }

    while (everything.length - byteIndex >= 3) {
      if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
        // Exit early because we don't have enough to parse
        // the ID3 tag header
        if (everything.length - byteIndex < 10) {
          break;
        } // check framesize

        frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet
        // Add to byteIndex to support multiple ID3 tags in sequence

        if (byteIndex + frameSize > everything.length) {
          break;
        }

        chunk = {
          type: 'timed-metadata',
          data: everything.subarray(byteIndex, byteIndex + frameSize)
        };
        this.trigger('data', chunk);
        byteIndex += frameSize;
        continue;
      } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
        // Exit early because we don't have enough to parse
        // the ADTS frame header
        if (everything.length - byteIndex < 7) {
          break;
        }

        frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet

        if (byteIndex + frameSize > everything.length) {
          break;
        }

        packet = {
          type: 'audio',
          data: everything.subarray(byteIndex, byteIndex + frameSize),
          pts: timeStamp,
          dts: timeStamp
        };
        this.trigger('data', packet);
        byteIndex += frameSize;
        continue;
      }

      // no recognizable header at this position; resync one byte forward
      byteIndex++;
    }

    // buffer any unconsumed tail for the next push
    bytesLeft = everything.length - byteIndex;

    if (bytesLeft > 0) {
      everything = everything.subarray(byteIndex);
    } else {
      everything = new Uint8Array();
    }
  };

  this.reset = function () {
    everything = new Uint8Array();
    this.trigger('reset');
  };

  this.endTimeline = function () {
    everything = new Uint8Array();
    this.trigger('endedtimeline');
  };
};
15448
// hook up the Stream prototype chain (stream is defined earlier in this bundle)
_AacStream.prototype = new stream();
var aac = _AacStream; // constants

// track properties copied from parsed ADTS/NAL data onto track objects
var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
var audioProperties = AUDIO_PROPERTIES;
var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
var videoProperties = VIDEO_PROPERTIES;
var H264Stream = h264.H264Stream;
var isLikelyAacData = utils.isLikelyAacData;
var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types

var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
15461
/**
 * Tag `event` with the pipeline stream it originated from, then re-emit
 * it as a 'log' event on `this` (intended to be bound to a transmuxer).
 */
var retriggerForStream = function retriggerForStream(key, event) {
  var annotated = event;
  annotated.stream = key;
  this.trigger('log', annotated);
};
15466
/**
 * Subscribe to the 'log' event of every stream in `pipeline` and re-emit
 * each log on `transmuxer`, annotated with the stream's pipeline key.
 */
var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
  Object.keys(pipeline).forEach(function (key) {
    // 'headOfPipeline' is just a duplicate reference; other non-stream
    // values expose no `on` method to hook.
    if (key === 'headOfPipeline' || !pipeline[key].on) {
      return;
    }

    pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
  });
};
15481 /**
15482 * Compare two arrays (even typed) for same-ness
15483 */
15484
15485
/**
 * Element-wise equality for two array-likes (plain or typed): same
 * length and strictly equal values at every index.
 */
var arrayEquals = function arrayEquals(a, b) {
  if (a.length !== b.length) {
    return false;
  }

  // compare the value of each element in the array
  for (var idx = a.length - 1; idx >= 0; idx--) {
    if (a[idx] !== b[idx]) {
      return false;
    }
  }

  return true;
};
15502
/**
 * Build a segment timing-info object rebased onto baseMediaDecodeTime.
 * The PTS and DTS values are based on the actual stream times from the
 * segment, but the player timeline starts at baseMediaDecodeTime, so the
 * segment's DTS/PTS *durations* are re-applied on top of it.
 */
var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
  var ptsOffsetFromDts = startPts - startDts;
  var decodeDuration = endDts - startDts;
  var presentationDuration = endPts - startPts;

  return {
    start: {
      dts: baseMediaDecodeTime,
      pts: baseMediaDecodeTime + ptsOffsetFromDts
    },
    end: {
      dts: baseMediaDecodeTime + decodeDuration,
      pts: baseMediaDecodeTime + presentationDuration
    },
    prependedContentDuration: prependedContentDuration,
    baseMediaDecodeTime: baseMediaDecodeTime
  };
};
15524 /**
15525 * Constructs a single-track, ISO BMFF media segment from AAC data
15526 * events. The output of this stream can be fed to a SourceBuffer
15527 * configured with a suitable initialization segment.
15528 * @param track {object} track metadata configuration
15529 * @param options {object} transmuxer options object
15530 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
15531 * in the source; false to adjust the first segment to start at 0.
15532 */
15533
15534
15535 _AudioSegmentStream = function AudioSegmentStream(track, options) {
15536 var adtsFrames = [],
15537 sequenceNumber,
15538 earliestAllowedDts = 0,
15539 audioAppendStartTs = 0,
15540 videoBaseMediaDecodeTime = Infinity;
15541 options = options || {};
15542 sequenceNumber = options.firstSequenceNumber || 0;
15543
15544 _AudioSegmentStream.prototype.init.call(this);
15545
15546 this.push = function (data) {
15547 trackDecodeInfo.collectDtsInfo(track, data);
15548
15549 if (track) {
15550 audioProperties.forEach(function (prop) {
15551 track[prop] = data[prop];
15552 });
15553 } // buffer audio data until end() is called
15554
15555
15556 adtsFrames.push(data);
15557 };
15558
15559 this.setEarliestDts = function (earliestDts) {
15560 earliestAllowedDts = earliestDts;
15561 };
15562
15563 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
15564 videoBaseMediaDecodeTime = baseMediaDecodeTime;
15565 };
15566
15567 this.setAudioAppendStart = function (timestamp) {
15568 audioAppendStartTs = timestamp;
15569 };
15570
// Assemble the buffered ADTS frames into a single moof+mdat fragment and
// emit it (plus timing events) downstream. Triggers 'done' either way.
this.flush = function () {
  var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed;

  // Return early if no audio data has been observed since the last flush.
  if (adtsFrames.length === 0) {
    this.trigger('done', 'AudioSegmentStream');
    return;
  }

  // Drop frames that begin before the earliest allowed DTS (derived from the
  // video timeline) so the first audio segment cannot precede the video.
  frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
  track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);

  // Amount of silence prepended to cover any gap before the first frame;
  // note the value is in video clock cycles rather than audio clock cycles.
  videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime);

  // We have to build the index from byte locations to samples (that is,
  // ADTS frames) in the audio data.
  track.samples = audioFrameUtils.generateSampleTable(frames);

  // Concatenate the audio data to construct the mdat box.
  mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
  adtsFrames = [];
  moof = mp4Generator.moof(sequenceNumber, [track]);
  boxes = new Uint8Array(moof.byteLength + mdat.byteLength);

  // Bump the sequence number for next time.
  sequenceNumber++;
  boxes.set(moof);
  boxes.set(mdat, moof.byteLength);
  trackDecodeInfo.clearDtsInfo(track);

  // Duration of one AAC frame (1024 samples) expressed in 90kHz clock ticks.
  frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate);

  // TODO: this check was added to maintain backwards compatibility (particularly
  // with tests) on adding the timingInfo event. However, it seems unlikely that
  // there's a valid use-case where an init segment/data should be triggered
  // without associated frames. Leaving for now, but should be looked into.
  if (frames.length) {
    segmentDuration = frames.length * frameDuration;
    this.trigger('segmentTimingInfo', generateSegmentTimingInfo(
      // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
      // frame info is in video clock cycles. Convert to match expectation of
      // listeners (that all timestamps will be based on video clock cycles).
      clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate),
      // frame times are already in video clock, as is segment duration
      frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
    this.trigger('timingInfo', {
      start: frames[0].pts,
      end: frames[0].pts + segmentDuration
    });
  }

  this.trigger('data', {
    track: track,
    boxes: boxes
  });
  this.trigger('done', 'AudioSegmentStream');
};
15620
// Discard all buffered frames and per-track DTS bookkeeping, then notify
// listeners that the stream has been reset.
this.reset = function () {
  trackDecodeInfo.clearDtsInfo(track);
  adtsFrames = [];
  this.trigger('reset');
};
15626 };
15627
// AudioSegmentStream inherits the base Stream API (pipe/on/trigger/flush).
_AudioSegmentStream.prototype = new stream();
15629 /**
15630 * Constructs a single-track, ISO BMFF media segment from H264 data
15631 * events. The output of this stream can be fed to a SourceBuffer
15632 * configured with a suitable initialization segment.
15633 * @param track {object} track metadata configuration
15634 * @param options {object} transmuxer options object
15635 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
15636 * gopsToAlignWith list when attempting to align gop pts
15637 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
15638 * in the source; false to adjust the first segment to start at 0.
15639 */
15640
15641 _VideoSegmentStream = function VideoSegmentStream(track, options) {
15642 var sequenceNumber,
15643 nalUnits = [],
15644 gopsToAlignWith = [],
15645 config,
15646 pps;
15647 options = options || {};
15648 sequenceNumber = options.firstSequenceNumber || 0;
15649
15650 _VideoSegmentStream.prototype.init.call(this);
15651
15652 delete track.minPTS;
15653 this.gopCache_ = [];
15654 /**
15655 * Constructs a ISO BMFF segment given H264 nalUnits
15656 * @param {Object} nalUnit A data event representing a nalUnit
15657 * @param {String} nalUnit.nalUnitType
15658 * @param {Object} nalUnit.config Properties for a mp4 track
15659 * @param {Uint8Array} nalUnit.data The nalUnit bytes
15660 * @see lib/codecs/h264.js
15661 **/
15662
/**
 * Buffers an H264 nal unit until flush() is called, capturing the track
 * config from the first SPS/PPS nal units seen.
 * @param {Object} nalUnit A data event representing a nalUnit
 * @param {String} nalUnit.nalUnitType
 * @param {Object} nalUnit.config Properties for a mp4 track
 * @param {Uint8Array} nalUnit.data The nalUnit bytes
 **/
this.push = function (nalUnit) {
  trackDecodeInfo.collectDtsInfo(track, nalUnit);

  // Record the track config from the first sequence parameter set seen.
  if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
    config = nalUnit.config;
    track.sps = [nalUnit.data];
    videoProperties.forEach(function (prop) {
      track[prop] = config[prop];
    }, this);
  }

  // Likewise capture the first picture parameter set.
  if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
    pps = nalUnit.data;
    track.pps = [nalUnit.data];
  }

  // Buffer video until flush() is called.
  nalUnits.push(nalUnit);
};
15682 /**
15683 * Pass constructed ISO BMFF track and boxes on to the
15684 * next stream in the pipeline
15685 **/
15686
15687
/**
 * Organizes buffered nal units into frames and GOPs, repairs fragments that
 * do not start on a keyframe (GOP fusion or keyframe pulling), optionally
 * trims GOPs to align with gopsToAlignWith, then constructs and emits the
 * moof+mdat fragment along with the associated timing events.
 **/
this.flush = function () {
  var frames,
      gopForFusion,
      gops,
      moof,
      mdat,
      boxes,
      prependedContentDuration = 0,
      firstGop,
      lastGop;

  // Throw away nalUnits at the start of the byte stream until
  // we find the first AUD.
  while (nalUnits.length) {
    if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
      break;
    }
    nalUnits.shift();
  }

  // Return early if no video data has been observed.
  if (nalUnits.length === 0) {
    this.resetStream_();
    this.trigger('done', 'VideoSegmentStream');
    return;
  }

  // Organize the raw nal-units into arrays that represent higher-level
  // constructs such as frames and gops (group-of-pictures).
  frames = frameUtils.groupNalsIntoFrames(nalUnits);
  gops = frameUtils.groupFramesIntoGops(frames);

  // If the first frame of this fragment is not a keyframe we have
  // a problem since MSE (on Chrome) requires a leading keyframe.
  //
  // We have two approaches to repairing this situation:
  // 1) GOP-FUSION:
  //    This is where we keep track of the GOPS (group-of-pictures)
  //    from previous fragments and attempt to find one that we can
  //    prepend to the current fragment in order to create a valid
  //    fragment.
  // 2) KEYFRAME-PULLING:
  //    Here we search for the first keyframe in the fragment and
  //    throw away all the frames between the start of the fragment
  //    and that keyframe. We then extend the duration and pull the
  //    PTS of the keyframe forward so that it covers the time range
  //    of the frames that were disposed of.
  //
  // #1 is far preferable over #2 which can cause "stuttering" but
  // requires more things to be just right.
  if (!gops[0][0].keyFrame) {
    // Search for a gop for fusion from our gopCache.
    gopForFusion = this.getGopForFusion_(nalUnits[0], track);

    if (gopForFusion) {
      // In order to provide more accurate timing information about the segment, save
      // the number of seconds prepended to the original segment due to GOP fusion.
      prependedContentDuration = gopForFusion.duration;
      gops.unshift(gopForFusion);

      // Adjust Gops' metadata to account for the inclusion of the
      // new gop at the beginning.
      gops.byteLength += gopForFusion.byteLength;
      gops.nalCount += gopForFusion.nalCount;
      gops.pts = gopForFusion.pts;
      gops.dts = gopForFusion.dts;
      gops.duration += gopForFusion.duration;
    } else {
      // If we didn't find a candidate gop fall back to keyframe-pulling.
      gops = frameUtils.extendFirstKeyFrame(gops);
    }
  }

  // Trim gops to align with gopsToAlignWith.
  if (gopsToAlignWith.length) {
    var alignedGops;

    if (options.alignGopsAtEnd) {
      alignedGops = this.alignGopsAtEnd_(gops);
    } else {
      alignedGops = this.alignGopsAtStart_(gops);
    }

    if (!alignedGops) {
      // Save all the nals in the last GOP into the gop cache.
      this.gopCache_.unshift({
        gop: gops.pop(),
        pps: track.pps,
        sps: track.sps
      });

      // Keep a maximum of 6 GOPs in the cache.
      this.gopCache_.length = Math.min(6, this.gopCache_.length);

      // Clear nalUnits and return early: no gops can be aligned with the
      // desired gopsToAlignWith.
      nalUnits = [];
      this.resetStream_();
      this.trigger('done', 'VideoSegmentStream');
      return;
    }

    // Some gops were trimmed. Clear dts info so minSegmentDts and pts are
    // correct when recalculated before sending off to CoalesceStream.
    trackDecodeInfo.clearDtsInfo(track);
    gops = alignedGops;
  }

  trackDecodeInfo.collectDtsInfo(track, gops);

  // First, we have to build the index from byte locations to
  // samples (that is, frames) in the video data.
  track.samples = frameUtils.generateSampleTable(gops);

  // Concatenate the video data and construct the mdat.
  mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
  track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
  this.trigger('processedGopsInfo', gops.map(function (gop) {
    return {
      pts: gop.pts,
      dts: gop.dts,
      byteLength: gop.byteLength
    };
  }));
  firstGop = gops[0];
  lastGop = gops[gops.length - 1];
  this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
  this.trigger('timingInfo', {
    start: gops[0].pts,
    end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
  });

  // Save all the nals in the last GOP into the gop cache.
  this.gopCache_.unshift({
    gop: gops.pop(),
    pps: track.pps,
    sps: track.sps
  });

  // Keep a maximum of 6 GOPs in the cache.
  this.gopCache_.length = Math.min(6, this.gopCache_.length);

  // Clear nalUnits.
  nalUnits = [];
  this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
  this.trigger('timelineStartInfo', track.timelineStartInfo);
  moof = mp4Generator.moof(sequenceNumber, [track]);

  // It would be great to allocate this array up front instead of
  // throwing away hundreds of media segment fragments.
  boxes = new Uint8Array(moof.byteLength + mdat.byteLength);

  // Bump the sequence number for next time.
  sequenceNumber++;
  boxes.set(moof);
  boxes.set(mdat, moof.byteLength);
  this.trigger('data', {
    track: track,
    boxes: boxes
  });

  // Continue with the flush process now.
  this.resetStream_();
  this.trigger('done', 'VideoSegmentStream');
};
15842
// Drop all buffered nal units, the GOP cache, and any alignment targets,
// then notify listeners that the stream has been reset.
this.reset = function () {
  this.resetStream_();
  nalUnits = [];
  this.gopCache_.length = 0;
  gopsToAlignWith.length = 0;
  this.trigger('reset');
};
15850
// Clear per-track DTS bookkeeping and force SPS/PPS re-capture.
this.resetStream_ = function () {
  trackDecodeInfo.clearDtsInfo(track);

  // Reset config and pps because they may differ across segments,
  // for instance, when we are rendition switching.
  config = undefined;
  pps = undefined;
}; // Search for a candidate Gop for gop-fusion from the gop cache and
15858 // return it or return null if no good candidate was found
15859
15860
/**
 * Searches the gop cache for the GOP nearest to (and compatible with) the
 * given nal unit, for prepending to a fragment that lacks a leading keyframe.
 * @param {Object} nalUnit the first nal unit of the current fragment
 * @return {Object|null} the candidate gop, or null if none qualified
 **/
this.getGopForFusion_ = function (nalUnit) {
  var halfSecond = 45000,
      // Half-a-second in a 90khz clock
      allowableOverlap = 10000,
      // About 3 frames @ 30fps
      nearestDistance = Infinity,
      dtsDistance,
      nearestGopObj,
      currentGop,
      currentGopObj,
      i;

  // Search for the GOP nearest to the beginning of this nal unit.
  for (i = 0; i < this.gopCache_.length; i++) {
    currentGopObj = this.gopCache_[i];
    currentGop = currentGopObj.gop;

    // Reject Gops with different SPS or PPS.
    if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
      continue;
    }

    // Reject Gops that would require a negative baseMediaDecodeTime.
    if (currentGop.dts < track.timelineStartInfo.dts) {
      continue;
    }

    // The distance between the end of the gop and the start of the nalUnit.
    dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration;

    // Only consider GOPS that start before the nal unit and end within
    // a half-second of the nal unit.
    if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
      // Always use the closest GOP we found if there is more than
      // one candidate.
      if (!nearestGopObj || nearestDistance > dtsDistance) {
        nearestGopObj = currentGopObj;
        nearestDistance = dtsDistance;
      }
    }
  }

  if (nearestGopObj) {
    return nearestGopObj.gop;
  }

  return null;
}; // trim gop list to the first gop found that has a matching pts with a gop in the list
15906 // of gopsToAlignWith starting from the START of the list
15907
15908
/**
 * Trims the gop list to the first gop whose pts matches a gop in
 * gopsToAlignWith, scanning from the START of the alignment list.
 * @param {Array} gops gop list (carries byteLength/nalCount/duration/pts/dts)
 * @return {Array|null} the trimmed (or original) gop list, or null when every
 *   gop was trimmed and nothing should be appended
 **/
this.alignGopsAtStart_ = function (gops) {
  var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
  byteLength = gops.byteLength;
  nalCount = gops.nalCount;
  duration = gops.duration;
  alignIndex = gopIndex = 0;

  while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
    align = gopsToAlignWith[alignIndex];
    gop = gops[gopIndex];

    // Found the alignment point.
    if (align.pts === gop.pts) {
      break;
    }

    if (gop.pts > align.pts) {
      // This current gop starts after the current gop we want to align on,
      // so increment align index.
      alignIndex++;
      continue;
    }

    // Current gop starts before the current gop we want to align on, so
    // increment gop index and deduct its totals from the running metadata.
    gopIndex++;
    byteLength -= gop.byteLength;
    nalCount -= gop.nalCount;
    duration -= gop.duration;
  }

  if (gopIndex === 0) {
    // No gops to trim.
    return gops;
  }

  if (gopIndex === gops.length) {
    // All gops trimmed, skip appending all gops.
    return null;
  }

  // Rebuild the aggregate metadata on the trimmed list.
  alignedGops = gops.slice(gopIndex);
  alignedGops.byteLength = byteLength;
  alignedGops.duration = duration;
  alignedGops.nalCount = nalCount;
  alignedGops.pts = alignedGops[0].pts;
  alignedGops.dts = alignedGops[0].dts;
  return alignedGops;
}; // trim gop list to the first gop found that has a matching pts with a gop in the list
15957 // of gopsToAlignWith starting from the END of the list
15958
15959
/**
 * Trims the gop list to the first gop whose pts matches a gop in
 * gopsToAlignWith, scanning from the END of the alignment list.
 * @param {Array} gops gop list (carries byteLength/nalCount/duration/pts/dts)
 * @return {Array|null} the trimmed (or original) gop list, or null when no
 *   alignment point exists and nothing should be appended
 **/
this.alignGopsAtEnd_ = function (gops) {
  var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
  alignIndex = gopsToAlignWith.length - 1;
  gopIndex = gops.length - 1;
  alignEndIndex = null;
  matchFound = false;

  // Walk both lists backwards looking for an exact pts match.
  while (alignIndex >= 0 && gopIndex >= 0) {
    align = gopsToAlignWith[alignIndex];
    gop = gops[gopIndex];

    if (align.pts === gop.pts) {
      matchFound = true;
      break;
    }

    if (align.pts > gop.pts) {
      alignIndex--;
      continue;
    }

    if (alignIndex === gopsToAlignWith.length - 1) {
      // gop.pts is greater than the last alignment candidate. If no match is
      // found by the end of this loop, we still want to append gops that come
      // after this point.
      alignEndIndex = gopIndex;
    }

    gopIndex--;
  }

  if (!matchFound && alignEndIndex === null) {
    return null;
  }

  var trimIndex;

  if (matchFound) {
    trimIndex = gopIndex;
  } else {
    trimIndex = alignEndIndex;
  }

  if (trimIndex === 0) {
    return gops;
  }

  // Rebuild the aggregate metadata on the trimmed list.
  var alignedGops = gops.slice(trimIndex);
  var metadata = alignedGops.reduce(function (total, gop) {
    total.byteLength += gop.byteLength;
    total.duration += gop.duration;
    total.nalCount += gop.nalCount;
    return total;
  }, {
    byteLength: 0,
    duration: 0,
    nalCount: 0
  });
  alignedGops.byteLength = metadata.byteLength;
  alignedGops.duration = metadata.duration;
  alignedGops.nalCount = metadata.nalCount;
  alignedGops.pts = alignedGops[0].pts;
  alignedGops.dts = alignedGops[0].dts;
  return alignedGops;
};
16025
// Replace the list of GOPs that future flushes will attempt to align with.
this.alignGopsWith = function (newGopsToAlignWith) {
  gopsToAlignWith = newGopsToAlignWith;
};
16029 };
16030
// VideoSegmentStream inherits the base Stream API (pipe/on/trigger/flush).
_VideoSegmentStream.prototype = new stream();
16032 /**
16033 * A Stream that can combine multiple streams (ie. audio & video)
16034 * into a single output segment for MSE. Also supports audio-only
16035 * and video-only streams.
16036 * @param options {object} transmuxer options object
16037 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
16038 * in the source; false to adjust the first segment to start at media timeline start.
16039 */
16040
/**
 * A Stream that can combine multiple streams (ie. audio & video)
 * into a single output segment for MSE. Also supports audio-only
 * and video-only streams.
 * @param options {object} transmuxer options object
 * @param options.remux {boolean} whether to combine audio+video into one segment
 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
 *        in the source; false to adjust the first segment to start at media
 *        timeline start.
 * @param metadataStream {object} the ID3 metadata stream, used for dispatchType
 */
_CoalesceStream = function CoalesceStream(options, metadataStream) {
  // Number of tracks per output segment. If greater than 1, we combine
  // multiple tracks into a single segment.
  this.numberOfTracks = 0;
  this.metadataStream = metadataStream;
  options = options || {};

  if (typeof options.remux !== 'undefined') {
    this.remuxTracks = !!options.remux;
  } else {
    this.remuxTracks = true;
  }

  if (typeof options.keepOriginalTimestamps === 'boolean') {
    this.keepOriginalTimestamps = options.keepOriginalTimestamps;
  } else {
    this.keepOriginalTimestamps = false;
  }

  // Per-flush accumulation state.
  this.pendingTracks = [];
  this.videoTrack = null;
  this.pendingBoxes = [];
  this.pendingCaptions = [];
  this.pendingMetadata = [];
  this.pendingBytes = 0;
  this.emittedTracks = 0;

  _CoalesceStream.prototype.init.call(this);

  // Take output from multiple upstream segment streams.
  this.push = function (output) {
    // Buffer incoming captions until the associated video segment finishes.
    if (output.text) {
      return this.pendingCaptions.push(output);
    }

    // Buffer incoming id3 tags until the final flush.
    if (output.frames) {
      return this.pendingMetadata.push(output);
    }

    // Add this track to the list of pending tracks and store important
    // information required for the construction of the final segment.
    this.pendingTracks.push(output.track);
    this.pendingBytes += output.boxes.byteLength;

    // TODO: is there an issue for this against chrome?
    // We unshift audio and push video because as of Chrome 75, when switching
    // from one init segment to another, if the video mdat does not appear
    // after the audio mdat only audio will play for the duration of our
    // transmux.
    if (output.track.type === 'video') {
      this.videoTrack = output.track;
      this.pendingBoxes.push(output.boxes);
    }

    if (output.track.type === 'audio') {
      this.audioTrack = output.track;
      this.pendingBoxes.unshift(output.boxes);
    }
  };
};
16106
// CoalesceStream inherits the base Stream API (pipe/on/trigger).
_CoalesceStream.prototype = new stream();
16108
/**
 * Combines the pending per-track moof+mdat fragments, captions, and ID3
 * metadata into one 'data' event, then emits per-caption and per-id3 events.
 * Emits 'done' once all expected tracks have been flushed and emitted.
 * @param flushSource {string} name of the upstream stream that flushed
 */
_CoalesceStream.prototype.flush = function (flushSource) {
  var offset = 0,
      event = {
        captions: [],
        captionStreams: {},
        metadata: [],
        info: {}
      },
      caption,
      id3,
      initSegment,
      timelineStartPts = 0,
      i;

  if (this.pendingTracks.length < this.numberOfTracks) {
    if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
      // Return because we haven't received a flush from a data-generating
      // portion of the segment (meaning that we have only received meta-data
      // or captions.)
      return;
    } else if (this.remuxTracks) {
      // Return until we have enough tracks from the pipeline to remux (if we
      // are remuxing audio and video into a single MP4).
      return;
    } else if (this.pendingTracks.length === 0) {
      // In the case where we receive a flush without any data having been
      // received we consider it an emitted track for the purposes of
      // coalescing `done` events.
      // We do this for the case where there is an audio and video track in the
      // segment but no audio data. (seen in several playlists with alternate
      // audio tracks and no audio present in the main TS segments.)
      this.emittedTracks++;

      if (this.emittedTracks >= this.numberOfTracks) {
        this.trigger('done');
        this.emittedTracks = 0;
      }

      return;
    }
  }

  // Copy the track properties onto event.info, preferring video when present.
  if (this.videoTrack) {
    timelineStartPts = this.videoTrack.timelineStartInfo.pts;
    videoProperties.forEach(function (prop) {
      event.info[prop] = this.videoTrack[prop];
    }, this);
  } else if (this.audioTrack) {
    timelineStartPts = this.audioTrack.timelineStartInfo.pts;
    audioProperties.forEach(function (prop) {
      event.info[prop] = this.audioTrack[prop];
    }, this);
  }

  if (this.videoTrack || this.audioTrack) {
    if (this.pendingTracks.length === 1) {
      event.type = this.pendingTracks[0].type;
    } else {
      event.type = 'combined';
    }

    this.emittedTracks += this.pendingTracks.length;
    initSegment = mp4Generator.initSegment(this.pendingTracks);

    // Create a new typed array to hold the init segment containing a moov
    // and track definitions.
    event.initSegment = new Uint8Array(initSegment.byteLength);
    event.initSegment.set(initSegment);

    // Create a new typed array to hold the moof+mdats, appending each
    // moof+mdat (one per track) together.
    event.data = new Uint8Array(this.pendingBytes);

    for (i = 0; i < this.pendingBoxes.length; i++) {
      event.data.set(this.pendingBoxes[i], offset);
      offset += this.pendingBoxes[i].byteLength;
    }

    // Translate caption PTS times into second offsets to match the
    // video timeline for the segment, and add track info.
    for (i = 0; i < this.pendingCaptions.length; i++) {
      caption = this.pendingCaptions[i];
      caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
      caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
      event.captionStreams[caption.stream] = true;
      event.captions.push(caption);
    }

    // Translate ID3 frame PTS times into second offsets to match the
    // video timeline for the segment.
    for (i = 0; i < this.pendingMetadata.length; i++) {
      id3 = this.pendingMetadata[i];
      id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
      event.metadata.push(id3);
    }

    // We add this to every single emitted segment even though we only need
    // it for the first.
    event.metadata.dispatchType = this.metadataStream.dispatchType;

    // Reset stream state.
    this.pendingTracks.length = 0;
    this.videoTrack = null;
    this.pendingBoxes.length = 0;
    this.pendingCaptions.length = 0;
    this.pendingBytes = 0;
    this.pendingMetadata.length = 0;

    // Emit the built segment. We include captions and ID3 tags for backwards
    // compatibility; ideally we should send only video and audio in the data
    // event.
    this.trigger('data', event);

    // Emit each caption to the outside world. Ideally, this would happen
    // immediately on parsing captions, but we need to ensure that video data
    // is sent back first so that caption timing can be adjusted to match
    // video timing.
    for (i = 0; i < event.captions.length; i++) {
      caption = event.captions[i];
      this.trigger('caption', caption);
    }

    // Emit each id3 tag to the outside world. Ideally, this would happen
    // immediately on parsing the tag, but we need to ensure that video data
    // is sent back first so that ID3 frame timing can be adjusted to match
    // video timing.
    for (i = 0; i < event.metadata.length; i++) {
      id3 = event.metadata[i];
      this.trigger('id3Frame', id3);
    }
  }

  // Only emit `done` if all tracks have been flushed and emitted.
  if (this.emittedTracks >= this.numberOfTracks) {
    this.trigger('done');
    this.emittedTracks = 0;
  }
};
16242
// Toggle whether audio and video are remuxed into a single output segment.
_CoalesceStream.prototype.setRemux = function (val) {
  this.remuxTracks = val;
};
16246 /**
16247 * A Stream that expects MP2T binary data as input and produces
16248 * corresponding media segments, suitable for use with Media Source
16249 * Extension (MSE) implementations that support the ISO BMFF byte
16250 * stream format, like Chrome.
16251 */
16252
16253
16254 _Transmuxer = function Transmuxer(options) {
16255 var self = this,
16256 hasFlushed = true,
16257 videoTrack,
16258 audioTrack;
16259
16260 _Transmuxer.prototype.init.call(this);
16261
16262 options = options || {};
16263 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
16264 this.transmuxPipeline_ = {};
16265
// Build the parsing pipeline for raw AAC (ADTS) input: aacStream feeds both
// the audio path (rollover -> adts -> AudioSegmentStream) and the
// timed-metadata path (rollover -> metadataStream -> coalesceStream).
this.setupAacPipeline = function () {
  var pipeline = {};
  this.transmuxPipeline_ = pipeline;
  pipeline.type = 'aac';
  pipeline.metadataStream = new m2ts_1.MetadataStream();

  // Set up the parsing pipeline.
  pipeline.aacStream = new aac();
  pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
  pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
  pipeline.adtsStream = new adts();
  pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
  pipeline.headOfPipeline = pipeline.aacStream;
  pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
  pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
  pipeline.metadataStream.on('timestamp', function (frame) {
    pipeline.aacStream.setTimestamp(frame.timeStamp);
  });
  pipeline.aacStream.on('data', function (data) {
    // Only act on the first audio/timed-metadata data event; afterwards the
    // audio segment stream already exists.
    if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
      return;
    }

    audioTrack = audioTrack || {
      timelineStartInfo: {
        baseMediaDecodeTime: self.baseMediaDecodeTime
      },
      codec: 'adts',
      type: 'audio'
    };

    // Hook up the audio segment stream to the first track with aac data.
    pipeline.coalesceStream.numberOfTracks++;
    pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
    pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
    pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));

    // Set up the final part of the audio pipeline.
    pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);

    // Emit pmt info.
    self.trigger('trackinfo', {
      hasAudio: !!audioTrack,
      hasVideo: !!videoTrack
    });
  });

  // Re-emit any data coming from the coalesce stream to the outside world.
  pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));

  // Let the consumer know we have finished flushing the entire pipeline.
  pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
  addPipelineLogRetriggers(this, pipeline);
};
16314
// Build the parsing pipeline for MPEG2-TS input: packet -> parse ->
// elementary -> rollover, which then demuxes into h264, adts, and metadata
// paths. Segment streams are attached lazily once PMT track metadata arrives.
this.setupTsPipeline = function () {
  var pipeline = {};
  this.transmuxPipeline_ = pipeline;
  pipeline.type = 'ts';
  pipeline.metadataStream = new m2ts_1.MetadataStream();

  // Set up the parsing pipeline.
  pipeline.packetStream = new m2ts_1.TransportPacketStream();
  pipeline.parseStream = new m2ts_1.TransportParseStream();
  pipeline.elementaryStream = new m2ts_1.ElementaryStream();
  pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
  pipeline.adtsStream = new adts();
  pipeline.h264Stream = new H264Stream();
  pipeline.captionStream = new m2ts_1.CaptionStream(options);
  pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
  pipeline.headOfPipeline = pipeline.packetStream;

  // Disassemble MPEG2-TS packets into elementary streams.
  pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream);

  // !!THIS ORDER IS IMPORTANT!!
  // Demux the streams.
  pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
  pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
  pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);

  // Hook up CEA-608/708 caption stream.
  pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
  pipeline.elementaryStream.on('data', function (data) {
    var i;

    if (data.type === 'metadata') {
      i = data.tracks.length;

      // Scan the tracks listed in the metadata.
      while (i--) {
        if (!videoTrack && data.tracks[i].type === 'video') {
          videoTrack = data.tracks[i];
          videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
        } else if (!audioTrack && data.tracks[i].type === 'audio') {
          audioTrack = data.tracks[i];
          audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
        }
      }

      // Hook up the video segment stream to the first track with h264 data.
      if (videoTrack && !pipeline.videoSegmentStream) {
        pipeline.coalesceStream.numberOfTracks++;
        pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
        pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
        pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
          // When video emits timelineStartInfo data after a flush, we forward that
          // info to the AudioSegmentStream, if it exists, because video timeline
          // data takes precedence. Do not do this if keepOriginalTimestamps is set,
          // because this is a particularly subtle form of timestamp alteration.
          if (audioTrack && !options.keepOriginalTimestamps) {
            audioTrack.timelineStartInfo = timelineStartInfo;

            // On the first segment we trim AAC frames that exist before the
            // very earliest DTS we have seen in video because Chrome will
            // interpret any video track with a baseMediaDecodeTime that is
            // non-zero as a gap.
            pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
          }
        });
        pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
        pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
        pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
          if (audioTrack) {
            pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
          }
        });
        pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo'));

        // Set up the final part of the video pipeline.
        pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
      }

      if (audioTrack && !pipeline.audioSegmentStream) {
        // Hook up the audio segment stream to the first track with aac data.
        pipeline.coalesceStream.numberOfTracks++;
        pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
        pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
        pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
        pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo'));

        // Set up the final part of the audio pipeline.
        pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
      }

      // Emit pmt info.
      self.trigger('trackinfo', {
        hasAudio: !!audioTrack,
        hasVideo: !!videoTrack
      });
    }
  });

  // Re-emit any data coming from the coalesce stream to the outside world.
  pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
  pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
    id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
    self.trigger('id3Frame', id3Frame);
  });
  pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption'));

  // Let the consumer know we have finished flushing the entire pipeline.
  pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
  addPipelineLogRetriggers(this, pipeline);
}; // hook up the segment streams once track metadata is delivered
16415
16416
// Set the starting decode time for future segments and reset all per-track
// timestamp bookkeeping (used on discontinuities / seeks).
this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
  var pipeline = this.transmuxPipeline_;

  // With keepOriginalTimestamps the source timeline is preserved, so the
  // configured baseMediaDecodeTime must not change.
  if (!options.keepOriginalTimestamps) {
    this.baseMediaDecodeTime = baseMediaDecodeTime;
  }

  if (audioTrack) {
    audioTrack.timelineStartInfo.dts = undefined;
    audioTrack.timelineStartInfo.pts = undefined;
    trackDecodeInfo.clearDtsInfo(audioTrack);

    // Only the AAC pipeline has a dedicated audio rollover stream.
    if (pipeline.audioTimestampRolloverStream) {
      pipeline.audioTimestampRolloverStream.discontinuity();
    }
  }

  if (videoTrack) {
    // Cached GOPs belong to the old timeline; drop them.
    if (pipeline.videoSegmentStream) {
      pipeline.videoSegmentStream.gopCache_ = [];
    }

    videoTrack.timelineStartInfo.dts = undefined;
    videoTrack.timelineStartInfo.pts = undefined;
    trackDecodeInfo.clearDtsInfo(videoTrack);
    pipeline.captionStream.reset();
  }

  if (pipeline.timestampRolloverStream) {
    pipeline.timestampRolloverStream.discontinuity();
  }
};
16449
16450 this.setAudioAppendStart = function (timestamp) {
16451 if (audioTrack) {
16452 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
16453 }
16454 };
16455
16456 this.setRemux = function (val) {
16457 var pipeline = this.transmuxPipeline_;
16458 options.remux = val;
16459
16460 if (pipeline && pipeline.coalesceStream) {
16461 pipeline.coalesceStream.setRemux(val);
16462 }
16463 };
16464
16465 this.alignGopsWith = function (gopsToAlignWith) {
16466 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
16467 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
16468 }
16469 };
16470
16471 this.getLogTrigger_ = function (key) {
16472 var self = this;
16473 return function (event) {
16474 event.stream = key;
16475 self.trigger('log', event);
16476 };
16477 }; // feed incoming data to the front of the parsing pipeline
16478
16479
16480 this.push = function (data) {
16481 if (hasFlushed) {
16482 var isAac = isLikelyAacData(data);
16483
16484 if (isAac && this.transmuxPipeline_.type !== 'aac') {
16485 this.setupAacPipeline();
16486 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
16487 this.setupTsPipeline();
16488 }
16489
16490 hasFlushed = false;
16491 }
16492
16493 this.transmuxPipeline_.headOfPipeline.push(data);
16494 }; // flush any buffered data
16495
16496
16497 this.flush = function () {
16498 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
16499
16500 this.transmuxPipeline_.headOfPipeline.flush();
16501 };
16502
16503 this.endTimeline = function () {
16504 this.transmuxPipeline_.headOfPipeline.endTimeline();
16505 };
16506
16507 this.reset = function () {
16508 if (this.transmuxPipeline_.headOfPipeline) {
16509 this.transmuxPipeline_.headOfPipeline.reset();
16510 }
16511 }; // Caption data has to be reset when seeking outside buffered range
16512
16513
16514 this.resetCaptions = function () {
16515 if (this.transmuxPipeline_.captionStream) {
16516 this.transmuxPipeline_.captionStream.reset();
16517 }
16518 };
16519 };
16520
  // Transmuxer emits events; inherit on/off/trigger/pipe from Stream.
  _Transmuxer.prototype = new stream();
  // Public surface of the MPEG-TS/AAC -> fMP4 transmuxer module.
  var transmuxer = {
    Transmuxer: _Transmuxer,
    VideoSegmentStream: _VideoSegmentStream,
    AudioSegmentStream: _AudioSegmentStream,
    AUDIO_PROPERTIES: audioProperties,
    VIDEO_PROPERTIES: videoProperties,
    // exported for testing
    generateSegmentTimingInfo: generateSegmentTimingInfo
  };
16531 /**
16532 * mux.js
16533 *
16534 * Copyright (c) Brightcove
16535 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
16536 */
16537
16538 var toUnsigned$3 = function toUnsigned(value) {
16539 return value >>> 0;
16540 };
16541
16542 var toHexString$1 = function toHexString(value) {
16543 return ('00' + value.toString(16)).slice(-2);
16544 };
16545
  // Grouped byte-manipulation helpers shared by the mp4 probe code below.
  var bin = {
    toUnsigned: toUnsigned$3,
    toHexString: toHexString$1
  };
16550
16551 var parseType$1 = function parseType(buffer) {
16552 var result = '';
16553 result += String.fromCharCode(buffer[0]);
16554 result += String.fromCharCode(buffer[1]);
16555 result += String.fromCharCode(buffer[2]);
16556 result += String.fromCharCode(buffer[3]);
16557 return result;
16558 };
16559
  // CommonJS-style aliases consumed by the box-walking helpers below.
  var parseType_1 = parseType$1;
  var toUnsigned$2 = bin.toUnsigned;
16562
16563 var findBox = function findBox(data, path) {
16564 var results = [],
16565 i,
16566 size,
16567 type,
16568 end,
16569 subresults;
16570
16571 if (!path.length) {
16572 // short-circuit the search for empty paths
16573 return null;
16574 }
16575
16576 for (i = 0; i < data.byteLength;) {
16577 size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
16578 type = parseType_1(data.subarray(i + 4, i + 8));
16579 end = size > 1 ? i + size : data.byteLength;
16580
16581 if (type === path[0]) {
16582 if (path.length === 1) {
16583 // this is the end of the path and we've found the box we were
16584 // looking for
16585 results.push(data.subarray(i + 8, end));
16586 } else {
16587 // recursively search for the next box along the path
16588 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
16589
16590 if (subresults.length) {
16591 results = results.concat(subresults);
16592 }
16593 }
16594 }
16595
16596 i = end;
16597 } // we've finished searching all of data
16598
16599
16600 return results;
16601 };
16602
  // CommonJS-style aliases for the box search and numeric helpers.
  var findBox_1 = findBox;
  var toUnsigned$1 = bin.toUnsigned;
  var getUint64$1 = numbers.getUint64;
16606
16607 var tfdt = function tfdt(data) {
16608 var result = {
16609 version: data[0],
16610 flags: new Uint8Array(data.subarray(1, 4))
16611 };
16612
16613 if (result.version === 1) {
16614 result.baseMediaDecodeTime = getUint64$1(data.subarray(4));
16615 } else {
16616 result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);
16617 }
16618
16619 return result;
16620 };
16621
16622 var parseTfdt = tfdt;
16623
16624 var parseSampleFlags = function parseSampleFlags(flags) {
16625 return {
16626 isLeading: (flags[0] & 0x0c) >>> 2,
16627 dependsOn: flags[0] & 0x03,
16628 isDependedOn: (flags[1] & 0xc0) >>> 6,
16629 hasRedundancy: (flags[1] & 0x30) >>> 4,
16630 paddingValue: (flags[1] & 0x0e) >>> 1,
16631 isNonSyncSample: flags[1] & 0x01,
16632 degradationPriority: flags[2] << 8 | flags[3]
16633 };
16634 };
16635
16636 var parseSampleFlags_1 = parseSampleFlags;
16637
  /**
   * Parse a Track Fragment Run (trun) box payload. A trun describes a
   * contiguous run of samples; the 24-bit tr_flags value (kept in `flags`,
   * most significant byte first) controls which optional per-sample fields
   * are present in the stream.
   *
   * @param {Uint8Array} data - the trun payload (after the box header)
   * @return {Object} parsed box: `version`, `flags`, `samples` and, when
   *   present, `dataOffset`
   * @see ISO-BMFF-12/2015, Section 8.8.8
   */
  var trun = function trun(data) {
    var result = {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      samples: []
    },
        view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        // Flag interpretation
        dataOffsetPresent = result.flags[2] & 0x01,
        // compare with 2nd byte of 0x1
        firstSampleFlagsPresent = result.flags[2] & 0x04,
        // compare with 2nd byte of 0x4
        sampleDurationPresent = result.flags[1] & 0x01,
        // compare with 2nd byte of 0x100
        sampleSizePresent = result.flags[1] & 0x02,
        // compare with 2nd byte of 0x200
        sampleFlagsPresent = result.flags[1] & 0x04,
        // compare with 2nd byte of 0x400
        sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
        // compare with 2nd byte of 0x800
        sampleCount = view.getUint32(4),
        offset = 8,
        sample;

    if (dataOffsetPresent) {
      // 32 bit signed integer
      result.dataOffset = view.getInt32(offset);
      offset += 4;
    } // Overrides the flags for the first sample only. The order of
    // optional values will be: duration, size, compositionTimeOffset


    if (firstSampleFlagsPresent && sampleCount) {
      sample = {
        flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
      };
      offset += 4;

      if (sampleDurationPresent) {
        sample.duration = view.getUint32(offset);
        offset += 4;
      }

      if (sampleSizePresent) {
        sample.size = view.getUint32(offset);
        offset += 4;
      }

      if (sampleCompositionTimeOffsetPresent) {
        // Version 1 truns store the composition offset as a signed value.
        if (result.version === 1) {
          sample.compositionTimeOffset = view.getInt32(offset);
        } else {
          sample.compositionTimeOffset = view.getUint32(offset);
        }

        offset += 4;
      }

      result.samples.push(sample);
      sampleCount--;
    }

    // Remaining samples: optional fields appear in the order duration,
    // size, flags, compositionTimeOffset, each gated by its tr_flags bit.
    while (sampleCount--) {
      sample = {};

      if (sampleDurationPresent) {
        sample.duration = view.getUint32(offset);
        offset += 4;
      }

      if (sampleSizePresent) {
        sample.size = view.getUint32(offset);
        offset += 4;
      }

      if (sampleFlagsPresent) {
        sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
        offset += 4;
      }

      if (sampleCompositionTimeOffsetPresent) {
        if (result.version === 1) {
          sample.compositionTimeOffset = view.getInt32(offset);
        } else {
          sample.compositionTimeOffset = view.getUint32(offset);
        }

        offset += 4;
      }

      result.samples.push(sample);
    }

    return result;
  };

  var parseTrun = trun;
16735
16736 var tfhd = function tfhd(data) {
16737 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
16738 result = {
16739 version: data[0],
16740 flags: new Uint8Array(data.subarray(1, 4)),
16741 trackId: view.getUint32(4)
16742 },
16743 baseDataOffsetPresent = result.flags[2] & 0x01,
16744 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
16745 defaultSampleDurationPresent = result.flags[2] & 0x08,
16746 defaultSampleSizePresent = result.flags[2] & 0x10,
16747 defaultSampleFlagsPresent = result.flags[2] & 0x20,
16748 durationIsEmpty = result.flags[0] & 0x010000,
16749 defaultBaseIsMoof = result.flags[0] & 0x020000,
16750 i;
16751 i = 8;
16752
16753 if (baseDataOffsetPresent) {
16754 i += 4; // truncate top 4 bytes
16755 // FIXME: should we read the full 64 bits?
16756
16757 result.baseDataOffset = view.getUint32(12);
16758 i += 4;
16759 }
16760
16761 if (sampleDescriptionIndexPresent) {
16762 result.sampleDescriptionIndex = view.getUint32(i);
16763 i += 4;
16764 }
16765
16766 if (defaultSampleDurationPresent) {
16767 result.defaultSampleDuration = view.getUint32(i);
16768 i += 4;
16769 }
16770
16771 if (defaultSampleSizePresent) {
16772 result.defaultSampleSize = view.getUint32(i);
16773 i += 4;
16774 }
16775
16776 if (defaultSampleFlagsPresent) {
16777 result.defaultSampleFlags = view.getUint32(i);
16778 }
16779
16780 if (durationIsEmpty) {
16781 result.durationIsEmpty = true;
16782 }
16783
16784 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
16785 result.baseDataOffsetIsMoof = true;
16786 }
16787
16788 return result;
16789 };
16790
16791 var parseTfhd = tfhd;
  // Best-available reference to the global object across browser, worker
  // and node environments (transpiled CommonJS shim).
  var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
  var win;

  if (typeof window !== "undefined") {
    win = window;
  } else if (typeof commonjsGlobal !== "undefined") {
    win = commonjsGlobal;
  } else if (typeof self !== "undefined") {
    win = self;
  } else {
    win = {};
  }

  // window-like object used below (e.g. for BigInt) without assuming a browser.
  var window_1 = win;
  // Helpers from the m2ts caption modules used to locate/decode SEI NALs.
  var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
  var CaptionStream = captionStream.CaptionStream;
16808 /**
16809 * Maps an offset in the mdat to a sample based on the the size of the samples.
16810 * Assumes that `parseSamples` has been called first.
16811 *
16812 * @param {Number} offset - The offset into the mdat
16813 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
16814 * @return {?Object} The matching sample, or null if no match was found.
16815 *
16816 * @see ISO-BMFF-12/2015, Section 8.8.8
16817 **/
16818
16819 var mapToSample = function mapToSample(offset, samples) {
16820 var approximateOffset = offset;
16821
16822 for (var i = 0; i < samples.length; i++) {
16823 var sample = samples[i];
16824
16825 if (approximateOffset < sample.size) {
16826 return sample;
16827 }
16828
16829 approximateOffset -= sample.size;
16830 }
16831
16832 return null;
16833 };
16834 /**
16835 * Finds SEI nal units contained in a Media Data Box.
16836 * Assumes that `parseSamples` has been called first.
16837 *
16838 * @param {Uint8Array} avcStream - The bytes of the mdat
16839 * @param {Object[]} samples - The samples parsed out by `parseSamples`
16840 * @param {Number} trackId - The trackId of this video track
16841 * @return {Object[]} seiNals - the parsed SEI NALUs found.
16842 * The contents of the seiNal should match what is expected by
16843 * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
16844 *
16845 * @see ISO-BMFF-12/2015, Section 8.1.1
16846 * @see Rec. ITU-T H.264, 7.3.2.3.1
16847 **/
16848
16849
  var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
    var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
        result = {
      logs: [],
      seiNals: []
    },
        seiNal,
        i,
        length,
        lastMatchedSample;

    // NAL units in an mp4 mdat are length-prefixed: a 32-bit size followed
    // by the NAL payload (no Annex-B start codes).
    for (i = 0; i + 4 < avcStream.length; i += length) {
      length = avcView.getUint32(i);
      i += 4; // Bail if this doesn't appear to be an H264 stream

      if (length <= 0) {
        continue;
      }

      // Low 5 bits of the first payload byte are the nal_unit_type;
      // 0x06 is an SEI message (Rec. ITU-T H.264, 7.4.1).
      switch (avcStream[i] & 0x1F) {
        case 0x06:
          var data = avcStream.subarray(i + 1, i + 1 + length);
          var matchingSample = mapToSample(i, samples);
          seiNal = {
            nalUnitType: 'sei_rbsp',
            size: length,
            data: data,
            escapedRBSP: discardEmulationPreventionBytes(data),
            trackId: trackId
          };

          if (matchingSample) {
            seiNal.pts = matchingSample.pts;
            seiNal.dts = matchingSample.dts;
            lastMatchedSample = matchingSample;
          } else if (lastMatchedSample) {
            // If a matching sample cannot be found, use the last
            // sample's values as they should be as close as possible
            seiNal.pts = lastMatchedSample.pts;
            seiNal.dts = lastMatchedSample.dts;
          } else {
            // No timing info at all; log and drop this NAL.
            result.logs.push({
              level: 'warn',
              message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
            });
            break;
          }

          result.seiNals.push(seiNal);
          break;
      }
    }

    return result;
  };
16905 /**
16906 * Parses sample information out of Track Run Boxes and calculates
16907 * the absolute presentation and decode timestamps of each sample.
16908 *
16909 * @param {Array<Uint8Array>} truns - The Trun Run boxes to be parsed
16910 * @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt
16911 @see ISO-BMFF-12/2015, Section 8.8.12
16912 * @param {Object} tfhd - The parsed Track Fragment Header
16913 * @see inspect.parseTfhd
16914 * @return {Object[]} the parsed samples
16915 *
16916 * @see ISO-BMFF-12/2015, Section 8.8.8
16917 **/
16918
16919
16920 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
16921 var currentDts = baseMediaDecodeTime;
16922 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
16923 var defaultSampleSize = tfhd.defaultSampleSize || 0;
16924 var trackId = tfhd.trackId;
16925 var allSamples = [];
16926 truns.forEach(function (trun) {
16927 // Note: We currently do not parse the sample table as well
16928 // as the trun. It's possible some sources will require this.
16929 // moov > trak > mdia > minf > stbl
16930 var trackRun = parseTrun(trun);
16931 var samples = trackRun.samples;
16932 samples.forEach(function (sample) {
16933 if (sample.duration === undefined) {
16934 sample.duration = defaultSampleDuration;
16935 }
16936
16937 if (sample.size === undefined) {
16938 sample.size = defaultSampleSize;
16939 }
16940
16941 sample.trackId = trackId;
16942 sample.dts = currentDts;
16943
16944 if (sample.compositionTimeOffset === undefined) {
16945 sample.compositionTimeOffset = 0;
16946 }
16947
16948 if (typeof currentDts === 'bigint') {
16949 sample.pts = currentDts + window_1.BigInt(sample.compositionTimeOffset);
16950 currentDts += window_1.BigInt(sample.duration);
16951 } else {
16952 sample.pts = currentDts + sample.compositionTimeOffset;
16953 currentDts += sample.duration;
16954 }
16955 });
16956 allSamples = allSamples.concat(samples);
16957 });
16958 return allSamples;
16959 };
16960 /**
16961 * Parses out caption nals from an FMP4 segment's video tracks.
16962 *
16963 * @param {Uint8Array} segment - The bytes of a single segment
16964 * @param {Number} videoTrackId - The trackId of a video track in the segment
16965 * @return {Object.<Number, Object[]>} A mapping of video trackId to
16966 * a list of seiNals found in that track
16967 **/
16968
16969
  var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
    // To get the samples
    var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units

    var mdats = findBox_1(segment, ['mdat']);
    var captionNals = {};
    var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs

    mdats.forEach(function (mdat, index) {
      var matchingTraf = trafs[index];
      mdatTrafPairs.push({
        mdat: mdat,
        traf: matchingTraf
      });
    });
    mdatTrafPairs.forEach(function (pair) {
      var mdat = pair.mdat;
      var traf = pair.traf;
      var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf

      var headerInfo = parseTfhd(tfhd[0]);
      var trackId = headerInfo.trackId;
      var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf

      var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
      var truns = findBox_1(traf, ['trun']);
      var samples;
      var result; // Only parse video data for the chosen video track

      if (videoTrackId === trackId && truns.length > 0) {
        // Compute absolute pts/dts for each sample, then scan the mdat for
        // SEI NAL units and stamp them with the covering sample's times.
        samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
        result = findSeiNals(mdat, samples, trackId);

        if (!captionNals[trackId]) {
          captionNals[trackId] = {
            seiNals: [],
            logs: []
          };
        }

        // Accumulate across fragments: a segment may hold several
        // moof/mdat pairs for the same track.
        captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
        captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
      }
    });
    return captionNals;
  };
17016 /**
17017 * Parses out inband captions from an MP4 container and returns
17018 * caption objects that can be used by WebVTT and the TextTrack API.
17019 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
17020 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
17021 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
17022 *
17023 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
17024 * @param {Number} trackId - The id of the video track to parse
17025 * @param {Number} timescale - The timescale for the video track from the init segment
17026 *
17027 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
17028 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
17029 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
17030 * @return {String} parsedCaptions[].text - The visible content of the caption
17031 **/
17032
17033
17034 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
17035 var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
17036
17037 if (trackId === null) {
17038 return null;
17039 }
17040
17041 captionNals = parseCaptionNals(segment, trackId);
17042 var trackNals = captionNals[trackId] || {};
17043 return {
17044 seiNals: trackNals.seiNals,
17045 logs: trackNals.logs,
17046 timescale: timescale
17047 };
17048 };
17049 /**
17050 * Converts SEI NALUs into captions that can be used by video.js
17051 **/
17052
17053
  /**
   * Converts SEI NALUs into caption objects that can be used by video.js.
   * Caches segments until both a video trackId and a timescale are known,
   * then pushes parsed SEI NAL units through a CaptionStream and collects
   * the dispatched captions.
   **/
  var CaptionParser = function CaptionParser() {
    var isInitialized = false;
    // Local CaptionStream instance (shadows the module-level captionStream).
    var captionStream;
    // Stores segments seen before trackId and timescale are set
    var segmentCache;
    // Stores video track ID of the track being parsed
    var trackId;
    // Stores the timescale of the track being parsed
    var timescale;
    // Stores captions parsed so far
    var parsedCaptions;
    // Stores whether we are receiving partial data or not
    var parsingPartial;
    /**
     * A method to indicate whether a CaptionParser has been initalized
     * @returns {Boolean}
     **/

    this.isInitialized = function () {
      return isInitialized;
    };
    /**
     * Initializes the underlying CaptionStream, SEI NAL parsing
     * and management, and caption collection
     **/


    this.init = function (options) {
      captionStream = new CaptionStream();
      isInitialized = true;
      parsingPartial = options ? options.isPartial : false; // Collect dispatched captions

      captionStream.on('data', function (event) {
        // Convert to seconds in the source's timescale
        event.startTime = event.startPts / timescale;
        event.endTime = event.endPts / timescale;
        parsedCaptions.captions.push(event);
        parsedCaptions.captionStreams[event.stream] = true;
      });
      captionStream.on('log', function (log) {
        parsedCaptions.logs.push(log);
      });
    };
    /**
     * Determines if a new video track will be selected
     * or if the timescale changed
     * @return {Boolean}
     **/


    this.isNewInit = function (videoTrackIds, timescales) {
      // An empty track list or timescale map is unusable, not "new".
      if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
        return false;
      }

      return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
    };
    /**
     * Parses out SEI captions and interacts with underlying
     * CaptionStream to return dispatched captions
     *
     * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
     * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
     * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
     * @see parseEmbeddedCaptions
     * @see m2ts/caption-stream.js
     **/


    this.parse = function (segment, videoTrackIds, timescales) {
      var parsedData;

      if (!this.isInitialized()) {
        return null; // This is not likely to be a video segment
      } else if (!videoTrackIds || !timescales) {
        return null;
      } else if (this.isNewInit(videoTrackIds, timescales)) {
        // Use the first video track only as there is no
        // mechanism to switch to other video tracks
        trackId = videoTrackIds[0];
        timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
        // data until we have one.
        // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
      } else if (trackId === null || !timescale) {
        segmentCache.push(segment);
        return null;
      } // Now that a timescale and trackId is set, parse cached segments


      while (segmentCache.length > 0) {
        var cachedSegment = segmentCache.shift();
        // Re-entrant call: cached segments now take the normal parse path.
        this.parse(cachedSegment, videoTrackIds, timescales);
      }

      parsedData = parseEmbeddedCaptions(segment, trackId, timescale);

      if (parsedData && parsedData.logs) {
        parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
      }

      if (parsedData === null || !parsedData.seiNals) {
        // Nothing to push; still surface any accumulated log messages.
        if (parsedCaptions.logs.length) {
          return {
            logs: parsedCaptions.logs,
            captions: [],
            captionStreams: []
          };
        }

        return null;
      }

      this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched

      this.flushStream();
      return parsedCaptions;
    };
    /**
     * Pushes SEI NALUs onto CaptionStream
     * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
     * Assumes that `parseCaptionNals` has been called first
     * @see m2ts/caption-stream.js
     **/


    this.pushNals = function (nals) {
      if (!this.isInitialized() || !nals || nals.length === 0) {
        return null;
      }

      nals.forEach(function (nal) {
        captionStream.push(nal);
      });
    };
    /**
     * Flushes underlying CaptionStream to dispatch processed, displayable captions
     * @see m2ts/caption-stream.js
     **/


    this.flushStream = function () {
      if (!this.isInitialized()) {
        return null;
      }

      if (!parsingPartial) {
        captionStream.flush();
      } else {
        captionStream.partialFlush();
      }
    };
    /**
     * Reset caption buckets for new data
     **/


    this.clearParsedCaptions = function () {
      parsedCaptions.captions = [];
      parsedCaptions.captionStreams = {};
      parsedCaptions.logs = [];
    };
    /**
     * Resets underlying CaptionStream
     * @see m2ts/caption-stream.js
     **/


    this.resetCaptionStream = function () {
      if (!this.isInitialized()) {
        return null;
      }

      captionStream.reset();
    };
    /**
     * Convenience method to clear all captions flushed from the
     * CaptionStream and still being parsed
     * @see m2ts/caption-stream.js
     **/


    this.clearAllCaptions = function () {
      this.clearParsedCaptions();
      this.resetCaptionStream();
    };
    /**
     * Reset caption parser
     **/


    this.reset = function () {
      segmentCache = [];
      trackId = null;
      timescale = null;

      if (!parsedCaptions) {
        parsedCaptions = {
          captions: [],
          // CC1, CC2, CC3, CC4
          captionStreams: {},
          logs: []
        };
      } else {
        this.clearParsedCaptions();
      }

      this.resetCaptionStream();
    };

    // Establish initial (empty) state at construction time.
    this.reset();
  };
17265
  // CommonJS-style export of the caption parser plus probe-local aliases.
  var captionParser = CaptionParser;
  var toUnsigned = bin.toUnsigned;
  var toHexString = bin.toHexString;
  var getUint64 = numbers.getUint64;
  // Forward declarations for the mp4 probe functions assigned below.
  var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
17271 /**
17272 * Parses an MP4 initialization segment and extracts the timescale
17273 * values for any declared tracks. Timescale values indicate the
17274 * number of clock ticks per second to assume for time-based values
17275 * elsewhere in the MP4.
17276 *
17277 * To determine the start time of an MP4, you need two pieces of
17278 * information: the timescale unit and the earliest base media decode
17279 * time. Multiple timescales can be specified within an MP4 but the
17280 * base media decode time is always expressed in the timescale from
17281 * the media header box for the track:
17282 * ```
17283 * moov > trak > mdia > mdhd.timescale
17284 * ```
17285 * @param init {Uint8Array} the bytes of the init segment
17286 * @return {object} a hash of track ids to timescale values or null if
17287 * the init segment is malformed.
17288 */
17289
  timescale = function timescale(init) {
    var result = {},
        traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale

    return traks.reduce(function (result, trak) {
      var tkhd, version, index, id, mdhd;
      tkhd = findBox_1(trak, ['tkhd'])[0];

      if (!tkhd) {
        return null;
      }

      // tkhd version 0 uses 32-bit times (track id at byte offset 12);
      // version 1 uses 64-bit times (track id at byte offset 20).
      version = tkhd[0];
      index = version === 0 ? 12 : 20;
      id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
      mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];

      if (!mdhd) {
        return null;
      }

      // The same version-dependent layout locates the mdhd timescale field.
      // NOTE(review): if an earlier trak returned null (malformed), a later
      // iteration would throw on `result[id]` — confirm callers treat a
      // null/throwing probe as "malformed init segment".
      version = mdhd[0];
      index = version === 0 ? 12 : 20;
      result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
      return result;
    }, result);
  };
17317 /**
17318 * Determine the base media decode start time, in seconds, for an MP4
17319 * fragment. If multiple fragments are specified, the earliest time is
17320 * returned.
17321 *
17322 * The base media decode time can be parsed from track fragment
17323 * metadata:
17324 * ```
17325 * moof > traf > tfdt.baseMediaDecodeTime
17326 * ```
17327 * It requires the timescale value from the mdhd to interpret.
17328 *
17329 * @param timescale {object} a hash of track ids to timescale values.
17330 * @return {number} the earliest base media decode start time for the
17331 * fragment, in seconds
17332 */
17333
17334
  startTime = function startTime(timescale, fragment) {
    var trafs; // we need info from two children of each track fragment box

    trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track

    var lowestTime = trafs.reduce(function (acc, traf) {
      var tfhd = findBox_1(traf, ['tfhd'])[0]; // get the track id from the tfhd

      var id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified

      var scale = timescale[id] || 90e3; // get the base media decode time from the tfdt

      var tfdt = findBox_1(traf, ['tfdt'])[0];
      var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);
      var baseTime; // version 1 is 64 bit

      if (tfdt[0] === 1) {
        baseTime = getUint64(tfdt.subarray(4, 12));
      } else {
        baseTime = dv.getUint32(4);
      } // convert base time to seconds if it is a valid number.


      var seconds;

      if (typeof baseTime === 'bigint') {
        // NOTE: BigInt division truncates any fractional seconds.
        seconds = baseTime / window_1.BigInt(scale);
      } else if (typeof baseTime === 'number' && !isNaN(baseTime)) {
        seconds = baseTime / scale;
      }

      // Downgrade BigInt results to Number once safely representable.
      if (seconds < Number.MAX_SAFE_INTEGER) {
        seconds = Number(seconds);
      }

      if (seconds < acc) {
        acc = seconds;
      }

      return acc;
    }, Infinity);
    // No trafs (or no valid times) leaves the Infinity seed; report 0.
    return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;
  };
17378 /**
17379 * Determine the composition start, in seconds, for an MP4
17380 * fragment.
17381 *
17382 * The composition start time of a fragment can be calculated using the base
17383 * media decode time, composition time offset, and timescale, as follows:
17384 *
17385 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
17386 *
17387 * All of the aforementioned information is contained within a media fragment's
17388 * `traf` box, except for timescale info, which comes from the initialization
17389 * segment, so a track id (also contained within a `traf`) is also necessary to
17390 * associate it with a timescale
17391 *
17392 *
17393 * @param timescales {object} - a hash of track ids to timescale values.
17394 * @param fragment {Unit8Array} - the bytes of a media segment
17395 * @return {number} the composition start time for the fragment, in seconds
17396 **/
17397
17398
  compositionStartTime = function compositionStartTime(timescales, fragment) {
    var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
    var baseMediaDecodeTime = 0;
    var compositionTimeOffset = 0;
    var trackId;

    if (trafBoxes && trafBoxes.length) {
      // The spec states that track run samples contained within a `traf` box are contiguous, but
      // it does not explicitly state whether the `traf` boxes themselves are contiguous.
      // We will assume that they are, so we only need the first to calculate start time.
      var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
      var trun = findBox_1(trafBoxes[0], ['trun'])[0];
      var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];

      if (tfhd) {
        var parsedTfhd = parseTfhd(tfhd);
        trackId = parsedTfhd.trackId;
      }

      if (tfdt) {
        var parsedTfdt = parseTfdt(tfdt);
        baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
      }

      if (trun) {
        var parsedTrun = parseTrun(trun);

        // Only the first sample's composition offset matters for the
        // fragment's start time.
        if (parsedTrun.samples && parsedTrun.samples.length) {
          compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
        }
      }
    } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
    // specified.


    var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds

    // Promote both operands to BigInt when the decode time is a BigInt so
    // the arithmetic stays in a single numeric domain (mixing throws).
    if (typeof baseMediaDecodeTime === 'bigint') {
      compositionTimeOffset = window_1.BigInt(compositionTimeOffset);
      timescale = window_1.BigInt(timescale);
    }

    var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;

    // Downgrade a BigInt result to Number once it is safely representable.
    if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {
      result = Number(result);
    }

    return result;
  };
17449 /**
17450 * Find the trackIds of the video tracks in this source.
17451 * Found by parsing the Handler Reference and Track Header Boxes:
17452 * moov > trak > mdia > hdlr
17453 * moov > trak > tkhd
17454 *
17455 * @param {Uint8Array} init - The bytes of the init segment for this source
17456 * @return {Number[]} A list of trackIds
17457 *
17458 * @see ISO-BMFF-12/2015, Section 8.4.3
17459 **/
17460
17461
getVideoTrackIds = function getVideoTrackIds(init) {
  var videoTrackIds = [];

  findBox_1(init, ['moov', 'trak']).forEach(function (trak) {
    var tkhds = findBox_1(trak, ['tkhd']);

    findBox_1(trak, ['mdia', 'hdlr']).forEach(function (hdlr, index) {
      // handler_type occupies bytes 8-11 of the hdlr box
      if (parseType_1(hdlr.subarray(8, 12)) !== 'vide') {
        return;
      }

      var tkhd = tkhds[index];
      var view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      var version = view.getUint8(0);
      // the track_ID field offset depends on the tkhd version (64-bit
      // creation/modification times in version 1 push it further out)
      videoTrackIds.push(version === 0 ? view.getUint32(12) : view.getUint32(20));
    });
  });

  return videoTrackIds;
};
17485
getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
  // mdhd is a FullBox: its first byte is the version, which determines
  // whether the timescale field sits at offset 12 (v0) or 20 (v1)
  var index = mdhd[0] === 0 ? 12 : 20;
  // assemble the big-endian 32-bit timescale and force it unsigned
  return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
};
17492 /**
17493 * Get all the video, audio, and hint tracks from a non fragmented
17494 * mp4 segment
17495 */
17496
17497
getTracks = function getTracks(init) {
  // Walk every `moov > trak` box and build a summary object per track:
  // { id, type ('video'/'audio'/raw handler type), codec, timescale }.
  // Fields are only set when the corresponding box was found.
  var traks = findBox_1(init, ['moov', 'trak']);
  var tracks = [];
  traks.forEach(function (trak) {
    var track = {};
    var tkhd = findBox_1(trak, ['tkhd'])[0];
    var view, tkhdVersion; // id

    if (tkhd) {
      view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      tkhdVersion = view.getUint8(0);
      // track_ID offset depends on the tkhd version (v1 uses 64-bit times)
      track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
    }

    var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type

    if (hdlr) {
      // handler_type occupies bytes 8-11 of the hdlr box
      var type = parseType_1(hdlr.subarray(8, 12));

      if (type === 'vide') {
        track.type = 'video';
      } else if (type === 'soun') {
        track.type = 'audio';
      } else {
        // pass unrecognized handler types (e.g. 'hint') through untouched
        track.type = type;
      }
    } // codec


    var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];

    if (stsd) {
      var sampleDescriptions = stsd.subarray(8); // gives the codec type string

      track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
      var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
      var codecConfig, codecConfigType;

      if (codecBox) {
        // build an RFC 6381 codec string where we know the layout
        // https://tools.ietf.org/html/rfc6381#section-3.3
        if (/^[asm]vc[1-9]$/i.test(track.codec)) {
          // we don't need anything but the "config" parameter of the
          // avc1 codecBox
          codecConfig = codecBox.subarray(78);
          codecConfigType = parseType_1(codecConfig.subarray(4, 8));

          if (codecConfigType === 'avcC' && codecConfig.length > 11) {
            track.codec += '.'; // left padded with zeroes for single digit hex
            // profile idc

            track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags

            track.codec += toHexString(codecConfig[10]); // level idc

            track.codec += toHexString(codecConfig[11]);
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'avc1.4d400d';
          }
        } else if (/^mp4[a,v]$/i.test(track.codec)) {
          // NOTE(review): the character class [a,v] also matches a literal
          // comma ("mp4,"); presumably only 'a'/'v' were intended — confirm
          // against upstream mux.js before tightening.
          // we do not need anything but the streamDescriptor of the mp4a codecBox
          codecConfig = codecBox.subarray(28);
          codecConfigType = parseType_1(codecConfig.subarray(4, 8));

          if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
            track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit

            track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'mp4a.40.2';
          }
        } else {
          // flac, opus, etc
          track.codec = track.codec.toLowerCase();
        }
      }
    }

    var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];

    if (mdhd) {
      track.timescale = getTimescaleFromMediaHeader(mdhd);
    }

    tracks.push(track);
  });
  return tracks;
};
17589
// Public MP4 probing API (fragmented and unfragmented inspection helpers).
var probe$2 = {
  // export mp4 inspector's findBox and parseType for backwards compatibility
  findBox: findBox_1,
  parseType: parseType_1,
  timescale: timescale,
  startTime: startTime,
  compositionStartTime: compositionStartTime,
  videoTrackIds: getVideoTrackIds,
  tracks: getTracks,
  getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
};
17601
// Extract the 13-bit packet identifier from a TS packet header.
var parsePid = function parsePid(packet) {
  // PID is the low 5 bits of byte 1 followed by all 8 bits of byte 2
  return (packet[1] & 0x1f) << 8 | packet[2];
};
17608
// True when this TS packet begins a new PES packet (PUSI bit set).
var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
  // PUSI is bit 6 of the second header byte
  return (packet[1] & 0x40) !== 0;
};
17612
// Return the number of bytes occupied by the adaptation field (including its
// length byte), or 0 when the packet carries payload only. The adaptation
// field is used to add stuffing to PES packets that don't fill a complete TS
// packet, and to carry timing/control data that we do not currently use.
var parseAdaptionField = function parseAdaptionField(packet) {
  // adaptation_field_control is bits 5-4 of byte 3; values 2 and 3 mean an
  // adaptation field precedes the payload, with its length in byte 4
  if ((packet[3] & 0x30) >>> 4 > 0x01) {
    return packet[4] + 1;
  }

  return 0;
};
17626
// Classify a TS packet as 'pat', 'pmt', or 'pes' given the known PMT pid.
// Returns null while the PMT pid is still unknown and the packet is neither.
var parseType = function parseType(packet, pmtPid) {
  var pid = parsePid(packet);

  if (pid === 0) {
    return 'pat';
  }

  if (pid === pmtPid) {
    return 'pmt';
  }

  // once we know the PMT pid, everything else is treated as elementary stream
  return pmtPid ? 'pes' : null;
};
17640
// Extract the program_map_PID from a Program Association Table packet.
var parsePat = function parsePat(packet) {
  var offset = 4 + parseAdaptionField(packet);

  if (parsePayloadUnitStartIndicator(packet)) {
    // skip the pointer_field that precedes the section when PUSI is set
    offset += packet[offset] + 1;
  }

  // the 13-bit PMT pid spans section bytes 10-11
  return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
};
17651
// Parse a Program Map Table packet into a map of elementary_pid -> stream_type.
// Returns undefined for "forward" PMT declarations that are not yet in effect.
var parsePmt = function parsePmt(packet) {
  var programMapTable = {};
  var payloadOffset = 4 + parseAdaptionField(packet);

  if (parsePayloadUnitStartIndicator(packet)) {
    // skip the pointer_field that precedes the section
    payloadOffset += packet[payloadOffset] + 1;
  }

  // PMTs can be sent ahead of the time when they should actually take effect.
  // We don't believe this should ever be the case for HLS but we'll ignore
  // "forward" PMT declarations if we see them. Future PMT declarations have
  // the current_next_indicator set to zero.
  if (!(packet[payloadOffset + 5] & 0x01)) {
    return;
  }

  // the mapping table ends at the end of the current section
  var sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
  var tableEnd = 3 + sectionLength - 4;
  // to determine where the table is, we have to figure out how long the
  // program info descriptors are
  var programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];
  // advance to the first entry in the mapping table
  var cursor = 12 + programInfoLength;

  while (cursor < tableEnd) {
    var entry = payloadOffset + cursor;
    // add an entry that maps the elementary_pid to the stream_type
    programMapTable[(packet[entry + 1] & 0x1f) << 8 | packet[entry + 2]] = packet[entry];
    // move to the next table entry, skipping past the elementary stream
    // descriptors, if present
    cursor += ((packet[entry + 3] & 0x0f) << 8 | packet[entry + 4]) + 5;
  }

  return programMapTable;
};
17691
// Map a PES packet to 'video', 'audio', or 'timed-metadata' using the PMT's
// pid -> stream_type table; null for unrecognized stream types.
var parsePesType = function parsePesType(packet, programMapTable) {
  var streamType = programMapTable[parsePid(packet)];

  if (streamType === streamTypes.H264_STREAM_TYPE) {
    return 'video';
  }

  if (streamType === streamTypes.ADTS_STREAM_TYPE) {
    return 'audio';
  }

  if (streamType === streamTypes.METADATA_STREAM_TYPE) {
    return 'timed-metadata';
  }

  return null;
};
17710
// Extract { pts, dts } from the PES header of a TS packet, or null when the
// packet starts no PES payload, is pure adaptation-field stuffing, or carries
// no timestamps.
var parsePesTime = function parsePesTime(packet) {
  if (!parsePayloadUnitStartIndicator(packet)) {
    return null;
  }

  var offset = 4 + parseAdaptionField(packet);

  if (offset >= packet.byteLength) {
    // From the H 222.0 MPEG-TS spec
    // "For transport stream packets carrying PES packets, stuffing is needed when there
    //  is insufficient PES packet data to completely fill the transport stream packet
    //  payload bytes. Stuffing is accomplished by defining an adaptation field longer than
    //  the sum of the lengths of the data elements in it, so that the payload bytes
    //  remaining after the adaptation field exactly accommodates the available PES packet
    //  data."
    //
    // If the offset is >= the length of the packet, then the packet contains no data
    // and instead is just adaption field stuffing bytes
    return null;
  }

  // PTS and DTS are normally stored as a 33-bit number. Javascript performs
  // all bitwise operations on 32-bit integers but supports a much greater
  // range (52-bits) of integer using standard mathematical operations.
  // We construct a 31-bit value using bitwise operators over the 31 most
  // significant bits, multiply by 4 (a left-shift of 2), then add the final
  // 2 least significant bits of the timestamp (an OR).
  // For the encoding itself see http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
  var readTimestamp = function (start) {
    var upper = (packet[start] & 0x0e) << 27 | (packet[start + 1] & 0xff) << 20 | (packet[start + 2] & 0xfe) << 12 | (packet[start + 3] & 0xff) << 5 | (packet[start + 4] & 0xfe) >>> 3;
    return upper * 4 + ((packet[start + 4] & 0x06) >>> 1);
  };

  // PES packets may be annotated with a PTS value, or a PTS value and a DTS
  // value. Determine what combination of values is available to work with.
  var ptsDtsFlags = packet[offset + 7];
  var pes = null;

  if (ptsDtsFlags & 0xc0) {
    pes = {};
    pes.pts = readTimestamp(offset + 9);
    // DTS defaults to PTS unless it is present separately
    pes.dts = pes.pts;

    if (ptsDtsFlags & 0x40) {
      pes.dts = readTimestamp(offset + 14);
    }
  }

  return pes;
};
17770
// Translate an H.264 nal_unit_type code into its spec name; null for types
// this inspector does not care about.
var parseNalUnitType = function parseNalUnitType(type) {
  var names = {
    0x05: 'slice_layer_without_partitioning_rbsp_idr',
    0x06: 'sei_rbsp',
    0x07: 'seq_parameter_set_rbsp',
    0x08: 'pic_parameter_set_rbsp',
    0x09: 'access_unit_delimiter_rbsp'
  };
  return names[type] || null;
};
17792
// Scan the payload of a TS packet (or reassembled video frame) for NAL units
// and report whether any of them is an IDR slice, i.e. a key frame. Works by
// walking 0x000001 start-code sync sequences three bytes at a time.
var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
  var offset = 4 + parseAdaptionField(packet);
  var frameBuffer = packet.subarray(offset);
  var frameI = 0;
  var frameSyncPoint = 0;
  var foundKeyFrame = false;
  var nalType; // advance the sync point to a NAL start, if necessary

  // NOTE(review): only the third byte is checked for 0x01 here; presumably
  // the two preceding zero bytes are assumed — confirm against upstream.
  for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
    if (frameBuffer[frameSyncPoint + 2] === 1) {
      // the sync point is properly aligned
      frameI = frameSyncPoint + 5;
      break;
    }
  }

  while (frameI < frameBuffer.byteLength) {
    // look at the current byte to determine if we've hit the end of
    // a NAL unit boundary
    switch (frameBuffer[frameI]) {
      case 0:
        // skip past non-sync sequences
        if (frameBuffer[frameI - 1] !== 0) {
          frameI += 2;
          break;
        } else if (frameBuffer[frameI - 2] !== 0) {
          frameI++;
          break;
        }

        // we are inside a run of zeroes that terminates the previous NAL;
        // classify that NAL (unless it was empty) before moving on
        if (frameSyncPoint + 3 !== frameI - 2) {
          nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

          if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
            foundKeyFrame = true;
          }
        } // drop trailing zeroes


        do {
          frameI++;
        } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);

        frameSyncPoint = frameI - 2;
        frameI += 3;
        break;

      case 1:
        // skip past non-sync sequences
        if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
          frameI += 3;
          break;
        }

        // a 0x000001 start code ends the current NAL; classify it
        nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

        if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
          foundKeyFrame = true;
        }

        frameSyncPoint = frameI - 2;
        frameI += 3;
        break;

      default:
        // the current byte isn't a one or zero, so it cannot be part
        // of a sync sequence
        frameI += 3;
        break;
    }
  }

  frameBuffer = frameBuffer.subarray(frameSyncPoint);
  frameI -= frameSyncPoint;
  frameSyncPoint = 0; // parse the final nal

  if (frameBuffer && frameBuffer.byteLength > 3) {
    nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

    if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
      foundKeyFrame = true;
    }
  }

  return foundKeyFrame;
};
17879
// Public MPEG2-TS packet-level probing API.
var probe$1 = {
  parseType: parseType,
  parsePat: parsePat,
  parsePmt: parsePmt,
  parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
  parsePesType: parsePesType,
  parsePesTime: parsePesTime,
  videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
};
// 33-bit PTS/DTS rollover correction helper
var handleRollover = timestampRolloverStream.handleRollover;
var probe = {};
probe.ts = probe$1; // TS packet-level parsing helpers
probe.aac = utils; // ADTS/ID3 helpers for raw AAC streams
// ticks per second on the 90kHz MPEG-TS clock
var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
var MP2T_PACKET_LENGTH = 188,
    // bytes
    SYNC_BYTE = 0x47;
17897 /**
17898 * walks through segment data looking for pat and pmt packets to parse out
17899 * program map table information
17900 */
17901
// Walk the segment packet-by-packet looking for PAT and PMT sections,
// filling in pmt.pid and pmt.table as they are discovered.
var parsePsi_ = function parsePsi_(bytes, pmt) {
  var startIndex = 0;
  var endIndex = MP2T_PACKET_LENGTH;

  while (endIndex < bytes.byteLength) {
    // a valid packet is bracketed by sync bytes exactly 188 bytes apart
    if (bytes[startIndex] !== SYNC_BYTE || bytes[endIndex] !== SYNC_BYTE) {
      // de-synchronized: slide forward one byte at a time until a pair of
      // sync bytes that denote a packet is found
      startIndex++;
      endIndex++;
      continue;
    }

    var packet = bytes.subarray(startIndex, endIndex);
    var type = probe.ts.parseType(packet, pmt.pid);

    if (type === 'pat') {
      pmt.pid = probe.ts.parsePat(packet);
    } else if (type === 'pmt') {
      var table = probe.ts.parsePmt(packet);
      pmt.table = pmt.table || {};
      Object.keys(table).forEach(function (key) {
        pmt.table[key] = table[key];
      });
    }

    startIndex += MP2T_PACKET_LENGTH;
    endIndex += MP2T_PACKET_LENGTH;
  }
};
17941 /**
17942 * walks through the segment data from the start and end to get timing information
17943 * for the first and last audio pes packets
17944 */
17945
17946
// Walk the segment from the start and from the end to capture timing
// information for the first and last audio PES packets, appending each
// parsed { type: 'audio', pts, dts } entry to result.audio.
var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type,
      pesType,
      pusi,
      parsed;
  var endLoop = false; // Start walking from start of segment to get first audio packet

  while (endIndex <= bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    // (the trailing packet is accepted even without a following sync byte)
    if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          // only PES starts carry header timestamps
          if (pesType === 'audio' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex++;
    endIndex++;
  } // Start walking from end of segment to get last audio packet


  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;

  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'audio' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // backward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex--;
    endIndex--;
  }
};
18044 /**
18045 * walks through the segment data from the start and end to get timing information
18046 * for the first and last video pes packets as well as timing information for the first
18047 * key frame.
18048 */
18049
18050
// Walk the segment from the start and from the end to capture timing
// information for the first and last video PES packets, and additionally
// reassemble video frames on the forward pass to locate the first key frame
// (stored, with its timing, as result.firstKeyFrame).
var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type,
      pesType,
      pusi,
      parsed,
      frame,
      i,
      pes;
  var endLoop = false;
  // accumulates TS payloads belonging to the current video frame so it can
  // be scanned for an IDR NAL once complete
  var currentFrame = {
    data: [],
    size: 0
  }; // Start walking from start of segment to get first video packet

  while (endIndex < bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'video') {
            // record the first parsable video PES timestamp
            if (pusi && !endLoop) {
              parsed = probe.ts.parsePesTime(packet);

              if (parsed) {
                parsed.type = 'video';
                result.video.push(parsed);
                endLoop = true;
              }
            }

            if (!result.firstKeyFrame) {
              // a new PES start means the previously accumulated packets
              // form one complete frame — reassemble and inspect it
              if (pusi) {
                if (currentFrame.size !== 0) {
                  frame = new Uint8Array(currentFrame.size);
                  i = 0;

                  while (currentFrame.data.length) {
                    pes = currentFrame.data.shift();
                    frame.set(pes, i);
                    i += pes.byteLength;
                  }

                  if (probe.ts.videoPacketContainsKeyFrame(frame)) {
                    var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
                    // the keyframe seems to work fine with HLS playback
                    // and definitely preferable to a crash with TypeError...

                    if (firstKeyFrame) {
                      result.firstKeyFrame = firstKeyFrame;
                      result.firstKeyFrame.type = 'video';
                    } else {
                      // eslint-disable-next-line
                      console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
                    }
                  }

                  currentFrame.size = 0;
                }
              }

              currentFrame.data.push(packet);
              currentFrame.size += packet.byteLength;
            }
          }

          break;
      }

      // stop once we have both the first timestamp and a key frame
      if (endLoop && result.firstKeyFrame) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex++;
    endIndex++;
  } // Start walking from end of segment to get last video packet


  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;

  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'video' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'video';
              result.video.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // backward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex--;
    endIndex--;
  }
};
18191 /**
18192 * Adjusts the timestamp information for the segment to account for
18193 * rollover and convert to seconds based on pes packet timescale (90khz clock)
18194 */
18195
18196
// Adjust the timestamp information collected for the segment: correct 33-bit
// rollover against a reference timestamp and derive dtsTime/ptsTime in
// seconds based on the 90kHz PES packet clock.
var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
  // convert one timing entry in place
  var convert = function (info, reference) {
    info.dts = handleRollover(info.dts, reference);
    info.pts = handleRollover(info.pts, reference);
    info.dtsTime = info.dts / ONE_SECOND_IN_TS;
    info.ptsTime = info.pts / ONE_SECOND_IN_TS;
  };

  if (segmentInfo.audio && segmentInfo.audio.length) {
    var audioBaseTimestamp = baseTimestamp;

    if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
      // no usable reference supplied; anchor rollover to the first audio dts
      audioBaseTimestamp = segmentInfo.audio[0].dts;
    }

    segmentInfo.audio.forEach(function (info) {
      convert(info, audioBaseTimestamp);
    });
  }

  if (segmentInfo.video && segmentInfo.video.length) {
    var videoBaseTimestamp = baseTimestamp;

    if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
      videoBaseTimestamp = segmentInfo.video[0].dts;
    }

    segmentInfo.video.forEach(function (info) {
      convert(info, videoBaseTimestamp);
    });

    if (segmentInfo.firstKeyFrame) {
      // the key frame shares the video reference timestamp
      convert(segmentInfo.firstKeyFrame, videoBaseTimestamp);
    }
  }
};
18239 /**
18240 * inspects the aac data stream for start and end time information
18241 */
18242
18243
// Inspect a raw AAC (ADTS, optionally ID3-tagged) data stream for start and
// end time information. Returns { audio: [start, end] } entries, or null when
// the stream is truncated or no sample rate/timestamp could be recovered.
var inspectAac_ = function inspectAac_(bytes) {
  var endLoop = false,
      audioCount = 0,
      sampleRate = null,
      timestamp = null,
      frameSize = 0,
      byteIndex = 0,
      packet;

  while (bytes.length - byteIndex >= 3) {
    var type = probe.aac.parseType(bytes, byteIndex);

    switch (type) {
      case 'timed-metadata':
        // Exit early because we don't have enough to parse
        // the ID3 tag header
        if (bytes.length - byteIndex < 10) {
          endLoop = true;
          break;
        }

        frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet
        // NOTE(review): this compares frameSize against the whole buffer
        // rather than the bytes remaining after byteIndex — presumably
        // intentional upstream; confirm before changing.

        if (frameSize > bytes.length) {
          endLoop = true;
          break;
        }

        // only the first ID3 tag's timestamp is needed
        if (timestamp === null) {
          packet = bytes.subarray(byteIndex, byteIndex + frameSize);
          timestamp = probe.aac.parseAacTimestamp(packet);
        }

        byteIndex += frameSize;
        break;

      case 'audio':
        // Exit early because we don't have enough to parse
        // the ADTS frame header
        if (bytes.length - byteIndex < 7) {
          endLoop = true;
          break;
        }

        frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet

        if (frameSize > bytes.length) {
          endLoop = true;
          break;
        }

        // only the first ADTS frame's sample rate is needed
        if (sampleRate === null) {
          packet = bytes.subarray(byteIndex, byteIndex + frameSize);
          sampleRate = probe.aac.parseSampleRate(packet);
        }

        audioCount++;
        byteIndex += frameSize;
        break;

      default:
        // not at a recognizable frame boundary; resync one byte at a time
        byteIndex++;
        break;
    }

    if (endLoop) {
      return null;
    }
  }

  if (sampleRate === null || timestamp === null) {
    return null;
  }

  // 90kHz ticks per audio sample; each AAC frame holds 1024 samples
  var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
  var result = {
    audio: [{
      type: 'audio',
      dts: timestamp,
      pts: timestamp
    }, {
      type: 'audio',
      dts: timestamp + audioCount * 1024 * audioTimescale,
      pts: timestamp + audioCount * 1024 * audioTimescale
    }]
  };
  return result;
};
18334 /**
18335 * inspects the transport stream segment data for start and end time information
18336 * of the audio and video tracks (when present) as well as the first key frame's
18337 * start time.
18338 */
18339
18340
// Inspect a transport stream segment for start/end timing of its audio and
// video tracks (when present) as well as the first key frame's start time.
var inspectTs_ = function inspectTs_(bytes) {
  var pmt = {
    pid: null,
    table: null
  };
  var result = {};
  parsePsi_(bytes, pmt);

  // pmt.table stays null when no PMT was found; guard before iterating
  Object.keys(pmt.table || {}).forEach(function (pid) {
    var type = pmt.table[pid];

    if (type === streamTypes.H264_STREAM_TYPE) {
      result.video = [];
      parseVideoPes_(bytes, pmt, result);

      if (result.video.length === 0) {
        delete result.video;
      }
    } else if (type === streamTypes.ADTS_STREAM_TYPE) {
      result.audio = [];
      parseAudioPes_(bytes, pmt, result);

      if (result.audio.length === 0) {
        delete result.audio;
      }
    }
  });

  return result;
};
18379 /**
18380 * Inspects segment byte data and returns an object with start and end timing information
18381 *
18382 * @param {Uint8Array} bytes The segment byte data
18383 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
18384 * timestamps for rollover. This value must be in 90khz clock.
18385 * @return {Object} Object containing start and end frame timing info of segment.
18386 */
18387
18388
// Inspect segment byte data and return an object with start and end timing
// information, dispatching on container type (raw AAC vs. MPEG2-TS).
//
// @param {Uint8Array} bytes The segment byte data
// @param {Number} baseTimestamp Relative reference timestamp used when
//   adjusting frame timestamps for rollover; must be on the 90kHz clock.
// @return {Object} start/end frame timing info of the segment, or null when
//   no audio or video timing could be recovered.
var inspect = function inspect(bytes, baseTimestamp) {
  var result = probe.aac.isLikelyAacData(bytes) ? inspectAac_(bytes) : inspectTs_(bytes);

  if (!result || !result.audio && !result.video) {
    return null;
  }

  adjustTimestamp_(result, baseTimestamp);
  return result;
};
18406
// Public TS/AAC segment inspector API.
var tsInspector = {
  inspect: inspect,
  parseAudioPes_: parseAudioPes_ // exposed for testing
};
18411 /* global self */
18412
18413 /**
18414 * Re-emits transmuxer events by converting them into messages to the
18415 * world outside the worker.
18416 *
18417 * @param {Object} transmuxer the transmuxer to wire events on
18418 * @private
18419 */
18420
18421 var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
18422 transmuxer.on('data', function (segment) {
18423 // transfer ownership of the underlying ArrayBuffer
18424 // instead of doing a copy to save memory
18425 // ArrayBuffers are transferable but generic TypedArrays are not
18426 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
18427 var initArray = segment.initSegment;
18428 segment.initSegment = {
18429 data: initArray.buffer,
18430 byteOffset: initArray.byteOffset,
18431 byteLength: initArray.byteLength
18432 };
18433 var typedArray = segment.data;
18434 segment.data = typedArray.buffer;
18435 self.postMessage({
18436 action: 'data',
18437 segment: segment,
18438 byteOffset: typedArray.byteOffset,
18439 byteLength: typedArray.byteLength
18440 }, [segment.data]);
18441 });
18442 transmuxer.on('done', function (data) {
18443 self.postMessage({
18444 action: 'done'
18445 });
18446 });
18447 transmuxer.on('gopInfo', function (gopInfo) {
18448 self.postMessage({
18449 action: 'gopInfo',
18450 gopInfo: gopInfo
18451 });
18452 });
18453 transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
18454 var videoSegmentTimingInfo = {
18455 start: {
18456 decode: clock.videoTsToSeconds(timingInfo.start.dts),
18457 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
18458 },
18459 end: {
18460 decode: clock.videoTsToSeconds(timingInfo.end.dts),
18461 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
18462 },
18463 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
18464 };
18465
18466 if (timingInfo.prependedContentDuration) {
18467 videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
18468 }
18469
18470 self.postMessage({
18471 action: 'videoSegmentTimingInfo',
18472 videoSegmentTimingInfo: videoSegmentTimingInfo
18473 });
18474 });
18475 transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
18476 // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
18477 var audioSegmentTimingInfo = {
18478 start: {
18479 decode: clock.videoTsToSeconds(timingInfo.start.dts),
18480 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
18481 },
18482 end: {
18483 decode: clock.videoTsToSeconds(timingInfo.end.dts),
18484 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
18485 },
18486 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
18487 };
18488
18489 if (timingInfo.prependedContentDuration) {
18490 audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
18491 }
18492
18493 self.postMessage({
18494 action: 'audioSegmentTimingInfo',
18495 audioSegmentTimingInfo: audioSegmentTimingInfo
18496 });
18497 });
18498 transmuxer.on('id3Frame', function (id3Frame) {
18499 self.postMessage({
18500 action: 'id3Frame',
18501 id3Frame: id3Frame
18502 });
18503 });
18504 transmuxer.on('caption', function (caption) {
18505 self.postMessage({
18506 action: 'caption',
18507 caption: caption
18508 });
18509 });
18510 transmuxer.on('trackinfo', function (trackInfo) {
18511 self.postMessage({
18512 action: 'trackinfo',
18513 trackInfo: trackInfo
18514 });
18515 });
18516 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
18517 // convert to video TS since we prioritize video time over audio
18518 self.postMessage({
18519 action: 'audioTimingInfo',
18520 audioTimingInfo: {
18521 start: clock.videoTsToSeconds(audioTimingInfo.start),
18522 end: clock.videoTsToSeconds(audioTimingInfo.end)
18523 }
18524 });
18525 });
18526 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
18527 self.postMessage({
18528 action: 'videoTimingInfo',
18529 videoTimingInfo: {
18530 start: clock.videoTsToSeconds(videoTimingInfo.start),
18531 end: clock.videoTsToSeconds(videoTimingInfo.end)
18532 }
18533 });
18534 });
18535 transmuxer.on('log', function (log) {
18536 self.postMessage({
18537 action: 'log',
18538 log: log
18539 });
18540 });
18541 };
18542 /**
18543 * All incoming messages route through this hash. If no function exists
18544 * to handle an incoming message, then we ignore the message.
18545 *
18546 * @class MessageHandlers
18547 * @param {Object} options the options to initialize with
18548 */
18549
18550
18551 var MessageHandlers = /*#__PURE__*/function () {
18552 function MessageHandlers(self, options) {
18553 this.options = options || {};
18554 this.self = self;
18555 this.init();
18556 }
18557 /**
18558 * initialize our web worker and wire all the events.
18559 */
18560
18561
18562 var _proto = MessageHandlers.prototype;
18563
18564 _proto.init = function init() {
18565 if (this.transmuxer) {
18566 this.transmuxer.dispose();
18567 }
18568
18569 this.transmuxer = new transmuxer.Transmuxer(this.options);
18570 wireTransmuxerEvents(this.self, this.transmuxer);
18571 };
18572
18573 _proto.pushMp4Captions = function pushMp4Captions(data) {
18574 if (!this.captionParser) {
18575 this.captionParser = new captionParser();
18576 this.captionParser.init();
18577 }
18578
18579 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
18580 var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
18581 this.self.postMessage({
18582 action: 'mp4Captions',
18583 captions: parsed && parsed.captions || [],
18584 logs: parsed && parsed.logs || [],
18585 data: segment.buffer
18586 }, [segment.buffer]);
18587 };
18588
18589 _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
18590 var timescales = _ref.timescales,
18591 data = _ref.data;
18592 var startTime = probe$2.startTime(timescales, data);
18593 this.self.postMessage({
18594 action: 'probeMp4StartTime',
18595 startTime: startTime,
18596 data: data
18597 }, [data.buffer]);
18598 };
18599
18600 _proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
18601 var data = _ref2.data;
18602 var tracks = probe$2.tracks(data);
18603 this.self.postMessage({
18604 action: 'probeMp4Tracks',
18605 tracks: tracks,
18606 data: data
18607 }, [data.buffer]);
18608 }
18609 /**
18610 * Probe an mpeg2-ts segment to determine the start time of the segment in it's
18611 * internal "media time," as well as whether it contains video and/or audio.
18612 *
18613 * @private
18614 * @param {Uint8Array} bytes - segment bytes
18615 * @param {number} baseStartTime
18616 * Relative reference timestamp used when adjusting frame timestamps for rollover.
18617 * This value should be in seconds, as it's converted to a 90khz clock within the
18618 * function body.
18619 * @return {Object} The start time of the current segment in "media time" as well as
18620 * whether it contains video and/or audio
18621 */
18622 ;
18623
18624 _proto.probeTs = function probeTs(_ref3) {
18625 var data = _ref3.data,
18626 baseStartTime = _ref3.baseStartTime;
18627 var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
18628 var timeInfo = tsInspector.inspect(data, tsStartTime);
18629 var result = null;
18630
18631 if (timeInfo) {
18632 result = {
18633 // each type's time info comes back as an array of 2 times, start and end
18634 hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
18635 hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
18636 };
18637
18638 if (result.hasVideo) {
18639 result.videoStart = timeInfo.video[0].ptsTime;
18640 }
18641
18642 if (result.hasAudio) {
18643 result.audioStart = timeInfo.audio[0].ptsTime;
18644 }
18645 }
18646
18647 this.self.postMessage({
18648 action: 'probeTs',
18649 result: result,
18650 data: data
18651 }, [data.buffer]);
18652 };
18653
18654 _proto.clearAllMp4Captions = function clearAllMp4Captions() {
18655 if (this.captionParser) {
18656 this.captionParser.clearAllCaptions();
18657 }
18658 };
18659
18660 _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
18661 if (this.captionParser) {
18662 this.captionParser.clearParsedCaptions();
18663 }
18664 }
18665 /**
18666 * Adds data (a ts segment) to the start of the transmuxer pipeline for
18667 * processing.
18668 *
18669 * @param {ArrayBuffer} data data to push into the muxer
18670 */
18671 ;
18672
18673 _proto.push = function push(data) {
18674 // Cast array buffer to correct type for transmuxer
18675 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
18676 this.transmuxer.push(segment);
18677 }
18678 /**
18679 * Recreate the transmuxer so that the next segment added via `push`
18680 * start with a fresh transmuxer.
18681 */
18682 ;
18683
18684 _proto.reset = function reset() {
18685 this.transmuxer.reset();
18686 }
18687 /**
18688 * Set the value that will be used as the `baseMediaDecodeTime` time for the
18689 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
18690 * set relative to the first based on the PTS values.
18691 *
18692 * @param {Object} data used to set the timestamp offset in the muxer
18693 */
18694 ;
18695
18696 _proto.setTimestampOffset = function setTimestampOffset(data) {
18697 var timestampOffset = data.timestampOffset || 0;
18698 this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
18699 };
18700
18701 _proto.setAudioAppendStart = function setAudioAppendStart(data) {
18702 this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
18703 };
18704
18705 _proto.setRemux = function setRemux(data) {
18706 this.transmuxer.setRemux(data.remux);
18707 }
18708 /**
18709 * Forces the pipeline to finish processing the last segment and emit it's
18710 * results.
18711 *
18712 * @param {Object} data event data, not really used
18713 */
18714 ;
18715
18716 _proto.flush = function flush(data) {
18717 this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
18718
18719 self.postMessage({
18720 action: 'done',
18721 type: 'transmuxed'
18722 });
18723 };
18724
18725 _proto.endTimeline = function endTimeline() {
18726 this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
18727 // timelines
18728
18729 self.postMessage({
18730 action: 'endedtimeline',
18731 type: 'transmuxed'
18732 });
18733 };
18734
18735 _proto.alignGopsWith = function alignGopsWith(data) {
18736 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
18737 };
18738
18739 return MessageHandlers;
18740 }();
18741 /**
18742 * Our web worker interface so that things can talk to mux.js
18743 * that will be running in a web worker. the scope is passed to this by
18744 * webworkify.
18745 *
18746 * @param {Object} self the scope for the web worker
18747 */
18748
18749
18750 self.onmessage = function (event) {
18751 if (event.data.action === 'init' && event.data.options) {
18752 this.messageHandlers = new MessageHandlers(self, event.data.options);
18753 return;
18754 }
18755
18756 if (!this.messageHandlers) {
18757 this.messageHandlers = new MessageHandlers(self);
18758 }
18759
18760 if (event.data && event.data.action && event.data.action !== 'init') {
18761 if (this.messageHandlers[event.data.action]) {
18762 this.messageHandlers[event.data.action](event.data);
18763 }
18764 }
18765 };
18766 });
18767 var TransmuxWorker = factory(workerCode$1);
18768 /* rollup-plugin-worker-factory end for worker!/Users/ddashkevich/projects/vhs-release/src/transmuxer-worker.js */
18769
18770 var handleData_ = function handleData_(event, transmuxedData, callback) {
18771 var _event$data$segment = event.data.segment,
18772 type = _event$data$segment.type,
18773 initSegment = _event$data$segment.initSegment,
18774 captions = _event$data$segment.captions,
18775 captionStreams = _event$data$segment.captionStreams,
18776 metadata = _event$data$segment.metadata,
18777 videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
18778 videoFramePtsTime = _event$data$segment.videoFramePtsTime;
18779 transmuxedData.buffer.push({
18780 captions: captions,
18781 captionStreams: captionStreams,
18782 metadata: metadata
18783 });
18784 var boxes = event.data.segment.boxes || {
18785 data: event.data.segment.data
18786 };
18787 var result = {
18788 type: type,
18789 // cast ArrayBuffer to TypedArray
18790 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
18791 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
18792 };
18793
18794 if (typeof videoFrameDtsTime !== 'undefined') {
18795 result.videoFrameDtsTime = videoFrameDtsTime;
18796 }
18797
18798 if (typeof videoFramePtsTime !== 'undefined') {
18799 result.videoFramePtsTime = videoFramePtsTime;
18800 }
18801
18802 callback(result);
18803 };
18804 var handleDone_ = function handleDone_(_ref) {
18805 var transmuxedData = _ref.transmuxedData,
18806 callback = _ref.callback;
18807 // Previously we only returned data on data events,
18808 // not on done events. Clear out the buffer to keep that consistent.
18809 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
18810 // have received
18811
18812 callback(transmuxedData);
18813 };
  // Copy GOP info off of a worker 'gopInfo' message onto the accumulator.
  var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
    transmuxedData.gopInfo = event.data.gopInfo;
  };
  /**
   * Run one transmux job: attach a message handler that routes each worker
   * event to the matching `on*` callback from `options`, forward the job's
   * configuration, transfer the segment bytes in, and flush. The handler
   * stays attached until a 'transmuxed'-typed message arrives (and, when
   * `isEndOfTimeline` is set, until 'endedtimeline' has fired); then
   * `onDone` runs and the next queued job is started via `dequeue`.
   *
   * @param {Object} options - the transmuxer, segment bytes, flags, and the
   *        per-event callbacks destructured below
   */
  var processTransmux = function processTransmux(options) {
    var transmuxer = options.transmuxer,
        bytes = options.bytes,
        audioAppendStart = options.audioAppendStart,
        gopsToAlignWith = options.gopsToAlignWith,
        remux = options.remux,
        onData = options.onData,
        onTrackInfo = options.onTrackInfo,
        onAudioTimingInfo = options.onAudioTimingInfo,
        onVideoTimingInfo = options.onVideoTimingInfo,
        onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
        onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
        onId3 = options.onId3,
        onCaptions = options.onCaptions,
        onDone = options.onDone,
        onEndedTimeline = options.onEndedTimeline,
        onTransmuxerLog = options.onTransmuxerLog,
        isEndOfTimeline = options.isEndOfTimeline;
    var transmuxedData = {
      buffer: []
    };
    // when this segment ends a timeline, completion additionally requires the
    // worker's 'endedtimeline' event (see the guard near the bottom of
    // handleMessage)
    var waitForEndedTimelineEvent = isEndOfTimeline;

    var handleMessage = function handleMessage(event) {
      if (transmuxer.currentTransmux !== options) {
        // disposed
        return;
      }

      if (event.data.action === 'data') {
        handleData_(event, transmuxedData, onData);
      }

      if (event.data.action === 'trackinfo') {
        onTrackInfo(event.data.trackInfo);
      }

      if (event.data.action === 'gopInfo') {
        handleGopInfo_(event, transmuxedData);
      }

      if (event.data.action === 'audioTimingInfo') {
        onAudioTimingInfo(event.data.audioTimingInfo);
      }

      if (event.data.action === 'videoTimingInfo') {
        onVideoTimingInfo(event.data.videoTimingInfo);
      }

      if (event.data.action === 'videoSegmentTimingInfo') {
        onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
      }

      if (event.data.action === 'audioSegmentTimingInfo') {
        onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
      }

      if (event.data.action === 'id3Frame') {
        onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
      }

      if (event.data.action === 'caption') {
        onCaptions(event.data.caption);
      }

      if (event.data.action === 'endedtimeline') {
        waitForEndedTimelineEvent = false;
        onEndedTimeline();
      }

      if (event.data.action === 'log') {
        onTransmuxerLog(event.data.log);
      } // wait for the transmuxed event since we may have audio and video


      if (event.data.type !== 'transmuxed') {
        return;
      } // If the "endedtimeline" event has not yet fired, and this segment represents the end
      // of a timeline, that means there may still be data events before the segment
      // processing can be considerred complete. In that case, the final event should be
      // an "endedtimeline" event with the type "transmuxed."


      if (waitForEndedTimelineEvent) {
        return;
      }

      // job complete: detach the handler, report results, start the next job
      transmuxer.onmessage = null;
      handleDone_({
        transmuxedData: transmuxedData,
        callback: onDone
      });
      /* eslint-disable no-use-before-define */

      dequeue(transmuxer);
      /* eslint-enable */
    };

    transmuxer.onmessage = handleMessage;

    if (audioAppendStart) {
      transmuxer.postMessage({
        action: 'setAudioAppendStart',
        appendStart: audioAppendStart
      });
    } // allow empty arrays to be passed to clear out GOPs


    if (Array.isArray(gopsToAlignWith)) {
      transmuxer.postMessage({
        action: 'alignGopsWith',
        gopsToAlignWith: gopsToAlignWith
      });
    }

    if (typeof remux !== 'undefined') {
      transmuxer.postMessage({
        action: 'setRemux',
        remux: remux
      });
    }

    if (bytes.byteLength) {
      var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
      var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
      transmuxer.postMessage({
        action: 'push',
        // Send the typed-array of data as an ArrayBuffer so that
        // it can be sent as a "Transferable" and avoid the costly
        // memory copy
        data: buffer,
        // To recreate the original typed-array, we need information
        // about what portion of the ArrayBuffer it was a view into
        byteOffset: byteOffset,
        byteLength: bytes.byteLength
      }, [buffer]);
    }

    if (isEndOfTimeline) {
      transmuxer.postMessage({
        action: 'endTimeline'
      });
    } // even if we didn't push any bytes, we have to make sure we flush in case we reached
    // the end of the segment


    transmuxer.postMessage({
      action: 'flush'
    });
  };
18967 var dequeue = function dequeue(transmuxer) {
18968 transmuxer.currentTransmux = null;
18969
18970 if (transmuxer.transmuxQueue.length) {
18971 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
18972
18973 if (typeof transmuxer.currentTransmux === 'function') {
18974 transmuxer.currentTransmux();
18975 } else {
18976 processTransmux(transmuxer.currentTransmux);
18977 }
18978 }
18979 };
  // Post a bare action message to the worker, then immediately advance the
  // queue (actions complete synchronously from the queue's point of view).
  var processAction = function processAction(transmuxer, action) {
    transmuxer.postMessage({
      action: action
    });
    dequeue(transmuxer);
  };
18986 var enqueueAction = function enqueueAction(action, transmuxer) {
18987 if (!transmuxer.currentTransmux) {
18988 transmuxer.currentTransmux = action;
18989 processAction(transmuxer, action);
18990 return;
18991 }
18992
18993 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
18994 };
  // Queue a 'reset' so the next pushed segment starts with a fresh transmuxer.
  var reset = function reset(transmuxer) {
    enqueueAction('reset', transmuxer);
  };
  // Queue an 'endTimeline' so the worker closes out its audio/video timelines.
  var endTimeline = function endTimeline(transmuxer) {
    enqueueAction('endTimeline', transmuxer);
  };
19001 var transmux = function transmux(options) {
19002 if (!options.transmuxer.currentTransmux) {
19003 options.transmuxer.currentTransmux = options;
19004 processTransmux(options);
19005 return;
19006 }
19007
19008 options.transmuxer.transmuxQueue.push(options);
19009 };
19010 var createTransmuxer = function createTransmuxer(options) {
19011 var transmuxer = new TransmuxWorker();
19012 transmuxer.currentTransmux = null;
19013 transmuxer.transmuxQueue = [];
19014 var term = transmuxer.terminate;
19015
19016 transmuxer.terminate = function () {
19017 transmuxer.currentTransmux = null;
19018 transmuxer.transmuxQueue.length = 0;
19019 return term.call(transmuxer);
19020 };
19021
19022 transmuxer.postMessage({
19023 action: 'init',
19024 options: options
19025 });
19026 return transmuxer;
19027 };
  // Public facade over the transmux-worker queueing helpers defined above.
  var segmentTransmuxer = {
    reset: reset,
    endTimeline: endTimeline,
    transmux: transmux,
    createTransmuxer: createTransmuxer
  };
19034
19035 var workerCallback = function workerCallback(options) {
19036 var transmuxer = options.transmuxer;
19037 var endAction = options.endAction || options.action;
19038 var callback = options.callback;
19039
19040 var message = _extends_1({}, options, {
19041 endAction: null,
19042 transmuxer: null,
19043 callback: null
19044 });
19045
19046 var listenForEndEvent = function listenForEndEvent(event) {
19047 if (event.data.action !== endAction) {
19048 return;
19049 }
19050
19051 transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
19052
19053 if (event.data.data) {
19054 event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
19055
19056 if (options.data) {
19057 options.data = event.data.data;
19058 }
19059 }
19060
19061 callback(event.data);
19062 };
19063
19064 transmuxer.addEventListener('message', listenForEndEvent);
19065
19066 if (options.data) {
19067 var isArrayBuffer = options.data instanceof ArrayBuffer;
19068 message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
19069 message.byteLength = options.data.byteLength;
19070 var transfers = [isArrayBuffer ? options.data : options.data.buffer];
19071 transmuxer.postMessage(message, transfers);
19072 } else {
19073 transmuxer.postMessage(message);
19074 }
19075 };
19076
  // Error codes attached to request-failure objects; negative values mark
  // timeouts/aborts so they can be told apart from ordinary failures.
  var REQUEST_ERRORS = {
    FAILURE: 2,
    TIMEOUT: -101,
    ABORTED: -102
  };
19082 /**
19083 * Abort all requests
19084 *
19085 * @param {Object} activeXhrs - an object that tracks all XHR requests
19086 */
19087
19088 var abortAll = function abortAll(activeXhrs) {
19089 activeXhrs.forEach(function (xhr) {
19090 xhr.abort();
19091 });
19092 };
19093 /**
19094 * Gather important bandwidth stats once a request has completed
19095 *
19096 * @param {Object} request - the XHR request from which to gather stats
19097 */
19098
19099
19100 var getRequestStats = function getRequestStats(request) {
19101 return {
19102 bandwidth: request.bandwidth,
19103 bytesReceived: request.bytesReceived || 0,
19104 roundTripTime: request.roundTripTime || 0
19105 };
19106 };
19107 /**
19108 * If possible gather bandwidth stats as a request is in
19109 * progress
19110 *
19111 * @param {Event} progressEvent - an event object from an XHR's progress event
19112 */
19113
19114
19115 var getProgressStats = function getProgressStats(progressEvent) {
19116 var request = progressEvent.target;
19117 var roundTripTime = Date.now() - request.requestTime;
19118 var stats = {
19119 bandwidth: Infinity,
19120 bytesReceived: 0,
19121 roundTripTime: roundTripTime || 0
19122 };
19123 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
19124 // because we should only use bandwidth stats on progress to determine when
19125 // abort a request early due to insufficient bandwidth
19126
19127 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
19128 return stats;
19129 };
19130 /**
19131 * Handle all error conditions in one place and return an object
19132 * with all the information
19133 *
19134 * @param {Error|null} error - if non-null signals an error occured with the XHR
19135 * @param {Object} request - the XHR request that possibly generated the error
19136 */
19137
19138
19139 var handleErrors = function handleErrors(error, request) {
19140 if (request.timedout) {
19141 return {
19142 status: request.status,
19143 message: 'HLS request timed-out at URL: ' + request.uri,
19144 code: REQUEST_ERRORS.TIMEOUT,
19145 xhr: request
19146 };
19147 }
19148
19149 if (request.aborted) {
19150 return {
19151 status: request.status,
19152 message: 'HLS request aborted at URL: ' + request.uri,
19153 code: REQUEST_ERRORS.ABORTED,
19154 xhr: request
19155 };
19156 }
19157
19158 if (error) {
19159 return {
19160 status: request.status,
19161 message: 'HLS request errored at URL: ' + request.uri,
19162 code: REQUEST_ERRORS.FAILURE,
19163 xhr: request
19164 };
19165 }
19166
19167 if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
19168 return {
19169 status: request.status,
19170 message: 'Empty HLS response at URL: ' + request.uri,
19171 code: REQUEST_ERRORS.FAILURE,
19172 xhr: request
19173 };
19174 }
19175
19176 return null;
19177 };
19178 /**
19179 * Handle responses for key data and convert the key data to the correct format
19180 * for the decryption step later
19181 *
19182 * @param {Object} segment - a simplified copy of the segmentInfo object
19183 * from SegmentLoader
19184 * @param {Array} objects - objects to add the key bytes to.
19185 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19186 * this request
19187 */
19188
19189
19190 var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
19191 return function (error, request) {
19192 var response = request.response;
19193 var errorObj = handleErrors(error, request);
19194
19195 if (errorObj) {
19196 return finishProcessingFn(errorObj, segment);
19197 }
19198
19199 if (response.byteLength !== 16) {
19200 return finishProcessingFn({
19201 status: request.status,
19202 message: 'Invalid HLS key at URL: ' + request.uri,
19203 code: REQUEST_ERRORS.FAILURE,
19204 xhr: request
19205 }, segment);
19206 }
19207
19208 var view = new DataView(response);
19209 var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
19210
19211 for (var i = 0; i < objects.length; i++) {
19212 objects[i].bytes = bytes;
19213 }
19214
19215 return finishProcessingFn(null, segment);
19216 };
19217 };
19218
19219 var parseInitSegment = function parseInitSegment(segment, _callback) {
19220 var type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
19221 // only know how to parse mp4 init segments at the moment
19222
19223 if (type !== 'mp4') {
19224 var uri = segment.map.resolvedUri || segment.map.uri;
19225 return _callback({
19226 internal: true,
19227 message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
19228 code: REQUEST_ERRORS.FAILURE
19229 });
19230 }
19231
19232 workerCallback({
19233 action: 'probeMp4Tracks',
19234 data: segment.map.bytes,
19235 transmuxer: segment.transmuxer,
19236 callback: function callback(_ref) {
19237 var tracks = _ref.tracks,
19238 data = _ref.data;
19239 // transfer bytes back to us
19240 segment.map.bytes = data;
19241 tracks.forEach(function (track) {
19242 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
19243
19244 if (segment.map.tracks[track.type]) {
19245 return;
19246 }
19247
19248 segment.map.tracks[track.type] = track;
19249
19250 if (typeof track.id === 'number' && track.timescale) {
19251 segment.map.timescales = segment.map.timescales || {};
19252 segment.map.timescales[track.id] = track.timescale;
19253 }
19254 });
19255 return _callback(null);
19256 }
19257 });
19258 };
19259 /**
19260 * Handle init-segment responses
19261 *
19262 * @param {Object} segment - a simplified copy of the segmentInfo object
19263 * from SegmentLoader
19264 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19265 * this request
19266 */
19267
19268
19269 var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
19270 var segment = _ref2.segment,
19271 finishProcessingFn = _ref2.finishProcessingFn;
19272 return function (error, request) {
19273 var errorObj = handleErrors(error, request);
19274
19275 if (errorObj) {
19276 return finishProcessingFn(errorObj, segment);
19277 }
19278
19279 var bytes = new Uint8Array(request.response); // init segment is encypted, we will have to wait
19280 // until the key request is done to decrypt.
19281
19282 if (segment.map.key) {
19283 segment.map.encryptedBytes = bytes;
19284 return finishProcessingFn(null, segment);
19285 }
19286
19287 segment.map.bytes = bytes;
19288 parseInitSegment(segment, function (parseError) {
19289 if (parseError) {
19290 parseError.xhr = request;
19291 parseError.status = request.status;
19292 return finishProcessingFn(parseError, segment);
19293 }
19294
19295 finishProcessingFn(null, segment);
19296 });
19297 };
19298 };
19299 /**
19300 * Response handler for segment-requests being sure to set the correct
19301 * property depending on whether the segment is encryped or not
19302 * Also records and keeps track of stats that are used for ABR purposes
19303 *
19304 * @param {Object} segment - a simplified copy of the segmentInfo object
19305 * from SegmentLoader
19306 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19307 * this request
19308 */
19309
19310
19311 var handleSegmentResponse = function handleSegmentResponse(_ref3) {
19312 var segment = _ref3.segment,
19313 finishProcessingFn = _ref3.finishProcessingFn,
19314 responseType = _ref3.responseType;
19315 return function (error, request) {
19316 var errorObj = handleErrors(error, request);
19317
19318 if (errorObj) {
19319 return finishProcessingFn(errorObj, segment);
19320 }
19321
19322 var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
19323 // thrown for two primary cases:
19324 // 1. the mime type override stops working, or is not implemented for a specific
19325 // browser
19326 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
19327 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
19328 segment.stats = getRequestStats(request);
19329
19330 if (segment.key) {
19331 segment.encryptedBytes = new Uint8Array(newBytes);
19332 } else {
19333 segment.bytes = new Uint8Array(newBytes);
19334 }
19335
19336 return finishProcessingFn(null, segment);
19337 };
19338 };
19339
  /**
   * Transmux segment bytes and notify the caller through the provided
   * callbacks. Before transmuxing, the segment is probed (in the worker) for
   * track info and start times; probe-derived values are preferred, so the
   * corresponding callbacks are nulled once used to keep the transmuxer from
   * reporting them a second time.
   *
   * @param {Object} _ref4 - the segment, its bytes, and notification callbacks
   */
  var transmuxAndNotify = function transmuxAndNotify(_ref4) {
    var segment = _ref4.segment,
        bytes = _ref4.bytes,
        trackInfoFn = _ref4.trackInfoFn,
        timingInfoFn = _ref4.timingInfoFn,
        videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
        audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
        id3Fn = _ref4.id3Fn,
        captionsFn = _ref4.captionsFn,
        isEndOfTimeline = _ref4.isEndOfTimeline,
        endedTimelineFn = _ref4.endedTimelineFn,
        dataFn = _ref4.dataFn,
        doneFn = _ref4.doneFn,
        onTransmuxerLog = _ref4.onTransmuxerLog;
    var fmp4Tracks = segment.map && segment.map.tracks || {};
    var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
    // One reason for this is that in the case of full segments, we want to trust start
    // times from the probe, rather than the transmuxer.

    var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
    var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
    var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
    var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');

    // kicks off the actual transmux once the probe below has completed
    var finish = function finish() {
      return transmux({
        bytes: bytes,
        transmuxer: segment.transmuxer,
        audioAppendStart: segment.audioAppendStart,
        gopsToAlignWith: segment.gopsToAlignWith,
        remux: isMuxed,
        onData: function onData(result) {
          // 'combined' (audio+video) results are treated as video downstream
          result.type = result.type === 'combined' ? 'video' : result.type;
          dataFn(segment, result);
        },
        onTrackInfo: function onTrackInfo(trackInfo) {
          if (trackInfoFn) {
            if (isMuxed) {
              trackInfo.isMuxed = true;
            }

            trackInfoFn(segment, trackInfo);
          }
        },
        onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
          // we only want the first start value we encounter
          if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
            audioStartFn(audioTimingInfo.start);
            audioStartFn = null;
          } // we want to continually update the end time


          if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
            audioEndFn(audioTimingInfo.end);
          }
        },
        onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
          // we only want the first start value we encounter
          if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
            videoStartFn(videoTimingInfo.start);
            videoStartFn = null;
          } // we want to continually update the end time


          if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
            videoEndFn(videoTimingInfo.end);
          }
        },
        onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
          videoSegmentTimingInfoFn(videoSegmentTimingInfo);
        },
        onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
          audioSegmentTimingInfoFn(audioSegmentTimingInfo);
        },
        onId3: function onId3(id3Frames, dispatchType) {
          id3Fn(segment, id3Frames, dispatchType);
        },
        onCaptions: function onCaptions(captions) {
          captionsFn(segment, [captions]);
        },
        isEndOfTimeline: isEndOfTimeline,
        onEndedTimeline: function onEndedTimeline() {
          endedTimelineFn();
        },
        onTransmuxerLog: onTransmuxerLog,
        onDone: function onDone(result) {
          if (!doneFn) {
            return;
          }

          result.type = result.type === 'combined' ? 'video' : result.type;
          doneFn(null, segment, result);
        }
      });
    }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
    // Meaning cached frame data may corrupt our notion of where this segment
    // really starts. To get around this, probe for the info needed.


    workerCallback({
      action: 'probeTs',
      transmuxer: segment.transmuxer,
      data: bytes,
      baseStartTime: segment.baseStartTime,
      callback: function callback(data) {
        // the worker transfers the bytes back to us; reclaim them
        segment.bytes = bytes = data.data;
        var probeResult = data.result;

        if (probeResult) {
          trackInfoFn(segment, {
            hasAudio: probeResult.hasAudio,
            hasVideo: probeResult.hasVideo,
            isMuxed: isMuxed
          });
          trackInfoFn = null;

          if (probeResult.hasAudio && !isMuxed) {
            audioStartFn(probeResult.audioStart);
          }

          if (probeResult.hasVideo) {
            videoStartFn(probeResult.videoStart);
          }

          // probe-derived start times win; keep the transmuxer from
          // overwriting them by nulling the start callbacks now
          audioStartFn = null;
          videoStartFn = null;
        }

        finish();
      }
    });
  };
19472
  /**
   * Inspects freshly downloaded segment bytes and routes them down the correct
   * processing path:
   *   - fmp4 segments: probe for the start time and captions, then notify
   *     callers without transmuxing
   *   - segments with no transmuxer (e.g. VTT): finish immediately
   *   - ts/aac segments: hand off to transmuxAndNotify
   * All of the *Fn arguments are caller-supplied callbacks that receive
   * segment data and metadata as they become available.
   */
  var handleSegmentBytes = function handleSegmentBytes(_ref5) {
    var segment = _ref5.segment,
        bytes = _ref5.bytes,
        trackInfoFn = _ref5.trackInfoFn,
        timingInfoFn = _ref5.timingInfoFn,
        videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
        audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
        id3Fn = _ref5.id3Fn,
        captionsFn = _ref5.captionsFn,
        isEndOfTimeline = _ref5.isEndOfTimeline,
        endedTimelineFn = _ref5.endedTimelineFn,
        dataFn = _ref5.dataFn,
        doneFn = _ref5.doneFn,
        onTransmuxerLog = _ref5.onTransmuxerLog;
    var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
    // We should have a handler that fetches the number of bytes required
    // to check if something is fmp4. This will allow us to save bandwidth
    // because we can only blacklist a playlist and abort requests
    // by codec after trackinfo triggers.

    if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
      segment.isFmp4 = true;
      var tracks = segment.map.tracks;
      var trackInfo = {
        isFmp4: true,
        hasVideo: !!tracks.video,
        hasAudio: !!tracks.audio
      }; // if we have a audio track, with a codec that is not set to
      // encrypted audio

      if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
        trackInfo.audioCodec = tracks.audio.codec;
      } // if we have a video track, with a codec that is not set to
      // encrypted video


      if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
        trackInfo.videoCodec = tracks.video.codec;
      }

      if (tracks.video && tracks.audio) {
        trackInfo.isMuxed = true;
      } // since we don't support appending fmp4 data on progress, we know we have the full
      // segment here


      trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
      // time. The end time can be roughly calculated by the receiver using the duration.
      //
      // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
      // that is the true start of the segment (where the playback engine should begin
      // decoding).

      // Shared completion path for both the captions and no-captions branches
      // below: emit the segment data, any captions, then signal done.
      var finishLoading = function finishLoading(captions) {
        // if the track still has audio at this point it is only possible
        // for it to be audio only. See `tracks.video && tracks.audio` if statement
        // above.
        // we make sure to use segment.bytes here as that
        dataFn(segment, {
          data: bytesAsUint8Array,
          type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
        });

        if (captions && captions.length) {
          captionsFn(segment, captions);
        }

        doneFn(null, segment, {});
      };

      workerCallback({
        action: 'probeMp4StartTime',
        timescales: segment.map.timescales,
        data: bytesAsUint8Array,
        transmuxer: segment.transmuxer,
        callback: function callback(_ref6) {
          var data = _ref6.data,
              startTime = _ref6.startTime;
          // transfer bytes back to us
          // (the worker transferred the underlying buffer, so re-bind both
          // local views to the returned copy before using them again)
          bytes = data.buffer;
          segment.bytes = bytesAsUint8Array = data;

          if (trackInfo.hasAudio && !trackInfo.isMuxed) {
            timingInfoFn(segment, 'audio', 'start', startTime);
          }

          if (trackInfo.hasVideo) {
            timingInfoFn(segment, 'video', 'start', startTime);
          } // Run through the CaptionParser in case there are captions.
          // Initialize CaptionParser if it hasn't been yet


          if (!tracks.video || !data.byteLength || !segment.transmuxer) {
            finishLoading();
            return;
          }

          workerCallback({
            action: 'pushMp4Captions',
            endAction: 'mp4Captions',
            transmuxer: segment.transmuxer,
            data: bytesAsUint8Array,
            timescales: segment.map.timescales,
            trackIds: [tracks.video.id],
            callback: function callback(message) {
              // transfer bytes back to us
              bytes = message.data.buffer;
              segment.bytes = bytesAsUint8Array = message.data;
              message.logs.forEach(function (log) {
                onTransmuxerLog(videojs__default["default"].mergeOptions(log, {
                  stream: 'mp4CaptionParser'
                }));
              });
              finishLoading(message.captions);
            }
          });
        }
      });
      return;
    } // VTT or other segments that don't need processing


    if (!segment.transmuxer) {
      doneFn(null, segment, {});
      return;
    }

    if (typeof segment.container === 'undefined') {
      segment.container = detectContainerForBytes(bytesAsUint8Array);
    }

    // anything that is neither ts nor aac cannot be transmuxed; report empty
    // track info and finish
    if (segment.container !== 'ts' && segment.container !== 'aac') {
      trackInfoFn(segment, {
        hasAudio: false,
        hasVideo: false
      });
      doneFn(null, segment, {});
      return;
    } // ts or aac


    transmuxAndNotify({
      segment: segment,
      bytes: bytes,
      trackInfoFn: trackInfoFn,
      timingInfoFn: timingInfoFn,
      videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
      id3Fn: id3Fn,
      captionsFn: captionsFn,
      isEndOfTimeline: isEndOfTimeline,
      endedTimelineFn: endedTimelineFn,
      dataFn: dataFn,
      doneFn: doneFn,
      onTransmuxerLog: onTransmuxerLog
    });
  };
19630
19631 var decrypt = function decrypt(_ref7, callback) {
19632 var id = _ref7.id,
19633 key = _ref7.key,
19634 encryptedBytes = _ref7.encryptedBytes,
19635 decryptionWorker = _ref7.decryptionWorker;
19636
19637 var decryptionHandler = function decryptionHandler(event) {
19638 if (event.data.source === id) {
19639 decryptionWorker.removeEventListener('message', decryptionHandler);
19640 var decrypted = event.data.decrypted;
19641 callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
19642 }
19643 };
19644
19645 decryptionWorker.addEventListener('message', decryptionHandler);
19646 var keyBytes;
19647
19648 if (key.bytes.slice) {
19649 keyBytes = key.bytes.slice();
19650 } else {
19651 keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
19652 } // incrementally decrypt the bytes
19653
19654
19655 decryptionWorker.postMessage(createTransferableMessage({
19656 source: id,
19657 encrypted: encryptedBytes,
19658 key: keyBytes,
19659 iv: key.iv
19660 }), [encryptedBytes.buffer, keyBytes.buffer]);
19661 };
19662 /**
19663 * Decrypt the segment via the decryption web worker
19664 *
19665 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
19666 * routines
19667 * @param {Object} segment - a simplified copy of the segmentInfo object
19668 * from SegmentLoader
19669 * @param {Function} trackInfoFn - a callback that receives track info
19670 * @param {Function} timingInfoFn - a callback that receives timing info
19671 * @param {Function} videoSegmentTimingInfoFn
19672 * a callback that receives video timing info based on media times and
19673 * any adjustments made by the transmuxer
19674 * @param {Function} audioSegmentTimingInfoFn
19675 * a callback that receives audio timing info based on media times and
19676 * any adjustments made by the transmuxer
19677 * @param {boolean} isEndOfTimeline
19678 * true if this segment represents the last segment in a timeline
19679 * @param {Function} endedTimelineFn
19680 * a callback made when a timeline is ended, will only be called if
19681 * isEndOfTimeline is true
19682 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19683 * and ready to use
19684 * @param {Function} doneFn - a callback that is executed after decryption has completed
19685 */
19686
19687
19688 var decryptSegment = function decryptSegment(_ref8) {
19689 var decryptionWorker = _ref8.decryptionWorker,
19690 segment = _ref8.segment,
19691 trackInfoFn = _ref8.trackInfoFn,
19692 timingInfoFn = _ref8.timingInfoFn,
19693 videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
19694 audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
19695 id3Fn = _ref8.id3Fn,
19696 captionsFn = _ref8.captionsFn,
19697 isEndOfTimeline = _ref8.isEndOfTimeline,
19698 endedTimelineFn = _ref8.endedTimelineFn,
19699 dataFn = _ref8.dataFn,
19700 doneFn = _ref8.doneFn,
19701 onTransmuxerLog = _ref8.onTransmuxerLog;
19702 decrypt({
19703 id: segment.requestId,
19704 key: segment.key,
19705 encryptedBytes: segment.encryptedBytes,
19706 decryptionWorker: decryptionWorker
19707 }, function (decryptedBytes) {
19708 segment.bytes = decryptedBytes;
19709 handleSegmentBytes({
19710 segment: segment,
19711 bytes: segment.bytes,
19712 trackInfoFn: trackInfoFn,
19713 timingInfoFn: timingInfoFn,
19714 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19715 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19716 id3Fn: id3Fn,
19717 captionsFn: captionsFn,
19718 isEndOfTimeline: isEndOfTimeline,
19719 endedTimelineFn: endedTimelineFn,
19720 dataFn: dataFn,
19721 doneFn: doneFn,
19722 onTransmuxerLog: onTransmuxerLog
19723 });
19724 });
19725 };
19726 /**
19727 * This function waits for all XHRs to finish (with either success or failure)
 * before continuing processing via its callback. The function gathers errors
19729 * from each request into a single errors array so that the error status for
19730 * each request can be examined later.
19731 *
19732 * @param {Object} activeXhrs - an object that tracks all XHR requests
19733 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
19734 * routines
19735 * @param {Function} trackInfoFn - a callback that receives track info
19736 * @param {Function} timingInfoFn - a callback that receives timing info
19737 * @param {Function} videoSegmentTimingInfoFn
19738 * a callback that receives video timing info based on media times and
19739 * any adjustments made by the transmuxer
19740 * @param {Function} audioSegmentTimingInfoFn
19741 * a callback that receives audio timing info based on media times and
19742 * any adjustments made by the transmuxer
19743 * @param {Function} id3Fn - a callback that receives ID3 metadata
19744 * @param {Function} captionsFn - a callback that receives captions
19745 * @param {boolean} isEndOfTimeline
19746 * true if this segment represents the last segment in a timeline
19747 * @param {Function} endedTimelineFn
19748 * a callback made when a timeline is ended, will only be called if
19749 * isEndOfTimeline is true
19750 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19751 * and ready to use
19752 * @param {Function} doneFn - a callback that is executed after all resources have been
19753 * downloaded and any decryption completed
19754 */
19755
19756
  /**
   * Returns a per-request-group completion callback.  Each XHR in the group
   * invokes it once; after the last success the segment is (optionally)
   * decrypted and processed.  The first error aborts the whole group and
   * reports via doneFn exactly once.
   */
  var waitForCompletion = function waitForCompletion(_ref9) {
    var activeXhrs = _ref9.activeXhrs,
        decryptionWorker = _ref9.decryptionWorker,
        trackInfoFn = _ref9.trackInfoFn,
        timingInfoFn = _ref9.timingInfoFn,
        videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
        audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
        id3Fn = _ref9.id3Fn,
        captionsFn = _ref9.captionsFn,
        isEndOfTimeline = _ref9.isEndOfTimeline,
        endedTimelineFn = _ref9.endedTimelineFn,
        dataFn = _ref9.dataFn,
        doneFn = _ref9.doneFn,
        onTransmuxerLog = _ref9.onTransmuxerLog;
    // number of requests in the group that have completed successfully
    var count = 0;
    // latch: after the first error, every later invocation is a no-op
    var didError = false;
    return function (error, segment) {
      if (didError) {
        return;
      }

      if (error) {
        didError = true; // If there are errors, we have to abort any outstanding requests

        abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
        // handle the aborted events from those requests, there are some cases where we may
        // never get an aborted event. For instance, if the network connection is lost and
        // there were two requests, the first may have triggered an error immediately, while
        // the second request remains unsent. In that case, the aborted algorithm will not
        // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
        //
        // We also can't rely on the ready state of the XHR, since the request that
        // triggered the connection error may also show as a ready state of 0 (unsent).
        // Therefore, we have to finish this group of requests immediately after the first
        // seen error.

        return doneFn(error, segment);
      }

      count += 1;

      if (count === activeXhrs.length) {
        // All requests succeeded; decide how to finish processing the segment.
        var segmentFinish = function segmentFinish() {
          if (segment.encryptedBytes) {
            return decryptSegment({
              decryptionWorker: decryptionWorker,
              segment: segment,
              trackInfoFn: trackInfoFn,
              timingInfoFn: timingInfoFn,
              videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
              audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
              id3Fn: id3Fn,
              captionsFn: captionsFn,
              isEndOfTimeline: isEndOfTimeline,
              endedTimelineFn: endedTimelineFn,
              dataFn: dataFn,
              doneFn: doneFn,
              onTransmuxerLog: onTransmuxerLog
            });
          } // Otherwise, everything is ready just continue


          handleSegmentBytes({
            segment: segment,
            bytes: segment.bytes,
            trackInfoFn: trackInfoFn,
            timingInfoFn: timingInfoFn,
            videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
            audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
            id3Fn: id3Fn,
            captionsFn: captionsFn,
            isEndOfTimeline: isEndOfTimeline,
            endedTimelineFn: endedTimelineFn,
            dataFn: dataFn,
            doneFn: doneFn,
            onTransmuxerLog: onTransmuxerLog
          });
        }; // Keep track of when *all* of the requests have completed


        segment.endOfAllRequests = Date.now();

        // an encrypted init segment must be decrypted and parsed before the
        // media segment itself can be handled
        if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
          return decrypt({
            decryptionWorker: decryptionWorker,
            // add -init to the "id" to differentiate between segment
            // and init segment decryption, just in case they happen
            // at the same time at some point in the future.
            id: segment.requestId + '-init',
            encryptedBytes: segment.map.encryptedBytes,
            key: segment.map.key
          }, function (decryptedBytes) {
            segment.map.bytes = decryptedBytes;
            parseInitSegment(segment, function (parseError) {
              if (parseError) {
                abortAll(activeXhrs);
                return doneFn(parseError, segment);
              }

              segmentFinish();
            });
          });
        }

        segmentFinish();
      }
    };
  };
19865 /**
19866 * Calls the abort callback if any request within the batch was aborted. Will only call
19867 * the callback once per batch of requests, even if multiple were aborted.
19868 *
19869 * @param {Object} loadendState - state to check to see if the abort function was called
19870 * @param {Function} abortFn - callback to call for abort
19871 */
19872
19873
19874 var handleLoadEnd = function handleLoadEnd(_ref10) {
19875 var loadendState = _ref10.loadendState,
19876 abortFn = _ref10.abortFn;
19877 return function (event) {
19878 var request = event.target;
19879
19880 if (request.aborted && abortFn && !loadendState.calledAbortFn) {
19881 abortFn();
19882 loadendState.calledAbortFn = true;
19883 }
19884 };
19885 };
19886 /**
19887 * Simple progress event callback handler that gathers some stats before
19888 * executing a provided callback with the `segment` object
19889 *
19890 * @param {Object} segment - a simplified copy of the segmentInfo object
19891 * from SegmentLoader
19892 * @param {Function} progressFn - a callback that is executed each time a progress event
19893 * is received
19894 * @param {Function} trackInfoFn - a callback that receives track info
19895 * @param {Function} timingInfoFn - a callback that receives timing info
19896 * @param {Function} videoSegmentTimingInfoFn
19897 * a callback that receives video timing info based on media times and
19898 * any adjustments made by the transmuxer
19899 * @param {Function} audioSegmentTimingInfoFn
19900 * a callback that receives audio timing info based on media times and
19901 * any adjustments made by the transmuxer
19902 * @param {boolean} isEndOfTimeline
19903 * true if this segment represents the last segment in a timeline
19904 * @param {Function} endedTimelineFn
19905 * a callback made when a timeline is ended, will only be called if
19906 * isEndOfTimeline is true
19907 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19908 * and ready to use
19909 * @param {Event} event - the progress event object from XMLHttpRequest
19910 */
19911
19912
19913 var handleProgress = function handleProgress(_ref11) {
19914 var segment = _ref11.segment,
19915 progressFn = _ref11.progressFn;
19916 _ref11.trackInfoFn;
19917 _ref11.timingInfoFn;
19918 _ref11.videoSegmentTimingInfoFn;
19919 _ref11.audioSegmentTimingInfoFn;
19920 _ref11.id3Fn;
19921 _ref11.captionsFn;
19922 _ref11.isEndOfTimeline;
19923 _ref11.endedTimelineFn;
19924 _ref11.dataFn;
19925 return function (event) {
19926 var request = event.target;
19927
19928 if (request.aborted) {
19929 return;
19930 }
19931
19932 segment.stats = videojs__default["default"].mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
19933
19934 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
19935 segment.stats.firstBytesReceivedAt = Date.now();
19936 }
19937
19938 return progressFn(event, segment);
19939 };
19940 };
19941 /**
19942 * Load all resources and does any processing necessary for a media-segment
19943 *
19944 * Features:
19945 * decrypts the media-segment if it has a key uri and an iv
19946 * aborts *all* requests if *any* one request fails
19947 *
19948 * The segment object, at minimum, has the following format:
19949 * {
19950 * resolvedUri: String,
19951 * [transmuxer]: Object,
19952 * [byterange]: {
19953 * offset: Number,
19954 * length: Number
19955 * },
19956 * [key]: {
19957 * resolvedUri: String
19958 * [byterange]: {
19959 * offset: Number,
19960 * length: Number
19961 * },
19962 * iv: {
19963 * bytes: Uint32Array
19964 * }
19965 * },
19966 * [map]: {
19967 * resolvedUri: String,
19968 * [byterange]: {
19969 * offset: Number,
19970 * length: Number
19971 * },
19972 * [bytes]: Uint8Array
19973 * }
19974 * }
19975 * ...where [name] denotes optional properties
19976 *
19977 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
19978 * @param {Object} xhrOptions - the base options to provide to all xhr requests
19979 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
19980 * decryption routines
19981 * @param {Object} segment - a simplified copy of the segmentInfo object
19982 * from SegmentLoader
19983 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
19984 * aborted
19985 * @param {Function} progressFn - a callback that receives progress events from the main
19986 * segment's xhr request
19987 * @param {Function} trackInfoFn - a callback that receives track info
19988 * @param {Function} timingInfoFn - a callback that receives timing info
19989 * @param {Function} videoSegmentTimingInfoFn
19990 * a callback that receives video timing info based on media times and
19991 * any adjustments made by the transmuxer
19992 * @param {Function} audioSegmentTimingInfoFn
19993 * a callback that receives audio timing info based on media times and
19994 * any adjustments made by the transmuxer
19995 * @param {Function} id3Fn - a callback that receives ID3 metadata
19996 * @param {Function} captionsFn - a callback that receives captions
19997 * @param {boolean} isEndOfTimeline
19998 * true if this segment represents the last segment in a timeline
19999 * @param {Function} endedTimelineFn
20000 * a callback made when a timeline is ended, will only be called if
20001 * isEndOfTimeline is true
20002 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
20003 * request, transmuxed if needed
20004 * @param {Function} doneFn - a callback that is executed only once all requests have
20005 * succeeded or failed
20006 * @return {Function} a function that, when invoked, immediately aborts all
20007 * outstanding requests
20008 */
20009
20010
20011 var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
20012 var xhr = _ref12.xhr,
20013 xhrOptions = _ref12.xhrOptions,
20014 decryptionWorker = _ref12.decryptionWorker,
20015 segment = _ref12.segment,
20016 abortFn = _ref12.abortFn,
20017 progressFn = _ref12.progressFn,
20018 trackInfoFn = _ref12.trackInfoFn,
20019 timingInfoFn = _ref12.timingInfoFn,
20020 videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
20021 audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
20022 id3Fn = _ref12.id3Fn,
20023 captionsFn = _ref12.captionsFn,
20024 isEndOfTimeline = _ref12.isEndOfTimeline,
20025 endedTimelineFn = _ref12.endedTimelineFn,
20026 dataFn = _ref12.dataFn,
20027 doneFn = _ref12.doneFn,
20028 onTransmuxerLog = _ref12.onTransmuxerLog;
20029 var activeXhrs = [];
20030 var finishProcessingFn = waitForCompletion({
20031 activeXhrs: activeXhrs,
20032 decryptionWorker: decryptionWorker,
20033 trackInfoFn: trackInfoFn,
20034 timingInfoFn: timingInfoFn,
20035 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
20036 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
20037 id3Fn: id3Fn,
20038 captionsFn: captionsFn,
20039 isEndOfTimeline: isEndOfTimeline,
20040 endedTimelineFn: endedTimelineFn,
20041 dataFn: dataFn,
20042 doneFn: doneFn,
20043 onTransmuxerLog: onTransmuxerLog
20044 }); // optionally, request the decryption key
20045
20046 if (segment.key && !segment.key.bytes) {
20047 var objects = [segment.key];
20048
20049 if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
20050 objects.push(segment.map.key);
20051 }
20052
20053 var keyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20054 uri: segment.key.resolvedUri,
20055 responseType: 'arraybuffer'
20056 });
20057 var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
20058 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
20059 activeXhrs.push(keyXhr);
20060 } // optionally, request the associated media init segment
20061
20062
20063 if (segment.map && !segment.map.bytes) {
20064 var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);
20065
20066 if (differentMapKey) {
20067 var mapKeyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20068 uri: segment.map.key.resolvedUri,
20069 responseType: 'arraybuffer'
20070 });
20071 var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
20072 var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
20073 activeXhrs.push(mapKeyXhr);
20074 }
20075
20076 var initSegmentOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20077 uri: segment.map.resolvedUri,
20078 responseType: 'arraybuffer',
20079 headers: segmentXhrHeaders(segment.map)
20080 });
20081 var initSegmentRequestCallback = handleInitSegmentResponse({
20082 segment: segment,
20083 finishProcessingFn: finishProcessingFn
20084 });
20085 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
20086 activeXhrs.push(initSegmentXhr);
20087 }
20088
20089 var segmentRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20090 uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
20091 responseType: 'arraybuffer',
20092 headers: segmentXhrHeaders(segment)
20093 });
20094 var segmentRequestCallback = handleSegmentResponse({
20095 segment: segment,
20096 finishProcessingFn: finishProcessingFn,
20097 responseType: segmentRequestOptions.responseType
20098 });
20099 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
20100 segmentXhr.addEventListener('progress', handleProgress({
20101 segment: segment,
20102 progressFn: progressFn,
20103 trackInfoFn: trackInfoFn,
20104 timingInfoFn: timingInfoFn,
20105 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
20106 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
20107 id3Fn: id3Fn,
20108 captionsFn: captionsFn,
20109 isEndOfTimeline: isEndOfTimeline,
20110 endedTimelineFn: endedTimelineFn,
20111 dataFn: dataFn
20112 }));
20113 activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but should not make callbacks
20114 // multiple times, provide a shared state object
20115
20116 var loadendState = {};
20117 activeXhrs.forEach(function (activeXhr) {
20118 activeXhr.addEventListener('loadend', handleLoadEnd({
20119 loadendState: loadendState,
20120 abortFn: abortFn
20121 }));
20122 });
20123 return function () {
20124 return abortAll(activeXhrs);
20125 };
20126 };
20127
20128 /**
20129 * @file - codecs.js - Handles tasks regarding codec strings such as translating them to
20130 * codec strings, or translating codec strings into objects that can be examined.
20131 */
  // Namespaced debug logger used by the codec helpers below.
  var logFn$1 = logger('CodecUtils');
20133 /**
20134 * Returns a set of codec strings parsed from the playlist or the default
20135 * codec strings if no codecs were specified in the playlist
20136 *
20137 * @param {Playlist} media the current media playlist
20138 * @return {Object} an object with the video and audio codecs
20139 */
20140
20141 var getCodecs = function getCodecs(media) {
20142 // if the codecs were explicitly specified, use them instead of the
20143 // defaults
20144 var mediaAttributes = media.attributes || {};
20145
20146 if (mediaAttributes.CODECS) {
20147 return parseCodecs(mediaAttributes.CODECS);
20148 }
20149 };
20150
20151 var isMaat = function isMaat(master, media) {
20152 var mediaAttributes = media.attributes || {};
20153 return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
20154 };
20155 var isMuxed = function isMuxed(master, media) {
20156 if (!isMaat(master, media)) {
20157 return true;
20158 }
20159
20160 var mediaAttributes = media.attributes || {};
20161 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
20162
20163 for (var groupId in audioGroup) {
20164 // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
20165 // or there are listed playlists (the case for DASH, as the manifest will have already
20166 // provided all of the details necessary to generate the audio playlist, as opposed to
20167 // HLS' externally requested playlists), then the content is demuxed.
20168 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
20169 return true;
20170 }
20171 }
20172
20173 return false;
20174 };
20175 var unwrapCodecList = function unwrapCodecList(codecList) {
20176 var codecs = {};
20177 codecList.forEach(function (_ref) {
20178 var mediaType = _ref.mediaType,
20179 type = _ref.type,
20180 details = _ref.details;
20181 codecs[mediaType] = codecs[mediaType] || [];
20182 codecs[mediaType].push(translateLegacyCodec("" + type + details));
20183 });
20184 Object.keys(codecs).forEach(function (mediaType) {
20185 if (codecs[mediaType].length > 1) {
20186 logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
20187 codecs[mediaType] = null;
20188 return;
20189 }
20190
20191 codecs[mediaType] = codecs[mediaType][0];
20192 });
20193 return codecs;
20194 };
20195 var codecCount = function codecCount(codecObj) {
20196 var count = 0;
20197
20198 if (codecObj.audio) {
20199 count++;
20200 }
20201
20202 if (codecObj.video) {
20203 count++;
20204 }
20205
20206 return count;
20207 };
20208 /**
20209 * Calculates the codec strings for a working configuration of
20210 * SourceBuffers to play variant streams in a master playlist. If
20211 * there is no possible working configuration, an empty object will be
20212 * returned.
20213 *
20214 * @param master {Object} the m3u8 object for the master playlist
20215 * @param media {Object} the m3u8 object for the variant playlist
20216 * @return {Object} the codec strings.
20217 *
20218 * @private
20219 */
20220
20221 var codecsForPlaylist = function codecsForPlaylist(master, media) {
20222 var mediaAttributes = media.attributes || {};
20223 var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
20224 // Put another way, there is no way to have a video-only multiple-audio HLS!
20225
20226 if (isMaat(master, media) && !codecInfo.audio) {
20227 if (!isMuxed(master, media)) {
20228 // It is possible for codecs to be specified on the audio media group playlist but
20229 // not on the rendition playlist. This is mostly the case for DASH, where audio and
20230 // video are always separate (and separately specified).
20231 var defaultCodecs = unwrapCodecList(codecsFromDefault(master, mediaAttributes.AUDIO) || []);
20232
20233 if (defaultCodecs.audio) {
20234 codecInfo.audio = defaultCodecs.audio;
20235 }
20236 }
20237 }
20238
20239 return codecInfo;
20240 };
20241
  // Namespaced debug logger for the playlist-selection helpers below.
  var logFn = logger('PlaylistSelector');
20243
20244 var representationToString = function representationToString(representation) {
20245 if (!representation || !representation.playlist) {
20246 return;
20247 }
20248
20249 var playlist = representation.playlist;
20250 return JSON.stringify({
20251 id: playlist.id,
20252 bandwidth: representation.bandwidth,
20253 width: representation.width,
20254 height: representation.height,
20255 codecs: playlist.attributes && playlist.attributes.CODECS || ''
20256 });
20257 }; // Utilities
20258
20259 /**
20260 * Returns the CSS value for the specified property on an element
20261 * using `getComputedStyle`. Firefox has a long-standing issue where
20262 * getComputedStyle() may return null when running in an iframe with
20263 * `display: none`.
20264 *
20265 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
20266 * @param {HTMLElement} el the htmlelement to work on
20267 * @param {string} the proprety to get the style for
20268 */
20269
20270
20271 var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
20272 if (!el) {
20273 return '';
20274 }
20275
20276 var result = window.getComputedStyle(el);
20277
20278 if (!result) {
20279 return '';
20280 }
20281
20282 return result[property];
20283 };
20284 /**
20285 * Resuable stable sort function
20286 *
20287 * @param {Playlists} array
20288 * @param {Function} sortFn Different comparators
20289 * @function stableSort
20290 */
20291
20292
20293 var stableSort = function stableSort(array, sortFn) {
20294 var newArray = array.slice();
20295 array.sort(function (left, right) {
20296 var cmp = sortFn(left, right);
20297
20298 if (cmp === 0) {
20299 return newArray.indexOf(left) - newArray.indexOf(right);
20300 }
20301
20302 return cmp;
20303 });
20304 };
20305 /**
20306 * A comparator function to sort two playlist object by bandwidth.
20307 *
20308 * @param {Object} left a media playlist object
20309 * @param {Object} right a media playlist object
20310 * @return {number} Greater than zero if the bandwidth attribute of
20311 * left is greater than the corresponding attribute of right. Less
20312 * than zero if the bandwidth of right is greater than left and
20313 * exactly zero if the two are equal.
20314 */
20315
20316
20317 var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
20318 var leftBandwidth;
20319 var rightBandwidth;
20320
20321 if (left.attributes.BANDWIDTH) {
20322 leftBandwidth = left.attributes.BANDWIDTH;
20323 }
20324
20325 leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;
20326
20327 if (right.attributes.BANDWIDTH) {
20328 rightBandwidth = right.attributes.BANDWIDTH;
20329 }
20330
20331 rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;
20332 return leftBandwidth - rightBandwidth;
20333 };
20334 /**
20335 * A comparator function to sort two playlist object by resolution (width).
20336 *
20337 * @param {Object} left a media playlist object
20338 * @param {Object} right a media playlist object
20339 * @return {number} Greater than zero if the resolution.width attribute of
20340 * left is greater than the corresponding attribute of right. Less
20341 * than zero if the resolution.width of right is greater than left and
20342 * exactly zero if the two are equal.
20343 */
20344
20345 var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
20346 var leftWidth;
20347 var rightWidth;
20348
20349 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
20350 leftWidth = left.attributes.RESOLUTION.width;
20351 }
20352
20353 leftWidth = leftWidth || window.Number.MAX_VALUE;
20354
20355 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
20356 rightWidth = right.attributes.RESOLUTION.width;
20357 }
20358
20359 rightWidth = rightWidth || window.Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
20360 // have the same media dimensions/ resolution
20361
20362 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
20363 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
20364 }
20365
20366 return leftWidth - rightWidth;
20367 };
20368 /**
20369 * Chooses the appropriate media playlist based on bandwidth and player size
20370 *
20371 * @param {Object} master
20372 * Object representation of the master manifest
20373 * @param {number} playerBandwidth
20374 * Current calculated bandwidth of the player
20375 * @param {number} playerWidth
20376 * Current width of the player element (should account for the device pixel ratio)
20377 * @param {number} playerHeight
20378 * Current height of the player element (should account for the device pixel ratio)
20379 * @param {boolean} limitRenditionByPlayerDimensions
20380 * True if the player width and height should be used during the selection, false otherwise
20381 * @param {Object} masterPlaylistController
20382 * the current masterPlaylistController object
20383 * @return {Playlist} the highest bitrate playlist less than the
20384 * currently detected bandwidth, accounting for some amount of
20385 * bandwidth variance
20386 */
20387
var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
  // If we end up getting called before `master` is available, exit early
  if (!master) {
    return;
  }

  // captured so every exit path can log the selection inputs for debugging
  var options = {
    bandwidth: playerBandwidth,
    width: playerWidth,
    height: playerHeight,
    limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
  };
  var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.

  if (Playlist.isAudioOnly(master)) {
    playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
    // at the bottom of this function for debugging.

    options.audioOnly = true;
  } // convert the playlists to an intermediary representation to make comparisons easier


  var sortedPlaylistReps = playlists.map(function (playlist) {
    var bandwidth;
    var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
    var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
    bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
    // a missing BANDWIDTH attribute sorts to the end (treated as maximally expensive)
    bandwidth = bandwidth || window.Number.MAX_VALUE;
    return {
      bandwidth: bandwidth,
      width: width,
      height: height,
      playlist: playlist
    };
  });
  stableSort(sortedPlaylistReps, function (left, right) {
    return left.bandwidth - right.bandwidth;
  }); // filter out any playlists that have been excluded due to
  // incompatible configurations

  sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
    return !Playlist.isIncompatible(rep.playlist);
  }); // filter out any playlists that have been disabled manually through the representations
  // api or blacklisted temporarily due to playback errors.

  var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
    return Playlist.isEnabled(rep.playlist);
  });

  if (!enabledPlaylistReps.length) {
    // if there are no enabled playlists, then they have all been blacklisted or disabled
    // by the user through the representations api. In this case, ignore blacklisting and
    // fallback to what the user wants by using playlists the user has not disabled.
    enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
      return !Playlist.isDisabled(rep.playlist);
    });
  } // filter out any variant that has greater effective bitrate
  // than the current estimated bandwidth


  var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
    return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
  });
  var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
  // and then taking the very first element

  var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
    return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
  })[0]; // if we're not going to limit renditions by player size, make an early decision.

  if (limitRenditionByPlayerDimensions === false) {
    var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

    if (_chosenRep && _chosenRep.playlist) {
      // record which fallback tier produced the selection, for the debug log
      var type = 'sortedPlaylistReps';

      if (bandwidthBestRep) {
        type = 'bandwidthBestRep';
      }

      if (enabledPlaylistReps[0]) {
        type = 'enabledPlaylistReps';
      }

      logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
      return _chosenRep.playlist;
    }

    logFn('could not choose a playlist with options', options);
    return null;
  } // filter out playlists without resolution information


  var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
    return rep.width && rep.height;
  }); // sort variants by resolution

  stableSort(haveResolution, function (left, right) {
    return left.width - right.width;
  }); // if we have the exact resolution as the player use it

  var resolutionBestRepList = haveResolution.filter(function (rep) {
    return rep.width === playerWidth && rep.height === playerHeight;
  });
  highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that have exact resolution

  var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
    return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
  })[0];
  var resolutionPlusOneList;
  var resolutionPlusOneSmallest;
  var resolutionPlusOneRep; // find the smallest variant that is larger than the player
  // if there is no match of exact resolution

  if (!resolutionBestRep) {
    resolutionPlusOneList = haveResolution.filter(function (rep) {
      return rep.width > playerWidth || rep.height > playerHeight;
    }); // find all the variants have the same smallest resolution

    resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
      return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
    }); // ensure that we also pick the highest bandwidth variant that
    // is just-larger-than the video player

    highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
    resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
      return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
    })[0];
  }

  var leastPixelDiffRep; // If this selector proves to be better than others,
  // resolutionPlusOneRep and resolutionBestRep and all
  // the code involving them should be removed.

  if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
    // find the variant that is closest to the player's pixel size
    var leastPixelDiffList = haveResolution.map(function (rep) {
      rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
      return rep;
    }); // get the highest bandwidth, closest resolution playlist

    stableSort(leastPixelDiffList, function (left, right) {
      // sort by highest bandwidth if pixelDiff is the same
      if (left.pixelDiff === right.pixelDiff) {
        return right.bandwidth - left.bandwidth;
      }

      return left.pixelDiff - right.pixelDiff;
    });
    leastPixelDiffRep = leastPixelDiffList[0];
  } // fallback chain of variants, from most to least preferred selection strategy


  var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

  if (chosenRep && chosenRep.playlist) {
    var _type = 'sortedPlaylistReps';

    if (leastPixelDiffRep) {
      _type = 'leastPixelDiffRep';
    } else if (resolutionPlusOneRep) {
      _type = 'resolutionPlusOneRep';
    } else if (resolutionBestRep) {
      _type = 'resolutionBestRep';
    } else if (bandwidthBestRep) {
      _type = 'bandwidthBestRep';
    } else if (enabledPlaylistReps[0]) {
      _type = 'enabledPlaylistReps';
    }

    logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
    return chosenRep.playlist;
  }

  logFn('could not choose a playlist with options', options);
  return null;
};
20565
20566 /**
20567 * Chooses the appropriate media playlist based on the most recent
20568 * bandwidth estimate and the player size.
20569 *
20570 * Expects to be called within the context of an instance of VhsHandler
20571 *
20572 * @return {Playlist} the highest bitrate playlist less than the
20573 * currently detected bandwidth, accounting for some amount of
20574 * bandwidth variance
20575 */
20576
20577 var lastBandwidthSelector = function lastBandwidthSelector() {
20578 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
20579 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
20580 };
20581 /**
20582 * Chooses the appropriate media playlist based on an
20583 * exponential-weighted moving average of the bandwidth after
20584 * filtering for player size.
20585 *
20586 * Expects to be called within the context of an instance of VhsHandler
20587 *
20588 * @param {number} decay - a number between 0 and 1. Higher values of
20589 * this parameter will cause previous bandwidth estimates to lose
20590 * significance more quickly.
20591 * @return {Function} a function which can be invoked to create a new
20592 * playlist selector function.
20593 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
20594 */
20595
20596 var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
20597 var average = -1;
20598 var lastSystemBandwidth = -1;
20599
20600 if (decay < 0 || decay > 1) {
20601 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
20602 }
20603
20604 return function () {
20605 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
20606
20607 if (average < 0) {
20608 average = this.systemBandwidth;
20609 lastSystemBandwidth = this.systemBandwidth;
20610 } // stop the average value from decaying for every 250ms
20611 // when the systemBandwidth is constant
20612 // and
20613 // stop average from setting to a very low value when the
20614 // systemBandwidth becomes 0 in case of chunk cancellation
20615
20616
20617 if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
20618 average = decay * this.systemBandwidth + (1 - decay) * average;
20619 lastSystemBandwidth = this.systemBandwidth;
20620 }
20621
20622 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
20623 };
20624 };
20625 /**
20626 * Chooses the appropriate media playlist based on the potential to rebuffer
20627 *
20628 * @param {Object} settings
20629 * Object of information required to use this selector
20630 * @param {Object} settings.master
20631 * Object representation of the master manifest
20632 * @param {number} settings.currentTime
20633 * The current time of the player
20634 * @param {number} settings.bandwidth
20635 * Current measured bandwidth
20636 * @param {number} settings.duration
20637 * Duration of the media
20638 * @param {number} settings.segmentDuration
20639 * Segment duration to be used in round trip time calculations
20640 * @param {number} settings.timeUntilRebuffer
20641 * Time left in seconds until the player has to rebuffer
20642 * @param {number} settings.currentTimeline
20643 * The current timeline segments are being loaded from
20644 * @param {SyncController} settings.syncController
20645 * SyncController for determining if we have a sync point for a given playlist
20646 * @return {Object|null}
20647 * {Object} return.playlist
20648 * The highest bandwidth playlist with the least amount of rebuffering
20649 * {Number} return.rebufferingImpact
20650 * The amount of time in seconds switching to this playlist will rebuffer. A
20651 * negative value means that switching will cause zero rebuffering.
20652 */
20653
var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
  var master = settings.master,
      currentTime = settings.currentTime,
      bandwidth = settings.bandwidth,
      duration = settings.duration,
      segmentDuration = settings.segmentDuration,
      timeUntilRebuffer = settings.timeUntilRebuffer,
      currentTimeline = settings.currentTimeline,
      syncController = settings.syncController; // filter out any playlists that have been excluded due to
  // incompatible configurations

  var compatiblePlaylists = master.playlists.filter(function (playlist) {
    return !Playlist.isIncompatible(playlist);
  }); // filter out any playlists that have been disabled manually through the representations
  // api or blacklisted temporarily due to playback errors.

  var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);

  if (!enabledPlaylists.length) {
    // if there are no enabled playlists, then they have all been blacklisted or disabled
    // by the user through the representations api. In this case, ignore blacklisting and
    // fallback to what the user wants by using playlists the user has not disabled.
    enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
      return !Playlist.isDisabled(playlist);
    });
  }

  // only playlists with a BANDWIDTH attribute can have a request time estimated
  var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
  var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
    var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
    // sync request first. This will double the request time

    var numRequests = syncPoint ? 1 : 2;
    var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
    // a non-positive impact means the switch completes before the rebuffer deadline
    var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
    return {
      playlist: playlist,
      rebufferingImpact: rebufferingImpact
    };
  });
  var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
    return estimate.rebufferingImpact <= 0;
  }); // Sort by bandwidth DESC

  stableSort(noRebufferingPlaylists, function (a, b) {
    return comparePlaylistBandwidth(b.playlist, a.playlist);
  });

  if (noRebufferingPlaylists.length) {
    return noRebufferingPlaylists[0];
  }

  // no playlist avoids rebuffering entirely; fall back to the one that
  // rebuffers the least
  stableSort(rebufferingEstimates, function (a, b) {
    return a.rebufferingImpact - b.rebufferingImpact;
  });
  return rebufferingEstimates[0] || null;
};
20711 /**
20712 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
20713 * one with video. If no renditions with video exist, return the lowest audio rendition.
20714 *
20715 * Expects to be called within the context of an instance of VhsHandler
20716 *
20717 * @return {Object|null}
20718 * {Object} return.playlist
20719 * The lowest bitrate playlist that contains a video codec. If no such rendition
20720 * exists pick the lowest audio rendition.
20721 */
20722
20723 var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
20724 var _this = this;
20725
20726 // filter out any playlists that have been excluded due to
20727 // incompatible configurations or playback errors
20728 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
20729
20730 stableSort(playlists, function (a, b) {
20731 return comparePlaylistBandwidth(a, b);
20732 }); // Parse and assume that playlists with no video codec have no video
20733 // (this is not necessarily true, although it is generally true).
20734 //
20735 // If an entire manifest has no valid videos everything will get filtered
20736 // out.
20737
20738 var playlistsWithVideo = playlists.filter(function (playlist) {
20739 return !!codecsForPlaylist(_this.playlists.master, playlist).video;
20740 });
20741 return playlistsWithVideo[0] || null;
20742 };
20743
20744 /**
20745 * Combine all segments into a single Uint8Array
20746 *
20747 * @param {Object} segmentObj
20748 * @return {Uint8Array} concatenated bytes
20749 * @private
20750 */
20751 var concatSegments = function concatSegments(segmentObj) {
20752 var offset = 0;
20753 var tempBuffer;
20754
20755 if (segmentObj.bytes) {
20756 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
20757
20758 segmentObj.segments.forEach(function (segment) {
20759 tempBuffer.set(segment, offset);
20760 offset += segment.byteLength;
20761 });
20762 }
20763
20764 return tempBuffer;
20765 };
20766
20767 /**
20768 * @file text-tracks.js
20769 */
20770 /**
20771 * Create captions text tracks on video.js if they do not exist
20772 *
20773 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20774 * @param {Object} tech the video.js tech
20775 * @param {Object} captionStream the caption stream to create
20776 * @private
20777 */
20778
var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
  if (inbandTextTracks[captionStream]) {
    // a track for this caption stream already exists; nothing to do
    return;
  }

  tech.trigger({
    type: 'usage',
    name: 'vhs-608'
  });
  tech.trigger({
    type: 'usage',
    name: 'hls-608'
  });

  // translate a 708 "cc708_N" stream name into the SERVICEN id that
  // mux.js currently uses to label these services
  var instreamId = captionStream;

  if (/^cc708_/.test(captionStream)) {
    instreamId = 'SERVICE' + captionStream.split('_')[1];
  }

  var existingTrack = tech.textTracks().getTrackById(instreamId);

  if (existingTrack) {
    // Reuse an existing track with a CC# id because this was
    // very likely created by videojs-contrib-hls from information
    // in the m3u8 for us to use
    inbandTextTracks[captionStream] = existingTrack;
    return;
  }

  // This section gets called when we have caption services that aren't specified in the manifest.
  // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
  var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
  var label = captionStream;
  var language = captionStream;
  var def = false;
  var captionService = captionServices[instreamId];

  if (captionService) {
    label = captionService.label;
    language = captionService.language;
    def = captionService.default;
  }

  // Otherwise, create a track with the default `CC#` label and without a language
  inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
    kind: 'captions',
    id: instreamId,
    // TODO: investigate why this doesn't seem to turn the caption on by default
    default: def,
    label: label,
    language: language
  }, false).track;
};
20830 /**
20831 * Add caption text track data to a source handler given an array of captions
20832 *
20833 * @param {Object}
20834 * @param {Object} inbandTextTracks the inband text tracks
20835 * @param {number} timestampOffset the timestamp offset of the source buffer
20836 * @param {Array} captionArray an array of caption data
20837 * @private
20838 */
20839
20840 var addCaptionData = function addCaptionData(_ref) {
20841 var inbandTextTracks = _ref.inbandTextTracks,
20842 captionArray = _ref.captionArray,
20843 timestampOffset = _ref.timestampOffset;
20844
20845 if (!captionArray) {
20846 return;
20847 }
20848
20849 var Cue = window.WebKitDataCue || window.VTTCue;
20850 captionArray.forEach(function (caption) {
20851 var track = caption.stream;
20852 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
20853 });
20854 };
20855 /**
20856 * Define properties on a cue for backwards compatability,
20857 * but warn the user that the way that they are using it
20858 * is depricated and will be removed at a later date.
20859 *
20860 * @param {Cue} cue the cue to add the properties on
20861 * @private
20862 */
20863
20864 var deprecateOldCue = function deprecateOldCue(cue) {
20865 Object.defineProperties(cue.frame, {
20866 id: {
20867 get: function get() {
20868 videojs__default["default"].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
20869 return cue.value.key;
20870 }
20871 },
20872 value: {
20873 get: function get() {
20874 videojs__default["default"].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
20875 return cue.value.data;
20876 }
20877 },
20878 privateData: {
20879 get: function get() {
20880 videojs__default["default"].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
20881 return cue.value.data;
20882 }
20883 }
20884 });
20885 };
20886 /**
20887 * Add metadata text track data to a source handler given an array of metadata
20888 *
20889 * @param {Object}
20890 * @param {Object} inbandTextTracks the inband text tracks
20891 * @param {Array} metadataArray an array of meta data
20892 * @param {number} timestampOffset the timestamp offset of the source buffer
20893 * @param {number} videoDuration the duration of the video
20894 * @private
20895 */
20896
20897
var addMetadata = function addMetadata(_ref2) {
  var inbandTextTracks = _ref2.inbandTextTracks,
      metadataArray = _ref2.metadataArray,
      timestampOffset = _ref2.timestampOffset,
      videoDuration = _ref2.videoDuration;

  if (!metadataArray) {
    return;
  }

  // prefer WebKitDataCue where available for backwards compatibility
  var Cue = window.WebKitDataCue || window.VTTCue;
  var metadataTrack = inbandTextTracks.metadataTrack_;

  if (!metadataTrack) {
    return;
  }

  metadataArray.forEach(function (metadata) {
    var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
    // ignore this bit of metadata.
    // This likely occurs when you have an non-timed ID3 tag like TIT2,
    // which is the "Title/Songname/Content description" frame

    if (typeof time !== 'number' || window.isNaN(time) || time < 0 || !(time < Infinity)) {
      return;
    }

    // create one cue per ID3 frame, all sharing the metadata group's time
    metadata.frames.forEach(function (frame) {
      var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
      cue.frame = frame;
      cue.value = frame;
      deprecateOldCue(cue);
      metadataTrack.addCue(cue);
    });
  });

  if (!metadataTrack.cues || !metadataTrack.cues.length) {
    return;
  } // Updating the metadata cues so that
  // the endTime of each cue is the startTime of the next cue
  // the endTime of last cue is the duration of the video


  var cues = metadataTrack.cues;
  var cuesArray = []; // Create a copy of the TextTrackCueList...
  // ...disregarding cues with a falsey value

  for (var i = 0; i < cues.length; i++) {
    if (cues[i]) {
      cuesArray.push(cues[i]);
    }
  } // Group cues by their startTime value


  var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
    var timeSlot = obj[cue.startTime] || [];
    timeSlot.push(cue);
    obj[cue.startTime] = timeSlot;
    return obj;
  }, {}); // Sort startTimes by ascending order

  var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
    return Number(a) - Number(b);
  }); // Map each cue group's endTime to the next group's startTime

  sortedStartTimes.forEach(function (startTime, idx) {
    var cueGroup = cuesGroupedByStartTime[startTime];
    // the last group has no successor, so it extends to the video's duration
    var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime the next group's startTime

    cueGroup.forEach(function (cue) {
      cue.endTime = nextTime;
    });
  });
};
20972 /**
20973 * Create metadata text track on video.js if it does not exist
20974 *
20975 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20976 * @param {string} dispatchType the inband metadata track dispatch type
20977 * @param {Object} tech the video.js tech
20978 * @private
20979 */
20980
20981 var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
20982 if (inbandTextTracks.metadataTrack_) {
20983 return;
20984 }
20985
20986 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
20987 kind: 'metadata',
20988 label: 'Timed Metadata'
20989 }, false).track;
20990 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
20991 };
20992 /**
20993 * Remove cues from a track on video.js.
20994 *
20995 * @param {Double} start start of where we should remove the cue
20996 * @param {Double} end end of where the we should remove the cue
20997 * @param {Object} track the text track to remove the cues from
20998 * @private
20999 */
21000
21001 var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
21002 var i;
21003 var cue;
21004
21005 if (!track) {
21006 return;
21007 }
21008
21009 if (!track.cues) {
21010 return;
21011 }
21012
21013 i = track.cues.length;
21014
21015 while (i--) {
21016 cue = track.cues[i]; // Remove any cue within the provided start and end time
21017
21018 if (cue.startTime >= start && cue.endTime <= end) {
21019 track.removeCue(cue);
21020 }
21021 }
21022 };
21023 /**
21024 * Remove duplicate cues from a track on video.js (a cue is considered a
21025 * duplicate if it has the same time interval and text as another)
21026 *
21027 * @param {Object} track the text track to remove the duplicate cues from
21028 * @private
21029 */
21030
21031 var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
21032 var cues = track.cues;
21033
21034 if (!cues) {
21035 return;
21036 }
21037
21038 for (var i = 0; i < cues.length; i++) {
21039 var duplicates = [];
21040 var occurrences = 0;
21041
21042 for (var j = 0; j < cues.length; j++) {
21043 if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
21044 occurrences++;
21045
21046 if (occurrences > 1) {
21047 duplicates.push(cues[j]);
21048 }
21049 }
21050 }
21051
21052 if (duplicates.length) {
21053 duplicates.forEach(function (dupe) {
21054 return track.removeCue(dupe);
21055 });
21056 }
21057 }
21058 };
21059
21060 /**
21061 * mux.js
21062 *
21063 * Copyright (c) Brightcove
21064 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
21065 */
21066 var ONE_SECOND_IN_TS = 90000,
21067 // 90kHz clock
21068 secondsToVideoTs,
21069 secondsToAudioTs,
21070 videoTsToSeconds,
21071 audioTsToSeconds,
21072 audioTsToVideoTs,
21073 videoTsToAudioTs,
21074 metadataTsToSeconds;
21075
21076 secondsToVideoTs = function secondsToVideoTs(seconds) {
21077 return seconds * ONE_SECOND_IN_TS;
21078 };
21079
21080 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
21081 return seconds * sampleRate;
21082 };
21083
21084 videoTsToSeconds = function videoTsToSeconds(timestamp) {
21085 return timestamp / ONE_SECOND_IN_TS;
21086 };
21087
21088 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
21089 return timestamp / sampleRate;
21090 };
21091
21092 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
21093 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
21094 };
21095
21096 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
21097 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
21098 };
21099 /**
21100 * Adjust ID3 tag or caption timing information by the timeline pts values
21101 * (if keepOriginalTimestamps is false) and convert to seconds
21102 */
21103
21104
21105 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
21106 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
21107 };
21108
21109 var clock = {
21110 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,
21111 secondsToVideoTs: secondsToVideoTs,
21112 secondsToAudioTs: secondsToAudioTs,
21113 videoTsToSeconds: videoTsToSeconds,
21114 audioTsToSeconds: audioTsToSeconds,
21115 audioTsToVideoTs: audioTsToVideoTs,
21116 videoTsToAudioTs: videoTsToAudioTs,
21117 metadataTsToSeconds: metadataTsToSeconds
21118 };
21119
21120 /**
21121 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
21122 * front of current time.
21123 *
21124 * @param {Array} buffer
21125 * The current buffer of gop information
21126 * @param {number} currentTime
21127 * The current time
21128 * @param {Double} mapping
21129 * Offset to map display time to stream presentation time
21130 * @return {Array}
21131 * List of gops considered safe to append over
21132 */
21133
21134 var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
21135 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
21136 return [];
21137 } // pts value for current time + 3 seconds to give a bit more wiggle room
21138
21139
21140 var currentTimePts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);
21141 var i;
21142
21143 for (i = 0; i < buffer.length; i++) {
21144 if (buffer[i].pts > currentTimePts) {
21145 break;
21146 }
21147 }
21148
21149 return buffer.slice(i);
21150 };
21151 /**
21152 * Appends gop information (timing and byteLength) received by the transmuxer for the
21153 * gops appended in the last call to appendBuffer
21154 *
21155 * @param {Array} buffer
21156 * The current buffer of gop information
21157 * @param {Array} gops
21158 * List of new gop information
21159 * @param {boolean} replace
21160 * If true, replace the buffer with the new gop information. If false, append the
21161 * new gop information to the buffer in the right location of time.
21162 * @return {Array}
21163 * Updated list of gop information
21164 */
21165
21166 var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
21167 if (!gops.length) {
21168 return buffer;
21169 }
21170
21171 if (replace) {
21172 // If we are in safe append mode, then completely overwrite the gop buffer
21173 // with the most recent appeneded data. This will make sure that when appending
21174 // future segments, we only try to align with gops that are both ahead of current
21175 // time and in the last segment appended.
21176 return gops.slice();
21177 }
21178
21179 var start = gops[0].pts;
21180 var i = 0;
21181
21182 for (i; i < buffer.length; i++) {
21183 if (buffer[i].pts >= start) {
21184 break;
21185 }
21186 }
21187
21188 return buffer.slice(0, i).concat(gops);
21189 };
21190 /**
21191 * Removes gop information in buffer that overlaps with provided start and end
21192 *
21193 * @param {Array} buffer
21194 * The current buffer of gop information
21195 * @param {Double} start
21196 * position to start the remove at
21197 * @param {Double} end
21198 * position to end the remove at
21199 * @param {Double} mapping
21200 * Offset to map display time to stream presentation time
21201 */
21202
21203 var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
21204 var startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
21205 var endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
21206 var updatedBuffer = buffer.slice();
21207 var i = buffer.length;
21208
21209 while (i--) {
21210 if (buffer[i].pts <= endPts) {
21211 break;
21212 }
21213 }
21214
21215 if (i === -1) {
21216 // no removal because end of remove range is before start of buffer
21217 return updatedBuffer;
21218 }
21219
21220 var j = i + 1;
21221
21222 while (j--) {
21223 if (buffer[j].pts <= startPts) {
21224 break;
21225 }
21226 } // clamp remove range start to 0 index
21227
21228
21229 j = Math.max(j, 0);
21230 updatedBuffer.splice(j, i - j + 1);
21231 return updatedBuffer;
21232 };
21233
21234 var shallowEqual = function shallowEqual(a, b) {
21235 // if both are undefined
21236 // or one or the other is undefined
21237 // they are not equal
21238 if (!a && !b || !a && b || a && !b) {
21239 return false;
21240 } // they are the same object and thus, equal
21241
21242
21243 if (a === b) {
21244 return true;
21245 } // sort keys so we can make sure they have
21246 // all the same keys later.
21247
21248
21249 var akeys = Object.keys(a).sort();
21250 var bkeys = Object.keys(b).sort(); // different number of keys, not equal
21251
21252 if (akeys.length !== bkeys.length) {
21253 return false;
21254 }
21255
21256 for (var i = 0; i < akeys.length; i++) {
21257 var key = akeys[i]; // different sorted keys, not equal
21258
21259 if (key !== bkeys[i]) {
21260 return false;
21261 } // different values, not equal
21262
21263
21264 if (a[key] !== b[key]) {
21265 return false;
21266 }
21267 }
21268
21269 return true;
21270 };
21271
  // Legacy DOMException numeric code for QuotaExceededError:
  // https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
  // Used below to detect a full SourceBuffer on append — TODO confirm the
  // comparison site (not visible in this chunk).
  var QUOTA_EXCEEDED_ERR = 22;
21274
  /**
   * The segment loader has no recourse except to fetch a segment in the
   * current playlist and use the internal timestamps in that segment to
   * generate a syncPoint. This function returns a good candidate index
   * for that process.
   *
   * @param {number} currentTimeline - the timeline the loader is currently on
   * @param {Array} segments - the segments array from a playlist.
   * @param {number} targetTime - the time to find a candidate segment near
   * @return {number} An index of a segment from the playlist to load
   */
21285 var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
21286 segments = segments || [];
21287 var timelineSegments = [];
21288 var time = 0;
21289
21290 for (var i = 0; i < segments.length; i++) {
21291 var segment = segments[i];
21292
21293 if (currentTimeline === segment.timeline) {
21294 timelineSegments.push(i);
21295 time += segment.duration;
21296
21297 if (time > targetTime) {
21298 return i;
21299 }
21300 }
21301 }
21302
21303 if (timelineSegments.length === 0) {
21304 return 0;
21305 } // default to the last timeline segment
21306
21307
21308 return timelineSegments[timelineSegments.length - 1];
21309 }; // In the event of a quota exceeded error, keep at least one second of back buffer. This
21310 // number was arbitrarily chosen and may be updated in the future, but seemed reasonable
21311 // as a start to prevent any potential issues with removing content too close to the
21312 // playhead.
21313
  // Minimum back buffer to retain when recovering from a quota exceeded
  // error. Value is in seconds (see "keep at least one second" in the comment
  // above) — the stray "in ms" note appears to belong to CHECK_BUFFER_DELAY.
  var MIN_BACK_BUFFER = 1; // in seconds

  // Buffer-check polling interval; presumably milliseconds — TODO confirm
  // against the setTimeout call site (not visible in this chunk).
  var CHECK_BUFFER_DELAY = 500;
21317
21318 var finite = function finite(num) {
21319 return typeof num === 'number' && isFinite(num);
21320 }; // With most content hovering around 30fps, if a segment has a duration less than a half
21321 // frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
21322 // not accurately reflect the rest of the content.
21323
21324
21325 var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
21326 var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
21327 // Although these checks should most likely cover non 'main' types, for now it narrows
21328 // the scope of our checks.
21329 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
21330 return null;
21331 }
21332
21333 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
21334 return 'Neither audio nor video found in segment.';
21335 }
21336
21337 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
21338 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
21339 }
21340
21341 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
21342 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
21343 }
21344
21345 return null;
21346 };
21347 /**
21348 * Calculates a time value that is safe to remove from the back buffer without interrupting
21349 * playback.
21350 *
21351 * @param {TimeRange} seekable
21352 * The current seekable range
21353 * @param {number} currentTime
21354 * The current time of the player
21355 * @param {number} targetDuration
21356 * The target duration of the current playlist
21357 * @return {number}
21358 * Time that is safe to remove from the back buffer without interrupting playback
21359 */
21360
21361 var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
21362 // 30 seconds before the playhead provides a safe default for trimming.
21363 //
21364 // Choosing a reasonable default is particularly important for high bitrate content and
21365 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
21366 // throw an APPEND_BUFFER_ERR.
21367 var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
21368
21369 if (seekable.length) {
21370 // Some live playlists may have a shorter window of content than the full allowed back
21371 // buffer. For these playlists, don't save content that's no longer within the window.
21372 trimTime = Math.max(trimTime, seekable.start(0));
21373 } // Don't remove within target duration of the current time to avoid the possibility of
21374 // removing the GOP currently being played, as removing it can cause playback stalls.
21375
21376
21377 var maxTrimTime = currentTime - targetDuration;
21378 return Math.min(maxTrimTime, trimTime);
21379 };
21380 var segmentInfoString = function segmentInfoString(segmentInfo) {
21381 var startOfSegment = segmentInfo.startOfSegment,
21382 duration = segmentInfo.duration,
21383 segment = segmentInfo.segment,
21384 part = segmentInfo.part,
21385 _segmentInfo$playlist = segmentInfo.playlist,
21386 seq = _segmentInfo$playlist.mediaSequence,
21387 id = _segmentInfo$playlist.id,
21388 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
21389 segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
21390 index = segmentInfo.mediaIndex,
21391 partIndex = segmentInfo.partIndex,
21392 timeline = segmentInfo.timeline;
21393 var segmentLen = segments.length - 1;
21394 var selection = 'mediaIndex/partIndex increment';
21395
21396 if (segmentInfo.getMediaInfoForTime) {
21397 selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
21398 } else if (segmentInfo.isSyncRequest) {
21399 selection = 'getSyncSegmentCandidate (isSyncRequest)';
21400 }
21401
21402 if (segmentInfo.independent) {
21403 selection += " with independent " + segmentInfo.independent;
21404 }
21405
21406 var hasPartIndex = typeof partIndex === 'number';
21407 var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
21408 var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
21409 preloadSegment: segment
21410 }) - 1 : 0;
21411 return name + " [" + (seq + index) + "/" + (seq + segmentLen) + "]" + (hasPartIndex ? " part [" + partIndex + "/" + zeroBasedPartCount + "]" : '') + (" segment start/end [" + segment.start + " => " + segment.end + "]") + (hasPartIndex ? " part start/end [" + part.start + " => " + part.end + "]" : '') + (" startOfSegment [" + startOfSegment + "]") + (" duration [" + duration + "]") + (" timeline [" + timeline + "]") + (" selected by [" + selection + "]") + (" playlist [" + id + "]");
21412 };
21413
21414 var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
21415 return mediaType + "TimingInfo";
21416 };
21417 /**
21418 * Returns the timestamp offset to use for the segment.
21419 *
21420 * @param {number} segmentTimeline
21421 * The timeline of the segment
21422 * @param {number} currentTimeline
21423 * The timeline currently being followed by the loader
21424 * @param {number} startOfSegment
21425 * The estimated segment start
21426 * @param {TimeRange[]} buffered
21427 * The loader's buffer
21428 * @param {boolean} overrideCheck
21429 * If true, no checks are made to see if the timestamp offset value should be set,
21430 * but sets it directly to a value.
21431 *
21432 * @return {number|null}
21433 * Either a number representing a new timestamp offset, or null if the segment is
21434 * part of the same timeline
21435 */
21436
21437
21438 var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
21439 var segmentTimeline = _ref.segmentTimeline,
21440 currentTimeline = _ref.currentTimeline,
21441 startOfSegment = _ref.startOfSegment,
21442 buffered = _ref.buffered,
21443 overrideCheck = _ref.overrideCheck;
21444
21445 // Check to see if we are crossing a discontinuity to see if we need to set the
21446 // timestamp offset on the transmuxer and source buffer.
21447 //
21448 // Previously, we changed the timestampOffset if the start of this segment was less than
21449 // the currently set timestampOffset, but this isn't desirable as it can produce bad
21450 // behavior, especially around long running live streams.
21451 if (!overrideCheck && segmentTimeline === currentTimeline) {
21452 return null;
21453 } // When changing renditions, it's possible to request a segment on an older timeline. For
21454 // instance, given two renditions with the following:
21455 //
21456 // #EXTINF:10
21457 // segment1
21458 // #EXT-X-DISCONTINUITY
21459 // #EXTINF:10
21460 // segment2
21461 // #EXTINF:10
21462 // segment3
21463 //
21464 // And the current player state:
21465 //
21466 // current time: 8
21467 // buffer: 0 => 20
21468 //
21469 // The next segment on the current rendition would be segment3, filling the buffer from
21470 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
21471 // then the next segment to be requested will be segment1 from the new rendition in
21472 // order to fill time 8 and onwards. Using the buffered end would result in repeated
21473 // content (since it would position segment1 of the new rendition starting at 20s). This
21474 // case can be identified when the new segment's timeline is a prior value. Instead of
21475 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
21476 // more accurate to the actual start time of the segment.
21477
21478
21479 if (segmentTimeline < currentTimeline) {
21480 return startOfSegment;
21481 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
21482 // value uses the end of the last segment if it is available. While this value
21483 // should often be correct, it's better to rely on the buffered end, as the new
21484 // content post discontinuity should line up with the buffered end as if it were
21485 // time 0 for the new content.
21486
21487
21488 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
21489 };
21490 /**
21491 * Returns whether or not the loader should wait for a timeline change from the timeline
21492 * change controller before processing the segment.
21493 *
21494 * Primary timing in VHS goes by video. This is different from most media players, as
21495 * audio is more often used as the primary timing source. For the foreseeable future, VHS
21496 * will continue to use video as the primary timing source, due to the current logic and
21497 * expectations built around it.
21498
21499 * Since the timing follows video, in order to maintain sync, the video loader is
21500 * responsible for setting both audio and video source buffer timestamp offsets.
21501 *
21502 * Setting different values for audio and video source buffers could lead to
21503 * desyncing. The following examples demonstrate some of the situations where this
21504 * distinction is important. Note that all of these cases involve demuxed content. When
21505 * content is muxed, the audio and video are packaged together, therefore syncing
21506 * separate media playlists is not an issue.
21507 *
21508 * CASE 1: Audio prepares to load a new timeline before video:
21509 *
21510 * Timeline: 0 1
21511 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21512 * Audio Loader: ^
21513 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21514 * Video Loader ^
21515 *
21516 * In the above example, the audio loader is preparing to load the 6th segment, the first
21517 * after a discontinuity, while the video loader is still loading the 5th segment, before
21518 * the discontinuity.
21519 *
21520 * If the audio loader goes ahead and loads and appends the 6th segment before the video
21521 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
21522 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
21523 * the audio loader must provide the audioAppendStart value to trim the content in the
21524 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
21525 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
21526 * segment until that value is provided.
21527 *
21528 * CASE 2: Video prepares to load a new timeline before audio:
21529 *
21530 * Timeline: 0 1
21531 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21532 * Audio Loader: ^
21533 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21534 * Video Loader ^
21535 *
21536 * In the above example, the video loader is preparing to load the 6th segment, the first
21537 * after a discontinuity, while the audio loader is still loading the 5th segment, before
21538 * the discontinuity.
21539 *
21540 * If the video loader goes ahead and loads and appends the 6th segment, then once the
21541 * segment is loaded and processed, both the video and audio timestamp offsets will be
21542 * set, since video is used as the primary timing source. This is to ensure content lines
21543 * up appropriately, as any modifications to the video timing are reflected by audio when
21544 * the video loader sets the audio and video timestamp offsets to the same value. However,
21545 * setting the timestamp offset for audio before audio has had a chance to change
21546 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
21547 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
21548 *
21549 * CASE 3: When seeking, audio prepares to load a new timeline before video
21550 *
21551 * Timeline: 0 1
21552 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21553 * Audio Loader: ^
21554 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21555 * Video Loader ^
21556 *
21557 * In the above example, both audio and video loaders are loading segments from timeline
21558 * 0, but imagine that the seek originated from timeline 1.
21559 *
21560 * When seeking to a new timeline, the timestamp offset will be set based on the expected
21561 * segment start of the loaded video segment. In order to maintain sync, the audio loader
21562 * must wait for the video loader to load its segment and update both the audio and video
21563 * timestamp offsets before it may load and append its own segment. This is the case
21564 * whether the seek results in a mismatched segment request (e.g., the audio loader
21565 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
21566 * loaders choose to load the same segment index from each playlist, as the segments may
21567 * not be aligned perfectly, even for matching segment indexes.
21568 *
21569 * @param {Object} timelinechangeController
21570 * @param {number} currentTimeline
21571 * The timeline currently being followed by the loader
21572 * @param {number} segmentTimeline
21573 * The timeline of the segment being loaded
21574 * @param {('main'|'audio')} loaderType
21575 * The loader type
21576 * @param {boolean} audioDisabled
21577 * Whether the audio is disabled for the loader. This should only be true when the
21578 * loader may have muxed audio in its segment, but should not append it, e.g., for
21579 * the main loader when an alternate audio playlist is active.
21580 *
21581 * @return {boolean}
21582 * Whether the loader should wait for a timeline change from the timeline change
21583 * controller before processing the segment
21584 */
21585
21586 var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
21587 var timelineChangeController = _ref2.timelineChangeController,
21588 currentTimeline = _ref2.currentTimeline,
21589 segmentTimeline = _ref2.segmentTimeline,
21590 loaderType = _ref2.loaderType,
21591 audioDisabled = _ref2.audioDisabled;
21592
21593 if (currentTimeline === segmentTimeline) {
21594 return false;
21595 }
21596
21597 if (loaderType === 'audio') {
21598 var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
21599 type: 'main'
21600 }); // Audio loader should wait if:
21601 //
21602 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
21603 // * main hasn't yet changed to the timeline audio is looking to load
21604
21605 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
21606 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
21607 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
21608 // loader's segments (or the content is audio/video only and handled by the main
21609 // loader).
21610
21611
21612 if (loaderType === 'main' && audioDisabled) {
21613 var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
21614 type: 'audio'
21615 }); // Main loader should wait for the audio loader if audio is not pending a timeline
21616 // change to the current timeline.
21617 //
21618 // Since the main loader is responsible for setting the timestamp offset for both
21619 // audio and video, the main loader must wait for audio to be about to change to its
21620 // timeline before setting the offset, otherwise, if audio is behind in loading,
21621 // segments from the previous timeline would be adjusted by the new timestamp offset.
21622 //
21623 // This requirement means that video will not cross a timeline until the audio is
21624 // about to cross to it, so that way audio and video will always cross the timeline
21625 // together.
21626 //
21627 // In addition to normal timeline changes, these rules also apply to the start of a
21628 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
21629 // that these rules apply to the first timeline change because if they did not, it's
21630 // possible that the main loader will cross two timelines before the audio loader has
21631 // crossed one. Logic may be implemented to handle the startup as a special case, but
21632 // it's easier to simply treat all timeline changes the same.
21633
21634 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
21635 return false;
21636 }
21637
21638 return true;
21639 }
21640
21641 return false;
21642 };
21643 var mediaDuration = function mediaDuration(timingInfos) {
21644 var maxDuration = 0;
21645 ['video', 'audio'].forEach(function (type) {
21646 var typeTimingInfo = timingInfos[type + "TimingInfo"];
21647
21648 if (!typeTimingInfo) {
21649 return;
21650 }
21651
21652 var start = typeTimingInfo.start,
21653 end = typeTimingInfo.end;
21654 var duration;
21655
21656 if (typeof start === 'bigint' || typeof end === 'bigint') {
21657 duration = window.BigInt(end) - window.BigInt(start);
21658 } else if (typeof start === 'number' && typeof end === 'number') {
21659 duration = end - start;
21660 }
21661
21662 if (typeof duration !== 'undefined' && duration > maxDuration) {
21663 maxDuration = duration;
21664 }
21665 }); // convert back to a number if it is lower than MAX_SAFE_INTEGER
21666 // as we only need BigInt when we are above that.
21667
21668 if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
21669 maxDuration = Number(maxDuration);
21670 }
21671
21672 return maxDuration;
21673 };
21674 var segmentTooLong = function segmentTooLong(_ref3) {
21675 var segmentDuration = _ref3.segmentDuration,
21676 maxDuration = _ref3.maxDuration;
21677
21678 // 0 duration segments are most likely due to metadata only segments or a lack of
21679 // information.
21680 if (!segmentDuration) {
21681 return false;
21682 } // For HLS:
21683 //
21684 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
21685 // The EXTINF duration of each Media Segment in the Playlist
21686 // file, when rounded to the nearest integer, MUST be less than or equal
21687 // to the target duration; longer segments can trigger playback stalls
21688 // or other errors.
21689 //
21690 // For DASH, the mpd-parser uses the largest reported segment duration as the target
21691 // duration. Although that reported duration is occasionally approximate (i.e., not
21692 // exact), a strict check may report that a segment is too long more often in DASH.
21693
21694
21695 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
21696 };
21697 var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
21698 // Right now we aren't following DASH's timing model exactly, so only perform
21699 // this check for HLS content.
21700 if (sourceType !== 'hls') {
21701 return null;
21702 }
21703
21704 var segmentDuration = mediaDuration({
21705 audioTimingInfo: segmentInfo.audioTimingInfo,
21706 videoTimingInfo: segmentInfo.videoTimingInfo
21707 }); // Don't report if we lack information.
21708 //
21709 // If the segment has a duration of 0 it is either a lack of information or a
21710 // metadata only segment and shouldn't be reported here.
21711
21712 if (!segmentDuration) {
21713 return null;
21714 }
21715
21716 var targetDuration = segmentInfo.playlist.targetDuration;
21717 var isSegmentWayTooLong = segmentTooLong({
21718 segmentDuration: segmentDuration,
21719 maxDuration: targetDuration * 2
21720 });
21721 var isSegmentSlightlyTooLong = segmentTooLong({
21722 segmentDuration: segmentDuration,
21723 maxDuration: targetDuration
21724 });
21725 var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
21726
21727 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
21728 return {
21729 severity: isSegmentWayTooLong ? 'warn' : 'info',
21730 message: segmentTooLongMessage
21731 };
21732 }
21733
21734 return null;
21735 };
21736 /**
21737 * An object that manages segment loading and appending.
21738 *
21739 * @class SegmentLoader
21740 * @param {Object} options required and optional options
21741 * @extends videojs.EventTarget
21742 */
21743
21744 var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
21745 inheritsLoose(SegmentLoader, _videojs$EventTarget);
21746
    /**
     * Construct a SegmentLoader.
     *
     * @param {Object} settings - loader configuration; must at minimum supply
     *        a `currentTime` getter function and a `mediaSource`
     * @param {Object} [options] - accepted but not referenced by the
     *        constructor body (presumably consumed by subclasses — TODO
     *        confirm against OnlySegmentLoader/VTTSegmentLoader)
     * @throws {TypeError} when settings, settings.currentTime, or
     *        settings.mediaSource are missing
     */
    function SegmentLoader(settings, options) {
      var _this;

      _this = _videojs$EventTarget.call(this) || this; // check pre-conditions

      if (!settings) {
        throw new TypeError('Initialization settings are required');
      }

      if (typeof settings.currentTime !== 'function') {
        throw new TypeError('No currentTime getter specified');
      }

      if (!settings.mediaSource) {
        throw new TypeError('No MediaSource specified');
      } // public properties


      _this.bandwidth = settings.bandwidth;
      _this.throughput = {
        rate: 0,
        count: 0
      };
      _this.roundTrip = NaN;

      _this.resetStats_();

      _this.mediaIndex = null;
      _this.partIndex = null; // private settings

      _this.hasPlayed_ = settings.hasPlayed;
      _this.currentTime_ = settings.currentTime;
      _this.seekable_ = settings.seekable;
      _this.seeking_ = settings.seeking;
      _this.duration_ = settings.duration;
      _this.mediaSource_ = settings.mediaSource;
      _this.vhs_ = settings.vhs;
      _this.loaderType_ = settings.loaderType;
      _this.currentMediaInfo_ = void 0;
      _this.startingMediaInfo_ = void 0;
      _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
      _this.goalBufferLength_ = settings.goalBufferLength;
      _this.sourceType_ = settings.sourceType;
      _this.sourceUpdater_ = settings.sourceUpdater;
      _this.inbandTextTracks_ = settings.inbandTextTracks;
      _this.state_ = 'INIT';
      _this.timelineChangeController_ = settings.timelineChangeController;
      _this.shouldSaveSegmentTimingInfo_ = true;
      _this.parse708captions_ = settings.parse708captions;
      _this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
      _this.captionServices_ = settings.captionServices;
      _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables

      _this.checkBufferTimeout_ = null;
      _this.error_ = void 0;
      _this.currentTimeline_ = -1;
      _this.pendingSegment_ = null;
      _this.xhrOptions_ = null;
      _this.pendingSegments_ = [];
      _this.audioDisabled_ = false;
      _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller

      _this.gopBuffer_ = [];
      _this.timeMapping_ = 0;
      // IE 11+ requires "safe append" handling (see updateGopBuffer's replace mode).
      _this.safeAppend_ = videojs__default["default"].browser.IE_VERSION >= 11;
      _this.appendInitSegment_ = {
        audio: true,
        video: true
      };
      _this.playlistOfLastInitSegment_ = {
        audio: null,
        video: null
      };
      _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
      // information yet to start the loading process (e.g., if the audio loader wants to
      // load a segment from the next timeline but the main loader hasn't yet crossed that
      // timeline), then the load call will be added to the queue until it is ready to be
      // processed.

      _this.loadQueue_ = [];
      _this.metadataQueue_ = {
        id3: [],
        caption: []
      };
      _this.waitingOnRemove_ = false;
      _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback

      _this.activeInitSegmentId_ = null;
      _this.initSegments_ = {}; // HLSe playback

      _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
      _this.keyCache_ = {};
      _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
      // between a time in the display time and a segment index within
      // a playlist

      _this.syncController_ = settings.syncController;
      _this.syncPoint_ = {
        segmentIndex: 0,
        time: 0
      };
      _this.transmuxer_ = _this.createTransmuxer_();

      // Forward the sync controller's updates as this loader's own event.
      _this.triggerSyncInfoUpdate_ = function () {
        return _this.trigger('syncinfoupdate');
      };

      _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);

      _this.mediaSource_.addEventListener('sourceopen', function () {
        if (!_this.isEndOfStream_()) {
          _this.ended_ = false;
        }
      }); // ...for determining the fetch location


      _this.fetchAtBuffer_ = false;
      _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
      // 'state' wraps state_ so every transition is logged and announced via
      // a 'statechange' event.
      Object.defineProperty(assertThisInitialized(_this), 'state', {
        get: function get() {
          return this.state_;
        },
        set: function set(newState) {
          if (newState !== this.state_) {
            this.logger_(this.state_ + " -> " + newState);
            this.state_ = newState;
            this.trigger('statechange');
          }
        }
      });

      _this.sourceUpdater_.on('ready', function () {
        if (_this.hasEnoughInfoToAppend_()) {
          _this.processCallQueue_();
        }
      }); // Only the main loader needs to listen for pending timeline changes, as the main
      // loader should wait for audio to be ready to change its timeline so that both main
      // and audio timelines change together. For more details, see the
      // shouldWaitForTimelineChange function.


      if (_this.loaderType_ === 'main') {
        _this.timelineChangeController_.on('pendingtimelinechange', function () {
          if (_this.hasEnoughInfoToAppend_()) {
            _this.processCallQueue_();
          }
        });
      } // The main loader only listens on pending timeline changes, but the audio loader,
      // since its loads follow main, needs to listen on timeline changes. For more details,
      // see the shouldWaitForTimelineChange function.


      if (_this.loaderType_ === 'audio') {
        _this.timelineChangeController_.on('timelinechange', function () {
          if (_this.hasEnoughInfoToLoad_()) {
            _this.processLoadQueue_();
          }

          if (_this.hasEnoughInfoToAppend_()) {
            _this.processCallQueue_();
          }
        });
      }

      return _this;
    }
21913
  // Shorthand for the SegmentLoader prototype; the methods below are attached
  // to it one assignment at a time (this looks like transpiled class output).
  var _proto = SegmentLoader.prototype;
21915
21916 _proto.createTransmuxer_ = function createTransmuxer_() {
21917 return segmentTransmuxer.createTransmuxer({
21918 remux: false,
21919 alignGopsAtEnd: this.safeAppend_,
21920 keepOriginalTimestamps: true,
21921 parse708captions: this.parse708captions_,
21922 captionServices: this.captionServices_
21923 });
21924 }
21925 /**
21926 * reset all of our media stats
21927 *
21928 * @private
21929 */
21930 ;
21931
21932 _proto.resetStats_ = function resetStats_() {
21933 this.mediaBytesTransferred = 0;
21934 this.mediaRequests = 0;
21935 this.mediaRequestsAborted = 0;
21936 this.mediaRequestsTimedout = 0;
21937 this.mediaRequestsErrored = 0;
21938 this.mediaTransferDuration = 0;
21939 this.mediaSecondsLoaded = 0;
21940 this.mediaAppends = 0;
21941 }
21942 /**
21943 * dispose of the SegmentLoader and reset to the default state
21944 */
21945 ;
21946
  _proto.dispose = function dispose() {
    // Tear down the loader: notify listeners, stop all activity, release the
    // transmuxer, and detach external listeners. 'dispose' fires before the
    // state flips so handlers still observe the pre-disposal state.
    this.trigger('dispose');
    this.state = 'DISPOSED';
    this.pause();
    this.abort_();

    if (this.transmuxer_) {
      // terminate() — presumably a web worker backing the transmuxer; confirm
      // against segmentTransmuxer.createTransmuxer
      this.transmuxer_.terminate();
    }

    this.resetStats_();

    // cancel any scheduled buffer check
    if (this.checkBufferTimeout_) {
      window.clearTimeout(this.checkBufferTimeout_);
    }

    // detach the sync-info listener registered in the constructor
    if (this.syncController_ && this.triggerSyncInfoUpdate_) {
      this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
    }

    // remove all remaining event listeners from this loader
    this.off();
  };
21969
21970 _proto.setAudio = function setAudio(enable) {
21971 this.audioDisabled_ = !enable;
21972
21973 if (enable) {
21974 this.appendInitSegment_.audio = true;
21975 } else {
21976 // remove current track audio if it gets disabled
21977 this.sourceUpdater_.removeAudio(0, this.duration_());
21978 }
21979 }
21980 /**
21981 * abort anything that is currently doing on with the SegmentLoader
21982 * and reset to a default state
21983 */
21984 ;
21985
  _proto.abort = function abort() {
    // Only WAITING means a request is actually outstanding; otherwise just
    // drop any stale pending segment reference and bail.
    if (this.state !== 'WAITING') {
      if (this.pendingSegment_) {
        this.pendingSegment_ = null;
      }

      return;
    }

    this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
    // since we are no longer "waiting" on any requests. XHR callback is not always run
    // when the request is aborted. This will prevent the loader from being stuck in the
    // WAITING state indefinitely.

    this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
    // next segment

    if (!this.paused()) {
      this.monitorBuffer_();
    }
  }
22007 /**
22008 * abort all pending xhr requests and null any pending segements
22009 *
22010 * @private
22011 */
22012 ;
22013
  _proto.abort_ = function abort_() {
    // Hard-abort any in-flight segment requests and wipe all queued work so
    // the loader returns to a clean slate.
    if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
      this.pendingSegment_.abortRequests();
    } // clear out the segment being processed


    this.pendingSegment_ = null;
    this.callQueue_ = [];
    this.loadQueue_ = [];
    this.metadataQueue_.id3 = [];
    this.metadataQueue_.caption = [];
    this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
    this.waitingOnRemove_ = false;
    // cancel a pending QuotaExceededError retry, if any
    window.clearTimeout(this.quotaExceededErrorRetryTimeout_);
    this.quotaExceededErrorRetryTimeout_ = null;
  };
22030
22031 _proto.checkForAbort_ = function checkForAbort_(requestId) {
22032 // If the state is APPENDING, then aborts will not modify the state, meaning the first
22033 // callback that happens should reset the state to READY so that loading can continue.
22034 if (this.state === 'APPENDING' && !this.pendingSegment_) {
22035 this.state = 'READY';
22036 return true;
22037 }
22038
22039 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
22040 return true;
22041 }
22042
22043 return false;
22044 }
22045 /**
22046 * set an error on the segment loader and null out any pending segements
22047 *
22048 * @param {Error} error the error to set on the SegmentLoader
22049 * @return {Error} the error that was set or that is currently set
22050 */
22051 ;
22052
22053 _proto.error = function error(_error) {
22054 if (typeof _error !== 'undefined') {
22055 this.logger_('error occurred:', _error);
22056 this.error_ = _error;
22057 }
22058
22059 this.pendingSegment_ = null;
22060 return this.error_;
22061 };
22062
  _proto.endOfStream = function endOfStream() {
    // Mark this loader as having reached the end of its content, flush
    // transmuxer/GOP caches, stop fetching, and notify via 'ended'.
    this.ended_ = true;

    if (this.transmuxer_) {
      // need to clear out any cached data to prepare for the new segment
      segmentTransmuxer.reset(this.transmuxer_);
    }

    // truncate in place so existing references see the cleared buffer
    this.gopBuffer_.length = 0;
    this.pause();
    this.trigger('ended');
  }
22075 /**
22076 * Indicates which time ranges are buffered
22077 *
22078 * @return {TimeRange}
22079 * TimeRange object representing the current buffered ranges
22080 */
22081 ;
22082
22083 _proto.buffered_ = function buffered_() {
22084 var trackInfo = this.getMediaInfo_();
22085
22086 if (!this.sourceUpdater_ || !trackInfo) {
22087 return videojs__default["default"].createTimeRanges();
22088 }
22089
22090 if (this.loaderType_ === 'main') {
22091 var hasAudio = trackInfo.hasAudio,
22092 hasVideo = trackInfo.hasVideo,
22093 isMuxed = trackInfo.isMuxed;
22094
22095 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
22096 return this.sourceUpdater_.buffered();
22097 }
22098
22099 if (hasVideo) {
22100 return this.sourceUpdater_.videoBuffered();
22101 }
22102 } // One case that can be ignored for now is audio only with alt audio,
22103 // as we don't yet have proper support for that.
22104
22105
22106 return this.sourceUpdater_.audioBuffered();
22107 }
22108 /**
22109 * Gets and sets init segment for the provided map
22110 *
22111 * @param {Object} map
22112 * The map object representing the init segment to get or set
22113 * @param {boolean=} set
22114 * If true, the init segment for the provided map should be saved
22115 * @return {Object}
22116 * map object for desired init segment
22117 */
22118 ;
22119
22120 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
22121 if (set === void 0) {
22122 set = false;
22123 }
22124
22125 if (!map) {
22126 return null;
22127 }
22128
22129 var id = initSegmentId(map);
22130 var storedMap = this.initSegments_[id];
22131
22132 if (set && !storedMap && map.bytes) {
22133 this.initSegments_[id] = storedMap = {
22134 resolvedUri: map.resolvedUri,
22135 byterange: map.byterange,
22136 bytes: map.bytes,
22137 tracks: map.tracks,
22138 timescales: map.timescales
22139 };
22140 }
22141
22142 return storedMap || map;
22143 }
22144 /**
22145 * Gets and sets key for the provided key
22146 *
22147 * @param {Object} key
22148 * The key object representing the key to get or set
22149 * @param {boolean=} set
22150 * If true, the key for the provided key should be saved
22151 * @return {Object}
22152 * Key object for desired key
22153 */
22154 ;
22155
22156 _proto.segmentKey = function segmentKey(key, set) {
22157 if (set === void 0) {
22158 set = false;
22159 }
22160
22161 if (!key) {
22162 return null;
22163 }
22164
22165 var id = segmentKeyId(key);
22166 var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
22167 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
22168
22169 if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
22170 this.keyCache_[id] = storedKey = {
22171 resolvedUri: key.resolvedUri,
22172 bytes: key.bytes
22173 };
22174 }
22175
22176 var result = {
22177 resolvedUri: (storedKey || key).resolvedUri
22178 };
22179
22180 if (storedKey) {
22181 result.bytes = storedKey.bytes;
22182 }
22183
22184 return result;
22185 }
22186 /**
22187 * Returns true if all configuration required for loading is present, otherwise false.
22188 *
22189 * @return {boolean} True if the all configuration is ready for loading
22190 * @private
22191 */
22192 ;
22193
22194 _proto.couldBeginLoading_ = function couldBeginLoading_() {
22195 return this.playlist_ && !this.paused();
22196 }
22197 /**
22198 * load a playlist and start to fill the buffer
22199 */
22200 ;
22201
22202 _proto.load = function load() {
22203 // un-pause
22204 this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
22205 // specified
22206
22207 if (!this.playlist_) {
22208 return;
22209 } // if all the configuration is ready, initialize and begin loading
22210
22211
22212 if (this.state === 'INIT' && this.couldBeginLoading_()) {
22213 return this.init_();
22214 } // if we're in the middle of processing a segment already, don't
22215 // kick off an additional segment request
22216
22217
22218 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
22219 return;
22220 }
22221
22222 this.state = 'READY';
22223 }
22224 /**
22225 * Once all the starting parameters have been specified, begin
22226 * operation. This method should only be invoked from the INIT
22227 * state.
22228 *
22229 * @private
22230 */
22231 ;
22232
  _proto.init_ = function init_() {
    // Transition out of INIT: flush any stale state and start monitoring.
    this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
    // audio data from the muxed content should be removed

    this.resetEverything();
    return this.monitorBuffer_();
  }
22240 /**
22241 * set a playlist on the segment loader
22242 *
22243 * @param {PlaylistLoader} media the playlist to set on the segment loader
22244 */
22245 ;
22246
  _proto.playlist = function playlist(newPlaylist, options) {
    // Install a new (possibly refreshed) media playlist on this loader and
    // reconcile mediaIndex / pendingSegment_ with any live-window shift.
    if (options === void 0) {
      options = {};
    }

    if (!newPlaylist) {
      return;
    }

    var oldPlaylist = this.playlist_;
    var segmentInfo = this.pendingSegment_;
    this.playlist_ = newPlaylist;
    this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
    // is always our zero-time so force a sync update each time the playlist
    // is refreshed from the server
    //
    // Use the INIT state to determine if playback has started, as the playlist sync info
    // should be fixed once requests begin (as sync points are generated based on sync
    // info), but not before then.

    if (this.state === 'INIT') {
      newPlaylist.syncInfo = {
        mediaSequence: newPlaylist.mediaSequence,
        time: 0
      }; // Setting the date time mapping means mapping the program date time (if available)
      // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
      // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
      // be updated as the playlist is refreshed before the loader starts loading, the
      // program date time mapping needs to be updated as well.
      //
      // This mapping is only done for the main loader because a program date time should
      // map equivalently between playlists.

      if (this.loaderType_ === 'main') {
        this.syncController_.setDateTimeMappingForStart(newPlaylist);
      }
    }

    var oldId = null;

    if (oldPlaylist) {
      if (oldPlaylist.id) {
        oldId = oldPlaylist.id;
      } else if (oldPlaylist.uri) {
        oldId = oldPlaylist.uri;
      }
    }

    this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
    // in LIVE, we always want to update with new playlists (including refreshes)

    this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
    // buffering now

    if (this.state === 'INIT' && this.couldBeginLoading_()) {
      return this.init_();
    }

    // different rendition (or first playlist): reset/resync and stop here
    if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
      if (this.mediaIndex !== null) {
        // we must reset/resync the segment loader when we switch renditions and
        // the segment loader is already synced to the previous rendition
        // on playlist changes we want it to be possible to fetch
        // at the buffer for vod but not for live. So we use resetLoader
        // for live and resyncLoader for vod. We want this because
        // if a playlist uses independent and non-independent segments/parts the
        // buffer may not accurately reflect the next segment that we should try
        // downloading.
        if (!newPlaylist.endList) {
          this.resetLoader();
        } else {
          this.resyncLoader();
        }
      }

      this.currentMediaInfo_ = void 0;
      this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined

      return;
    } // we reloaded the same playlist so we are in a live scenario
    // and we will likely need to adjust the mediaIndex


    var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
    this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
    // this is important because we can abort a request and this value must be
    // equal to the last appended mediaIndex

    if (this.mediaIndex !== null) {
      this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
      // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
      // new playlist was incremented by 1.

      if (this.mediaIndex < 0) {
        this.mediaIndex = null;
        this.partIndex = null;
      } else {
        var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
        // unless parts fell off of the playlist for this segment.
        // In that case we need to reset partIndex and resync

        if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
          var mediaIndex = this.mediaIndex;
          this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
          this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
          // as the part was dropped from our current playlists segment.
          // The mediaIndex will still be valid so keep that around.

          this.mediaIndex = mediaIndex;
        }
      }
    } // update the mediaIndex on the SegmentInfo object
    // this is important because we will update this.mediaIndex with this value
    // in `handleAppendsDone_` after the segment has been successfully appended


    if (segmentInfo) {
      segmentInfo.mediaIndex -= mediaSequenceDiff;

      if (segmentInfo.mediaIndex < 0) {
        segmentInfo.mediaIndex = null;
        segmentInfo.partIndex = null;
      } else {
        // we need to update the referenced segment so that timing information is
        // saved for the new playlist's segment, however, if the segment fell off the
        // playlist, we can leave the old reference and just lose the timing info
        if (segmentInfo.mediaIndex >= 0) {
          segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
        }

        if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
          segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
        }
      }
    }

    this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
  }
22385 /**
22386 * Prevent the loader from fetching additional segments. If there
22387 * is a segment request outstanding, it will finish processing
22388 * before the loader halts. A segment loader can be unpaused by
22389 * calling load().
22390 */
22391 ;
22392
22393 _proto.pause = function pause() {
22394 if (this.checkBufferTimeout_) {
22395 window.clearTimeout(this.checkBufferTimeout_);
22396 this.checkBufferTimeout_ = null;
22397 }
22398 }
22399 /**
22400 * Returns whether the segment loader is fetching additional
22401 * segments when given the opportunity. This property can be
22402 * modified through calls to pause() and load().
22403 */
22404 ;
22405
  _proto.paused = function paused() {
    // A null timer handle means no buffer check is scheduled, i.e. the loader
    // is not fetching; see pause() and monitorBuffer_().
    return this.checkBufferTimeout_ === null;
  }
22409 /**
22410 * Delete all the buffered data and reset the SegmentLoader
22411 *
22412 * @param {Function} [done] an optional callback to be executed when the remove
22413 * operation is complete
22414 */
22415 ;
22416
  _proto.resetEverything = function resetEverything(done) {
    // Full reset: clear ended state, force fresh init segments, resync the
    // loader, wipe the buffer, and flush transmuxer caches.
    this.ended_ = false;
    this.appendInitSegment_ = {
      audio: true,
      video: true
    };
    this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
    // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
    // we then clamp the value to duration if necessary.

    this.remove(0, Infinity, done); // clears fmp4 captions

    if (this.transmuxer_) {
      this.transmuxer_.postMessage({
        action: 'clearAllMp4Captions'
      }); // reset the cache in the transmuxer

      this.transmuxer_.postMessage({
        action: 'reset'
      });
    }
  }
22439 /**
22440 * Force the SegmentLoader to resync and start loading around the currentTime instead
22441 * of starting at the end of the buffer
22442 *
22443 * Useful for fast quality changes
22444 */
22445 ;
22446
  _proto.resetLoader = function resetLoader() {
    // Clearing fetchAtBuffer_ makes the next request be chosen relative to
    // currentTime instead of the buffered end, then resynchronize.
    this.fetchAtBuffer_ = false;
    this.resyncLoader();
  }
22451 /**
22452 * Force the SegmentLoader to restart synchronization and make a conservative guess
22453 * before returning to the simple walk-forward method
22454 */
22455 ;
22456
  _proto.resyncLoader = function resyncLoader() {
    // Drop all position/sync state and queued work so the loader must
    // re-establish a sync point before its next request.
    if (this.transmuxer_) {
      // need to clear out any cached data to prepare for the new segment
      segmentTransmuxer.reset(this.transmuxer_);
    }

    this.mediaIndex = null;
    this.partIndex = null;
    this.syncPoint_ = null;
    this.isPendingTimestampOffset_ = false;
    this.callQueue_ = [];
    this.loadQueue_ = [];
    this.metadataQueue_.id3 = [];
    this.metadataQueue_.caption = [];
    this.abort();

    if (this.transmuxer_) {
      this.transmuxer_.postMessage({
        action: 'clearParsedMp4Captions'
      });
    }
  }
22479 /**
22480 * Remove any data in the source buffer between start and end times
22481 *
22482 * @param {number} start - the start time of the region to remove from the buffer
22483 * @param {number} end - the end time of the region to remove from the buffer
22484 * @param {Function} [done] - an optional callback to be executed when the remove
22485 * @param {boolean} force - force all remove operations to happen
22486 * operation is complete
22487 */
22488 ;
22489
22490 _proto.remove = function remove(start, end, done, force) {
22491 if (done === void 0) {
22492 done = function done() {};
22493 }
22494
22495 if (force === void 0) {
22496 force = false;
22497 }
22498
22499 // clamp end to duration if we need to remove everything.
22500 // This is due to a browser bug that causes issues if we remove to Infinity.
22501 // videojs/videojs-contrib-hls#1225
22502 if (end === Infinity) {
22503 end = this.duration_();
22504 } // skip removes that would throw an error
22505 // commonly happens during a rendition switch at the start of a video
22506 // from start 0 to end 0
22507
22508
22509 if (end <= start) {
22510 this.logger_('skipping remove because end ${end} is <= start ${start}');
22511 return;
22512 }
22513
22514 if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
22515 this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
22516
22517 return;
22518 } // set it to one to complete this function's removes
22519
22520
22521 var removesRemaining = 1;
22522
22523 var removeFinished = function removeFinished() {
22524 removesRemaining--;
22525
22526 if (removesRemaining === 0) {
22527 done();
22528 }
22529 };
22530
22531 if (force || !this.audioDisabled_) {
22532 removesRemaining++;
22533 this.sourceUpdater_.removeAudio(start, end, removeFinished);
22534 } // While it would be better to only remove video if the main loader has video, this
22535 // should be safe with audio only as removeVideo will call back even if there's no
22536 // video buffer.
22537 //
22538 // In theory we can check to see if there's video before calling the remove, but in
22539 // the event that we're switching between renditions and from video to audio only
22540 // (when we add support for that), we may need to clear the video contents despite
22541 // what the new media will contain.
22542
22543
22544 if (force || this.loaderType_ === 'main') {
22545 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
22546 removesRemaining++;
22547 this.sourceUpdater_.removeVideo(start, end, removeFinished);
22548 } // remove any captions and ID3 tags
22549
22550
22551 for (var track in this.inbandTextTracks_) {
22552 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
22553 }
22554
22555 removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
22556
22557 removeFinished();
22558 }
22559 /**
22560 * (re-)schedule monitorBufferTick_ to run as soon as possible
22561 *
22562 * @private
22563 */
22564 ;
22565
  _proto.monitorBuffer_ = function monitorBuffer_() {
    // Re-arm the buffer check to fire on the next tick, replacing any
    // previously scheduled check.
    if (this.checkBufferTimeout_) {
      window.clearTimeout(this.checkBufferTimeout_);
    }

    this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), 1);
  }
22573 /**
22574 * As long as the SegmentLoader is in the READY state, periodically
22575 * invoke fillBuffer_().
22576 *
22577 * @private
22578 */
22579 ;
22580
22581 _proto.monitorBufferTick_ = function monitorBufferTick_() {
22582 if (this.state === 'READY') {
22583 this.fillBuffer_();
22584 }
22585
22586 if (this.checkBufferTimeout_) {
22587 window.clearTimeout(this.checkBufferTimeout_);
22588 }
22589
22590 this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
22591 }
22592 /**
22593 * fill the buffer with segements unless the sourceBuffers are
22594 * currently updating
22595 *
22596 * Note: this function should only ever be called by monitorBuffer_
22597 * and never directly
22598 *
22599 * @private
22600 */
22601 ;
22602
  _proto.fillBuffer_ = function fillBuffer_() {
    // TODO since the source buffer maintains a queue, and we shouldn't call this function
    // except when we're ready for the next segment, this check can most likely be removed
    if (this.sourceUpdater_.updating()) {
      return;
    } // see if we need to begin loading immediately


    var segmentInfo = this.chooseNextRequest_();

    if (!segmentInfo) {
      return;
    }

    // a numeric timestampOffset means this request will cross a timeline
    // boundary, so record the pending timeline change before loading
    if (typeof segmentInfo.timestampOffset === 'number') {
      this.isPendingTimestampOffset_ = false;
      this.timelineChangeController_.pendingTimelineChange({
        type: this.loaderType_,
        from: this.currentTimeline_,
        to: segmentInfo.timeline
      });
    }

    this.loadSegment_(segmentInfo);
  }
22628 /**
22629 * Determines if we should call endOfStream on the media source based
22630 * on the state of the buffer or if appened segment was the final
22631 * segment in the playlist.
22632 *
22633 * @param {number} [mediaIndex] the media index of segment we last appended
22634 * @param {Object} [playlist] a media playlist object
22635 * @return {boolean} do we need to call endOfStream on the MediaSource
22636 */
22637 ;
22638
  _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
    // True when the last segment (and last part, for partial segments) of a
    // finished (endList) playlist has been appended and the MediaSource is
    // open — i.e. endOfStream should be called.
    if (mediaIndex === void 0) {
      mediaIndex = this.mediaIndex;
    }

    if (playlist === void 0) {
      playlist = this.playlist_;
    }

    if (partIndex === void 0) {
      partIndex = this.partIndex;
    }

    if (!playlist || !this.mediaSource_) {
      return false;
    }

    var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based

    var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.

    var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
    // so that MediaSources can trigger the `ended` event when it runs out of
    // buffered data instead of waiting for me

    return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
  }
22666 /**
22667 * Determines what request should be made given current segment loader state.
22668 *
22669 * @return {Object} a request object that describes the segment/part to load
22670 */
22671 ;
22672
  _proto.chooseNextRequest_ = function chooseNextRequest_() {
    // Decide which segment/part (if any) should be requested next, based on
    // current buffer level, sync state, and playlist position.
    var buffered = this.buffered_();
    var bufferedEnd = lastBufferedEnd(buffered) || 0;
    var bufferedTime = timeAheadOf(buffered, this.currentTime_());
    var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
    var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
    var segments = this.playlist_.segments; // return no segment if:
    // 1. we don't have segments
    // 2. The video has not yet played and we already downloaded a segment
    // 3. we already have enough buffered time

    if (!segments.length || preloaded || haveEnoughBuffer) {
      return null;
    }

    this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
    var next = {
      partIndex: null,
      mediaIndex: null,
      startOfSegment: null,
      playlist: this.playlist_,
      isSyncRequest: Boolean(!this.syncPoint_)
    };

    if (next.isSyncRequest) {
      // no sync point: request a candidate segment purely to establish sync
      next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
    } else if (this.mediaIndex !== null) {
      // walk forward from the last loaded segment/part
      var segment = segments[this.mediaIndex];
      var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
      next.startOfSegment = segment.end ? segment.end : bufferedEnd;

      if (segment.parts && segment.parts[partIndex + 1]) {
        next.mediaIndex = this.mediaIndex;
        next.partIndex = partIndex + 1;
      } else {
        next.mediaIndex = this.mediaIndex + 1;
      }
    } else {
      // Find the segment containing the end of the buffer or current time.
      var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
        experimentalExactManifestTimings: this.experimentalExactManifestTimings,
        playlist: this.playlist_,
        currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
        startingPartIndex: this.syncPoint_.partIndex,
        startingSegmentIndex: this.syncPoint_.segmentIndex,
        startTime: this.syncPoint_.time
      }),
          segmentIndex = _Playlist$getMediaInf.segmentIndex,
          startTime = _Playlist$getMediaInf.startTime,
          _partIndex = _Playlist$getMediaInf.partIndex;

      next.getMediaInfoForTime = this.fetchAtBuffer_ ? "bufferedEnd " + bufferedEnd : "currentTime " + this.currentTime_();
      next.mediaIndex = segmentIndex;
      next.startOfSegment = startTime;
      next.partIndex = _partIndex;
    }

    var nextSegment = segments[next.mediaIndex];
    var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
    // the next partIndex is invalid do not choose a next segment.

    if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
      return null;
    } // if the next segment has parts, and we don't have a partIndex.
    // Set partIndex to 0


    if (typeof next.partIndex !== 'number' && nextSegment.parts) {
      next.partIndex = 0;
      nextPart = nextSegment.parts[0];
    } // if we have no buffered data then we need to make sure
    // that the next part we append is "independent" if possible.
    // So we check if the previous part is independent, and request
    // it if it is.


    if (!bufferedTime && nextPart && !nextPart.independent) {
      if (next.partIndex === 0) {
        var lastSegment = segments[next.mediaIndex - 1];
        var lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];

        if (lastSegmentLastPart && lastSegmentLastPart.independent) {
          next.mediaIndex -= 1;
          next.partIndex = lastSegment.parts.length - 1;
          next.independent = 'previous segment';
        }
      } else if (nextSegment.parts[next.partIndex - 1].independent) {
        next.partIndex -= 1;
        next.independent = 'previous part';
      }
    }

    var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
    // 1. this is the last segment in the playlist
    // 2. end of stream has been called on the media source already
    // 3. the player is not seeking

    if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
      return null;
    }

    return this.generateSegmentInfo_(next);
  };
22776
  /**
   * Build the segmentInfo object that describes a single segment (or part)
   * request: resolved URI, expected timing, timestampOffset and append hints.
   *
   * @param {Object} options
   *        Request description: playlist, mediaIndex, partIndex,
   *        startOfSegment, isSyncRequest, forceTimestampOffset, independent
   *        and getMediaInfoForTime.
   * @return {Object} segmentInfo consumed by the request/append pipeline
   * @private
   */
  _proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
    var independent = options.independent,
        playlist = options.playlist,
        mediaIndex = options.mediaIndex,
        startOfSegment = options.startOfSegment,
        isSyncRequest = options.isSyncRequest,
        partIndex = options.partIndex,
        forceTimestampOffset = options.forceTimestampOffset,
        getMediaInfoForTime = options.getMediaInfoForTime;
    var segment = playlist.segments[mediaIndex];
    // `part` is falsy when partIndex isn't a number; used below to fall back
    // to whole-segment values (uri/duration).
    var part = typeof partIndex === 'number' && segment.parts[partIndex];
    var segmentInfo = {
      requestId: 'segment-loader-' + Math.random(),
      // resolve the segment URL relative to the playlist
      uri: part && part.resolvedUri || segment.resolvedUri,
      // the segment's mediaIndex at the time it was requested
      mediaIndex: mediaIndex,
      partIndex: part ? partIndex : null,
      // whether or not to update the SegmentLoader's state with this
      // segment's mediaIndex
      isSyncRequest: isSyncRequest,
      startOfSegment: startOfSegment,
      // the segment's playlist
      playlist: playlist,
      // unencrypted bytes of the segment
      bytes: null,
      // when a key is defined for this segment, the encrypted bytes
      encryptedBytes: null,
      // The target timestampOffset for this segment when we append it
      // to the source buffer
      timestampOffset: null,
      // The timeline that the segment is in
      timeline: segment.timeline,
      // The expected duration of the segment in seconds
      duration: part && part.duration || segment.duration,
      // retain the segment in case the playlist updates while doing an async process
      segment: segment,
      part: part,
      byteLength: 0,
      transmuxer: this.transmuxer_,
      // type of getMediaInfoForTime that was used to get this segment
      getMediaInfoForTime: getMediaInfoForTime,
      independent: independent
    };
    // an explicit forceTimestampOffset wins over the loader's pending flag
    var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
    segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
      segmentTimeline: segment.timeline,
      currentTimeline: this.currentTimeline_,
      startOfSegment: startOfSegment,
      buffered: this.buffered_(),
      overrideCheck: overrideCheck
    });
    var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());

    if (typeof audioBufferedEnd === 'number') {
      // since the transmuxer is using the actual timing values, but the buffer is
      // adjusted by the timestamp offset, we must adjust the value here
      segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
    }

    if (this.sourceUpdater_.videoBuffered().length) {
      segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
      // adjusted by the timestamp offset, we must adjust the value here
      this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
    }

    return segmentInfo;
  } // get the timestampoffset for a segment,
  // added so that vtt segment loader can override and prevent
  // adding timestamp offsets.
  ;
22848
22849 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
22850 return timestampOffsetForSegment(options);
22851 }
22852 /**
22853 * Determines if the network has enough bandwidth to complete the current segment
22854 * request in a timely manner. If not, the request will be aborted early and bandwidth
22855 * updated to trigger a playlist switch.
22856 *
22857 * @param {Object} stats
22858 * Object containing stats about the request timing and size
22859 * @private
22860 */
22861 ;
22862
  // Abort an in-flight segment request early when the measured bandwidth
  // predicts a rebuffer before the download finishes and a better rendition
  // exists; sets this.bandwidth and triggers 'earlyabort' when switching.
  _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
    if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
    // TODO: Replace using timeout with a boolean indicating whether this playlist is
    // the lowestEnabledRendition.
    !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
    !this.playlist_.attributes.BANDWIDTH) {
      return;
    } // Wait at least 1 second since the first byte of data has been received before
    // using the calculated bandwidth from the progress event to allow the bitrate
    // to stabilize


    if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
      return;
    }

    var currentTime = this.currentTime_();
    var measuredBandwidth = stats.bandwidth;
    var segmentDuration = this.pendingSegment_.duration;
    var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
    // if we are only left with less than 1 second when the request completes.
    // A negative timeUntilRebuffering indicates we are already rebuffering

    var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
    // is larger than the estimated time until the player runs out of forward buffer

    if (requestTimeRemaining <= timeUntilRebuffer$1) {
      return;
    }

    var switchCandidate = minRebufferMaxBandwidthSelector({
      master: this.vhs_.playlists.master,
      currentTime: currentTime,
      bandwidth: measuredBandwidth,
      duration: this.duration_(),
      segmentDuration: segmentDuration,
      timeUntilRebuffer: timeUntilRebuffer$1,
      currentTimeline: this.currentTimeline_,
      syncController: this.syncController_
    });

    if (!switchCandidate) {
      return;
    }

    // Estimated seconds of rebuffering if we stay vs. if we switch.
    var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
    var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
    var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
    // potential round trip time of the new request so that we are not too aggressive
    // with switching to a playlist that might save us a fraction of a second.

    if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
      minimumTimeSaving = 1;
    }

    if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
      return;
    } // set the bandwidth to that of the desired playlist being sure to scale by
    // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
    // don't trigger a bandwidthupdate as the bandwidth is artificial


    this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
    this.trigger('earlyabort');
  };
22928
22929 _proto.handleAbort_ = function handleAbort_(segmentInfo) {
22930 this.logger_("Aborting " + segmentInfoString(segmentInfo));
22931 this.mediaRequestsAborted += 1;
22932 }
22933 /**
22934 * XHR `progress` event handler
22935 *
22936 * @param {Event}
22937 * The XHR `progress` event
22938 * @param {Object} simpleSegment
22939 * A simplified segment object copy
22940 * @private
22941 */
22942 ;
22943
22944 _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
22945 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22946
22947 if (this.checkForAbort_(simpleSegment.requestId)) {
22948 return;
22949 }
22950
22951 this.trigger('progress');
22952 };
22953
  // Handle track info (hasAudio/hasVideo/isMuxed) reported for the in-flight
  // segment; updates loader media info and unblocks queued appends.
  _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
    this.earlyAbortWhenNeeded_(simpleSegment.stats);

    if (this.checkForAbort_(simpleSegment.requestId)) {
      return;
    }

    // e.g. switching between muxed and demuxed content is not supported
    if (this.checkForIllegalMediaSwitch(trackInfo)) {
      return;
    }

    trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
    // Guard against cases where we're not getting track info at all until we are
    // certain that all streams will provide it.

    if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
      // media configuration changed: force re-append of both init segments
      this.appendInitSegment_ = {
        audio: true,
        video: true
      };
      this.startingMediaInfo_ = trackInfo;
      this.currentMediaInfo_ = trackInfo;
      this.logger_('trackinfo update', trackInfo);
      this.trigger('trackinfo');
    } // trackinfo may cause an abort if the trackinfo
    // causes a codec change to an unsupported codec.


    if (this.checkForAbort_(simpleSegment.requestId)) {
      return;
    } // set trackinfo on the pending segment so that
    // it can append.


    this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info

    if (this.hasEnoughInfoToAppend_()) {
      this.processCallQueue_();
    }
  };
22994
22995 _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
22996 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22997
22998 if (this.checkForAbort_(simpleSegment.requestId)) {
22999 return;
23000 }
23001
23002 var segmentInfo = this.pendingSegment_;
23003 var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
23004 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
23005 segmentInfo[timingInfoProperty][timeType] = time;
23006 this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
23007
23008 if (this.hasEnoughInfoToAppend_()) {
23009 this.processCallQueue_();
23010 }
23011 };
23012
  // Handle caption data parsed from the in-flight segment: group cues per
  // track, create/clear in-band text tracks as needed, and add the cues with
  // the current timestamp offset applied.
  _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
    var _this2 = this;

    this.earlyAbortWhenNeeded_(simpleSegment.stats);

    if (this.checkForAbort_(simpleSegment.requestId)) {
      return;
    } // This could only happen with fmp4 segments, but
    // should still not happen in general


    if (captionData.length === 0) {
      this.logger_('SegmentLoader received no captions from a caption event');
      return;
    }

    var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
    // can be adjusted by the timestamp offset

    if (!segmentInfo.hasAppendedData_) {
      // re-run this handler from processMetadataQueue_ after the first append
      this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
      return;
    }

    // Prefer the video timestamp offset; fall back to audio when no video is buffered.
    var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
    var captionTracks = {}; // get total start/end and captions for each track/stream

    captionData.forEach(function (caption) {
      // caption.stream is actually a track name...
      // set to the existing values in tracks or default values
      captionTracks[caption.stream] = captionTracks[caption.stream] || {
        // Infinity, as any other value will be less than this
        startTime: Infinity,
        captions: [],
        // 0, as any other value will be more than this
        endTime: 0
      };
      var captionTrack = captionTracks[caption.stream];
      captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
      captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
      captionTrack.captions.push(caption);
    });
    Object.keys(captionTracks).forEach(function (trackName) {
      var _captionTracks$trackN = captionTracks[trackName],
          startTime = _captionTracks$trackN.startTime,
          endTime = _captionTracks$trackN.endTime,
          captions = _captionTracks$trackN.captions;
      var inbandTextTracks = _this2.inbandTextTracks_;

      _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);

      createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
      // We do this because a rendition change that also changes the timescale for captions
      // will result in captions being re-parsed for certain segments. If we add them again
      // without clearing we will have two of the same captions visible.

      removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
      addCaptionData({
        captionArray: captions,
        inbandTextTracks: inbandTextTracks,
        timestampOffset: timestampOffset
      });
    }); // Reset stored captions since we added parsed
    // captions to a text track at this point

    if (this.transmuxer_) {
      this.transmuxer_.postMessage({
        action: 'clearParsedMp4Captions'
      });
    }
  };
23084
23085 _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
23086 this.earlyAbortWhenNeeded_(simpleSegment.stats);
23087
23088 if (this.checkForAbort_(simpleSegment.requestId)) {
23089 return;
23090 }
23091
23092 var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
23093
23094 if (!segmentInfo.hasAppendedData_) {
23095 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
23096 return;
23097 }
23098
23099 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
23100 // audio/video source with a metadata track, and an alt audio with a metadata track.
23101 // However, this probably won't happen, and if it does it can be handled then.
23102
23103 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
23104 addMetadata({
23105 inbandTextTracks: this.inbandTextTracks_,
23106 metadataArray: id3Frames,
23107 timestampOffset: timestampOffset,
23108 videoDuration: this.duration_()
23109 });
23110 };
23111
23112 _proto.processMetadataQueue_ = function processMetadataQueue_() {
23113 this.metadataQueue_.id3.forEach(function (fn) {
23114 return fn();
23115 });
23116 this.metadataQueue_.caption.forEach(function (fn) {
23117 return fn();
23118 });
23119 this.metadataQueue_.id3 = [];
23120 this.metadataQueue_.caption = [];
23121 };
23122
23123 _proto.processCallQueue_ = function processCallQueue_() {
23124 var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
23125 // functions may check the length of the load queue and default to pushing themselves
23126 // back onto the queue.
23127
23128 this.callQueue_ = [];
23129 callQueue.forEach(function (fun) {
23130 return fun();
23131 });
23132 };
23133
23134 _proto.processLoadQueue_ = function processLoadQueue_() {
23135 var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
23136 // functions may check the length of the load queue and default to pushing themselves
23137 // back onto the queue.
23138
23139 this.loadQueue_ = [];
23140 loadQueue.forEach(function (fun) {
23141 return fun();
23142 });
23143 }
23144 /**
23145 * Determines whether the loader has enough info to load the next segment.
23146 *
23147 * @return {boolean}
23148 * Whether or not the loader has enough info to load the next segment
23149 */
23150 ;
23151
23152 _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
23153 // Since primary timing goes by video, only the audio loader potentially needs to wait
23154 // to load.
23155 if (this.loaderType_ !== 'audio') {
23156 return true;
23157 }
23158
23159 var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
23160 // enough info to load.
23161
23162 if (!segmentInfo) {
23163 return false;
23164 } // The first segment can and should be loaded immediately so that source buffers are
23165 // created together (before appending). Source buffer creation uses the presence of
23166 // audio and video data to determine whether to create audio/video source buffers, and
23167 // uses processed (transmuxed or parsed) media to determine the types required.
23168
23169
23170 if (!this.getCurrentMediaInfo_()) {
23171 return true;
23172 }
23173
23174 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
23175 // can be requested and downloaded and only wait before it is transmuxed or parsed.
23176 // But in practice, there are a few reasons why it is better to wait until a loader
23177 // is ready to append that segment before requesting and downloading:
23178 //
23179 // 1. Because audio and main loaders cross discontinuities together, if this loader
23180 // is waiting for the other to catch up, then instead of requesting another
23181 // segment and using up more bandwidth, by not yet loading, more bandwidth is
23182 // allotted to the loader currently behind.
23183 // 2. media-segment-request doesn't have to have logic to consider whether a segment
23184 // is ready to be processed or not, isolating the queueing behavior to the loader.
23185 // 3. The audio loader bases some of its segment properties on timing information
23186 // provided by the main loader, meaning that, if the logic for waiting on
23187 // processing was in media-segment-request, then it would also need to know how
23188 // to re-generate the segment information after the main loader caught up.
23189 shouldWaitForTimelineChange({
23190 timelineChangeController: this.timelineChangeController_,
23191 currentTimeline: this.currentTimeline_,
23192 segmentTimeline: segmentInfo.timeline,
23193 loaderType: this.loaderType_,
23194 audioDisabled: this.audioDisabled_
23195 })) {
23196 return false;
23197 }
23198
23199 return true;
23200 };
23201
23202 _proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
23203 if (segmentInfo === void 0) {
23204 segmentInfo = this.pendingSegment_;
23205 }
23206
23207 return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
23208 };
23209
23210 _proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
23211 if (segmentInfo === void 0) {
23212 segmentInfo = this.pendingSegment_;
23213 }
23214
23215 return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
23216 };
23217
23218 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
23219 if (!this.sourceUpdater_.ready()) {
23220 return false;
23221 } // If content needs to be removed or the loader is waiting on an append reattempt,
23222 // then no additional content should be appended until the prior append is resolved.
23223
23224
23225 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
23226 return false;
23227 }
23228
23229 var segmentInfo = this.pendingSegment_;
23230 var trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
23231 // we do not have information on this specific
23232 // segment yet
23233
23234 if (!segmentInfo || !trackInfo) {
23235 return false;
23236 }
23237
23238 var hasAudio = trackInfo.hasAudio,
23239 hasVideo = trackInfo.hasVideo,
23240 isMuxed = trackInfo.isMuxed;
23241
23242 if (hasVideo && !segmentInfo.videoTimingInfo) {
23243 return false;
23244 } // muxed content only relies on video timing information for now.
23245
23246
23247 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
23248 return false;
23249 }
23250
23251 if (shouldWaitForTimelineChange({
23252 timelineChangeController: this.timelineChangeController_,
23253 currentTimeline: this.currentTimeline_,
23254 segmentTimeline: segmentInfo.timeline,
23255 loaderType: this.loaderType_,
23256 audioDisabled: this.audioDisabled_
23257 })) {
23258 return false;
23259 }
23260
23261 return true;
23262 };
23263
  // Handle transmuxed/parsed media data for the in-flight segment: cache init
  // segments and keys, fix up timing info, validate sync requests, then hand
  // the bytes off to appendData_.
  _proto.handleData_ = function handleData_(simpleSegment, result) {
    this.earlyAbortWhenNeeded_(simpleSegment.stats);

    if (this.checkForAbort_(simpleSegment.requestId)) {
      return;
    } // If there's anything in the call queue, then this data came later and should be
    // executed after the calls currently queued.


    if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
      this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
      return;
    }

    var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time

    this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats

    this.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
    // logic may change behavior depending on the state, and changing state too early may
    // inflate our estimates of bandwidth. In the future this should be re-examined to
    // note more granular states.
    // don't process and append data if the mediaSource is closed

    if (this.mediaSource_.readyState === 'closed') {
      return;
    } // if this request included an initialization segment, save that data
    // to the initSegment cache


    if (simpleSegment.map) {
      simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request

      segmentInfo.segment.map = simpleSegment.map;
    } // if this request included a segment key, save that data in the cache


    if (simpleSegment.key) {
      this.segmentKey(simpleSegment.key, true);
    }

    segmentInfo.isFmp4 = simpleSegment.isFmp4;
    segmentInfo.timingInfo = segmentInfo.timingInfo || {};

    if (segmentInfo.isFmp4) {
      // fmp4 needs no remux; take the start directly from the probed timing info
      this.trigger('fmp4');
      segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
    } else {
      var trackInfo = this.getCurrentMediaInfo_();
      var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
      var firstVideoFrameTimeForData;

      if (useVideoTimingInfo) {
        firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
      } // Segment loader knows more about segment timing than the transmuxer (in certain
      // aspects), so make any changes required for a more accurate start time.
      // Don't set the end time yet, as the segment may not be finished processing.


      segmentInfo.timingInfo.start = this.trueSegmentStart_({
        currentStart: segmentInfo.timingInfo.start,
        playlist: segmentInfo.playlist,
        mediaIndex: segmentInfo.mediaIndex,
        currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
        useVideoTimingInfo: useVideoTimingInfo,
        firstVideoFrameTimeForData: firstVideoFrameTimeForData,
        videoTimingInfo: segmentInfo.videoTimingInfo,
        audioTimingInfo: segmentInfo.audioTimingInfo
      });
    } // Init segments for audio and video only need to be appended in certain cases. Now
    // that data is about to be appended, we can check the final cases to determine
    // whether we should append an init segment.


    this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
    // as we use the start of the segment to offset the best guess (playlist provided)
    // timestamp offset.

    this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
    // be appended or not.

    if (segmentInfo.isSyncRequest) {
      // first save/update our timing info for this segment.
      // this is what allows us to choose an accurate segment
      // and the main reason we make a sync request.
      this.updateTimingInfoEnd_(segmentInfo);
      this.syncController_.saveSegmentTimingInfo({
        segmentInfo: segmentInfo,
        shouldSaveTimelineMapping: this.loaderType_ === 'main'
      });
      var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
      // after taking into account its timing info, do not append it.

      if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
        this.logger_('sync segment was incorrect, not appending');
        return;
      } // otherwise append it like any other segment as our guess was correct.


      this.logger_('sync segment was correct, appending');
    } // Save some state so that in the future anything waiting on first append (and/or
    // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
    // we need some notion of whether the timestamp offset or other relevant information
    // has had a chance to be set.


    segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.

    this.processMetadataQueue_();
    this.appendData_(segmentInfo, result);
  };
23375
23376 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
23377 // alt audio doesn't manage timestamp offset
23378 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
23379 // segment for each chunk
23380 !segmentInfo.changedTimestampOffset) {
23381 // if the timestamp offset changed, the timeline may have changed, so we have to re-
23382 // append init segments
23383 this.appendInitSegment_ = {
23384 audio: true,
23385 video: true
23386 };
23387 }
23388
23389 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
23390 // make sure we append init segment on playlist changes, in case the media config
23391 // changed
23392 this.appendInitSegment_[type] = true;
23393 }
23394 };
23395
23396 _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
23397 var type = _ref4.type,
23398 initSegment = _ref4.initSegment,
23399 map = _ref4.map,
23400 playlist = _ref4.playlist;
23401
23402 // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
23403 // (Section 3) required to parse the applicable Media Segments. It applies to every
23404 // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
23405 // or until the end of the playlist."
23406 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
23407 if (map) {
23408 var id = initSegmentId(map);
23409
23410 if (this.activeInitSegmentId_ === id) {
23411 // don't need to re-append the init segment if the ID matches
23412 return null;
23413 } // a map-specified init segment takes priority over any transmuxed (or otherwise
23414 // obtained) init segment
23415 //
23416 // this also caches the init segment for later use
23417
23418
23419 initSegment = this.initSegmentForMap(map, true).bytes;
23420 this.activeInitSegmentId_ = id;
23421 } // We used to always prepend init segments for video, however, that shouldn't be
23422 // necessary. Instead, we should only append on changes, similar to what we've always
23423 // done for audio. This is more important (though may not be that important) for
23424 // frame-by-frame appending for LHLS, simply because of the increased quantity of
23425 // appends.
23426
23427
23428 if (initSegment && this.appendInitSegment_[type]) {
23429 // Make sure we track the playlist that we last used for the init segment, so that
23430 // we can re-append the init segment in the event that we get data from a new
23431 // playlist. Discontinuities and track changes are handled in other sections.
23432 this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type. Until a change is necessary.
23433
23434 this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
23435 // we are appending the muxer init segment
23436
23437 this.activeInitSegmentId_ = null;
23438 return initSegment;
23439 }
23440
23441 return null;
23442 };
23443
  // Handle QUOTA_EXCEEDED_ERR from a source buffer append: either error out
  // this playlist (when no back buffer can be cleared) or remove back buffer,
  // block further appends, and retry the append after a delay.
  _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
    var _this3 = this;

    var segmentInfo = _ref5.segmentInfo,
        type = _ref5.type,
        bytes = _ref5.bytes;
    var audioBuffered = this.sourceUpdater_.audioBuffered();
    var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
    // should be cleared out during the buffer removals. However, log in case it helps
    // debug.

    if (audioBuffered.length > 1) {
      this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
    }

    if (videoBuffered.length > 1) {
      this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
    }

    var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
    var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
    var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
    var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;

    if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
      // Can't remove enough buffer to make room for new segment (or the browser doesn't
      // allow for appends of segments this size). In the future, it may be possible to
      // split up the segment and append in pieces, but for now, error out this playlist
      // in an attempt to switch to a more manageable rendition.
      this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
      this.error({
        message: 'Quota exceeded error with append of a single segment of content',
        excludeUntil: Infinity
      });
      this.trigger('error');
      return;
    } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
    // that the segment-loader should block on future events until this one is handled, so
    // that it doesn't keep moving onto further segments. Adding the call to the call
    // queue will prevent further appends until waitingOnRemove_ and
    // quotaExceededErrorRetryTimeout_ are cleared.
    //
    // Note that this will only block the current loader. In the case of demuxed content,
    // the other load may keep filling as fast as possible. In practice, this should be
    // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
    // source buffer, or video fills without enough room for audio to append (and without
    // the availability of clearing out seconds of back buffer to make room for audio).
    // But it might still be good to handle this case in the future as a TODO.


    this.waitingOnRemove_ = true;
    // re-attempt the exact same append once the remove + timeout complete
    this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
      segmentInfo: segmentInfo,
      type: type,
      bytes: bytes
    }));
    var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
    // before retrying.

    var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
    this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
    this.remove(0, timeToRemoveUntil, function () {
      _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");

      _this3.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
      // attempts (since we can't clear less than the minimum)

      _this3.quotaExceededErrorRetryTimeout_ = window.setTimeout(function () {
        _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');

        _this3.quotaExceededErrorRetryTimeout_ = null;

        _this3.processCallQueue_();
      }, MIN_BACK_BUFFER * 1000);
    }, true);
  };
23520
_proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
  var segmentInfo = _ref6.segmentInfo;
  var type = _ref6.type;
  var bytes = _ref6.bytes;

  // a null/undefined error means the append succeeded; nothing to handle
  if (!error) {
    return;
  }

  if (error.code === QUOTA_EXCEEDED_ERR) {
    // A quota exceeded error should be recoverable with a future re-append, so no need
    // to trigger an append error.
    this.handleQuotaExceededError_({
      segmentInfo: segmentInfo,
      type: type,
      bytes: bytes
    });
    return;
  }

  // If an append errors, we often can't recover.
  // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
  //
  // Trigger a special error so that it can be handled separately from normal,
  // recoverable errors.
  this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
  this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id));
  this.trigger('appenderror');
};
23551
_proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
  var segmentInfo = _ref7.segmentInfo;
  var type = _ref7.type;
  var initSegment = _ref7.initSegment;
  var data = _ref7.data;
  var bytes = _ref7.bytes;

  // If this is a re-append, bytes were already created and don't need to be recreated
  if (!bytes) {
    var buffers = [data];
    var totalByteLength = data.byteLength;

    if (initSegment) {
      // if the media initialization segment is changing, append it before the content
      // segment
      buffers.unshift(initSegment);
      totalByteLength += initSegment.byteLength;
    }
    // Technically we should be OK appending the init segment separately, however, we
    // haven't yet tested that, and prepending is how we have always done things.

    bytes = concatSegments({
      bytes: totalByteLength,
      segments: buffers
    });
  }

  // hand the concatenated bytes to the source updater; errors come back through
  // handleAppendError_ with the same context so a re-append can be attempted
  this.sourceUpdater_.appendBuffer({
    segmentInfo: segmentInfo,
    type: type,
    bytes: bytes
  }, this.handleAppendError_.bind(this, {
    segmentInfo: segmentInfo,
    type: type,
    bytes: bytes
  }));
};
23589
_proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
  // ignore timing info that arrives for a request that is no longer pending
  if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
    return;
  }

  var segment = this.pendingSegment_.segment;
  var prop = type + "TimingInfo";

  if (!segment[prop]) {
    segment[prop] = {};
  }

  var timingInfo = segment[prop];
  timingInfo.transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
  timingInfo.transmuxedPresentationStart = segmentTimingInfo.start.presentation;
  timingInfo.transmuxedDecodeStart = segmentTimingInfo.start.decode;
  timingInfo.transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
  timingInfo.transmuxedDecodeEnd = segmentTimingInfo.end.decode;
  // mainly used as a reference for debugging
  timingInfo.baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
};
23610
_proto.appendData_ = function appendData_(segmentInfo, result) {
  var type = result.type;
  var data = result.data;

  // nothing to append for empty or missing payloads
  if (!data || !data.byteLength) {
    return;
  }

  // audio appends are skipped entirely while the audio loader is disabled
  if (type === 'audio' && this.audioDisabled_) {
    return;
  }

  var initSegment = this.getInitSegmentAndUpdateState_({
    type: type,
    initSegment: result.initSegment,
    playlist: segmentInfo.playlist,
    map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
  });

  this.appendToSourceBuffer_({
    segmentInfo: segmentInfo,
    type: type,
    initSegment: initSegment,
    data: data
  });
}
/**
 * load a specific segment from a request into the buffer
 *
 * @private
 */
;
23642
_proto.loadSegment_ = function loadSegment_(segmentInfo) {
  var loader = this;

  this.state = 'WAITING';
  this.pendingSegment_ = segmentInfo;
  this.trimBackBuffer_(segmentInfo);

  // when a timestamp offset will be applied, ask the transmuxer to drop any MP4
  // captions it has cached
  if (typeof segmentInfo.timestampOffset === 'number' && this.transmuxer_) {
    this.transmuxer_.postMessage({
      action: 'clearAllMp4Captions'
    });
  }

  if (this.hasEnoughInfoToLoad_()) {
    this.updateTransmuxerAndRequestSegment_(segmentInfo);
    return;
  }

  // not enough info yet; defer the request until the loader is ready
  this.loadQueue_.push(function () {
    // regenerate the audioAppendStart, timestampOffset, etc as they
    // may have changed since this function was added to the queue.
    var options = _extends_1({}, segmentInfo, {
      forceTimestampOffset: true
    });

    _extends_1(segmentInfo, loader.generateSegmentInfo_(options));

    loader.isPendingTimestampOffset_ = false;
    loader.updateTransmuxerAndRequestSegment_(segmentInfo);
  });
};
23677
/**
 * Prepare the transmuxer for this segment (resetting it and forwarding the timestamp
 * offset when the offset changed) and then start the media segment request, wiring up
 * all of the loader's progress/track/timing/data/done callbacks.
 *
 * @param {Object} segmentInfo - the current segment
 * @private
 */
_proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
  var _this5 = this;

  // We'll update the source buffer's timestamp offset once we have transmuxed data, but
  // the transmuxer still needs to be updated before then.
  //
  // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
  // offset must be passed to the transmuxer for stream correcting adjustments.
  if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
    this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared

    segmentInfo.gopsToAlignWith = [];
    this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh

    this.transmuxer_.postMessage({
      action: 'reset'
    });
    this.transmuxer_.postMessage({
      action: 'setTimestampOffset',
      timestampOffset: segmentInfo.timestampOffset
    });
  }

  var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
  var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
  var isWalkingForward = this.mediaIndex !== null;
  var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
  // the first timeline
  segmentInfo.timeline > 0;
  var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
  this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
  // then this init segment has never been seen before and should be appended.
  //
  // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
  // both to true and leave the decision of whether to append the init segment to append time.

  if (simpleSegment.map && !simpleSegment.map.bytes) {
    this.logger_('going to request init segment.');
    this.appendInitSegment_ = {
      video: true,
      audio: true
    };
  }

  // kick off the request; abortRequests is retained so the loader can cancel it
  segmentInfo.abortRequests = mediaSegmentRequest({
    xhr: this.vhs_.xhr,
    xhrOptions: this.xhrOptions_,
    decryptionWorker: this.decrypter_,
    segment: simpleSegment,
    abortFn: this.handleAbort_.bind(this, segmentInfo),
    progressFn: this.handleProgress_.bind(this),
    trackInfoFn: this.handleTrackInfo_.bind(this),
    timingInfoFn: this.handleTimingInfo_.bind(this),
    videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
    audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
    captionsFn: this.handleCaptions_.bind(this),
    isEndOfTimeline: isEndOfTimeline,
    endedTimelineFn: function endedTimelineFn() {
      _this5.logger_('received endedtimeline callback');
    },
    id3Fn: this.handleId3_.bind(this),
    dataFn: this.handleData_.bind(this),
    doneFn: this.segmentRequestFinished_.bind(this),
    onTransmuxerLog: function onTransmuxerLog(_ref8) {
      var message = _ref8.message,
          level = _ref8.level,
          stream = _ref8.stream;

      _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
    }
  });
}
/**
 * trim the back buffer so that we don't have too much data
 * in the source buffer
 *
 * @private
 *
 * @param {Object} segmentInfo - the current segment
 */
;
23759
_proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
  // Chrome has a hard limit of 150MB of buffer and a very conservative
  // "garbage collector". We manually clear out the old buffer to ensure
  // we don't trigger the QuotaExceeded error on the source buffer during
  // subsequent appends
  var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10);

  if (removeToTime <= 0) {
    return;
  }

  this.remove(0, removeToTime);
}
/**
 * created a simplified copy of the segment object with just the
 * information necessary to perform the XHR and decryption
 *
 * @private
 *
 * @param {Object} segmentInfo - the current segment
 * @return {Object} a simplified segment object copy
 */
;
23781
_proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
  var segment = segmentInfo.segment;
  var part = segmentInfo.part;

  // prefer the part-level URI and byterange when a partial segment is being loaded
  var simpleSegment = {
    resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
    byterange: part ? part.byterange : segment.byterange,
    requestId: segmentInfo.requestId,
    transmuxer: segmentInfo.transmuxer,
    audioAppendStart: segmentInfo.audioAppendStart,
    gopsToAlignWith: segmentInfo.gopsToAlignWith,
    part: segmentInfo.part
  };

  var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];

  if (previousSegment && previousSegment.timeline === segment.timeline) {
    // The baseStartTime of a segment is used to handle rollover when probing the TS
    // segment to retrieve timing information. Since the probe only looks at the media's
    // times (e.g., PTS and DTS values of the segment), and doesn't consider the
    // player's time (e.g., player.currentTime()), baseStartTime should reflect the
    // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
    // seconds of media time, so should be used here. The previous segment is used since
    // the end of the previous segment should represent the beginning of the current
    // segment, so long as they are on the same timeline.
    var previousTimingInfo = previousSegment.videoTimingInfo || previousSegment.audioTimingInfo;

    if (previousTimingInfo) {
      simpleSegment.baseStartTime = previousTimingInfo.transmuxedDecodeEnd;
    }
  }

  if (segment.key) {
    // if the media sequence is greater than 2^32, the IV will be incorrect
    // assuming 10s segments, that would be about 1300 years
    var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
    simpleSegment.key = this.segmentKey(segment.key);
    simpleSegment.key.iv = iv;
  }

  if (segment.map) {
    simpleSegment.map = this.initSegmentForMap(segment.map);
  }

  return simpleSegment;
};
23826
_proto.saveTransferStats_ = function saveTransferStats_(stats) {
  // every request counts as a media request even if it has been aborted
  // or canceled due to a timeout
  this.mediaRequests += 1;

  // stats are absent for requests that never completed; nothing else to record
  if (!stats) {
    return;
  }

  this.mediaBytesTransferred += stats.bytesReceived;
  this.mediaTransferDuration += stats.roundTripTime;
};
23837
_proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
  // byteLength will be used for throughput, and should be based on bytes receieved,
  // which we only know at the end of the request and should reflect total bytes
  // downloaded rather than just bytes processed from components of the segment
  this.pendingSegment_.byteLength = stats.bytesReceived;

  var tooShortToMeasure = duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS;

  // very short segments yield unreliable bandwidth samples, so skip recording them
  if (tooShortToMeasure) {
    this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
    return;
  }

  this.bandwidth = stats.bandwidth;
  this.roundTrip = stats.roundTripTime;
};
23852
/**
 * React to a segment request timeout: count it, crater the measured bandwidth so the
 * next ABR pass performs an emergency downswitch, and notify listeners.
 *
 * @private
 */
_proto.handleTimeout_ = function handleTimeout_() {
  // although the VTT segment loader bandwidth isn't really used, it's good to
  // maintain functionality between segment loaders
  this.mediaRequestsTimedout += 1;
  // 1 bit/s is effectively "no bandwidth" for rendition selection purposes
  this.bandwidth = 1;
  // round trip time is unknown for a request that never completed
  this.roundTrip = NaN;
  this.trigger('bandwidthupdate');
  this.trigger('timeout');
}
/**
 * Handle the callback from the segmentRequest function and set the
 * associated SegmentLoader state and errors if necessary
 *
 * @private
 */
;
23869
/**
 * Completion callback for the media segment request. Requeues itself while other
 * operations are pending, discards stale/aborted responses, routes errors
 * (abort/timeout/real failure), and on success records bandwidth stats and moves the
 * loader into the APPENDING state.
 *
 * @param {Object} error - error from the request, if any
 * @param {Object} simpleSegment - the simplified segment object used for the request
 * @param {Object} result - transmux/probe results for the segment
 * @private
 */
_proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
  // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
  // check the call queue directly since this function doesn't need to deal with any
  // data, and can continue even if the source buffers are not set up and we didn't get
  // any data from the segment
  if (this.callQueue_.length) {
    this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
    return;
  }

  this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset

  if (!this.pendingSegment_) {
    return;
  } // the request was aborted and the SegmentLoader has already started
  // another request. this can happen when the timeout for an aborted
  // request triggers due to a limitation in the XHR library
  // do not count this as any sort of request or we risk double-counting


  if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
    return;
  } // an error occurred from the active pendingSegment_ so reset everything


  if (error) {
    this.pendingSegment_ = null;
    this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done

    if (error.code === REQUEST_ERRORS.ABORTED) {
      return;
    }

    this.pause(); // the error is really just that at least one of the requests timed-out
    // set the bandwidth to a very low value and trigger an ABR switch to
    // take emergency action

    if (error.code === REQUEST_ERRORS.TIMEOUT) {
      this.handleTimeout_();
      return;
    } // if control-flow has arrived here, then the error is real
    // emit an error event to blacklist the current playlist


    this.mediaRequestsErrored += 1;
    this.error(error);
    this.trigger('error');
    return;
  }

  var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
  // generated for ABR purposes

  this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
  segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;

  if (result.gopInfo) {
    this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
  } // Although we may have already started appending on progress, we shouldn't switch the
  // state away from loading until we are officially done loading the segment data.


  this.state = 'APPENDING'; // used for testing

  this.trigger('appending');
  this.waitForAppendsToComplete_(segmentInfo);
};
23937
_proto.setTimeMapping_ = function setTimeMapping_(timeline) {
  var mapping = this.syncController_.mappingForTimeline(timeline);

  // a null mapping means the sync controller has no mapping for this timeline yet,
  // so keep whatever value is currently in place
  if (mapping === null) {
    return;
  }

  this.timeMapping_ = mapping;
};
23945
_proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
  var hasPreciseTimes = typeof segment.start === 'number' && typeof segment.end === 'number';

  // prefer measured start/end times when both exist; otherwise fall back to the
  // segment's declared duration
  this.mediaSecondsLoaded += hasPreciseTimes ? segment.end - segment.start : segment.duration;
};
23953
_proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
  // a null offset means no offset change is pending for this segment
  if (timestampOffset === null) {
    return false;
  }

  // note that we're potentially using the same timestamp offset for both video and
  // audio; short-circuit so the audio getter is only consulted when needed
  return (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) ||
    (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset());
};
23971
_proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
  var currentStart = _ref9.currentStart;
  var playlist = _ref9.playlist;
  var mediaIndex = _ref9.mediaIndex;
  var firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData;
  var currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset;
  var useVideoTimingInfo = _ref9.useVideoTimingInfo;
  var videoTimingInfo = _ref9.videoTimingInfo;
  var audioTimingInfo = _ref9.audioTimingInfo;

  // if start was set once, keep using it
  if (typeof currentStart !== 'undefined') {
    return currentStart;
  }

  // without video timing info, audio timing is the only source of truth
  if (!useVideoTimingInfo) {
    return audioTimingInfo.start;
  }

  // The start of a segment should be the start of the first full frame contained
  // within that segment. Since the transmuxer maintains a cache of incomplete data
  // from and/or the last frame seen, the start time may reflect a frame that starts
  // in the previous segment. Check for that case and ensure the start time is
  // accurate for the segment.
  var previousSegment = playlist.segments[mediaIndex - 1];
  var previousSegmentAligns = mediaIndex !== 0 && previousSegment && typeof previousSegment.start !== 'undefined' && previousSegment.end === firstVideoFrameTimeForData + currentVideoTimestampOffset;

  if (previousSegmentAligns) {
    return videoTimingInfo.start;
  }

  return firstVideoFrameTimeForData;
};
24003
/**
 * Once transmuxing/loading is complete, wait for all outstanding source buffer
 * appends for this segment to finish (queuing checkAppendsDone_ on each relevant
 * buffer's queue) before the segment can be declared done. Segments with no
 * appendable data complete immediately.
 *
 * @param {Object} segmentInfo - the current segment
 * @private
 */
_proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
  var trackInfo = this.getCurrentMediaInfo_(segmentInfo);

  if (!trackInfo) {
    this.error({
      message: 'No starting media returned, likely due to an unsupported media format.',
      blacklistDuration: Infinity
    });
    this.trigger('error');
    return;
  } // Although transmuxing is done, appends may not yet be finished. Throw a marker
  // on each queue this loader is responsible for to ensure that the appends are
  // complete.


  var hasAudio = trackInfo.hasAudio,
      hasVideo = trackInfo.hasVideo,
      isMuxed = trackInfo.isMuxed;
  var waitForVideo = this.loaderType_ === 'main' && hasVideo;
  var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
  segmentInfo.waitingOnAppends = 0; // segments with no data

  if (!segmentInfo.hasAppendedData_) {
    if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
      // When there's no audio or video data in the segment, there's no audio or video
      // timing information.
      //
      // If there's no audio or video timing information, then the timestamp offset
      // can't be adjusted to the appropriate value for the transmuxer and source
      // buffers.
      //
      // Therefore, the next segment should be used to set the timestamp offset.
      this.isPendingTimestampOffset_ = true;
    } // override settings for metadata only segments


    segmentInfo.timingInfo = {
      start: 0
    };
    segmentInfo.waitingOnAppends++;

    if (!this.isPendingTimestampOffset_) {
      // update the timestampoffset
      this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
      // no video/audio data.

      this.processMetadataQueue_();
    } // append is "done" instantly with no data.


    this.checkAppendsDone_(segmentInfo);
    return;
  } // Since source updater could call back synchronously, do the increments first.


  if (waitForVideo) {
    segmentInfo.waitingOnAppends++;
  }

  if (waitForAudio) {
    segmentInfo.waitingOnAppends++;
  }

  if (waitForVideo) {
    this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
  }

  if (waitForAudio) {
    this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
  }
};
24075
_proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
  // bail out if the loader was reset or aborted since this callback was queued
  if (this.checkForAbort_(segmentInfo.requestId)) {
    return;
  }

  segmentInfo.waitingOnAppends -= 1;

  // finish only after every outstanding append has reported back
  if (segmentInfo.waitingOnAppends === 0) {
    this.handleAppendsDone_();
  }
};
24087
_proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
  var errorMessage = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);

  // a falsy message means the media switch is allowed
  if (!errorMessage) {
    return false;
  }

  this.error({
    message: errorMessage,
    blacklistDuration: Infinity
  });
  this.trigger('error');
  return true;
};
24102
/**
 * Compute and apply the timestamp offset for the source buffers based on this
 * segment's timing info. Only the main loader sets offsets, and only once per
 * segment; triggers 'timestampoffset' when either buffer's offset actually changed.
 *
 * @param {Object} segmentInfo - the current segment
 * @private
 */
_proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
  if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
  // priority, timing-wise, so we must wait
  typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
  segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
  this.loaderType_ !== 'main') {
    return;
  }

  var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
  // the timing info here comes from video. In the event that the audio is longer than
  // the video, this will trim the start of the audio.
  // This also trims any offset from 0 at the beginning of the media

  segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
    videoTimingInfo: segmentInfo.segment.videoTimingInfo,
    audioTimingInfo: segmentInfo.segment.audioTimingInfo,
    timingInfo: segmentInfo.timingInfo
  }); // In the event that there are part segment downloads, each will try to update the
  // timestamp offset. Retaining this bit of state prevents us from updating in the
  // future (within the same segment), however, there may be a better way to handle it.

  segmentInfo.changedTimestampOffset = true;

  if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
    this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
    didChange = true;
  }

  if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
    this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
    didChange = true;
  }

  if (didChange) {
    this.trigger('timestampoffset');
  }
};
24141
_proto.getSegmentStartTimeForTimestampOffsetCalculation_ = function getSegmentStartTimeForTimestampOffsetCalculation_(_ref10) {
  var videoTimingInfo = _ref10.videoTimingInfo;
  var audioTimingInfo = _ref10.audioTimingInfo;
  var timingInfo = _ref10.timingInfo;

  if (this.useDtsForTimestampOffset_) {
    // prefer the video decode timestamp when it is available
    if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {
      return videoTimingInfo.transmuxedDecodeStart;
    }

    // handle audio only
    if (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {
      return audioTimingInfo.transmuxedDecodeStart;
    }
  }

  // handle content not transmuxed (e.g., MP4), or presentation-time mode
  return timingInfo.start;
};
24163
_proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
  segmentInfo.timingInfo = segmentInfo.timingInfo || {};
  var trackInfo = this.getMediaInfo_();
  var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
  var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;

  // no timing info at all means there is nothing to record
  if (!prioritizedTimingInfo) {
    return;
  }

  if (typeof prioritizedTimingInfo.end === 'number') {
    segmentInfo.timingInfo.end = prioritizedTimingInfo.end;
  } else {
    // End time may not exist in a case where we aren't parsing the full segment (one
    // current example is the case of fmp4), so use the rough duration to calculate an
    // end time.
    segmentInfo.timingInfo.end = prioritizedTimingInfo.start + segmentInfo.duration;
  }
}
/**
 * callback to run when appendBuffer is finished. detects if we are
 * in a good state to do things with the data we got, or if we need
 * to wait for more
 *
 * @private
 */
;
24187
24188 _proto.handleAppendsDone_ = function handleAppendsDone_() {
24189 // appendsdone can cause an abort
24190 if (this.pendingSegment_) {
24191 this.trigger('appendsdone');
24192 }
24193
24194 if (!this.pendingSegment_) {
24195 this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
24196 // all appending cases?
24197
24198 if (!this.paused()) {
24199 this.monitorBuffer_();
24200 }
24201
24202 return;
24203 }
24204
24205 var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
24206 // best to wait until all appends are done so we're sure that the primary media is
24207 // finished (and we have its end time).
24208
24209 this.updateTimingInfoEnd_(segmentInfo);
24210
24211 if (this.shouldSaveSegmentTimingInfo_) {
24212 // Timeline mappings should only be saved for the main loader. This is for multiple
24213 // reasons:
24214 //
24215 // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
24216 // and the main loader try to save the timeline mapping, whichever comes later
24217 // will overwrite the first. In theory this is OK, as the mappings should be the
24218 // same, however, it breaks for (2)
24219 // 2) In the event of a live stream, the initial live point will make for a somewhat
24220 // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
24221 // the mapping will be off for one of the streams, dependent on which one was
24222 // first saved (see (1)).
24223 // 3) Primary timing goes by video in VHS, so the mapping should be video.
24224 //
24225 // Since the audio loader will wait for the main loader to load the first segment,
24226 // the main loader will save the first timeline mapping, and ensure that there won't
24227 // be a case where audio loads two segments without saving a mapping (thus leading
24228 // to missing segment timing info).
24229 this.syncController_.saveSegmentTimingInfo({
24230 segmentInfo: segmentInfo,
24231 shouldSaveTimelineMapping: this.loaderType_ === 'main'
24232 });
24233 }
24234
24235 var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
24236
24237 if (segmentDurationMessage) {
24238 if (segmentDurationMessage.severity === 'warn') {
24239 videojs__default["default"].log.warn(segmentDurationMessage.message);
24240 } else {
24241 this.logger_(segmentDurationMessage.message);
24242 }
24243 }
24244
24245 this.recordThroughput_(segmentInfo);
24246 this.pendingSegment_ = null;
24247 this.state = 'READY';
24248
24249 if (segmentInfo.isSyncRequest) {
24250 this.trigger('syncinfoupdate'); // if the sync request was not appended
24251 // then it was not the correct segment.
24252 // throw it away and use the data it gave us
24253 // to get the correct one.
24254
24255 if (!segmentInfo.hasAppendedData_) {
24256 this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
24257 return;
24258 }
24259 }
24260
24261 this.logger_("Appended " + segmentInfoString(segmentInfo));
24262 this.addSegmentMetadataCue_(segmentInfo);
24263 this.fetchAtBuffer_ = true;
24264
24265 if (this.currentTimeline_ !== segmentInfo.timeline) {
24266 this.timelineChangeController_.lastTimelineChange({
24267 type: this.loaderType_,
24268 from: this.currentTimeline_,
24269 to: segmentInfo.timeline
24270 }); // If audio is not disabled, the main segment loader is responsible for updating
24271 // the audio timeline as well. If the content is video only, this won't have any
24272 // impact.
24273
24274 if (this.loaderType_ === 'main' && !this.audioDisabled_) {
24275 this.timelineChangeController_.lastTimelineChange({
24276 type: 'audio',
24277 from: this.currentTimeline_,
24278 to: segmentInfo.timeline
24279 });
24280 }
24281 }
24282
24283 this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
24284 // the following conditional otherwise it may consider this a bad "guess"
24285 // and attempt to resync when the post-update seekable window and live
24286 // point would mean that this was the perfect segment to fetch
24287
24288 this.trigger('syncinfoupdate');
24289 var segment = segmentInfo.segment;
24290 var part = segmentInfo.part;
24291 var badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;
24292 var badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before
24293 // the currentTime_ that means that our conservative guess was too conservative.
24294 // In that case, reset the loader state so that we try to use any information gained
24295 // from the previous request to create a new, more accurate, sync-point.
24296
24297 if (badSegmentGuess || badPartGuess) {
24298 this.logger_("bad " + (badSegmentGuess ? 'segment' : 'part') + " " + segmentInfoString(segmentInfo));
24299 this.resetEverything();
24300 return;
24301 }
24302
24303 var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
24304 // and conservatively guess
24305
24306 if (isWalkingForward) {
24307 this.trigger('bandwidthupdate');
24308 }
24309
24310 this.trigger('progress');
24311 this.mediaIndex = segmentInfo.mediaIndex;
24312 this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
24313 // buffer, end the stream. this ensures the "ended" event will
24314 // fire if playback reaches that point.
24315
24316 if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
24317 this.endOfStream();
24318 } // used for testing
24319
24320
24321 this.trigger('appended');
24322
24323 if (segmentInfo.hasAppendedData_) {
24324 this.mediaAppends++;
24325 }
24326
24327 if (!this.paused()) {
24328 this.monitorBuffer_();
24329 }
24330 }
24331 /**
24332 * Records the current throughput of the decrypt, transmux, and append
   * portion of the segment pipeline. `throughput.rate` is the cumulative
24334 * moving average of the throughput. `throughput.count` is the number of
24335 * data points in the average.
24336 *
24337 * @private
24338 * @param {Object} segmentInfo the object returned by loadSegment
24339 */
24340 ;
24341
  _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
    // Very short segments yield noisy time measurements, so their throughput
    // samples are discarded rather than folded into the average.
    if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
      this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
      return;
    }

    var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
    // by zero in the case where the throughput is ridiculously high

    var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second

    var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
    // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
    // (the pre-increment of count supplies the "+ 1")

    this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
  }
24358 /**
24359 * Adds a cue to the segment-metadata track with some metadata information about the
24360 * segment
24361 *
24362 * @private
24363 * @param {Object} segmentInfo
24364 * the object returned by loadSegment
24365 * @method addSegmentMetadataCue_
24366 */
24367 ;
24368
  _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
    // nothing to do if no metadata track was provided to this loader
    if (!this.segmentMetadataTrack_) {
      return;
    }

    var segment = segmentInfo.segment;
    var start = segment.start;
    var end = segment.end; // Do not try adding the cue if the start and end times are invalid.

    if (!finite(start) || !finite(end)) {
      return;
    }

    // clear any stale cues overlapping this segment's window before adding
    // the fresh one
    removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
    var Cue = window.WebKitDataCue || window.VTTCue;
    // snapshot of the segment/rendition info exposed to cue consumers
    var value = {
      custom: segment.custom,
      dateTimeObject: segment.dateTimeObject,
      dateTimeString: segment.dateTimeString,
      bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
      resolution: segmentInfo.playlist.attributes.RESOLUTION,
      codecs: segmentInfo.playlist.attributes.CODECS,
      byteLength: segmentInfo.byteLength,
      uri: segmentInfo.uri,
      timeline: segmentInfo.timeline,
      playlist: segmentInfo.playlist.id,
      start: start,
      end: end
    };
    var data = JSON.stringify(value);
    var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between
    // the differences of WebKitDataCue in safari and VTTCue in other browsers

    cue.value = value;
    this.segmentMetadataTrack_.addCue(cue);
  };
24405
24406 return SegmentLoader;
24407 }(videojs__default["default"].EventTarget);
24408
24409 function noop() {}
24410
24411 var toTitleCase = function toTitleCase(string) {
24412 if (typeof string !== 'string') {
24413 return string;
24414 }
24415
24416 return string.replace(/./, function (w) {
24417 return w.toUpperCase();
24418 });
24419 };
24420
24421 var bufferTypes = ['video', 'audio'];
24422
24423 var _updating = function updating(type, sourceUpdater) {
24424 var sourceBuffer = sourceUpdater[type + "Buffer"];
24425 return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
24426 };
24427
24428 var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
24429 for (var i = 0; i < queue.length; i++) {
24430 var queueEntry = queue[i];
24431
24432 if (queueEntry.type === 'mediaSource') {
24433 // If the next entry is a media source entry (uses multiple source buffers), block
24434 // processing to allow it to go through first.
24435 return null;
24436 }
24437
24438 if (queueEntry.type === type) {
24439 return i;
24440 }
24441 }
24442
24443 return null;
24444 };
24445
  // Attempt to process the next eligible entry in the source updater's
  // operation queue for the given type ('audio', 'video' or 'mediaSource').
  // Media source entries gate the whole queue; per-type entries run only
  // when the updater is ready, the media source is open, and that buffer
  // type is idle. Recurses to drain consecutive synchronous operations.
  var shiftQueue = function shiftQueue(type, sourceUpdater) {
    if (sourceUpdater.queue.length === 0) {
      return;
    }

    var queueIndex = 0;
    var queueEntry = sourceUpdater.queue[queueIndex];

    if (queueEntry.type === 'mediaSource') {
      // media source actions need every source buffer idle and an
      // open/ended (not closed) media source
      if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
        sourceUpdater.queue.shift();
        queueEntry.action(sourceUpdater);

        if (queueEntry.doneFn) {
          queueEntry.doneFn();
        } // Only specific source buffer actions must wait for async updateend events. Media
        // Source actions process synchronously. Therefore, both audio and video source
        // buffers are now clear to process the next queue entries.


        shiftQueue('audio', sourceUpdater);
        shiftQueue('video', sourceUpdater);
      } // Media Source actions require both source buffers, so if the media source action
      // couldn't process yet (because one or both source buffers are busy), block other
      // queue actions until both are available and the media source action can process.


      return;
    }

    if (type === 'mediaSource') {
      // If the queue was shifted by a media source action (this happens when pushing a
      // media source action onto the queue), then it wasn't from an updateend event from an
      // audio or video source buffer, so there's no change from previous state, and no
      // processing should be done.
      return;
    } // Media source queue entries don't need to consider whether the source updater is
    // started (i.e., source buffers are created) as they don't need the source buffers, but
    // source buffer queue entries do.


    if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
      return;
    }

    if (queueEntry.type !== type) {
      queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);

      if (queueIndex === null) {
        // Either there's no queue entry that uses this source buffer type in the queue, or
        // there's a media source queue entry before the next entry of this type, in which
        // case wait for that action to process first.
        return;
      }

      queueEntry = sourceUpdater.queue[queueIndex];
    }

    sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
    //
    // The queue pending operation must be set before the action is performed in the event
    // that the action results in a synchronous event that is acted upon. For instance, if
    // an exception is thrown that can be handled, it's possible that new actions will be
    // appended to an empty queue and immediately executed, but would not have the correct
    // pending information if this property was set after the action was performed.

    sourceUpdater.queuePending[type] = queueEntry;
    queueEntry.action(type, sourceUpdater);

    if (!queueEntry.doneFn) {
      // synchronous operation, process next entry
      sourceUpdater.queuePending[type] = null;
      shiftQueue(type, sourceUpdater);
      return;
    }
  };
24522
24523 var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
24524 var buffer = sourceUpdater[type + "Buffer"];
24525 var titleType = toTitleCase(type);
24526
24527 if (!buffer) {
24528 return;
24529 }
24530
24531 buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
24532 buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
24533 sourceUpdater.codecs[type] = null;
24534 sourceUpdater[type + "Buffer"] = null;
24535 };
24536
24537 var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
24538 return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
24539 };
24540
  // Factories for queued operations. Per-source-buffer actions are invoked
  // as action(type, sourceUpdater); media-source-level actions (endOfStream,
  // duration, addSourceBuffer, removeSourceBuffer) as action(sourceUpdater).
  var actions = {
    // append bytes to one source buffer; on a synchronous append failure
    // (e.g. QuotaExceededError) the pending slot is cleared and onError is
    // called so the queue doesn't deadlock waiting for updateend
    appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
      return function (type, sourceUpdater) {
        var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
        // or the media source does not contain this source buffer.

        if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
          return;
        }

        sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");

        try {
          sourceBuffer.appendBuffer(bytes);
        } catch (e) {
          sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
          sourceUpdater.queuePending[type] = null;
          onError(e);
        }
      };
    },
    // remove a time range from one source buffer; removal failures are
    // logged and otherwise ignored (best-effort cleanup)
    remove: function remove(start, end) {
      return function (type, sourceUpdater) {
        var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
        // or the media source does not contain this source buffer.

        if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
          return;
        }

        sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");

        try {
          sourceBuffer.remove(start, end);
        } catch (e) {
          sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
        }
      };
    },
    // set the timestampOffset on one source buffer
    // NOTE(review): the log below reads e.g. "videotimestampOffset" — a
    // missing space, cosmetic only
    timestampOffset: function timestampOffset(offset) {
      return function (type, sourceUpdater) {
        var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
        // or the media source does not contain this source buffer.

        if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
          return;
        }

        sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
        sourceBuffer.timestampOffset = offset;
      };
    },
    // run an arbitrary callback in queue order
    callback: function callback(_callback) {
      return function (type, sourceUpdater) {
        _callback();
      };
    },
    // signal end of stream on the media source (only while it is open)
    endOfStream: function endOfStream(error) {
      return function (sourceUpdater) {
        if (sourceUpdater.mediaSource.readyState !== 'open') {
          return;
        }

        sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");

        try {
          sourceUpdater.mediaSource.endOfStream(error);
        } catch (e) {
          videojs__default["default"].log.warn('Failed to call media source endOfStream', e);
        }
      };
    },
    // set the media source duration; failures are logged, not rethrown
    duration: function duration(_duration) {
      return function (sourceUpdater) {
        sourceUpdater.logger_("Setting mediaSource duration to " + _duration);

        try {
          sourceUpdater.mediaSource.duration = _duration;
        } catch (e) {
          videojs__default["default"].log.warn('Failed to set media source duration', e);
        }
      };
    },
    // abort any in-progress append/remove on one source buffer; only legal
    // while the media source is open
    abort: function abort() {
      return function (type, sourceUpdater) {
        if (sourceUpdater.mediaSource.readyState !== 'open') {
          return;
        }

        var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
        // or the media source does not contain this source buffer.

        if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
          return;
        }

        sourceUpdater.logger_("calling abort on " + type + "Buffer");

        try {
          sourceBuffer.abort();
        } catch (e) {
          videojs__default["default"].log.warn("Failed to abort on " + type + "Buffer", e);
        }
      };
    },
    // create a source buffer for the codec, wire up our updateend/error
    // listeners, and record it on the source updater
    addSourceBuffer: function addSourceBuffer(type, codec) {
      return function (sourceUpdater) {
        var titleType = toTitleCase(type);
        var mime = getMimeForCodec(codec);
        sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
        var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
        sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
        sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
        sourceUpdater.codecs[type] = codec;
        sourceUpdater[type + "Buffer"] = sourceBuffer;
      };
    },
    // detach (cleanupBuffer) and then remove a source buffer from the
    // media source; note cleanup happens even if the buffer is no longer
    // attached to the media source
    removeSourceBuffer: function removeSourceBuffer(type) {
      return function (sourceUpdater) {
        var sourceBuffer = sourceUpdater[type + "Buffer"];
        cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
        // or the media source does not contain this source buffer.

        if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
          return;
        }

        sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");

        try {
          sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
        } catch (e) {
          videojs__default["default"].log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
        }
      };
    },
    // switch a source buffer to a new codec via SourceBuffer.changeType;
    // no-op when the codec is unchanged
    changeType: function changeType(codec) {
      return function (type, sourceUpdater) {
        var sourceBuffer = sourceUpdater[type + "Buffer"];
        var mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
        // or the media source does not contain this source buffer.

        if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
          return;
        } // do not update codec if we don't need to.


        if (sourceUpdater.codecs[type] === codec) {
          return;
        }

        sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
        sourceBuffer.changeType(mime);
        sourceUpdater.codecs[type] = codec;
      };
    }
  };
24698
24699 var pushQueue = function pushQueue(_ref) {
24700 var type = _ref.type,
24701 sourceUpdater = _ref.sourceUpdater,
24702 action = _ref.action,
24703 doneFn = _ref.doneFn,
24704 name = _ref.name;
24705 sourceUpdater.queue.push({
24706 type: type,
24707 action: action,
24708 doneFn: doneFn,
24709 name: name
24710 });
24711 shiftQueue(type, sourceUpdater);
24712 };
24713
24714 var onUpdateend = function onUpdateend(type, sourceUpdater) {
24715 return function (e) {
24716 // Although there should, in theory, be a pending action for any updateend receieved,
24717 // there are some actions that may trigger updateend events without set definitions in
24718 // the w3c spec. For instance, setting the duration on the media source may trigger
24719 // updateend events on source buffers. This does not appear to be in the spec. As such,
24720 // if we encounter an updateend without a corresponding pending action from our queue
24721 // for that source buffer type, process the next action.
24722 if (sourceUpdater.queuePending[type]) {
24723 var doneFn = sourceUpdater.queuePending[type].doneFn;
24724 sourceUpdater.queuePending[type] = null;
24725
24726 if (doneFn) {
24727 // if there's an error, report it
24728 doneFn(sourceUpdater[type + "Error_"]);
24729 }
24730 }
24731
24732 shiftQueue(type, sourceUpdater);
24733 };
24734 };
24735 /**
24736 * A queue of callbacks to be serialized and applied when a
24737 * MediaSource and its associated SourceBuffers are not in the
24738 * updating state. It is used by the segment loader to update the
24739 * underlying SourceBuffers when new data is loaded, for instance.
24740 *
24741 * @class SourceUpdater
24742 * @param {MediaSource} mediaSource the MediaSource to create the SourceBuffer from
24743 * @param {string} mimeType the desired MIME type of the underlying SourceBuffer
24744 */
24745
24746
24747 var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
24748 inheritsLoose(SourceUpdater, _videojs$EventTarget);
24749
  /**
   * Build a SourceUpdater around a MediaSource: wires the sourceopen
   * listener (which kicks the operation queue), creates the per-type
   * updateend/error handlers, and initializes all queue/offset state.
   *
   * @param {MediaSource} mediaSource the media source to manage
   */
  function SourceUpdater(mediaSource) {
    var _this;

    _this = _videojs$EventTarget.call(this) || this;
    _this.mediaSource = mediaSource;

    // once the media source opens, media source level operations can run
    _this.sourceopenListener_ = function () {
      return shiftQueue('mediaSource', assertThisInitialized(_this));
    };

    _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);

    _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0

    _this.audioTimestampOffset_ = 0;
    _this.videoTimestampOffset_ = 0;
    // pending operations; queuePending tracks the in-flight entry per type
    _this.queue = [];
    _this.queuePending = {
      audio: null,
      video: null
    };
    // audio appends held back until the first video append has been queued
    _this.delayedAudioAppendQueue_ = [];
    _this.videoAppendQueued_ = false;
    _this.codecs = {};
    _this.onVideoUpdateEnd_ = onUpdateend('video', assertThisInitialized(_this));
    _this.onAudioUpdateEnd_ = onUpdateend('audio', assertThisInitialized(_this));

    _this.onVideoError_ = function (e) {
      // used for debugging
      _this.videoError_ = e;
    };

    _this.onAudioError_ = function (e) {
      // used for debugging
      _this.audioError_ = e;
    };

    _this.createdSourceBuffers_ = false;
    _this.initializedEme_ = false;
    _this.triggeredReady_ = false;
    return _this;
  }
24792
24793 var _proto = SourceUpdater.prototype;
24794
  // mark EME initialization as complete; may make the updater ready
  _proto.initializedEme = function initializedEme() {
    this.initializedEme_ = true;
    this.triggerReady();
  };
24799
  // whether createSourceBuffers has completed
  _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
    // if false, likely waiting on one of the segment loaders to get enough data to create
    // source buffers
    return this.createdSourceBuffers_;
  };
24805
  // whether initializedEme has been called
  _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
    return this.initializedEme_;
  };
24809
  // ready = source buffers exist AND EME has been initialized; gates
  // per-type queue processing in shiftQueue
  _proto.ready = function ready() {
    return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
  };
24813
  /**
   * Create the initial source buffers for the given codecs (one-shot;
   * subsequent calls are no-ops) and announce readiness.
   *
   * @param {Object} codecs map of buffer type ('audio'/'video') to codec string
   */
  _proto.createSourceBuffers = function createSourceBuffers(codecs) {
    if (this.hasCreatedSourceBuffers()) {
      // already created them before
      return;
    } // the initial addOrChangeSourceBuffers will always be
    // two add buffers.


    this.addOrChangeSourceBuffers(codecs);
    this.createdSourceBuffers_ = true;
    this.trigger('createdsourcebuffers');
    this.triggerReady();
  };
24827
  // fire the 'ready' event exactly once, and only when ready() holds
  _proto.triggerReady = function triggerReady() {
    // only allow ready to be triggered once, this prevents the case
    // where:
    // 1. we trigger createdsourcebuffers
    // 2. ie 11 synchronously initializes eme
    // 3. the synchronous initialization causes us to trigger ready
    // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
    if (this.ready() && !this.triggeredReady_) {
      this.triggeredReady_ = true;
      this.trigger('ready');
    }
  }
24840 /**
24841 * Add a type of source buffer to the media source.
24842 *
24843 * @param {string} type
24844 * The type of source buffer to add.
24845 *
24846 * @param {string} codec
24847 * The codec to add the source buffer with.
24848 */
24849 ;
24850
24851 _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
24852 pushQueue({
24853 type: 'mediaSource',
24854 sourceUpdater: this,
24855 action: actions.addSourceBuffer(type, codec),
24856 name: 'addSourceBuffer'
24857 });
24858 }
24859 /**
24860 * call abort on a source buffer.
24861 *
24862 * @param {string} type
24863 * The type of source buffer to call abort on.
24864 */
24865 ;
24866
24867 _proto.abort = function abort(type) {
24868 pushQueue({
24869 type: type,
24870 sourceUpdater: this,
24871 action: actions.abort(type),
24872 name: 'abort'
24873 });
24874 }
24875 /**
24876 * Call removeSourceBuffer and remove a specific type
24877 * of source buffer on the mediaSource.
24878 *
24879 * @param {string} type
24880 * The type of source buffer to remove.
24881 */
24882 ;
24883
24884 _proto.removeSourceBuffer = function removeSourceBuffer(type) {
24885 if (!this.canRemoveSourceBuffer()) {
24886 videojs__default["default"].log.error('removeSourceBuffer is not supported!');
24887 return;
24888 }
24889
24890 pushQueue({
24891 type: 'mediaSource',
24892 sourceUpdater: this,
24893 action: actions.removeSourceBuffer(type),
24894 name: 'removeSourceBuffer'
24895 });
24896 }
24897 /**
24898 * Whether or not the removeSourceBuffer function is supported
24899 * on the mediaSource.
24900 *
24901 * @return {boolean}
24902 * if removeSourceBuffer can be called.
24903 */
24904 ;
24905
24906 _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
24907 // IE reports that it supports removeSourceBuffer, but often throws
24908 // errors when attempting to use the function. So we report that it
24909 // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
24910 // throws errors, so we report that it does not support this as well.
24911 return !videojs__default["default"].browser.IE_VERSION && !videojs__default["default"].browser.IS_FIREFOX && window.MediaSource && window.MediaSource.prototype && typeof window.MediaSource.prototype.removeSourceBuffer === 'function';
24912 }
24913 /**
24914 * Whether or not the changeType function is supported
24915 * on our SourceBuffers.
24916 *
24917 * @return {boolean}
24918 * if changeType can be called.
24919 */
24920 ;
24921
24922 SourceUpdater.canChangeType = function canChangeType() {
24923 return window.SourceBuffer && window.SourceBuffer.prototype && typeof window.SourceBuffer.prototype.changeType === 'function';
24924 }
24925 /**
24926 * Whether or not the changeType function is supported
24927 * on our SourceBuffers.
24928 *
24929 * @return {boolean}
24930 * if changeType can be called.
24931 */
24932 ;
24933
  // instance-level convenience wrapper around the static feature detect
  _proto.canChangeType = function canChangeType() {
    return this.constructor.canChangeType();
  }
24937 /**
24938 * Call the changeType function on a source buffer, given the code and type.
24939 *
24940 * @param {string} type
24941 * The type of source buffer to call changeType on.
24942 *
24943 * @param {string} codec
24944 * The codec string to change type with on the source buffer.
24945 */
24946 ;
24947
24948 _proto.changeType = function changeType(type, codec) {
24949 if (!this.canChangeType()) {
24950 videojs__default["default"].log.error('changeType is not supported!');
24951 return;
24952 }
24953
24954 pushQueue({
24955 type: type,
24956 sourceUpdater: this,
24957 action: actions.changeType(codec),
24958 name: 'changeType'
24959 });
24960 }
24961 /**
24962 * Add source buffers with a codec or, if they are already created,
24963 * call changeType on source buffers using changeType.
24964 *
24965 * @param {Object} codecs
24966 * Codecs to switch to
24967 */
24968 ;
24969
24970 _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
24971 var _this2 = this;
24972
24973 if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
24974 throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
24975 }
24976
24977 Object.keys(codecs).forEach(function (type) {
24978 var codec = codecs[type];
24979
24980 if (!_this2.hasCreatedSourceBuffers()) {
24981 return _this2.addSourceBuffer(type, codec);
24982 }
24983
24984 if (_this2.canChangeType()) {
24985 _this2.changeType(type, codec);
24986 }
24987 });
24988 }
24989 /**
24990 * Queue an update to append an ArrayBuffer.
24991 *
24992 * @param {MediaObject} object containing audioBytes and/or videoBytes
24993 * @param {Function} done the function to call when done
24994 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
24995 */
24996 ;
24997
  /**
   * Queue an append of bytes to the audio or video source buffer. Audio
   * appends that arrive before the first video append has been queued are
   * held in delayedAudioAppendQueue_ and replayed once video is queued.
   *
   * @param {Object} options
   * @param {Object} [options.segmentInfo] segment metadata (for logging)
   * @param {string} options.type 'audio' or 'video'
   * @param {Uint8Array} options.bytes the bytes to append
   * @param {Function} doneFn called when the append settles (or errors)
   */
  _proto.appendBuffer = function appendBuffer(options, doneFn) {
    var _this3 = this;

    var segmentInfo = options.segmentInfo,
        type = options.type,
        bytes = options.bytes;
    this.processedAppend_ = true;

    // hold back audio until the first video append has been queued so the
    // buffers start in step
    if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
      this.delayedAudioAppendQueue_.push([options, doneFn]);
      this.logger_("delayed audio append of " + bytes.length + " until video append");
      return;
    } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
    // not be fired. This means that the queue will be blocked until the next action
    // taken by the segment-loader. Provide a mechanism for segment-loader to handle
    // these errors by calling the doneFn with the specific error.


    var onError = doneFn;
    pushQueue({
      type: type,
      sourceUpdater: this,
      action: actions.appendBuffer(bytes, segmentInfo || {
        mediaIndex: -1
      }, onError),
      doneFn: doneFn,
      name: 'appendBuffer'
    });

    if (type === 'video') {
      this.videoAppendQueued_ = true;

      if (!this.delayedAudioAppendQueue_.length) {
        return;
      }

      // replay any audio appends that were waiting on this video append;
      // copy first since appendBuffer may re-queue entries
      var queue = this.delayedAudioAppendQueue_.slice();
      this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
      this.delayedAudioAppendQueue_.length = 0;
      queue.forEach(function (que) {
        _this3.appendBuffer.apply(_this3, que);
      });
    }
  }
25042 /**
25043 * Get the audio buffer's buffered timerange.
25044 *
25045 * @return {TimeRange}
25046 * The audio buffer's buffered time range
25047 */
25048 ;
25049
25050 _proto.audioBuffered = function audioBuffered() {
25051 // no media source/source buffer or it isn't in the media sources
25052 // source buffer list
25053 if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
25054 return videojs__default["default"].createTimeRange();
25055 }
25056
25057 return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs__default["default"].createTimeRange();
25058 }
25059 /**
25060 * Get the video buffer's buffered timerange.
25061 *
25062 * @return {TimeRange}
25063 * The video buffer's buffered time range
25064 */
25065 ;
25066
25067 _proto.videoBuffered = function videoBuffered() {
25068 // no media source/source buffer or it isn't in the media sources
25069 // source buffer list
25070 if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
25071 return videojs__default["default"].createTimeRange();
25072 }
25073
25074 return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs__default["default"].createTimeRange();
25075 }
25076 /**
25077 * Get a combined video/audio buffer's buffered timerange.
25078 *
25079 * @return {TimeRange}
25080 * the combined time range
25081 */
25082 ;
25083
25084 _proto.buffered = function buffered() {
25085 var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
25086 var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
25087
25088 if (audio && !video) {
25089 return this.audioBuffered();
25090 }
25091
25092 if (video && !audio) {
25093 return this.videoBuffered();
25094 }
25095
25096 return bufferIntersection(this.audioBuffered(), this.videoBuffered());
25097 }
25098 /**
25099 * Add a callback to the queue that will set duration on the mediaSource.
25100 *
25101 * @param {number} duration
25102 * The duration to set
25103 *
25104 * @param {Function} [doneFn]
25105 * function to run after duration has been set.
25106 */
25107 ;
25108
25109 _proto.setDuration = function setDuration(duration, doneFn) {
25110 if (doneFn === void 0) {
25111 doneFn = noop;
25112 }
25113
25114 // In order to set the duration on the media source, it's necessary to wait for all
25115 // source buffers to no longer be updating. "If the updating attribute equals true on
25116 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
25117 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
25118 pushQueue({
25119 type: 'mediaSource',
25120 sourceUpdater: this,
25121 action: actions.duration(duration),
25122 name: 'duration',
25123 doneFn: doneFn
25124 });
25125 }
25126 /**
25127 * Add a mediaSource endOfStream call to the queue
25128 *
25129 * @param {Error} [error]
25130 * Call endOfStream with an error
25131 *
25132 * @param {Function} [doneFn]
25133 * A function that should be called when the
25134 * endOfStream call has finished.
25135 */
25136 ;
25137
25138 _proto.endOfStream = function endOfStream(error, doneFn) {
25139 if (error === void 0) {
25140 error = null;
25141 }
25142
25143 if (doneFn === void 0) {
25144 doneFn = noop;
25145 }
25146
25147 if (typeof error !== 'string') {
25148 error = undefined;
25149 } // In order to set the duration on the media source, it's necessary to wait for all
25150 // source buffers to no longer be updating. "If the updating attribute equals true on
25151 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
25152 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
25153
25154
25155 pushQueue({
25156 type: 'mediaSource',
25157 sourceUpdater: this,
25158 action: actions.endOfStream(error),
25159 name: 'endOfStream',
25160 doneFn: doneFn
25161 });
25162 }
25163 /**
25164 * Queue an update to remove a time range from the buffer.
25165 *
25166 * @param {number} start where to start the removal
25167 * @param {number} end where to end the removal
25168 * @param {Function} [done=noop] optional callback to be executed when the remove
25169 * operation is complete
25170 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
25171 */
25172 ;
25173
25174 _proto.removeAudio = function removeAudio(start, end, done) {
25175 if (done === void 0) {
25176 done = noop;
25177 }
25178
25179 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
25180 done();
25181 return;
25182 }
25183
25184 pushQueue({
25185 type: 'audio',
25186 sourceUpdater: this,
25187 action: actions.remove(start, end),
25188 doneFn: done,
25189 name: 'remove'
25190 });
25191 }
25192 /**
25193 * Queue an update to remove a time range from the buffer.
25194 *
25195 * @param {number} start where to start the removal
25196 * @param {number} end where to end the removal
25197 * @param {Function} [done=noop] optional callback to be executed when the remove
25198 * operation is complete
25199 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
25200 */
25201 ;
25202
_proto.removeVideo = function removeVideo(start, end, done) {
  // Default to a no-op completion callback when none was supplied.
  var onRemoveDone = done === void 0 ? noop : done;
  var buffered = this.videoBuffered();

  // Nothing buffered (or the first range ends at time zero): there is
  // nothing to remove, so report completion immediately.
  if (!buffered.length || buffered.end(0) === 0) {
    onRemoveDone();
    return;
  }

  // Queue the remove so it runs after any in-flight video operations.
  pushQueue({
    type: 'video',
    sourceUpdater: this,
    action: actions.remove(start, end),
    doneFn: onRemoveDone,
    name: 'remove'
  });
}
25221 /**
25222 * Whether the underlying sourceBuffer is updating or not
25223 *
25224 * @return {boolean} the updating status of the SourceBuffer
25225 */
25226 ;
25227
_proto.updating = function updating() {
  // True when either the audio or the video source buffer reports an
  // operation still in progress.
  return Boolean(_updating('audio', this) || _updating('video', this));
}
25236 /**
25237 * Set/get the timestampoffset on the audio SourceBuffer
25238 *
25239 * @return {number} the timestamp offset
25240 */
25241 ;
25242
_proto.audioTimestampOffset = function audioTimestampOffset(offset) {
  // Only queue an update when a new offset was supplied, an audio buffer
  // exists, and the offset actually differs from the cached value.
  var shouldUpdate = typeof offset !== 'undefined' &&
    this.audioBuffer &&
    this.audioTimestampOffset_ !== offset;

  if (shouldUpdate) {
    pushQueue({
      type: 'audio',
      sourceUpdater: this,
      action: actions.timestampOffset(offset),
      name: 'timestampOffset'
    });
    this.audioTimestampOffset_ = offset;
  }

  // Acts as a getter either way.
  return this.audioTimestampOffset_;
}
25257 /**
25258 * Set/get the timestampoffset on the video SourceBuffer
25259 *
25260 * @return {number} the timestamp offset
25261 */
25262 ;
25263
_proto.videoTimestampOffset = function videoTimestampOffset(offset) {
  // Fix: compare against the cached property `videoTimestampOffset_`. The
  // original compared against `this.videoTimestampOffset` — this method
  // itself — which is never equal to a number, so the "no point in updating
  // if it's the same" short-circuit never fired and a redundant
  // timestampOffset action was queued on every call. (The audio twin,
  // audioTimestampOffset, already uses the underscored property.)
  if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
  this.videoTimestampOffset_ !== offset) {
    pushQueue({
      type: 'video',
      sourceUpdater: this,
      action: actions.timestampOffset(offset),
      name: 'timestampOffset'
    });
    this.videoTimestampOffset_ = offset;
  }

  // Acts as a getter either way.
  return this.videoTimestampOffset_;
}
25278 /**
25279 * Add a function to the queue that will be called
25280 * when it is its turn to run in the audio queue.
25281 *
25282 * @param {Function} callback
25283 * The callback to queue.
25284 */
25285 ;
25286
_proto.audioQueueCallback = function audioQueueCallback(callback) {
  // Without an audio source buffer there is no queue to schedule on.
  if (this.audioBuffer) {
    pushQueue({
      type: 'audio',
      sourceUpdater: this,
      action: actions.callback(callback),
      name: 'callback'
    });
  }
}
25299 /**
25300 * Add a function to the queue that will be called
25301 * when it is its turn to run in the video queue.
25302 *
25303 * @param {Function} callback
25304 * The callback to queue.
25305 */
25306 ;
25307
_proto.videoQueueCallback = function videoQueueCallback(callback) {
  // Without a video source buffer there is no queue to schedule on.
  if (this.videoBuffer) {
    pushQueue({
      type: 'video',
      sourceUpdater: this,
      action: actions.callback(callback),
      name: 'callback'
    });
  }
}
25320 /**
25321 * dispose of the source updater and the underlying sourceBuffer
25322 */
25323 ;
25324
_proto.dispose = function dispose() {
  var sourceUpdater = this;

  this.trigger('dispose');

  // Abort in-flight operations and tear down each source buffer. When the
  // media source no longer allows immediate removal, queue the cleanup so
  // it runs after any pending operations on that buffer instead.
  bufferTypes.forEach(function (type) {
    sourceUpdater.abort(type);

    if (sourceUpdater.canRemoveSourceBuffer()) {
      sourceUpdater.removeSourceBuffer(type);
    } else {
      sourceUpdater[type + "QueueCallback"](function () {
        return cleanupBuffer(type, sourceUpdater);
      });
    }
  });

  // Drop any appends that were waiting on the other buffer type.
  this.videoAppendQueued_ = false;
  this.delayedAudioAppendQueue_.length = 0;

  if (this.sourceopenListener_) {
    this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
  }

  this.off();
};
25349
25350 return SourceUpdater;
25351 }(videojs__default["default"].EventTarget);
25352
// Babel runtime helper: returns the prototype of `o`. On first call the
// helper rewrites itself (and its CommonJS export) to the chosen
// implementation — native Object.getPrototypeOf when Object.setPrototypeOf
// exists, otherwise a __proto__-based fallback — so later calls skip the
// feature detection.
var getPrototypeOf = createCommonjsModule(function (module) {
  function _getPrototypeOf(o) {
    module.exports = _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) {
      return o.__proto__ || Object.getPrototypeOf(o);
    };
    module.exports["default"] = module.exports, module.exports.__esModule = true;
    return _getPrototypeOf(o);
  }

  module.exports = _getPrototypeOf;
  module.exports["default"] = module.exports, module.exports.__esModule = true;
});
25365
// Babel runtime helper: detect whether `fn` is a natively implemented
// function (native implementations stringify with a "[native code]" body).
var isNativeFunction = createCommonjsModule(function (module) {
  function _isNativeFunction(fn) {
    var source = Function.toString.call(fn);
    return source.indexOf("[native code]") > -1;
  }

  module.exports = _isNativeFunction;
  module.exports["default"] = module.exports, module.exports.__esModule = true;
});
25374
// Babel runtime helper: feature-detect a trustworthy Reflect.construct.
// Returns false when Reflect.construct is absent or is core-js's partial
// ("sham") implementation; returns true when Proxy exists (engines with
// Proxy ship a spec-compliant Reflect.construct); otherwise probes by
// actually constructing a Boolean with a custom new.target.
var isNativeReflectConstruct = createCommonjsModule(function (module) {
  function _isNativeReflectConstruct() {
    if (typeof Reflect === "undefined" || !Reflect.construct) return false;
    if (Reflect.construct.sham) return false;
    if (typeof Proxy === "function") return true;

    try {
      Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {}));
      return true;
    } catch (e) {
      return false;
    }
  }

  module.exports = _isNativeReflectConstruct;
  module.exports["default"] = module.exports, module.exports.__esModule = true;
});
25392
// Babel runtime helper: emulate `new Parent(...args)` with an optional
// `Class` used as new.target. Uses native Reflect.construct when the
// feature detection above deems it trustworthy; otherwise builds the
// instance via Function.bind and patches its prototype. Like the other
// helpers it rewrites itself (and its export) on first call.
var construct = createCommonjsModule(function (module) {
  function _construct(Parent, args, Class) {
    if (isNativeReflectConstruct()) {
      module.exports = _construct = Reflect.construct;
      module.exports["default"] = module.exports, module.exports.__esModule = true;
    } else {
      module.exports = _construct = function _construct(Parent, args, Class) {
        // bind expects (thisArg, ...boundArgs); the leading null fills thisArg
        var a = [null];
        a.push.apply(a, args);
        var Constructor = Function.bind.apply(Parent, a);
        var instance = new Constructor();
        if (Class) setPrototypeOf(instance, Class.prototype);
        return instance;
      };

      module.exports["default"] = module.exports, module.exports.__esModule = true;
    }

    return _construct.apply(null, arguments);
  }

  module.exports = _construct;
  module.exports["default"] = module.exports, module.exports.__esModule = true;
});
25417
// Babel runtime helper: make a native built-in (e.g. Error) subclassable by
// transpiled ES5 classes. Produces — and memoizes in a Map when available —
// a Wrapper whose construction routes through construct() so the native
// base is initialized with the actual subclass as new.target.
var wrapNativeSuper = createCommonjsModule(function (module) {
  function _wrapNativeSuper(Class) {
    var _cache = typeof Map === "function" ? new Map() : undefined;

    module.exports = _wrapNativeSuper = function _wrapNativeSuper(Class) {
      // Pass through null and anything that isn't a native function.
      if (Class === null || !isNativeFunction(Class)) return Class;

      if (typeof Class !== "function") {
        throw new TypeError("Super expression must either be null or a function");
      }

      if (typeof _cache !== "undefined") {
        if (_cache.has(Class)) return _cache.get(Class);

        // Cache before returning so repeated wraps reuse the same Wrapper.
        _cache.set(Class, Wrapper);
      }

      function Wrapper() {
        return construct(Class, arguments, getPrototypeOf(this).constructor);
      }

      Wrapper.prototype = Object.create(Class.prototype, {
        constructor: {
          value: Wrapper,
          enumerable: false,
          writable: true,
          configurable: true
        }
      });
      return setPrototypeOf(Wrapper, Class);
    };

    module.exports["default"] = module.exports, module.exports.__esModule = true;
    return _wrapNativeSuper(Class);
  }

  module.exports = _wrapNativeSuper;
  module.exports["default"] = module.exports, module.exports.__esModule = true;
});
25457
/**
 * Decode a Uint8Array of UTF-8 bytes into a JavaScript string.
 *
 * The bytes are mapped to a binary string and then decoded through the
 * escape/decodeURIComponent round-trip, which interprets them as UTF-8.
 * The binary string is built in fixed-size chunks: applying
 * String.fromCharCode to one enormous argument list (as the previous
 * implementation did) can exceed the engine's maximum argument/stack
 * limit for large segments.
 *
 * @param {Uint8Array} uintArray - the raw UTF-8 bytes to decode
 * @return {string} the decoded string
 */
var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
  // 32k arguments per call is safely under every engine's limit.
  var CHUNK_SIZE = 0x8000;
  var binary = '';

  for (var i = 0; i < uintArray.length; i += CHUNK_SIZE) {
    // slice.call works for both typed arrays and plain arrays.
    var chunk = Array.prototype.slice.call(uintArray, i, i + CHUNK_SIZE);
    binary += String.fromCharCode.apply(null, chunk);
  }

  return decodeURIComponent(escape(binary));
};
25461
// Two WebVTT line terminators ('\n\n') as raw bytes; appended after a WebVTT
// header to satisfy the spec's requirement of two or more line terminators
// between the header and the rest of the file.
var VTT_LINE_TERMINATORS = new Uint8Array([
  '\n'.charCodeAt(0),
  '\n'.charCodeAt(0)
]);
25465
// Error thrown when VTT cues need to be parsed but vtt.js (window.WebVTT)
// has not been loaded. Extends the native Error via the transpiled
// wrapNativeSuper/inheritsLoose helpers above.
var NoVttJsError = /*#__PURE__*/function (_Error) {
  inheritsLoose(NoVttJsError, _Error);

  function NoVttJsError() {
    return _Error.call(this, 'Trying to parse received VTT cues, but there is no WebVTT. Make sure vtt.js is loaded.') || this;
  }

  return NoVttJsError;
}( /*#__PURE__*/wrapNativeSuper(Error));
25475 /**
25476 * An object that manages segment loading and appending.
25477 *
25478 * @class VTTSegmentLoader
25479 * @param {Object} options required and optional options
25480 * @extends videojs.EventTarget
25481 */
25482
25483
var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
  inheritsLoose(VTTSegmentLoader, _SegmentLoader);

  function VTTSegmentLoader(settings, options) {
    var _this;

    if (options === void 0) {
      options = {};
    }

    _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
    // however, VTTSegmentLoader has no need of a media source, so delete the reference

    _this.mediaSource_ = null;
    _this.subtitlesTrack_ = null;
    _this.loaderType_ = 'subtitle';
    _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
    _this.loadVttJs = settings.loadVttJs; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
    // the sync controller leads to improper behavior.

    _this.shouldSaveSegmentTimingInfo_ = false;
    return _this;
  }

  var _proto = VTTSegmentLoader.prototype;

  _proto.createTransmuxer_ = function createTransmuxer_() {
    // don't need to transmux any subtitles
    return null;
  }
  /**
   * Indicates which time ranges are buffered
   *
   * @return {TimeRange}
   *         TimeRange object representing the current buffered ranges
   */
  ;

  _proto.buffered_ = function buffered_() {
    if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
      return videojs__default["default"].createTimeRanges();
    }

    var cues = this.subtitlesTrack_.cues;
    var start = cues[0].startTime;
    // NOTE(review): the buffered end is taken from the last cue's startTime,
    // not its endTime — presumably so loading continues up to the point where
    // a cue actually begins; confirm before changing.
    var end = cues[cues.length - 1].startTime;
    return videojs__default["default"].createTimeRanges([[start, end]]);
  }
  /**
   * Gets and sets init segment for the provided map
   *
   * @param {Object} map
   *        The map object representing the init segment to get or set
   * @param {boolean=} set
   *        If true, the init segment for the provided map should be saved
   * @return {Object}
   *         map object for desired init segment
   */
  ;

  _proto.initSegmentForMap = function initSegmentForMap(map, set) {
    if (set === void 0) {
      set = false;
    }

    if (!map) {
      return null;
    }

    var id = initSegmentId(map);
    var storedMap = this.initSegments_[id];

    if (set && !storedMap && map.bytes) {
      // append WebVTT line terminators to the media initialization segment if it exists
      // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
      // requires two or more WebVTT line terminators between the WebVTT header and the
      // rest of the file
      var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
      var combinedSegment = new Uint8Array(combinedByteLength);
      combinedSegment.set(map.bytes);
      combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
      this.initSegments_[id] = storedMap = {
        resolvedUri: map.resolvedUri,
        byterange: map.byterange,
        bytes: combinedSegment
      };
    }

    return storedMap || map;
  }
  /**
   * Returns true if all configuration required for loading is present, otherwise false.
   *
   * @return {boolean} True if the all configuration is ready for loading
   * @private
   */
  ;

  _proto.couldBeginLoading_ = function couldBeginLoading_() {
    return this.playlist_ && this.subtitlesTrack_ && !this.paused();
  }
  /**
   * Once all the starting parameters have been specified, begin
   * operation. This method should only be invoked from the INIT
   * state.
   *
   * @private
   */
  ;

  _proto.init_ = function init_() {
    this.state = 'READY';
    this.resetEverything();
    return this.monitorBuffer_();
  }
  /**
   * Set a subtitle track on the segment loader to add subtitles to
   *
   * @param {TextTrack=} track
   *        The text track to add loaded subtitles to
   * @return {TextTrack}
   *        Returns the subtitles track
   */
  ;

  _proto.track = function track(_track) {
    // with no argument, act as a getter
    if (typeof _track === 'undefined') {
      return this.subtitlesTrack_;
    }

    this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
    // buffering now

    if (this.state === 'INIT' && this.couldBeginLoading_()) {
      this.init_();
    }

    return this.subtitlesTrack_;
  }
  /**
   * Remove any data in the source buffer between start and end times
   *
   * @param {number} start - the start time of the region to remove from the buffer
   * @param {number} end - the end time of the region to remove from the buffer
   */
  ;

  _proto.remove = function remove(start, end) {
    removeCuesFromTrack(start, end, this.subtitlesTrack_);
  }
  /**
   * fill the buffer with segements unless the sourceBuffers are
   * currently updating
   *
   * Note: this function should only ever be called by monitorBuffer_
   * and never directly
   *
   * @private
   */
  ;

  _proto.fillBuffer_ = function fillBuffer_() {
    var _this2 = this;

    // see if we need to begin loading immediately
    var segmentInfo = this.chooseNextRequest_();

    if (!segmentInfo) {
      return;
    }

    if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
      // We don't have the timestamp offset that we need to sync subtitles.
      // Rerun on a timestamp offset or user interaction.
      var checkTimestampOffset = function checkTimestampOffset() {
        _this2.state = 'READY';

        if (!_this2.paused()) {
          // if not paused, queue a buffer check as soon as possible
          _this2.monitorBuffer_();
        }
      };

      this.syncController_.one('timestampoffset', checkTimestampOffset);
      this.state = 'WAITING_ON_TIMELINE';
      return;
    }

    this.loadSegment_(segmentInfo);
  } // never set a timestamp offset for vtt segments.
  ;

  _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
    return null;
  };

  _proto.chooseNextRequest_ = function chooseNextRequest_() {
    // defer to the base loader's choice, then skip past known-empty segments
    return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
  }
  /**
   * Prevents the segment loader from requesting segments we know contain no subtitles
   * by walking forward until we find the next segment that we don't know whether it is
   * empty or not.
   *
   * @param {Object} segmentInfo
   *        a segment info object that describes the current segment
   * @return {Object}
   *         a segment info object that describes the current segment
   */
  ;

  _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
    while (segmentInfo && segmentInfo.segment.empty) {
      // stop at the last possible segmentInfo
      if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
        segmentInfo = null;
        break;
      }

      segmentInfo = this.generateSegmentInfo_({
        playlist: segmentInfo.playlist,
        mediaIndex: segmentInfo.mediaIndex + 1,
        startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
        isSyncRequest: segmentInfo.isSyncRequest
      });
    }

    return segmentInfo;
  };

  // Record the error, pause the loader, and notify listeners.
  _proto.stopForError = function stopForError(error) {
    this.error(error);
    this.state = 'READY';
    this.pause();
    this.trigger('error');
  }
  /**
   * append a decrypted segement to the SourceBuffer through a SourceUpdater
   *
   * @private
   */
  ;

  _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
    var _this3 = this;

    // without a track to append cues to there is nothing to do
    if (!this.subtitlesTrack_) {
      this.state = 'READY';
      return;
    }

    this.saveTransferStats_(simpleSegment.stats); // the request was aborted

    if (!this.pendingSegment_) {
      this.state = 'READY';
      this.mediaRequestsAborted += 1;
      return;
    }

    if (error) {
      if (error.code === REQUEST_ERRORS.TIMEOUT) {
        this.handleTimeout_();
      }

      if (error.code === REQUEST_ERRORS.ABORTED) {
        this.mediaRequestsAborted += 1;
      } else {
        this.mediaRequestsErrored += 1;
      }

      this.stopForError(error);
      return;
    }

    var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
    // maintain functionality between segment loaders

    this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats); // if this request included a segment key, save that data in the cache

    if (simpleSegment.key) {
      this.segmentKey(simpleSegment.key, true);
    }

    this.state = 'APPENDING'; // used for tests

    this.trigger('appending');
    var segment = segmentInfo.segment;

    if (segment.map) {
      segment.map.bytes = simpleSegment.map.bytes;
    }

    segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded, otherwise, load it and wait till it finished loading

    if (typeof window.WebVTT !== 'function' && typeof this.loadVttJs === 'function') {
      this.state = 'WAITING_ON_VTTJS'; // should be fine to call multiple times
      // script will be loaded once but multiple listeners will be added to the queue, which is expected.

      this.loadVttJs().then(function () {
        // re-enter this method once vtt.js is available
        return _this3.segmentRequestFinished_(error, simpleSegment, result);
      }, function () {
        return _this3.stopForError({
          message: 'Error loading vtt.js'
        });
      });
      return;
    }

    segment.requested = true;

    try {
      this.parseVTTCues_(segmentInfo);
    } catch (e) {
      this.stopForError({
        message: e.message
      });
      return;
    }

    this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);

    if (segmentInfo.cues.length) {
      segmentInfo.timingInfo = {
        start: segmentInfo.cues[0].startTime,
        end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
      };
    } else {
      segmentInfo.timingInfo = {
        start: segmentInfo.startOfSegment,
        end: segmentInfo.startOfSegment + segmentInfo.duration
      };
    }

    if (segmentInfo.isSyncRequest) {
      this.trigger('syncinfoupdate');
      this.pendingSegment_ = null;
      this.state = 'READY';
      return;
    }

    segmentInfo.byteLength = segmentInfo.bytes.byteLength;
    this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
    // the subtitle track

    segmentInfo.cues.forEach(function (cue) {
      _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
    }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
    // cues to have identical time-intervals, but if the text is also identical
    // we can safely assume it is a duplicate that can be removed (ex. when a cue
    // "overlaps" VTT segments)

    removeDuplicateCuesFromTrack(this.subtitlesTrack_);
    this.handleAppendsDone_();
  };

  _proto.handleData_ = function handleData_() {// noop as we shouldn't be getting video/audio data captions
    // that we do not support here.
  };

  _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
  }
  /**
   * Uses the WebVTT parser to parse the segment response
   *
   * @throws NoVttJsError
   *
   * @param {Object} segmentInfo
   *        a segment info object that describes the current segment
   * @private
   */
  ;

  _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
    var decoder;
    var decodeBytesToString = false;

    if (typeof window.WebVTT !== 'function') {
      // caller is responsible for exception handling.
      throw new NoVttJsError();
    }

    if (typeof window.TextDecoder === 'function') {
      decoder = new window.TextDecoder('utf8');
    } else {
      // fall back to vtt.js's string decoder; bytes must be converted to a
      // string before being handed to the parser in this mode
      decoder = window.WebVTT.StringDecoder();
      decodeBytesToString = true;
    }

    var parser = new window.WebVTT.Parser(window, window.vttjs, decoder);
    segmentInfo.cues = [];
    segmentInfo.timestampmap = {
      MPEGTS: 0,
      LOCAL: 0
    };
    parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);

    parser.ontimestampmap = function (map) {
      segmentInfo.timestampmap = map;
    };

    parser.onparsingerror = function (error) {
      videojs__default["default"].log.warn('Error encountered when parsing cues: ' + error.message);
    };

    if (segmentInfo.segment.map) {
      var mapData = segmentInfo.segment.map.bytes;

      if (decodeBytesToString) {
        mapData = uint8ToUtf8(mapData);
      }

      parser.parse(mapData);
    }

    var segmentData = segmentInfo.bytes;

    if (decodeBytesToString) {
      segmentData = uint8ToUtf8(segmentData);
    }

    parser.parse(segmentData);
    parser.flush();
  }
  /**
   * Updates the start and end times of any cues parsed by the WebVTT parser using
   * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
   * from the SyncController
   *
   * @param {Object} segmentInfo
   *        a segment info object that describes the current segment
   * @param {Object} mappingObj
   *        object containing a mapping from TS to media time
   * @param {Object} playlist
   *        the playlist object containing the segment
   * @private
   */
  ;

  _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
    var segment = segmentInfo.segment;

    if (!mappingObj) {
      // If the sync controller does not have a mapping of TS to Media Time for the
      // timeline, then we don't have enough information to update the cue
      // start/end times
      return;
    }

    if (!segmentInfo.cues.length) {
      // If there are no cues, we also do not have enough information to figure out
      // segment timing. Mark that the segment contains no cues so we don't re-request
      // an empty segment.
      segment.empty = true;
      return;
    }

    var timestampmap = segmentInfo.timestampmap;
    // Offset (seconds) converting a cue's local VTT time to player media
    // time: the TS timestamp (scaled to seconds) minus the local time it
    // corresponds to, plus the timeline's TS-to-media-time mapping.
    var diff = timestampmap.MPEGTS / clock.ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
    segmentInfo.cues.forEach(function (cue) {
      // First convert cue time to TS time using the timestamp-map provided within the vtt
      cue.startTime += diff;
      cue.endTime += diff;
    });

    if (!playlist.syncInfo) {
      var firstStart = segmentInfo.cues[0].startTime;
      var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
      playlist.syncInfo = {
        mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
        time: Math.min(firstStart, lastStart - segment.duration)
      };
    }
  };

  return VTTSegmentLoader;
}(SegmentLoader);
25960
25961 /**
25962 * @file ad-cue-tags.js
25963 */
25964
25965 /**
25966 * Searches for an ad cue that overlaps with the given mediaTime
25967 *
25968 * @param {Object} track
25969 * the track to find the cue for
25970 *
25971 * @param {number} mediaTime
25972 * the time to find the cue at
25973 *
25974 * @return {Object|null}
25975 * the found cue or null
25976 */
var findAdCue = function findAdCue(track, mediaTime) {
  // Scan the track's cue list for the first ad cue whose
  // [adStartTime, adEndTime] window contains mediaTime (inclusive bounds).
  var cues = track.cues;

  for (var index = 0; index < cues.length; index++) {
    var candidate = cues[index];
    var containsTime = mediaTime >= candidate.adStartTime && mediaTime <= candidate.adEndTime;

    if (containsTime) {
      return candidate;
    }
  }

  return null;
};
var updateAdCues = function updateAdCues(media, track, offset) {
  if (offset === void 0) {
    offset = 0;
  }

  if (!media.segments) {
    return;
  }

  // Walk the playlist's segments, opening an ad cue on CUE-OUT (or joining
  // one mid-flight on CUE-OUT-CONT), extending the open cue across
  // segments, and closing it when a CUE-IN is encountered.
  var mediaTime = offset;
  var cue;

  for (var i = 0; i < media.segments.length; i++) {
    var segment = media.segments[i];

    if (!cue) {
      // Since the cues will span for at least the segment duration, adding a fudge
      // factor of half segment duration will prevent duplicate cues from being
      // created when timing info is not exact (e.g. cue start time initialized
      // at 10.006677, but next call mediaTime is 10.003332 )
      cue = findAdCue(track, mediaTime + segment.duration / 2);
    }

    if (cue) {
      if ('cueIn' in segment) {
        // Found a CUE-IN so end the cue
        cue.endTime = mediaTime;
        cue.adEndTime = mediaTime;
        mediaTime += segment.duration;
        cue = null;
        continue;
      }

      if (mediaTime < cue.endTime) {
        // Already processed this mediaTime for this cue
        mediaTime += segment.duration;
        continue;
      } // otherwise extend cue until a CUE-IN is found


      cue.endTime += segment.duration;
    } else {
      if ('cueOut' in segment) {
        cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
        cue.adStartTime = mediaTime; // Assumes tag format to be
        // #EXT-X-CUE-OUT:30

        cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
        track.addCue(cue);
      }

      if ('cueOutCont' in segment) {
        // Entered into the middle of an ad cue
        // Assumes tag format to be
        // #EXT-X-CUE-OUT-CONT:10/30
        var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
            adOffset = _segment$cueOutCont$s[0],
            adTotal = _segment$cueOutCont$s[1];

        cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, '');
        cue.adStartTime = mediaTime - adOffset;
        cue.adEndTime = cue.adStartTime + adTotal;
        track.addCue(cue);
      }
    }

    mediaTime += segment.duration;
  }
};
26059
// synchronize expired playlist segments.
// the max media sequence diff is 48 hours of live stream
// content with two second segments. Anything larger than that
// will likely be invalid.
// (48 hours * 3600 seconds/hour) / 2 seconds-per-segment = 86400 segments.

var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
// Ordered list of strategies for deriving a sync-point (a known mapping from
// display time to segment index) for a playlist. Each strategy's run() returns
// a { time, segmentIndex, partIndex } object or null when it cannot help.
var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
// the equivalence display-time 0 === segment-index 0
{
  name: 'VOD',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    if (duration !== Infinity) {
      var syncPoint = {
        time: 0,
        segmentIndex: 0,
        partIndex: null
      };
      return syncPoint;
    }

    return null;
  }
}, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
{
  name: 'ProgramDateTime',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
      return null;
    }

    var syncPoint = null;
    var lastDistance = null;
    var partsAndSegments = getPartsAndSegments(playlist);
    currentTime = currentTime || 0;

    for (var i = 0; i < partsAndSegments.length; i++) {
      // start from the end and loop backwards for live
      // or start from the front and loop forwards for non-live
      var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
      var partAndSegment = partsAndSegments[index];
      var segment = partAndSegment.segment;
      var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];

      if (!datetimeMapping || !segment.dateTimeObject) {
        continue;
      }

      var segmentTime = segment.dateTimeObject.getTime() / 1000;
      var start = segmentTime + datetimeMapping; // take part duration into account.

      if (segment.parts && typeof partAndSegment.partIndex === 'number') {
        for (var z = 0; z < partAndSegment.partIndex; z++) {
          start += segment.parts[z].duration;
        }
      }

      var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
      // currentTime and can stop looking for better candidates

      if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
        break;
      }

      lastDistance = distance;
      syncPoint = {
        time: start,
        segmentIndex: partAndSegment.segmentIndex,
        partIndex: partAndSegment.partIndex
      };
    }

    return syncPoint;
  }
}, // Strategy "Segment": We have a known time mapping for a timeline and a
// segment in the current timeline with timing data
{
  name: 'Segment',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    var syncPoint = null;
    var lastDistance = null;
    currentTime = currentTime || 0;
    var partsAndSegments = getPartsAndSegments(playlist);

    for (var i = 0; i < partsAndSegments.length; i++) {
      // start from the end and loop backwards for live
      // or start from the front and loop forwards for non-live
      var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
      var partAndSegment = partsAndSegments[index];
      var segment = partAndSegment.segment;
      // prefer the part's start time when one exists
      var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;

      if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
        var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
        // currentTime and can stop looking for better candidates

        if (lastDistance !== null && lastDistance < distance) {
          break;
        }

        if (!syncPoint || lastDistance === null || lastDistance >= distance) {
          lastDistance = distance;
          syncPoint = {
            time: start,
            segmentIndex: partAndSegment.segmentIndex,
            partIndex: partAndSegment.partIndex
          };
        }
      }
    }

    return syncPoint;
  }
}, // Strategy "Discontinuity": We have a discontinuity with a known
// display-time
{
  name: 'Discontinuity',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    var syncPoint = null;
    currentTime = currentTime || 0;

    if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
      var lastDistance = null;

      for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
        var segmentIndex = playlist.discontinuityStarts[i];
        var discontinuity = playlist.discontinuitySequence + i + 1;
        var discontinuitySync = syncController.discontinuities[discontinuity];

        if (discontinuitySync) {
          var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
          // currentTime and can stop looking for better candidates

          if (lastDistance !== null && lastDistance < distance) {
            break;
          }

          if (!syncPoint || lastDistance === null || lastDistance >= distance) {
            lastDistance = distance;
            syncPoint = {
              time: discontinuitySync.time,
              segmentIndex: segmentIndex,
              partIndex: null
            };
          }
        }
      }
    }

    return syncPoint;
  }
}, // Strategy "Playlist": We have a playlist with a known mapping of
// segment index to display time
{
  name: 'Playlist',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    if (playlist.syncInfo) {
      var syncPoint = {
        time: playlist.syncInfo.time,
        segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
        partIndex: null
      };
      return syncPoint;
    }

    return null;
  }
}];
26227
  /**
   * Tracks mappings between "media time" and "display time" (per timeline and
   * per discontinuity) so segment loaders can stay in sync across renditions.
   *
   * @class SyncController
   * @extends videojs.EventTarget
   */
  var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
    inheritsLoose(SyncController, _videojs$EventTarget);

    // NOTE(review): `options` is accepted but not read by this constructor.
    function SyncController(options) {
      var _this;

      _this = _videojs$EventTarget.call(this) || this; // ...for syncing across variants

      _this.timelines = [];
      _this.discontinuities = [];
      _this.timelineToDatetimeMappings = {};
      _this.logger_ = logger('SyncController');
      return _this;
    }
    /**
     * Find a sync-point for the playlist specified
     *
     * A sync-point is defined as a known mapping from display-time to
     * a segment-index in the current playlist.
     *
     * @param {Playlist} playlist
     *        The playlist that needs a sync-point
     * @param {number} duration
     *        Duration of the MediaSource (Infinite if playing a live source)
     * @param {number} currentTimeline
     *        The last timeline from which a segment was loaded
     * @param {number} currentTime
     *        Current player time, used to pick the closest candidate
     * @return {Object}
     *         A sync-point object, or null if none could be found
     */


    var _proto = SyncController.prototype;

    _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
      var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);

      if (!syncPoints.length) {
        // Signal that we need to attempt to get a sync-point manually
        // by fetching a segment in the playlist and constructing
        // a sync-point from that information
        return null;
      } // Now find the sync-point that is closest to the currentTime because
      // that should result in the most accurate guess about which segment
      // to fetch


      return this.selectSyncPoint_(syncPoints, {
        key: 'time',
        value: currentTime
      });
    }
    /**
     * Calculate the amount of time that has expired off the playlist during playback
     *
     * @param {Playlist} playlist
     *        Playlist object to calculate expired from
     * @param {number} duration
     *        Duration of the MediaSource (Infinity if playing a live source)
     * @return {number|null}
     *         The amount of time that has expired off the playlist during playback. Null
     *         if no sync-points for the playlist can be found.
     */
    ;

    _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
      if (!playlist || !playlist.segments) {
        return null;
      }

      var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time

      if (!syncPoints.length) {
        return null;
      }

      // Target segmentIndex 0 so the chosen sync-point is anchored as close
      // to the head of the playlist as possible
      var syncPoint = this.selectSyncPoint_(syncPoints, {
        key: 'segmentIndex',
        value: 0
      }); // If the sync-point is beyond the start of the playlist, we want to subtract the
      // duration from index 0 to syncPoint.segmentIndex instead of adding.

      if (syncPoint.segmentIndex > 0) {
        syncPoint.time *= -1;
      }

      return Math.abs(syncPoint.time + sumDurations({
        defaultDuration: playlist.targetDuration,
        durationList: playlist.segments,
        startIndex: syncPoint.segmentIndex,
        endIndex: 0
      }));
    }
    /**
     * Runs each sync-point strategy and returns a list of sync-points returned by the
     * strategies
     *
     * @private
     * @param {Playlist} playlist
     *        The playlist that needs a sync-point
     * @param {number} duration
     *        Duration of the MediaSource (Infinity if playing a live source)
     * @param {number} currentTimeline
     *        The last timeline from which a segment was loaded
     * @param {number} currentTime
     *        Current player time, passed through to each strategy
     * @return {Array}
     *         A list of { strategy, syncPoint } objects, one per strategy that
     *         produced a sync-point
     */
    ;

    _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
      var syncPoints = []; // Try to find a sync-point by utilizing various strategies...

      for (var i = 0; i < syncPointStrategies.length; i++) {
        var strategy = syncPointStrategies[i];
        var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);

        if (syncPoint) {
          syncPoint.strategy = strategy.name;
          syncPoints.push({
            strategy: strategy.name,
            syncPoint: syncPoint
          });
        }
      }

      return syncPoints;
    }
    /**
     * Selects the sync-point nearest the specified target
     *
     * @private
     * @param {Array} syncPoints
     *        List of sync-points to select from; must be non-empty (both
     *        callers check `syncPoints.length` before calling)
     * @param {Object} target
     *        Object specifying the property and value we are targeting
     * @param {string} target.key
     *        Specifies the property to target. Must be either 'time' or 'segmentIndex'
     * @param {number} target.value
     *        The value to target for the specified key.
     * @return {Object}
     *         The sync-point nearest the target
     */
    ;

    _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
      // Seed the search with the first candidate, then keep whichever
      // candidate lies closest to the target value
      var bestSyncPoint = syncPoints[0].syncPoint;
      var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
      var bestStrategy = syncPoints[0].strategy;

      for (var i = 1; i < syncPoints.length; i++) {
        var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);

        if (newDistance < bestDistance) {
          bestDistance = newDistance;
          bestSyncPoint = syncPoints[i].syncPoint;
          bestStrategy = syncPoints[i].strategy;
        }
      }

      this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex) + (typeof bestSyncPoint.partIndex === 'number' ? ",partIndex:" + bestSyncPoint.partIndex : '') + ']');
      return bestSyncPoint;
    }
    /**
     * Save any meta-data present on the segments when segments leave
     * the live window to the playlist to allow for synchronization at the
     * playlist level later.
     *
     * @param {Playlist} oldPlaylist - The previous active playlist
     * @param {Playlist} newPlaylist - The updated and most current playlist
     */
    ;

    _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
      var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps

      if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
        videojs__default["default"].log.warn("Not saving expired segment info. Media sequence gap " + mediaSequenceDiff + " is too large.");
        return;
      } // When a segment expires from the playlist and it has a start time
      // save that information as a possible sync-point reference in future


      // Walk backwards from the most recently expired segment so we keep the
      // latest expired segment that has a known start time
      for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
        var lastRemovedSegment = oldPlaylist.segments[i];

        if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
          newPlaylist.syncInfo = {
            mediaSequence: oldPlaylist.mediaSequence + i,
            time: lastRemovedSegment.start
          };
          this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
          this.trigger('syncinfoupdate');
          break;
        }
      }
    }
    /**
     * Save the mapping from playlist's ProgramDateTime to display. This should only happen
     * before segments start to load.
     *
     * @param {Playlist} playlist - The currently active playlist
     */
    ;

    _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
      // It's possible for the playlist to be updated before playback starts, meaning time
      // zero is not yet set. If, during these playlist refreshes, a discontinuity is
      // crossed, then the old time zero mapping (for the prior timeline) would be retained
      // unless the mappings are cleared.
      this.timelineToDatetimeMappings = {};

      if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
        var firstSegment = playlist.segments[0];
        var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
        // Save the negated epoch time (in seconds) of the first segment's
        // ProgramDateTime, keyed by that segment's timeline
        this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
      }
    }
    /**
     * Calculates and saves timeline mappings, playlist sync info, and segment timing values
     * based on the latest timing information.
     *
     * @param {Object} options
     *        Options object
     * @param {SegmentInfo} options.segmentInfo
     *        The current active request information
     * @param {boolean} options.shouldSaveTimelineMapping
     *        If there's a timeline change, determines if the timeline mapping should be
     *        saved for timeline mapping and program date time mappings.
     */
    ;

    _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
      var segmentInfo = _ref.segmentInfo,
          shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
      var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
      var segment = segmentInfo.segment;

      if (didCalculateSegmentTimeMapping) {
        this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
        // now with segment timing information

        if (!segmentInfo.playlist.syncInfo) {
          segmentInfo.playlist.syncInfo = {
            mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
            time: segment.start
          };
        }
      }

      var dateTime = segment.dateTimeObject;

      if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
        // Save a datetime mapping (negated epoch seconds) for this segment's
        // timeline when a discontinuity segment carries ProgramDateTime
        this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
      }
    };
26482
26483 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
26484 if (typeof this.timelines[timeline] === 'undefined') {
26485 return null;
26486 }
26487
26488 return this.timelines[timeline].time;
26489 };
26490
26491 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
26492 if (typeof this.timelines[timeline] === 'undefined') {
26493 return null;
26494 }
26495
26496 return this.timelines[timeline].mapping;
26497 }
    /**
     * Use the "media time" for a segment to generate a mapping to "display time" and
     * save that display time to the segment.
     *
     * @private
     * @param {SegmentInfo} segmentInfo
     *        The current active request information
     * @param {Object} timingInfo
     *        The start and end time of the current segment in "media time"
     * @param {boolean} shouldSaveTimelineMapping
     *        If there's a timeline change, determines if the timeline mapping should be
     *        saved in timelines.
     * @return {boolean}
     *         Returns false if segment time mapping could not be calculated
     */
    ;

    _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
      // TODO: remove side effects
      var segment = segmentInfo.segment;
      var part = segmentInfo.part;
      var mappingObj = this.timelines[segmentInfo.timeline];
      var start;
      var end;

      if (typeof segmentInfo.timestampOffset === 'number') {
        // A timestampOffset was set for this request, so establish a fresh
        // mapping where display time = media time + mapping
        mappingObj = {
          time: segmentInfo.startOfSegment,
          mapping: segmentInfo.startOfSegment - timingInfo.start
        };

        if (shouldSaveTimelineMapping) {
          this.timelines[segmentInfo.timeline] = mappingObj;
          this.trigger('timestampoffset');
          this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
        }

        start = segmentInfo.startOfSegment;
        end = timingInfo.end + mappingObj.mapping;
      } else if (mappingObj) {
        // Reuse the mapping previously saved for this timeline
        start = timingInfo.start + mappingObj.mapping;
        end = timingInfo.end + mappingObj.mapping;
      } else {
        // No mapping available for this timeline; nothing can be calculated
        return false;
      }

      if (part) {
        part.start = start;
        part.end = end;
      } // If we don't have a segment start yet or the start value we got
      // is less than our current segment.start value, save a new start value.
      // We have to do this because parts will have segment timing info saved
      // multiple times and we want segment start to be the earliest part start
      // value for that segment.


      if (!segment.start || start < segment.start) {
        segment.start = start;
      }

      segment.end = end;
      return true;
    }
    /**
     * Each time we have discontinuity in the playlist, attempt to calculate the location
     * in display of the start of the discontinuity and save that. We also save an accuracy
     * value so that we save values with the most accuracy (closest to 0.)
     *
     * @private
     * @param {SegmentInfo} segmentInfo - The current active request information
     */
    ;

    _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
      var playlist = segmentInfo.playlist;
      var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
      // the range starts, and its accuracy is 0 (greater accuracy values
      // mean more approximation)

      if (segment.discontinuity) {
        this.discontinuities[segment.timeline] = {
          time: segment.start,
          accuracy: 0
        };
      } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
        // Search for future discontinuities that we can provide better timing
        // information for and save that information for sync purposes
        for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
          var segmentIndex = playlist.discontinuityStarts[i];
          var discontinuity = playlist.discontinuitySequence + i + 1;
          var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
          var accuracy = Math.abs(mediaIndexDiff); // Only overwrite a saved entry when this estimate is closer to the
          // discontinuity (a smaller accuracy value) than what was saved

          if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
            var time = void 0;

            if (mediaIndexDiff < 0) {
              // Discontinuity is before the current segment: walk segment
              // durations backwards from this segment's start
              time = segment.start - sumDurations({
                defaultDuration: playlist.targetDuration,
                durationList: playlist.segments,
                startIndex: segmentInfo.mediaIndex,
                endIndex: segmentIndex
              });
            } else {
              // Discontinuity is after the current segment: walk segment
              // durations forwards from this segment's end
              time = segment.end + sumDurations({
                defaultDuration: playlist.targetDuration,
                durationList: playlist.segments,
                startIndex: segmentInfo.mediaIndex + 1,
                endIndex: segmentIndex
              });
            }

            this.discontinuities[discontinuity] = {
              time: time,
              accuracy: accuracy
            };
          }
        }
      }
    };
26618
    /**
     * Fire a final `dispose` event and remove all event listeners.
     */
    _proto.dispose = function dispose() {
      this.trigger('dispose');
      this.off();
    };

    return SyncController;
  }(videojs__default["default"].EventTarget);
26626
26627 /**
26628 * The TimelineChangeController acts as a source for segment loaders to listen for and
26629 * keep track of latest and pending timeline changes. This is useful to ensure proper
26630 * sync, as each loader may need to make a consideration for what timeline the other
26631 * loader is on before making changes which could impact the other loader's media.
26632 *
26633 * @class TimelineChangeController
26634 * @extends videojs.EventTarget
26635 */
26636
26637 var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
26638 inheritsLoose(TimelineChangeController, _videojs$EventTarget);
26639
26640 function TimelineChangeController() {
26641 var _this;
26642
26643 _this = _videojs$EventTarget.call(this) || this;
26644 _this.pendingTimelineChanges_ = {};
26645 _this.lastTimelineChanges_ = {};
26646 return _this;
26647 }
26648
26649 var _proto = TimelineChangeController.prototype;
26650
26651 _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
26652 this.pendingTimelineChanges_[type] = null;
26653 this.trigger('pendingtimelinechange');
26654 };
26655
26656 _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
26657 var type = _ref.type,
26658 from = _ref.from,
26659 to = _ref.to;
26660
26661 if (typeof from === 'number' && typeof to === 'number') {
26662 this.pendingTimelineChanges_[type] = {
26663 type: type,
26664 from: from,
26665 to: to
26666 };
26667 this.trigger('pendingtimelinechange');
26668 }
26669
26670 return this.pendingTimelineChanges_[type];
26671 };
26672
26673 _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
26674 var type = _ref2.type,
26675 from = _ref2.from,
26676 to = _ref2.to;
26677
26678 if (typeof from === 'number' && typeof to === 'number') {
26679 this.lastTimelineChanges_[type] = {
26680 type: type,
26681 from: from,
26682 to: to
26683 };
26684 delete this.pendingTimelineChanges_[type];
26685 this.trigger('timelinechange');
26686 }
26687
26688 return this.lastTimelineChanges_[type];
26689 };
26690
26691 _proto.dispose = function dispose() {
26692 this.trigger('dispose');
26693 this.pendingTimelineChanges_ = {};
26694 this.lastTimelineChanges_ = {};
26695 this.off();
26696 };
26697
26698 return TimelineChangeController;
26699 }(videojs__default["default"].EventTarget);
26700
26701 /* rollup-plugin-worker-factory start for worker!/Users/ddashkevich/projects/vhs-release/src/decrypter-worker.js */
26702 var workerCode = transform(function (self) {
26703
26704 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
26705
    // Wraps a CommonJS module body so it can execute inside this bundle:
    // builds a synthetic `module` object, runs `fn` against it, and returns
    // the populated `module.exports`.
    function createCommonjsModule(fn, basedir, module) {
      return module = {
        path: basedir,
        exports: {},
        require: function require(path, base) {
          return commonjsRequire(path, base === undefined || base === null ? module.path : base);
        }
      }, fn(module, module.exports), module.exports;
    }
26715
    // Placeholder for dynamic `require` calls, which the bundler cannot
    // resolve at build time; always throws.
    function commonjsRequire() {
      throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
    }
26719
    // Babel helper: defines prototype/static properties to implement `class`
    // semantics in ES5 output.
    var createClass = createCommonjsModule(function (module) {
      function _defineProperties(target, props) {
        for (var i = 0; i < props.length; i++) {
          var descriptor = props[i];
          descriptor.enumerable = descriptor.enumerable || false;
          descriptor.configurable = true;
          if ("value" in descriptor) descriptor.writable = true;
          Object.defineProperty(target, descriptor.key, descriptor);
        }
      }

      function _createClass(Constructor, protoProps, staticProps) {
        if (protoProps) _defineProperties(Constructor.prototype, protoProps);
        if (staticProps) _defineProperties(Constructor, staticProps);
        return Constructor;
      }

      module.exports = _createClass;
      module.exports["default"] = module.exports, module.exports.__esModule = true;
    });
    // Babel helper: `Object.setPrototypeOf` with an `__proto__` assignment
    // fallback for older engines.
    var setPrototypeOf = createCommonjsModule(function (module) {
      function _setPrototypeOf(o, p) {
        module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
          o.__proto__ = p;
          return o;
        };

        module.exports["default"] = module.exports, module.exports.__esModule = true;
        return _setPrototypeOf(o, p);
      }

      module.exports = _setPrototypeOf;
      module.exports["default"] = module.exports, module.exports.__esModule = true;
    });
    // Babel helper: "loose" subclassing — links the prototype chains of the
    // subclass instance and constructor to the superclass.
    var inheritsLoose = createCommonjsModule(function (module) {
      function _inheritsLoose(subClass, superClass) {
        subClass.prototype = Object.create(superClass.prototype);
        subClass.prototype.constructor = subClass;
        setPrototypeOf(subClass, superClass);
      }

      module.exports = _inheritsLoose;
      module.exports["default"] = module.exports, module.exports.__esModule = true;
    });
26764 /**
26765 * @file stream.js
26766 */
26767
26768 /**
     * A lightweight readable stream implementation that handles event dispatching.
26770 *
26771 * @class Stream
26772 */
26773
26774 var Stream = /*#__PURE__*/function () {
26775 function Stream() {
26776 this.listeners = {};
26777 }
26778 /**
26779 * Add a listener for a specified event type.
26780 *
26781 * @param {string} type the event name
26782 * @param {Function} listener the callback to be invoked when an event of
26783 * the specified type occurs
26784 */
26785
26786
26787 var _proto = Stream.prototype;
26788
26789 _proto.on = function on(type, listener) {
26790 if (!this.listeners[type]) {
26791 this.listeners[type] = [];
26792 }
26793
26794 this.listeners[type].push(listener);
26795 }
26796 /**
26797 * Remove a listener for a specified event type.
26798 *
26799 * @param {string} type the event name
26800 * @param {Function} listener a function previously registered for this
26801 * type of event through `on`
26802 * @return {boolean} if we could turn it off or not
26803 */
26804 ;
26805
26806 _proto.off = function off(type, listener) {
26807 if (!this.listeners[type]) {
26808 return false;
26809 }
26810
26811 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
26812 // In Video.js we slice listener functions
26813 // on trigger so that it does not mess up the order
26814 // while we loop through.
26815 //
26816 // Here we slice on off so that the loop in trigger
26817 // can continue using it's old reference to loop without
26818 // messing up the order.
26819
26820 this.listeners[type] = this.listeners[type].slice(0);
26821 this.listeners[type].splice(index, 1);
26822 return index > -1;
26823 }
26824 /**
26825 * Trigger an event of the specified type on this stream. Any additional
26826 * arguments to this function are passed as parameters to event listeners.
26827 *
26828 * @param {string} type the event name
26829 */
26830 ;
26831
26832 _proto.trigger = function trigger(type) {
26833 var callbacks = this.listeners[type];
26834
26835 if (!callbacks) {
26836 return;
26837 } // Slicing the arguments on every invocation of this method
26838 // can add a significant amount of overhead. Avoid the
26839 // intermediate object creation for the common case of a
26840 // single callback argument
26841
26842
26843 if (arguments.length === 2) {
26844 var length = callbacks.length;
26845
26846 for (var i = 0; i < length; ++i) {
26847 callbacks[i].call(this, arguments[1]);
26848 }
26849 } else {
26850 var args = Array.prototype.slice.call(arguments, 1);
26851 var _length = callbacks.length;
26852
26853 for (var _i = 0; _i < _length; ++_i) {
26854 callbacks[_i].apply(this, args);
26855 }
26856 }
26857 }
26858 /**
26859 * Destroys the stream and cleans up.
26860 */
26861 ;
26862
26863 _proto.dispose = function dispose() {
26864 this.listeners = {};
26865 }
26866 /**
26867 * Forwards all `data` events on this stream to the destination stream. The
26868 * destination stream should provide a method `push` to receive the data
26869 * events as they arrive.
26870 *
26871 * @param {Stream} destination the stream that will receive all `data` events
26872 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
26873 */
26874 ;
26875
26876 _proto.pipe = function pipe(destination) {
26877 this.on('data', function (data) {
26878 destination.push(data);
26879 });
26880 };
26881
26882 return Stream;
26883 }();
26884 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
26885
26886 /**
26887 * Returns the subarray of a Uint8Array without PKCS#7 padding.
26888 *
26889 * @param padded {Uint8Array} unencrypted bytes that have been padded
26890 * @return {Uint8Array} the unpadded bytes
26891 * @see http://tools.ietf.org/html/rfc5652
26892 */
26893
26894
26895 function unpad(padded) {
26896 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
26897 }
26898 /*! @name aes-decrypter @version 3.1.3 @license Apache-2.0 */
26899
26900 /**
26901 * @file aes.js
26902 *
26903 * This file contains an adaptation of the AES decryption algorithm
   * from the Stanford Javascript Cryptography Library. That work is
26905 * covered by the following copyright and permissions notice:
26906 *
26907 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
26908 * All rights reserved.
26909 *
26910 * Redistribution and use in source and binary forms, with or without
26911 * modification, are permitted provided that the following conditions are
26912 * met:
26913 *
26914 * 1. Redistributions of source code must retain the above copyright
26915 * notice, this list of conditions and the following disclaimer.
26916 *
26917 * 2. Redistributions in binary form must reproduce the above
26918 * copyright notice, this list of conditions and the following
26919 * disclaimer in the documentation and/or other materials provided
26920 * with the distribution.
26921 *
26922 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
26923 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
26924 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
26925 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
26926 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
26927 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
26928 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
26929 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
26930 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26931 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26932 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26933 *
26934 * The views and conclusions contained in the software and documentation
26935 * are those of the authors and should not be interpreted as representing
26936 * official policies, either expressed or implied, of the authors.
26937 */
26938
26939 /**
26940 * Expand the S-box tables.
26941 *
26942 * @private
26943 */
26944
26945
    // Builds the AES lookup tables: tables[0] holds the four round-transform
    // tables plus the S-box for encryption, tables[1] the same for decryption.
    var precompute = function precompute() {
      var tables = [[[], [], [], [], []], [[], [], [], [], []]];
      var encTable = tables[0];
      var decTable = tables[1];
      var sbox = encTable[4];
      var sboxInv = decTable[4];
      var i;
      var x;
      var xInv;
      var d = [];
      var th = [];
      var x2;
      var x4;
      var x8;
      var s;
      var tEnc;
      var tDec; // Compute double and third tables

      for (i = 0; i < 256; i++) {
        th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
      }

      for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
        // Compute sbox
        s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
        s = s >> 8 ^ s & 255 ^ 99;
        sbox[x] = s;
        sboxInv[s] = x; // Compute MixColumns

        x8 = d[x4 = d[x2 = d[x]]];
        tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
        tEnc = d[s] * 0x101 ^ s * 0x1010100;

        for (i = 0; i < 4; i++) {
          encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
          decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
        }
      } // Compactify. Considerable speedup on Firefox.


      for (i = 0; i < 5; i++) {
        encTable[i] = encTable[i].slice(0);
        decTable[i] = decTable[i].slice(0);
      }

      return tables;
    };
26993
26994 var aesTables = null;
26995 /**
26996 * Schedule out an AES key for both encryption and decryption. This
26997 * is a low-level class. Use a cipher mode to do bulk encryption.
26998 *
26999 * @class AES
27000 * @param key {Array} The key as an array of 4, 6 or 8 words.
27001 */
27002
    var AES = /*#__PURE__*/function () {
      function AES(key) {
        /**
         * The expanded S-box and inverse S-box tables. These will be computed
         * on the client so that we don't have to send them down the wire.
         *
         * There are two tables, _tables[0] is for encryption and
         * _tables[1] is for decryption.
         *
         * The first 4 sub-tables are the expanded S-box with MixColumns. The
         * last (_tables[01][4]) is the S-box itself.
         *
         * @private
         */
        // if we have yet to precompute the S-box tables
        // do so now
        if (!aesTables) {
          aesTables = precompute();
        } // then make a copy of that object for use


        this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
        var i;
        var j;
        var tmp;
        var sbox = this._tables[0][4];
        var decTable = this._tables[1];
        var keyLen = key.length;
        var rcon = 1;

        // 4/6/8 32-bit words correspond to AES-128/192/256 keys
        if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
          throw new Error('Invalid aes key size');
        }

        var encKey = key.slice(0);
        var decKey = [];
        this._key = [encKey, decKey]; // schedule encryption keys

        for (i = keyLen; i < 4 * keyLen + 28; i++) {
          tmp = encKey[i - 1]; // apply sbox

          if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
            tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon

            if (i % keyLen === 0) {
              tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
              rcon = rcon << 1 ^ (rcon >> 7) * 283;
            }
          }

          encKey[i] = encKey[i - keyLen] ^ tmp;
        } // schedule decryption keys


        for (j = 0; i; j++, i--) {
          tmp = encKey[j & 3 ? i : i - 4];

          if (i <= 4 || j < 4) {
            decKey[j] = tmp;
          } else {
            decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
          }
        }
      }
      /**
       * Decrypt 16 bytes, specified as four 32-bit words.
       *
       * @param {number} encrypted0 the first word to decrypt
       * @param {number} encrypted1 the second word to decrypt
       * @param {number} encrypted2 the third word to decrypt
       * @param {number} encrypted3 the fourth word to decrypt
       * @param {Int32Array} out the array to write the decrypted words
       * into
       * @param {number} offset the offset into the output array to start
       * writing results
       * @return {Array} The plaintext.
       */


      var _proto = AES.prototype;

      _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
        var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data

        var a = encrypted0 ^ key[0];
        var b = encrypted3 ^ key[1];
        var c = encrypted2 ^ key[2];
        var d = encrypted1 ^ key[3];
        var a2;
        var b2;
        var c2; // key.length === 2 ?

        var nInnerRounds = key.length / 4 - 2;
        var i;
        var kIndex = 4;
        var table = this._tables[1]; // load up the tables

        var table0 = table[0];
        var table1 = table[1];
        var table2 = table[2];
        var table3 = table[3];
        var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.

        for (i = 0; i < nInnerRounds; i++) {
          a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
          b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
          c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
          d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
          kIndex += 4;
          a = a2;
          b = b2;
          c = c2;
        } // Last round.


        for (i = 0; i < 4; i++) {
          out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
          a2 = a;
          a = b;
          b = c;
          c = d;
          d = a2;
        }
      };

      return AES;
    }();
27130 /**
27131 * A wrapper around the Stream class to use setTimeout
27132 * and run stream "jobs" Asynchronously
27133 *
27134 * @class AsyncStream
27135 * @extends Stream
27136 */
27137
27138
27139 var AsyncStream = /*#__PURE__*/function (_Stream) {
27140 inheritsLoose(AsyncStream, _Stream);
27141
27142 function AsyncStream() {
27143 var _this;
27144
27145 _this = _Stream.call(this, Stream) || this;
27146 _this.jobs = [];
27147 _this.delay = 1;
27148 _this.timeout_ = null;
27149 return _this;
27150 }
27151 /**
27152 * process an async job
27153 *
27154 * @private
27155 */
27156
27157
27158 var _proto = AsyncStream.prototype;
27159
27160 _proto.processJob_ = function processJob_() {
27161 this.jobs.shift()();
27162
27163 if (this.jobs.length) {
27164 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
27165 } else {
27166 this.timeout_ = null;
27167 }
27168 }
27169 /**
27170 * push a job into the stream
27171 *
27172 * @param {Function} job the job to push into the stream
27173 */
27174 ;
27175
27176 _proto.push = function push(job) {
27177 this.jobs.push(job);
27178
27179 if (!this.timeout_) {
27180 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
27181 }
27182 };
27183
27184 return AsyncStream;
27185 }(Stream);
27186 /**
27187 * Convert network-order (big-endian) bytes into their little-endian
27188 * representation.
27189 */
27190
27191
27192 var ntoh = function ntoh(word) {
27193 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
27194 };
27195 /**
27196 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
27197 *
27198 * @param {Uint8Array} encrypted the encrypted bytes
27199 * @param {Uint32Array} key the bytes of the decryption key
27200 * @param {Uint32Array} initVector the initialization vector (IV) to
27201 * use for the first round of CBC.
27202 * @return {Uint8Array} the decrypted bytes
27203 *
27204 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
27205 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
27206 * @see https://tools.ietf.org/html/rfc2315
27207 */
27208
27209
  var decrypt = function decrypt(encrypted, key, initVector) {
    // word-level access to the encrypted bytes
    // NOTE(review): byteLength >> 2 truncates — callers are assumed to pass
    // a byteLength that is a multiple of 16 (the AES block size); confirm.
    var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
    var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output

    var decrypted = new Uint8Array(encrypted.byteLength);
    var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
    // decrypted data

    var init0;
    var init1;
    var init2;
    var init3;
    var encrypted0;
    var encrypted1;
    var encrypted2;
    var encrypted3; // iteration variable

    var wordIx; // pull out the words of the IV to ensure we don't modify the
    // passed-in reference and easier access

    init0 = initVector[0];
    init1 = initVector[1];
    init2 = initVector[2];
    init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
    // to each decrypted block

    for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
      // convert big-endian (network order) words into little-endian
      // (javascript order)
      encrypted0 = ntoh(encrypted32[wordIx]);
      encrypted1 = ntoh(encrypted32[wordIx + 1]);
      encrypted2 = ntoh(encrypted32[wordIx + 2]);
      encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block

      decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
      // plaintext

      decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
      decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
      decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
      decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round:
      // in CBC the IV for block N+1 is the CIPHERTEXT of block N

      init0 = encrypted0;
      init1 = encrypted1;
      init2 = encrypted2;
      init3 = encrypted3;
    }

    return decrypted;
  };
27261 /**
27262 * The `Decrypter` class that manages decryption of AES
27263 * data through `AsyncStream` objects and the `decrypt`
27264 * function
27265 *
27266 * @param {Uint8Array} encrypted the encrypted bytes
27267 * @param {Uint32Array} key the bytes of the decryption key
27268 * @param {Uint32Array} initVector the initialization vector (IV) to
27269 * @param {Function} done the function to run when done
27270 * @class Decrypter
27271 */
27272
27273
  var Decrypter = /*#__PURE__*/function () {
    /**
     * @param {Uint8Array} encrypted the encrypted bytes
     * @param {Uint32Array} key the bytes of the decryption key
     * @param {Uint32Array} initVector the initialization vector (IV)
     * @param {Function} done error-first callback invoked with the
     *        unpadded plaintext once every chunk has been decrypted
     */
    function Decrypter(encrypted, key, initVector, done) {
      var step = Decrypter.STEP;
      var encrypted32 = new Int32Array(encrypted.buffer);
      var decrypted = new Uint8Array(encrypted.byteLength);
      var i = 0;
      this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously

      this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));

      // each later chunk's IV is the last ciphertext block of the previous
      // chunk, preserving CBC chaining across chunk boundaries
      for (i = step; i < encrypted32.length; i += step) {
        initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
        this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
      } // invoke the done() callback when everything is finished


      this.asyncStream_.push(function () {
        // remove pkcs#7 padding from the decrypted bytes
        done(null, unpad(decrypted));
      });
    }
    /**
     * a getter for step the maximum number of bytes to process at one time
     *
     * NOTE(review): STEP is used to index an Int32Array, so it is actually a
     * count of 32-bit words (32000 words = 128000 bytes), despite the
     * "bytes" wording — confirm before relying on it.
     *
     * @return {number} the value of step 32000
     */


    var _proto = Decrypter.prototype;
    /**
     * Build an AsyncStream job that decrypts one chunk and writes the
     * plaintext into the shared `decrypted` output at the chunk's offset.
     *
     * @private
     */

    _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
      return function () {
        var bytes = decrypt(encrypted, key, initVector);
        decrypted.set(bytes, encrypted.byteOffset);
      };
    };

    createClass(Decrypter, null, [{
      key: "STEP",
      get: function get() {
        // 4 * 8000;
        return 32000;
      }
    }]);
    return Decrypter;
  }();
27323
27324 var win;
27325
27326 if (typeof window !== "undefined") {
27327 win = window;
27328 } else if (typeof commonjsGlobal !== "undefined") {
27329 win = commonjsGlobal;
27330 } else if (typeof self !== "undefined") {
27331 win = self;
27332 } else {
27333 win = {};
27334 }
27335
27336 var window_1 = win;
27337
27338 var isArrayBufferView = function isArrayBufferView(obj) {
27339 if (ArrayBuffer.isView === 'function') {
27340 return ArrayBuffer.isView(obj);
27341 }
27342
27343 return obj && obj.buffer instanceof ArrayBuffer;
27344 };
27345
  // BigInt alias with a Number fallback for environments without BigInt
  // support (Number('0x...') parses the same hex-string literals below).
  var BigInt = window_1.BigInt || Number;
  // NOTE(review): the two module-scope expressions below are evaluated but
  // their results are discarded — this appears to be tree-shaken bundler
  // residue (powers-of-256 table and an endianness probe).
  [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];

  // Endianness probe: writes a 16-bit value and inspects its first byte.
  (function () {
    var a = new Uint16Array([0xFFCC]);
    var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);

    if (b[0] === 0xFF) {
      return 'big';
    }

    if (b[0] === 0xCC) {
      return 'little';
    }

    return 'unknown';
  })();
27363 /**
27364 * Creates an object for sending to a web worker modifying properties that are TypedArrays
27365 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
27366 *
27367 * @param {Object} message
27368 * Object of properties and values to send to the web worker
27369 * @return {Object}
27370 * Modified message with TypedArray values expanded
27371 * @function createTransferableMessage
27372 */
27373
27374
27375 var createTransferableMessage = function createTransferableMessage(message) {
27376 var transferable = {};
27377 Object.keys(message).forEach(function (key) {
27378 var value = message[key];
27379
27380 if (isArrayBufferView(value)) {
27381 transferable[key] = {
27382 bytes: value.buffer,
27383 byteOffset: value.byteOffset,
27384 byteLength: value.byteLength
27385 };
27386 } else {
27387 transferable[key] = value;
27388 }
27389 });
27390 return transferable;
27391 };
27392 /* global self */
27393
27394 /**
27395 * Our web worker interface so that things can talk to aes-decrypter
27396 * that will be running in a web worker. the scope is passed to this by
27397 * webworkify.
27398 */
27399
27400
  /**
   * Worker message handler: rebuilds typed-array views from the
   * transferred buffer descriptors, decrypts, and posts the plaintext back
   * to the main thread, transferring ownership of the result buffer.
   */
  self.onmessage = function (event) {
    var data = event.data;
    // encrypted/key/iv each arrive as { bytes, byteOffset, byteLength }
    // (the shape produced by createTransferableMessage)
    var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
    var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
    var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
    /* eslint-disable no-new, handle-callback-err */

    new Decrypter(encrypted, key, iv, function (err, bytes) {
      // second argument transfers bytes.buffer to the main thread
      // instead of copying it
      self.postMessage(createTransferableMessage({
        source: data.source,
        decrypted: bytes
      }), [bytes.buffer]);
    });
    /* eslint-enable */
  };
27416 });
27417 var Decrypter = factory(workerCode);
27418 /* rollup-plugin-worker-factory end for worker!/Users/ddashkevich/projects/vhs-release/src/decrypter-worker.js */
27419
27420 /**
27421 * Convert the properties of an HLS track into an audioTrackKind.
27422 *
27423 * @private
27424 */
27425
27426 var audioTrackKind_ = function audioTrackKind_(properties) {
27427 var kind = properties.default ? 'main' : 'alternative';
27428
27429 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
27430 kind = 'main-desc';
27431 }
27432
27433 return kind;
27434 };
27435 /**
27436 * Pause provided segment loader and playlist loader if active
27437 *
27438 * @param {SegmentLoader} segmentLoader
27439 * SegmentLoader to pause
27440 * @param {Object} mediaType
27441 * Active media type
27442 * @function stopLoaders
27443 */
27444
27445
27446 var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
27447 segmentLoader.abort();
27448 segmentLoader.pause();
27449
27450 if (mediaType && mediaType.activePlaylistLoader) {
27451 mediaType.activePlaylistLoader.pause();
27452 mediaType.activePlaylistLoader = null;
27453 }
27454 };
27455 /**
27456 * Start loading provided segment loader and playlist loader
27457 *
27458 * @param {PlaylistLoader} playlistLoader
27459 * PlaylistLoader to start loading
27460 * @param {Object} mediaType
27461 * Active media type
27462 * @function startLoaders
27463 */
27464
27465 var startLoaders = function startLoaders(playlistLoader, mediaType) {
27466 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
27467 // playlist loader
27468 mediaType.activePlaylistLoader = playlistLoader;
27469 playlistLoader.load();
27470 };
27471 /**
27472 * Returns a function to be called when the media group changes. It performs a
27473 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
27474 * change of group is merely a rendition switch of the same content at another encoding,
27475 * rather than a change of content, such as switching audio from English to Spanish.
27476 *
27477 * @param {string} type
27478 * MediaGroup type
27479 * @param {Object} settings
27480 * Object containing required information for media groups
27481 * @return {Function}
27482 * Handler for a non-destructive resync of SegmentLoader when the active media
27483 * group changes.
27484 * @function onGroupChanged
27485 */
27486
  var onGroupChanged = function onGroupChanged(type, settings) {
    return function () {
      var _settings$segmentLoad = settings.segmentLoaders,
          segmentLoader = _settings$segmentLoad[type],
          mainSegmentLoader = _settings$segmentLoad.main,
          mediaType = settings.mediaTypes[type];
      var activeTrack = mediaType.activeTrack();
      var activeGroup = mediaType.getActiveGroup();
      var previousActiveLoader = mediaType.activePlaylistLoader;
      var lastGroup = mediaType.lastGroup_; // the group did not change do nothing

      if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
        return;
      }

      // record the new group/track before touching the loaders so re-entrant
      // handlers observe the updated state
      mediaType.lastGroup_ = activeGroup;
      mediaType.lastTrack_ = activeTrack;
      stopLoaders(segmentLoader, mediaType);

      if (!activeGroup || activeGroup.isMasterPlaylist) {
        // there is no group active or active group is a main playlist and won't change
        return;
      }

      if (!activeGroup.playlistLoader) {
        if (previousActiveLoader) {
          // The previous group had a playlist loader but the new active group does not
          // this means we are switching from demuxed to muxed audio. In this case we want to
          // do a destructive reset of the main segment loader and not restart the audio
          // loaders.
          mainSegmentLoader.resetEverything();
        }

        return;
      } // Non-destructive resync


      segmentLoader.resyncLoader();
      startLoaders(activeGroup.playlistLoader, mediaType);
    };
  };
27528 var onGroupChanging = function onGroupChanging(type, settings) {
27529 return function () {
27530 var segmentLoader = settings.segmentLoaders[type],
27531 mediaType = settings.mediaTypes[type];
27532 mediaType.lastGroup_ = null;
27533 segmentLoader.abort();
27534 segmentLoader.pause();
27535 };
27536 };
27537 /**
27538 * Returns a function to be called when the media track changes. It performs a
27539 * destructive reset of the SegmentLoader to ensure we start loading as close to
27540 * currentTime as possible.
27541 *
27542 * @param {string} type
27543 * MediaGroup type
27544 * @param {Object} settings
27545 * Object containing required information for media groups
27546 * @return {Function}
27547 * Handler for a destructive reset of SegmentLoader when the active media
27548 * track changes.
27549 * @function onTrackChanged
27550 */
27551
  var onTrackChanged = function onTrackChanged(type, settings) {
    return function () {
      var masterPlaylistLoader = settings.masterPlaylistLoader,
          _settings$segmentLoad2 = settings.segmentLoaders,
          segmentLoader = _settings$segmentLoad2[type],
          mainSegmentLoader = _settings$segmentLoad2.main,
          mediaType = settings.mediaTypes[type];
      var activeTrack = mediaType.activeTrack();
      var activeGroup = mediaType.getActiveGroup();
      var previousActiveLoader = mediaType.activePlaylistLoader;
      var lastTrack = mediaType.lastTrack_; // track did not change, do nothing

      if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
        return;
      }

      // record the new group/track before stopping the loaders so re-entrant
      // handlers observe the updated state
      mediaType.lastGroup_ = activeGroup;
      mediaType.lastTrack_ = activeTrack;
      stopLoaders(segmentLoader, mediaType);

      if (!activeGroup) {
        // there is no group active so we do not want to restart loaders
        return;
      }

      if (activeGroup.isMasterPlaylist) {
        // track did not change, do nothing
        if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
          return;
        }

        var mpc = settings.vhs.masterPlaylistController_;
        var newPlaylist = mpc.selectPlaylist(); // media will not change do nothing

        if (mpc.media() === newPlaylist) {
          return;
        }

        mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
        masterPlaylistLoader.pause();
        mainSegmentLoader.resetEverything();
        mpc.fastQualityChange_(newPlaylist);
        return;
      }

      if (type === 'AUDIO') {
        if (!activeGroup.playlistLoader) {
          // when switching from demuxed audio/video to muxed audio/video (noted by no
          // playlist loader for the audio group), we want to do a destructive reset of the
          // main segment loader and not restart the audio loaders
          mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
          // it should be stopped

          mainSegmentLoader.resetEverything();
          return;
        } // although the segment loader is an audio segment loader, call the setAudio
        // function to ensure it is prepared to re-append the init segment (or handle other
        // config changes)


        segmentLoader.setAudio(true);
        mainSegmentLoader.setAudio(false);
      }

      if (previousActiveLoader === activeGroup.playlistLoader) {
        // Nothing has actually changed. This can happen because track change events can fire
        // multiple times for a "single" change. One for enabling the new active track, and
        // one for disabling the track that was active
        startLoaders(activeGroup.playlistLoader, mediaType);
        return;
      }

      if (segmentLoader.track) {
        // For WebVTT, set the new text track in the segmentloader
        segmentLoader.track(activeTrack);
      } // destructive reset


      segmentLoader.resetEverything();
      startLoaders(activeGroup.playlistLoader, mediaType);
    };
  };
27634 var onError = {
27635 /**
27636 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
27637 * an error.
27638 *
27639 * @param {string} type
27640 * MediaGroup type
27641 * @param {Object} settings
27642 * Object containing required information for media groups
27643 * @return {Function}
27644 * Error handler. Logs warning (or error if the playlist is blacklisted) to
27645 * console and switches back to default audio track.
27646 * @function onError.AUDIO
27647 */
27648 AUDIO: function AUDIO(type, settings) {
27649 return function () {
27650 var segmentLoader = settings.segmentLoaders[type],
27651 mediaType = settings.mediaTypes[type],
27652 blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
27653 stopLoaders(segmentLoader, mediaType); // switch back to default audio track
27654
27655 var activeTrack = mediaType.activeTrack();
27656 var activeGroup = mediaType.activeGroup();
27657 var id = (activeGroup.filter(function (group) {
27658 return group.default;
27659 })[0] || activeGroup[0]).id;
27660 var defaultTrack = mediaType.tracks[id];
27661
27662 if (activeTrack === defaultTrack) {
27663 // Default track encountered an error. All we can do now is blacklist the current
27664 // rendition and hope another will switch audio groups
27665 blacklistCurrentPlaylist({
27666 message: 'Problem encountered loading the default audio track.'
27667 });
27668 return;
27669 }
27670
27671 videojs__default["default"].log.warn('Problem encountered loading the alternate audio track.' + 'Switching back to default.');
27672
27673 for (var trackId in mediaType.tracks) {
27674 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
27675 }
27676
27677 mediaType.onTrackChanged();
27678 };
27679 },
27680
27681 /**
27682 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
27683 * an error.
27684 *
27685 * @param {string} type
27686 * MediaGroup type
27687 * @param {Object} settings
27688 * Object containing required information for media groups
27689 * @return {Function}
27690 * Error handler. Logs warning to console and disables the active subtitle track
27691 * @function onError.SUBTITLES
27692 */
27693 SUBTITLES: function SUBTITLES(type, settings) {
27694 return function () {
27695 var segmentLoader = settings.segmentLoaders[type],
27696 mediaType = settings.mediaTypes[type];
27697 videojs__default["default"].log.warn('Problem encountered loading the subtitle track.' + 'Disabling subtitle track.');
27698 stopLoaders(segmentLoader, mediaType);
27699 var track = mediaType.activeTrack();
27700
27701 if (track) {
27702 track.mode = 'disabled';
27703 }
27704
27705 mediaType.onTrackChanged();
27706 };
27707 }
27708 };
  var setupListeners = {
    /**
     * Setup event listeners for audio playlist loader
     *
     * @param {string} type
     *        MediaGroup type
     * @param {PlaylistLoader|null} playlistLoader
     *        PlaylistLoader to register listeners on
     * @param {Object} settings
     *        Object containing required information for media groups
     * @function setupListeners.AUDIO
     */
    AUDIO: function AUDIO(type, playlistLoader, settings) {
      if (!playlistLoader) {
        // no playlist loader means audio will be muxed with the video
        return;
      }

      var tech = settings.tech,
          requestOptions = settings.requestOptions,
          segmentLoader = settings.segmentLoaders[type];
      playlistLoader.on('loadedmetadata', function () {
        var media = playlistLoader.media();
        segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
        // permits, start downloading segments

        if (!tech.paused() || media.endList && tech.preload() !== 'none') {
          segmentLoader.load();
        }
      });
      playlistLoader.on('loadedplaylist', function () {
        segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running

        if (!tech.paused()) {
          segmentLoader.load();
        }
      });
      playlistLoader.on('error', onError[type](type, settings));
    },

    /**
     * Setup event listeners for subtitle playlist loader
     *
     * NOTE(review): unlike AUDIO above, there is no null guard here —
     * callers are expected to always provide a playlist loader for
     * subtitles; confirm.
     *
     * @param {string} type
     *        MediaGroup type
     * @param {PlaylistLoader|null} playlistLoader
     *        PlaylistLoader to register listeners on
     * @param {Object} settings
     *        Object containing required information for media groups
     * @function setupListeners.SUBTITLES
     */
    SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
      var tech = settings.tech,
          requestOptions = settings.requestOptions,
          segmentLoader = settings.segmentLoaders[type],
          mediaType = settings.mediaTypes[type];
      playlistLoader.on('loadedmetadata', function () {
        var media = playlistLoader.media();
        segmentLoader.playlist(media, requestOptions);
        segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
        // permits, start downloading segments

        if (!tech.paused() || media.endList && tech.preload() !== 'none') {
          segmentLoader.load();
        }
      });
      playlistLoader.on('loadedplaylist', function () {
        segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running

        if (!tech.paused()) {
          segmentLoader.load();
        }
      });
      playlistLoader.on('error', onError[type](type, settings));
    }
  };
  var initialize = {
    /**
     * Setup PlaylistLoaders and AudioTracks for the audio groups
     *
     * @param {string} type
     *        MediaGroup type
     * @param {Object} settings
     *        Object containing required information for media groups
     * @function initialize.AUDIO
     */
    'AUDIO': function AUDIO(type, settings) {
      var vhs = settings.vhs,
          sourceType = settings.sourceType,
          segmentLoader = settings.segmentLoaders[type],
          requestOptions = settings.requestOptions,
          mediaGroups = settings.master.mediaGroups,
          _settings$mediaTypes$ = settings.mediaTypes[type],
          groups = _settings$mediaTypes$.groups,
          tracks = _settings$mediaTypes$.tracks,
          logger_ = _settings$mediaTypes$.logger_,
          masterPlaylistLoader = settings.masterPlaylistLoader;
      var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none

      if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
        // synthesize a single muxed default group so the loops below always
        // have something to iterate
        mediaGroups[type] = {
          main: {
            default: {
              default: true
            }
          }
        };

        if (audioOnlyMaster) {
          mediaGroups[type].main.default.playlists = masterPlaylistLoader.master.playlists;
        }
      }

      for (var groupId in mediaGroups[type]) {
        if (!groups[groupId]) {
          groups[groupId] = [];
        }

        for (var variantLabel in mediaGroups[type][groupId]) {
          var properties = mediaGroups[type][groupId][variantLabel];
          var playlistLoader = void 0;

          if (audioOnlyMaster) {
            logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
            properties.isMasterPlaylist = true;
            playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
            // use the resolved media playlist object
          } else if (sourceType === 'vhs-json' && properties.playlists) {
            playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
          } else if (properties.resolvedUri) {
            playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
            // should we even have properties.playlists in this check.
          } else if (properties.playlists && sourceType === 'dash') {
            playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
          } else {
            // no resolvedUri means the audio is muxed with the video when using this
            // audio track
            playlistLoader = null;
          }

          properties = videojs__default["default"].mergeOptions({
            id: variantLabel,
            playlistLoader: playlistLoader
          }, properties);
          setupListeners[type](type, properties.playlistLoader, settings);
          groups[groupId].push(properties);

          if (typeof tracks[variantLabel] === 'undefined') {
            // one AudioTrack per variant label, shared across groups
            var track = new videojs__default["default"].AudioTrack({
              id: variantLabel,
              kind: audioTrackKind_(properties),
              enabled: false,
              language: properties.language,
              default: properties.default,
              label: variantLabel
            });
            tracks[variantLabel] = track;
          }
        }
      } // setup single error event handler for the segment loader


      segmentLoader.on('error', onError[type](type, settings));
    },

    /**
     * Setup PlaylistLoaders and TextTracks for the subtitle groups
     *
     * @param {string} type
     *        MediaGroup type
     * @param {Object} settings
     *        Object containing required information for media groups
     * @function initialize.SUBTITLES
     */
    'SUBTITLES': function SUBTITLES(type, settings) {
      var tech = settings.tech,
          vhs = settings.vhs,
          sourceType = settings.sourceType,
          segmentLoader = settings.segmentLoaders[type],
          requestOptions = settings.requestOptions,
          mediaGroups = settings.master.mediaGroups,
          _settings$mediaTypes$2 = settings.mediaTypes[type],
          groups = _settings$mediaTypes$2.groups,
          tracks = _settings$mediaTypes$2.tracks,
          masterPlaylistLoader = settings.masterPlaylistLoader;

      for (var groupId in mediaGroups[type]) {
        if (!groups[groupId]) {
          groups[groupId] = [];
        }

        for (var variantLabel in mediaGroups[type][groupId]) {
          if (mediaGroups[type][groupId][variantLabel].forced) {
            // Subtitle playlists with the forced attribute are not selectable in Safari.
            // According to Apple's HLS Authoring Specification:
            // If content has forced subtitles and regular subtitles in a given language,
            // the regular subtitles track in that language MUST contain both the forced
            // subtitles and the regular subtitles for that language.
            // Because of this requirement and that Safari does not add forced subtitles,
            // forced subtitles are skipped here to maintain consistent experience across
            // all platforms
            continue;
          }

          var properties = mediaGroups[type][groupId][variantLabel];
          var playlistLoader = void 0;

          if (sourceType === 'hls') {
            playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
          } else if (sourceType === 'dash') {
            var playlists = properties.playlists.filter(function (p) {
              return p.excludeUntil !== Infinity;
            });

            if (!playlists.length) {
              // NOTE(review): this `return` exits the whole initializer —
              // remaining subtitle variants/groups are left uninitialized and
              // the segment-loader error handler below is never attached.
              // A `continue` may have been intended; confirm upstream.
              return;
            }

            playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
          } else if (sourceType === 'vhs-json') {
            playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
            // as provided, otherwise use the resolved URI to load the playlist
            properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
          }

          properties = videojs__default["default"].mergeOptions({
            id: variantLabel,
            playlistLoader: playlistLoader
          }, properties);
          setupListeners[type](type, properties.playlistLoader, settings);
          groups[groupId].push(properties);

          if (typeof tracks[variantLabel] === 'undefined') {
            var track = tech.addRemoteTextTrack({
              id: variantLabel,
              kind: 'subtitles',
              default: properties.default && properties.autoselect,
              language: properties.language,
              label: variantLabel
            }, false).track;
            tracks[variantLabel] = track;
          }
        }
      } // setup single error event handler for the segment loader


      segmentLoader.on('error', onError[type](type, settings));
    },

    /**
     * Setup TextTracks for the closed-caption groups
     *
     * @param {String} type
     *        MediaGroup type
     * @param {Object} settings
     *        Object containing required information for media groups
     * @function initialize['CLOSED-CAPTIONS']
     */
    'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
      var tech = settings.tech,
          mediaGroups = settings.master.mediaGroups,
          _settings$mediaTypes$3 = settings.mediaTypes[type],
          groups = _settings$mediaTypes$3.groups,
          tracks = _settings$mediaTypes$3.tracks;

      for (var groupId in mediaGroups[type]) {
        if (!groups[groupId]) {
          groups[groupId] = [];
        }

        for (var variantLabel in mediaGroups[type][groupId]) {
          var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services

          if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
            continue;
          }

          var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
          var newProps = {
            label: variantLabel,
            language: properties.language,
            instreamId: properties.instreamId,
            default: properties.default && properties.autoselect
          };

          // allow player-configured captionServices to override the
          // properties derived from the manifest, keyed by instreamId
          if (captionServices[newProps.instreamId]) {
            newProps = videojs__default["default"].mergeOptions(newProps, captionServices[newProps.instreamId]);
          }

          if (newProps.default === undefined) {
            delete newProps.default;
          } // No PlaylistLoader is required for Closed-Captions because the captions are
          // embedded within the video stream


          groups[groupId].push(videojs__default["default"].mergeOptions({
            id: variantLabel
          }, properties));

          if (typeof tracks[variantLabel] === 'undefined') {
            var track = tech.addRemoteTextTrack({
              id: newProps.instreamId,
              kind: 'captions',
              default: newProps.default,
              language: newProps.language,
              label: newProps.label
            }, false).track;
            tracks[variantLabel] = track;
          }
        }
      }
    }
  };
28022
28023 var groupMatch = function groupMatch(list, media) {
28024 for (var i = 0; i < list.length; i++) {
28025 if (playlistMatch(media, list[i])) {
28026 return true;
28027 }
28028
28029 if (list[i].playlists && groupMatch(list[i].playlists, media)) {
28030 return true;
28031 }
28032 }
28033
28034 return false;
28035 };
28036 /**
28037 * Returns a function used to get the active group of the provided type
28038 *
28039 * @param {string} type
28040 * MediaGroup type
28041 * @param {Object} settings
28042 * Object containing required information for media groups
28043 * @return {Function}
28044 * Function that returns the active media group for the provided type. Takes an
28045 * optional parameter {TextTrack} track. If no track is provided, a list of all
28046 * variants in the group, otherwise the variant corresponding to the provided
28047 * track is returned.
28048 * @function activeGroup
28049 */
28050
28051
28052 var activeGroup = function activeGroup(type, settings) {
28053 return function (track) {
28054 var masterPlaylistLoader = settings.masterPlaylistLoader,
28055 groups = settings.mediaTypes[type].groups;
28056 var media = masterPlaylistLoader.media();
28057
28058 if (!media) {
28059 return null;
28060 }
28061
28062 var variants = null; // set to variants to main media active group
28063
28064 if (media.attributes[type]) {
28065 variants = groups[media.attributes[type]];
28066 }
28067
28068 var groupKeys = Object.keys(groups);
28069
28070 if (!variants) {
28071 // find the masterPlaylistLoader media
28072 // that is in a media group if we are dealing
28073 // with audio only
28074 if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
28075 for (var i = 0; i < groupKeys.length; i++) {
28076 var groupPropertyList = groups[groupKeys[i]];
28077
28078 if (groupMatch(groupPropertyList, media)) {
28079 variants = groupPropertyList;
28080 break;
28081 }
28082 } // use the main group if it exists
28083
28084 } else if (groups.main) {
28085 variants = groups.main; // only one group, use that one
28086 } else if (groupKeys.length === 1) {
28087 variants = groups[groupKeys[0]];
28088 }
28089 }
28090
28091 if (typeof track === 'undefined') {
28092 return variants;
28093 }
28094
28095 if (track === null || !variants) {
28096 // An active track was specified so a corresponding group is expected. track === null
28097 // means no track is currently active so there is no corresponding group
28098 return null;
28099 }
28100
28101 return variants.filter(function (props) {
28102 return props.id === track.id;
28103 })[0] || null;
28104 };
28105 };
28106 var activeTrack = {
28107 /**
28108 * Returns a function used to get the active track of type provided
28109 *
28110 * @param {string} type
28111 * MediaGroup type
28112 * @param {Object} settings
28113 * Object containing required information for media groups
28114 * @return {Function}
28115 * Function that returns the active media track for the provided type. Returns
28116 * null if no track is active
28117 * @function activeTrack.AUDIO
28118 */
28119 AUDIO: function AUDIO(type, settings) {
28120 return function () {
28121 var tracks = settings.mediaTypes[type].tracks;
28122
28123 for (var id in tracks) {
28124 if (tracks[id].enabled) {
28125 return tracks[id];
28126 }
28127 }
28128
28129 return null;
28130 };
28131 },
28132
28133 /**
28134 * Returns a function used to get the active track of type provided
28135 *
28136 * @param {string} type
28137 * MediaGroup type
28138 * @param {Object} settings
28139 * Object containing required information for media groups
28140 * @return {Function}
28141 * Function that returns the active media track for the provided type. Returns
28142 * null if no track is active
28143 * @function activeTrack.SUBTITLES
28144 */
28145 SUBTITLES: function SUBTITLES(type, settings) {
28146 return function () {
28147 var tracks = settings.mediaTypes[type].tracks;
28148
28149 for (var id in tracks) {
28150 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
28151 return tracks[id];
28152 }
28153 }
28154
28155 return null;
28156 };
28157 }
28158 };
28159 var getActiveGroup = function getActiveGroup(type, _ref) {
28160 var mediaTypes = _ref.mediaTypes;
28161 return function () {
28162 var activeTrack_ = mediaTypes[type].activeTrack();
28163
28164 if (!activeTrack_) {
28165 return null;
28166 }
28167
28168 return mediaTypes[type].activeGroup(activeTrack_);
28169 };
28170 };
28171 /**
28172 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
28173 * Closed-Captions) specified in the master manifest.
28174 *
28175 * @param {Object} settings
28176 * Object containing required information for setting up the media groups
28177 * @param {Tech} settings.tech
28178 * The tech of the player
28179 * @param {Object} settings.requestOptions
28180 * XHR request options used by the segment loaders
28181 * @param {PlaylistLoader} settings.masterPlaylistLoader
28182 * PlaylistLoader for the master source
28183 * @param {VhsHandler} settings.vhs
28184 * VHS SourceHandler
28185 * @param {Object} settings.master
28186 * The parsed master manifest
28187 * @param {Object} settings.mediaTypes
28188 * Object to store the loaders, tracks, and utility methods for each media type
28189 * @param {Function} settings.blacklistCurrentPlaylist
28190 * Blacklists the current rendition and forces a rendition switch.
28191 * @function setupMediaGroups
28192 */
28193
  var setupMediaGroups = function setupMediaGroups(settings) {
    // build groups/tracks for every supported media group type
    ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
      initialize[type](type, settings);
    });
    var mediaTypes = settings.mediaTypes,
        masterPlaylistLoader = settings.masterPlaylistLoader,
        tech = settings.tech,
        vhs = settings.vhs,
        _settings$segmentLoad3 = settings.segmentLoaders,
        audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
        mainSegmentLoader = _settings$segmentLoad3.main; // setup active group and track getters and change event handlers

    ['AUDIO', 'SUBTITLES'].forEach(function (type) {
      mediaTypes[type].activeGroup = activeGroup(type, settings);
      mediaTypes[type].activeTrack = activeTrack[type](type, settings);
      mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
      mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
      mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
      mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
    }); // DO NOT enable the default subtitle or caption track.
    // DO enable the default audio track

    var audioGroup = mediaTypes.AUDIO.activeGroup();

    if (audioGroup) {
      // prefer the variant flagged `default` in the manifest, else the first one
      var groupId = (audioGroup.filter(function (group) {
        return group.default;
      })[0] || audioGroup[0]).id;
      mediaTypes.AUDIO.tracks[groupId].enabled = true;
      mediaTypes.AUDIO.onGroupChanged();
      mediaTypes.AUDIO.onTrackChanged();
      var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
      // track is changed, but needs to be handled here since the track may not be considered
      // changed on the first call to onTrackChanged
      // NOTE(review): assumes getActiveGroup() is non-null here because a track was
      // just enabled above — confirm before relying on this in other code paths

      if (!activeAudioGroup.playlistLoader) {
        // either audio is muxed with video or the stream is audio only
        mainSegmentLoader.setAudio(true);
      } else {
        // audio is demuxed
        mainSegmentLoader.setAudio(false);
        audioSegmentLoader.setAudio(true);
      }
    }

    // keep group/track state in sync with media playlist changes
    masterPlaylistLoader.on('mediachange', function () {
      ['AUDIO', 'SUBTITLES'].forEach(function (type) {
        return mediaTypes[type].onGroupChanged();
      });
    });
    masterPlaylistLoader.on('mediachanging', function () {
      ['AUDIO', 'SUBTITLES'].forEach(function (type) {
        return mediaTypes[type].onGroupChanging();
      });
    }); // custom audio track change event handler for usage event

    var onAudioTrackChanged = function onAudioTrackChanged() {
      mediaTypes.AUDIO.onTrackChanged();
      tech.trigger({
        type: 'usage',
        name: 'vhs-audio-change'
      });
      // legacy hls-prefixed usage event kept alongside the vhs-prefixed one
      tech.trigger({
        type: 'usage',
        name: 'hls-audio-change'
      });
    };

    tech.audioTracks().addEventListener('change', onAudioTrackChanged);
    tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
    // detach listeners on teardown to avoid leaks across source changes
    vhs.on('dispose', function () {
      tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
      tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
    }); // clear existing audio tracks and add the ones we just created

    tech.clearTracks('audio');

    for (var id in mediaTypes.AUDIO.tracks) {
      tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
    }
  };
28275 /**
28276 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
28277 * media type
28278 *
28279 * @return {Object}
28280 * Object to store the loaders, tracks, and utility methods for each media type
28281 * @function createMediaTypes
28282 */
28283
28284 var createMediaTypes = function createMediaTypes() {
28285 var mediaTypes = {};
28286 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
28287 mediaTypes[type] = {
28288 groups: {},
28289 tracks: {},
28290 activePlaylistLoader: null,
28291 activeGroup: noop,
28292 activeTrack: noop,
28293 getActiveGroup: noop,
28294 onGroupChanged: noop,
28295 onTrackChanged: noop,
28296 lastTrack_: null,
28297 logger_: logger("MediaGroups[" + type + "]")
28298 };
28299 });
28300 return mediaTypes;
28301 };
28302
  // 2 minutes — presumably the window used when deciding whether an early
  // request abort should blacklist the rendition; TODO confirm at usage site
  var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
  // module-level handle to the extern VHS implementation; assigned in the
  // MasterPlaylistController constructor
  var Vhs$1; // SegmentLoader stats that need to have each loader's
  // values summed to calculate the final value

  var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
28308
28309 var sumLoaderStat = function sumLoaderStat(stat) {
28310 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
28311 };
28312
  /**
   * Decides whether ABR should switch from `currentPlaylist` to `nextPlaylist`.
   * Returns true to allow the switch, false to stay put; every decision is
   * explained through the provided `log` function.
   *
   * @param {Object} _ref destructured options:
   *        currentPlaylist/nextPlaylist, buffered ranges, currentTime,
   *        buffer low/high water lines, total duration, the
   *        experimentalBufferBasedABR flag, and a logger
   * @return {boolean} whether the switch should be made
   */
  var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
    var currentPlaylist = _ref.currentPlaylist,
        buffered = _ref.buffered,
        currentTime = _ref.currentTime,
        nextPlaylist = _ref.nextPlaylist,
        bufferLowWaterLine = _ref.bufferLowWaterLine,
        bufferHighWaterLine = _ref.bufferHighWaterLine,
        duration = _ref.duration,
        experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
        log = _ref.log;

    // we have no other playlist to switch to
    if (!nextPlaylist) {
      videojs__default["default"].log.warn('We received no playlist to switch to. Please check your stream.');
      return false;
    }

    var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id;

    if (!currentPlaylist) {
      log(sharedLogLine + " as current playlist is not set");
      return true;
    } // no need to switch if playlist is the same


    if (nextPlaylist.id === currentPlaylist.id) {
      return false;
    } // determine if current time is in a buffered range.


    var isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, then we want to not take low water line into account.
    // This is because in LIVE, the player plays 3 segments from the end of the
    // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration availble
    // in those segments, a viewer will never experience a rendition upswitch.

    if (!currentPlaylist.endList) {
      // For LLHLS live streams, don't switch renditions before playback has started, as it almost
      // doubles the time to first playback.
      if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {
        log("not " + sharedLogLine + " as current playlist is live llhls, but currentTime isn't in buffered.");
        return false;
      }

      log(sharedLogLine + " as current playlist is live");
      return true;
    }

    var forwardBuffer = timeAheadOf(buffered, currentTime);
    var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
    // duration is below the max potential low water line

    if (duration < maxBufferLowWaterLine) {
      log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
      return true;
    }

    var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
    var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
    // we can switch down

    if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
      var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";

      if (experimentalBufferBasedABR) {
        logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
      }

      log(logLine);
      return true;
    } // and if our buffer is higher than the low water line,
    // we can switch up


    if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
      var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";

      if (experimentalBufferBasedABR) {
        _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
      }

      log(_logLine);
      return true;
    }

    log("not " + sharedLogLine + " as no switching criteria met");
    return false;
  };
28400 /**
28401 * the master playlist controller controller all interactons
28402 * between playlists and segmentloaders. At this time this mainly
28403 * involves a master playlist and a series of audio playlists
28404 * if they are available
28405 *
28406 * @class MasterPlaylistController
28407 * @extends videojs.EventTarget
28408 */
28409
28410
28411 var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
28412 inheritsLoose(MasterPlaylistController, _videojs$EventTarget);
28413
    function MasterPlaylistController(options) {
      var _this;

      // transpiled `super()` call (extends videojs.EventTarget)
      _this = _videojs$EventTarget.call(this) || this;
      var src = options.src,
          handleManifestRedirects = options.handleManifestRedirects,
          withCredentials = options.withCredentials,
          tech = options.tech,
          bandwidth = options.bandwidth,
          externVhs = options.externVhs,
          useCueTags = options.useCueTags,
          blacklistDuration = options.blacklistDuration,
          enableLowInitialPlaylist = options.enableLowInitialPlaylist,
          sourceType = options.sourceType,
          cacheEncryptionKeys = options.cacheEncryptionKeys,
          experimentalBufferBasedABR = options.experimentalBufferBasedABR,
          experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
          captionServices = options.captionServices;

      if (!src) {
        throw new Error('A non-empty playlist URL or JSON manifest string is required');
      }

      // null/undefined means "retry forever"
      var maxPlaylistRetries = options.maxPlaylistRetries;

      if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
        maxPlaylistRetries = Infinity;
      }

      Vhs$1 = externVhs;
      _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
      _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
      _this.withCredentials = withCredentials;
      _this.tech_ = tech;
      _this.vhs_ = tech.vhs;
      _this.sourceType_ = sourceType;
      _this.useCueTags_ = useCueTags;
      _this.blacklistDuration = blacklistDuration;
      _this.maxPlaylistRetries = maxPlaylistRetries;
      _this.enableLowInitialPlaylist = enableLowInitialPlaylist;

      if (_this.useCueTags_) {
        // metadata text track that carries ad-cue information
        _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
        _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
      }

      // shared XHR options for playlist requests
      _this.requestOptions_ = {
        withCredentials: withCredentials,
        handleManifestRedirects: handleManifestRedirects,
        maxPlaylistRetries: maxPlaylistRetries,
        timeout: null
      };

      _this.on('error', _this.pauseLoading);

      _this.mediaTypes_ = createMediaTypes();
      _this.mediaSource = new window.MediaSource();
      // pre-bind handlers so the same reference can be removed later
      _this.handleDurationChange_ = _this.handleDurationChange_.bind(assertThisInitialized(_this));
      _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(assertThisInitialized(_this));
      _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(assertThisInitialized(_this));

      _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player


      _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);

      _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
      // everything, and the MediaSource should not be detached without a proper disposal


      _this.seekable_ = videojs__default["default"].createTimeRanges();
      _this.hasPlayed_ = false;
      _this.syncController_ = new SyncController(options);
      _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
        kind: 'metadata',
        label: 'segment-metadata'
      }, false).track;
      _this.decrypter_ = new Decrypter();
      _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
      _this.inbandTextTracks_ = {};
      _this.timelineChangeController_ = new TimelineChangeController();
      // settings shared by all three segment loaders; time/duration accessors
      // are wrapped in closures so they always reflect current state
      var segmentLoaderSettings = {
        vhs: _this.vhs_,
        parse708captions: options.parse708captions,
        useDtsForTimestampOffset: options.useDtsForTimestampOffset,
        captionServices: captionServices,
        mediaSource: _this.mediaSource,
        currentTime: _this.tech_.currentTime.bind(_this.tech_),
        seekable: function seekable() {
          return _this.seekable();
        },
        seeking: function seeking() {
          return _this.tech_.seeking();
        },
        duration: function duration() {
          return _this.duration();
        },
        hasPlayed: function hasPlayed() {
          return _this.hasPlayed_;
        },
        goalBufferLength: function goalBufferLength() {
          return _this.goalBufferLength();
        },
        bandwidth: bandwidth,
        syncController: _this.syncController_,
        decrypter: _this.decrypter_,
        sourceType: _this.sourceType_,
        inbandTextTracks: _this.inbandTextTracks_,
        cacheEncryptionKeys: cacheEncryptionKeys,
        sourceUpdater: _this.sourceUpdater_,
        timelineChangeController: _this.timelineChangeController_,
        experimentalExactManifestTimings: options.experimentalExactManifestTimings
      }; // The source type check not only determines whether a special DASH playlist loader
      // should be used, but also covers the case where the provided src is a vhs-json
      // manifest object (instead of a URL). In the case of vhs-json, the default
      // PlaylistLoader should be used.

      _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);

      _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
      // combined audio/video or just video when alternate audio track is selected


      _this.mainSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
        segmentMetadataTrack: _this.segmentMetadataTrack_,
        loaderType: 'main'
      }), options); // alternate audio track

      _this.audioSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
        loaderType: 'audio'
      }), options);
      _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
        loaderType: 'vtt',
        featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks,
        // lazily loads the vtt.js script via the tech, resolving/rejecting on
        // the corresponding tech events
        loadVttJs: function loadVttJs() {
          return new Promise(function (resolve, reject) {
            function onLoad() {
              tech.off('vttjserror', onError);
              resolve();
            }

            function onError() {
              tech.off('vttjsloaded', onLoad);
              reject();
            }

            tech.one('vttjsloaded', onLoad);
            tech.one('vttjserror', onError); // safe to call multiple times, script will be loaded only once:

            tech.addWebVttScript_();
          });
        }
      }), options);

      _this.setupSegmentLoaderListeners_();

      if (_this.experimentalBufferBasedABR) {
        // buffer-based ABR runs on a timer rather than per-event
        _this.masterPlaylistLoader_.one('loadedplaylist', function () {
          return _this.startABRTimer_();
        });

        _this.tech_.on('pause', function () {
          return _this.stopABRTimer_();
        });

        _this.tech_.on('play', function () {
          return _this.startABRTimer_();
        });
      } // Create SegmentLoader stat-getters
      // mediaRequests_
      // mediaRequestsAborted_
      // mediaRequestsTimedout_
      // mediaRequestsErrored_
      // mediaTransferDuration_
      // mediaBytesTransferred_
      // mediaAppends_


      loaderStats.forEach(function (stat) {
        _this[stat + '_'] = sumLoaderStat.bind(assertThisInitialized(_this), stat);
      });
      _this.logger_ = logger('MPC');
      _this.triggeredFmp4Usage = false;

      if (_this.tech_.preload() === 'none') {
        // defer the first manifest request until playback is requested
        _this.loadOnPlay_ = function () {
          _this.loadOnPlay_ = null;

          _this.masterPlaylistLoader_.load();
        };

        _this.tech_.one('play', _this.loadOnPlay_);
      } else {
        _this.masterPlaylistLoader_.load();
      }

      // -1 sentinels: not yet measured (populated once `loadeddata` fires)
      _this.timeToLoadedData__ = -1;
      _this.mainAppendsToLoadedData__ = -1;
      _this.audioAppendsToLoadedData__ = -1;
      var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)

      _this.tech_.one(event, function () {
        var timeToLoadedDataStart = Date.now();

        _this.tech_.one('loadeddata', function () {
          _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
          _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
          _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
        });
      });

      return _this;
    }
28627
    var _proto = MasterPlaylistController.prototype; // transpiled class body: instance methods are attached to the prototype below
28629
    _proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
      // number of main segment-loader appends performed before `loadeddata`
      // fired; -1 until that point (recorded in the constructor)
      return this.mainAppendsToLoadedData__;
    };
28633
    _proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
      // number of audio segment-loader appends performed before `loadeddata`
      // fired; -1 until that point (recorded in the constructor)
      return this.audioAppendsToLoadedData__;
    };
28637
28638 _proto.appendsToLoadedData_ = function appendsToLoadedData_() {
28639 var main = this.mainAppendsToLoadedData_();
28640 var audio = this.audioAppendsToLoadedData_();
28641
28642 if (main === -1 || audio === -1) {
28643 return -1;
28644 }
28645
28646 return main + audio;
28647 };
28648
    _proto.timeToLoadedData_ = function timeToLoadedData_() {
      // milliseconds from load/play start until the tech's `loadeddata` event,
      // or -1 if it has not fired yet (set up in the constructor)
      return this.timeToLoadedData__;
    }
28652 /**
28653 * Run selectPlaylist and switch to the new playlist if we should
28654 *
28655 * @param {string} [reason=abr] a reason for why the ABR check is made
28656 * @private
28657 */
28658 ;
28659
28660 _proto.checkABR_ = function checkABR_(reason) {
28661 if (reason === void 0) {
28662 reason = 'abr';
28663 }
28664
28665 var nextPlaylist = this.selectPlaylist();
28666
28667 if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
28668 this.switchMedia_(nextPlaylist, reason);
28669 }
28670 };
28671
28672 _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
28673 var oldMedia = this.media();
28674 var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
28675 var newId = playlist.id || playlist.uri;
28676
28677 if (oldId && oldId !== newId) {
28678 this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
28679 this.tech_.trigger({
28680 type: 'usage',
28681 name: "vhs-rendition-change-" + cause
28682 });
28683 }
28684
28685 this.masterPlaylistLoader_.media(playlist, delay);
28686 }
28687 /**
28688 * Start a timer that periodically calls checkABR_
28689 *
28690 * @private
28691 */
28692 ;
28693
28694 _proto.startABRTimer_ = function startABRTimer_() {
28695 var _this2 = this;
28696
28697 this.stopABRTimer_();
28698 this.abrTimer_ = window.setInterval(function () {
28699 return _this2.checkABR_();
28700 }, 250);
28701 }
28702 /**
28703 * Stop the timer that periodically calls checkABR_
28704 *
28705 * @private
28706 */
28707 ;
28708
28709 _proto.stopABRTimer_ = function stopABRTimer_() {
28710 // if we're scrubbing, we don't need to pause.
28711 // This getter will be added to Video.js in version 7.11.
28712 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
28713 return;
28714 }
28715
28716 window.clearInterval(this.abrTimer_);
28717 this.abrTimer_ = null;
28718 }
28719 /**
28720 * Get a list of playlists for the currently selected audio playlist
28721 *
28722 * @return {Array} the array of audio playlists
28723 */
28724 ;
28725
    _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
      var master = this.master();
      var defaultPlaylists = master && master.playlists || []; // if we don't have any audio groups then we can only
      // assume that the audio tracks are contained in masters
      // playlist array, use that or an empty array.

      if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
        return defaultPlaylists;
      }

      var AUDIO = master.mediaGroups.AUDIO;
      var groupKeys = Object.keys(AUDIO);
      var track; // get the current active track

      if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
        track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't setup yet
      } else {
        // default group is `main` or just the first group.
        var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];

        // pick the first variant flagged `default` in the default group
        for (var label in defaultGroup) {
          if (defaultGroup[label].default) {
            track = {
              label: label
            };
            break;
          }
        }
      } // no active track no playlists.


      if (!track) {
        return defaultPlaylists;
      }

      var playlists = []; // get all of the playlists that are possible for the
      // active track.

      for (var group in AUDIO) {
        if (AUDIO[group][track.label]) {
          var properties = AUDIO[group][track.label];

          if (properties.playlists && properties.playlists.length) {
            // group entry carries its own media playlists
            playlists.push.apply(playlists, properties.playlists);
          } else if (properties.uri) {
            // group entry is itself a loadable playlist
            playlists.push(properties);
          } else if (master.playlists.length) {
            // if an audio group does not have a uri
            // see if we have main playlists that use it as a group.
            // if we do then add those to the playlists list.
            for (var i = 0; i < master.playlists.length; i++) {
              var playlist = master.playlists[i];

              if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
                playlists.push(playlist);
              }
            }
          }
        }
      }

      // nothing matched the active track: fall back to the master's playlists
      if (!playlists.length) {
        return defaultPlaylists;
      }

      return playlists;
    }
28793 /**
28794 * Register event handlers on the master playlist loader. A helper
28795 * function for construction time.
28796 *
28797 * @private
28798 */
28799 ;
28800
28801 _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
28802 var _this3 = this;
28803
28804 this.masterPlaylistLoader_.on('loadedmetadata', function () {
28805 var media = _this3.masterPlaylistLoader_.media();
28806
28807 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
28808 // timeout the request.
28809
28810 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
28811 _this3.requestOptions_.timeout = 0;
28812 } else {
28813 _this3.requestOptions_.timeout = requestTimeout;
28814 } // if this isn't a live video and preload permits, start
28815 // downloading segments
28816
28817
28818 if (media.endList && _this3.tech_.preload() !== 'none') {
28819 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
28820
28821 _this3.mainSegmentLoader_.load();
28822 }
28823
28824 setupMediaGroups({
28825 sourceType: _this3.sourceType_,
28826 segmentLoaders: {
28827 AUDIO: _this3.audioSegmentLoader_,
28828 SUBTITLES: _this3.subtitleSegmentLoader_,
28829 main: _this3.mainSegmentLoader_
28830 },
28831 tech: _this3.tech_,
28832 requestOptions: _this3.requestOptions_,
28833 masterPlaylistLoader: _this3.masterPlaylistLoader_,
28834 vhs: _this3.vhs_,
28835 master: _this3.master(),
28836 mediaTypes: _this3.mediaTypes_,
28837 blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
28838 });
28839
28840 _this3.triggerPresenceUsage_(_this3.master(), media);
28841
28842 _this3.setupFirstPlay();
28843
28844 if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
28845 _this3.trigger('selectedinitialmedia');
28846 } else {
28847 // We must wait for the active audio playlist loader to
28848 // finish setting up before triggering this event so the
28849 // representations API and EME setup is correct
28850 _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
28851 _this3.trigger('selectedinitialmedia');
28852 });
28853 }
28854 });
28855 this.masterPlaylistLoader_.on('loadedplaylist', function () {
28856 if (_this3.loadOnPlay_) {
28857 _this3.tech_.off('play', _this3.loadOnPlay_);
28858 }
28859
28860 var updatedPlaylist = _this3.masterPlaylistLoader_.media();
28861
28862 if (!updatedPlaylist) {
28863 // exclude any variants that are not supported by the browser before selecting
28864 // an initial media as the playlist selectors do not consider browser support
28865 _this3.excludeUnsupportedVariants_();
28866
28867 var selectedMedia;
28868
28869 if (_this3.enableLowInitialPlaylist) {
28870 selectedMedia = _this3.selectInitialPlaylist();
28871 }
28872
28873 if (!selectedMedia) {
28874 selectedMedia = _this3.selectPlaylist();
28875 }
28876
28877 if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
28878 return;
28879 }
28880
28881 _this3.initialMedia_ = selectedMedia;
28882
28883 _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
28884 // fire again since the playlist will be requested. In the case of vhs-json
28885 // (where the manifest object is provided as the source), when the media
28886 // playlist's `segments` list is already available, a media playlist won't be
28887 // requested, and loadedplaylist won't fire again, so the playlist handler must be
28888 // called on its own here.
28889
28890
28891 var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
28892
28893 if (!haveJsonSource) {
28894 return;
28895 }
28896
28897 updatedPlaylist = _this3.initialMedia_;
28898 }
28899
28900 _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
28901 });
28902 this.masterPlaylistLoader_.on('error', function () {
28903 _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
28904 });
28905 this.masterPlaylistLoader_.on('mediachanging', function () {
28906 _this3.mainSegmentLoader_.abort();
28907
28908 _this3.mainSegmentLoader_.pause();
28909 });
28910 this.masterPlaylistLoader_.on('mediachange', function () {
28911 var media = _this3.masterPlaylistLoader_.media();
28912
28913 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
28914 // timeout the request.
28915
28916 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
28917 _this3.requestOptions_.timeout = 0;
28918 } else {
28919 _this3.requestOptions_.timeout = requestTimeout;
28920 }
28921
28922 _this3.masterPlaylistLoader_.load(); // TODO: Create a new event on the PlaylistLoader that signals
28923 // that the segments have changed in some way and use that to
28924 // update the SegmentLoader instead of doing it twice here and
28925 // on `loadedplaylist`
28926
28927
28928 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
28929
28930 _this3.mainSegmentLoader_.load();
28931
28932 _this3.tech_.trigger({
28933 type: 'mediachange',
28934 bubbles: true
28935 });
28936 });
28937 this.masterPlaylistLoader_.on('playlistunchanged', function () {
28938 var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
28939 // excluded for not-changing. We likely just have a really slowly updating
28940 // playlist.
28941
28942
28943 if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
28944 return;
28945 }
28946
28947 var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
28948
28949 if (playlistOutdated) {
28950 // Playlist has stopped updating and we're stuck at its end. Try to
28951 // blacklist it and switch to another playlist in the hope that that
28952 // one is updating (and give the player a chance to re-adjust to the
28953 // safe live point).
28954 _this3.blacklistCurrentPlaylist({
28955 message: 'Playlist no longer updating.',
28956 reason: 'playlist-unchanged'
28957 }); // useful for monitoring QoS
28958
28959
28960 _this3.tech_.trigger('playliststuck');
28961 }
28962 });
28963 this.masterPlaylistLoader_.on('renditiondisabled', function () {
28964 _this3.tech_.trigger({
28965 type: 'usage',
28966 name: 'vhs-rendition-disabled'
28967 });
28968
28969 _this3.tech_.trigger({
28970 type: 'usage',
28971 name: 'hls-rendition-disabled'
28972 });
28973 });
28974 this.masterPlaylistLoader_.on('renditionenabled', function () {
28975 _this3.tech_.trigger({
28976 type: 'usage',
28977 name: 'vhs-rendition-enabled'
28978 });
28979
28980 _this3.tech_.trigger({
28981 type: 'usage',
28982 name: 'hls-rendition-enabled'
28983 });
28984 });
28985 }
28986 /**
28987 * Given an updated media playlist (whether it was loaded for the first time, or
28988 * refreshed for live playlists), update any relevant properties and state to reflect
28989 * changes in the media that should be accounted for (e.g., cues and duration).
28990 *
28991 * @param {Object} updatedPlaylist the updated media playlist object
28992 *
28993 * @private
28994 */
28995 ;
28996
28997 _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
28998 if (this.useCueTags_) {
28999 this.updateAdCues_(updatedPlaylist);
29000 } // TODO: Create a new event on the PlaylistLoader that signals
29001 // that the segments have changed in some way and use that to
29002 // update the SegmentLoader instead of doing it twice here and
29003 // on `mediachange`
29004
29005
29006 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
29007 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
29008 // as it is possible that it was temporarily stopped while waiting for
29009 // a playlist (e.g., in case the playlist errored and we re-requested it).
29010
29011 if (!this.tech_.paused()) {
29012 this.mainSegmentLoader_.load();
29013
29014 if (this.audioSegmentLoader_) {
29015 this.audioSegmentLoader_.load();
29016 }
29017 }
29018 }
29019 /**
29020 * A helper function for triggerring presence usage events once per source
29021 *
29022 * @private
29023 */
29024 ;
29025
29026 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
29027 var mediaGroups = master.mediaGroups || {};
29028 var defaultDemuxed = true;
29029 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
29030
29031 for (var mediaGroup in mediaGroups.AUDIO) {
29032 for (var label in mediaGroups.AUDIO[mediaGroup]) {
29033 var properties = mediaGroups.AUDIO[mediaGroup][label];
29034
29035 if (!properties.uri) {
29036 defaultDemuxed = false;
29037 }
29038 }
29039 }
29040
29041 if (defaultDemuxed) {
29042 this.tech_.trigger({
29043 type: 'usage',
29044 name: 'vhs-demuxed'
29045 });
29046 this.tech_.trigger({
29047 type: 'usage',
29048 name: 'hls-demuxed'
29049 });
29050 }
29051
29052 if (Object.keys(mediaGroups.SUBTITLES).length) {
29053 this.tech_.trigger({
29054 type: 'usage',
29055 name: 'vhs-webvtt'
29056 });
29057 this.tech_.trigger({
29058 type: 'usage',
29059 name: 'hls-webvtt'
29060 });
29061 }
29062
29063 if (Vhs$1.Playlist.isAes(media)) {
29064 this.tech_.trigger({
29065 type: 'usage',
29066 name: 'vhs-aes'
29067 });
29068 this.tech_.trigger({
29069 type: 'usage',
29070 name: 'hls-aes'
29071 });
29072 }
29073
29074 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
29075 this.tech_.trigger({
29076 type: 'usage',
29077 name: 'vhs-alternate-audio'
29078 });
29079 this.tech_.trigger({
29080 type: 'usage',
29081 name: 'hls-alternate-audio'
29082 });
29083 }
29084
29085 if (this.useCueTags_) {
29086 this.tech_.trigger({
29087 type: 'usage',
29088 name: 'vhs-playlist-cue-tags'
29089 });
29090 this.tech_.trigger({
29091 type: 'usage',
29092 name: 'hls-playlist-cue-tags'
29093 });
29094 }
29095 };
29096
29097 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
29098 var currentPlaylist = this.masterPlaylistLoader_.media() || this.masterPlaylistLoader_.pendingMedia_;
29099 var currentTime = this.tech_.currentTime();
29100 var bufferLowWaterLine = this.bufferLowWaterLine();
29101 var bufferHighWaterLine = this.bufferHighWaterLine();
29102 var buffered = this.tech_.buffered();
29103 return shouldSwitchToMedia({
29104 buffered: buffered,
29105 currentTime: currentTime,
29106 currentPlaylist: currentPlaylist,
29107 nextPlaylist: nextPlaylist,
29108 bufferLowWaterLine: bufferLowWaterLine,
29109 bufferHighWaterLine: bufferHighWaterLine,
29110 duration: this.duration(),
29111 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
29112 log: this.logger_
29113 });
29114 }
29115 /**
29116 * Register event handlers on the segment loaders. A helper function
29117 * for construction time.
29118 *
29119 * @private
29120 */
29121 ;
29122
  _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
    var _this4 = this;

    // Wire up main/audio segment loader events: ABR checks, error and
    // end-of-stream propagation, usage reporting, and codec/source-buffer
    // updates. Called once at construction time.
    this.mainSegmentLoader_.on('bandwidthupdate', function () {
      // Whether or not buffer based ABR or another ABR is used, on a bandwidth change it's
      // useful to check to see if a rendition switch should be made.
      _this4.checkABR_('bandwidthupdate');

      _this4.tech_.trigger('bandwidthupdate');
    });
    this.mainSegmentLoader_.on('timeout', function () {
      if (_this4.experimentalBufferBasedABR) {
        // If a rendition change is needed, then it would've be done on `bandwidthupdate`.
        // Here the only consideration is that for buffer based ABR there's no guarantee
        // of an immediate switch (since the bandwidth is averaged with a timeout
        // bandwidth value of 1), so force a load on the segment loader to keep it going.
        _this4.mainSegmentLoader_.load();
      }
    }); // `progress` events are not reliable enough of a bandwidth measure to trigger buffer
    // based ABR.

    if (!this.experimentalBufferBasedABR) {
      this.mainSegmentLoader_.on('progress', function () {
        _this4.trigger('progress');
      });
    }

    // A segment request error blacklists the current playlist so another
    // rendition can be tried.
    this.mainSegmentLoader_.on('error', function () {
      _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
    });
    this.mainSegmentLoader_.on('appenderror', function () {
      _this4.error = _this4.mainSegmentLoader_.error_;

      _this4.trigger('error');
    });
    this.mainSegmentLoader_.on('syncinfoupdate', function () {
      _this4.onSyncInfoUpdate_();
    });
    this.mainSegmentLoader_.on('timestampoffset', function () {
      _this4.tech_.trigger({
        type: 'usage',
        name: 'vhs-timestamp-offset'
      });

      // legacy hls- prefixed counterpart of the event above
      _this4.tech_.trigger({
        type: 'usage',
        name: 'hls-timestamp-offset'
      });
    });
    this.audioSegmentLoader_.on('syncinfoupdate', function () {
      _this4.onSyncInfoUpdate_();
    });
    this.audioSegmentLoader_.on('appenderror', function () {
      _this4.error = _this4.audioSegmentLoader_.error_;

      _this4.trigger('error');
    });
    this.mainSegmentLoader_.on('ended', function () {
      _this4.logger_('main segment loader ended');

      _this4.onEndOfStream();
    });
    this.mainSegmentLoader_.on('earlyabort', function (event) {
      // never try to early abort with the new ABR algorithm
      if (_this4.experimentalBufferBasedABR) {
        return;
      }

      _this4.delegateLoaders_('all', ['abort']);

      _this4.blacklistCurrentPlaylist({
        message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
      }, ABORT_EARLY_BLACKLIST_SECONDS);
    });

    // Shared handler: create source buffers on the first trackinfo, then keep
    // their codecs in sync on subsequent trackinfo events.
    var updateCodecs = function updateCodecs() {
      if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
        return _this4.tryToCreateSourceBuffers_();
      }

      var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded


      if (!codecs) {
        return;
      }

      _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
    };

    this.mainSegmentLoader_.on('trackinfo', updateCodecs);
    this.audioSegmentLoader_.on('trackinfo', updateCodecs);
    // fmp4 usage is reported at most once per source (triggeredFmp4Usage is a
    // latch shared by the main and audio handlers below)
    this.mainSegmentLoader_.on('fmp4', function () {
      if (!_this4.triggeredFmp4Usage) {
        _this4.tech_.trigger({
          type: 'usage',
          name: 'vhs-fmp4'
        });

        _this4.tech_.trigger({
          type: 'usage',
          name: 'hls-fmp4'
        });

        _this4.triggeredFmp4Usage = true;
      }
    });
    this.audioSegmentLoader_.on('fmp4', function () {
      if (!_this4.triggeredFmp4Usage) {
        _this4.tech_.trigger({
          type: 'usage',
          name: 'vhs-fmp4'
        });

        _this4.tech_.trigger({
          type: 'usage',
          name: 'hls-fmp4'
        });

        _this4.triggeredFmp4Usage = true;
      }
    });
    this.audioSegmentLoader_.on('ended', function () {
      _this4.logger_('audioSegmentLoader ended');

      _this4.onEndOfStream();
    });
  };
29251
29252 _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
29253 return Math.max(this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded);
29254 }
29255 /**
29256 * Call load on our SegmentLoaders
29257 */
29258 ;
29259
29260 _proto.load = function load() {
29261 this.mainSegmentLoader_.load();
29262
29263 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29264 this.audioSegmentLoader_.load();
29265 }
29266
29267 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
29268 this.subtitleSegmentLoader_.load();
29269 }
29270 }
29271 /**
29272 * Re-tune playback quality level for the current player
29273 * conditions without performing destructive actions, like
29274 * removing already buffered content
29275 *
29276 * @private
29277 * @deprecated
29278 */
29279 ;
29280
29281 _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
29282 if (media === void 0) {
29283 media = this.selectPlaylist();
29284 }
29285
29286 this.fastQualityChange_(media);
29287 }
29288 /**
29289 * Re-tune playback quality level for the current player
29290 * conditions. This method will perform destructive actions like removing
29291 * already buffered content in order to readjust the currently active
29292 * playlist quickly. This is good for manual quality changes
29293 *
29294 * @private
29295 */
29296 ;
29297
29298 _proto.fastQualityChange_ = function fastQualityChange_(media) {
29299 var _this5 = this;
29300
29301 if (media === void 0) {
29302 media = this.selectPlaylist();
29303 }
29304
29305 if (media === this.masterPlaylistLoader_.media()) {
29306 this.logger_('skipping fastQualityChange because new media is same as old');
29307 return;
29308 }
29309
29310 this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
29311 // the browser a kick to remove any cached frames from the previous rendtion (.04 seconds
29312 // ahead is roughly the minimum that will accomplish this across a variety of content
29313 // in IE and Edge, but seeking in place is sufficient on all other browsers)
29314 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
29315 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
29316
29317 this.mainSegmentLoader_.resetEverything(function () {
29318 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
29319 // from the previously enabled rendition to load before the new playlist has finished loading
29320 if (videojs__default["default"].browser.IE_VERSION || videojs__default["default"].browser.IS_EDGE) {
29321 _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
29322 } else {
29323 _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
29324 }
29325 }); // don't need to reset audio as it is reset when media changes
29326 }
29327 /**
29328 * Begin playback.
29329 */
29330 ;
29331
29332 _proto.play = function play() {
29333 if (this.setupFirstPlay()) {
29334 return;
29335 }
29336
29337 if (this.tech_.ended()) {
29338 this.tech_.setCurrentTime(0);
29339 }
29340
29341 if (this.hasPlayed_) {
29342 this.load();
29343 }
29344
29345 var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
29346 // seek forward to the live point
29347
29348 if (this.tech_.duration() === Infinity) {
29349 if (this.tech_.currentTime() < seekable.start(0)) {
29350 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
29351 }
29352 }
29353 }
29354 /**
29355 * Seek to the latest media position if this is a live video and the
29356 * player and video are loaded and initialized.
29357 */
29358 ;
29359
  _proto.setupFirstPlay = function setupFirstPlay() {
    var _this6 = this;

    // Returns true when first play was fully set up here (loaders started),
    // false when setup must wait or was already done.
    var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
    // If 1) there is no active media
    // 2) the player is paused
    // 3) the first play has already been setup
    // then exit early

    if (!media || this.tech_.paused() || this.hasPlayed_) {
      return false;
    } // when the video is a live stream


    if (!media.endList) {
      var seekable = this.seekable();

      if (!seekable.length) {
        // without a seekable range, the player cannot seek to begin buffering at the live
        // point
        return false;
      }

      if (videojs__default["default"].browser.IE_VERSION && this.tech_.readyState() === 0) {
        // IE11 throws an InvalidStateError if you try to set currentTime while the
        // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
        // Note: hasPlayed_ is only set inside the deferred handler in this
        // path, and this call still returns false.
        this.tech_.one('loadedmetadata', function () {
          _this6.trigger('firstplay');

          _this6.tech_.setCurrentTime(seekable.end(0));

          _this6.hasPlayed_ = true;
        });
        return false;
      } // trigger firstplay to inform the source handler to ignore the next seek event


      this.trigger('firstplay'); // seek to the live point

      this.tech_.setCurrentTime(seekable.end(0));
    }

    this.hasPlayed_ = true; // we can begin loading now that everything is ready

    this.load();
    return true;
  }
29407 /**
29408 * handle the sourceopen event on the MediaSource
29409 *
29410 * @private
29411 */
29412 ;
29413
29414 _proto.handleSourceOpen_ = function handleSourceOpen_() {
29415 // Only attempt to create the source buffer if none already exist.
29416 // handleSourceOpen is also called when we are "re-opening" a source buffer
29417 // after `endOfStream` has been called (in response to a seek for instance)
29418 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
29419 // code in video.js but is required because play() must be invoked
29420 // *after* the media source has opened.
29421
29422 if (this.tech_.autoplay()) {
29423 var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
29424 // on browsers which return a promise
29425
29426 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
29427 playPromise.then(null, function (e) {});
29428 }
29429 }
29430
29431 this.trigger('sourceopen');
29432 }
29433 /**
29434 * handle the sourceended event on the MediaSource
29435 *
29436 * @private
29437 */
29438 ;
29439
29440 _proto.handleSourceEnded_ = function handleSourceEnded_() {
29441 if (!this.inbandTextTracks_.metadataTrack_) {
29442 return;
29443 }
29444
29445 var cues = this.inbandTextTracks_.metadataTrack_.cues;
29446
29447 if (!cues || !cues.length) {
29448 return;
29449 }
29450
29451 var duration = this.duration();
29452 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
29453 }
29454 /**
29455 * handle the durationchange event on the MediaSource
29456 *
29457 * @private
29458 */
29459 ;
29460
  _proto.handleDurationChange_ = function handleDurationChange_() {
    // Re-emit the MediaSource's durationchange on the tech so listeners on the
    // player side observe it.
    this.tech_.trigger('durationchange');
  }
29464 /**
29465 * Calls endOfStream on the media source when all active stream types have called
29466 * endOfStream
29467 *
29468 * @param {string} streamType
29469 * Stream type of the segment loader that called endOfStream
29470 * @private
29471 */
29472 ;
29473
29474 _proto.onEndOfStream = function onEndOfStream() {
29475 var isEndOfStream = this.mainSegmentLoader_.ended_;
29476
29477 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29478 var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
29479
29480 if (!mainMediaInfo || mainMediaInfo.hasVideo) {
29481 // if we do not know if the main segment loader contains video yet or if we
29482 // definitively know the main segment loader contains video, then we need to wait
29483 // for both main and audio segment loaders to call endOfStream
29484 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
29485 } else {
29486 // otherwise just rely on the audio loader
29487 isEndOfStream = this.audioSegmentLoader_.ended_;
29488 }
29489 }
29490
29491 if (!isEndOfStream) {
29492 return;
29493 }
29494
29495 this.stopABRTimer_();
29496 this.sourceUpdater_.endOfStream();
29497 }
29498 /**
29499 * Check if a playlist has stopped being updated
29500 *
29501 * @param {Object} playlist the media playlist object
29502 * @return {boolean} whether the playlist has stopped being updated or not
29503 */
29504 ;
29505
29506 _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
29507 var seekable = this.seekable();
29508
29509 if (!seekable.length) {
29510 // playlist doesn't have enough information to determine whether we are stuck
29511 return false;
29512 }
29513
29514 var expired = this.syncController_.getExpiredTime(playlist, this.duration());
29515
29516 if (expired === null) {
29517 return false;
29518 } // does not use the safe live end to calculate playlist end, since we
29519 // don't want to say we are stuck while there is still content
29520
29521
29522 var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
29523 var currentTime = this.tech_.currentTime();
29524 var buffered = this.tech_.buffered();
29525
29526 if (!buffered.length) {
29527 // return true if the playhead reached the absolute end of the playlist
29528 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
29529 }
29530
29531 var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
29532 // end of playlist
29533
29534 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
29535 }
29536 /**
29537 * Blacklists a playlist when an error occurs for a set amount of time
29538 * making it unavailable for selection by the rendition selection algorithm
29539 * and then forces a new playlist (rendition) selection.
29540 *
29541 * @param {Object=} error an optional error that may include the playlist
29542 * to blacklist
29543 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
29544 * playlist
29545 */
29546 ;
29547
  _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
    if (error === void 0) {
      error = {};
    }

    // If the `error` was generated by the playlist loader, it will contain
    // the playlist we were trying to load (but failed) and that should be
    // blacklisted instead of the currently selected playlist which is likely
    // out-of-date in this scenario
    var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
    blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
    // trying to load the master OR while we were disposing of the tech

    if (!currentPlaylist) {
      this.error = error;

      // a closed media source cannot signal end-of-stream, so surface the
      // error directly instead
      if (this.mediaSource.readyState !== 'open') {
        this.trigger('error');
      } else {
        this.sourceUpdater_.endOfStream('network');
      }

      return;
    }

    currentPlaylist.playlistErrors_++;
    var playlists = this.masterPlaylistLoader_.master.playlists;
    var enabledPlaylists = playlists.filter(isEnabled);
    var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
    // forever

    if (playlists.length === 1 && blacklistDuration !== Infinity) {
      videojs__default["default"].log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
      this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay

      return this.masterPlaylistLoader_.load(isFinalRendition);
    }

    if (isFinalRendition) {
      // Since we're on the final non-blacklisted playlist, and we're about to blacklist
      // it, instead of erring the player or retrying this playlist, clear out the current
      // blacklist. This allows other playlists to be attempted in case any have been
      // fixed.
      var reincluded = false;
      playlists.forEach(function (playlist) {
        // skip current playlist which is about to be blacklisted
        if (playlist === currentPlaylist) {
          return;
        }

        var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.

        if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
          reincluded = true;
          delete playlist.excludeUntil;
        }
      });

      if (reincluded) {
        videojs__default["default"].log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
        // playlist. This is needed for users relying on the retryplaylist event to catch a
        // case where the player might be stuck and looping through "dead" playlists.

        this.tech_.trigger('retryplaylist');
      }
    } // Blacklist this playlist


    // exceeding maxPlaylistRetries excludes the playlist permanently;
    // otherwise it is excluded for blacklistDuration seconds
    var excludeUntil;

    if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
      excludeUntil = Infinity;
    } else {
      excludeUntil = Date.now() + blacklistDuration * 1000;
    }

    currentPlaylist.excludeUntil = excludeUntil;

    if (error.reason) {
      currentPlaylist.lastExcludeReason_ = error.reason;
    }

    this.tech_.trigger('blacklistplaylist');
    this.tech_.trigger({
      type: 'usage',
      name: 'vhs-rendition-blacklisted'
    });
    this.tech_.trigger({
      type: 'usage',
      name: 'hls-rendition-blacklisted'
    }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
    // Would be something like media().id !=== currentPlaylist.id and we would need something
    // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
    // from loading a new playlist on any blacklist.
    // Select a new playlist

    var nextPlaylist = this.selectPlaylist();

    if (!nextPlaylist) {
      this.error = 'Playback cannot continue. No available working or supported playlists.';
      this.trigger('error');
      return;
    }

    // internal errors are logged quietly; external problems get a console warning
    var logFn = error.internal ? this.logger_ : videojs__default["default"].log.warn;
    var errorMessage = error.message ? ' ' + error.message : '';
    logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders

    if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
      this.delegateLoaders_('audio', ['abort', 'pause']);
    } // if subtitle group changed reset subtitle loaders


    if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
      this.delegateLoaders_('subtitle', ['abort', 'pause']);
    }

    this.delegateLoaders_('main', ['abort', 'pause']);
    var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
    var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh is sooner than half targetDuration

    return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
  }
29671 /**
29672 * Pause all segment/playlist loaders
29673 */
29674 ;
29675
  _proto.pauseLoading = function pauseLoading() {
    // Abort in-flight requests and pause every playlist/segment loader, then
    // stop the ABR timer so no further rendition checks are scheduled.
    this.delegateLoaders_('all', ['abort', 'pause']);
    this.stopABRTimer_();
  }
29680 /**
29681 * Call a set of functions in order on playlist loaders, segment loaders,
29682 * or both types of loaders.
29683 *
29684 * @param {string} filter
29685 * Filter loaders that should call fnNames using a string. Can be:
29686 * * all - run on all loaders
29687 * * audio - run on all audio loaders
29688 * * subtitle - run on all subtitle loaders
29689 * * main - run on the main/master loaders
29690 *
29691 * @param {Array|string} fnNames
29692 * A string or array of function names to call.
29693 */
29694 ;
29695
29696 _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
29697 var _this7 = this;
29698
29699 var loaders = [];
29700 var dontFilterPlaylist = filter === 'all';
29701
29702 if (dontFilterPlaylist || filter === 'main') {
29703 loaders.push(this.masterPlaylistLoader_);
29704 }
29705
29706 var mediaTypes = [];
29707
29708 if (dontFilterPlaylist || filter === 'audio') {
29709 mediaTypes.push('AUDIO');
29710 }
29711
29712 if (dontFilterPlaylist || filter === 'subtitle') {
29713 mediaTypes.push('CLOSED-CAPTIONS');
29714 mediaTypes.push('SUBTITLES');
29715 }
29716
29717 mediaTypes.forEach(function (mediaType) {
29718 var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
29719
29720 if (loader) {
29721 loaders.push(loader);
29722 }
29723 });
29724 ['main', 'audio', 'subtitle'].forEach(function (name) {
29725 var loader = _this7[name + "SegmentLoader_"];
29726
29727 if (loader && (filter === name || filter === 'all')) {
29728 loaders.push(loader);
29729 }
29730 });
29731 loaders.forEach(function (loader) {
29732 return fnNames.forEach(function (fnName) {
29733 if (typeof loader[fnName] === 'function') {
29734 loader[fnName]();
29735 }
29736 });
29737 });
29738 }
29739 /**
29740 * set the current time on all segment loaders
29741 *
29742 * @param {TimeRange} currentTime the current time to set
29743 * @return {TimeRange} the current time
29744 */
29745 ;
29746
29747 _proto.setCurrentTime = function setCurrentTime(currentTime) {
29748 var buffered = findRange(this.tech_.buffered(), currentTime);
29749
29750 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
29751 // return immediately if the metadata is not ready yet
29752 return 0;
29753 } // it's clearly an edge-case but don't thrown an error if asked to
29754 // seek within an empty playlist
29755
29756
29757 if (!this.masterPlaylistLoader_.media().segments) {
29758 return 0;
29759 } // if the seek location is already buffered, continue buffering as usual
29760
29761
29762 if (buffered && buffered.length) {
29763 return currentTime;
29764 } // cancel outstanding requests so we begin buffering at the new
29765 // location
29766
29767
29768 this.mainSegmentLoader_.resetEverything();
29769 this.mainSegmentLoader_.abort();
29770
29771 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29772 this.audioSegmentLoader_.resetEverything();
29773 this.audioSegmentLoader_.abort();
29774 }
29775
29776 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
29777 this.subtitleSegmentLoader_.resetEverything();
29778 this.subtitleSegmentLoader_.abort();
29779 } // start segment loader loading in case they are paused
29780
29781
29782 this.load();
29783 }
29784 /**
29785 * get the current duration
29786 *
29787 * @return {TimeRange} the duration
29788 */
29789 ;
29790
29791 _proto.duration = function duration() {
29792 if (!this.masterPlaylistLoader_) {
29793 return 0;
29794 }
29795
29796 var media = this.masterPlaylistLoader_.media();
29797
29798 if (!media) {
29799 // no playlists loaded yet, so can't determine a duration
29800 return 0;
29801 } // Don't rely on the media source for duration in the case of a live playlist since
29802 // setting the native MediaSource's duration to infinity ends up with consequences to
29803 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
29804 //
29805 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
29806 // however, few browsers have support for setLiveSeekableRange()
29807 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
29808 //
29809 // Until a time when the duration of the media source can be set to infinity, and a
29810 // seekable range specified across browsers, just return Infinity.
29811
29812
29813 if (!media.endList) {
29814 return Infinity;
29815 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
29816 // available). If it's not available, fall back to a playlist-calculated estimate.
29817
29818
29819 if (this.mediaSource) {
29820 return this.mediaSource.duration;
29821 }
29822
29823 return Vhs$1.Playlist.duration(media);
29824 }
29825 /**
29826 * check the seekable range
29827 *
29828 * @return {TimeRange} the seekable range
29829 */
29830 ;
29831
  _proto.seekable = function seekable() {
    // seekable_ is recomputed by onSyncInfoUpdate_; this just returns the cache
    return this.seekable_;
  };
29835
  /**
   * Recalculate the seekable range from the main (and, when present, audio)
   * playlists, cache it on this.seekable_, and fire 'seekablechanged' on the
   * tech when it actually changed.
   */
  _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
    var audioSeekable; // TODO check for creation of both source buffers before updating seekable
    //
    // A fix was made to this function where a check for
    // this.sourceUpdater_.hasCreatedSourceBuffers
    // was added to ensure that both source buffers were created before seekable was
    // updated. However, it originally had a bug where it was checking for a true and
    // returning early instead of checking for false. Setting it to check for false to
    // return early though created other issues. A call to play() would check for seekable
    // end without verifying that a seekable range was present. In addition, even checking
    // for that didn't solve some issues, as handleFirstPlay is sometimes worked around
    // due to a media update calling load on the segment loaders, skipping a seek to live,
    // thereby starting live streams at the beginning of the stream rather than at the end.
    //
    // This conditional should be fixed to wait for the creation of two source buffers at
    // the same time as the other sections of code are fixed to properly seek to live and
    // not throw an error due to checking for a seekable end when no seekable range exists.
    //
    // For now, fall back to the older behavior, with the understanding that the seekable
    // range may not be completely correct, leading to a suboptimal initial live point.

    if (!this.masterPlaylistLoader_) {
      return;
    }

    var media = this.masterPlaylistLoader_.media();

    // no media playlist selected yet; nothing to compute from
    if (!media) {
      return;
    }

    var expired = this.syncController_.getExpiredTime(media, this.duration());

    if (expired === null) {
      // not enough information to update seekable
      return;
    }

    var master = this.masterPlaylistLoader_.master;
    var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));

    if (mainSeekable.length === 0) {
      return;
    }

    // when an alternate audio playlist is active, its seekable range must be
    // intersected with the main range below
    if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
      media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
      expired = this.syncController_.getExpiredTime(media, this.duration());

      if (expired === null) {
        return;
      }

      audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));

      if (audioSeekable.length === 0) {
        return;
      }
    }

    // remember the previous range so a no-op update can be skipped below
    var oldEnd;
    var oldStart;

    if (this.seekable_ && this.seekable_.length) {
      oldEnd = this.seekable_.end(0);
      oldStart = this.seekable_.start(0);
    }

    if (!audioSeekable) {
      // seekable has been calculated based on buffering video data so it
      // can be returned directly
      this.seekable_ = mainSeekable;
    } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
      // seekables are pretty far off, rely on main
      this.seekable_ = mainSeekable;
    } else {
      // otherwise use the intersection of the main and audio seekable ranges
      this.seekable_ = videojs__default["default"].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
    } // seekable is the same as last time


    if (this.seekable_ && this.seekable_.length) {
      if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
        return;
      }
    }

    this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
    this.tech_.trigger('seekablechanged');
  }
29925 /**
29926 * Update the player duration
29927 */
29928 ;
29929
  _proto.updateDuration = function updateDuration(isLive) {
    // drop any previously-deferred update that is still waiting on sourceopen
    if (this.updateDuration_) {
      this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
      this.updateDuration_ = null;
    }

    // the media source is not open yet: defer this update (with isLive bound)
    // until the sourceopen event fires
    if (this.mediaSource.readyState !== 'open') {
      this.updateDuration_ = this.updateDuration.bind(this, isLive);
      this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
      return;
    }

    if (isLive) {
      var seekable = this.seekable();

      // no seekable range yet; nothing to base a duration on
      if (!seekable.length) {
        return;
      } // Even in the case of a live playlist, the native MediaSource's duration should not
      // be set to Infinity (even though this would be expected for a live playlist), since
      // setting the native MediaSource's duration to infinity ends up with consequences to
      // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
      //
      // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
      // however, few browsers have support for setLiveSeekableRange()
      // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
      //
      // Until a time when the duration of the media source can be set to infinity, and a
      // seekable range specified across browsers, the duration should be greater than or
      // equal to the last possible seekable value.
      // MediaSource duration starts as NaN
      // It is possible (and probable) that this case will never be reached for many
      // sources, since the MediaSource reports duration as the highest value without
      // accounting for timestamp offset. For example, if the timestamp offset is -100 and
      // we buffered times 0 to 100 with real times of 100 to 200, even though current
      // time will be between 0 and 100, the native media source may report the duration
      // as 200. However, since we report duration separate from the media source (as
      // Infinity), and as long as the native media source duration value is greater than
      // our reported seekable range, seeks will work as expected. The large number as
      // duration for live is actually a strategy used by some players to work around the
      // issue of live seekable ranges cited above.


      if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
        this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
      }

      return;
    }

    // VOD: use the larger of the playlist-calculated duration and the end of
    // the buffered range
    var buffered = this.tech_.buffered();
    var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());

    if (buffered.length > 0) {
      duration = Math.max(duration, buffered.end(buffered.length - 1));
    }

    if (this.mediaSource.duration !== duration) {
      this.sourceUpdater_.setDuration(duration);
    }
  }
29990 /**
29991 * dispose of the MasterPlaylistController and everything
29992 * that it controls
29993 */
29994 ;
29995
29996 _proto.dispose = function dispose() {
29997 var _this8 = this;
29998
29999 this.trigger('dispose');
30000 this.decrypter_.terminate();
30001 this.masterPlaylistLoader_.dispose();
30002 this.mainSegmentLoader_.dispose();
30003
30004 if (this.loadOnPlay_) {
30005 this.tech_.off('play', this.loadOnPlay_);
30006 }
30007
30008 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
30009 var groups = _this8.mediaTypes_[type].groups;
30010
30011 for (var id in groups) {
30012 groups[id].forEach(function (group) {
30013 if (group.playlistLoader) {
30014 group.playlistLoader.dispose();
30015 }
30016 });
30017 }
30018 });
30019 this.audioSegmentLoader_.dispose();
30020 this.subtitleSegmentLoader_.dispose();
30021 this.sourceUpdater_.dispose();
30022 this.timelineChangeController_.dispose();
30023 this.stopABRTimer_();
30024
30025 if (this.updateDuration_) {
30026 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
30027 }
30028
30029 this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_); // load the media source into the player
30030
30031 this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
30032 this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
30033 this.off();
30034 }
30035 /**
30036 * return the master playlist object if we have one
30037 *
30038 * @return {Object} the master playlist object that we parsed
30039 */
30040 ;
30041
  _proto.master = function master() {
    // the parsed master playlist object is held by the master playlist loader
    return this.masterPlaylistLoader_.master;
  }
30045 /**
30046 * return the currently selected playlist
30047 *
30048 * @return {Object} the currently selected playlist object that we parsed
30049 */
30050 ;
30051
30052 _proto.media = function media() {
30053 // playlist loader will not return media if it has not been fully loaded
30054 return this.masterPlaylistLoader_.media() || this.initialMedia_;
30055 };
30056
30057 _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
30058 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
30059 var hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using an audio loader, then we have audio media info
30060 // otherwise check on the segment loader.
30061
30062 var hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders has not loaded sufficently to get codecs
30063
30064 if (!hasMainMediaInfo || !hasAudioMediaInfo) {
30065 return false;
30066 }
30067
30068 return true;
30069 };
30070
  /**
   * Determine the audio/video codec strings to create source buffers with, or
   * blacklist the current playlist (returning undefined) when no playable
   * codec combination can be found.
   *
   * @return {Object|undefined} {video, audio} codec strings, or undefined if
   *         the playlist was blacklisted
   */
  _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
    var _this9 = this;

    var media = {
      main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
      audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
    }; // set "main" media equal to video

    media.video = media.main;
    var playlistCodecs = codecsForPlaylist(this.master(), this.media());
    var codecs = {};
    var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;

    // prefer playlist-declared codecs, then probed codecs, then defaults
    if (media.main.hasVideo) {
      codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
    }

    // muxed content carries both streams in one buffer: append the audio
    // codec to the video codec string
    if (media.main.isMuxed) {
      codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC);
    }

    if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
      codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below

      media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
    } // no codecs, no playback.


    if (!codecs.audio && !codecs.video) {
      this.blacklistCurrentPlaylist({
        playlist: this.media(),
        message: 'Could not determine codecs for playlist.',
        blacklistDuration: Infinity
      });
      return;
    } // fmp4 relies on browser support, while ts relies on muxer support


    var supportFunction = function supportFunction(isFmp4, codec) {
      return isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
    };

    // collect unsupported codecs keyed by which component ('browser'/'muxer')
    // rejected them
    var unsupportedCodecs = {};
    var unsupportedAudio;
    ['video', 'audio'].forEach(function (type) {
      if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
        var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
        unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
        unsupportedCodecs[supporter].push(codecs[type]);

        if (type === 'audio') {
          unsupportedAudio = supporter;
        }
      }
    });

    // an unsupported alternate-audio codec taints every variant that uses the
    // same audio group, so exclude all of them
    if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
      var audioGroup = this.media().attributes.AUDIO;
      this.master().playlists.forEach(function (variant) {
        var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;

        if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
          variant.excludeUntil = Infinity;
        }
      });
      this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
    } // if we have any unsupported codecs blacklist this playlist.


    if (Object.keys(unsupportedCodecs).length) {
      var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
        if (acc) {
          acc += ', ';
        }

        acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
        return acc;
      }, '') + '.';
      this.blacklistCurrentPlaylist({
        playlist: this.media(),
        internal: true,
        message: message,
        blacklistDuration: Infinity
      });
      return;
    } // check if codec switching is happening


    if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
      var switchMessages = [];
      ['video', 'audio'].forEach(function (type) {
        var newCodec = (parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
        var oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;

        if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
          switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
        }
      });

      if (switchMessages.length) {
        this.blacklistCurrentPlaylist({
          playlist: this.media(),
          message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
          blacklistDuration: Infinity,
          internal: true
        });
        return;
      }
    } // TODO: when using the muxer shouldn't we just return
    // the codecs that the muxer outputs?


    return codecs;
  }
30185 /**
 * Create source buffers and exclude any incompatible renditions.
30187 *
30188 * @private
30189 */
30190 ;
30191
30192 _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
30193 // media source is not ready yet or sourceBuffers are already
30194 // created.
30195 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
30196 return;
30197 }
30198
30199 if (!this.areMediaTypesKnown_()) {
30200 return;
30201 }
30202
30203 var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
30204
30205 if (!codecs) {
30206 return;
30207 }
30208
30209 this.sourceUpdater_.createSourceBuffers(codecs);
30210 var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
30211 this.excludeIncompatibleVariants_(codecString);
30212 }
30213 /**
30214 * Excludes playlists with codecs that are unsupported by the muxer and browser.
30215 */
30216 ;
30217
30218 _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
30219 var _this10 = this;
30220
30221 var playlists = this.master().playlists;
30222 var ids = []; // TODO: why don't we have a property to loop through all
30223 // playlist? Why did we ever mix indexes and keys?
30224
30225 Object.keys(playlists).forEach(function (key) {
30226 var variant = playlists[key]; // check if we already processed this playlist.
30227
30228 if (ids.indexOf(variant.id) !== -1) {
30229 return;
30230 }
30231
30232 ids.push(variant.id);
30233 var codecs = codecsForPlaylist(_this10.master, variant);
30234 var unsupported = [];
30235
30236 if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
30237 unsupported.push("audio codec " + codecs.audio);
30238 }
30239
30240 if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
30241 unsupported.push("video codec " + codecs.video);
30242 }
30243
30244 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
30245 unsupported.push("text codec " + codecs.text);
30246 }
30247
30248 if (unsupported.length) {
30249 variant.excludeUntil = Infinity;
30250
30251 _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
30252 }
30253 });
30254 }
30255 /**
30256 * Blacklist playlists that are known to be codec or
30257 * stream-incompatible with the SourceBuffer configuration. For
30258 * instance, Media Source Extensions would cause the video element to
30259 * stall waiting for video data if you switched from a variant with
30260 * video and audio to an audio-only one.
30261 *
30262 * @param {Object} media a media playlist compatible with the current
30263 * set of SourceBuffers. Variants in the current master playlist that
30264 * do not appear to have compatible codec or stream configurations
30265 * will be excluded from the default playlist selection algorithm
30266 * indefinitely.
30267 * @private
30268 */
30269 ;
30270
  _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
    var _this11 = this;

    var ids = [];
    var playlists = this.master().playlists;
    // parse the reference codec string into {video, audio} details to compare
    // each variant against
    var codecs = unwrapCodecList(parseCodecs(codecString));
    var codecCount_ = codecCount(codecs);
    var videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
    var audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
    Object.keys(playlists).forEach(function (key) {
      var variant = playlists[key]; // check if we already processed this playlist.
      // or it if it is already excluded forever.

      if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
        return;
      }

      ids.push(variant.id);
      var blacklistReasons = []; // get codecs from the playlist for this variant

      var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
      var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
      // variant is incompatible. Wait for mux.js to probe

      if (!variantCodecs.audio && !variantCodecs.video) {
        return;
      } // TODO: we can support this by removing the
      // old media source and creating a new one, but it will take some work.
      // The number of streams cannot change


      if (variantCodecCount !== codecCount_) {
        blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
      } // only exclude playlists by codec change, if codecs cannot switch
      // during playback.


      if (!_this11.sourceUpdater_.canChangeType()) {
        var variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
        var variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change

        if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
          blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
        } // the audio codec cannot change


        if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
          blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
        }
      }

      if (blacklistReasons.length) {
        variant.excludeUntil = Infinity;

        _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
      }
    });
  };
30329
30330 _proto.updateAdCues_ = function updateAdCues_(media) {
30331 var offset = 0;
30332 var seekable = this.seekable();
30333
30334 if (seekable.length) {
30335 offset = seekable.start(0);
30336 }
30337
30338 updateAdCues(media, this.cueTagsTrack_, offset);
30339 }
30340 /**
30341 * Calculates the desired forward buffer length based on current time
30342 *
30343 * @return {number} Desired forward buffer length in seconds
30344 */
30345 ;
30346
30347 _proto.goalBufferLength = function goalBufferLength() {
30348 var currentTime = this.tech_.currentTime();
30349 var initial = Config.GOAL_BUFFER_LENGTH;
30350 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
30351 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
30352 return Math.min(initial + currentTime * rate, max);
30353 }
30354 /**
30355 * Calculates the desired buffer low water line based on current time
30356 *
30357 * @return {number} Desired buffer low water line in seconds
30358 */
30359 ;
30360
30361 _proto.bufferLowWaterLine = function bufferLowWaterLine() {
30362 var currentTime = this.tech_.currentTime();
30363 var initial = Config.BUFFER_LOW_WATER_LINE;
30364 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
30365 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
30366 var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
30367 return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
30368 };
30369
  _proto.bufferHighWaterLine = function bufferHighWaterLine() {
    // unlike the low water line, this is a static configured value that does
    // not scale with current time
    return Config.BUFFER_HIGH_WATER_LINE;
  };
30373
30374 return MasterPlaylistController;
30375 }(videojs__default["default"].EventTarget);
30376
30377 /**
30378 * Returns a function that acts as the Enable/disable playlist function.
30379 *
30380 * @param {PlaylistLoader} loader - The master playlist loader
30381 * @param {string} playlistID - id of the playlist
30382 * @param {Function} changePlaylistFn - A function to be called after a
30383 * playlist's enabled-state has been changed. Will NOT be called if a
30384 * playlist's enabled-state is unchanged
30385 * @param {boolean=} enable - Value to set the playlist enabled-state to
30386 * or if undefined returns the current enabled-state for the playlist
30387 * @return {Function} Function for setting/getting enabled
30388 */
30389
30390 var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
30391 return function (enable) {
30392 var playlist = loader.master.playlists[playlistID];
30393 var incompatible = isIncompatible(playlist);
30394 var currentlyEnabled = isEnabled(playlist);
30395
30396 if (typeof enable === 'undefined') {
30397 return currentlyEnabled;
30398 }
30399
30400 if (enable) {
30401 delete playlist.disabled;
30402 } else {
30403 playlist.disabled = true;
30404 }
30405
30406 if (enable !== currentlyEnabled && !incompatible) {
30407 // Ensure the outside world knows about our changes
30408 changePlaylistFn();
30409
30410 if (enable) {
30411 loader.trigger('renditionenabled');
30412 } else {
30413 loader.trigger('renditiondisabled');
30414 }
30415 }
30416
30417 return enable;
30418 };
30419 };
30420 /**
30421 * The representation object encapsulates the publicly visible information
30422 * in a media playlist along with a setter/getter-type function (enabled)
30423 * for changing the enabled-state of a particular playlist entry
30424 *
30425 * @class Representation
30426 */
30427
30428
30429 var Representation = function Representation(vhsHandler, playlist, id) {
30430 var mpc = vhsHandler.masterPlaylistController_,
30431 smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
30432
30433 var changeType = smoothQualityChange ? 'smooth' : 'fast';
30434 var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
30435
30436 if (playlist.attributes) {
30437 var resolution = playlist.attributes.RESOLUTION;
30438 this.width = resolution && resolution.width;
30439 this.height = resolution && resolution.height;
30440 this.bandwidth = playlist.attributes.BANDWIDTH;
30441 this.frameRate = playlist.attributes['FRAME-RATE'];
30442 }
30443
30444 this.codecs = codecsForPlaylist(mpc.master(), playlist);
30445 this.playlist = playlist; // The id is simply the ordinality of the media playlist
30446 // within the master playlist
30447
30448 this.id = id; // Partially-apply the enableFunction to create a playlist-
30449 // specific variant
30450
30451 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
30452 };
30453 /**
30454 * A mixin function that adds the `representations` api to an instance
30455 * of the VhsHandler class
30456 *
30457 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
30458 * representation API into
30459 */
30460
30461
30462 var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
30463 // Add a single API-specific function to the VhsHandler instance
30464 vhsHandler.representations = function () {
30465 var master = vhsHandler.masterPlaylistController_.master();
30466 var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
30467
30468 if (!playlists) {
30469 return [];
30470 }
30471
30472 return playlists.filter(function (media) {
30473 return !isIncompatible(media);
30474 }).map(function (e, i) {
30475 return new Representation(vhsHandler, e, e.id);
30476 });
30477 };
30478 };
30479
30480 /**
30481 * @file playback-watcher.js
30482 *
30483 * Playback starts, and now my watch begins. It shall not end until my death. I shall
30484 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
30485 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
30486 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
30487 * my life and honor to the Playback Watch, for this Player and all the Players to come.
30488 */
30489
  // tech events on which any pending playback-watcher timer is cancelled
  var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
30491 /**
30492 * @class PlaybackWatcher
30493 */
30494
30495 var PlaybackWatcher = /*#__PURE__*/function () {
30496 /**
30497 * Represents an PlaybackWatcher object.
30498 *
30499 * @class
30500 * @param {Object} options an object that includes the tech and settings
30501 */
  function PlaybackWatcher(options) {
    var _this = this;

    this.masterPlaylistController_ = options.masterPlaylistController;
    this.tech_ = options.tech;
    this.seekable = options.seekable;
    this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
    this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
    this.media = options.media;
    // consecutiveUpdates / lastRecordedTime track whether currentTime has
    // stalled between checks
    this.consecutiveUpdates = 0;
    this.lastRecordedTime = null;
    this.timer_ = null;
    this.checkCurrentTimeTimeout_ = null;
    this.logger_ = logger('PlaybackWatcher');
    this.logger_('initialize');

    var playHandler = function playHandler() {
      return _this.monitorCurrentTime_();
    };

    var canPlayHandler = function canPlayHandler() {
      return _this.monitorCurrentTime_();
    };

    var waitingHandler = function waitingHandler() {
      return _this.techWaiting_();
    };

    var cancelTimerHandler = function cancelTimerHandler() {
      return _this.cancelTimer_();
    };

    var mpc = this.masterPlaylistController_;
    var loaderTypes = ['main', 'subtitle', 'audio'];
    // per-loader-type reset/updateend callbacks, kept so they can be removed
    // again in dispose below
    var loaderChecks = {};
    loaderTypes.forEach(function (type) {
      loaderChecks[type] = {
        reset: function reset() {
          return _this.resetSegmentDownloads_(type);
        },
        updateend: function updateend() {
          return _this.checkSegmentDownloads_(type);
        }
      };
      mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
      // isn't changing we want to reset. We cannot assume that the new rendition
      // will also be stalled, until after new appends.

      mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
      // This prevents one segment playlists (single vtt or single segment content)
      // from being detected as stalling. As the buffer will not change in those cases, since
      // the buffer is the entire video duration.

      _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
    });
    /**
     * We check if a seek was into a gap through the following steps:
     * 1. We get a seeking event and we do not get a seeked event. This means that
     *    a seek was attempted but not completed.
     * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
     *    removed everything from our buffer and appended a segment, and should be ready
     *    to check for gaps.
     */

    var setSeekingHandlers = function setSeekingHandlers(fn) {
      ['main', 'audio'].forEach(function (type) {
        mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
      });
    };

    this.seekingAppendCheck_ = function () {
      if (_this.fixesBadSeeks_()) {
        _this.consecutiveUpdates = 0;
        _this.lastRecordedTime = _this.tech_.currentTime();
        setSeekingHandlers('off');
      }
    };

    this.clearSeekingAppendCheck_ = function () {
      return setSeekingHandlers('off');
    };

    this.watchForBadSeeking_ = function () {
      _this.clearSeekingAppendCheck_();

      setSeekingHandlers('on');
    };

    this.tech_.on('seeked', this.clearSeekingAppendCheck_);
    this.tech_.on('seeking', this.watchForBadSeeking_);
    this.tech_.on('waiting', waitingHandler);
    this.tech_.on(timerCancelEvents, cancelTimerHandler);
    this.tech_.on('canplay', canPlayHandler);
    /*
      An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
      is surfaced in one of two ways:
      1)  The `waiting` event is fired before the player has buffered content, making it impossible
          to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
          we can check if playback is stalled due to a gap, and skip the gap if necessary.
      2)  A source with a gap at the beginning of the stream is loaded programatically while the player
          is in a playing state. To catch this case, it's important that our one-time play listener is setup
          even if the player is in a playing state
    */

    this.tech_.one('play', playHandler); // Define the dispose function to clean up our events

    this.dispose = function () {
      _this.clearSeekingAppendCheck_();

      _this.logger_('dispose');

      _this.tech_.off('waiting', waitingHandler);

      _this.tech_.off(timerCancelEvents, cancelTimerHandler);

      _this.tech_.off('canplay', canPlayHandler);

      _this.tech_.off('play', playHandler);

      _this.tech_.off('seeking', _this.watchForBadSeeking_);

      _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);

      loaderTypes.forEach(function (type) {
        mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
        mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);

        _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
      });

      if (_this.checkCurrentTimeTimeout_) {
        window.clearTimeout(_this.checkCurrentTimeTimeout_);
      }

      _this.cancelTimer_();
    };
  }
30639 /**
30640 * Periodically check current time to see if playback stopped
30641 *
30642 * @private
30643 */
30644
30645
  // PlaybackWatcher instance methods are attached to the prototype below.
  var _proto = PlaybackWatcher.prototype;
30647
30648 _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
30649 this.checkCurrentTime_();
30650
30651 if (this.checkCurrentTimeTimeout_) {
30652 window.clearTimeout(this.checkCurrentTimeTimeout_);
30653 } // 42 = 24 fps // 250 is what Webkit uses // FF uses 15
30654
30655
30656 this.checkCurrentTimeTimeout_ = window.setTimeout(this.monitorCurrentTime_.bind(this), 250);
30657 }
30658 /**
30659 * Reset stalled download stats for a specific type of loader
30660 *
30661 * @param {string} type
30662 * The segment loader type to check.
30663 *
30664 * @listens SegmentLoader#playlistupdate
30665 * @listens Tech#seeking
30666 * @listens Tech#seeked
30667 */
30668 ;
30669
30670 _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
30671 var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
30672
30673 if (this[type + "StalledDownloads_"] > 0) {
30674 this.logger_("resetting possible stalled download count for " + type + " loader");
30675 }
30676
30677 this[type + "StalledDownloads_"] = 0;
30678 this[type + "Buffered_"] = loader.buffered_();
30679 }
30680 /**
30681 * Checks on every segment `appendsdone` to see
30682 * if segment appends are making progress. If they are not
30683 * and we are still downloading bytes. We blacklist the playlist.
30684 *
30685 * @param {string} type
30686 * The segment loader type to check.
30687 *
30688 * @listens SegmentLoader#appendsdone
30689 */
30690 ;
30691
  _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
    var mpc = this.masterPlaylistController_;
    var loader = mpc[type + "SegmentLoader_"];
    var buffered = loader.buffered_();
    var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
    this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue or
    // the buffered value for this loader changed
    // appends are working

    if (isBufferedDifferent) {
      // buffer grew (or changed) since the last append, so downloads are
      // progressing; clear the stall counter and re-snapshot.
      this.resetSegmentDownloads_(type);
      return;
    }

    this[type + "StalledDownloads_"]++;
    this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
      playlistId: loader.playlist_ && loader.playlist_.id,
      buffered: timeRangesToArray(buffered)
    }); // after 10 possibly stalled appends with no reset, exclude

    if (this[type + "StalledDownloads_"] < 10) {
      return;
    }

    this.logger_(type + " loader stalled download exclusion");
    this.resetSegmentDownloads_(type);
    // surface a usage event so integrators can monitor exclusions
    this.tech_.trigger({
      type: 'usage',
      name: "vhs-" + type + "-download-exclusion"
    });

    // subtitle loaders never exclude the main playlist
    if (type === 'subtitle') {
      return;
    } // TODO: should we exclude audio tracks rather than main tracks
    // when type is audio?


    // Infinity makes the exclusion permanent for this playlist.
    mpc.blacklistCurrentPlaylist({
      message: "Excessive " + type + " segment downloading detected."
    }, Infinity);
  }
30733 /**
30734 * The purpose of this function is to emulate the "waiting" event on
30735 * browsers that do not emit it when they are waiting for more
30736 * data to continue playback
30737 *
30738 * @private
30739 */
30740 ;
30741
30742 _proto.checkCurrentTime_ = function checkCurrentTime_() {
30743 if (this.tech_.paused() || this.tech_.seeking()) {
30744 return;
30745 }
30746
30747 var currentTime = this.tech_.currentTime();
30748 var buffered = this.tech_.buffered();
30749
30750 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
30751 // If current time is at the end of the final buffered region, then any playback
30752 // stall is most likely caused by buffering in a low bandwidth environment. The tech
30753 // should fire a `waiting` event in this scenario, but due to browser and tech
30754 // inconsistencies. Calling `techWaiting_` here allows us to simulate
30755 // responding to a native `waiting` event when the tech fails to emit one.
30756 return this.techWaiting_();
30757 }
30758
30759 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
30760 this.consecutiveUpdates++;
30761 this.waiting_();
30762 } else if (currentTime === this.lastRecordedTime) {
30763 this.consecutiveUpdates++;
30764 } else {
30765 this.consecutiveUpdates = 0;
30766 this.lastRecordedTime = currentTime;
30767 }
30768 }
30769 /**
30770 * Cancels any pending timers and resets the 'timeupdate' mechanism
30771 * designed to detect that we are stalled
30772 *
30773 * @private
30774 */
30775 ;
30776
30777 _proto.cancelTimer_ = function cancelTimer_() {
30778 this.consecutiveUpdates = 0;
30779
30780 if (this.timer_) {
30781 this.logger_('cancelTimer_');
30782 clearTimeout(this.timer_);
30783 }
30784
30785 this.timer_ = null;
30786 }
30787 /**
30788 * Fixes situations where there's a bad seek
30789 *
30790 * @return {boolean} whether an action was taken to fix the seek
30791 * @private
30792 */
30793 ;
30794
  _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
    var seeking = this.tech_.seeking();

    // only a pending seek can be a "bad" seek
    if (!seeking) {
      return false;
    } // TODO: It's possible that these seekable checks should be moved out of this function
    // and into a function that runs on seekablechange. It's also possible that we only need
    // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
    // seekable range.


    var seekable = this.seekable();
    var currentTime = this.tech_.currentTime();
    var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
    var seekTo;

    if (isAfterSeekableRange) {
      var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)

      seekTo = seekableEnd;
    }

    if (this.beforeSeekableWindow_(seekable, currentTime)) {
      var seekableStart = seekable.start(0); // sync to the beginning of the live window
      // provide a buffer of .1 seconds to handle rounding/imprecise numbers

      seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
      // happen in live with a 3 segment playlist), then don't use a time delta
      seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
    }

    // a seek outside the seekable window is corrected immediately
    if (typeof seekTo !== 'undefined') {
      this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
      this.tech_.setCurrentTime(seekTo);
      return true;
    }

    var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
    var buffered = this.tech_.buffered();
    var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
    var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
    var media = this.media(); // verify that at least two segment durations or one part duration have been
    // appended before checking for a gap.

    var minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2; // verify that at least two segment durations have been
    // appended before checking for a gap.

    var bufferedToCheck = [audioBuffered, videoBuffered];

    for (var i = 0; i < bufferedToCheck.length; i++) {
      // skip null buffered (content without that media type appended)
      if (!bufferedToCheck[i]) {
        continue;
      }

      var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations or one part
      // duration behind we haven't appended enough to call this a bad seek.

      if (timeAhead < minAppendedDuration) {
        return false;
      }
    }

    var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
    // to seek over the gap

    if (nextRange.length === 0) {
      return false;
    }

    // seek just past the start of the next buffered region to clear the gap
    seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
    this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
    this.tech_.setCurrentTime(seekTo);
    return true;
  }
30870 /**
30871 * Handler for situations when we determine the player is waiting.
30872 *
30873 * @private
30874 */
30875 ;
30876
30877 _proto.waiting_ = function waiting_() {
30878 if (this.techWaiting_()) {
30879 return;
30880 } // All tech waiting checks failed. Use last resort correction
30881
30882
30883 var currentTime = this.tech_.currentTime();
30884 var buffered = this.tech_.buffered();
30885 var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
30886 // region with no indication that anything is amiss (seen in Firefox). Seeking to
30887 // currentTime is usually enough to kickstart the player. This checks that the player
30888 // is currently within a buffered region before attempting a corrective seek.
30889 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
30890 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
30891 // make sure there is ~3 seconds of forward buffer before taking any corrective action
30892 // to avoid triggering an `unknownwaiting` event when the network is slow.
30893
30894 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
30895 this.cancelTimer_();
30896 this.tech_.setCurrentTime(currentTime);
30897 this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
30898
30899 this.tech_.trigger({
30900 type: 'usage',
30901 name: 'vhs-unknown-waiting'
30902 });
30903 this.tech_.trigger({
30904 type: 'usage',
30905 name: 'hls-unknown-waiting'
30906 });
30907 return;
30908 }
30909 }
30910 /**
30911 * Handler for situations when the tech fires a `waiting` event
30912 *
30913 * @return {boolean}
30914 * True if an action (or none) was needed to correct the waiting. False if no
30915 * checks passed
30916 * @private
30917 */
30918 ;
30919
  _proto.techWaiting_ = function techWaiting_() {
    var seekable = this.seekable();
    var currentTime = this.tech_.currentTime();

    if (this.tech_.seeking() || this.timer_ !== null) {
      // Tech is seeking or already waiting on another action, no action needed
      return true;
    }

    if (this.beforeSeekableWindow_(seekable, currentTime)) {
      // fell behind the live window: resync to the live point
      var livePoint = seekable.end(seekable.length - 1);
      this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
      this.cancelTimer_();
      this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS

      this.tech_.trigger({
        type: 'usage',
        name: 'vhs-live-resync'
      });
      this.tech_.trigger({
        type: 'usage',
        name: 'hls-live-resync'
      });
      return true;
    }

    var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
    var buffered = this.tech_.buffered();
    var videoUnderflow = this.videoUnderflow_({
      audioBuffered: sourceUpdater.audioBuffered(),
      videoBuffered: sourceUpdater.videoBuffered(),
      currentTime: currentTime
    });

    if (videoUnderflow) {
      // Even though the video underflowed and was stuck in a gap, the audio overplayed
      // the gap, leading currentTime into a buffered range. Seeking to currentTime
      // allows the video to catch up to the audio position without losing any audio
      // (only suffering ~3 seconds of frozen video and a pause in audio playback).
      this.cancelTimer_();
      this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS

      this.tech_.trigger({
        type: 'usage',
        name: 'vhs-video-underflow'
      });
      this.tech_.trigger({
        type: 'usage',
        name: 'hls-video-underflow'
      });
      return true;
    }

    var nextRange = findNextRange(buffered, currentTime); // check for gap

    if (nextRange.length > 0) {
      // a buffered region lies ahead: schedule skipTheGap_ for when playback
      // should have reached the gap (`difference` seconds from now)
      var difference = nextRange.start(0) - currentTime;
      this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
      this.cancelTimer_();
      this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
      return true;
    } // All checks failed. Returning false to indicate failure to correct waiting


    return false;
  };
30986
30987 _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
30988 if (allowSeeksWithinUnsafeLiveWindow === void 0) {
30989 allowSeeksWithinUnsafeLiveWindow = false;
30990 }
30991
30992 if (!seekable.length) {
30993 // we can't make a solid case if there's no seekable, default to false
30994 return false;
30995 }
30996
30997 var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
30998 var isLive = !playlist.endList;
30999
31000 if (isLive && allowSeeksWithinUnsafeLiveWindow) {
31001 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
31002 }
31003
31004 if (currentTime > allowedEnd) {
31005 return true;
31006 }
31007
31008 return false;
31009 };
31010
31011 _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
31012 if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
31013 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
31014 return true;
31015 }
31016
31017 return false;
31018 };
31019
31020 _proto.videoUnderflow_ = function videoUnderflow_(_ref) {
31021 var videoBuffered = _ref.videoBuffered,
31022 audioBuffered = _ref.audioBuffered,
31023 currentTime = _ref.currentTime;
31024
31025 // audio only content will not have video underflow :)
31026 if (!videoBuffered) {
31027 return;
31028 }
31029
31030 var gap; // find a gap in demuxed content.
31031
31032 if (videoBuffered.length && audioBuffered.length) {
31033 // in Chrome audio will continue to play for ~3s when we run out of video
31034 // so we have to check that the video buffer did have some buffer in the
31035 // past.
31036 var lastVideoRange = findRange(videoBuffered, currentTime - 3);
31037 var videoRange = findRange(videoBuffered, currentTime);
31038 var audioRange = findRange(audioBuffered, currentTime);
31039
31040 if (audioRange.length && !videoRange.length && lastVideoRange.length) {
31041 gap = {
31042 start: lastVideoRange.end(0),
31043 end: audioRange.end(0)
31044 };
31045 } // find a gap in muxed content.
31046
31047 } else {
31048 var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
31049 // stuck in a gap due to video underflow.
31050
31051 if (!nextRange.length) {
31052 gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
31053 }
31054 }
31055
31056 if (gap) {
31057 this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
31058 return true;
31059 }
31060
31061 return false;
31062 }
31063 /**
31064 * Timer callback. If playback still has not proceeded, then we seek
31065 * to the start of the next buffered region.
31066 *
31067 * @private
31068 */
31069 ;
31070
31071 _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
31072 var buffered = this.tech_.buffered();
31073 var currentTime = this.tech_.currentTime();
31074 var nextRange = findNextRange(buffered, currentTime);
31075 this.cancelTimer_();
31076
31077 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
31078 return;
31079 }
31080
31081 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
31082
31083 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
31084 this.tech_.trigger({
31085 type: 'usage',
31086 name: 'vhs-gap-skip'
31087 });
31088 this.tech_.trigger({
31089 type: 'usage',
31090 name: 'hls-gap-skip'
31091 });
31092 };
31093
31094 _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
31095 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
31096 // playing for ~3 seconds after the video gap starts. This is done to account for
31097 // video buffer underflow/underrun (note that this is not done when there is audio
31098 // buffer underflow/underrun -- in that case the video will stop as soon as it
31099 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
31100 // video stalls). The player's time will reflect the playthrough of audio, so the
31101 // time will appear as if we are in a buffered region, even if we are stuck in a
31102 // "gap."
31103 //
31104 // Example:
31105 // video buffer: 0 => 10.1, 10.2 => 20
31106 // audio buffer: 0 => 20
31107 // overall buffer: 0 => 10.1, 10.2 => 20
31108 // current time: 13
31109 //
31110 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
31111 // however, the audio continued playing until it reached ~3 seconds past the gap
31112 // (13 seconds), at which point it stops as well. Since current time is past the
31113 // gap, findNextRange will return no ranges.
31114 //
31115 // To check for this issue, we see if there is a gap that starts somewhere within
31116 // a 3 second range (3 seconds +/- 1 second) back from our current time.
31117 var gaps = findGaps(buffered);
31118
31119 for (var i = 0; i < gaps.length; i++) {
31120 var start = gaps.start(i);
31121 var end = gaps.end(i); // gap is starts no more than 4 seconds back
31122
31123 if (currentTime - start < 4 && currentTime - start > 2) {
31124 return {
31125 start: start,
31126 end: end
31127 };
31128 }
31129 }
31130
31131 return null;
31132 };
31133
31134 return PlaybackWatcher;
31135 }();
31136
  // Default options for the reloadSourceOnError plugin.
  var defaultOptions = {
    // minimum number of seconds that must elapse between source reloads
    errorInterval: 30,
    // default source getter: re-resolve the tech's current source (falling
    // back to the player's currentSource) and hand it to the callback
    getSource: function getSource(next) {
      var tech = this.tech({
        IWillNotUseThisInPlugins: true
      });
      var sourceObj = tech.currentSource_ || this.currentSource();
      return next(sourceObj);
    }
  };
31147 /**
31148 * Main entry point for the plugin
31149 *
31150 * @param {Player} player a reference to a videojs Player instance
31151 * @param {Object} [options] an object with plugin options
31152 * @private
31153 */
31154
  var initPlugin = function initPlugin(player, options) {
    // timestamp of the last reload attempt, used to rate-limit reloads
    var lastCalled = 0;
    // position to restore after the source is swapped back in
    var seekTo = 0;
    var localOptions = videojs__default["default"].mergeOptions(defaultOptions, options);
    player.ready(function () {
      player.trigger({
        type: 'usage',
        name: 'vhs-error-reload-initialized'
      });
      player.trigger({
        type: 'usage',
        name: 'hls-error-reload-initialized'
      });
    });
    /**
     * Player modifications to perform that must wait until `loadedmetadata`
     * has been triggered
     *
     * @private
     */

    var loadedMetadataHandler = function loadedMetadataHandler() {
      if (seekTo) {
        player.currentTime(seekTo);
      }
    };
    /**
     * Set the source on the player element, play, and seek if necessary
     *
     * @param {Object} sourceObj An object specifying the source url and mime-type to play
     * @private
     */


    var setSource = function setSource(sourceObj) {
      if (sourceObj === null || sourceObj === undefined) {
        return;
      }

      // only remember a resume position for finite-duration (non-live) content
      seekTo = player.duration() !== Infinity && player.currentTime() || 0;
      player.one('loadedmetadata', loadedMetadataHandler);
      player.src(sourceObj);
      player.trigger({
        type: 'usage',
        name: 'vhs-error-reload'
      });
      player.trigger({
        type: 'usage',
        name: 'hls-error-reload'
      });
      player.play();
    };
    /**
     * Attempt to get a source from either the built-in getSource function
     * or a custom function provided via the options
     *
     * @private
     */


    var errorHandler = function errorHandler() {
      // Do not attempt to reload the source if a source-reload occurred before
      // 'errorInterval' time has elapsed since the last source-reload
      if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
        player.trigger({
          type: 'usage',
          name: 'vhs-error-reload-canceled'
        });
        player.trigger({
          type: 'usage',
          name: 'hls-error-reload-canceled'
        });
        return;
      }

      if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
        videojs__default["default"].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
        return;
      }

      lastCalled = Date.now();
      return localOptions.getSource.call(player, setSource);
    };
    /**
     * Unbind any event handlers that were bound by the plugin
     *
     * @private
     */


    var cleanupEvents = function cleanupEvents() {
      player.off('loadedmetadata', loadedMetadataHandler);
      player.off('error', errorHandler);
      player.off('dispose', cleanupEvents);
    };
    /**
     * Cleanup before re-initializing the plugin
     *
     * @param {Object} [newOptions] an object with plugin options
     * @private
     */


    var reinitPlugin = function reinitPlugin(newOptions) {
      cleanupEvents();
      initPlugin(player, newOptions);
    };

    player.on('error', errorHandler);
    player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
    // initializing the plugin

    player.reloadSourceOnError = reinitPlugin;
  };
31269 /**
31270 * Reload the source when an error is detected as long as there
31271 * wasn't an error previously within the last 30 seconds
31272 *
31273 * @param {Object} [options] an object with plugin options
31274 */
31275
31276
  var reloadSourceOnError = function reloadSourceOnError(options) {
    // `this` is the player instance the plugin is registered on
    initPlugin(this, options);
  };
31280
  // Bundled library versions. version$4 is @videojs/http-streaming itself
  // (matches the banner at the top of this file); the remaining versions
  // belong to bundled dependencies -- the exact mapping is determined by the
  // build and is not visible in this chunk, confirm against the build manifest.
  var version$4 = "2.16.0";

  var version$3 = "6.0.1";

  var version$2 = "0.22.1";

  var version$1 = "4.8.0";

  var version = "3.1.3";
31290
  // Public VHS namespace: playlist utilities, rendition selectors, and the
  // shared xhr factory.
  var Vhs = {
    PlaylistLoader: PlaylistLoader,
    Playlist: Playlist,
    utils: utils,
    STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
    INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
    lastBandwidthSelector: lastBandwidthSelector,
    movingAverageBandwidthSelector: movingAverageBandwidthSelector,
    comparePlaylistBandwidth: comparePlaylistBandwidth,
    comparePlaylistResolution: comparePlaylistResolution,
    xhr: xhrFactory()
  }; // Define getter/setters for config properties

  // Each Config property is re-exposed on Vhs with a warning on every access,
  // since these are global knobs that affect all player instances.
  Object.keys(Config).forEach(function (prop) {
    Object.defineProperty(Vhs, prop, {
      get: function get() {
        videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
        return Config[prop];
      },
      set: function set(value) {
        videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");

        // only non-negative numbers are accepted as config values
        if (typeof value !== 'number' || value < 0) {
          videojs__default["default"].log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
          return;
        }

        Config[prop] = value;
      }
    });
  });
  // localStorage key under which VHS persists state across sessions
  var LOCAL_STORAGE_KEY = 'videojs-vhs';
31323 /**
31324 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
31325 *
31326 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
31327 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
31328 * @function handleVhsMediaChange
31329 */
31330
31331 var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
31332 var newPlaylist = playlistLoader.media();
31333 var selectedIndex = -1;
31334
31335 for (var i = 0; i < qualityLevels.length; i++) {
31336 if (qualityLevels[i].id === newPlaylist.id) {
31337 selectedIndex = i;
31338 break;
31339 }
31340 }
31341
31342 qualityLevels.selectedIndex_ = selectedIndex;
31343 qualityLevels.trigger({
31344 selectedIndex: selectedIndex,
31345 type: 'change'
31346 });
31347 };
31348 /**
31349 * Adds quality levels to list once playlist metadata is available
31350 *
31351 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
31352 * @param {Object} vhs Vhs object to listen to for media events.
31353 * @function handleVhsLoadedMetadata
31354 */
31355
31356
31357 var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
31358 vhs.representations().forEach(function (rep) {
31359 qualityLevels.addQualityLevel(rep);
31360 });
31361 handleVhsMediaChange(qualityLevels, vhs.playlists);
31362 }; // HLS is a source handler, not a tech. Make sure attempts to use it
31363 // as one do not cause exceptions.
31364
31365
31366 Vhs.canPlaySource = function () {
31367 return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
31368 };
31369
  /**
   * Fills in per-key-system content types (and pssh/url where available),
   * derived from the selected playlists, and merges them over the source's
   * keySystems options for videojs-contrib-eme.
   *
   * @param {Object|undefined} keySystemOptions keySystems from the source; returned as-is when falsy
   * @param {Object} mainPlaylist the selected main (video) playlist
   * @param {Object} [audioPlaylist] the selected alternate audio playlist
   * @return {Object|undefined} merged keySystems options
   */
  var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
    if (!keySystemOptions) {
      return keySystemOptions;
    }

    var codecs = {};

    if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
      codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
    }

    // an alternate audio playlist's CODECS attribute overrides the main one
    if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
      codecs.audio = audioPlaylist.attributes.CODECS;
    }

    var videoContentType = getMimeForCodec(codecs.video);
    var audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist

    var keySystemContentTypes = {};

    for (var keySystem in keySystemOptions) {
      keySystemContentTypes[keySystem] = {};

      if (audioContentType) {
        keySystemContentTypes[keySystem].audioContentType = audioContentType;
      }

      if (videoContentType) {
        keySystemContentTypes[keySystem].videoContentType = videoContentType;
      } // Default to using the video playlist's PSSH even though they may be different, as
      // videojs-contrib-eme will only accept one in the options.
      //
      // This shouldn't be an issue for most cases as early intialization will handle all
      // unique PSSH values, and if they aren't, then encrypted events should have the
      // specific information needed for the unique license.


      // NOTE(review): unlike the CODECS check above, mainPlaylist is accessed
      // here without a null guard -- presumably callers always pass one; confirm.
      if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
        keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
      } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
      // so we need to prevent overwriting the URL entirely


      if (typeof keySystemOptions[keySystem] === 'string') {
        keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
      }
    }

    return videojs__default["default"].mergeOptions(keySystemOptions, keySystemContentTypes);
  };
31420 /**
31421 * @typedef {Object} KeySystems
31422 *
31423 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
31424 * Note: not all options are listed here.
31425 *
31426 * @property {Uint8Array} [pssh]
31427 * Protection System Specific Header
31428 */
31429
31430 /**
31431 * Goes through all the playlists and collects an array of KeySystems options objects
31432 * containing each playlist's keySystems and their pssh values, if available.
31433 *
31434 * @param {Object[]} playlists
31435 * The playlists to look through
31436 * @param {string[]} keySystems
31437 * The keySystems to collect pssh values for
31438 *
31439 * @return {KeySystems[]}
31440 * An array of KeySystems objects containing available key systems and their
31441 * pssh values
31442 */
31443
31444
31445 var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
31446 return playlists.reduce(function (keySystemsArr, playlist) {
31447 if (!playlist.contentProtection) {
31448 return keySystemsArr;
31449 }
31450
31451 var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
31452 var keySystemOptions = playlist.contentProtection[keySystem];
31453
31454 if (keySystemOptions && keySystemOptions.pssh) {
31455 keySystemsObj[keySystem] = {
31456 pssh: keySystemOptions.pssh
31457 };
31458 }
31459
31460 return keySystemsObj;
31461 }, {});
31462
31463 if (Object.keys(keySystemsOptions).length) {
31464 keySystemsArr.push(keySystemsOptions);
31465 }
31466
31467 return keySystemsArr;
31468 }, []);
31469 };
31470 /**
31471 * Returns a promise that waits for the
31472 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
31473 *
31474 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
31475 * browsers.
31476 *
31477 * As per the above ticket, this is particularly important for Chrome, where, if
31478 * unencrypted content is appended before encrypted content and the key session has not
31479 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
31480 * during playback.
31481 *
31482 * @param {Object} player
31483 * The player instance
31484 * @param {Object[]} sourceKeySystems
31485 * The key systems options from the player source
31486 * @param {Object} [audioMedia]
31487 * The active audio media playlist (optional)
31488 * @param {Object[]} mainPlaylists
31489 * The playlists found on the master playlist object
31490 *
31491 * @return {Object}
31492 * Promise that resolves when the key session has been created
31493 */
31494
31495
31496 var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
31497 var player = _ref.player,
31498 sourceKeySystems = _ref.sourceKeySystems,
31499 audioMedia = _ref.audioMedia,
31500 mainPlaylists = _ref.mainPlaylists;
31501
31502 if (!player.eme.initializeMediaKeys) {
31503 return Promise.resolve();
31504 } // TODO should all audio PSSH values be initialized for DRM?
31505 //
31506 // All unique video rendition pssh values are initialized for DRM, but here only
31507 // the initial audio playlist license is initialized. In theory, an encrypted
31508 // event should be fired if the user switches to an alternative audio playlist
31509 // where a license is required, but this case hasn't yet been tested. In addition, there
31510 // may be many alternate audio playlists unlikely to be used (e.g., multiple different
31511 // languages).
31512
31513
31514 var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
31515 var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
31516 var initializationFinishedPromises = [];
31517 var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
31518 // only place where it should not be deduped is for ms-prefixed APIs, but the early
31519 // return for IE11 above, and the existence of modern EME APIs in addition to
31520 // ms-prefixed APIs on Edge should prevent this from being a concern.
31521 // initializeMediaKeys also won't use the webkit-prefixed APIs.
31522
31523 keySystemsOptionsArr.forEach(function (keySystemsOptions) {
31524 keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
31525 player.tech_.one('keysessioncreated', resolve);
31526 }));
31527 initializationFinishedPromises.push(new Promise(function (resolve, reject) {
31528 player.eme.initializeMediaKeys({
31529 keySystems: keySystemsOptions
31530 }, function (err) {
31531 if (err) {
31532 reject(err);
31533 return;
31534 }
31535
31536 resolve();
31537 });
31538 }));
31539 }); // The reasons Promise.race is chosen over Promise.any:
31540 //
31541 // * Promise.any is only available in Safari 14+.
31542 // * None of these promises are expected to reject. If they do reject, it might be
31543 // better here for the race to surface the rejection, rather than mask it by using
31544 // Promise.any.
31545
31546 return Promise.race([// If a session was previously created, these will all finish resolving without
31547 // creating a new session, otherwise it will take until the end of all license
31548 // requests, which is why the key session check is used (to make setup much faster).
31549 Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
31550 Promise.race(keySessionCreatedPromises)]);
31551 };
31552 /**
31553 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
31554 * there are keySystems on the source, sets up source options to prepare the source for
31555 * eme.
31556 *
31557 * @param {Object} player
31558 * The player instance
31559 * @param {Object[]} sourceKeySystems
31560 * The key systems options from the player source
31561 * @param {Object} media
31562 * The active media playlist
31563 * @param {Object} [audioMedia]
31564 * The active audio media playlist (optional)
31565 *
31566 * @return {boolean}
31567 * Whether or not options were configured and EME is available
31568 */
31569
31570 var setupEmeOptions = function setupEmeOptions(_ref2) {
31571 var player = _ref2.player,
31572 sourceKeySystems = _ref2.sourceKeySystems,
31573 media = _ref2.media,
31574 audioMedia = _ref2.audioMedia;
31575 var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
31576
31577 if (!sourceOptions) {
31578 return false;
31579 }
31580
31581 player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
31582 // do nothing.
31583
31584 if (sourceOptions && !player.eme) {
31585 videojs__default["default"].log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
31586 return false;
31587 }
31588
31589 return true;
31590 };
31591
31592 var getVhsLocalStorage = function getVhsLocalStorage() {
31593 if (!window.localStorage) {
31594 return null;
31595 }
31596
31597 var storedObject = window.localStorage.getItem(LOCAL_STORAGE_KEY);
31598
31599 if (!storedObject) {
31600 return null;
31601 }
31602
31603 try {
31604 return JSON.parse(storedObject);
31605 } catch (e) {
31606 // someone may have tampered with the value
31607 return null;
31608 }
31609 };
31610
31611 var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
31612 if (!window.localStorage) {
31613 return false;
31614 }
31615
31616 var objectToStore = getVhsLocalStorage();
31617 objectToStore = objectToStore ? videojs__default["default"].mergeOptions(objectToStore, options) : options;
31618
31619 try {
31620 window.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
31621 } catch (e) {
31622 // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
31623 // storage is set to 0).
31624 // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
31625 // No need to perform any operation.
31626 return false;
31627 }
31628
31629 return objectToStore;
31630 };
31631 /**
31632 * Parses VHS-supported media types from data URIs. See
31633 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
31634 * for information on data URIs.
31635 *
31636 * @param {string} dataUri
31637 * The data URI
31638 *
31639 * @return {string|Object}
31640 * The parsed object/string, or the original string if no supported media type
31641 * was found
31642 */
31643
31644
31645 var expandDataUri = function expandDataUri(dataUri) {
31646 if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
31647 return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
31648 } // no known case for this data URI, return the string as-is
31649
31650
31651 return dataUri;
31652 };
31653 /**
31654 * Whether the browser has built-in HLS support.
31655 */
31656
31657
  Vhs.supportsNativeHls = function () {
    // IIFE: evaluated once at module load; true when the browser reports it can
    // natively play at least one HLS mime-type via a <video> element.
    if (!document || !document.createElement) {
      return false;
    }

    var video = document.createElement('video'); // native HLS is definitely not supported if HTML5 video isn't

    if (!videojs__default["default"].getTech('Html5').isSupported()) {
      return false;
    } // HLS manifests can go by many mime-types


    var canPlay = [// Apple sanctioned
    'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
    'audio/mpegurl', // Very common
    'audio/x-mpegurl', // Very common
    'application/x-mpegurl', // Included for completeness
    'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
    return canPlay.some(function (canItPlay) {
      return /maybe|probably/i.test(video.canPlayType(canItPlay));
    });
  }();
31680
31681 Vhs.supportsNativeDash = function () {
31682 if (!document || !document.createElement || !videojs__default["default"].getTech('Html5').isSupported()) {
31683 return false;
31684 }
31685
31686 return /maybe|probably/i.test(document.createElement('video').canPlayType('application/dash+xml'));
31687 }();
31688
31689 Vhs.supportsTypeNatively = function (type) {
31690 if (type === 'hls') {
31691 return Vhs.supportsNativeHls;
31692 }
31693
31694 if (type === 'dash') {
31695 return Vhs.supportsNativeDash;
31696 }
31697
31698 return false;
31699 };
31700 /**
31701 * HLS is a source handler, not a tech. Make sure attempts to use it
31702 * as one do not cause exceptions.
31703 */
31704
31705
  Vhs.isSupported = function () {
    // Warn (and return the warn() result, i.e. undefined) rather than throw so a
    // stale techOrder entry does not break player setup.
    return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
  };
31709
  // Base Component class from video.js, extended by VhsHandler below.
  var Component = videojs__default["default"].getComponent('Component');
31711 /**
31712 * The Vhs Handler object, where we orchestrate all of the parts
31713 * of HLS to interact with video.js
31714 *
31715 * @class VhsHandler
31716 * @extends videojs.Component
   * @param {Object} source the source object
31718 * @param {Tech} tech the parent tech object
31719 * @param {Object} options optional and required options
31720 */
31721
31722 var VhsHandler = /*#__PURE__*/function (_Component) {
31723 inheritsLoose(VhsHandler, _Component);
31724
    function VhsHandler(source, tech, options) {
      var _this;

      // Initialize the Component with the merged hls/vhs options (`vhs` wins).
      _this = _Component.call(this, tech, videojs__default["default"].mergeOptions(options.hls, options.vhs)) || this;

      if (options.hls && Object.keys(options.hls).length) {
        videojs__default["default"].log.warn('Using hls options is deprecated. Please rename `hls` to `vhs` in your options object.');
      } // if a tech level `initialBandwidth` option was passed
      // use that over the VHS level `bandwidth` option


      if (typeof options.initialBandwidth === 'number') {
        _this.options_.bandwidth = options.initialBandwidth;
      }

      _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but setup a reference to HLS for
      // backwards-compatibility

      if (tech.options_ && tech.options_.playerId) {
        var _player = videojs__default["default"](tech.options_.playerId);

        // Deprecated `player.hls` accessor; logs and fires a usage event when read.
        if (!_player.hasOwnProperty('hls')) {
          Object.defineProperty(_player, 'hls', {
            get: function get() {
              videojs__default["default"].log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
              tech.trigger({
                type: 'usage',
                name: 'hls-player-access'
              });
              return assertThisInitialized(_this);
            },
            configurable: true
          });
        }

        // Deprecated `player.vhs` accessor; logs and fires a usage event when read.
        if (!_player.hasOwnProperty('vhs')) {
          Object.defineProperty(_player, 'vhs', {
            get: function get() {
              videojs__default["default"].log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
              tech.trigger({
                type: 'usage',
                name: 'vhs-player-access'
              });
              return assertThisInitialized(_this);
            },
            configurable: true
          });
        }

        // Deprecated `player.dash` accessor.
        if (!_player.hasOwnProperty('dash')) {
          Object.defineProperty(_player, 'dash', {
            get: function get() {
              videojs__default["default"].log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
              return assertThisInitialized(_this);
            },
            configurable: true
          });
        }

        _this.player_ = _player;
      }

      _this.tech_ = tech;
      _this.source_ = source;
      _this.stats = {}; // stat getters are attached later, in src()
      _this.ignoreNextSeekingEvent_ = false;

      _this.setOptions_();

      if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
        tech.overrideNativeAudioTracks(true);
        tech.overrideNativeVideoTracks(true);
      } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
        // overriding native HLS only works if audio tracks have been emulated
        // error early if we're misconfigured
        throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
      } // listen for fullscreenchange events for this player so that we
      // can adjust our quality selection quickly


      _this.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
        var fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;

        if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
          _this.masterPlaylistController_.fastQualityChange_();
        } else {
          // When leaving fullscreen, since the in page pixel dimensions should be smaller
          // than full screen, see if there should be a rendition switch down to preserve
          // bandwidth.
          _this.masterPlaylistController_.checkABR_();
        }
      });

      // Skip a single `seeking` event when flagged (the flag is set on
      // `firstplay` to ignore the seek-to-live behavior; see src()).
      _this.on(_this.tech_, 'seeking', function () {
        if (this.ignoreNextSeekingEvent_) {
          this.ignoreNextSeekingEvent_ = false;
          return;
        }

        this.setCurrentTime(this.tech_.currentTime());
      });

      _this.on(_this.tech_, 'error', function () {
        // verify that the error was real and we are loaded
        // enough to have mpc loaded.
        if (this.tech_.error() && this.masterPlaylistController_) {
          this.masterPlaylistController_.pauseLoading();
        }
      });

      _this.on(_this.tech_, 'play', _this.play);

      return _this;
    }
31839
31840 var _proto = VhsHandler.prototype;
31841
31842 _proto.setOptions_ = function setOptions_() {
31843 var _this2 = this;
31844
31845 // defaults
31846 this.options_.withCredentials = this.options_.withCredentials || false;
31847 this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
31848 this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
31849 this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
31850 this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
31851 this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
31852 this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
31853 this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
31854 this.options_.customTagParsers = this.options_.customTagParsers || [];
31855 this.options_.customTagMappers = this.options_.customTagMappers || [];
31856 this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
31857
31858 if (typeof this.options_.blacklistDuration !== 'number') {
31859 this.options_.blacklistDuration = 5 * 60;
31860 }
31861
31862 if (typeof this.options_.bandwidth !== 'number') {
31863 if (this.options_.useBandwidthFromLocalStorage) {
31864 var storedObject = getVhsLocalStorage();
31865
31866 if (storedObject && storedObject.bandwidth) {
31867 this.options_.bandwidth = storedObject.bandwidth;
31868 this.tech_.trigger({
31869 type: 'usage',
31870 name: 'vhs-bandwidth-from-local-storage'
31871 });
31872 this.tech_.trigger({
31873 type: 'usage',
31874 name: 'hls-bandwidth-from-local-storage'
31875 });
31876 }
31877
31878 if (storedObject && storedObject.throughput) {
31879 this.options_.throughput = storedObject.throughput;
31880 this.tech_.trigger({
31881 type: 'usage',
31882 name: 'vhs-throughput-from-local-storage'
31883 });
31884 this.tech_.trigger({
31885 type: 'usage',
31886 name: 'hls-throughput-from-local-storage'
31887 });
31888 }
31889 }
31890 } // if bandwidth was not set by options or pulled from local storage, start playlist
31891 // selection at a reasonable bandwidth
31892
31893
31894 if (typeof this.options_.bandwidth !== 'number') {
31895 this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
31896 } // If the bandwidth number is unchanged from the initial setting
31897 // then this takes precedence over the enableLowInitialPlaylist option
31898
31899
31900 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
31901
31902 ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
31903 if (typeof _this2.source_[option] !== 'undefined') {
31904 _this2.options_[option] = _this2.source_[option];
31905 }
31906 });
31907 this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
31908 this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
31909 }
31910 /**
31911 * called when player.src gets called, handle a new source
31912 *
31913 * @param {Object} src the source object to handle
31914 */
31915 ;
31916
    _proto.src = function src(_src, type) {
      var _this3 = this;

      // do nothing if the src is falsey
      if (!_src) {
        return;
      }

      this.setOptions_(); // add master playlist controller options

      this.options_.src = expandDataUri(this.source_.src);
      this.options_.tech = this.tech_;
      this.options_.externVhs = Vhs;
      this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech

      this.options_.seekTo = function (time) {
        _this3.tech_.setCurrentTime(time);
      };

      if (this.options_.smoothQualityChange) {
        videojs__default["default"].log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
      }

      this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
      var playbackWatcherOptions = videojs__default["default"].mergeOptions({
        liveRangeSafeTimeDelta: SAFE_TIME_DELTA
      }, this.options_, {
        seekable: function seekable() {
          return _this3.seekable();
        },
        media: function media() {
          return _this3.masterPlaylistController_.media();
        },
        masterPlaylistController: this.masterPlaylistController_
      });
      this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
      // Surface MPC errors through the player, defaulting the code to 3
      // (MEDIA_ERR_DECODE) when none is provided.
      this.masterPlaylistController_.on('error', function () {
        var player = videojs__default["default"].players[_this3.tech_.options_.playerId];
        var error = _this3.masterPlaylistController_.error;

        if (typeof error === 'object' && !error.code) {
          error.code = 3;
        } else if (typeof error === 'string') {
          error = {
            message: error,
            code: 3
          };
        }

        player.error(error);
      });
      var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
      // compatibility with < v2

      this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
      this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2

      this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
      this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
      // controller. Using a custom property for backwards compatibility
      // with < v2

      Object.defineProperties(this, {
        selectPlaylist: {
          get: function get() {
            return this.masterPlaylistController_.selectPlaylist;
          },
          set: function set(selectPlaylist) {
            this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
          }
        },
        throughput: {
          get: function get() {
            return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
          },
          set: function set(throughput) {
            this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
            // for the cumulative average

            this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
          }
        },
        bandwidth: {
          get: function get() {
            var playerBandwidthEst = this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
            var networkInformation = window.navigator.connection || window.navigator.mozConnection || window.navigator.webkitConnection;
            var tenMbpsAsBitsPerSecond = 10e6;

            if (this.options_.useNetworkInformationApi && networkInformation) {
              // downlink returns Mbps
              // https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
              var networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
              // estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
              // high quality streams are not filtered out.

              if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
                playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
              } else {
                playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
              }
            }

            return playerBandwidthEst;
          },
          set: function set(bandwidth) {
            this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
            // `count` is set to zero so that the current value of `rate` isn't included
            // in the cumulative average

            this.masterPlaylistController_.mainSegmentLoader_.throughput = {
              rate: 0,
              count: 0
            };
          }
        },

        /**
         * `systemBandwidth` is a combination of two serial processes bit-rates. The first
         * is the network bitrate provided by `bandwidth` and the second is the bitrate of
         * the entire process after that - decryption, transmuxing, and appending - provided
         * by `throughput`.
         *
         * Since the two processes are serial, the overall system bandwidth is given by:
         * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
         */
        systemBandwidth: {
          get: function get() {
            var invBandwidth = 1 / (this.bandwidth || 1);
            var invThroughput;

            if (this.throughput > 0) {
              invThroughput = 1 / this.throughput;
            } else {
              invThroughput = 0;
            }

            var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
            return systemBitrate;
          },
          set: function set() {
            videojs__default["default"].log.error('The "systemBandwidth" property is read-only');
          }
        }
      });

      if (this.options_.bandwidth) {
        this.bandwidth = this.options_.bandwidth;
      }

      if (this.options_.throughput) {
        this.throughput = this.options_.throughput;
      }

      // Expose read-only playback statistics via `this.stats`.
      Object.defineProperties(this.stats, {
        bandwidth: {
          get: function get() {
            return _this3.bandwidth || 0;
          },
          enumerable: true
        },
        mediaRequests: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaRequests_() || 0;
          },
          enumerable: true
        },
        mediaRequestsAborted: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
          },
          enumerable: true
        },
        mediaRequestsTimedout: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
          },
          enumerable: true
        },
        mediaRequestsErrored: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
          },
          enumerable: true
        },
        mediaTransferDuration: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
          },
          enumerable: true
        },
        mediaBytesTransferred: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
          },
          enumerable: true
        },
        mediaSecondsLoaded: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
          },
          enumerable: true
        },
        mediaAppends: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaAppends_() || 0;
          },
          enumerable: true
        },
        mainAppendsToLoadedData: {
          get: function get() {
            return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
          },
          enumerable: true
        },
        audioAppendsToLoadedData: {
          get: function get() {
            return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
          },
          enumerable: true
        },
        appendsToLoadedData: {
          get: function get() {
            return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
          },
          enumerable: true
        },
        timeToLoadedData: {
          get: function get() {
            return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
          },
          enumerable: true
        },
        buffered: {
          get: function get() {
            return timeRangesToArray(_this3.tech_.buffered());
          },
          enumerable: true
        },
        currentTime: {
          get: function get() {
            return _this3.tech_.currentTime();
          },
          enumerable: true
        },
        currentSource: {
          get: function get() {
            return _this3.tech_.currentSource_;
          },
          enumerable: true
        },
        currentTech: {
          get: function get() {
            return _this3.tech_.name_;
          },
          enumerable: true
        },
        duration: {
          get: function get() {
            return _this3.tech_.duration();
          },
          enumerable: true
        },
        master: {
          get: function get() {
            return _this3.playlists.master;
          },
          enumerable: true
        },
        playerDimensions: {
          get: function get() {
            return _this3.tech_.currentDimensions();
          },
          enumerable: true
        },
        seekable: {
          get: function get() {
            return timeRangesToArray(_this3.tech_.seekable());
          },
          enumerable: true
        },
        timestamp: {
          get: function get() {
            return Date.now();
          },
          enumerable: true
        },
        videoPlaybackQuality: {
          get: function get() {
            return _this3.tech_.getVideoPlaybackQuality();
          },
          enumerable: true
        }
      });
      this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
      // Persist bandwidth/throughput estimates across sessions when enabled.
      this.tech_.on('bandwidthupdate', function () {
        if (_this3.options_.useBandwidthFromLocalStorage) {
          updateVhsLocalStorage({
            bandwidth: _this3.bandwidth,
            throughput: Math.round(_this3.throughput)
          });
        }
      });
      this.masterPlaylistController_.on('selectedinitialmedia', function () {
        // Add the manual rendition mix-in to VhsHandler
        renditionSelectionMixin(_this3);
      });
      this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
        _this3.setupEme_();
      }); // the bandwidth of the primary segment loader is our best
      // estimate of overall bandwidth

      this.on(this.masterPlaylistController_, 'progress', function () {
        this.tech_.trigger('progress');
      }); // In the live case, we need to ignore the very first `seeking` event since
      // that will be the result of the seek-to-live behavior

      this.on(this.masterPlaylistController_, 'firstplay', function () {
        this.ignoreNextSeekingEvent_ = true;
      });
      this.setupQualityLevels_(); // do nothing if the tech has been disposed already
      // this can occur if someone sets the src in player.ready(), for instance

      if (!this.tech_.el()) {
        return;
      }

      this.mediaSourceUrl_ = window.URL.createObjectURL(this.masterPlaylistController_.mediaSource);
      this.tech_.src(this.mediaSourceUrl_);
    };
32246
32247 _proto.createKeySessions_ = function createKeySessions_() {
32248 var _this4 = this;
32249
32250 var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
32251 this.logger_('waiting for EME key session creation');
32252 waitForKeySessionCreation({
32253 player: this.player_,
32254 sourceKeySystems: this.source_.keySystems,
32255 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
32256 mainPlaylists: this.playlists.master.playlists
32257 }).then(function () {
32258 _this4.logger_('created EME key session');
32259
32260 _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
32261 }).catch(function (err) {
32262 _this4.logger_('error while creating EME key session', err);
32263
32264 _this4.player_.error({
32265 message: 'Failed to initialize media keys for EME',
32266 code: 3
32267 });
32268 });
32269 };
32270
32271 _proto.handleWaitingForKey_ = function handleWaitingForKey_() {
32272 // If waitingforkey is fired, it's possible that the data that's necessary to retrieve
32273 // the key is in the manifest. While this should've happened on initial source load, it
32274 // may happen again in live streams where the keys change, and the manifest info
32275 // reflects the update.
32276 //
32277 // Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's
32278 // already requested keys for, we don't have to worry about this generating extraneous
32279 // requests.
32280 this.logger_('waitingforkey fired, attempting to create any new key sessions');
32281 this.createKeySessions_();
32282 }
32283 /**
32284 * If necessary and EME is available, sets up EME options and waits for key session
32285 * creation.
32286 *
     * This function also updates the source updater so that it can be used, as for some
32288 * browsers, EME must be configured before content is appended (if appending unencrypted
32289 * content before encrypted content).
32290 */
32291 ;
32292
    _proto.setupEme_ = function setupEme_() {
      var _this5 = this;

      var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
      // Configure `keySystems` on the current source so the eme plugin can use it;
      // false when eme is unavailable or no key systems apply.
      var didSetupEmeOptions = setupEmeOptions({
        player: this.player_,
        sourceKeySystems: this.source_.keySystems,
        media: this.playlists.media(),
        audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
      });
      // On an "output-restricted" key status (e.g. HDCP limitations), exclude
      // renditions of 720p or taller from ABR selection.
      this.player_.tech_.on('keystatuschange', function (e) {
        if (e.status !== 'output-restricted') {
          return;
        }

        var masterPlaylist = _this5.masterPlaylistController_.master();

        if (!masterPlaylist || !masterPlaylist.playlists) {
          return;
        }

        var excludedHDPlaylists = []; // Assume all HD streams are unplayable and exclude them from ABR selection

        masterPlaylist.playlists.forEach(function (playlist) {
          if (playlist && playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height >= 720) {
            if (!playlist.excludeUntil || playlist.excludeUntil < Infinity) {
              playlist.excludeUntil = Infinity;
              excludedHDPlaylists.push(playlist);
            }
          }
        });

        if (excludedHDPlaylists.length) {
          var _videojs$log;

          (_videojs$log = videojs__default["default"].log).warn.apply(_videojs$log, ['DRM keystatus changed to "output-restricted." Removing the following HD playlists ' + 'that will most likely fail to play and clearing the buffer. ' + 'This may be due to HDCP restrictions on the stream and the capabilities of the current device.'].concat(excludedHDPlaylists)); // Clear the buffer before switching playlists, since it may already contain unplayable segments


          _this5.masterPlaylistController_.fastQualityChange_();
        }
      });
      // Bind once so the same reference can later be removed as a listener.
      this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
      this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_); // In IE11 this is too early to initialize media keys, and IE11 does not support
      // promises.

      if (videojs__default["default"].browser.IE_VERSION === 11 || !didSetupEmeOptions) {
        // If EME options were not set up, we've done all we could to initialize EME.
        this.masterPlaylistController_.sourceUpdater_.initializedEme();
        return;
      }

      this.createKeySessions_();
    }
32346 /**
32347 * Initializes the quality levels and sets listeners to update them.
32348 *
32349 * @method setupQualityLevels_
32350 * @private
32351 */
32352 ;
32353
/**
 * Initializes the quality levels list and keeps it in sync with playlist
 * changes.
 *
 * @private
 */
_proto.setupQualityLevels_ = function setupQualityLevels_() {
  var handler = this;

  // Resolve the player that owns this tech.
  var player = videojs__default["default"].players[this.tech_.options_.playerId];

  // Nothing to do when there is no player, the qualityLevels plugin is
  // missing, or the listeners have already been wired up.
  if (!player || !player.qualityLevels || this.qualityLevels_) {
    return;
  }

  this.qualityLevels_ = player.qualityLevels();

  this.masterPlaylistController_.on('selectedinitialmedia', function () {
    handleVhsLoadedMetadata(handler.qualityLevels_, handler);
  });
  this.playlists.on('mediachange', function () {
    handleVhsMediaChange(handler.qualityLevels_, handler.playlists);
  });
}
32372 /**
32373 * return the version
32374 */
32375 ;
32376
/**
 * Returns the versions of this library and its bundled dependencies.
 *
 * @return {Object} map of package name to version string
 */
VhsHandler.version = function version$5() {
  var versions = {};

  versions['@videojs/http-streaming'] = version$4;
  versions['mux.js'] = version$3;
  versions['mpd-parser'] = version$2;
  versions['m3u8-parser'] = version$1;
  versions['aes-decrypter'] = version;
  return versions;
}
32386 /**
32387 * return the version
32388 */
32389 ;
32390
/**
 * Instance-level convenience wrapper around the static version map.
 *
 * @return {Object} map of package name to version string
 */
_proto.version = function version() {
  var ctor = this.constructor;
  return ctor.version();
};
32394
/**
 * Whether the underlying source buffers support changing their type.
 *
 * @return {boolean} true if SourceBuffer.changeType is available
 */
_proto.canChangeType = function canChangeType() {
  var supported = SourceUpdater.canChangeType();

  return supported;
}
32398 /**
32399 * Begin playing the video.
32400 */
32401 ;
32402
/**
 * Begin playing the video by delegating to the master playlist controller.
 */
_proto.play = function play() {
  var mpc = this.masterPlaylistController_;

  mpc.play();
}
32406 /**
32407 * a wrapper around the function in MasterPlaylistController
32408 */
32409 ;
32410
/**
 * Seek to the given time; a wrapper around the function in
 * MasterPlaylistController.
 *
 * @param {number} currentTime the time to seek to, in seconds
 */
_proto.setCurrentTime = function setCurrentTime(currentTime) {
  var mpc = this.masterPlaylistController_;

  mpc.setCurrentTime(currentTime);
}
32414 /**
32415 * a wrapper around the function in MasterPlaylistController
32416 */
32417 ;
32418
/**
 * The duration of the presentation; a wrapper around the function in
 * MasterPlaylistController.
 *
 * @return {number} duration in seconds
 */
_proto.duration = function duration() {
  var mpc = this.masterPlaylistController_;

  return mpc.duration();
}
32422 /**
32423 * a wrapper around the function in MasterPlaylistController
32424 */
32425 ;
32426
/**
 * The seekable range of the presentation; a wrapper around the function in
 * MasterPlaylistController.
 *
 * @return {TimeRanges} the seekable time ranges
 */
_proto.seekable = function seekable() {
  var mpc = this.masterPlaylistController_;

  return mpc.seekable();
}
32430 /**
32431 * Abort all outstanding work and cleanup.
32432 */
32433 ;
32434
/**
 * Abort all outstanding work and clean up everything this handler owns:
 * the playback watcher, the master playlist controller, the quality levels
 * list, deprecated player/tech aliases, the media source object URL, and
 * the 'waitingforkey' listener. Finishes by calling the superclass dispose.
 *
 * NOTE(review): the anonymous 'keystatuschange' listener added in setupEme_
 * is not removed here — presumably the tech is torn down with the player;
 * confirm that assumption holds for source-switch scenarios.
 */
_proto.dispose = function dispose() {
  if (this.playbackWatcher_) {
    this.playbackWatcher_.dispose();
  }

  if (this.masterPlaylistController_) {
    this.masterPlaylistController_.dispose();
  }

  if (this.qualityLevels_) {
    this.qualityLevels_.dispose();
  }

  // Remove the convenience aliases installed on the player.
  if (this.player_) {
    delete this.player_.vhs;
    delete this.player_.dash;
    delete this.player_.hls;
  }

  if (this.tech_ && this.tech_.vhs) {
    delete this.tech_.vhs;
  } // don't check this.tech_.hls as it will log a deprecated warning


  if (this.tech_) {
    delete this.tech_.hls;
  }

  // Release the blob/object URL created for the media source, if any.
  if (this.mediaSourceUrl_ && window.URL.revokeObjectURL) {
    window.URL.revokeObjectURL(this.mediaSourceUrl_);
    this.mediaSourceUrl_ = null;
  }

  if (this.tech_) {
    this.tech_.off('waitingforkey', this.handleWaitingForKey_);
  }

  _Component.prototype.dispose.call(this);
};
32474
/**
 * Convert a player time to a program (stream/wall-clock) time, delivered
 * asynchronously through the callback.
 *
 * @param {number} time player time to convert
 * @param {Function} callback receives (err, programTime)
 */
_proto.convertToProgramTime = function convertToProgramTime(time, callback) {
  var currentPlaylist = this.masterPlaylistController_.media();

  return getProgramTime({
    playlist: currentPlaylist,
    time: time,
    callback: callback
  });
} // the player must be playing before calling this
32482 ;
32483
/**
 * Seek the player to the given program (stream/wall-clock) time.
 *
 * @param {string|number} programTime the program time to seek to
 * @param {Function} callback receives (err, newTime)
 * @param {boolean} [pauseAfterSeek=true] pause playback once the seek lands
 * @param {number} [retryCount=2] how many times to retry the seek
 */
_proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
  // Apply defaults only for explicitly-undefined arguments.
  var shouldPause = pauseAfterSeek === void 0 ? true : pauseAfterSeek;
  var retries = retryCount === void 0 ? 2 : retryCount;

  return seekToProgramTime({
    programTime: programTime,
    playlist: this.masterPlaylistController_.media(),
    retryCount: retries,
    pauseAfterSeek: shouldPause,
    seekTo: this.options_.seekTo,
    tech: this.options_.tech,
    callback: callback
  });
};
32503
32504 return VhsHandler;
32505 }(Component);
32506 /**
32507 * The Source Handler object, which informs video.js what additional
32508 * MIME types are supported and sets up playback. It is registered
32509 * automatically to the appropriate tech based on the capabilities of
32510 * the browser it is running in. It is not necessary to use or modify
32511 * this object in normal usage.
32512 */
32513
32514
var VhsSourceHandler = {
  name: 'videojs-http-streaming',
  VERSION: version$4,

  /**
   * Report whether this handler can play the given source object.
   *
   * @param {Object} srcObj the source ({src, type})
   * @param {Object} [options] player/tech options
   * @return {string} 'maybe' or ''
   */
  canHandleSource: function canHandleSource(srcObj, options) {
    if (options === void 0) {
      options = {};
    }

    var mergedOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
    return VhsSourceHandler.canPlayType(srcObj.type, mergedOptions);
  },

  /**
   * Create a VhsHandler for the source and attach it to the tech.
   *
   * @param {Object} source the source ({src, type})
   * @param {Object} tech the tech to attach to
   * @param {Object} [options] player/tech options
   * @return {VhsHandler} the created handler
   */
  handleSource: function handleSource(source, tech, options) {
    if (options === void 0) {
      options = {};
    }

    var mergedOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
    tech.vhs = new VhsHandler(source, tech, mergedOptions);

    // Expose a deprecated `tech.hls` alias that warns on access.
    if (!videojs__default["default"].hasOwnProperty('hls')) {
      Object.defineProperty(tech, 'hls', {
        get: function get() {
          videojs__default["default"].log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
          return tech.vhs;
        },
        configurable: true
      });
    }

    tech.vhs.xhr = xhrFactory();
    tech.vhs.src(source.src, source.type);
    return tech.vhs;
  },

  /**
   * Report whether MSE-based playback should be used for this MIME type.
   *
   * @param {string} type the source MIME type
   * @param {Object} [options] player/tech options
   * @return {string} 'maybe' or ''
   */
  canPlayType: function canPlayType(type, options) {
    var simpleType = simpleTypeFromSourceType(type);

    if (!simpleType) {
      return '';
    }

    var overrideNative = VhsSourceHandler.getOverrideNative(options);
    var supportsTypeNatively = Vhs.supportsTypeNatively(simpleType);

    // Use MSE playback when the browser has no native support, or when the
    // user opted to override native support.
    return !supportsTypeNatively || overrideNative ? 'maybe' : '';
  },

  /**
   * Resolve the effective overrideNative setting from vhs/hls options.
   *
   * @param {Object} [options] player/tech options
   * @return {boolean} whether native playback should be overridden
   */
  getOverrideNative: function getOverrideNative(options) {
    if (options === void 0) {
      options = {};
    }

    var vhsOptions = options.vhs === void 0 ? {} : options.vhs;
    var hlsOptions = options.hls === void 0 ? {} : options.hls;

    // Default to overriding native playback everywhere except Safari/iOS.
    var defaultOverrideNative = !(videojs__default["default"].browser.IS_ANY_SAFARI || videojs__default["default"].browser.IS_IOS);
    var overrideNative = vhsOptions.overrideNative === void 0 ? defaultOverrideNative : vhsOptions.overrideNative;
    var legacyOverrideNative = hlsOptions.overrideNative === void 0 ? false : hlsOptions.overrideNative;

    return legacyOverrideNative || overrideNative;
  }
};
32578 /**
32579 * Check to see if the native MediaSource object exists and supports
32580 * an MP4 container with both H.264 video and AAC-LC audio.
32581 *
32582 * @return {boolean} if native media sources are supported
32583 */
32584
/**
 * Check to see if the native MediaSource object exists and supports an MP4
 * container with both H.264 video and AAC-LC audio.
 *
 * @return {boolean} if native media sources are supported
 */
var supportsNativeMediaSources = function supportsNativeMediaSources() {
  // H.264 Main profile video + AAC-LC audio in an MP4 container.
  var testCodecs = 'avc1.4d400d,mp4a.40.2';

  return browserSupportsCodec(testCodecs);
}; // register source handlers with the appropriate techs
32588
32589
// Register the source handler with the Html5 tech at the highest priority (0)
// when native media sources are usable.
if (supportsNativeMediaSources()) {
  videojs__default["default"].getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
}

// Expose VhsHandler on videojs, plus a deprecated HlsHandler alias that
// warns on access.
videojs__default["default"].VhsHandler = VhsHandler;
Object.defineProperty(videojs__default["default"], 'HlsHandler', {
  get: function get() {
    videojs__default["default"].log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
    return VhsHandler;
  },
  configurable: true
});
// Same pattern for the source handler alias.
videojs__default["default"].VhsSourceHandler = VhsSourceHandler;
Object.defineProperty(videojs__default["default"], 'HlsSourceHandler', {
  get: function get() {
    videojs__default["default"].log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
    return VhsSourceHandler;
  },
  configurable: true
});
// And for the Vhs namespace object itself.
videojs__default["default"].Vhs = Vhs;
Object.defineProperty(videojs__default["default"], 'Hls', {
  get: function get() {
    videojs__default["default"].log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
    return Vhs;
  },
  configurable: true
});

// Older video.js versions without middleware support (no videojs.use) get
// Vhs registered as a component instead.
if (!videojs__default["default"].use) {
  videojs__default["default"].registerComponent('Hls', Vhs);
  videojs__default["default"].registerComponent('Vhs', Vhs);
}

// Ensure the option namespaces exist so user settings can be merged in.
videojs__default["default"].options.vhs = videojs__default["default"].options.vhs || {};
videojs__default["default"].options.hls = videojs__default["default"].options.hls || {};

// Register the reloadSourceOnError plugin unless one is already registered
// (videojs.plugin is the pre-6.x registration API).
if (!videojs__default["default"].getPlugin || !videojs__default["default"].getPlugin('reloadSourceOnError')) {
  var registerPlugin = videojs__default["default"].registerPlugin || videojs__default["default"].plugin;
  registerPlugin('reloadSourceOnError', reloadSourceOnError);
}
32631
// Public module exports (UMD): the Vhs namespace, handler classes, and the
// EME/type helper functions used by integrators and tests.
exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
exports.Vhs = Vhs;
exports.VhsHandler = VhsHandler;
exports.VhsSourceHandler = VhsSourceHandler;
exports.emeKeySystems = emeKeySystems;
exports.expandDataUri = expandDataUri;
exports.getAllPsshKeySystemsOptions = getAllPsshKeySystemsOptions;
exports.setupEmeOptions = setupEmeOptions;
exports.simpleTypeFromSourceType = simpleTypeFromSourceType;
exports.waitForKeySessionCreation = waitForKeySessionCreation;

// Mark the exports object as an ES module for interop tooling.
Object.defineProperty(exports, '__esModule', { value: true });
32644
32645}));