/*! @name @videojs/http-streaming @version 2.16.0 @license Apache-2.0 */
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('video.js'), require('@xmldom/xmldom')) :
  typeof define === 'function' && define.amd ? define(['exports', 'video.js', '@xmldom/xmldom'], factory) :
  (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.httpStreaming = {}, global.videojs, global.window));
})(this, (function (exports, videojs, xmldom) { 'use strict';

  function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

  var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);

  function createCommonjsModule(fn, basedir, module) {
    return module = {
      path: basedir,
      exports: {},
      require: function (path, base) {
        return commonjsRequire(path, (base === undefined || base === null) ? module.path : base);
      }
    }, fn(module, module.exports), module.exports;
  }

  function commonjsRequire () {
    throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
  }

  var assertThisInitialized = createCommonjsModule(function (module) {
    function _assertThisInitialized(self) {
      if (self === void 0) {
        throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
      }

      return self;
    }

    module.exports = _assertThisInitialized;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  var setPrototypeOf = createCommonjsModule(function (module) {
    function _setPrototypeOf(o, p) {
      module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
        o.__proto__ = p;
        return o;
      };

      module.exports["default"] = module.exports, module.exports.__esModule = true;
      return _setPrototypeOf(o, p);
    }

    module.exports = _setPrototypeOf;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  var inheritsLoose = createCommonjsModule(function (module) {
    function _inheritsLoose(subClass, superClass) {
      subClass.prototype = Object.create(superClass.prototype);
      subClass.prototype.constructor = subClass;
      setPrototypeOf(subClass, superClass);
    }

    module.exports = _inheritsLoose;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  var urlToolkit = createCommonjsModule(function (module, exports) {
    // see https://tools.ietf.org/html/rfc1808
    (function (root) {
      var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#[^]*)?$/;
      var FIRST_SEGMENT_REGEX = /^([^\/?#]*)([^]*)$/;
      var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
      var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
      var URLToolkit = {
        // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
        // E.g.
        // With opts.alwaysNormalize = false (default, spec compliant)
        // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
        // With opts.alwaysNormalize = true (not spec compliant)
        // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
        buildAbsoluteURL: function buildAbsoluteURL(baseURL, relativeURL, opts) {
          opts = opts || {}; // remove any remaining space and CRLF

          baseURL = baseURL.trim();
          relativeURL = relativeURL.trim();

          if (!relativeURL) {
            // 2a) If the embedded URL is entirely empty, it inherits the
            // entire base URL (i.e., is set equal to the base URL)
            // and we are done.
            if (!opts.alwaysNormalize) {
              return baseURL;
            }

            var basePartsForNormalise = URLToolkit.parseURL(baseURL);

            if (!basePartsForNormalise) {
              throw new Error('Error trying to parse base URL.');
            }

            basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
            return URLToolkit.buildURLFromParts(basePartsForNormalise);
          }

          var relativeParts = URLToolkit.parseURL(relativeURL);

          if (!relativeParts) {
            throw new Error('Error trying to parse relative URL.');
          }

          if (relativeParts.scheme) {
            // 2b) If the embedded URL starts with a scheme name, it is
            // interpreted as an absolute URL and we are done.
            if (!opts.alwaysNormalize) {
              return relativeURL;
            }

            relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
            return URLToolkit.buildURLFromParts(relativeParts);
          }

          var baseParts = URLToolkit.parseURL(baseURL);

          if (!baseParts) {
            throw new Error('Error trying to parse base URL.');
          }

          if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
            // If netLoc missing and path doesn't start with '/', assume everything before the first '/' is the netLoc
            // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
            var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
            baseParts.netLoc = pathParts[1];
            baseParts.path = pathParts[2];
          }

          if (baseParts.netLoc && !baseParts.path) {
            baseParts.path = '/';
          }

          var builtParts = {
            // 2c) Otherwise, the embedded URL inherits the scheme of
            // the base URL.
            scheme: baseParts.scheme,
            netLoc: relativeParts.netLoc,
            path: null,
            params: relativeParts.params,
            query: relativeParts.query,
            fragment: relativeParts.fragment
          };

          if (!relativeParts.netLoc) {
            // 3) If the embedded URL's <net_loc> is non-empty, we skip to
            // Step 7.  Otherwise, the embedded URL inherits the <net_loc>
            // (if any) of the base URL.
            builtParts.netLoc = baseParts.netLoc; // 4) If the embedded URL path is preceded by a slash "/", the
            // path is not relative and we skip to Step 7.

            if (relativeParts.path[0] !== '/') {
              if (!relativeParts.path) {
                // 5) If the embedded URL path is empty (and not preceded by a
                // slash), then the embedded URL inherits the base URL path
                builtParts.path = baseParts.path; // 5a) if the embedded URL's <params> is non-empty, we skip to
                // step 7; otherwise, it inherits the <params> of the base
                // URL (if any) and

                if (!relativeParts.params) {
                  builtParts.params = baseParts.params; // 5b) if the embedded URL's <query> is non-empty, we skip to
                  // step 7; otherwise, it inherits the <query> of the base
                  // URL (if any) and we skip to step 7.

                  if (!relativeParts.query) {
                    builtParts.query = baseParts.query;
                  }
                }
              } else {
                // 6) The last segment of the base URL's path (anything
                // following the rightmost slash "/", or the entire path if no
                // slash is present) is removed and the embedded URL's path is
                // appended in its place.
                var baseURLPath = baseParts.path;
                var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
                builtParts.path = URLToolkit.normalizePath(newPath);
              }
            }
          }

          if (builtParts.path === null) {
            builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
          }

          return URLToolkit.buildURLFromParts(builtParts);
        },
        parseURL: function parseURL(url) {
          var parts = URL_REGEX.exec(url);

          if (!parts) {
            return null;
          }

          return {
            scheme: parts[1] || '',
            netLoc: parts[2] || '',
            path: parts[3] || '',
            params: parts[4] || '',
            query: parts[5] || '',
            fragment: parts[6] || ''
          };
        },
        normalizePath: function normalizePath(path) {
          // The following operations are
          // then applied, in order, to the new path:
          // 6a) All occurrences of "./", where "." is a complete path
          // segment, are removed.
          // 6b) If the path ends with "." as a complete path segment,
          // that "." is removed.
          path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, ''); // 6c) All occurrences of "<segment>/../", where <segment> is a
          // complete path segment not equal to "..", are removed.
          // Removal of these path segments is performed iteratively,
          // removing the leftmost matching pattern on each iteration,
          // until no matching pattern remains.
          // 6d) If the path ends with "<segment>/..", where <segment> is a
          // complete path segment not equal to "..", that
          // "<segment>/.." is removed.

          while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {}

          return path.split('').reverse().join('');
        },
        buildURLFromParts: function buildURLFromParts(parts) {
          return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
        }
      };
      module.exports = URLToolkit;
    })();
  });

  var DEFAULT_LOCATION = 'http://example.com';

  var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
    // return early if we don't need to resolve
    if (/^[a-z]+:/i.test(relativeUrl)) {
      return relativeUrl;
    } // if baseUrl is a data URI, ignore it and resolve everything relative to window.location


    if (/^data:/.test(baseUrl)) {
      baseUrl = window.location && window.location.href || '';
    } // IE11 supports URL but not the URL constructor
    // feature detect the behavior we want


    var nativeURL = typeof window.URL === 'function';
    var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
    // and if baseUrl isn't an absolute url

    var removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location

    if (nativeURL) {
      baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
    } else if (!/\/\//i.test(baseUrl)) {
      baseUrl = urlToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
    }

    if (nativeURL) {
      var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
      // and if we're location-less, remove the location
      // otherwise, return the url unmodified

      if (removeLocation) {
        return newUrl.href.slice(DEFAULT_LOCATION.length);
      } else if (protocolLess) {
        return newUrl.href.slice(newUrl.protocol.length);
      }

      return newUrl.href;
    }

    return urlToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
  };
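
  // Illustrative usage (not part of the bundle): resolving a segment URI
  // against a playlist URL. Assumes a browser-like environment where
  // `window` is defined; the URLs are made up for this sketch.
  //
  //   resolveUrl$1('https://example.com/hls/main.m3u8', 'seg-001.ts');
  //   // => 'https://example.com/hls/seg-001.ts'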

  /**
   * @file resolve-url.js - Handling how URLs are resolved and manipulated
   */
  var resolveUrl = resolveUrl$1;
  /**
   * Checks whether an xhr request was redirected and returns the correct url
   * depending on the `handleManifestRedirects` option
   *
   * @api private
   *
   * @param {string} url - the url being requested
   * @param {XMLHttpRequest} req - the xhr request result
   *
   * @return {string}
   */

  var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
    // To understand how the responseURL below is set and generated:
    // - https://fetch.spec.whatwg.org/#concept-response-url
    // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
    if (handleManifestRedirect && req && req.responseURL && url !== req.responseURL) {
      return req.responseURL;
    }

    return url;
  };
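
  // Illustrative usage (not part of the bundle): with redirect handling
  // enabled, a redirected request resolves to the final URL reported by the
  // XHR. `fakeReq` is a hypothetical stand-in for an XHR result.
  //
  //   var fakeReq = { responseURL: 'https://cdn.example.com/main.m3u8' };
  //   resolveManifestRedirect(true, 'https://example.com/main.m3u8', fakeReq);
  //   // => 'https://cdn.example.com/main.m3u8'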

  var logger = function logger(source) {
    if (videojs__default["default"].log.debug) {
      return videojs__default["default"].log.debug.bind(videojs__default["default"], 'VHS:', source + " >");
    }

    return function () {};
  };

  var _extends_1 = createCommonjsModule(function (module) {
    function _extends() {
      module.exports = _extends = Object.assign || function (target) {
        for (var i = 1; i < arguments.length; i++) {
          var source = arguments[i];

          for (var key in source) {
            if (Object.prototype.hasOwnProperty.call(source, key)) {
              target[key] = source[key];
            }
          }
        }

        return target;
      };

      module.exports["default"] = module.exports, module.exports.__esModule = true;
      return _extends.apply(this, arguments);
    }

    module.exports = _extends;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  /**
   * @file stream.js
   */

  /**
   * A lightweight readable stream implementation that handles event dispatching.
   *
   * @class Stream
   */
  var Stream = /*#__PURE__*/function () {
    function Stream() {
      this.listeners = {};
    }
    /**
     * Add a listener for a specified event type.
     *
     * @param {string} type the event name
     * @param {Function} listener the callback to be invoked when an event of
     * the specified type occurs
     */


    var _proto = Stream.prototype;

    _proto.on = function on(type, listener) {
      if (!this.listeners[type]) {
        this.listeners[type] = [];
      }

      this.listeners[type].push(listener);
    }
    /**
     * Remove a listener for a specified event type.
     *
     * @param {string} type the event name
     * @param {Function} listener a function previously registered for this
     * type of event through `on`
     * @return {boolean} if we could turn it off or not
     */
    ;

    _proto.off = function off(type, listener) {
      if (!this.listeners[type]) {
        return false;
      }

      var index = this.listeners[type].indexOf(listener); // TODO: which is better?
      // In Video.js we slice listener functions
      // on trigger so that it does not mess up the order
      // while we loop through.
      //
      // Here we slice on off so that the loop in trigger
      // can continue using its old reference to loop without
      // messing up the order.

      this.listeners[type] = this.listeners[type].slice(0);
      this.listeners[type].splice(index, 1);
      return index > -1;
    }
    /**
     * Trigger an event of the specified type on this stream. Any additional
     * arguments to this function are passed as parameters to event listeners.
     *
     * @param {string} type the event name
     */
    ;

    _proto.trigger = function trigger(type) {
      var callbacks = this.listeners[type];

      if (!callbacks) {
        return;
      } // Slicing the arguments on every invocation of this method
      // can add a significant amount of overhead. Avoid the
      // intermediate object creation for the common case of a
      // single callback argument


      if (arguments.length === 2) {
        var length = callbacks.length;

        for (var i = 0; i < length; ++i) {
          callbacks[i].call(this, arguments[1]);
        }
      } else {
        var args = Array.prototype.slice.call(arguments, 1);
        var _length = callbacks.length;

        for (var _i = 0; _i < _length; ++_i) {
          callbacks[_i].apply(this, args);
        }
      }
    }
    /**
     * Destroys the stream and cleans up.
     */
    ;

    _proto.dispose = function dispose() {
      this.listeners = {};
    }
    /**
     * Forwards all `data` events on this stream to the destination stream. The
     * destination stream should provide a method `push` to receive the data
     * events as they arrive.
     *
     * @param {Stream} destination the stream that will receive all `data` events
     * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
     */
    ;

    _proto.pipe = function pipe(destination) {
      this.on('data', function (data) {
        destination.push(data);
      });
    };

    return Stream;
  }();
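
  // Illustrative usage (not part of the bundle): the Stream base class is a
  // minimal event emitter. Listeners registered with `on` fire when
  // `trigger` is called with the matching event name.
  //
  //   var s = new Stream();
  //   s.on('data', function (d) { console.log('got', d); });
  //   s.trigger('data', 42); // logs: got 42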

  var atob = function atob(s) {
    return window.atob ? window.atob(s) : Buffer.from(s, 'base64').toString('binary');
  };

  function decodeB64ToUint8Array(b64Text) {
    var decodedString = atob(b64Text);
    var array = new Uint8Array(decodedString.length);

    for (var i = 0; i < decodedString.length; i++) {
      array[i] = decodedString.charCodeAt(i);
    }

    return array;
  }
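
  // Illustrative usage (not part of the bundle): base64 'AQID' decodes to
  // the bytes 0x01 0x02 0x03. This helper is used further down to decode
  // Widevine PSSH boxes carried in data URIs.
  //
  //   decodeB64ToUint8Array('AQID'); // => Uint8Array [1, 2, 3]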

  /*! @name m3u8-parser @version 4.8.0 @license Apache-2.0 */
  /**
   * A stream that buffers string input and generates a `data` event for each
   * line.
   *
   * @class LineStream
   * @extends Stream
   */

  var LineStream = /*#__PURE__*/function (_Stream) {
    inheritsLoose(LineStream, _Stream);

    function LineStream() {
      var _this;

      _this = _Stream.call(this) || this;
      _this.buffer = '';
      return _this;
    }
    /**
     * Add new data to be parsed.
     *
     * @param {string} data the text to process
     */


    var _proto = LineStream.prototype;

    _proto.push = function push(data) {
      var nextNewline;
      this.buffer += data;
      nextNewline = this.buffer.indexOf('\n');

      for (; nextNewline > -1; nextNewline = this.buffer.indexOf('\n')) {
        this.trigger('data', this.buffer.substring(0, nextNewline));
        this.buffer = this.buffer.substring(nextNewline + 1);
      }
    };

    return LineStream;
  }(Stream);
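
  // Illustrative usage (not part of the bundle): input is buffered until a
  // newline arrives, then each complete line is emitted as a `data` event.
  //
  //   var lines = new LineStream();
  //   lines.on('data', function (line) { console.log(line); });
  //   lines.push('#EXTM3U\n#EXT-X-VER'); // logs: #EXTM3U
  //   lines.push('SION:3\n');            // logs: #EXT-X-VERSION:3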

  var TAB = String.fromCharCode(0x09);

  var parseByterange = function parseByterange(byterangeString) {
    // optionally match and capture 0+ digits before `@`
    // optionally match and capture 0+ digits after `@`
    var match = /([0-9.]*)?@?([0-9.]*)?/.exec(byterangeString || '');
    var result = {};

    if (match[1]) {
      result.length = parseInt(match[1], 10);
    }

    if (match[2]) {
      result.offset = parseInt(match[2], 10);
    }

    return result;
  };
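
  // Illustrative usage (not part of the bundle): an HLS byterange string is
  // `<length>[@<offset>]`, so the length comes before the `@`.
  //
  //   parseByterange('522828@0'); // => { length: 522828, offset: 0 }
  //   parseByterange('587500');   // => { length: 587500 }
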
  /**
   * "forgiving" attribute list pseudo-grammar:
   * attributes -> keyvalue (',' keyvalue)*
   * keyvalue -> key '=' value
   * key -> [^=]*
   * value -> '"' [^"]* '"' | [^,]*
   */


  var attributeSeparator = function attributeSeparator() {
    var key = '[^=]*';
    var value = '"[^"]*"|[^,]*';
    var keyvalue = '(?:' + key + ')=(?:' + value + ')';
    return new RegExp('(?:^|,)(' + keyvalue + ')');
  };
  /**
   * Parse attributes from a line given the separator
   *
   * @param {string} attributes the attribute line to parse
   */


  var parseAttributes$1 = function parseAttributes(attributes) {
    // split the string using attributes as the separator
    var attrs = attributes.split(attributeSeparator());
    var result = {};
    var i = attrs.length;
    var attr;

    while (i--) {
      // filter out unmatched portions of the string
      if (attrs[i] === '') {
        continue;
      } // split the key and value


      attr = /([^=]*)=(.*)/.exec(attrs[i]).slice(1); // trim whitespace and remove optional quotes around the value

      attr[0] = attr[0].replace(/^\s+|\s+$/g, '');
      attr[1] = attr[1].replace(/^\s+|\s+$/g, '');
      attr[1] = attr[1].replace(/^['"](.*)['"]$/g, '$1');
      result[attr[0]] = attr[1];
    }

    return result;
  };
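
  // Illustrative usage (not part of the bundle): all values come back as
  // strings at this stage; numeric coercion happens per-tag later on.
  //
  //   parseAttributes$1('BANDWIDTH=540242,CODECS="avc1.42001e,mp4a.40.2"');
  //   // => { BANDWIDTH: '540242', CODECS: 'avc1.42001e,mp4a.40.2' }
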
  /**
   * A line-level M3U8 parser event stream. It expects to receive input one
   * line at a time and performs a context-free parse of its contents. A stream
   * interpretation of a manifest can be useful if the manifest is expected to
   * be too large to fit comfortably into memory or the entirety of the input
   * is not immediately available. Otherwise, it's probably much easier to work
   * with a regular `Parser` object.
   *
   * Produces `data` events with an object that captures the parser's
   * interpretation of the input. That object has a property `type` that is one
   * of `uri`, `comment`, or `tag`. URIs only have a single additional
   * property, `uri`, which captures the entirety of the input without
   * interpretation. Comments similarly have a single additional property
   * `text` which is the input without the leading `#`.
   *
   * Tags always have a property `tagType` which is the lower-cased version of
   * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
   * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
   * tags are given the tag type `unknown` and a single additional property
   * `data` with the remainder of the input.
   *
   * @class ParseStream
   * @extends Stream
   */


  var ParseStream = /*#__PURE__*/function (_Stream) {
    inheritsLoose(ParseStream, _Stream);

    function ParseStream() {
      var _this;

      _this = _Stream.call(this) || this;
      _this.customParsers = [];
      _this.tagMappers = [];
      return _this;
    }
    /**
     * Parses an additional line of input.
     *
     * @param {string} line a single line of an M3U8 file to parse
     */


    var _proto = ParseStream.prototype;

    _proto.push = function push(line) {
      var _this2 = this;

      var match;
      var event; // strip whitespace

      line = line.trim();

      if (line.length === 0) {
        // ignore empty lines
        return;
      } // URIs


      if (line[0] !== '#') {
        this.trigger('data', {
          type: 'uri',
          uri: line
        });
        return;
      } // map tags


      var newLines = this.tagMappers.reduce(function (acc, mapper) {
        var mappedLine = mapper(line); // skip if unchanged

        if (mappedLine === line) {
          return acc;
        }

        return acc.concat([mappedLine]);
      }, [line]);
      newLines.forEach(function (newLine) {
        for (var i = 0; i < _this2.customParsers.length; i++) {
          if (_this2.customParsers[i].call(_this2, newLine)) {
            return;
          }
        } // Comments


        if (newLine.indexOf('#EXT') !== 0) {
          _this2.trigger('data', {
            type: 'comment',
            text: newLine.slice(1)
          });

          return;
        } // strip off any carriage returns here so the regex matching
        // doesn't have to account for them.


        newLine = newLine.replace('\r', ''); // Tags

        match = /^#EXTM3U/.exec(newLine);

        if (match) {
          _this2.trigger('data', {
            type: 'tag',
            tagType: 'm3u'
          });

          return;
        }

        match = /^#EXTINF:?([0-9\.]*)?,?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'inf'
          };

          if (match[1]) {
            event.duration = parseFloat(match[1]);
          }

          if (match[2]) {
            event.title = match[2];
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-TARGETDURATION:?([0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'targetduration'
          };

          if (match[1]) {
            event.duration = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'version'
          };

          if (match[1]) {
            event.version = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-MEDIA-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'media-sequence'
          };

          if (match[1]) {
            event.number = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-DISCONTINUITY-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'discontinuity-sequence'
          };

          if (match[1]) {
            event.number = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PLAYLIST-TYPE:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'playlist-type'
          };

          if (match[1]) {
            event.playlistType = match[1];
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-BYTERANGE:?(.*)?$/.exec(newLine);

        if (match) {
          event = _extends_1(parseByterange(match[1]), {
            type: 'tag',
            tagType: 'byterange'
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-ALLOW-CACHE:?(YES|NO)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'allow-cache'
          };

          if (match[1]) {
            event.allowed = !/NO/.test(match[1]);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-MAP:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'map'
          };

          if (match[1]) {
            var attributes = parseAttributes$1(match[1]);

            if (attributes.URI) {
              event.uri = attributes.URI;
            }

            if (attributes.BYTERANGE) {
              event.byterange = parseByterange(attributes.BYTERANGE);
            }
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-STREAM-INF:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'stream-inf'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);

            if (event.attributes.RESOLUTION) {
              var split = event.attributes.RESOLUTION.split('x');
              var resolution = {};

              if (split[0]) {
                resolution.width = parseInt(split[0], 10);
              }

              if (split[1]) {
                resolution.height = parseInt(split[1], 10);
              }

              event.attributes.RESOLUTION = resolution;
            }

            if (event.attributes.BANDWIDTH) {
              event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
            }

            if (event.attributes['FRAME-RATE']) {
              event.attributes['FRAME-RATE'] = parseFloat(event.attributes['FRAME-RATE']);
            }

            if (event.attributes['PROGRAM-ID']) {
              event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
            }
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-MEDIA:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'media'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-ENDLIST/.exec(newLine);

        if (match) {
          _this2.trigger('data', {
            type: 'tag',
            tagType: 'endlist'
          });

          return;
        }

        match = /^#EXT-X-DISCONTINUITY/.exec(newLine);

        if (match) {
          _this2.trigger('data', {
            type: 'tag',
            tagType: 'discontinuity'
          });

          return;
        }

        match = /^#EXT-X-PROGRAM-DATE-TIME:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'program-date-time'
          };

          if (match[1]) {
            event.dateTimeString = match[1];
            event.dateTimeObject = new Date(match[1]);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-KEY:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'key'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]); // parse the IV string into a Uint32Array

            if (event.attributes.IV) {
              if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
                event.attributes.IV = event.attributes.IV.substring(2);
              }

              event.attributes.IV = event.attributes.IV.match(/.{8}/g);
              event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
              event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
              event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
              event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
              event.attributes.IV = new Uint32Array(event.attributes.IV);
            }
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-START:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'start'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);
            event.attributes['TIME-OFFSET'] = parseFloat(event.attributes['TIME-OFFSET']);
            event.attributes.PRECISE = /YES/.test(event.attributes.PRECISE);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-CUE-OUT-CONT:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-out-cont'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-CUE-OUT:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-out'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-in'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-SKIP:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'skip'
          };
          event.attributes = parseAttributes$1(match[1]);

          if (event.attributes.hasOwnProperty('SKIPPED-SEGMENTS')) {
            event.attributes['SKIPPED-SEGMENTS'] = parseInt(event.attributes['SKIPPED-SEGMENTS'], 10);
          }

          if (event.attributes.hasOwnProperty('RECENTLY-REMOVED-DATERANGES')) {
            event.attributes['RECENTLY-REMOVED-DATERANGES'] = event.attributes['RECENTLY-REMOVED-DATERANGES'].split(TAB);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PART:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'part'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['DURATION'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });
          ['INDEPENDENT', 'GAP'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = /YES/.test(event.attributes[key]);
            }
          });

          if (event.attributes.hasOwnProperty('BYTERANGE')) {
            event.attributes.byterange = parseByterange(event.attributes.BYTERANGE);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-SERVER-CONTROL:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'server-control'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['CAN-SKIP-UNTIL', 'PART-HOLD-BACK', 'HOLD-BACK'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });
          ['CAN-SKIP-DATERANGES', 'CAN-BLOCK-RELOAD'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = /YES/.test(event.attributes[key]);
            }
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PART-INF:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'part-inf'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['PART-TARGET'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PRELOAD-HINT:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'preload-hint'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['BYTERANGE-START', 'BYTERANGE-LENGTH'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseInt(event.attributes[key], 10);
              var subkey = key === 'BYTERANGE-LENGTH' ? 'length' : 'offset';
              event.attributes.byterange = event.attributes.byterange || {};
              event.attributes.byterange[subkey] = event.attributes[key]; // only keep the parsed byterange object.

              delete event.attributes[key];
            }
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-RENDITION-REPORT:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'rendition-report'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['LAST-MSN', 'LAST-PART'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseInt(event.attributes[key], 10);
            }
          });

          _this2.trigger('data', event);

          return;
        } // unknown tag type


        _this2.trigger('data', {
          type: 'tag',
          data: newLine.slice(4)
        });
      });
    }
    /**
     * Add a parser for custom headers
     *
     * @param {Object} options a map of options for the added parser
     * @param {RegExp} options.expression a regular expression to match the custom header
     * @param {string} options.customType the custom type to register to the output
     * @param {Function} [options.dataParser] function to parse the line into an object
     * @param {boolean} [options.segment] should tag data be attached to the segment object
     */
    ;

    _proto.addParser = function addParser(_ref) {
      var _this3 = this;

      var expression = _ref.expression,
          customType = _ref.customType,
          dataParser = _ref.dataParser,
          segment = _ref.segment;

      if (typeof dataParser !== 'function') {
        dataParser = function dataParser(line) {
          return line;
        };
      }

      this.customParsers.push(function (line) {
        var match = expression.exec(line);

        if (match) {
          _this3.trigger('data', {
            type: 'custom',
            data: dataParser(line),
            customType: customType,
            segment: segment
          });

          return true;
        }
      });
    }
    /**
     * Add a custom header mapper
     *
     * @param {Object} options
     * @param {RegExp} options.expression a regular expression to match the custom header
     * @param {Function} options.map function to translate tag into a different tag
     */
    ;

    _proto.addTagMapper = function addTagMapper(_ref2) {
      var expression = _ref2.expression,
          map = _ref2.map;

      var mapFn = function mapFn(line) {
        if (expression.test(line)) {
          return map(line);
        }

        return line;
      };

      this.tagMappers.push(mapFn);
    };

    return ParseStream;
  }(Stream);
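
  // Illustrative usage (not part of the bundle): feeding a ParseStream one
  // line at a time and registering a custom parser for a non-standard tag.
  // `#EXT-X-CUSTOM-TAG` is a hypothetical tag used only for this sketch.
  //
  //   var parseStream = new ParseStream();
  //   parseStream.on('data', function (entry) { console.log(entry.type, entry.tagType); });
  //   parseStream.addParser({
  //     expression: /^#EXT-X-CUSTOM-TAG/,
  //     customType: 'customTag'
  //   });
  //   parseStream.push('#EXT-X-VERSION:3'); // logs: tag version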

  var camelCase = function camelCase(str) {
    return str.toLowerCase().replace(/-(\w)/g, function (a) {
      return a[1].toUpperCase();
    });
  };

  var camelCaseKeys = function camelCaseKeys(attributes) {
    var result = {};
    Object.keys(attributes).forEach(function (key) {
      result[camelCase(key)] = attributes[key];
    });
    return result;
  }; // set SERVER-CONTROL hold back based upon targetDuration and partTargetDuration
  // we need this helper because defaults are based upon targetDuration and
  // partTargetDuration being set, but they may not be if SERVER-CONTROL appears before
  // target durations are set.


  var setHoldBack = function setHoldBack(manifest) {
    var serverControl = manifest.serverControl,
        targetDuration = manifest.targetDuration,
        partTargetDuration = manifest.partTargetDuration;

    if (!serverControl) {
      return;
    }

    var tag = '#EXT-X-SERVER-CONTROL';
    var hb = 'holdBack';
    var phb = 'partHoldBack';
    var minTargetDuration = targetDuration && targetDuration * 3;
    var minPartDuration = partTargetDuration && partTargetDuration * 2;

    if (targetDuration && !serverControl.hasOwnProperty(hb)) {
      serverControl[hb] = minTargetDuration;
      this.trigger('info', {
        message: tag + " defaulting HOLD-BACK to targetDuration * 3 (" + minTargetDuration + ")."
      });
    }

    if (minTargetDuration && serverControl[hb] < minTargetDuration) {
      this.trigger('warn', {
        message: tag + " clamping HOLD-BACK (" + serverControl[hb] + ") to targetDuration * 3 (" + minTargetDuration + ")"
      });
      serverControl[hb] = minTargetDuration;
    } // default no part hold back to part target duration * 3


    if (partTargetDuration && !serverControl.hasOwnProperty(phb)) {
      serverControl[phb] = partTargetDuration * 3;
      this.trigger('info', {
        message: tag + " defaulting PART-HOLD-BACK to partTargetDuration * 3 (" + serverControl[phb] + ")."
      });
    } // if part hold back is too small, clamp it to part target duration * 2


    if (partTargetDuration && serverControl[phb] < minPartDuration) {
      this.trigger('warn', {
        message: tag + " clamping PART-HOLD-BACK (" + serverControl[phb] + ") to partTargetDuration * 2 (" + minPartDuration + ")."
      });
      serverControl[phb] = minPartDuration;
    }
  };
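
  // Illustrative usage (not part of the bundle): attribute keys are
  // converted from HLS ALL-CAPS-WITH-DASHES form to camelCase before being
  // stored on the manifest.
  //
  //   camelCaseKeys({ 'CAN-SKIP-UNTIL': '36.0', 'CAN-BLOCK-RELOAD': true });
  //   // => { canSkipUntil: '36.0', canBlockReload: true }
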
1330 /**
1331 * A parser for M3U8 files. The current interpretation of the input is
1332 * exposed as a property `manifest` on parser objects. It's just two lines to
1333 * create and parse a manifest once you have the contents available as a string:
1334 *
1335 * ```js
1336 * var parser = new m3u8.Parser();
1337 * parser.push(xhr.responseText);
1338 * ```
1339 *
1340 * New input can later be applied to update the manifest object by calling
1341 * `push` again.
1342 *
1343 * The parser attempts to create a usable manifest object even if the
1344 * underlying input is somewhat nonsensical. It emits `info` and `warning`
1345 * events during the parse if it encounters input that seems invalid or
1346 * requires some property of the manifest object to be defaulted.
1347 *
1348 * @class Parser
1349 * @extends Stream
1350 */
1351
1352
1353 var Parser = /*#__PURE__*/function (_Stream) {
1354 inheritsLoose(Parser, _Stream);
1355
1356 function Parser() {
1357 var _this;
1358
1359 _this = _Stream.call(this) || this;
1360 _this.lineStream = new LineStream();
1361 _this.parseStream = new ParseStream();
1362
1363 _this.lineStream.pipe(_this.parseStream);
1364 /* eslint-disable consistent-this */
1365
1366
1367 var self = assertThisInitialized(_this);
1368 /* eslint-enable consistent-this */
1369
1370
1371 var uris = [];
1372 var currentUri = {}; // if specified, the active EXT-X-MAP definition
1373
1374 var currentMap; // if specified, the active decryption key
1375
1376 var _key;
1377
1378 var hasParts = false;
1379
1380 var noop = function noop() {};
1381
1382 var defaultMediaGroups = {
1383 'AUDIO': {},
1384 'VIDEO': {},
1385 'CLOSED-CAPTIONS': {},
1386 'SUBTITLES': {}
1387 }; // This is the Widevine UUID from DASH IF IOP. The same exact string is
1388 // used in MPDs with Widevine encrypted streams.
1389
1390 var widevineUuid = 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'; // group segments into numbered timelines delineated by discontinuities
1391
1392 var currentTimeline = 0; // the manifest is empty until the parse stream begins delivering data
1393
1394 _this.manifest = {
1395 allowCache: true,
1396 discontinuityStarts: [],
1397 segments: []
1398 }; // keep track of the last seen segment's byte range end, as segments are not required
1399 // to provide the offset, in which case it defaults to the next byte after the
1400 // previous segment
1401
1402 var lastByterangeEnd = 0; // keep track of the last seen part's byte range end.
1403
1404 var lastPartByterangeEnd = 0;
1405
1406 _this.on('end', function () {
1407 // only add preloadSegment if we don't yet have a uri for it.
1408 // and we actually have parts/preloadHints
1409 if (currentUri.uri || !currentUri.parts && !currentUri.preloadHints) {
1410 return;
1411 }
1412
1413 if (!currentUri.map && currentMap) {
1414 currentUri.map = currentMap;
1415 }
1416
1417 if (!currentUri.key && _key) {
1418 currentUri.key = _key;
1419 }
1420
1421 if (!currentUri.timeline && typeof currentTimeline === 'number') {
1422 currentUri.timeline = currentTimeline;
1423 }
1424
1425 _this.manifest.preloadSegment = currentUri;
1426 }); // update the manifest with the m3u8 entry from the parse stream
1427
1428
1429 _this.parseStream.on('data', function (entry) {
1430 var mediaGroup;
1431 var rendition;
1432 ({
1433 tag: function tag() {
1434 // switch based on the tag type
1435 (({
1436 version: function version() {
1437 if (entry.version) {
1438 this.manifest.version = entry.version;
1439 }
1440 },
1441 'allow-cache': function allowCache() {
1442 this.manifest.allowCache = entry.allowed;
1443
1444 if (!('allowed' in entry)) {
1445 this.trigger('info', {
1446 message: 'defaulting allowCache to YES'
1447 });
1448 this.manifest.allowCache = true;
1449 }
1450 },
1451 byterange: function byterange() {
1452 var byterange = {};
1453
1454 if ('length' in entry) {
1455 currentUri.byterange = byterange;
1456 byterange.length = entry.length;
1457
1458 if (!('offset' in entry)) {
1459 /*
1460 * From the latest spec (as of this writing):
1461 * https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.2
1462 *
1463 * Same text since EXT-X-BYTERANGE's introduction in draft 7:
1464 * https://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.1)
1465 *
1466 * "If o [offset] is not present, the sub-range begins at the next byte
1467 * following the sub-range of the previous media segment."
1468 */
1469 entry.offset = lastByterangeEnd;
1470 }
1471 }
1472
1473 if ('offset' in entry) {
1474 currentUri.byterange = byterange;
1475 byterange.offset = entry.offset;
1476 }
1477
1478 lastByterangeEnd = byterange.offset + byterange.length;
1479 },
1480 endlist: function endlist() {
1481 this.manifest.endList = true;
1482 },
1483 inf: function inf() {
1484 if (!('mediaSequence' in this.manifest)) {
1485 this.manifest.mediaSequence = 0;
1486 this.trigger('info', {
1487 message: 'defaulting media sequence to zero'
1488 });
1489 }
1490
1491 if (!('discontinuitySequence' in this.manifest)) {
1492 this.manifest.discontinuitySequence = 0;
1493 this.trigger('info', {
1494 message: 'defaulting discontinuity sequence to zero'
1495 });
1496 }
1497
1498 if (entry.duration > 0) {
1499 currentUri.duration = entry.duration;
1500 }
1501
1502 if (entry.duration === 0) {
1503 currentUri.duration = 0.01;
1504 this.trigger('info', {
1505 message: 'updating zero segment duration to a small value'
1506 });
1507 }
1508
1509 this.manifest.segments = uris;
1510 },
1511 key: function key() {
1512 if (!entry.attributes) {
1513 this.trigger('warn', {
1514 message: 'ignoring key declaration without attribute list'
1515 });
1516 return;
1517 } // clear the active encryption key
1518
1519
1520 if (entry.attributes.METHOD === 'NONE') {
1521 _key = null;
1522 return;
1523 }
1524
1525 if (!entry.attributes.URI) {
1526 this.trigger('warn', {
1527 message: 'ignoring key declaration without URI'
1528 });
1529 return;
1530 }
1531
1532 if (entry.attributes.KEYFORMAT === 'com.apple.streamingkeydelivery') {
1533 this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.
1534
1535 this.manifest.contentProtection['com.apple.fps.1_0'] = {
1536 attributes: entry.attributes
1537 };
1538 return;
1539 }
1540
1541 if (entry.attributes.KEYFORMAT === 'com.microsoft.playready') {
1542 this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.
1543
1544 this.manifest.contentProtection['com.microsoft.playready'] = {
1545 uri: entry.attributes.URI
1546 };
1547 return;
1548 } // check if the content is encrypted for Widevine
1549 // Widevine/HLS spec: https://storage.googleapis.com/wvdocs/Widevine_DRM_HLS.pdf
1550
1551
1552 if (entry.attributes.KEYFORMAT === widevineUuid) {
1553 var VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR', 'SAMPLE-AES-CENC'];
1554
1555 if (VALID_METHODS.indexOf(entry.attributes.METHOD) === -1) {
1556 this.trigger('warn', {
1557 message: 'invalid key method provided for Widevine'
1558 });
1559 return;
1560 }
1561
1562 if (entry.attributes.METHOD === 'SAMPLE-AES-CENC') {
1563 this.trigger('warn', {
1564 message: 'SAMPLE-AES-CENC is deprecated, please use SAMPLE-AES-CTR instead'
1565 });
1566 }
1567
1568 if (entry.attributes.URI.substring(0, 23) !== 'data:text/plain;base64,') {
1569 this.trigger('warn', {
1570 message: 'invalid key URI provided for Widevine'
1571 });
1572 return;
1573 }
1574
1575 if (!(entry.attributes.KEYID && entry.attributes.KEYID.substring(0, 2) === '0x')) {
1576 this.trigger('warn', {
1577 message: 'invalid key ID provided for Widevine'
1578 });
1579 return;
1580 } // if Widevine key attributes are valid, store them as `contentProtection`
1581 // on the manifest to emulate Widevine tag structure in a DASH mpd
1582
1583
1584 this.manifest.contentProtection = this.manifest.contentProtection || {};
1585 this.manifest.contentProtection['com.widevine.alpha'] = {
1586 attributes: {
1587 schemeIdUri: entry.attributes.KEYFORMAT,
1588 // remove '0x' from the key id string
1589 keyId: entry.attributes.KEYID.substring(2)
1590 },
1591 // decode the base64-encoded PSSH box
1592 pssh: decodeB64ToUint8Array(entry.attributes.URI.split(',')[1])
1593 };
1594 return;
1595 }
1596
1597 if (!entry.attributes.METHOD) {
1598 this.trigger('warn', {
1599 message: 'defaulting key method to AES-128'
1600 });
1601 } // setup an encryption key for upcoming segments
1602
1603
1604 _key = {
1605 method: entry.attributes.METHOD || 'AES-128',
1606 uri: entry.attributes.URI
1607 };
1608
1609 if (typeof entry.attributes.IV !== 'undefined') {
1610 _key.iv = entry.attributes.IV;
1611 }
1612 },
1613 'media-sequence': function mediaSequence() {
1614 if (!isFinite(entry.number)) {
1615 this.trigger('warn', {
1616 message: 'ignoring invalid media sequence: ' + entry.number
1617 });
1618 return;
1619 }
1620
1621 this.manifest.mediaSequence = entry.number;
1622 },
1623 'discontinuity-sequence': function discontinuitySequence() {
1624 if (!isFinite(entry.number)) {
1625 this.trigger('warn', {
1626 message: 'ignoring invalid discontinuity sequence: ' + entry.number
1627 });
1628 return;
1629 }
1630
1631 this.manifest.discontinuitySequence = entry.number;
1632 currentTimeline = entry.number;
1633 },
1634 'playlist-type': function playlistType() {
1635 if (!/VOD|EVENT/.test(entry.playlistType)) {
1636 this.trigger('warn', {
1637 message: 'ignoring unknown playlist type: ' + entry.playlist
1638 });
1639 return;
1640 }
1641
1642 this.manifest.playlistType = entry.playlistType;
1643 },
1644 map: function map() {
1645 currentMap = {};
1646
1647 if (entry.uri) {
1648 currentMap.uri = entry.uri;
1649 }
1650
1651 if (entry.byterange) {
1652 currentMap.byterange = entry.byterange;
1653 }
1654
1655 if (_key) {
1656 currentMap.key = _key;
1657 }
1658 },
1659 'stream-inf': function streamInf() {
1660 this.manifest.playlists = uris;
1661 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
1662
1663 if (!entry.attributes) {
1664 this.trigger('warn', {
1665 message: 'ignoring empty stream-inf attributes'
1666 });
1667 return;
1668 }
1669
1670 if (!currentUri.attributes) {
1671 currentUri.attributes = {};
1672 }
1673
1674 _extends_1(currentUri.attributes, entry.attributes);
1675 },
1676 media: function media() {
1677 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
1678
1679 if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
1680 this.trigger('warn', {
1681 message: 'ignoring incomplete or missing media group'
1682 });
1683 return;
1684 } // find the media group, creating defaults as necessary
1685
1686
1687 var mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
1688 mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
1689 mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']]; // collect the rendition metadata
1690
1691 rendition = {
1692 default: /yes/i.test(entry.attributes.DEFAULT)
1693 };
1694
1695 if (rendition.default) {
1696 rendition.autoselect = true;
1697 } else {
1698 rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
1699 }
1700
1701 if (entry.attributes.LANGUAGE) {
1702 rendition.language = entry.attributes.LANGUAGE;
1703 }
1704
1705 if (entry.attributes.URI) {
1706 rendition.uri = entry.attributes.URI;
1707 }
1708
1709 if (entry.attributes['INSTREAM-ID']) {
1710 rendition.instreamId = entry.attributes['INSTREAM-ID'];
1711 }
1712
1713 if (entry.attributes.CHARACTERISTICS) {
1714 rendition.characteristics = entry.attributes.CHARACTERISTICS;
1715 }
1716
1717 if (entry.attributes.FORCED) {
1718 rendition.forced = /yes/i.test(entry.attributes.FORCED);
1719 } // insert the new rendition
1720
1721
1722 mediaGroup[entry.attributes.NAME] = rendition;
1723 },
1724 discontinuity: function discontinuity() {
1725 currentTimeline += 1;
1726 currentUri.discontinuity = true;
1727 this.manifest.discontinuityStarts.push(uris.length);
1728 },
1729 'program-date-time': function programDateTime() {
1730 if (typeof this.manifest.dateTimeString === 'undefined') {
1731 // PROGRAM-DATE-TIME is a media-segment tag, but for backwards
1732 // compatibility, we add the first occurence of the PROGRAM-DATE-TIME tag
1733 // to the manifest object
1734 // TODO: Consider removing this in future major version
1735 this.manifest.dateTimeString = entry.dateTimeString;
1736 this.manifest.dateTimeObject = entry.dateTimeObject;
1737 }
1738
1739 currentUri.dateTimeString = entry.dateTimeString;
1740 currentUri.dateTimeObject = entry.dateTimeObject;
1741 },
1742 targetduration: function targetduration() {
1743 if (!isFinite(entry.duration) || entry.duration < 0) {
1744 this.trigger('warn', {
1745 message: 'ignoring invalid target duration: ' + entry.duration
1746 });
1747 return;
1748 }
1749
1750 this.manifest.targetDuration = entry.duration;
1751 setHoldBack.call(this, this.manifest);
1752 },
1753 start: function start() {
1754 if (!entry.attributes || isNaN(entry.attributes['TIME-OFFSET'])) {
1755 this.trigger('warn', {
1756 message: 'ignoring start declaration without appropriate attribute list'
1757 });
1758 return;
1759 }
1760
1761 this.manifest.start = {
1762 timeOffset: entry.attributes['TIME-OFFSET'],
1763 precise: entry.attributes.PRECISE
1764 };
1765 },
1766 'cue-out': function cueOut() {
1767 currentUri.cueOut = entry.data;
1768 },
1769 'cue-out-cont': function cueOutCont() {
1770 currentUri.cueOutCont = entry.data;
1771 },
1772 'cue-in': function cueIn() {
1773 currentUri.cueIn = entry.data;
1774 },
1775 'skip': function skip() {
1776 this.manifest.skip = camelCaseKeys(entry.attributes);
1777 this.warnOnMissingAttributes_('#EXT-X-SKIP', entry.attributes, ['SKIPPED-SEGMENTS']);
1778 },
1779 'part': function part() {
1780 var _this2 = this;
1781
1782 hasParts = true; // parts are always specifed before a segment
1783
1784 var segmentIndex = this.manifest.segments.length;
1785 var part = camelCaseKeys(entry.attributes);
1786 currentUri.parts = currentUri.parts || [];
1787 currentUri.parts.push(part);
1788
1789 if (part.byterange) {
1790 if (!part.byterange.hasOwnProperty('offset')) {
1791 part.byterange.offset = lastPartByterangeEnd;
1792 }
1793
1794 lastPartByterangeEnd = part.byterange.offset + part.byterange.length;
1795 }
1796
1797 var partIndex = currentUri.parts.length - 1;
1798 this.warnOnMissingAttributes_("#EXT-X-PART #" + partIndex + " for segment #" + segmentIndex, entry.attributes, ['URI', 'DURATION']);
1799
1800 if (this.manifest.renditionReports) {
1801 this.manifest.renditionReports.forEach(function (r, i) {
1802 if (!r.hasOwnProperty('lastPart')) {
1803 _this2.trigger('warn', {
1804 message: "#EXT-X-RENDITION-REPORT #" + i + " lacks required attribute(s): LAST-PART"
1805 });
1806 }
1807 });
1808 }
1809 },
1810 'server-control': function serverControl() {
1811 var attrs = this.manifest.serverControl = camelCaseKeys(entry.attributes);
1812
1813 if (!attrs.hasOwnProperty('canBlockReload')) {
1814 attrs.canBlockReload = false;
1815 this.trigger('info', {
1816 message: '#EXT-X-SERVER-CONTROL defaulting CAN-BLOCK-RELOAD to false'
1817 });
1818 }
1819
1820 setHoldBack.call(this, this.manifest);
1821
1822 if (attrs.canSkipDateranges && !attrs.hasOwnProperty('canSkipUntil')) {
1823 this.trigger('warn', {
1824 message: '#EXT-X-SERVER-CONTROL lacks required attribute CAN-SKIP-UNTIL which is required when CAN-SKIP-DATERANGES is set'
1825 });
1826 }
1827 },
1828 'preload-hint': function preloadHint() {
1829 // parts are always specifed before a segment
1830 var segmentIndex = this.manifest.segments.length;
1831 var hint = camelCaseKeys(entry.attributes);
1832 var isPart = hint.type && hint.type === 'PART';
1833 currentUri.preloadHints = currentUri.preloadHints || [];
1834 currentUri.preloadHints.push(hint);
1835
1836 if (hint.byterange) {
1837 if (!hint.byterange.hasOwnProperty('offset')) {
1838 // use last part byterange end or zero if not a part.
1839 hint.byterange.offset = isPart ? lastPartByterangeEnd : 0;
1840
1841 if (isPart) {
1842 lastPartByterangeEnd = hint.byterange.offset + hint.byterange.length;
1843 }
1844 }
1845 }
1846
1847 var index = currentUri.preloadHints.length - 1;
1848 this.warnOnMissingAttributes_("#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex, entry.attributes, ['TYPE', 'URI']);
1849
1850 if (!hint.type) {
1851 return;
1852 } // search through all preload hints except for the current one for
1853 // a duplicate type.
1854
1855
1856 for (var i = 0; i < currentUri.preloadHints.length - 1; i++) {
1857 var otherHint = currentUri.preloadHints[i];
1858
1859 if (!otherHint.type) {
1860 continue;
1861 }
1862
1863 if (otherHint.type === hint.type) {
1864 this.trigger('warn', {
1865 message: "#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex + " has the same TYPE " + hint.type + " as preload hint #" + i
1866 });
1867 }
1868 }
1869 },
1870 'rendition-report': function renditionReport() {
1871 var report = camelCaseKeys(entry.attributes);
1872 this.manifest.renditionReports = this.manifest.renditionReports || [];
1873 this.manifest.renditionReports.push(report);
1874 var index = this.manifest.renditionReports.length - 1;
1875 var required = ['LAST-MSN', 'URI'];
1876
1877 if (hasParts) {
1878 required.push('LAST-PART');
1879 }
1880
1881 this.warnOnMissingAttributes_("#EXT-X-RENDITION-REPORT #" + index, entry.attributes, required);
1882 },
1883 'part-inf': function partInf() {
1884 this.manifest.partInf = camelCaseKeys(entry.attributes);
1885 this.warnOnMissingAttributes_('#EXT-X-PART-INF', entry.attributes, ['PART-TARGET']);
1886
1887 if (this.manifest.partInf.partTarget) {
1888 this.manifest.partTargetDuration = this.manifest.partInf.partTarget;
1889 }
1890
1891 setHoldBack.call(this, this.manifest);
1892 }
1893 })[entry.tagType] || noop).call(self);
1894 },
1895 uri: function uri() {
1896 currentUri.uri = entry.uri;
1897 uris.push(currentUri); // if no explicit duration was declared, use the target duration
1898
1899 if (this.manifest.targetDuration && !('duration' in currentUri)) {
1900 this.trigger('warn', {
1901 message: 'defaulting segment duration to the target duration'
1902 });
1903 currentUri.duration = this.manifest.targetDuration;
1904 } // annotate with encryption information, if necessary
1905
1906
1907 if (_key) {
1908 currentUri.key = _key;
1909 }
1910
1911 currentUri.timeline = currentTimeline; // annotate with initialization segment information, if necessary
1912
1913 if (currentMap) {
1914 currentUri.map = currentMap;
1915 } // reset the last byterange end as it needs to be 0 between parts
1916
1917
1918 lastPartByterangeEnd = 0; // prepare for the next URI
1919
1920 currentUri = {};
1921 },
1922 comment: function comment() {// comments are not important for playback
1923 },
1924 custom: function custom() {
1925 // if this is segment-level data attach the output to the segment
1926 if (entry.segment) {
1927 currentUri.custom = currentUri.custom || {};
1928 currentUri.custom[entry.customType] = entry.data; // if this is manifest-level data attach to the top level manifest object
1929 } else {
1930 this.manifest.custom = this.manifest.custom || {};
1931 this.manifest.custom[entry.customType] = entry.data;
1932 }
1933 }
1934 })[entry.type].call(self);
1935 });
1936
1937 return _this;
1938 }
1939
1940 var _proto = Parser.prototype;
1941
1942 _proto.warnOnMissingAttributes_ = function warnOnMissingAttributes_(identifier, attributes, required) {
1943 var missing = [];
1944 required.forEach(function (key) {
1945 if (!attributes.hasOwnProperty(key)) {
1946 missing.push(key);
1947 }
1948 });
1949
1950 if (missing.length) {
1951 this.trigger('warn', {
1952 message: identifier + " lacks required attribute(s): " + missing.join(', ')
1953 });
1954 }
1955 }
1956 /**
1957 * Parse the input string and update the manifest object.
1958 *
1959 * @param {string} chunk a potentially incomplete portion of the manifest
1960 */
1961 ;
1962
1963 _proto.push = function push(chunk) {
1964 this.lineStream.push(chunk);
1965 }
1966 /**
1967 * Flush any remaining input. This can be handy if the last line of an M3U8
1968 * manifest did not contain a trailing newline but the file has been
1969 * completely received.
1970 */
1971 ;
1972
1973 _proto.end = function end() {
1974 // flush any buffered input
1975 this.lineStream.push('\n');
1976 this.trigger('end');
1977 }
1978 /**
1979 * Add an additional parser for non-standard tags
1980 *
1981 * @param {Object} options a map of options for the added parser
1982 * @param {RegExp} options.expression a regular expression to match the custom header
1983 * @param {string} options.type the type to register to the output
1984 * @param {Function} [options.dataParser] function to parse the line into an object
1985 * @param {boolean} [options.segment] should tag data be attached to the segment object
1986 */
1987 ;
1988
1989 _proto.addParser = function addParser(options) {
1990 this.parseStream.addParser(options);
1991 }
1992 /**
1993 * Add a custom header mapper
1994 *
1995 * @param {Object} options
1996 * @param {RegExp} options.expression a regular expression to match the custom header
1997 * @param {Function} options.map function to translate tag into a different tag
1998 */
1999 ;
2000
2001 _proto.addTagMapper = function addTagMapper(options) {
2002 this.parseStream.addTagMapper(options);
2003 };
2004
2005 return Parser;
2006 }(Stream);
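  // Editor's note — illustrative usage of the Parser above (not part of the
  // original bundle):
  //   var parser = new Parser();
  //   parser.push('#EXTM3U\n#EXT-X-TARGETDURATION:10\n#EXTINF:10,\nsegment0.ts\n#EXT-X-ENDLIST\n');
  //   parser.end();
  //   parser.manifest.targetDuration; // => 10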
2007
2008 var regexs = {
2009 // to determine mime types
2010 mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
2011 webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
2012 ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
2013 // to determine if a codec is audio or video
2014 video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
2015 audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
2016 text: /^(stpp.ttml.im1t)/,
2017 // mux.js support regex
2018 muxerVideo: /^(avc0?1)/,
2019 muxerAudio: /^(mp4a)/,
2020 // match nothing as muxer does not support text right now.
2021 // there can never be a character before the start of a string
2022 // so this matches nothing.
2023 muxerText: /a^/
2024 };
2025 var mediaTypes = ['video', 'audio', 'text'];
2026 var upperMediaTypes = ['Video', 'Audio', 'Text'];
2027 /**
2028 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
2029 * `avc1.<hhhhhh>`
2030 *
2031 * @param {string} codec
2032 * Codec string to translate
2033 * @return {string}
2034 * The translated codec string
2035 */
2036
2037 var translateLegacyCodec = function translateLegacyCodec(codec) {
2038 if (!codec) {
2039 return codec;
2040 }
2041
2042 return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
2043 var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
2044 var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
2045 return 'avc1.' + profileHex + '00' + avcLevelHex;
2046 });
2047 };
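  // Editor's note — illustrative usage (not part of the original bundle):
  //   translateLegacyCodec('avc1.66.30'); // => 'avc1.42001e'
  //   (profile 66 -> 0x42, level 30 -> 0x1e, joined by a literal '00'
  //   for the constraint-flags byte)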
2048 /**
2049 * @typedef {Object} ParsedCodecInfo
2050 * @property {number} codecCount
2051 * Number of codecs parsed
2052 * @property {string} [videoCodec]
2053 * Parsed video codec (if found)
2054 * @property {string} [videoObjectTypeIndicator]
2055 * Video object type indicator (if found)
2056 * @property {string|null} audioProfile
2057 * Audio profile
2058 */
2059
2060 /**
2061 * Parses a codec string to retrieve the number of codecs specified, the video codec and
2062 * object type indicator, and the audio profile.
2063 *
2064 * @param {string} [codecString]
2065 * The codec string to parse
2066 * @return {ParsedCodecInfo}
2067 * Parsed codec info
2068 */
2069
2070 var parseCodecs = function parseCodecs(codecString) {
2071 if (codecString === void 0) {
2072 codecString = '';
2073 }
2074
2075 var codecs = codecString.split(',');
2076 var result = [];
2077 codecs.forEach(function (codec) {
2078 codec = codec.trim();
2079 var codecType;
2080 mediaTypes.forEach(function (name) {
2081 var match = regexs[name].exec(codec.toLowerCase());
2082
2083 if (!match || match.length <= 1) {
2084 return;
2085 }
2086
2087 codecType = name; // maintain codec case
2088
2089 var type = codec.substring(0, match[1].length);
2090 var details = codec.replace(type, '');
2091 result.push({
2092 type: type,
2093 details: details,
2094 mediaType: name
2095 });
2096 });
2097
2098 if (!codecType) {
2099 result.push({
2100 type: codec,
2101 details: '',
2102 mediaType: 'unknown'
2103 });
2104 }
2105 });
2106 return result;
2107 };
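  // Editor's note — illustrative usage (not part of the original bundle):
  //   parseCodecs('avc1.42001e, mp4a.40.2');
  //   // => [{ type: 'avc1', details: '.42001e', mediaType: 'video' },
  //   //     { type: 'mp4a', details: '.40.2', mediaType: 'audio' }]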
2108 /**
2109 * Returns a ParsedCodecInfo object for the default alternate audio playlist if there is
2110 * a default alternate audio playlist for the provided audio group.
2111 *
2112 * @param {Object} master
2113 * The master playlist
2114 * @param {string} audioGroupId
2115 * ID of the audio group for which to find the default codec info
2116 * @return {ParsedCodecInfo}
2117 * Parsed codec info
2118 */
2119
2120 var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {
2121 if (!master.mediaGroups.AUDIO || !audioGroupId) {
2122 return null;
2123 }
2124
2125 var audioGroup = master.mediaGroups.AUDIO[audioGroupId];
2126
2127 if (!audioGroup) {
2128 return null;
2129 }
2130
2131 for (var name in audioGroup) {
2132 var audioType = audioGroup[name];
2133
2134 if (audioType.default && audioType.playlists) {
2135 // codec should be the same for all playlists within the audio type
2136 return parseCodecs(audioType.playlists[0].attributes.CODECS);
2137 }
2138 }
2139
2140 return null;
2141 };
2142 var isAudioCodec = function isAudioCodec(codec) {
2143 if (codec === void 0) {
2144 codec = '';
2145 }
2146
2147 return regexs.audio.test(codec.trim().toLowerCase());
2148 };
2149 var isTextCodec = function isTextCodec(codec) {
2150 if (codec === void 0) {
2151 codec = '';
2152 }
2153
2154 return regexs.text.test(codec.trim().toLowerCase());
2155 };
2156 var getMimeForCodec = function getMimeForCodec(codecString) {
2157 if (!codecString || typeof codecString !== 'string') {
2158 return;
2159 }
2160
2161 var codecs = codecString.toLowerCase().split(',').map(function (c) {
2162 return translateLegacyCodec(c.trim());
2163 }); // default to video type
2164
2165 var type = 'video'; // only change to audio type if the only codec we have is
2166 // audio
2167
2168 if (codecs.length === 1 && isAudioCodec(codecs[0])) {
2169 type = 'audio';
2170 } else if (codecs.length === 1 && isTextCodec(codecs[0])) {
2171 // text uses application/<container> for now
2172 type = 'application';
2173 } // default the container to mp4
2174
2175
2176 var container = 'mp4'; // every codec must be able to go into the container
2177 // for that container to be the correct one
2178
2179 if (codecs.every(function (c) {
2180 return regexs.mp4.test(c);
2181 })) {
2182 container = 'mp4';
2183 } else if (codecs.every(function (c) {
2184 return regexs.webm.test(c);
2185 })) {
2186 container = 'webm';
2187 } else if (codecs.every(function (c) {
2188 return regexs.ogg.test(c);
2189 })) {
2190 container = 'ogg';
2191 }
2192
2193 return type + "/" + container + ";codecs=\"" + codecString + "\"";
2194 };
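  // Editor's note — illustrative usage (not part of the original bundle):
  //   getMimeForCodec('avc1.42001e,mp4a.40.2'); // => 'video/mp4;codecs="avc1.42001e,mp4a.40.2"'
  //   getMimeForCodec('mp4a.40.2');             // => 'audio/mp4;codecs="mp4a.40.2"'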
2195 var browserSupportsCodec = function browserSupportsCodec(codecString) {
2196 if (codecString === void 0) {
2197 codecString = '';
2198 }
2199
2200 return window.MediaSource && window.MediaSource.isTypeSupported && window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
2201 };
2202 var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
2203 if (codecString === void 0) {
2204 codecString = '';
2205 }
2206
2207 return codecString.toLowerCase().split(',').every(function (codec) {
2208 codec = codec.trim(); // any match is supported.
2209
2210 for (var i = 0; i < upperMediaTypes.length; i++) {
2211 var type = upperMediaTypes[i];
2212
2213 if (regexs["muxer" + type].test(codec)) {
2214 return true;
2215 }
2216 }
2217
2218 return false;
2219 });
2220 };
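  // Editor's note — illustrative usage (not part of the original bundle):
  //   muxerSupportsCodec('avc1.4d400d,mp4a.40.2'); // => true (mux.js can transmux both)
  //   muxerSupportsCodec('hvc1.1.6.L93.90');       // => false (HEVC is not transmuxable)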
2221 var DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
2222 var DEFAULT_VIDEO_CODEC = 'avc1.4d400d';
2223
2224 /**
2225 * ranges
2226 *
2227 * Utilities for working with TimeRanges.
2228 *
2229 */
2230
2231 var TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
2232 // can be misleading because of precision differences or when the current media has poorly
2233 // aligned audio and video, which can cause values to be slightly off from what you would
2234 // expect. This value is what we consider to be safe to use in such comparisons to account
2235 // for these scenarios.
2236
2237 var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
2238
2239 var filterRanges = function filterRanges(timeRanges, predicate) {
2240 var results = [];
2241 var i;
2242
2243 if (timeRanges && timeRanges.length) {
2244 // Search for ranges that match the predicate
2245 for (i = 0; i < timeRanges.length; i++) {
2246 if (predicate(timeRanges.start(i), timeRanges.end(i))) {
2247 results.push([timeRanges.start(i), timeRanges.end(i)]);
2248 }
2249 }
2250 }
2251
2252 return videojs__default["default"].createTimeRanges(results);
2253 };
2254 /**
2255 * Attempts to find the buffered TimeRange that contains the specified
2256 * time.
2257 *
2258 * @param {TimeRanges} buffered - the TimeRanges object to query
2259 * @param {number} time - the time to filter on.
2260 * @return {TimeRanges} a new TimeRanges object
2261 */
2262
2263
2264 var findRange = function findRange(buffered, time) {
2265 return filterRanges(buffered, function (start, end) {
2266 return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
2267 });
2268 };
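  // Editor's note — illustrative usage (not part of the original bundle):
  //   var buffered = videojs.createTimeRanges([[0, 10], [20, 30]]);
  //   findRange(buffered, 25); // => TimeRanges containing only [20, 30]
  //   findRange(buffered, 15); // => empty TimeRanges (15 falls in the gap)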
2269 /**
2270 * Returns the TimeRanges that begin later than the specified time.
2271 *
2272 * @param {TimeRanges} timeRanges - the TimeRanges object to query
2273 * @param {number} time - the time to filter on.
2274 * @return {TimeRanges} a new TimeRanges object.
2275 */
2276
2277 var findNextRange = function findNextRange(timeRanges, time) {
2278 return filterRanges(timeRanges, function (start) {
2279 return start - TIME_FUDGE_FACTOR >= time;
2280 });
2281 };
2282 /**
2283 * Returns gaps within a list of TimeRanges
2284 *
2285 * @param {TimeRanges} buffered - the TimeRanges object
2286 * @return {TimeRanges} a TimeRanges object of gaps
2287 */
2288
2289 var findGaps = function findGaps(buffered) {
2290 if (buffered.length < 2) {
2291 return videojs__default["default"].createTimeRanges();
2292 }
2293
2294 var ranges = [];
2295
2296 for (var i = 1; i < buffered.length; i++) {
2297 var start = buffered.end(i - 1);
2298 var end = buffered.start(i);
2299 ranges.push([start, end]);
2300 }
2301
2302 return videojs__default["default"].createTimeRanges(ranges);
2303 };
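  // Editor's note — illustrative usage (not part of the original bundle):
  //   findGaps(videojs.createTimeRanges([[0, 10], [15, 20]]));
  //   // => TimeRanges containing the single gap [10, 15]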
2304 /**
2305 * Calculate the intersection of two TimeRanges
2306 *
2307 * @param {TimeRanges} bufferA
2308 * @param {TimeRanges} bufferB
2309 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
2310 */
2311
2312 var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
2313 var start = null;
2314 var end = null;
2315 var arity = 0;
2316 var extents = [];
2317 var ranges = [];
2318
2319 if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
2320 return videojs__default["default"].createTimeRange();
2321 } // Handle the case where we have both buffers and create an
2322 // intersection of the two
2323
2324
2325 var count = bufferA.length; // A) Gather up all start and end times
2326
2327 while (count--) {
2328 extents.push({
2329 time: bufferA.start(count),
2330 type: 'start'
2331 });
2332 extents.push({
2333 time: bufferA.end(count),
2334 type: 'end'
2335 });
2336 }
2337
2338 count = bufferB.length;
2339
2340 while (count--) {
2341 extents.push({
2342 time: bufferB.start(count),
2343 type: 'start'
2344 });
2345 extents.push({
2346 time: bufferB.end(count),
2347 type: 'end'
2348 });
2349 } // B) Sort them by time
2350
2351
2352 extents.sort(function (a, b) {
2353 return a.time - b.time;
2354 }); // C) Go along one by one incrementing arity for start and decrementing
2355 // arity for ends
2356
2357 for (count = 0; count < extents.length; count++) {
2358 if (extents[count].type === 'start') {
2359 arity++; // D) If arity is ever incremented to 2 we are entering an
2360 // overlapping range
2361
2362 if (arity === 2) {
2363 start = extents[count].time;
2364 }
2365 } else if (extents[count].type === 'end') {
2366 arity--; // E) If arity is ever decremented to 1 we are leaving an
2367 // overlapping range
2368
2369 if (arity === 1) {
2370 end = extents[count].time;
2371 }
2372 } // F) Record overlapping ranges
2373
2374
2375 if (start !== null && end !== null) {
2376 ranges.push([start, end]);
2377 start = null;
2378 end = null;
2379 }
2380 }
2381
2382 return videojs__default["default"].createTimeRanges(ranges);
2383 };
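  // Editor's note — illustrative usage (not part of the original bundle):
  //   bufferIntersection(videojs.createTimeRanges([[0, 10]]),
  //                      videojs.createTimeRanges([[5, 15]]));
  //   // => TimeRanges containing [5, 10]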
2384 /**
2385 * Gets a human readable string for a TimeRange
2386 *
2387 * @param {TimeRange} range
2388 * @return {string} a human readable string
2389 */
2390
2391 var printableRange = function printableRange(range) {
2392 var strArr = [];
2393
2394 if (!range || !range.length) {
2395 return '';
2396 }
2397
2398 for (var i = 0; i < range.length; i++) {
2399 strArr.push(range.start(i) + ' => ' + range.end(i));
2400 }
2401
2402 return strArr.join(', ');
2403 };
2404 /**
2405 * Calculates the amount of time left in seconds until the player hits the end of the
2406 * buffer and causes a rebuffer
2407 *
2408 * @param {TimeRange} buffered
2409 * The state of the buffer
2410 * @param {number} currentTime
2411 * The current time of the player
2412 * @param {number} playbackRate
2413 * The current playback rate of the player. Defaults to 1.
2414 * @return {number}
2415 * Time until the player has to start rebuffering in seconds.
2416 * @function timeUntilRebuffer
2417 */
2418
2419 var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
2420 if (playbackRate === void 0) {
2421 playbackRate = 1;
2422 }
2423
2424 var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
2425 return (bufferedEnd - currentTime) / playbackRate;
2426 };
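  // Editor's note — illustrative usage (not part of the original bundle):
  //   var buffered = videojs.createTimeRanges([[0, 30]]);
  //   timeUntilRebuffer(buffered, 12);    // => 18 seconds at 1x
  //   timeUntilRebuffer(buffered, 12, 2); // => 9 seconds at 2x playback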
2427 /**
2428 * Converts a TimeRanges object into an array representation
2429 *
2430 * @param {TimeRanges} timeRanges
2431 * @return {Array}
2432 */
2433
2434 var timeRangesToArray = function timeRangesToArray(timeRanges) {
2435 var timeRangesList = [];
2436
2437 for (var i = 0; i < timeRanges.length; i++) {
2438 timeRangesList.push({
2439 start: timeRanges.start(i),
2440 end: timeRanges.end(i)
2441 });
2442 }
2443
2444 return timeRangesList;
2445 };
2446 /**
2447 * Determines if two time range objects are different.
2448 *
2449 * @param {TimeRange} a
2450 * the first time range object to check
2451 *
2452 * @param {TimeRange} b
2453 * the second time range object to check
2454 *
2455 * @return {Boolean}
2456 * Whether the time range objects differ
2457 */
2458
2459 var isRangeDifferent = function isRangeDifferent(a, b) {
2460 // same object
2461 if (a === b) {
2462 return false;
2463 } // one or the other is undefined
2464
2465
2466 if (!a && b || !b && a) {
2467 return true;
2468 } // length is different
2469
2470
2471 if (a.length !== b.length) {
2472 return true;
2473 } // see if any start/end pair is different
2474
2475
2476 for (var i = 0; i < a.length; i++) {
2477 if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
2478 return true;
2479 }
2480 } // if the length and every pair is the same
2481 // this is the same time range
2482
2483
2484 return false;
2485 };
2486 var lastBufferedEnd = function lastBufferedEnd(a) {
2487 if (!a || !a.length || !a.end) {
2488 return;
2489 }
2490
2491 return a.end(a.length - 1);
2492 };
2493 /**
2494 * A utility function to add up the amount of time in a timeRange
2495 * after a specified startTime.
2496 * e.g. [[0, 10], [20, 40], [50, 60]] with a startTime of 0
2497 * would return 40, as there are 40 seconds after 0 in the timeRange
2498 *
2499 * @param {TimeRange} range
2500 * The range to check against
2501 * @param {number} startTime
2502 * The time in the time range that you should start counting from
2503 *
2504 * @return {number}
2505 * The number of seconds in the buffer past the specified time.
2506 */
2507
2508 var timeAheadOf = function timeAheadOf(range, startTime) {
2509 var time = 0;
2510
2511 if (!range || !range.length) {
2512 return time;
2513 }
2514
2515 for (var i = 0; i < range.length; i++) {
2516 var start = range.start(i);
2517 var end = range.end(i); // startTime is after this range entirely
2518
2519 if (startTime > end) {
2520 continue;
2521 } // startTime is within this range
2522
2523
2524 if (startTime > start && startTime <= end) {
2525 time += end - startTime;
2526 continue;
2527 } // startTime is before this range.
2528
2529
2530 time += end - start;
2531 }
2532
2533 return time;
2534 };
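  // Editor's note — illustrative usage (not part of the original bundle):
  //   var range = videojs.createTimeRanges([[0, 10], [20, 40], [50, 60]]);
  //   timeAheadOf(range, 0);  // => 40 (10 + 20 + 10 seconds after time 0)
  //   timeAheadOf(range, 25); // => 25 (15 left in [20, 40] plus all of [50, 60])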
2535
2536 /**
2537 * @file playlist.js
2538 *
2539 * Playlist related utilities.
2540 */
2541 var createTimeRange = videojs__default["default"].createTimeRange;
2542 /**
2543 * Get the duration of a segment, with special cases for
2544 * llhls segments that do not have a duration yet.
2545 *
2546 * @param {Object} playlist
2547 * the playlist that the segment belongs to.
2548 * @param {Object} segment
2549 * the segment to get a duration for.
2550 *
2551 * @return {number}
2552 * the segment duration
2553 */
2554
2555 var segmentDurationWithParts = function segmentDurationWithParts(playlist, segment) {
2556 // if this isn't a preload segment
2557 // then we will have a segment duration that is accurate.
2558 if (!segment.preload) {
2559 return segment.duration;
2560 } // otherwise we have to add up parts and preload hints
2561 // to get an up to date duration.
2562
2563
2564 var result = 0;
2565 (segment.parts || []).forEach(function (p) {
2566 result += p.duration;
2567 }); // for preload hints we have to use partTargetDuration
2568 // as they won't even have a duration yet.
2569
2570 (segment.preloadHints || []).forEach(function (p) {
2571 if (p.type === 'PART') {
2572 result += playlist.partTargetDuration;
2573 }
2574 });
2575 return result;
2576 };
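  // Editor's note — illustrative example (not part of the original bundle):
  //   given playlist.partTargetDuration === 1, a preload segment with
  //   parts [{duration: 1}, {duration: 1}] and preloadHints [{type: 'PART'}]
  //   yields segmentDurationWithParts(playlist, segment) === 3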
2577 /**
2578 * A function to get a combined list of parts and segments with durations
2579 * and indexes.
2580 *
2581 * @param {Playlist} playlist the playlist to get the list for.
2582 *
2583 * @return {Array} The part/segment list.
2584 */
2585
2586 var getPartsAndSegments = function getPartsAndSegments(playlist) {
2587 return (playlist.segments || []).reduce(function (acc, segment, si) {
2588 if (segment.parts) {
2589 segment.parts.forEach(function (part, pi) {
2590 acc.push({
2591 duration: part.duration,
2592 segmentIndex: si,
2593 partIndex: pi,
2594 part: part,
2595 segment: segment
2596 });
2597 });
2598 } else {
2599 acc.push({
2600 duration: segment.duration,
2601 segmentIndex: si,
2602 partIndex: null,
2603 segment: segment,
2604 part: null
2605 });
2606 }
2607
2608 return acc;
2609 }, []);
2610 };
2611 var getLastParts = function getLastParts(media) {
2612 var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
2613 return lastSegment && lastSegment.parts || [];
2614 };
2615 var getKnownPartCount = function getKnownPartCount(_ref) {
2616 var preloadSegment = _ref.preloadSegment;
2617
2618 if (!preloadSegment) {
2619 return;
2620 }
2621
2622 var parts = preloadSegment.parts,
2623 preloadHints = preloadSegment.preloadHints;
2624 var partCount = (preloadHints || []).reduce(function (count, hint) {
2625 return count + (hint.type === 'PART' ? 1 : 0);
2626 }, 0);
2627 partCount += parts && parts.length ? parts.length : 0;
2628 return partCount;
2629 };
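  // Editor's note — illustrative example (not part of the original bundle):
  //   a preload segment with two parsed parts and one 'PART' preload hint
  //   yields getKnownPartCount({ preloadSegment: segment }) === 3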
2630 /**
2631 * Get the number of seconds to delay from the end of a
2632 * live playlist.
2633 *
2634 * @param {Playlist} master the master playlist
2635 * @param {Playlist} media the media playlist
2636 * @return {number} the hold back in seconds.
2637 */
2638
2639 var liveEdgeDelay = function liveEdgeDelay(master, media) {
2640 if (media.endList) {
2641 return 0;
2642 } // dash suggestedPresentationDelay trumps everything
2643
2644
2645 if (master && master.suggestedPresentationDelay) {
2646 return master.suggestedPresentationDelay;
2647 }
2648
2649 var hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls first
2650
2651 if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
2652 return media.serverControl.partHoldBack;
2653 } else if (hasParts && media.partTargetDuration) {
2654 return media.partTargetDuration * 3; // finally look for full segment delays
2655 } else if (media.serverControl && media.serverControl.holdBack) {
2656 return media.serverControl.holdBack;
2657 } else if (media.targetDuration) {
2658 return media.targetDuration * 3;
2659 }
2660
2661 return 0;
2662 };
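  // Editor's note — illustrative behavior (not part of the original bundle):
  //   for a live playlist with no parts, no serverControl hold-backs and
  //   targetDuration 6, liveEdgeDelay(master, media) === 18 (three target
  //   durations); a DASH suggestedPresentationDelay on master overrides this.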
2663 /**
2664 * walk backward until we find a duration we can use
2665 * or return a failure
2666 *
2667 * @param {Playlist} playlist the playlist to walk through
2668 * @param {Number} endSequence the mediaSequence to stop walking on
2669 */
2670
2671 var backwardDuration = function backwardDuration(playlist, endSequence) {
2672 var result = 0;
2673 var i = endSequence - playlist.mediaSequence; // if a start time is available for the segment immediately following
2674 // the interval, use it
2675
2676 var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
2677 // information that is earlier than endSequence
2678
2679 if (segment) {
2680 if (typeof segment.start !== 'undefined') {
2681 return {
2682 result: segment.start,
2683 precise: true
2684 };
2685 }
2686
2687 if (typeof segment.end !== 'undefined') {
2688 return {
2689 result: segment.end - segment.duration,
2690 precise: true
2691 };
2692 }
2693 }
2694
2695 while (i--) {
2696 segment = playlist.segments[i];
2697
2698 if (typeof segment.end !== 'undefined') {
2699 return {
2700 result: result + segment.end,
2701 precise: true
2702 };
2703 }
2704
2705 result += segmentDurationWithParts(playlist, segment);
2706
2707 if (typeof segment.start !== 'undefined') {
2708 return {
2709 result: result + segment.start,
2710 precise: true
2711 };
2712 }
2713 }
2714
2715 return {
2716 result: result,
2717 precise: false
2718 };
2719 };
2720 /**
2721 * walk forward until we find a duration we can use
2722 * or return a failure
2723 *
2724 * @param {Playlist} playlist the playlist to walk through
2725 * @param {number} endSequence the mediaSequence to stop walking on
2726 */
2727
2728
2729 var forwardDuration = function forwardDuration(playlist, endSequence) {
2730 var result = 0;
2731 var segment;
2732 var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
2733 // information
2734
2735 for (; i < playlist.segments.length; i++) {
2736 segment = playlist.segments[i];
2737
2738 if (typeof segment.start !== 'undefined') {
2739 return {
2740 result: segment.start - result,
2741 precise: true
2742 };
2743 }
2744
2745 result += segmentDurationWithParts(playlist, segment);
2746
2747 if (typeof segment.end !== 'undefined') {
2748 return {
2749 result: segment.end - result,
2750 precise: true
2751 };
2752 }
2753 } // indicate we didn't find a useful duration estimate
2754
2755
2756 return {
2757 result: -1,
2758 precise: false
2759 };
2760 };
2761 /**
2762 * Calculate the media duration from the segments associated with a
2763 * playlist. The duration of a subinterval of the available segments
2764 * may be calculated by specifying an end index.
2765 *
2766 * @param {Object} playlist a media playlist object
2767 * @param {number=} endSequence an exclusive upper boundary
2768 * for the playlist. Defaults to the playlist's media sequence plus its segment count.
2769 * @param {number} expired the amount of time that has dropped
2770 * off the front of the playlist in a live scenario
2771 * @return {number} the duration between the first available segment
2772 * and end index.
2773 */
2774
2775
2776 var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
2777 if (typeof endSequence === 'undefined') {
2778 endSequence = playlist.mediaSequence + playlist.segments.length;
2779 }
2780
2781 if (endSequence < playlist.mediaSequence) {
2782 return 0;
2783 } // do a backward walk to estimate the duration
2784
2785
2786 var backward = backwardDuration(playlist, endSequence);
2787
2788 if (backward.precise) {
2789 // if we were able to base our duration estimate on timing
2790 // information provided directly from the Media Source, return
2791 // it
2792 return backward.result;
2793 } // walk forward to see if a precise duration estimate can be made
2794 // that way
2795
2796
2797 var forward = forwardDuration(playlist, endSequence);
2798
2799 if (forward.precise) {
2800 // we found a segment that has been buffered and so its
2801 // position is known precisely
2802 return forward.result;
2803 } // return the less-precise, playlist-based duration estimate
2804
2805
2806 return backward.result + expired;
2807 };
2808 /**
2809 * Calculates the duration of a playlist. If a start and end index
2810 * are specified, the duration will be for the subset of the media
2811 * timeline between those two indices. The total duration for live
2812 * playlists is always Infinity.
2813 *
2814 * @param {Object} playlist a media playlist object
2815 * @param {number=} endSequence an exclusive upper
2816 * boundary for the playlist. Defaults to the playlist media
2817 * sequence number plus its length.
2818 * @param {number=} expired the amount of time that has
2819 * dropped off the front of the playlist in a live scenario
2820 * @return {number} the duration between the start index and end
2821 * index.
2822 */
2823
2824
2825 var duration = function duration(playlist, endSequence, expired) {
2826 if (!playlist) {
2827 return 0;
2828 }
2829
2830 if (typeof expired !== 'number') {
2831 expired = 0;
2832 } // if a slice of the total duration is not requested, use
2833 // playlist-level duration indicators when they're present
2834
2835
2836 if (typeof endSequence === 'undefined') {
2837 // if present, use the duration specified in the playlist
2838 if (playlist.totalDuration) {
2839 return playlist.totalDuration;
2840 } // duration should be Infinity for live playlists
2841
2842
2843 if (!playlist.endList) {
2844 return window.Infinity;
2845 }
2846 } // calculate the total duration based on the segment durations
2847
2848
2849 return intervalDuration(playlist, endSequence, expired);
2850 };
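  // Editor's note — illustrative behavior (not part of the original bundle):
  //   duration(playlist) === Infinity for a live playlist (no endList);
  //   for VOD it returns playlist.totalDuration when present, otherwise the
  //   segment-based estimate from intervalDuration.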
2851 /**
2852 * Calculate the time between two indexes in the current playlist.
2853 * Neither the start index nor the end index needs to be within the
2854 * current playlist, in which case the defaultDuration is used to
2855 * approximate the durations of the segments.
2856 *
2857 * @param {Array} options.durationList list to iterate over for durations.
2858 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
2859 * @param {number} options.startIndex partsAndSegments index to start
2860 * @param {number} options.endIndex partsAndSegments index to end.
2861 * @return {number} the number of seconds between startIndex and endIndex
2862 */
2863
2864 var sumDurations = function sumDurations(_ref2) {
2865 var defaultDuration = _ref2.defaultDuration,
2866 durationList = _ref2.durationList,
2867 startIndex = _ref2.startIndex,
2868 endIndex = _ref2.endIndex;
2869 var durations = 0;
2870
2871 if (startIndex > endIndex) {
2872 var _ref3 = [endIndex, startIndex];
2873 startIndex = _ref3[0];
2874 endIndex = _ref3[1];
2875 }
2876
2877 if (startIndex < 0) {
2878 for (var i = startIndex; i < Math.min(0, endIndex); i++) {
2879 durations += defaultDuration;
2880 }
2881
2882 startIndex = 0;
2883 }
2884
2885 for (var _i = startIndex; _i < endIndex; _i++) {
2886 durations += durationList[_i].duration;
2887 }
2888
2889 return durations;
2890 };
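  // Editor's note — illustrative usage (not part of the original bundle):
  //   sumDurations({
  //     defaultDuration: 6,
  //     durationList: [{duration: 4}, {duration: 5}, {duration: 4}],
  //     startIndex: -1,
  //     endIndex: 2
  //   }); // => 15 (one defaultDuration for index -1, then 4 + 5)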
2891 /**
2892 * Calculates the playlist end time
2893 *
2894 * @param {Object} playlist a media playlist object
2895 * @param {number=} expired the amount of time that has
2896 * dropped off the front of the playlist in a live scenario
2897 * @param {boolean} useSafeLiveEnd a boolean value indicating whether or not the
2898 * playlist end calculation should consider the safe live end
2899 * (truncate the playlist end by three segments). This is normally
2900 * used for calculating the end of the playlist's seekable range.
2901 * This takes into account the value of liveEdgePadding.
2902 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
2903 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
2904 * If this is provided, it is used in the safe live end calculation.
2905 * Setting useSafeLiveEnd=false and setting liveEdgePadding=0 are equivalent.
2906 * Corresponds to suggestedPresentationDelay in DASH manifests.
2907 * @return {number} the end time of playlist
2908 * @function playlistEnd
2909 */
2910
2911 var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
2912 if (!playlist || !playlist.segments) {
2913 return null;
2914 }
2915
2916 if (playlist.endList) {
2917 return duration(playlist);
2918 }
2919
2920 if (expired === null) {
2921 return null;
2922 }
2923
2924 expired = expired || 0;
2925 var lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);
2926
2927 if (useSafeLiveEnd) {
2928 liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
2929 lastSegmentEndTime -= liveEdgePadding;
2930 } // don't return a time less than zero
2931
2932
2933 return Math.max(0, lastSegmentEndTime);
2934 };
2935 /**
2936 * Calculates the interval of time that is currently seekable in a
2937 * playlist. The returned time ranges are relative to the earliest
2938 * moment in the specified playlist that is still available. A full
2939 * seekable implementation for live streams would need to offset
2940 * these values by the duration of content that has expired from the
2941 * stream.
2942 *
2943 * @param {Object} playlist a media playlist object
2944 *
2945 * @param {number=} expired the amount of time that has
2946 * dropped off the front of the playlist in a live scenario
2947 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
2948 * Corresponds to suggestedPresentationDelay in DASH manifests.
2949 * @return {TimeRanges} the periods of time that are valid targets
2950 * for seeking
2951 */
2952
2953 var seekable = function seekable(playlist, expired, liveEdgePadding) {
2954 var useSafeLiveEnd = true;
2955 var seekableStart = expired || 0;
2956 var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
2957
2958 if (seekableEnd === null) {
2959 return createTimeRange();
2960 }
2961
2962 return createTimeRange(seekableStart, seekableEnd);
2963 };
2964 /**
2965 * Determine the index and estimated starting time of the segment that
2966 * contains a specified playback position in a media playlist.
2967 *
2968 * @param {Object} options.playlist the media playlist to query
2969 * @param {number} options.currentTime The number of seconds since the earliest
2970 * possible position to determine the containing segment for
2971 * @param {number} options.startTime the time when the segment/part starts
2972 * @param {number} options.startingSegmentIndex the segment index to start looking at.
2973 * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
2974 *
2975 * @return {Object} an object with partIndex, segmentIndex, and startTime.
2976 */
2977
2978 var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
2979 var playlist = _ref4.playlist,
2980 currentTime = _ref4.currentTime,
2981 startingSegmentIndex = _ref4.startingSegmentIndex,
2982 startingPartIndex = _ref4.startingPartIndex,
2983 startTime = _ref4.startTime,
2984 experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
2985 var time = currentTime - startTime;
2986 var partsAndSegments = getPartsAndSegments(playlist);
2987 var startIndex = 0;
2988
2989 for (var i = 0; i < partsAndSegments.length; i++) {
2990 var partAndSegment = partsAndSegments[i];
2991
2992 if (startingSegmentIndex !== partAndSegment.segmentIndex) {
2993 continue;
2994 } // skip this if part index does not match.
2995
2996
2997 if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
2998 continue;
2999 }
3000
3001 startIndex = i;
3002 break;
3003 }
3004
3005 if (time < 0) {
3006 // Walk backward from startIndex in the playlist, adding durations
3007 // until we find a segment that contains `time` and return it
3008 if (startIndex > 0) {
3009 for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
3010 var _partAndSegment = partsAndSegments[_i2];
3011 time += _partAndSegment.duration;
3012
3013 if (experimentalExactManifestTimings) {
3014 if (time < 0) {
3015 continue;
3016 }
3017 } else if (time + TIME_FUDGE_FACTOR <= 0) {
3018 continue;
3019 }
3020
3021 return {
3022 partIndex: _partAndSegment.partIndex,
3023 segmentIndex: _partAndSegment.segmentIndex,
3024 startTime: startTime - sumDurations({
3025 defaultDuration: playlist.targetDuration,
3026 durationList: partsAndSegments,
3027 startIndex: startIndex,
3028 endIndex: _i2
3029 })
3030 };
3031 }
3032 } // We were unable to find a good segment within the playlist
3033 // so select the first segment
3034
3035
3036 return {
3037 partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
3038 segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
3039 startTime: currentTime
3040 };
3041 } // When startIndex is negative, we first walk forward to the first segment,
3042 // subtracting a target duration for each missing index. If we "run out of
3043 // time" (time goes negative) before reaching the first segment, return the first segment
3044
3045
3046 if (startIndex < 0) {
3047 for (var _i3 = startIndex; _i3 < 0; _i3++) {
3048 time -= playlist.targetDuration;
3049
3050 if (time < 0) {
3051 return {
3052 partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
3053 segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
3054 startTime: currentTime
3055 };
3056 }
3057 }
3058
3059 startIndex = 0;
3060 } // Walk forward from startIndex in the playlist, subtracting durations
3061 // until we find a segment that contains `time` and return it
3062
3063
3064 for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
3065 var _partAndSegment2 = partsAndSegments[_i4];
3066 time -= _partAndSegment2.duration;
3067
3068 if (experimentalExactManifestTimings) {
3069 if (time > 0) {
3070 continue;
3071 }
3072 } else if (time - TIME_FUDGE_FACTOR >= 0) {
3073 continue;
3074 }
3075
3076 return {
3077 partIndex: _partAndSegment2.partIndex,
3078 segmentIndex: _partAndSegment2.segmentIndex,
3079 startTime: startTime + sumDurations({
3080 defaultDuration: playlist.targetDuration,
3081 durationList: partsAndSegments,
3082 startIndex: startIndex,
3083 endIndex: _i4
3084 })
3085 };
3086 } // We are out of possible candidates so load the last one...
3087
3088
3089 return {
3090 segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
3091 partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
3092 startTime: currentTime
3093 };
3094 };
3095 /**
3096 * Check whether the playlist is blacklisted or not.
3097 *
3098 * @param {Object} playlist the media playlist object
3099 * @return {boolean} whether the playlist is blacklisted or not
3100 * @function isBlacklisted
3101 */
3102
3103 var isBlacklisted = function isBlacklisted(playlist) {
3104 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
3105 };
3106 /**
3107 * Check whether the playlist is compatible with current playback configuration or has
3108 * been blacklisted permanently for being incompatible.
3109 *
3110 * @param {Object} playlist the media playlist object
3111 * @return {boolean} whether the playlist is incompatible or not
3112 * @function isIncompatible
3113 */
3114
3115 var isIncompatible = function isIncompatible(playlist) {
3116 return playlist.excludeUntil && playlist.excludeUntil === Infinity;
3117 };
3118 /**
3119 * Check whether the playlist is enabled or not.
3120 *
3121 * @param {Object} playlist the media playlist object
3122 * @return {boolean} whether the playlist is enabled or not
3123 * @function isEnabled
3124 */
3125
3126 var isEnabled = function isEnabled(playlist) {
3127 var blacklisted = isBlacklisted(playlist);
3128 return !playlist.disabled && !blacklisted;
3129 };
3130 /**
3131 * Check whether the playlist has been manually disabled through the representations api.
3132 *
3133 * @param {Object} playlist the media playlist object
3134 * @return {boolean} whether the playlist is disabled manually or not
3135 * @function isDisabled
3136 */
3137
3138 var isDisabled = function isDisabled(playlist) {
3139 return playlist.disabled;
3140 };
3141 /**
3142 * Returns whether the current playlist is an AES encrypted HLS stream
3143 *
3144 * @return {boolean} true if it's an AES encrypted HLS stream
3145 */
3146
3147 var isAes = function isAes(media) {
3148 for (var i = 0; i < media.segments.length; i++) {
3149 if (media.segments[i].key) {
3150 return true;
3151 }
3152 }
3153
3154 return false;
3155 };
3156 /**
3157 * Checks if the playlist has a value for the specified attribute
3158 *
3159 * @param {string} attr
3160 * Attribute to check for
3161 * @param {Object} playlist
3162 * The media playlist object
3163 * @return {boolean}
3164 * Whether the playlist contains a value for the attribute or not
3165 * @function hasAttribute
3166 */
3167
3168 var hasAttribute = function hasAttribute(attr, playlist) {
3169 return playlist.attributes && playlist.attributes[attr];
3170 };
3171 /**
3172 * Estimates the time required to complete a segment download from the specified playlist
3173 *
3174 * @param {number} segmentDuration
3175 * Duration of requested segment
3176 * @param {number} bandwidth
3177 * Current measured bandwidth of the player
3178 * @param {Object} playlist
3179 * The media playlist object
3180 * @param {number=} bytesReceived
3181 * Number of bytes already received for the request. Defaults to 0
3182 * @return {number|NaN}
3183 * The estimated time to request the segment. NaN if bandwidth information for
3184 * the given playlist is unavailable
3185 * @function estimateSegmentRequestTime
3186 */
3187
3188 var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
3189 if (bytesReceived === void 0) {
3190 bytesReceived = 0;
3191 }
3192
3193 if (!hasAttribute('BANDWIDTH', playlist)) {
3194 return NaN;
3195 }
3196
3197 var size = segmentDuration * playlist.attributes.BANDWIDTH;
3198 return (size - bytesReceived * 8) / bandwidth;
3199 };
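  // Editor's note — illustrative usage (not part of the original bundle):
  //   a 4s segment from a playlist with BANDWIDTH 2000000 is ~8000000 bits,
  //   so at a measured bandwidth of 4000000 bits/s with nothing received yet:
  //   estimateSegmentRequestTime(4, 4000000, playlist); // => 2 (seconds)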
3200 /**
3201 * Returns whether the current playlist is the lowest rendition
3202 *
3203 * @return {Boolean} true if on lowest rendition
3204 */
3205
3206 var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
3207 if (master.playlists.length === 1) {
3208 return true;
3209 }
3210
3211 var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
3212 return master.playlists.filter(function (playlist) {
3213 if (!isEnabled(playlist)) {
3214 return false;
3215 }
3216
3217 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
3218 }).length === 0;
3219 };
3220 var playlistMatch = function playlistMatch(a, b) {
3221 // both playlists are null
3222 // or only one playlist is non-null
3223 // no match
3224 if (!a && !b || !a && b || a && !b) {
3225 return false;
3226 } // playlist objects are the same, match
3227
3228
3229 if (a === b) {
3230 return true;
3231 } // first try to use id as it should be the most
3232 // accurate
3233
3234
3235 if (a.id && b.id && a.id === b.id) {
3236 return true;
3237 } // next try to use resolvedUri as it should be the
3238 // second most accurate.
3239
3240
3241 if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
3242 return true;
3243 } // finally try to use uri as it should be accurate
3244 // but might miss a few cases for relative uris
3245
3246
3247 if (a.uri && b.uri && a.uri === b.uri) {
3248 return true;
3249 }
3250
3251 return false;
3252 };
3253
3254 var someAudioVariant = function someAudioVariant(master, callback) {
3255 var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
3256 var found = false;
3257
3258 for (var groupName in AUDIO) {
3259 for (var label in AUDIO[groupName]) {
3260 found = callback(AUDIO[groupName][label]);
3261
3262 if (found) {
3263 break;
3264 }
3265 }
3266
3267 if (found) {
3268 break;
3269 }
3270 }
3271
3272 return !!found;
3273 };
3274
3275 var isAudioOnly = function isAudioOnly(master) {
3276 // we are audio only if we have no main playlists but do
3277 // have media group playlists.
3278 if (!master || !master.playlists || !master.playlists.length) {
3279 // with no main playlists, this is audio only exactly when
3280 // some audio variant has playlists or a uri.
3281 var found = someAudioVariant(master, function (variant) {
3282 return variant.playlists && variant.playlists.length || variant.uri;
3283 });
3284 return found;
3285 } // if every playlist has only an audio codec it is audio only
3286
3287
3288 var _loop = function _loop(i) {
3289 var playlist = master.playlists[i];
3290 var CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.
3291
3292 if (CODECS && CODECS.split(',').every(function (c) {
3293 return isAudioCodec(c);
3294 })) {
3295 return "continue";
3296 } // if the playlist is in an audio group, it is audio only
3297
3298
3299 var found = someAudioVariant(master, function (variant) {
3300 return playlistMatch(playlist, variant);
3301 });
3302
3303 if (found) {
3304 return "continue";
3305 } // if we make it here this playlist isn't audio and we
3306 // are not audio only
3307
3308
3309 return {
3310 v: false
3311 };
3312 };
3313
3314 for (var i = 0; i < master.playlists.length; i++) {
3315 var _ret = _loop(i);
3316
3317 if (_ret === "continue") continue;
3318 if (typeof _ret === "object") return _ret.v;
3319 } // if we make it past every playlist without returning, then
3320 // this is an audio only playlist.
3321
3322
3323 return true;
3324 }; // exports
3325
3326 var Playlist = {
3327 liveEdgeDelay: liveEdgeDelay,
3328 duration: duration,
3329 seekable: seekable,
3330 getMediaInfoForTime: getMediaInfoForTime,
3331 isEnabled: isEnabled,
3332 isDisabled: isDisabled,
3333 isBlacklisted: isBlacklisted,
3334 isIncompatible: isIncompatible,
3335 playlistEnd: playlistEnd,
3336 isAes: isAes,
3337 hasAttribute: hasAttribute,
3338 estimateSegmentRequestTime: estimateSegmentRequestTime,
3339 isLowestEnabledRendition: isLowestEnabledRendition,
3340 isAudioOnly: isAudioOnly,
3341 playlistMatch: playlistMatch,
3342 segmentDurationWithParts: segmentDurationWithParts
3343 };
3344
3345 var log = videojs__default["default"].log;
3346 var createPlaylistID = function createPlaylistID(index, uri) {
3347 return index + "-" + uri;
3348 };
3349 /**
3350 * Parses a given m3u8 playlist
3351 *
3352 * @param {Function} [onwarn]
3353 * a function to call when the parser triggers a warning event.
3354 * @param {Function} [oninfo]
3355 * a function to call when the parser triggers an info event.
3356 * @param {string} manifestString
3357 * The downloaded manifest string
3358 * @param {Object[]} [customTagParsers]
3359 * An array of custom tag parsers for the m3u8-parser instance
3360 * @param {Object[]} [customTagMappers]
3361 * An array of custom tag mappers for the m3u8-parser instance
3362 * @param {boolean} [experimentalLLHLS=false]
3363 * Whether to keep ll-hls features in the manifest after parsing.
3364 * @return {Object}
3365 * The manifest object
3366 */
3367
3368 var parseManifest = function parseManifest(_ref) {
3369 var onwarn = _ref.onwarn,
3370 oninfo = _ref.oninfo,
3371 manifestString = _ref.manifestString,
3372 _ref$customTagParsers = _ref.customTagParsers,
3373 customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
3374 _ref$customTagMappers = _ref.customTagMappers,
3375 customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
3376 experimentalLLHLS = _ref.experimentalLLHLS;
3377 var parser = new Parser();
3378
3379 if (onwarn) {
3380 parser.on('warn', onwarn);
3381 }
3382
3383 if (oninfo) {
3384 parser.on('info', oninfo);
3385 }
3386
3387 customTagParsers.forEach(function (customParser) {
3388 return parser.addParser(customParser);
3389 });
3390 customTagMappers.forEach(function (mapper) {
3391 return parser.addTagMapper(mapper);
3392 });
3393 parser.push(manifestString);
3394 parser.end();
3395 var manifest = parser.manifest; // remove llhls features from the parsed manifest
3396 // if we don't want llhls support.
3397
3398 if (!experimentalLLHLS) {
3399 ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
3400 if (manifest.hasOwnProperty(k)) {
3401 delete manifest[k];
3402 }
3403 });
3404
3405 if (manifest.segments) {
3406 manifest.segments.forEach(function (segment) {
3407 ['parts', 'preloadHints'].forEach(function (k) {
3408 if (segment.hasOwnProperty(k)) {
3409 delete segment[k];
3410 }
3411 });
3412 });
3413 }
3414 }
3415
3416 if (!manifest.targetDuration) {
3417 var targetDuration = 10;
3418
3419 if (manifest.segments && manifest.segments.length) {
3420 targetDuration = manifest.segments.reduce(function (acc, s) {
3421 return Math.max(acc, s.duration);
3422 }, 0);
3423 }
3424
3425 if (onwarn) {
3426 onwarn("manifest has no targetDuration defaulting to " + targetDuration);
3427 }
3428
3429 manifest.targetDuration = targetDuration;
3430 }
3431
3432 var parts = getLastParts(manifest);
3433
3434 if (parts.length && !manifest.partTargetDuration) {
3435 var partTargetDuration = parts.reduce(function (acc, p) {
3436 return Math.max(acc, p.duration);
3437 }, 0);
3438
3439 if (onwarn) {
3440 onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
3441 log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
3442 }
3443
3444 manifest.partTargetDuration = partTargetDuration;
3445 }
3446
3447 return manifest;
3448 };
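  // Editor's note — illustrative usage (not part of the original bundle;
  // m3u8Text stands in for a downloaded manifest string):
  //   var manifest = parseManifest({ manifestString: m3u8Text });
  //   manifest.targetDuration is always set afterwards, defaulting to the
  //   longest segment duration (or 10) when the tag is absent.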
3449 /**
3450 * Loops through all supported media groups in master and calls the provided
3451 * callback for each group
3452 *
3453 * @param {Object} master
3454 * The parsed master manifest object
3455 * @param {Function} callback
3456 * Callback to call for each media group
3457 */
3458
3459 var forEachMediaGroup$1 = function forEachMediaGroup(master, callback) {
3460 if (!master.mediaGroups) {
3461 return;
3462 }
3463
3464 ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
3465 if (!master.mediaGroups[mediaType]) {
3466 return;
3467 }
3468
3469 for (var groupKey in master.mediaGroups[mediaType]) {
3470 for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
3471 var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
3472 callback(mediaProperties, mediaType, groupKey, labelKey);
3473 }
3474 }
3475 });
3476 };
3477 /**
3478 * Adds properties and attributes to the playlist to keep consistent functionality for
3479 * playlists throughout VHS.
3480 *
3481 * @param {Object} config
3482 * Arguments object
3483 * @param {Object} config.playlist
3484 * The media playlist
3485 * @param {string} [config.uri]
3486 * The uri to the media playlist (if media playlist is not from within a master
3487 * playlist)
3488 * @param {string} id
3489 * ID to use for the playlist
3490 */
3491
3492 var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
3493 var playlist = _ref2.playlist,
3494 uri = _ref2.uri,
3495 id = _ref2.id;
3496 playlist.id = id;
3497 playlist.playlistErrors_ = 0;
3498
3499 if (uri) {
3500 // For media playlists, m3u8-parser does not have access to a URI, as HLS media
3501 // playlists do not contain their own source URI, but one is needed for consistency in
3502 // VHS.
3503 playlist.uri = uri;
3504 } // For HLS master playlists, even though certain attributes MUST be defined, the
3505 // stream may still be played without them.
3506 // For HLS media playlists, m3u8-parser does not attach an attributes object to the
3507 // manifest.
3508 //
3509 // To avoid undefined reference errors through the project, and make the code easier
3510 // to write/read, add an empty attributes object for these cases.
3511
3512
3513 playlist.attributes = playlist.attributes || {};
3514 };
3515 /**
3516 * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
3517 * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
3518 * playlist references to the playlists array.
3519 *
3520 * @param {Object} master
3521 * The master playlist
3522 */
3523
3524 var setupMediaPlaylists = function setupMediaPlaylists(master) {
3525 var i = master.playlists.length;
3526
3527 while (i--) {
3528 var playlist = master.playlists[i];
3529 setupMediaPlaylist({
3530 playlist: playlist,
3531 id: createPlaylistID(i, playlist.uri)
3532 });
3533 playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
3534 master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility
3535
3536 master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
3537 // the stream can be played without it. Although an attributes property may have been
3538 // added to the playlist to prevent undefined references, issue a warning to fix the
3539 // manifest.
3540
3541 if (!playlist.attributes.BANDWIDTH) {
3542 log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
3543 }
3544 }
3545 };
3546 /**
3547 * Adds resolvedUri properties to each media group.
3548 *
3549 * @param {Object} master
3550 * The master playlist
3551 */
3552
3553 var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
3554 forEachMediaGroup$1(master, function (properties) {
3555 if (properties.uri) {
3556 properties.resolvedUri = resolveUrl(master.uri, properties.uri);
3557 }
3558 });
3559 };
3560 /**
3561 * Creates a master playlist wrapper to insert a sole media playlist into.
3562 *
3563 * @param {Object} media
3564 * Media playlist
3565 * @param {string} uri
3566 * The media URI
3567 *
3568 * @return {Object}
3569 * Master playlist
3570 */
3571
3572 var masterForMedia = function masterForMedia(media, uri) {
3573 var id = createPlaylistID(0, uri);
3574 var master = {
3575 mediaGroups: {
3576 'AUDIO': {},
3577 'VIDEO': {},
3578 'CLOSED-CAPTIONS': {},
3579 'SUBTITLES': {}
3580 },
3581 uri: window.location.href,
3582 resolvedUri: window.location.href,
3583 playlists: [{
3584 uri: uri,
3585 id: id,
3586 resolvedUri: uri,
3587 // m3u8-parser does not attach an attributes property to media playlists so make
3588 // sure that the property is attached to avoid undefined reference errors
3589 attributes: {}
3590 }]
3591 }; // set up ID reference
3592
3593 master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility
3594
3595 master.playlists[uri] = master.playlists[0];
3596 return master;
3597 };
3598 /**
3599 * Does an in-place update of the master manifest to add updated playlist URI references
3600 * as well as other properties needed by VHS that aren't included by the parser.
3601 *
3602 * @param {Object} master
3603 * Master manifest object
3604 * @param {string} uri
3605 * The source URI
3606 */
3607
3608 var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
3609 master.uri = uri;
3610
3611 for (var i = 0; i < master.playlists.length; i++) {
3612 if (!master.playlists[i].uri) {
3613 // Set up phony URIs for the playlists since playlists are referenced by their URIs
3614 // throughout VHS, but some formats (e.g., DASH) don't have external URIs
3615 // TODO: consider adding dummy URIs in mpd-parser
3616 var phonyUri = "placeholder-uri-" + i;
3617 master.playlists[i].uri = phonyUri;
3618 }
3619 }
3620
3621 var audioOnlyMaster = isAudioOnly(master);
3622 forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
3623 var groupId = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey; // add a playlist array under properties
3624
3625 if (!properties.playlists || !properties.playlists.length) {
3626 // If the manifest is audio only and this media group does not have a uri, check
3627 // if the media group is located in the main list of playlists. If it is, don't add
3628 // placeholder properties as it shouldn't be considered an alternate audio track.
3629 if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
3630 for (var _i = 0; _i < master.playlists.length; _i++) {
3631 var p = master.playlists[_i];
3632
3633 if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
3634 return;
3635 }
3636 }
3637 }
3638
3639 properties.playlists = [_extends_1({}, properties)];
3640 }
3641
3642 properties.playlists.forEach(function (p, i) {
3643 var id = createPlaylistID(i, groupId);
3644
3645 if (p.uri) {
3646 p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
3647 } else {
3648 // DEPRECATED, this has been added to prevent a breaking change.
3649 // previously we only ever had a single media group playlist, so
3650 // we mark the first playlist uri without prepending the index as we used to
3651 // ideally we would do all of the playlists the same way.
3652 p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
3653 // the placeholder again
3654
3655 p.resolvedUri = p.uri;
3656 }
3657
3658 p.id = p.id || id; // add an empty attributes object, all playlists are
3659 // expected to have this.
3660
3661 p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)
3662
3663 master.playlists[p.id] = p;
3664 master.playlists[p.uri] = p;
3665 });
3666 });
3667 setupMediaPlaylists(master);
3668 resolveMediaGroupUris(master);
3669 };
3670
3671 var mergeOptions$2 = videojs__default["default"].mergeOptions,
3672 EventTarget$1 = videojs__default["default"].EventTarget;
3673
3674 var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
3675 if (media.endList || !media.serverControl) {
3676 return uri;
3677 }
3678
3679 var parameters = {};
3680
3681 if (media.serverControl.canBlockReload) {
3682 var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.
3683
3684 var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
3685 // that we are going to request a part of that preload segment.
3686 // the logic below is used to determine that.
3687
3688 if (preloadSegment) {
3689 var parts = preloadSegment.parts || []; // _HLS_part is a zero based index
3690
3691 var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
3692 // length of parts, then we know we had part preload hints
3693 // and we need to add the _HLS_part= query
3694
3695 if (nextPart > -1 && nextPart !== parts.length - 1) {
3696 // add existing parts to our preload hints
3697 // eslint-disable-next-line
3698 parameters._HLS_part = nextPart;
3699 } // this if statement makes sure that we request the msn
3700 // of the preload segment if:
3701 // 1. the preload segment had parts (and was not yet a full segment)
3702 // but was added to our segments array
3703 // 2. the preload segment had preload hints for parts that are not in
3704 // the manifest yet.
3705 // in all other cases we want the segment after the preload segment
3706 // which will be given by using media.segments.length because it is 1 based
3707 // rather than 0 based.
3708
3709
3710 if (nextPart > -1 || parts.length) {
3711 nextMSN--;
3712 }
3713 } // add _HLS_msn= in front of any _HLS_part query
3714 // eslint-disable-next-line
3715
3716
3717 parameters._HLS_msn = nextMSN;
3718 }
3719
3720 if (media.serverControl && media.serverControl.canSkipUntil) {
3721 // add _HLS_skip= in front of all other queries.
3722 // eslint-disable-next-line
3723 parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
3724 }
3725
3726 if (Object.keys(parameters).length) {
3727 var parsedUri = new window.URL(uri);
3728 ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
3729 if (!parameters.hasOwnProperty(name)) {
3730 return;
3731 }
3732
3733 parsedUri.searchParams.set(name, parameters[name]);
3734 });
3735 uri = parsedUri.toString();
3736 }
3737
3738 return uri;
3739 };
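  // Editor's sketch (hypothetical values, not part of the library): for a
  // live playlist that allows blocking reloads and skips, the LL-HLS
  // delivery directives are appended roughly like this:
  //
  //   addLLHLSQueryDirectives('https://example.com/media.m3u8', {
  //     serverControl: { canBlockReload: true, canSkipUntil: 12 },
  //     mediaSequence: 100,
  //     segments: [{ duration: 4 }, { duration: 4 }]
  //   });
  //   // => 'https://example.com/media.m3u8?_HLS_skip=YES&_HLS_msn=102'
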
3740 /**
3741 * Returns a new segment object with properties and
3742 * the parts array merged.
3743 *
3744 * @param {Object} a the old segment
3745 * @param {Object} b the new segment
3746 *
3747 * @return {Object} the merged segment
3748 */
3749
3750
3751 var updateSegment = function updateSegment(a, b) {
3752 if (!a) {
3753 return b;
3754 }
3755
3756 var result = mergeOptions$2(a, b); // if only the old segment has preload hints
3757 // and the new one does not, remove preload hints.
3758
3759 if (a.preloadHints && !b.preloadHints) {
3760 delete result.preloadHints;
3761 } // if only the old segment has parts
3762 // then the parts are no longer valid
3763
3764
3765 if (a.parts && !b.parts) {
3766 delete result.parts; // if both segments have parts
3767 // copy part properties from the old segment
3768 // to the new one.
3769 } else if (a.parts && b.parts) {
3770 for (var i = 0; i < b.parts.length; i++) {
3771 if (a.parts && a.parts[i]) {
3772 result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
3773 }
3774 }
3775 } // set skipped to false for segments that have
3776 // had information merged from the old segment.
3777
3778
3779 if (!a.skipped && b.skipped) {
3780 result.skipped = false;
3781 } // set preload to false for segments that have
3782 // had information added in the new segment.
3783
3784
3785 if (a.preload && !b.preload) {
3786 result.preload = false;
3787 }
3788
3789 return result;
3790 };
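  // Editor's sketch of the merge behavior above (hypothetical values): old
  // part and preload information is dropped once the new rendition of the
  // segment no longer carries it.
  //
  //   updateSegment(
  //     { duration: 4, parts: [{ duration: 1 }], preload: true },
  //     { duration: 4 }
  //   );
  //   // => { duration: 4, preload: false } (parts removed, preload cleared)
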
3791 /**
3792 * Returns a new array of segments that is the result of merging
3793 * properties from an older list of segments onto an updated
3794 * list. No properties on the updated playlist will be overwritten.
3795 *
3796 * @param {Array} original the outdated list of segments
3797 * @param {Array} update the updated list of segments
3798 * @param {number=} offset the index of the first update
3799 * segment in the original segment list. For non-live playlists,
3800 * this should always be zero and does not need to be
3801 * specified. For live playlists, it should be the difference
3802 * between the media sequence numbers in the original and updated
3803 * playlists.
3804 * @return {Array} a list of merged segment objects
3805 */
3806
3807 var updateSegments = function updateSegments(original, update, offset) {
3808 var oldSegments = original.slice();
3809 var newSegments = update.slice();
3810 offset = offset || 0;
3811 var result = [];
3812 var currentMap;
3813
3814 for (var newIndex = 0; newIndex < newSegments.length; newIndex++) {
3815 var oldSegment = oldSegments[newIndex + offset];
3816 var newSegment = newSegments[newIndex];
3817
3818 if (oldSegment) {
3819 currentMap = oldSegment.map || currentMap;
3820 result.push(updateSegment(oldSegment, newSegment));
3821 } else {
3822 // carry over map to new segment if it is missing
3823 if (currentMap && !newSegment.map) {
3824 newSegment.map = currentMap;
3825 }
3826
3827 result.push(newSegment);
3828 }
3829 }
3830
3831 return result;
3832 };
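  // Editor's sketch (hypothetical values): with an offset of 1, the first
  // updated segment lines up with the second original segment, and segments
  // past the end of the original list are carried over as-is, inheriting the
  // last seen init map.
  //
  //   updateSegments(
  //     [{ uri: 's0.ts' }, { uri: 's1.ts', map: { uri: 'init.mp4' } }],
  //     [{ uri: 's1.ts' }, { uri: 's2.ts' }],
  //     1
  //   );
  //   // => s1 merged with its old copy; s2 inherits the old init map
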
3833 var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
3834 // preloadSegment will not have a uri at all
3835 // as the segment isn't actually in the manifest yet, only parts
3836 if (!segment.resolvedUri && segment.uri) {
3837 segment.resolvedUri = resolveUrl(baseUri, segment.uri);
3838 }
3839
3840 if (segment.key && !segment.key.resolvedUri) {
3841 segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
3842 }
3843
3844 if (segment.map && !segment.map.resolvedUri) {
3845 segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
3846 }
3847
3848 if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
3849 segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
3850 }
3851
3852 if (segment.parts && segment.parts.length) {
3853 segment.parts.forEach(function (p) {
3854 if (p.resolvedUri) {
3855 return;
3856 }
3857
3858 p.resolvedUri = resolveUrl(baseUri, p.uri);
3859 });
3860 }
3861
3862 if (segment.preloadHints && segment.preloadHints.length) {
3863 segment.preloadHints.forEach(function (p) {
3864 if (p.resolvedUri) {
3865 return;
3866 }
3867
3868 p.resolvedUri = resolveUrl(baseUri, p.uri);
3869 });
3870 }
3871 };
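  // Editor's sketch (hypothetical values): relative segment and key URIs are
  // resolved against the playlist's own resolved URI.
  //
  //   var exampleSegment = { uri: '0.ts', key: { uri: 'key.php' } };
  //   resolveSegmentUris(exampleSegment, 'https://example.com/media.m3u8');
  //   // exampleSegment.resolvedUri     => 'https://example.com/0.ts'
  //   // exampleSegment.key.resolvedUri => 'https://example.com/key.php'
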
3872
3873 var getAllSegments = function getAllSegments(media) {
3874 var segments = media.segments || [];
3875 var preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently
3876 // a usable segment, only include a preloadSegment that has
3877 // parts.
3878
3879 if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
3880 // if preloadHints has a MAP that means that the
3881 // init segment is going to change. We cannot use any of the parts
3882 // from this preload segment.
3883 if (preloadSegment.preloadHints) {
3884 for (var i = 0; i < preloadSegment.preloadHints.length; i++) {
3885 if (preloadSegment.preloadHints[i].type === 'MAP') {
3886 return segments;
3887 }
3888 }
3889 } // set the duration for our preload segment to target duration.
3890
3891
3892 preloadSegment.duration = media.targetDuration;
3893 preloadSegment.preload = true;
3894 segments.push(preloadSegment);
3895 }
3896
3897 return segments;
3898 }; // consider the playlist unchanged if the playlist object is the same, or if the
3899 // segment count, media sequence number, endList flag, and preload segment are
3900 // all unchanged
3901
3902
3903 var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
3904 return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;
3905 };
3906 /**
3907 * Returns a new master playlist that is the result of merging an
3908 * updated media playlist into the original version. If the
3909 * updated media playlist does not match any of the playlist
3910 * entries in the original master playlist, null is returned.
3911 *
3912 * @param {Object} master a parsed master M3U8 object
3913 * @param {Object} newMedia a parsed media M3U8 object
3914 * @return {Object} a new object that represents the original
3915 * master playlist with the updated media playlist merged in, or
3916 * null if the merge produced no change.
3917 */
3918
3919 var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
3920 if (unchangedCheck === void 0) {
3921 unchangedCheck = isPlaylistUnchanged;
3922 }
3923
3924 var result = mergeOptions$2(master, {});
3925 var oldMedia = result.playlists[newMedia.id];
3926
3927 if (!oldMedia) {
3928 return null;
3929 }
3930
3931 if (unchangedCheck(oldMedia, newMedia)) {
3932 return null;
3933 }
3934
3935 newMedia.segments = getAllSegments(newMedia);
3936 var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment
3937
3938 if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
3939 delete mergedPlaylist.preloadSegment;
3940 } // if the update could overlap existing segment information, merge the two segment lists
3941
3942
3943 if (oldMedia.segments) {
3944 if (newMedia.skip) {
3945 newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
3946 // old properties into the new segments
3947
3948 for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
3949 newMedia.segments.unshift({
3950 skipped: true
3951 });
3952 }
3953 }
3954
3955 mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
3956 } // resolve any segment URIs to prevent us from having to do it later
3957
3958
3959 mergedPlaylist.segments.forEach(function (segment) {
3960 resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
3961 }); // TODO Right now in the playlists array there are two references to each playlist, one
3962 // that is referenced by index, and one by URI. The index reference may no longer be
3963 // necessary.
3964
3965 for (var _i = 0; _i < result.playlists.length; _i++) {
3966 if (result.playlists[_i].id === newMedia.id) {
3967 result.playlists[_i] = mergedPlaylist;
3968 }
3969 }
3970
3971 result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility
3972
3973 result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.
3974
3975 forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
3976 if (!properties.playlists) {
3977 return;
3978 }
3979
3980 for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
3981 if (newMedia.id === properties.playlists[_i2].id) {
3982 properties.playlists[_i2] = mergedPlaylist;
3983 }
3984 }
3985 });
3986 return result;
3987 };
3988 /**
3989 * Calculates the time to wait before refreshing a live playlist
3990 *
3991 * @param {Object} media
3992 * The current media
3993 * @param {boolean} update
3994 * True if there were any updates from the last refresh, false otherwise
3995 * @return {number}
3996 * The time in ms to wait before refreshing the live playlist
3997 */
3998
3999 var refreshDelay = function refreshDelay(media, update) {
4000 var segments = media.segments || [];
4001 var lastSegment = segments[segments.length - 1];
4002 var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
4003 var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
4004
4005 if (update && lastDuration) {
4006 return lastDuration * 1000;
4007 } // if the playlist is unchanged since the last reload, or the last segment duration
4008 // cannot be determined, try again after half the target duration
4009
4010
4011 return (media.partTargetDuration || media.targetDuration || 10) * 500;
4012 };
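  // Editor's sketch (hypothetical values): an updated playlist is polled
  // again after roughly one last-segment duration, an unchanged one after
  // half the target duration.
  //
  //   refreshDelay({ segments: [{ duration: 4 }], targetDuration: 6 }, true);  // => 4000
  //   refreshDelay({ segments: [{ duration: 4 }], targetDuration: 6 }, false); // => 3000
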
4013 /**
4014 * Load a playlist from a remote location
4015 *
4016 * @class PlaylistLoader
4017 * @extends EventTarget
4018 * @param {string|Object} src url or object of manifest
4019 * @param {Object} vhs the VHS instance used to make xhr requests
4020 * @param {Object=} options loader options, including withCredentials and handleManifestRedirects
4021 */
4022
4023 var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
4024 inheritsLoose(PlaylistLoader, _EventTarget);
4025
4026 function PlaylistLoader(src, vhs, options) {
4027 var _this;
4028
4029 if (options === void 0) {
4030 options = {};
4031 }
4032
4033 _this = _EventTarget.call(this) || this;
4034
4035 if (!src) {
4036 throw new Error('A non-empty playlist URL or object is required');
4037 }
4038
4039 _this.logger_ = logger('PlaylistLoader');
4040 var _options = options,
4041 _options$withCredenti = _options.withCredentials,
4042 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
4043 _options$handleManife = _options.handleManifestRedirects,
4044 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
4045 _this.src = src;
4046 _this.vhs_ = vhs;
4047 _this.withCredentials = withCredentials;
4048 _this.handleManifestRedirects = handleManifestRedirects;
4049 var vhsOptions = vhs.options_;
4050 _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
4051 _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
4052 _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // experimentalLLHLS is not supported on IE 11, so force it off
4053
4054 if (videojs__default["default"].browser.IE_VERSION) {
4055 _this.experimentalLLHLS = false;
4056 } // initialize the loader state
4057
4058
4059 _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout
4060
4061 _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(assertThisInitialized(_this));
4062
4063 _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);
4064
4065 return _this;
4066 }
4067
4068 var _proto = PlaylistLoader.prototype;
4069
4070 _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
4071 var _this2 = this;
4072
4073 if (this.state !== 'HAVE_METADATA') {
4074 // only refresh the media playlist if no other activity is going on
4075 return;
4076 }
4077
4078 var media = this.media();
4079 var uri = resolveUrl(this.master.uri, media.uri);
4080
4081 if (this.experimentalLLHLS) {
4082 uri = addLLHLSQueryDirectives(uri, media);
4083 }
4084
4085 this.state = 'HAVE_CURRENT_METADATA';
4086 this.request = this.vhs_.xhr({
4087 uri: uri,
4088 withCredentials: this.withCredentials
4089 }, function (error, req) {
4090 // disposed
4091 if (!_this2.request) {
4092 return;
4093 }
4094
4095 if (error) {
4096 return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
4097 }
4098
4099 _this2.haveMetadata({
4100 playlistString: _this2.request.responseText,
4101 url: _this2.media().uri,
4102 id: _this2.media().id
4103 });
4104 });
4105 };
4106
4107 _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
4108 var uri = playlist.uri,
4109 id = playlist.id; // any in-flight request is now finished
4110
4111 this.request = null;
4112
4113 if (startingState) {
4114 this.state = startingState;
4115 }
4116
4117 this.error = {
4118 playlist: this.master.playlists[id],
4119 status: xhr.status,
4120 message: "HLS playlist request error at URL: " + uri + ".",
4121 responseText: xhr.responseText,
4122 code: xhr.status >= 500 ? 4 : 2
4123 };
4124 this.trigger('error');
4125 };
4126
4127 _proto.parseManifest_ = function parseManifest_(_ref) {
4128 var _this3 = this;
4129
4130 var url = _ref.url,
4131 manifestString = _ref.manifestString;
4132 return parseManifest({
4133 onwarn: function onwarn(_ref2) {
4134 var message = _ref2.message;
4135 return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
4136 },
4137 oninfo: function oninfo(_ref3) {
4138 var message = _ref3.message;
4139 return _this3.logger_("m3u8-parser info for " + url + ": " + message);
4140 },
4141 manifestString: manifestString,
4142 customTagParsers: this.customTagParsers,
4143 customTagMappers: this.customTagMappers,
4144 experimentalLLHLS: this.experimentalLLHLS
4145 });
4146 }
4147 /**
4148 * Update the playlist loader's state in response to a new or updated playlist.
4149 *
4150 * @param {string} [playlistString]
4151 * Playlist string (if playlistObject is not provided)
4152 * @param {Object} [playlistObject]
4153 * Playlist object (if playlistString is not provided)
4154 * @param {string} url
4155 * URL of playlist
4156 * @param {string} id
4157 * ID to use for playlist
4158 */
4159 ;
4160
4161 _proto.haveMetadata = function haveMetadata(_ref4) {
4162 var playlistString = _ref4.playlistString,
4163 playlistObject = _ref4.playlistObject,
4164 url = _ref4.url,
4165 id = _ref4.id;
4166 // any in-flight request is now finished
4167 this.request = null;
4168 this.state = 'HAVE_METADATA';
4169 var playlist = playlistObject || this.parseManifest_({
4170 url: url,
4171 manifestString: playlistString
4172 });
4173 playlist.lastRequest = Date.now();
4174 setupMediaPlaylist({
4175 playlist: playlist,
4176 uri: url,
4177 id: id
4178 }); // merge this playlist into the master
4179
4180 var update = updateMaster$1(this.master, playlist);
4181 this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
4182 this.pendingMedia_ = null;
4183
4184 if (update) {
4185 this.master = update;
4186 this.media_ = this.master.playlists[id];
4187 } else {
4188 this.trigger('playlistunchanged');
4189 }
4190
4191 this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
4192 this.trigger('loadedplaylist');
4193 }
4194 /**
4195 * Abort any outstanding work and clean up.
4196 */
4197 ;
4198
4199 _proto.dispose = function dispose() {
4200 this.trigger('dispose');
4201 this.stopRequest();
4202 window.clearTimeout(this.mediaUpdateTimeout);
4203 window.clearTimeout(this.finalRenditionTimeout);
4204 this.off();
4205 };
4206
4207 _proto.stopRequest = function stopRequest() {
4208 if (this.request) {
4209 var oldRequest = this.request;
4210 this.request = null;
4211 oldRequest.onreadystatechange = null;
4212 oldRequest.abort();
4213 }
4214 }
4215 /**
4216 * When called without any arguments, returns the currently
4217 * active media playlist. When called with a single argument,
4218 * triggers the playlist loader to asynchronously switch to the
4219 * specified media playlist. Calling this method while the
4220 * loader is in the HAVE_NOTHING state causes an error to be thrown
4221 * but otherwise has no effect.
4222 *
4223 * @param {Object=} playlist the parsed media playlist
4224 * object to switch to
4225 * @param {boolean=} shouldDelay whether we should delay the request by half target duration
4226 *
4227 * @return {Playlist} the current loaded media
4228 */
4229 ;
4230
4231 _proto.media = function media(playlist, shouldDelay) {
4232 var _this4 = this;
4233
4234 // getter
4235 if (!playlist) {
4236 return this.media_;
4237 } // setter
4238
4239
4240 if (this.state === 'HAVE_NOTHING') {
4241 throw new Error('Cannot switch media playlist from ' + this.state);
4242 } // find the playlist object if the target playlist has been
4243 // specified by URI
4244
4245
4246 if (typeof playlist === 'string') {
4247 if (!this.master.playlists[playlist]) {
4248 throw new Error('Unknown playlist URI: ' + playlist);
4249 }
4250
4251 playlist = this.master.playlists[playlist];
4252 }
4253
4254 window.clearTimeout(this.finalRenditionTimeout);
4255
4256 if (shouldDelay) {
4257 var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
4258 this.finalRenditionTimeout = window.setTimeout(this.media.bind(this, playlist, false), delay);
4259 return;
4260 }
4261
4262 var startingState = this.state;
4263 var mediaChange = !this.media_ || playlist.id !== this.media_.id;
4264 var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately
4265
4266 if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
4267 // media playlist or, for the case of demuxed audio, a resolved audio media group)
4268 playlist.endList && playlist.segments.length) {
4269 // abort outstanding playlist requests
4270 if (this.request) {
4271 this.request.onreadystatechange = null;
4272 this.request.abort();
4273 this.request = null;
4274 }
4275
4276 this.state = 'HAVE_METADATA';
4277 this.media_ = playlist; // trigger media change if the active media has been updated
4278
4279 if (mediaChange) {
4280 this.trigger('mediachanging');
4281
4282 if (startingState === 'HAVE_MASTER') {
4283 // The initial playlist was a master manifest, and the first media selected was
4284 // also provided (in the form of a resolved playlist object) as part of the
4285 // source object (rather than just a URL). Therefore, since the media playlist
4286 // doesn't need to be requested, loadedmetadata won't trigger as part of the
4287 // normal flow, and needs an explicit trigger here.
4288 this.trigger('loadedmetadata');
4289 } else {
4290 this.trigger('mediachange');
4291 }
4292 }
4293
4294 return;
4295 } // We update/set the timeout here so that live playlists
4296 // that are not a media change will "start" the loader as expected.
4297 // We expect that this function will start the media update timeout
4298 // cycle again. This also prevents a playlist switch failure from
4299 // causing us to stall during live.
4300
4301
4302 this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op
4303
4304 if (!mediaChange) {
4305 return;
4306 }
4307
4308 this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request
4309
4310 if (this.request) {
4311 if (playlist.resolvedUri === this.request.url) {
4312 // requesting to switch to the same playlist multiple times
4313 // has no effect after the first
4314 return;
4315 }
4316
4317 this.request.onreadystatechange = null;
4318 this.request.abort();
4319 this.request = null;
4320 } // request the new playlist
4321
4322
4323 if (this.media_) {
4324 this.trigger('mediachanging');
4325 }
4326
4327 this.pendingMedia_ = playlist;
4328 this.request = this.vhs_.xhr({
4329 uri: playlist.resolvedUri,
4330 withCredentials: this.withCredentials
4331 }, function (error, req) {
4332 // disposed
4333 if (!_this4.request) {
4334 return;
4335 }
4336
4337 playlist.lastRequest = Date.now();
4338 playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);
4339
4340 if (error) {
4341 return _this4.playlistRequestError(_this4.request, playlist, startingState);
4342 }
4343
4344 _this4.haveMetadata({
4345 playlistString: req.responseText,
4346 url: playlist.uri,
4347 id: playlist.id
4348 }); // fire loadedmetadata the first time a media playlist is loaded
4349
4350
4351 if (startingState === 'HAVE_MASTER') {
4352 _this4.trigger('loadedmetadata');
4353 } else {
4354 _this4.trigger('mediachange');
4355 }
4356 });
4357 }
4358 /**
4359 * pause loading of the playlist
4360 */
4361 ;
4362
4363 _proto.pause = function pause() {
4364 if (this.mediaUpdateTimeout) {
4365 window.clearTimeout(this.mediaUpdateTimeout);
4366 this.mediaUpdateTimeout = null;
4367 }
4368
4369 this.stopRequest();
4370
4371 if (this.state === 'HAVE_NOTHING') {
4372 // If we pause the loader before any data has been retrieved, it's as if we never
4373 // started, so reset to an unstarted state.
4374 this.started = false;
4375 } // Need to restore state now that no activity is happening
4376
4377
4378 if (this.state === 'SWITCHING_MEDIA') {
4379 // if the loader was in the process of switching media, it should either return to
4380 // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
4381 // playlist yet. This is determined by the existence of loader.media_
4382 if (this.media_) {
4383 this.state = 'HAVE_METADATA';
4384 } else {
4385 this.state = 'HAVE_MASTER';
4386 }
4387 } else if (this.state === 'HAVE_CURRENT_METADATA') {
4388 this.state = 'HAVE_METADATA';
4389 }
4390 }
4391 /**
4392 * start or resume loading of the playlist
4393 */
4394 ;
4395
4396 _proto.load = function load(shouldDelay) {
4397 var _this5 = this;
4398
4399 if (this.mediaUpdateTimeout) {
4400 window.clearTimeout(this.mediaUpdateTimeout);
4401 this.mediaUpdateTimeout = null;
4402 }
4403
4404 var media = this.media();
4405
4406 if (shouldDelay) {
4407 var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
4408 this.mediaUpdateTimeout = window.setTimeout(function () {
4409 _this5.mediaUpdateTimeout = null;
4410
4411 _this5.load();
4412 }, delay);
4413 return;
4414 }
4415
4416 if (!this.started) {
4417 this.start();
4418 return;
4419 }
4420
4421 if (media && !media.endList) {
4422 this.trigger('mediaupdatetimeout');
4423 } else {
4424 this.trigger('loadedplaylist');
4425 }
4426 };
4427
4428 _proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
4429 var _this6 = this;
4430
4431 if (this.mediaUpdateTimeout) {
4432 window.clearTimeout(this.mediaUpdateTimeout);
4433 this.mediaUpdateTimeout = null;
4434 } // we only use mediaupdatetimeout for live playlists.
4435
4436
4437 if (!this.media() || this.media().endList) {
4438 return;
4439 }
4440
4441 this.mediaUpdateTimeout = window.setTimeout(function () {
4442 _this6.mediaUpdateTimeout = null;
4443
4444 _this6.trigger('mediaupdatetimeout');
4445
4446 _this6.updateMediaUpdateTimeout_(delay);
4447 }, delay);
4448 }
4449 /**
4450 * start loading of the playlist
4451 */
4452 ;
4453
4454 _proto.start = function start() {
4455 var _this7 = this;
4456
4457 this.started = true;
4458
4459 if (typeof this.src === 'object') {
4460 // in the case of an entirely constructed manifest object (meaning there's no actual
4461 // manifest on a server), default the uri to the page's href
4462 if (!this.src.uri) {
4463 this.src.uri = window.location.href;
4464 } // resolvedUri is added on internally after the initial request. Since there's no
4465 // request for pre-resolved manifests, add on resolvedUri here.
4466
4467
4468 this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
4469 // request can be skipped (since the top level of the manifest, at a minimum, is
4470 // already available as a parsed manifest object). However, if the manifest object
4471 // represents a master playlist, some media playlists may need to be resolved before
4472 // the starting segment list is available. Therefore, go directly to setup of the
4473 // initial playlist, and let the normal flow continue from there.
4474 //
4475 // Note that the call to setup is asynchronous, as other sections of VHS may assume
4476 // that the first request is asynchronous.
4477
4478 setTimeout(function () {
4479 _this7.setupInitialPlaylist(_this7.src);
4480 }, 0);
4481 return;
4482 } // request the specified URL
4483
4484
4485 this.request = this.vhs_.xhr({
4486 uri: this.src,
4487 withCredentials: this.withCredentials
4488 }, function (error, req) {
4489 // disposed
4490 if (!_this7.request) {
4491 return;
4492 } // clear the loader's request reference
4493
4494
4495 _this7.request = null;
4496
4497 if (error) {
4498 _this7.error = {
4499 status: req.status,
4500 message: "HLS playlist request error at URL: " + _this7.src + ".",
4501 responseText: req.responseText,
4502 // MEDIA_ERR_NETWORK
4503 code: 2
4504 };
4505
4506 if (_this7.state === 'HAVE_NOTHING') {
4507 _this7.started = false;
4508 }
4509
4510 return _this7.trigger('error');
4511 }
4512
4513 _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);
4514
4515 var manifest = _this7.parseManifest_({
4516 manifestString: req.responseText,
4517 url: _this7.src
4518 });
4519
4520 _this7.setupInitialPlaylist(manifest);
4521 });
4522 };
4523
4524 _proto.srcUri = function srcUri() {
4525 return typeof this.src === 'string' ? this.src : this.src.uri;
4526 }
4527 /**
4528 * Given a manifest object that's either a master or media playlist, trigger the proper
4529 * events and set the state of the playlist loader.
4530 *
4531 * If the manifest object represents a master playlist, `loadedplaylist` will be
4532 * triggered to allow listeners to select a playlist. If none is selected, the loader
4533 * will default to the first one in the playlists array.
4534 *
4535 * If the manifest object represents a media playlist, `loadedplaylist` will be
4536 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
4537 *
4538 * In the case of a media playlist, a master playlist object wrapper with one playlist
4539 * will be created so that all logic can handle playlists in the same fashion (as an
4540 * assumed manifest object schema).
4541 *
4542 * @param {Object} manifest
4543 * The parsed manifest object
4544 */
4545 ;
4546
4547 _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
4548 this.state = 'HAVE_MASTER';
4549
4550 if (manifest.playlists) {
4551 this.master = manifest;
4552 addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
4553 // then resolve URIs in advance, as this is usually done after a playlist request,
4554 // which may never happen if the playlist arrives pre-resolved.
4555
4556 manifest.playlists.forEach(function (playlist) {
4557 playlist.segments = getAllSegments(playlist);
4558 playlist.segments.forEach(function (segment) {
4559 resolveSegmentUris(segment, playlist.resolvedUri);
4560 });
4561 });
4562 this.trigger('loadedplaylist');
4563
4564 if (!this.request) {
4565 // no media playlist was specifically selected so start
4566 // from the first listed one
4567 this.media(this.master.playlists[0]);
4568 }
4569
4570 return;
4571 } // In order to support media playlists passed in as vhs-json, the case where the uri
4572 // is not provided as part of the manifest should be considered, and an appropriate
4573 // default used.
4574
4575
4576 var uri = this.srcUri() || window.location.href;
4577 this.master = masterForMedia(manifest, uri);
4578 this.haveMetadata({
4579 playlistObject: manifest,
4580 url: uri,
4581 id: this.master.playlists[0].id
4582 });
4583 this.trigger('loadedmetadata');
4584 };
4585
4586 return PlaylistLoader;
4587 }(EventTarget$1);
4588
4589 /**
4590 * @file xhr.js
4591 */
4592 var videojsXHR = videojs__default["default"].xhr,
4593 mergeOptions$1 = videojs__default["default"].mergeOptions;
4594
4595 var callbackWrapper = function callbackWrapper(request, error, response, callback) {
4596 var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
4597
4598 if (!error && reqResponse) {
4599 request.responseTime = Date.now();
4600 request.roundTripTime = request.responseTime - request.requestTime;
4601 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
4602
4603 if (!request.bandwidth) {
4604 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
4605 }
4606 }
4607
4608 if (response.headers) {
4609 request.responseHeaders = response.headers;
4610 } // videojs.xhr now uses a specific code on the error
4611 // object to signal that a request has timed out instead
4612 // of setting a boolean on the request object
4613
4614
4615 if (error && error.code === 'ETIMEDOUT') {
4616 request.timedout = true;
4617 } // videojs.xhr no longer considers status codes outside of 200 and 0
4618 // (for file uris) to be errors, but the old XHR did, so emulate that
4619 // behavior. Status 206 may be used in response to byterange requests.
4620
4621
4622 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
4623 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
4624 }
4625
4626 callback(error, request);
4627 };
4628
4629 var xhrFactory = function xhrFactory() {
4630 var xhr = function XhrFunction(options, callback) {
4631 // Add a default timeout
4632 options = mergeOptions$1({
4633 timeout: 45e3
4634 }, options); // Allow an optional user-specified function to modify the option
4635 // object before we construct the xhr request
4636
4637 var beforeRequest = XhrFunction.beforeRequest || videojs__default["default"].Vhs.xhr.beforeRequest;
4638
4639 if (beforeRequest && typeof beforeRequest === 'function') {
4640 var newOptions = beforeRequest(options);
4641
4642 if (newOptions) {
4643 options = newOptions;
4644 }
4645 } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
4646 // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
4647
4648
4649 var xhrMethod = videojs__default["default"].Vhs.xhr.original === true ? videojsXHR : videojs__default["default"].Vhs.xhr;
4650 var request = xhrMethod(options, function (error, response) {
4651 return callbackWrapper(request, error, response, callback);
4652 });
4653 var originalAbort = request.abort;
4654
4655 request.abort = function () {
4656 request.aborted = true;
4657 return originalAbort.apply(request, arguments);
4658 };
4659
4660 request.uri = options.uri;
4661 request.requestTime = Date.now();
4662 return request;
4663 };
4664
4665 xhr.original = true;
4666 return xhr;
4667 };
4668 /**
4669 * Turns segment byterange into a string suitable for use in
4670 * HTTP Range requests
4671 *
4672 * @param {Object} byterange - an object with two values defining the start and end
4673 * of a byte-range
4674 */
4675
4676
4677 var byterangeStr = function byterangeStr(byterange) {
4678 // `byterangeEnd` is one less than `offset + length` because the HTTP range
4679 // header uses inclusive ranges
4680 var byterangeEnd;
4681 var byterangeStart = byterange.offset;
4682
4683 if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
4684 byterangeEnd = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);
4685 } else {
4686 byterangeEnd = byterange.offset + byterange.length - 1;
4687 }
4688
4689 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
4690 };
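  // Editor's example: a 100-byte range starting at offset 0 becomes an
  // inclusive HTTP Range value.
  //
  //   byterangeStr({ offset: 0, length: 100 }); // => 'bytes=0-99'
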
4691 /**
4692 * Defines headers for use in the xhr request for a particular segment.
4693 *
4694 * @param {Object} segment - a simplified copy of the segmentInfo object
4695 * from SegmentLoader
4696 */
4697
4698 var segmentXhrHeaders = function segmentXhrHeaders(segment) {
4699 var headers = {};
4700
4701 if (segment.byterange) {
4702 headers.Range = byterangeStr(segment.byterange);
4703 }
4704
4705 return headers;
4706 };
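  // Editor's example (hypothetical segment): only byterange segments get a
  // Range header; everything else requests the whole resource.
  //
  //   segmentXhrHeaders({ byterange: { offset: 200, length: 100 } });
  //   // => { Range: 'bytes=200-299' }
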
4707
4708 var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
4709 var DASH_REGEX = /^application\/dash\+xml/i;
4710 /**
4711 * Returns a string that describes the type of source based on a video source object's
4712 * media type.
4713 *
4714 * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
4715 *
4716 * @param {string} type
4717 * Video source object media type
4718 * @return {('hls'|'dash'|'vhs-json'|null)}
4719 * VHS source type string
4720 */
4721
4722 var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
4723 if (MPEGURL_REGEX.test(type)) {
4724 return 'hls';
4725 }
4726
4727 if (DASH_REGEX.test(type)) {
4728 return 'dash';
4729 } // Denotes the special case of a manifest object passed to http-streaming instead of a
4730 // source URL.
4731 //
4732 // See https://en.wikipedia.org/wiki/Media_type for details on specifying media types.
4733 //
4734 // In this case, vnd stands for vendor, video.js for the organization, VHS for this
4735 // project, and the +json suffix identifies the structure of the media type.
4736
4737
4738 if (type === 'application/vnd.videojs.vhs+json') {
4739 return 'vhs-json';
4740 }
4741
4742 return null;
4743 };
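  // Editor's examples of the mapping above:
  //
  //   simpleTypeFromSourceType('application/x-mpegURL');            // => 'hls'
  //   simpleTypeFromSourceType('application/dash+xml');             // => 'dash'
  //   simpleTypeFromSourceType('application/vnd.videojs.vhs+json'); // => 'vhs-json'
  //   simpleTypeFromSourceType('video/mp4');                        // => null
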
4744
4745 // const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
4746 // we used to do this with log2 but BigInt does not support builtin math
4747 // Math.ceil(log2(x));
4748
4749
4750 var countBits = function countBits(x) {
4751 return x.toString(2).length;
4752 }; // count the number of whole bytes it would take to represent a number
4753
4754 var countBytes = function countBytes(x) {
4755 return Math.ceil(countBits(x) / 8);
4756 };
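  // Editor's examples: countBits is the length of the base-2 string, and
  // countBytes rounds that up to whole bytes.
  //
  //   countBits(255);  // => 8
  //   countBytes(255); // => 1
  //   countBytes(256); // => 2
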
4757 var isArrayBufferView = function isArrayBufferView(obj) {
4758 if (typeof ArrayBuffer.isView === 'function') {
4759 return ArrayBuffer.isView(obj);
4760 }
4761
4762 return obj && obj.buffer instanceof ArrayBuffer;
4763 };
4764 var isTypedArray = function isTypedArray(obj) {
4765 return isArrayBufferView(obj);
4766 };
4767 var toUint8 = function toUint8(bytes) {
4768 if (bytes instanceof Uint8Array) {
4769 return bytes;
4770 }
4771
4772 if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
4773 // any non-number or NaN leads to empty uint8array
4774 // eslint-disable-next-line
4775 if (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {
4776 bytes = 0;
4777 } else {
4778 bytes = [bytes];
4779 }
4780 }
4781
4782 return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);
4783 };
4784 var BigInt = window.BigInt || Number;
4785 var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
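  // Editor's note: the following self-invoking function checks platform
  // endianness by viewing a Uint16Array as bytes, but its return value is
  // discarded in this bundle, so it has no effect here.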
4786 (function () {
4787 var a = new Uint16Array([0xFFCC]);
4788 var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
4789
4790 if (b[0] === 0xFF) {
4791 return 'big';
4792 }
4793
4794 if (b[0] === 0xCC) {
4795 return 'little';
4796 }
4797
4798 return 'unknown';
4799 })();
4800 var bytesToNumber = function bytesToNumber(bytes, _temp) {
4801 var _ref = _temp === void 0 ? {} : _temp,
4802 _ref$signed = _ref.signed,
4803 signed = _ref$signed === void 0 ? false : _ref$signed,
4804 _ref$le = _ref.le,
4805 le = _ref$le === void 0 ? false : _ref$le;
4806
4807 bytes = toUint8(bytes);
4808 var fn = le ? 'reduce' : 'reduceRight';
4809 var obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
4810 var number = obj.call(bytes, function (total, byte, i) {
4811 var exponent = le ? i : Math.abs(i + 1 - bytes.length);
4812 return total + BigInt(byte) * BYTE_TABLE[exponent];
4813 }, BigInt(0));
4814
4815 if (signed) {
4816 var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
4817 number = BigInt(number);
4818
4819 if (number > max) {
4820 number -= max;
4821 number -= max;
4822 number -= BigInt(2);
4823 }
4824 }
4825
4826 return Number(number);
4827 };
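  // Editor's examples: big-endian by default, little-endian with le, and
  // two's-complement interpretation with signed.
  //
  //   bytesToNumber([0x01, 0x00]);               // => 256
  //   bytesToNumber([0x01, 0x00], { le: true }); // => 1
  //   bytesToNumber([0xFF], { signed: true });   // => -1
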
4828 var numberToBytes = function numberToBytes(number, _temp2) {
4829 var _ref2 = _temp2 === void 0 ? {} : _temp2,
4830 _ref2$le = _ref2.le,
4831 le = _ref2$le === void 0 ? false : _ref2$le; // eslint-disable-next-line
4832
4833
4834 if (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {
4835 number = 0;
4836 }
4837
4838 number = BigInt(number);
4839 var byteCount = countBytes(number);
4840 var bytes = new Uint8Array(new ArrayBuffer(byteCount));
4841
4842 for (var i = 0; i < byteCount; i++) {
4843 var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
4844 bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));
4845
4846 if (number < 0) {
4847 bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
4848 bytes[byteIndex] -= i === 0 ? 1 : 2;
4849 }
4850 }
4851
4852 return bytes;
4853 };
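  // Editor's example: the inverse of bytesToNumber, again big-endian by
  // default.
  //
  //   numberToBytes(256); // => Uint8Array [ 0x01, 0x00 ]
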
4854 var stringToBytes = function stringToBytes(string, stringIsBytes) {
4855 if (typeof string !== 'string' && string && typeof string.toString === 'function') {
4856 string = string.toString();
4857 }
4858
4859 if (typeof string !== 'string') {
4860 return new Uint8Array();
4861 } // If the string already is bytes, we don't have to do this
4862 // otherwise we do this so that we split multi length characters
4863 // into individual bytes
4864
4865
4866 if (!stringIsBytes) {
4867 string = unescape(encodeURIComponent(string));
4868 }
4869
4870 var view = new Uint8Array(string.length);
4871
4872 for (var i = 0; i < string.length; i++) {
4873 view[i] = string.charCodeAt(i);
4874 }
4875
4876 return view;
4877 };
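  // Editor's example: multi-byte characters are split into their utf-8
  // bytes unless the string is flagged as already being bytes.
  //
  //   stringToBytes('é'); // => Uint8Array [ 0xC3, 0xA9 ]
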
4878 var concatTypedArrays = function concatTypedArrays() {
4879 for (var _len = arguments.length, buffers = new Array(_len), _key = 0; _key < _len; _key++) {
4880 buffers[_key] = arguments[_key];
4881 }
4882
4883 buffers = buffers.filter(function (b) {
4884 return b && (b.byteLength || b.length) && typeof b !== 'string';
4885 });
4886
4887 if (buffers.length <= 1) {
4888 // for 0 length we will return empty uint8
4889 // for 1 length we return the first uint8
4890 return toUint8(buffers[0]);
4891 }
4892
4893 var totalLen = buffers.reduce(function (total, buf, i) {
4894 return total + (buf.byteLength || buf.length);
4895 }, 0);
4896 var tempBuffer = new Uint8Array(totalLen);
4897 var offset = 0;
4898 buffers.forEach(function (buf) {
4899 buf = toUint8(buf);
4900 tempBuffer.set(buf, offset);
4901 offset += buf.byteLength;
4902 });
4903 return tempBuffer;
4904 };
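  // Editor's example: inputs are normalized to Uint8Array and joined;
  // empty and non-byte values are filtered out first.
  //
  //   concatTypedArrays([0x00], new Uint8Array([0x01]), null);
  //   // => Uint8Array [ 0x00, 0x01 ]
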
4905 /**
4906 * Check if the bytes "b" are contained within bytes "a".
4907 *
4908 * @param {Uint8Array|Array} a
4909 * Bytes to check in
4910 *
4911 * @param {Uint8Array|Array} b
4912 * Bytes to check for
4913 *
4914 * @param {Object} options
4915 * options
4916 *
4917 * @param {number} [offset=0]
4918 * offset to use when looking at bytes in a
4919 *
4920 * @param {Array|Uint8Array} [mask=[]]
4921 * mask to use on bytes before comparison.
4922 *
4923 * @return {boolean}
4924 * If all bytes in b are inside of a, taking into account
4925 * bit masks.
4926 */
4927
4928 var bytesMatch = function bytesMatch(a, b, _temp3) {
4929 var _ref3 = _temp3 === void 0 ? {} : _temp3,
4930 _ref3$offset = _ref3.offset,
4931 offset = _ref3$offset === void 0 ? 0 : _ref3$offset,
4932 _ref3$mask = _ref3.mask,
4933 mask = _ref3$mask === void 0 ? [] : _ref3$mask;
4934
4935 a = toUint8(a);
4936 b = toUint8(b); // ie 11 does not support uint8 every
4937
4938 var fn = b.every ? b.every : Array.prototype.every;
4939 return b.length && a.length - offset >= b.length && // ie 11 doesn't support every on uint8
4940 fn.call(b, function (bByte, i) {
4941 var aByte = mask[i] ? mask[i] & a[offset + i] : a[offset + i];
4942 return bByte === aByte;
4943 });
4944 };
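  // Editor's examples (hypothetical bytes): matching at an offset, and
  // matching under a bit mask.
  //
  //   bytesMatch([0x49, 0x44, 0x33], [0x44, 0x33], { offset: 1 }); // => true
  //   bytesMatch([0xF0], [0x80], { mask: [0x80] });                // => true
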
4945
4946 /**
4947 * @file bin-utils.js
4948 */
4949
4950 /**
4951 * convert a TimeRange to text
4952 *
4953 * @param {TimeRange} range the timerange to use for conversion
4954 * @param {number} i the iterator on the range to convert
4955 * @return {string} the range in string format
4956 */
4957
4958 var textRange = function textRange(range, i) {
4959 return range.start(i) + '-' + range.end(i);
4960 };
4961 /**
4962 * format a number as hex string
4963 *
4964 * @param {number} e The number
4965 * @param {number} i the iterator
4966 * @return {string} the hex formatted number as a string
4967 */
4968
4969
4970 var formatHexString = function formatHexString(e, i) {
4971 var value = e.toString(16);
4972 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
4973 };
4974
4975 var formatAsciiString = function formatAsciiString(e) {
4976 if (e >= 0x20 && e < 0x7e) {
4977 return String.fromCharCode(e);
4978 }
4979
4980 return '.';
4981 };
4982 /**
4983 * Creates an object for sending to a web worker modifying properties that are TypedArrays
4984 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
4985 *
4986 * @param {Object} message
4987 * Object of properties and values to send to the web worker
4988 * @return {Object}
4989 * Modified message with TypedArray values expanded
4990 * @function createTransferableMessage
4991 */
4992
4993
4994 var createTransferableMessage = function createTransferableMessage(message) {
4995 var transferable = {};
4996 Object.keys(message).forEach(function (key) {
4997 var value = message[key];
4998
4999 if (isArrayBufferView(value)) {
5000 transferable[key] = {
5001 bytes: value.buffer,
5002 byteOffset: value.byteOffset,
5003 byteLength: value.byteLength
5004 };
5005 } else {
5006 transferable[key] = value;
5007 }
5008 });
5009 return transferable;
5010 };
5011 /**
5012 * Returns a unique string identifier for a media initialization
5013 * segment.
5014 *
5015 * @param {Object} initSegment
5016 * the init segment object.
5017 *
5018 * @return {string} the generated init segment id
5019 */
5020
5021 var initSegmentId = function initSegmentId(initSegment) {
5022 var byterange = initSegment.byterange || {
5023 length: Infinity,
5024 offset: 0
5025 };
5026 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
5027 };
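  // Editor's example (hypothetical init segment): the id is simply the
  // byterange and resolved URI joined with commas.
  //
  //   initSegmentId({
  //     byterange: { length: 720, offset: 0 },
  //     resolvedUri: 'https://example.com/init.mp4'
  //   });
  //   // => '720,0,https://example.com/init.mp4'
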
5028 /**
5029 * Returns a unique string identifier for a media segment key.
5030 *
5031 * @param {Object} key the encryption key
5032 * @return {string} the unique id for the media segment key.
5033 */
5034
5035 var segmentKeyId = function segmentKeyId(key) {
5036 return key.resolvedUri;
5037 };
5038 /**
5039 * utils to help dump binary data to the console
5040 *
5041 * @param {Array|TypedArray} data
5042 * data to dump to a string
5043 *
5044 * @return {string} the data as a hex string.
5045 */
5046
5047 var hexDump = function hexDump(data) {
5048 var bytes = Array.prototype.slice.call(data);
5049 var step = 16;
5050 var result = '';
5051 var hex;
5052 var ascii;
5053
5054 for (var j = 0; j < bytes.length / step; j++) {
5055 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
5056 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
5057 result += hex + ' ' + ascii + '\n';
5058 }
5059
5060 return result;
5061 };
5062 var tagDump = function tagDump(_ref) {
5063 var bytes = _ref.bytes;
5064 return hexDump(bytes);
5065 };
5066 var textRanges = function textRanges(ranges) {
5067 var result = '';
5068 var i;
5069
5070 for (i = 0; i < ranges.length; i++) {
5071 result += textRange(ranges, i) + ' ';
5072 }
5073
5074 return result;
5075 };
5076
5077 var utils = /*#__PURE__*/Object.freeze({
5078 __proto__: null,
5079 createTransferableMessage: createTransferableMessage,
5080 initSegmentId: initSegmentId,
5081 segmentKeyId: segmentKeyId,
5082 hexDump: hexDump,
5083 tagDump: tagDump,
5084 textRanges: textRanges
5085 });
5086
5087 // TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
5088 // 25% was arbitrarily chosen, and may need to be refined over time.
5089
5090 var SEGMENT_END_FUDGE_PERCENT = 0.25;
5091 /**
5092 * Converts a player time (any time that can be gotten/set from player.currentTime(),
5093 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
5094 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
5095 *
5096 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
5097 * point" (a point where we have a mapping from program time to player time, with player
5098 * time being the post transmux start of the segment).
5099 *
5100 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
5101 *
5102 * @param {number} playerTime the player time
5103 * @param {Object} segment the segment which contains the player time
5104 * @return {Date} program time
5105 */
5106
5107 var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
5108 if (!segment.dateTimeObject) {
5109 // Can't convert without an "anchor point" for the program time (i.e., a time that can
5110 // be used to map the start of a segment with a real world time).
5111 return null;
5112 }
5113
5114 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
5115 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the segment's own content, skipping past any content the transmuxer prepended
5116
5117 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
5118 var offsetFromSegmentStart = playerTime - startOfSegment;
5119 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
5120 };
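  // Editor's sketch (hypothetical timing values): with a segment anchored at
  // a program date and a post-transmux start of 10s, player time 12s maps to
  // two seconds past the anchor.
  //
  //   playerTimeToProgramTime(12, {
  //     dateTimeObject: new Date('2022-01-01T00:00:00.000Z'),
  //     videoTimingInfo: { transmuxerPrependedSeconds: 0, transmuxedPresentationStart: 10 }
  //   });
  //   // => new Date('2022-01-01T00:00:02.000Z')
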
5121 var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
5122 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
5123 };
5124 /**
5125 * Finds a segment that contains the time requested given as an ISO-8601 string. The
5126 * returned segment might be an estimate or an accurate match.
5127 *
5128 * @param {string} programTime The ISO-8601 programTime to find a match for
5129 * @param {Object} playlist A playlist object to search within
5130 */
5131
5132 var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
5133 // Assumptions:
5134 // - verifyProgramDateTimeTags has already been run
5135 // - live streams have been started
5136 var dateTimeObject;
5137
5138 try {
5139 dateTimeObject = new Date(programTime);
5140 } catch (e) {
5141 return null;
5142 }
5143
5144 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
5145 return null;
5146 }
5147
5148 var segment = playlist.segments[0];
5149
5150 if (dateTimeObject < segment.dateTimeObject) {
5151 // Requested time is before stream start.
5152 return null;
5153 }
5154
5155 for (var i = 0; i < playlist.segments.length - 1; i++) {
5156 segment = playlist.segments[i];
5157 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
5158
5159 if (dateTimeObject < nextSegmentStart) {
5160 break;
5161 }
5162 }
5163
5164 var lastSegment = playlist.segments[playlist.segments.length - 1];
5165 var lastSegmentStart = lastSegment.dateTimeObject;
5166 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
5167 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
5168
5169 if (dateTimeObject > lastSegmentEnd) {
5170 // Beyond the end of the stream, or our best guess of the end of the stream.
5171 return null;
5172 }
5173
5174 if (dateTimeObject > lastSegmentStart) {
5175 segment = lastSegment;
5176 }
5177
5178 return {
5179 segment: segment,
5180 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
5181 // Although, given that all segments have accurate date time objects, the segment
5182 // selected should be accurate, unless the video has been transmuxed at some point
5183 // (determined by the presence of the videoTimingInfo object), the segment's "player
5184 // time" (the start time in the player) can't be considered accurate.
5185 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
5186 };
5187 };
5188 /**
5189 * Finds a segment that contains the given player time(in seconds).
5190 *
5191 * @param {number} time The player time to find a match for
5192 * @param {Object} playlist A playlist object to search within
5193 */
5194
5195 var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
5196 // Assumptions:
5197 // - there will always be a segment.duration
5198 // - we can start from zero
5199 // - segments are in time order
5200 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
5201 return null;
5202 }
5203
5204 var segmentEnd = 0;
5205 var segment;
5206
5207 for (var i = 0; i < playlist.segments.length; i++) {
5208 segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
5209 // should contain the most accurate values we have for the segment's player times.
5210 //
5211 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
5212 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
5213 // calculate an end value.
5214
5215 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
5216
5217 if (time <= segmentEnd) {
5218 break;
5219 }
5220 }
5221
5222 var lastSegment = playlist.segments[playlist.segments.length - 1];
5223
5224 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
5225 // The time requested is beyond the stream end.
5226 return null;
5227 }
5228
5229 if (time > segmentEnd) {
5230 // The time is within or beyond the last segment.
5231 //
5232 // Check to see if the time is beyond a reasonable guess of the end of the stream.
5233 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
5234 // Technically, because the duration value is only an estimate, the time may still
5235 // exist in the last segment, however, there isn't enough information to make even
5236 // a reasonable estimate.
5237 return null;
5238 }
5239
5240 segment = lastSegment;
5241 }
5242
5243 return {
5244 segment: segment,
5245 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
5246 // Because videoTimingInfo is only set after transmux, it is the only way to get
5247 // accurate timing values.
5248 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
5249 };
5250 };
5251 /**
5252 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
5253 * If the offset returned is positive, the programTime occurs after the
5254 * comparisonTimestamp.
5255 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
5256 *
5257 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
5258 * @param {string} programTime The programTime as an ISO-8601 string
5259 * @return {number} offset
5260 */
5261
5262 var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
5263 var segmentDateTime;
5264 var programDateTime;
5265
5266 try {
5267 segmentDateTime = new Date(comparisonTimeStamp);
5268 programDateTime = new Date(programTime);
5269 } catch (e) {// TODO handle error
5270 }
5271
5272 var segmentTimeEpoch = segmentDateTime.getTime();
5273 var programTimeEpoch = programDateTime.getTime();
5274 return (programTimeEpoch - segmentTimeEpoch) / 1000;
5275 };
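  // Editor's example: a positive offset means the programTime is after the
  // comparison timestamp.
  //
  //   getOffsetFromTimestamp('2022-01-01T00:00:00Z', '2022-01-01T00:00:05Z'); // => 5
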
5276 /**
5277 * Checks that all segments in this playlist have programDateTime tags.
5278 *
5279 * @param {Object} playlist A playlist object
5280 */
5281
5282 var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
5283 if (!playlist.segments || playlist.segments.length === 0) {
5284 return false;
5285 }
5286
5287 for (var i = 0; i < playlist.segments.length; i++) {
5288 var segment = playlist.segments[i];
5289
5290 if (!segment.dateTimeObject) {
5291 return false;
5292 }
5293 }
5294
5295 return true;
5296 };
5297 /**
5298 * Returns the programTime of the media given a playlist and a playerTime.
5299 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
5300 * If the segments containing the time requested have not been buffered yet, an estimate
5301 * may be returned to the callback.
5302 *
5303 * @param {Object} args
5304 * @param {Object} args.playlist A playlist object to search within
5305 * @param {number} time A playerTime in seconds
5306 * @param {Function} callback(err, programTime)
5307 * @return {string} err.message A detailed error message
5308 * @return {Object} programTime
5309 * @return {number} programTime.mediaSeconds The streamTime in seconds
5310 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
5311 */
5312
5313 var getProgramTime = function getProgramTime(_ref) {
5314 var playlist = _ref.playlist,
5315 _ref$time = _ref.time,
5316 time = _ref$time === void 0 ? undefined : _ref$time,
5317 callback = _ref.callback;
5318
5319 if (!callback) {
5320 throw new Error('getProgramTime: callback must be provided');
5321 }
5322
5323 if (!playlist || time === undefined) {
5324 return callback({
5325 message: 'getProgramTime: playlist and time must be provided'
5326 });
5327 }
5328
5329 var matchedSegment = findSegmentForPlayerTime(time, playlist);
5330
5331 if (!matchedSegment) {
5332 return callback({
5333 message: 'valid programTime was not found'
5334 });
5335 }
5336
5337 if (matchedSegment.type === 'estimate') {
5338 return callback({
5339 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
5340 seekTime: matchedSegment.estimatedStart
5341 });
5342 }
5343
5344 var programTimeObject = {
5345 mediaSeconds: time
5346 };
5347 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
5348
5349 if (programTime) {
5350 programTimeObject.programDateTime = programTime.toISOString();
5351 }
5352
5353 return callback(null, programTimeObject);
5354 };
5355 /**
5356 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
5357 *
5358 * @param {Object} args
5359 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
5360 * @param {Object} args.playlist A playlist to look within
5361 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
5362 * @param {Function} args.seekTo A method to perform a seek
5363 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
5364 * @param {Object} args.tech The tech to seek on
5365 * @param {Function} args.callback(err, newTime) A callback to return the new time to
5366 * @return {string} err.message A detailed error message
5367 * @return {number} newTime The exact time that was seeked to in seconds
5368 */
5369
5370 var seekToProgramTime = function seekToProgramTime(_ref2) {
5371 var programTime = _ref2.programTime,
5372 playlist = _ref2.playlist,
5373 _ref2$retryCount = _ref2.retryCount,
5374 retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
5375 seekTo = _ref2.seekTo,
5376 _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
5377 pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
5378 tech = _ref2.tech,
5379 callback = _ref2.callback;
5380
5381 if (!callback) {
5382 throw new Error('seekToProgramTime: callback must be provided');
5383 }
5384
5385 if (typeof programTime === 'undefined' || !playlist || !seekTo) {
5386 return callback({
5387 message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
5388 });
5389 }
5390
5391 if (!playlist.endList && !tech.hasStarted_) {
5392 return callback({
5393 message: 'player must be playing a live stream to start buffering'
5394 });
5395 }
5396
5397 if (!verifyProgramDateTimeTags(playlist)) {
5398 return callback({
5399 message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
5400 });
5401 }
5402
5403 var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
5404
5405 if (!matchedSegment) {
5406 return callback({
5407 message: programTime + " was not found in the stream"
5408 });
5409 }
5410
5411 var segment = matchedSegment.segment;
5412 var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
5413
5414 if (matchedSegment.type === 'estimate') {
5415 // we've run out of retries
5416 if (retryCount === 0) {
5417 return callback({
5418 message: programTime + " is not buffered yet. Try again"
5419 });
5420 }
5421
5422 seekTo(matchedSegment.estimatedStart + mediaOffset);
5423 tech.one('seeked', function () {
5424 seekToProgramTime({
5425 programTime: programTime,
5426 playlist: playlist,
5427 retryCount: retryCount - 1,
5428 seekTo: seekTo,
5429 pauseAfterSeek: pauseAfterSeek,
5430 tech: tech,
5431 callback: callback
5432 });
5433 });
5434 return;
5435 } // Since the segment.start value is determined from the buffered end or ending time
5436 // of the prior segment, the seekToTime doesn't need to account for any transmuxer
5437 // modifications.
5438
5439
5440 var seekToTime = segment.start + mediaOffset;
5441
5442 var seekedCallback = function seekedCallback() {
5443 return callback(null, tech.currentTime());
5444 }; // listen for seeked event
5445
5446
5447 tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
5448
5449 if (pauseAfterSeek) {
5450 tech.pause();
5451 }
5452
5453 seekTo(seekToTime);
5454 };
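  // Illustrative usage sketch (not invoked anywhere in this bundle): seeking
  // a live stream to a wall-clock time. `tech` is assumed to be a video.js
  // tech and `playlist` a playlist carrying EXT-X-PROGRAM-DATE-TIME tags;
  // the names and the date string are hypothetical.
  var exampleSeekToProgramTime = function exampleSeekToProgramTime(tech, playlist) {
    seekToProgramTime({
      programTime: '2022-01-01T00:00:30.000Z',
      playlist: playlist,
      seekTo: function (time) {
        tech.setCurrentTime(time);
      },
      tech: tech,
      callback: function (err, newTime) {
        if (err) {
          // e.g. the target time is not buffered yet, or PDT tags are missing
          return;
        }
        // newTime is the exact time that was seeked to, in seconds
      }
    });
  };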
5455
5456 /**
5457 * Loops through all supported media groups in master and calls the provided
5458 * callback for each group
5459 *
5460 * @param {Object} master
5461 * The parsed master manifest object
5462 * @param {string[]} groups
5463 * The media groups to call the callback for
5464 * @param {Function} callback
5465 * Callback to call for each media group
5466 */
5467 var forEachMediaGroup = function forEachMediaGroup(master, groups, callback) {
5468 groups.forEach(function (mediaType) {
5469 for (var groupKey in master.mediaGroups[mediaType]) {
5470 for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
5471 var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
5472 callback(mediaProperties, mediaType, groupKey, labelKey);
5473 }
5474 }
5475 });
5476 };
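  // Illustrative usage sketch (not invoked anywhere in this bundle):
  // collecting a label for every audio and subtitle group in a parsed
  // manifest. `master` is assumed to be a parsed main manifest object.
  var exampleListMediaGroupLabels = function exampleListMediaGroupLabels(master) {
    var labels = [];
    forEachMediaGroup(master, ['AUDIO', 'SUBTITLES'], function (properties, mediaType, groupKey, labelKey) {
      labels.push(mediaType + '/' + groupKey + '/' + labelKey);
    });
    return labels; // e.g. ['AUDIO/audio/en', 'SUBTITLES/subs/en']
  };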
5477
5478 /*! @name mpd-parser @version 0.22.1 @license Apache-2.0 */
5479
5480 var isObject = function isObject(obj) {
5481 return !!obj && typeof obj === 'object';
5482 };
5483
5484 var merge = function merge() {
5485 for (var _len = arguments.length, objects = new Array(_len), _key = 0; _key < _len; _key++) {
5486 objects[_key] = arguments[_key];
5487 }
5488
5489 return objects.reduce(function (result, source) {
5490 if (typeof source !== 'object') {
5491 return result;
5492 }
5493
5494 Object.keys(source).forEach(function (key) {
5495 if (Array.isArray(result[key]) && Array.isArray(source[key])) {
5496 result[key] = result[key].concat(source[key]);
5497 } else if (isObject(result[key]) && isObject(source[key])) {
5498 result[key] = merge(result[key], source[key]);
5499 } else {
5500 result[key] = source[key];
5501 }
5502 });
5503 return result;
5504 }, {});
5505 };
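  // Behavior sketch (not invoked anywhere in this bundle): merge concatenates
  // arrays, recurses into nested objects, and lets later arguments overwrite
  // scalar values.
  var exampleMerge = function exampleMerge() {
    return merge({
      codecs: ['avc1'],
      byterange: { offset: 0 },
      bandwidth: 1
    }, {
      codecs: ['mp4a'],
      byterange: { length: 10 },
      bandwidth: 2
    });
    // => { codecs: ['avc1', 'mp4a'], byterange: { offset: 0, length: 10 }, bandwidth: 2 }
  };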
5506
5507 var values = function values(o) {
5508 return Object.keys(o).map(function (k) {
5509 return o[k];
5510 });
5511 };
5512
5513 var range = function range(start, end) {
5514 var result = [];
5515
5516 for (var i = start; i < end; i++) {
5517 result.push(i);
5518 }
5519
5520 return result;
5521 };
5522
5523 var flatten = function flatten(lists) {
5524 return lists.reduce(function (x, y) {
5525 return x.concat(y);
5526 }, []);
5527 };
5528
5529 var from = function from(list) {
5530 if (!list.length) {
5531 return [];
5532 }
5533
5534 var result = [];
5535
5536 for (var i = 0; i < list.length; i++) {
5537 result.push(list[i]);
5538 }
5539
5540 return result;
5541 };
5542
5543 var findIndexes = function findIndexes(l, key) {
5544 return l.reduce(function (a, e, i) {
5545 if (e[key]) {
5546 a.push(i);
5547 }
5548
5549 return a;
5550 }, []);
5551 };
5552 /**
5553 * Returns the first index that satisfies the matching function, or -1 if not found.
5554 *
5555 * Only necessary because of IE11 support.
5556 *
5557 * @param {Array} list - the list to search through
5558 * @param {Function} matchingFunction - the matching function
5559 *
5560 * @return {number} the matching index or -1 if not found
5561 */
5562
5563
5564 var findIndex = function findIndex(list, matchingFunction) {
5565 for (var i = 0; i < list.length; i++) {
5566 if (matchingFunction(list[i])) {
5567 return i;
5568 }
5569 }
5570
5571 return -1;
5572 };
5573 /**
5574 * Returns a union of the included lists provided each element can be identified by a key.
5575 *
5576 * @param {Array} lists - the list of lists to get the union of
5577 * @param {Function} keyFunction - the function to use as a key for each element
5578 *
5579 * @return {Array} the union of the arrays
5580 */
5581
5582
5583 var union = function union(lists, keyFunction) {
5584 return values(lists.reduce(function (acc, list) {
5585 list.forEach(function (el) {
5586 acc[keyFunction(el)] = el;
5587 });
5588 return acc;
5589 }, {}));
5590 };
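  // Behavior sketch (not invoked anywhere in this bundle): union keys each
  // element by the provided function, so duplicates from later lists replace
  // earlier ones while unique elements from every list are kept.
  var exampleUnion = function exampleUnion() {
    return union([
      [{ timeline: 0 }, { timeline: 18 }],
      [{ timeline: 18 }, { timeline: 36 }]
    ], function (el) {
      return el.timeline;
    });
    // => [{ timeline: 0 }, { timeline: 18 }, { timeline: 36 }]
  };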
5591
5592 var errors = {
5593 INVALID_NUMBER_OF_PERIOD: 'INVALID_NUMBER_OF_PERIOD',
5594 DASH_EMPTY_MANIFEST: 'DASH_EMPTY_MANIFEST',
5595 DASH_INVALID_XML: 'DASH_INVALID_XML',
5596 NO_BASE_URL: 'NO_BASE_URL',
5597 MISSING_SEGMENT_INFORMATION: 'MISSING_SEGMENT_INFORMATION',
5598 SEGMENT_TIME_UNSPECIFIED: 'SEGMENT_TIME_UNSPECIFIED',
5599 UNSUPPORTED_UTC_TIMING_SCHEME: 'UNSUPPORTED_UTC_TIMING_SCHEME'
5600 };
5601 /**
5602 * @typedef {Object} SingleUri
5603 * @property {string} uri - relative location of segment
5604 * @property {string} resolvedUri - resolved location of segment
5605 * @property {Object} byterange - Object containing information on how to make byte range
5606 * requests following byte-range-spec per RFC2616.
5607 * @property {number} byterange.length - length of the range request
5608 * @property {number} byterange.offset - byte offset of the range request
5609 *
5610 * @see https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1
5611 */
5612
5613 /**
5614 * Converts a URLType node (5.3.9.2.3 Table 13) to a segment object
5615 * that conforms to how m3u8-parser is structured
5616 *
5617 * @see https://github.com/videojs/m3u8-parser
5618 *
5619 * @param {string} baseUrl - baseUrl provided by <BaseUrl> nodes
5620 * @param {string} source - source url for segment
5621 * @param {string} range - optional range used for range calls,
5622 * follows RFC 2616, Clause 14.35.1
5623 * @return {SingleUri} full segment information transformed into a format similar
5624 * to m3u8-parser
5625 */
5626
5627 var urlTypeToSegment = function urlTypeToSegment(_ref) {
5628 var _ref$baseUrl = _ref.baseUrl,
5629 baseUrl = _ref$baseUrl === void 0 ? '' : _ref$baseUrl,
5630 _ref$source = _ref.source,
5631 source = _ref$source === void 0 ? '' : _ref$source,
5632 _ref$range = _ref.range,
5633 range = _ref$range === void 0 ? '' : _ref$range,
5634 _ref$indexRange = _ref.indexRange,
5635 indexRange = _ref$indexRange === void 0 ? '' : _ref$indexRange;
5636 var segment = {
5637 uri: source,
5638 resolvedUri: resolveUrl$1(baseUrl || '', source)
5639 };
5640
5641 if (range || indexRange) {
5642 var rangeStr = range ? range : indexRange;
5643 var ranges = rangeStr.split('-'); // default to parsing this as a BigInt if possible
5644
5645 var startRange = window.BigInt ? window.BigInt(ranges[0]) : parseInt(ranges[0], 10);
5646 var endRange = window.BigInt ? window.BigInt(ranges[1]) : parseInt(ranges[1], 10); // convert back to a number if less than MAX_SAFE_INTEGER
5647
5648 if (startRange < Number.MAX_SAFE_INTEGER && typeof startRange === 'bigint') {
5649 startRange = Number(startRange);
5650 }
5651
5652 if (endRange < Number.MAX_SAFE_INTEGER && typeof endRange === 'bigint') {
5653 endRange = Number(endRange);
5654 }
5655
5656 var length;
5657
5658 if (typeof endRange === 'bigint' || typeof startRange === 'bigint') {
5659 length = window.BigInt(endRange) - window.BigInt(startRange) + window.BigInt(1);
5660 } else {
5661 length = endRange - startRange + 1;
5662 }
5663
5664 if (typeof length === 'bigint' && length < Number.MAX_SAFE_INTEGER) {
5665 length = Number(length);
5666 } // byterange should be inclusive according to
5667 // RFC 2616, Clause 14.35.1
5668
5669
5670 segment.byterange = {
5671 length: length,
5672 offset: startRange
5673 };
5674 }
5675
5676 return segment;
5677 };
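  // Illustrative usage sketch (not invoked anywhere in this bundle): turning
  // an Initialization URLType node with a byte range into a segment object.
  // The URL and file name below are hypothetical placeholders.
  var exampleUrlTypeToSegment = function exampleUrlTypeToSegment() {
    return urlTypeToSegment({
      baseUrl: 'https://example.com/dash/',
      source: 'init.mp4',
      range: '0-567'
    });
    // => { uri: 'init.mp4',
    //      resolvedUri: 'https://example.com/dash/init.mp4',
    //      byterange: { length: 568, offset: 0 } }
  };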
5678
5679 var byteRangeToString = function byteRangeToString(byterange) {
5680 // `endRange` is one less than `offset + length` because the HTTP range
5681 // header uses inclusive ranges
5682 var endRange;
5683
5684 if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
5685 endRange = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);
5686 } else {
5687 endRange = byterange.offset + byterange.length - 1;
5688 }
5689
5690 return byterange.offset + "-" + endRange;
5691 };
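  // Behavior sketch (not invoked anywhere in this bundle): the inverse of the
  // range parsing above, producing the inclusive form used in HTTP Range
  // headers.
  var exampleByteRangeToString = function exampleByteRangeToString() {
    return byteRangeToString({
      offset: 0,
      length: 568
    });
    // => '0-567'
  };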
5692 /**
5693 * Parses the end number attribute, which can be a string,
5694 * a number, or undefined.
5695 *
5696 * @param {string|number|undefined} endNumber
5697 * The end number attribute.
5698 *
5699 * @return {number|null}
5700 * The result of parsing the end number.
5701 */
5702
5703
5704 var parseEndNumber = function parseEndNumber(endNumber) {
5705 if (endNumber && typeof endNumber !== 'number') {
5706 endNumber = parseInt(endNumber, 10);
5707 }
5708
5709 if (isNaN(endNumber)) {
5710 return null;
5711 }
5712
5713 return endNumber;
5714 };
5715 /**
5716 * Functions for calculating the range of available segments in static and dynamic
5717 * manifests.
5718 */
5719
5720
5721 var segmentRange = {
5722 /**
5723 * Returns the entire range of available segments for a static MPD
5724 *
5725 * @param {Object} attributes
5726 * Inherited MPD attributes
5727 * @return {{ start: number, end: number }}
5728 * The start and end numbers for available segments
5729 */
5730 static: function _static(attributes) {
5731 var duration = attributes.duration,
5732 _attributes$timescale = attributes.timescale,
5733 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
5734 sourceDuration = attributes.sourceDuration,
5735 periodDuration = attributes.periodDuration;
5736 var endNumber = parseEndNumber(attributes.endNumber);
5737 var segmentDuration = duration / timescale;
5738
5739 if (typeof endNumber === 'number') {
5740 return {
5741 start: 0,
5742 end: endNumber
5743 };
5744 }
5745
5746 if (typeof periodDuration === 'number') {
5747 return {
5748 start: 0,
5749 end: periodDuration / segmentDuration
5750 };
5751 }
5752
5753 return {
5754 start: 0,
5755 end: sourceDuration / segmentDuration
5756 };
5757 },
5758
5759 /**
5760 * Returns the current live window range of available segments for a dynamic MPD
5761 *
5762 * @param {Object} attributes
5763 * Inherited MPD attributes
5764 * @return {{ start: number, end: number }}
5765 * The start and end numbers for available segments
5766 */
5767 dynamic: function dynamic(attributes) {
5768 var NOW = attributes.NOW,
5769 clientOffset = attributes.clientOffset,
5770 availabilityStartTime = attributes.availabilityStartTime,
5771 _attributes$timescale2 = attributes.timescale,
5772 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
5773 duration = attributes.duration,
5774 _attributes$periodSta = attributes.periodStart,
5775 periodStart = _attributes$periodSta === void 0 ? 0 : _attributes$periodSta,
5776 _attributes$minimumUp = attributes.minimumUpdatePeriod,
5777 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp,
5778 _attributes$timeShift = attributes.timeShiftBufferDepth,
5779 timeShiftBufferDepth = _attributes$timeShift === void 0 ? Infinity : _attributes$timeShift;
5780 var endNumber = parseEndNumber(attributes.endNumber); // clientOffset is passed in at the top level of mpd-parser and is an offset calculated
5781 // after retrieving UTC server time.
5782
5783 var now = (NOW + clientOffset) / 1000; // WC stands for Wall Clock.
5784 // Convert the period start time to EPOCH.
5785
5786 var periodStartWC = availabilityStartTime + periodStart; // Period end in EPOCH is manifest's retrieval time + time until next update.
5787
5788 var periodEndWC = now + minimumUpdatePeriod;
5789 var periodDuration = periodEndWC - periodStartWC;
5790 var segmentCount = Math.ceil(periodDuration * timescale / duration);
5791 var availableStart = Math.floor((now - periodStartWC - timeShiftBufferDepth) * timescale / duration);
5792 var availableEnd = Math.floor((now - periodStartWC) * timescale / duration);
5793 return {
5794 start: Math.max(0, availableStart),
5795 end: typeof endNumber === 'number' ? endNumber : Math.min(segmentCount, availableEnd)
5796 };
5797 }
5798 };
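  // Behavior sketch (not invoked anywhere in this bundle): a static MPD with
  // 2-second segments (duration 2 at a timescale of 1) and a 10-second source
  // yields numbers 0 (inclusive) through 5 (exclusive).
  var exampleStaticSegmentRange = function exampleStaticSegmentRange() {
    return segmentRange.static({
      duration: 2,
      timescale: 1,
      sourceDuration: 10
    });
    // => { start: 0, end: 5 }
  };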
5799 /**
5800 * Maps a range of numbers to objects with information needed to build the corresponding
5801 * segment list
5802 *
5803 * @name toSegmentsCallback
5804 * @function
5805 * @param {number} number
5806 * Number of the segment
5807 * @param {number} index
5808 * Index of the number in the range list
5809 * @return {{ number: number, duration: number, timeline: number, time: number }}
5810 * Object with segment timing and duration info
5811 */
5812
5813 /**
5814 * Returns a callback for Array.prototype.map for mapping a range of numbers to
5815 * information needed to build the segment list.
5816 *
5817 * @param {Object} attributes
5818 * Inherited MPD attributes
5819 * @return {toSegmentsCallback}
5820 * Callback map function
5821 */
5822
5823 var toSegments = function toSegments(attributes) {
5824 return function (number) {
5825 var duration = attributes.duration,
5826 _attributes$timescale3 = attributes.timescale,
5827 timescale = _attributes$timescale3 === void 0 ? 1 : _attributes$timescale3,
5828 periodStart = attributes.periodStart,
5829 _attributes$startNumb = attributes.startNumber,
5830 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb;
5831 return {
5832 number: startNumber + number,
5833 duration: duration / timescale,
5834 timeline: periodStart,
5835 time: number * duration
5836 };
5837 };
5838 };
5839 /**
5840 * Returns a list of objects containing segment timing and duration info used for
5841 * building the list of segments. This uses the @duration attribute specified
5842 * in the MPD manifest to derive the range of segments.
5843 *
5844 * @param {Object} attributes
5845 * Inherited MPD attributes
5846 * @return {{number: number, duration: number, time: number, timeline: number}[]}
5847 * List of Objects with segment timing and duration info
5848 */
5849
5850
5851 var parseByDuration = function parseByDuration(attributes) {
5852 var type = attributes.type,
5853 duration = attributes.duration,
5854 _attributes$timescale4 = attributes.timescale,
5855 timescale = _attributes$timescale4 === void 0 ? 1 : _attributes$timescale4,
5856 periodDuration = attributes.periodDuration,
5857 sourceDuration = attributes.sourceDuration;
5858
5859 var _segmentRange$type = segmentRange[type](attributes),
5860 start = _segmentRange$type.start,
5861 end = _segmentRange$type.end;
5862
5863 var segments = range(start, end).map(toSegments(attributes));
5864
5865 if (type === 'static') {
5866 var index = segments.length - 1; // section is either a period or the full source
5867
5868 var sectionDuration = typeof periodDuration === 'number' ? periodDuration : sourceDuration; // final segment may be less than full segment duration
5869
5870 segments[index].duration = sectionDuration - duration / timescale * index;
5871 }
5872
5873 return segments;
5874 };
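  // Behavior sketch (not invoked anywhere in this bundle): with
  // @duration-based timing, a 5-second static source of 2-second segments
  // produces three segments, the final one shortened to the remaining second.
  var exampleParseByDuration = function exampleParseByDuration() {
    return parseByDuration({
      type: 'static',
      duration: 2,
      timescale: 1,
      sourceDuration: 5,
      periodStart: 0
    });
    // => [{ number: 1, duration: 2, timeline: 0, time: 0 },
    //     { number: 2, duration: 2, timeline: 0, time: 2 },
    //     { number: 3, duration: 1, timeline: 0, time: 4 }]
  };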
5875 /**
5876 * Translates SegmentBase (DASH spec section 5.3.9.3.2) into a set of segments.
5877 * SegmentBase addresses the media as a single URL, so this produces one
5878 * segment spanning the period (or source) duration.
5879 *
5880 * @param {Object} attributes
5881 * Object containing all inherited attributes from parent elements with attribute
5882 * names as keys
5883 * @return {Object.<Array>} list of segments
5884 */
5885
5886
5887 var segmentsFromBase = function segmentsFromBase(attributes) {
5888 var baseUrl = attributes.baseUrl,
5889 _attributes$initializ = attributes.initialization,
5890 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ,
5891 sourceDuration = attributes.sourceDuration,
5892 _attributes$indexRang = attributes.indexRange,
5893 indexRange = _attributes$indexRang === void 0 ? '' : _attributes$indexRang,
5894 periodStart = attributes.periodStart,
5895 presentationTime = attributes.presentationTime,
5896 _attributes$number = attributes.number,
5897 number = _attributes$number === void 0 ? 0 : _attributes$number,
5898 duration = attributes.duration; // base url is required for SegmentBase to work, per spec (Section 5.3.9.2.1)
5899
5900 if (!baseUrl) {
5901 throw new Error(errors.NO_BASE_URL);
5902 }
5903
5904 var initSegment = urlTypeToSegment({
5905 baseUrl: baseUrl,
5906 source: initialization.sourceURL,
5907 range: initialization.range
5908 });
5909 var segment = urlTypeToSegment({
5910 baseUrl: baseUrl,
5911 source: baseUrl,
5912 indexRange: indexRange
5913 });
5914 segment.map = initSegment; // If there is a duration, use it, otherwise use the given duration of the source
5915 // (since SegmentBase is only for one total segment)
5916
5917 if (duration) {
5918 var segmentTimeInfo = parseByDuration(attributes);
5919
5920 if (segmentTimeInfo.length) {
5921 segment.duration = segmentTimeInfo[0].duration;
5922 segment.timeline = segmentTimeInfo[0].timeline;
5923 }
5924 } else if (sourceDuration) {
5925 segment.duration = sourceDuration;
5926 segment.timeline = periodStart;
5927 } // If presentation time is provided, these segments are being generated by SIDX
5928 // references, and should use the time provided. For the general case of SegmentBase,
5929 // there should only be one segment in the period, so its presentation time is the same
5930 // as its period start.
5931
5932
5933 segment.presentationTime = presentationTime || periodStart;
5934 segment.number = number;
5935 return [segment];
5936 };
5937 /**
5938 * Given a playlist, a sidx box, and a baseUrl, update the segment list of the playlist
5939 * according to the sidx information given.
5940 *
5941 * playlist.sidx has metadata about the sidx, whereas the sidx param
5942 * is the parsed sidx box itself.
5943 *
5944 * @param {Object} playlist the playlist to update the sidx information for
5945 * @param {Object} sidx the parsed sidx box
5946 * @return {Object} the playlist object with the updated sidx information
5947 */
5948
5949
5950 var addSidxSegmentsToPlaylist$1 = function addSidxSegmentsToPlaylist(playlist, sidx, baseUrl) {
5951 // Retain init segment information
5952 var initSegment = playlist.sidx.map ? playlist.sidx.map : null; // Retain source duration from initial main manifest parsing
5953
5954 var sourceDuration = playlist.sidx.duration; // Retain source timeline
5955
5956 var timeline = playlist.timeline || 0;
5957 var sidxByteRange = playlist.sidx.byterange;
5958 var sidxEnd = sidxByteRange.offset + sidxByteRange.length; // Retain timescale of the parsed sidx
5959
5960 var timescale = sidx.timescale; // referenceType 1 refers to other sidx boxes
5961
5962 var mediaReferences = sidx.references.filter(function (r) {
5963 return r.referenceType !== 1;
5964 });
5965 var segments = [];
5966 var type = playlist.endList ? 'static' : 'dynamic';
5967 var periodStart = playlist.sidx.timeline;
5968 var presentationTime = periodStart;
5969 var number = playlist.mediaSequence || 0; // firstOffset is the offset from the end of the sidx box
5970
5971 var startIndex; // eslint-disable-next-line
5972
5973 if (typeof sidx.firstOffset === 'bigint') {
5974 startIndex = window.BigInt(sidxEnd) + sidx.firstOffset;
5975 } else {
5976 startIndex = sidxEnd + sidx.firstOffset;
5977 }
5978
5979 for (var i = 0; i < mediaReferences.length; i++) {
5980 var reference = mediaReferences[i]; // index the filtered list so referenceType 1 (sidx) boxes stay excluded; size of the referenced (sub)segment
5981
5982 var size = reference.referencedSize; // duration of the referenced (sub)segment, in the timescale
5983 // this will be converted to seconds when generating segments
5984
5985 var duration = reference.subsegmentDuration; // should be an inclusive range
5986
5987 var endIndex = void 0; // eslint-disable-next-line
5988
5989 if (typeof startIndex === 'bigint') {
5990 endIndex = startIndex + window.BigInt(size) - window.BigInt(1);
5991 } else {
5992 endIndex = startIndex + size - 1;
5993 }
5994
5995 var indexRange = startIndex + "-" + endIndex;
5996 var attributes = {
5997 baseUrl: baseUrl,
5998 timescale: timescale,
5999 timeline: timeline,
6000 periodStart: periodStart,
6001 presentationTime: presentationTime,
6002 number: number,
6003 duration: duration,
6004 sourceDuration: sourceDuration,
6005 indexRange: indexRange,
6006 type: type
6007 };
6008 var segment = segmentsFromBase(attributes)[0];
6009
6010 if (initSegment) {
6011 segment.map = initSegment;
6012 }
6013
6014 segments.push(segment);
6015
6016 if (typeof startIndex === 'bigint') {
6017 startIndex += window.BigInt(size);
6018 } else {
6019 startIndex += size;
6020 }
6021
6022 presentationTime += duration / timescale;
6023 number++;
6024 }
6025
6026 playlist.segments = segments;
6027 return playlist;
6028 };
6029
6030 var SUPPORTED_MEDIA_TYPES = ['AUDIO', 'SUBTITLES']; // allow one 60fps frame as leniency (arbitrarily chosen)
6031
6032 var TIME_FUDGE = 1 / 60;
6033 /**
6034 * Given a list of timelineStarts, combines, dedupes, and sorts them.
6035 *
6036 * @param {TimelineStart[]} timelineStarts - list of timeline starts
6037 *
6038 * @return {TimelineStart[]} the combined and deduped timeline starts
6039 */
6040
6041 var getUniqueTimelineStarts = function getUniqueTimelineStarts(timelineStarts) {
6042 return union(timelineStarts, function (_ref) {
6043 var timeline = _ref.timeline;
6044 return timeline;
6045 }).sort(function (a, b) {
6046 return a.timeline > b.timeline ? 1 : -1;
6047 });
6048 };
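  // Behavior sketch (not invoked anywhere in this bundle): duplicate
  // timelines across playlists collapse to a single entry, sorted by
  // timeline.
  var exampleUniqueTimelineStarts = function exampleUniqueTimelineStarts() {
    return getUniqueTimelineStarts([
      [{ start: 18, timeline: 18 }, { start: 0, timeline: 0 }],
      [{ start: 18, timeline: 18 }]
    ]);
    // => [{ start: 0, timeline: 0 }, { start: 18, timeline: 18 }]
  };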
6049 /**
6050 * Finds the playlist with the matching NAME attribute.
6051 *
6052 * @param {Array} playlists - playlists to search through
6053 * @param {string} name - the NAME attribute to search for
6054 *
6055 * @return {Object|null} the matching playlist object, or null
6056 */
6057
6058
6059 var findPlaylistWithName = function findPlaylistWithName(playlists, name) {
6060 for (var i = 0; i < playlists.length; i++) {
6061 if (playlists[i].attributes.NAME === name) {
6062 return playlists[i];
6063 }
6064 }
6065
6066 return null;
6067 };
6068 /**
6069 * Gets a flattened array of media group playlists.
6070 *
6071 * @param {Object} manifest - the main manifest object
6072 *
6073 * @return {Array} the media group playlists
6074 */
6075
6076
6077 var getMediaGroupPlaylists = function getMediaGroupPlaylists(manifest) {
6078 var mediaGroupPlaylists = [];
6079 forEachMediaGroup(manifest, SUPPORTED_MEDIA_TYPES, function (properties, type, group, label) {
6080 mediaGroupPlaylists = mediaGroupPlaylists.concat(properties.playlists || []);
6081 });
6082 return mediaGroupPlaylists;
6083 };
6084 /**
6085 * Updates the playlist's media sequence numbers.
6086 *
6087 * @param {Object} config - options object
6088 * @param {Object} config.playlist - the playlist to update
6089 * @param {number} config.mediaSequence - the mediaSequence number to start with
6090 */
6091
6092
6093 var updateMediaSequenceForPlaylist = function updateMediaSequenceForPlaylist(_ref2) {
6094 var playlist = _ref2.playlist,
6095 mediaSequence = _ref2.mediaSequence;
6096 playlist.mediaSequence = mediaSequence;
6097 playlist.segments.forEach(function (segment, index) {
6098 segment.number = playlist.mediaSequence + index;
6099 });
6100 };
6101 /**
6102 * Updates the media and discontinuity sequence numbers of newPlaylists given oldPlaylists
6103 * and a complete list of timeline starts.
6104 *
6105 * If no matching playlist is found, only the discontinuity sequence number of the playlist
6106 * will be updated.
6107 *
6108 * Since early available timelines are not supported, at least one segment must be present.
6109 *
6110 * @param {Object} config - options object
6111 * @param {Object[]} oldPlaylists - the old playlists to use as a reference
6112 * @param {Object[]} newPlaylists - the new playlists to update
6113 * @param {TimelineStart[]} timelineStarts - all timelineStarts seen in the stream to this point
6114 */
6115
6116
6117 var updateSequenceNumbers = function updateSequenceNumbers(_ref3) {
6118 var oldPlaylists = _ref3.oldPlaylists,
6119 newPlaylists = _ref3.newPlaylists,
6120 timelineStarts = _ref3.timelineStarts;
6121 newPlaylists.forEach(function (playlist) {
6122 playlist.discontinuitySequence = findIndex(timelineStarts, function (_ref4) {
6123 var timeline = _ref4.timeline;
6124 return timeline === playlist.timeline;
6125 }); // Playlist NAMEs come from DASH Representation IDs, which are mandatory
6126 // (see ISO_23009-1-2012 5.3.5.2).
6127 //
6128 // If the same Representation existed in a prior Period, it will retain the same NAME.
6129
6130 var oldPlaylist = findPlaylistWithName(oldPlaylists, playlist.attributes.NAME);
6131
6132 if (!oldPlaylist) {
6133 // Since this is a new playlist, the media sequence values can start from 0 without
6134 // consequence.
6135 return;
6136 } // TODO better support for live SIDX
6137 //
6138 // As of this writing, mpd-parser does not support multiperiod SIDX (in live or VOD).
6139 // This is evident by a playlist only having a single SIDX reference. In a multiperiod
6140 // playlist there would need to be multiple SIDX references. In addition, live SIDX is
6141 // not supported when the SIDX properties change on refreshes.
6142 //
6143 // In the future, if support needs to be added, the merging logic here can be called
6144 // after SIDX references are resolved. For now, exit early to prevent exceptions being
6145 // thrown due to undefined references.
6146
6147
6148 if (playlist.sidx) {
6149 return;
6150 } // Since we don't yet support early available timelines, we don't need to support
6151 // playlists with no segments.
6152
6153
6154 var firstNewSegment = playlist.segments[0];
6155 var oldMatchingSegmentIndex = findIndex(oldPlaylist.segments, function (oldSegment) {
6156 return Math.abs(oldSegment.presentationTime - firstNewSegment.presentationTime) < TIME_FUDGE;
6157 }); // No matching segment from the old playlist means the entire playlist was refreshed.
6158 // In this case the media sequence should account for this update, and the new segments
6159 // should be marked as discontinuous from the prior content, since the last prior
6160 // timeline was removed.
6161
6162 if (oldMatchingSegmentIndex === -1) {
6163 updateMediaSequenceForPlaylist({
6164 playlist: playlist,
6165 mediaSequence: oldPlaylist.mediaSequence + oldPlaylist.segments.length
6166 });
6167 playlist.segments[0].discontinuity = true;
6168 playlist.discontinuityStarts.unshift(0); // No matching segment does not necessarily mean there's missing content.
6169 //
6170 // If the new playlist's timeline is the same as the last seen segment's timeline,
6171 // then a discontinuity can be added to identify that there's potentially missing
6172 // content. If there's no missing content, the discontinuity should still be rather
6173 // harmless. It's possible that, if segment durations are accurate enough, the
6174 // existence of a gap can be determined using the presentation times and durations,
6175 // but if the segment timing info is off, it may introduce more problems than simply
6176 // adding the discontinuity.
6177 //
6178 // If the new playlist's timeline is different from the last seen segment's timeline,
6179 // then a discontinuity can be added to identify that this is the first seen segment
6180 // of a new timeline. However, the logic at the start of this function that
6181 // determined the discontinuity sequence by timeline index is now off by one (the
6182 // discontinuity of the newest timeline hasn't yet fallen off the manifest...since
6183 // we added it), so the discontinuity sequence must be decremented.
6184 //
6185 // A period may also have a duration of zero, so the case of no segments is handled
6186 // here even though we don't yet support early available periods.
6187
6188 if (!oldPlaylist.segments.length && playlist.timeline > oldPlaylist.timeline || oldPlaylist.segments.length && playlist.timeline > oldPlaylist.segments[oldPlaylist.segments.length - 1].timeline) {
6189 playlist.discontinuitySequence--;
6190 }
6191
6192 return;
6193 } // If the first segment matched with a prior segment on a discontinuity (it's matching
6194 // on the first segment of a period), then the discontinuitySequence shouldn't be the
6195 // timeline's matching one, but instead should be the one prior, and the first segment
6196 // of the new manifest should be marked with a discontinuity.
6197 //
6198 // The reason for this special case is that discontinuity sequence shows how many
6199 // discontinuities have fallen off of the playlist, and discontinuities are marked on
6200 // the first segment of a new "timeline." Because of this, while DASH will retain that
6201 // Period while the "timeline" exists, HLS keeps track of it via the discontinuity
6202 // sequence, and that first segment is an indicator, but can be removed before that
6203 // timeline is gone.
6204
6205
6206 var oldMatchingSegment = oldPlaylist.segments[oldMatchingSegmentIndex];
6207
6208 if (oldMatchingSegment.discontinuity && !firstNewSegment.discontinuity) {
6209 firstNewSegment.discontinuity = true;
6210 playlist.discontinuityStarts.unshift(0);
6211 playlist.discontinuitySequence--;
6212 }
6213
6214 updateMediaSequenceForPlaylist({
6215 playlist: playlist,
6216 mediaSequence: oldPlaylist.segments[oldMatchingSegmentIndex].number
6217 });
6218 });
6219 };
6220 /**
6221 * Given an old parsed manifest object and a new parsed manifest object, updates the
6222 * sequence and timing values within the new manifest to ensure that it lines up with the
6223 * old.
6224 *
6225 * @param {Object} oldManifest - the old main manifest object
6226 * @param {Object} newManifest - the new main manifest object
6227 *
6228 * @return {Object} the updated new manifest object
6229 */
6230
6231
6232 var positionManifestOnTimeline = function positionManifestOnTimeline(_ref5) {
6233 var oldManifest = _ref5.oldManifest,
6234 newManifest = _ref5.newManifest; // Starting from v4.1.2 of the IOP, section 4.4.3.3 states:
6235 //
6236 // "MPD@availabilityStartTime and Period@start shall not be changed over MPD updates."
6237 //
6238 // This was added from https://github.com/Dash-Industry-Forum/DASH-IF-IOP/issues/160
6239 //
6240 // Because of this change, and the difficulty of supporting periods with changing start
6241 // times, periods with changing start times are not supported. This makes the logic much
6242 // simpler, since periods with the same start time can be considered the same period
6243 // across refreshes.
6244 //
6245 // To give an example as to the difficulty of handling periods where the start time may
6246 // change, if a single period manifest is refreshed with another manifest with a single
6247 // period, and both the start and end times are increased, then the only way to determine
6248 // if it's a new period or an old one that has changed is to look through the segments of
6249 // each playlist and determine the presentation time bounds to find a match. In addition,
6250 // if the period start changed to exceed the old period end, then there would be no
6251 // match, and it would not be possible to determine whether the refreshed period is a new
6252 // one or the old one.
6253
6254 var oldPlaylists = oldManifest.playlists.concat(getMediaGroupPlaylists(oldManifest));
6255 var newPlaylists = newManifest.playlists.concat(getMediaGroupPlaylists(newManifest)); // Save all seen timelineStarts to the new manifest. Although this potentially means that
6256 // there's a "memory leak" in that it will never stop growing, in reality, only a couple
6257 // of properties are saved for each seen Period. Even long running live streams won't
6258 // generate too many Periods, unless the stream is watched for decades. In the future,
6259 // this can be optimized by mapping to discontinuity sequence numbers for each timeline,
6260 // but it may not become an issue, and the additional info can be useful for debugging.
6261
6262 newManifest.timelineStarts = getUniqueTimelineStarts([oldManifest.timelineStarts, newManifest.timelineStarts]);
6263 updateSequenceNumbers({
6264 oldPlaylists: oldPlaylists,
6265 newPlaylists: newPlaylists,
6266 timelineStarts: newManifest.timelineStarts
6267 });
6268 return newManifest;
6269 };
6270
6271 var generateSidxKey = function generateSidxKey(sidx) {
6272 return sidx && sidx.uri + '-' + byteRangeToString(sidx.byterange);
6273 };
6274
6275 var mergeDiscontiguousPlaylists = function mergeDiscontiguousPlaylists(playlists) {
6276 var mergedPlaylists = values(playlists.reduce(function (acc, playlist) {
6277 // assuming playlist IDs are the same across periods
6278 // TODO: handle multiperiod where representation sets are not the same
6279 // across periods
6280 var name = playlist.attributes.id + (playlist.attributes.lang || '');
6281
6282 if (!acc[name]) {
6283 // First Period
6284 acc[name] = playlist;
6285 acc[name].attributes.timelineStarts = [];
6286 } else {
6287 // Subsequent Periods
6288 if (playlist.segments) {
6289 var _acc$name$segments; // first segment of subsequent periods signals a discontinuity
6290
6291
6292 if (playlist.segments[0]) {
6293 playlist.segments[0].discontinuity = true;
6294 }
6295
6296 (_acc$name$segments = acc[name].segments).push.apply(_acc$name$segments, playlist.segments);
6297 } // bubble up contentProtection; this assumes all DRM content
6298 // has the same contentProtection
6299
6300
6301 if (playlist.attributes.contentProtection) {
6302 acc[name].attributes.contentProtection = playlist.attributes.contentProtection;
6303 }
6304 }
6305
6306 acc[name].attributes.timelineStarts.push({
6307 // Although they represent the same number, it's important to have both to make it
6308 // compatible with HLS potentially having a similar attribute.
6309 start: playlist.attributes.periodStart,
6310 timeline: playlist.attributes.periodStart
6311 });
6312 return acc;
6313 }, {}));
6314 return mergedPlaylists.map(function (playlist) {
6315 playlist.discontinuityStarts = findIndexes(playlist.segments || [], 'discontinuity');
6316 return playlist;
6317 });
6318 };
6319
6320 var addSidxSegmentsToPlaylist = function addSidxSegmentsToPlaylist(playlist, sidxMapping) {
6321 var sidxKey = generateSidxKey(playlist.sidx);
6322 var sidxMatch = sidxKey && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx;
6323
6324 if (sidxMatch) {
6325 addSidxSegmentsToPlaylist$1(playlist, sidxMatch, playlist.sidx.resolvedUri);
6326 }
6327
6328 return playlist;
6329 };
6330
6331 var addSidxSegmentsToPlaylists = function addSidxSegmentsToPlaylists(playlists, sidxMapping) {
6332 if (sidxMapping === void 0) {
6333 sidxMapping = {};
6334 }
6335
6336 if (!Object.keys(sidxMapping).length) {
6337 return playlists;
6338 }
6339
6340 for (var i in playlists) {
6341 playlists[i] = addSidxSegmentsToPlaylist(playlists[i], sidxMapping);
6342 }
6343
6344 return playlists;
6345 };
6346
6347 var formatAudioPlaylist = function formatAudioPlaylist(_ref, isAudioOnly) {
6348 var _attributes;
6349
6350 var attributes = _ref.attributes,
6351 segments = _ref.segments,
6352 sidx = _ref.sidx,
6353 mediaSequence = _ref.mediaSequence,
6354 discontinuitySequence = _ref.discontinuitySequence,
6355 discontinuityStarts = _ref.discontinuityStarts;
6356 var playlist = {
6357 attributes: (_attributes = {
6358 NAME: attributes.id,
6359 BANDWIDTH: attributes.bandwidth,
6360 CODECS: attributes.codecs
6361 }, _attributes['PROGRAM-ID'] = 1, _attributes),
6362 uri: '',
6363 endList: attributes.type === 'static',
6364 timeline: attributes.periodStart,
6365 resolvedUri: '',
6366 targetDuration: attributes.duration,
6367 discontinuitySequence: discontinuitySequence,
6368 discontinuityStarts: discontinuityStarts,
6369 timelineStarts: attributes.timelineStarts,
6370 mediaSequence: mediaSequence,
6371 segments: segments
6372 };
6373
6374 if (attributes.contentProtection) {
6375 playlist.contentProtection = attributes.contentProtection;
6376 }
6377
6378 if (sidx) {
6379 playlist.sidx = sidx;
6380 }
6381
6382 if (isAudioOnly) {
6383 playlist.attributes.AUDIO = 'audio';
6384 playlist.attributes.SUBTITLES = 'subs';
6385 }
6386
6387 return playlist;
6388 };
6389
6390 var formatVttPlaylist = function formatVttPlaylist(_ref2) {
6391 var _m3u8Attributes;
6392
6393 var attributes = _ref2.attributes,
6394 segments = _ref2.segments,
6395 mediaSequence = _ref2.mediaSequence,
6396 discontinuityStarts = _ref2.discontinuityStarts,
6397 discontinuitySequence = _ref2.discontinuitySequence;
6398
6399 if (typeof segments === 'undefined') {
6400 // vtt tracks may use a single file in BaseURL
6401 segments = [{
6402 uri: attributes.baseUrl,
6403 timeline: attributes.periodStart,
6404 resolvedUri: attributes.baseUrl || '',
6405 duration: attributes.sourceDuration,
6406 number: 0
6407 }]; // targetDuration should be the same duration as the only segment
6408
6409 attributes.duration = attributes.sourceDuration;
6410 }
6411
6412 var m3u8Attributes = (_m3u8Attributes = {
6413 NAME: attributes.id,
6414 BANDWIDTH: attributes.bandwidth
6415 }, _m3u8Attributes['PROGRAM-ID'] = 1, _m3u8Attributes);
6416
6417 if (attributes.codecs) {
6418 m3u8Attributes.CODECS = attributes.codecs;
6419 }
6420
6421 return {
6422 attributes: m3u8Attributes,
6423 uri: '',
6424 endList: attributes.type === 'static',
6425 timeline: attributes.periodStart,
6426 resolvedUri: attributes.baseUrl || '',
6427 targetDuration: attributes.duration,
6428 timelineStarts: attributes.timelineStarts,
6429 discontinuityStarts: discontinuityStarts,
6430 discontinuitySequence: discontinuitySequence,
6431 mediaSequence: mediaSequence,
6432 segments: segments
6433 };
6434 };
6435
6436 var organizeAudioPlaylists = function organizeAudioPlaylists(playlists, sidxMapping, isAudioOnly) {
6437 if (sidxMapping === void 0) {
6438 sidxMapping = {};
6439 }
6440
6441 if (isAudioOnly === void 0) {
6442 isAudioOnly = false;
6443 }
6444
6445 var mainPlaylist;
6446 var formattedPlaylists = playlists.reduce(function (a, playlist) {
6447 var role = playlist.attributes.role && playlist.attributes.role.value || '';
6448 var language = playlist.attributes.lang || '';
6449 var label = playlist.attributes.label || 'main';
6450
6451 if (language && !playlist.attributes.label) {
6452 var roleLabel = role ? " (" + role + ")" : '';
6453 label = "" + playlist.attributes.lang + roleLabel;
6454 }
6455
6456 if (!a[label]) {
6457 a[label] = {
6458 language: language,
6459 autoselect: true,
6460 default: role === 'main',
6461 playlists: [],
6462 uri: ''
6463 };
6464 }
6465
6466 var formatted = addSidxSegmentsToPlaylist(formatAudioPlaylist(playlist, isAudioOnly), sidxMapping);
6467 a[label].playlists.push(formatted);
6468
6469 if (typeof mainPlaylist === 'undefined' && role === 'main') {
6470 mainPlaylist = playlist;
6471 mainPlaylist.default = true;
6472 }
6473
6474 return a;
6475 }, {}); // if no playlists have role "main", mark the first as main
6476
6477 if (!mainPlaylist) {
6478 var firstLabel = Object.keys(formattedPlaylists)[0];
6479 formattedPlaylists[firstLabel].default = true;
6480 }
6481
6482 return formattedPlaylists;
6483 };
6484
6485 var organizeVttPlaylists = function organizeVttPlaylists(playlists, sidxMapping) {
6486 if (sidxMapping === void 0) {
6487 sidxMapping = {};
6488 }
6489
6490 return playlists.reduce(function (a, playlist) {
6491 var label = playlist.attributes.lang || 'text';
6492
6493 if (!a[label]) {
6494 a[label] = {
6495 language: label,
6496 default: false,
6497 autoselect: false,
6498 playlists: [],
6499 uri: ''
6500 };
6501 }
6502
6503 a[label].playlists.push(addSidxSegmentsToPlaylist(formatVttPlaylist(playlist), sidxMapping));
6504 return a;
6505 }, {});
6506 };
6507
6508 var organizeCaptionServices = function organizeCaptionServices(captionServices) {
6509 return captionServices.reduce(function (svcObj, svc) {
6510 if (!svc) {
6511 return svcObj;
6512 }
6513
6514 svc.forEach(function (service) {
6515 var channel = service.channel,
6516 language = service.language;
6517 svcObj[language] = {
6518 autoselect: false,
6519 default: false,
6520 instreamId: channel,
6521 language: language
6522 };
6523
6524 if (service.hasOwnProperty('aspectRatio')) {
6525 svcObj[language].aspectRatio = service.aspectRatio;
6526 }
6527
6528 if (service.hasOwnProperty('easyReader')) {
6529 svcObj[language].easyReader = service.easyReader;
6530 }
6531
6532 if (service.hasOwnProperty('3D')) {
6533 svcObj[language]['3D'] = service['3D'];
6534 }
6535 });
6536 return svcObj;
6537 }, {});
6538 };
6539
6540 var formatVideoPlaylist = function formatVideoPlaylist(_ref3) {
6541 var _attributes2;
6542
6543 var attributes = _ref3.attributes,
6544 segments = _ref3.segments,
6545 sidx = _ref3.sidx,
6546 discontinuityStarts = _ref3.discontinuityStarts;
6547 var playlist = {
6548 attributes: (_attributes2 = {
6549 NAME: attributes.id,
6550 AUDIO: 'audio',
6551 SUBTITLES: 'subs',
6552 RESOLUTION: {
6553 width: attributes.width,
6554 height: attributes.height
6555 },
6556 CODECS: attributes.codecs,
6557 BANDWIDTH: attributes.bandwidth
6558 }, _attributes2['PROGRAM-ID'] = 1, _attributes2),
6559 uri: '',
6560 endList: attributes.type === 'static',
6561 timeline: attributes.periodStart,
6562 resolvedUri: '',
6563 targetDuration: attributes.duration,
6564 discontinuityStarts: discontinuityStarts,
6565 timelineStarts: attributes.timelineStarts,
6566 segments: segments
6567 };
6568
6569 if (attributes.frameRate) {
6570 playlist.attributes['FRAME-RATE'] = attributes.frameRate;
6571 }
6572
6573 if (attributes.contentProtection) {
6574 playlist.contentProtection = attributes.contentProtection;
6575 }
6576
6577 if (sidx) {
6578 playlist.sidx = sidx;
6579 }
6580
6581 return playlist;
6582 };
6583
6584 var videoOnly = function videoOnly(_ref4) {
6585 var attributes = _ref4.attributes;
6586 return attributes.mimeType === 'video/mp4' || attributes.mimeType === 'video/webm' || attributes.contentType === 'video';
6587 };
6588
6589 var audioOnly = function audioOnly(_ref5) {
6590 var attributes = _ref5.attributes;
6591 return attributes.mimeType === 'audio/mp4' || attributes.mimeType === 'audio/webm' || attributes.contentType === 'audio';
6592 };
6593
6594 var vttOnly = function vttOnly(_ref6) {
6595 var attributes = _ref6.attributes;
6596 return attributes.mimeType === 'text/vtt' || attributes.contentType === 'text';
6597 };
6598 /**
6599 * Contains start and timeline properties denoting a timeline start. For DASH, these will
6600 * be the same number.
6601 *
6602 * @typedef {Object} TimelineStart
6603 * @property {number} start - the start time of the timeline
6604 * @property {number} timeline - the timeline number
6605 */
6606
6607 /**
6608 * Adds appropriate media and discontinuity sequence values to the segments and playlists.
6609 *
6610 * Throughout mpd-parser, the `number` attribute is used in relation to `startNumber`, a
6611 * DASH-specific attribute used in constructing segment URIs from templates. However, from
6612 * an HLS perspective, the `number` attribute on a segment would be its `mediaSequence`
6613 * value, which should start at the original media sequence value (or 0) and increment by 1
6614 * for each segment thereafter. Since DASH's `startNumber` values are independent per
6615 * period, it doesn't make sense to use it for `number`. Instead, assume everything starts
6616 * from a 0 mediaSequence value and increment from there.
6617 *
6618 * Note that VHS currently doesn't use the `number` property, but it can be helpful for
6619 * debugging and making sense of the manifest.
6620 *
6621 * For live playlists, to account for values increasing in manifests when periods are
6622 * removed on refreshes, merging logic should be used to update the numbers to their
6623 * appropriate values (to ensure they're sequential and increasing).
6624 *
6625 * @param {Object[]} playlists - the playlists to update
6626 * @param {TimelineStart[]} timelineStarts - the timeline starts for the manifest
6627 */
6628
6629
6630 var addMediaSequenceValues = function addMediaSequenceValues(playlists, timelineStarts) {
6631 // increment all segments sequentially
6632 playlists.forEach(function (playlist) {
6633 playlist.mediaSequence = 0;
6634 playlist.discontinuitySequence = findIndex(timelineStarts, function (_ref7) {
6635 var timeline = _ref7.timeline;
6636 return timeline === playlist.timeline;
6637 });
6638
6639 if (!playlist.segments) {
6640 return;
6641 }
6642
6643 playlist.segments.forEach(function (segment, index) {
6644 segment.number = index;
6645 });
6646 });
6647 };
6648 /**
6649 * Given a media group object, flattens all playlists within the media group into a single
6650 * array.
6651 *
6652 * @param {Object} mediaGroupObject - the media group object
6653 *
6654 * @return {Object[]}
6655 * The media group playlists
6656 */
6657
6658
6659 var flattenMediaGroupPlaylists = function flattenMediaGroupPlaylists(mediaGroupObject) {
6660 if (!mediaGroupObject) {
6661 return [];
6662 }
6663
6664 return Object.keys(mediaGroupObject).reduce(function (acc, label) {
6665 var labelContents = mediaGroupObject[label];
6666 return acc.concat(labelContents.playlists);
6667 }, []);
6668 };
6669
6670 var toM3u8 = function toM3u8(_ref8) {
6671 var _mediaGroups;
6672
6673 var dashPlaylists = _ref8.dashPlaylists,
6674 locations = _ref8.locations,
6675 _ref8$sidxMapping = _ref8.sidxMapping,
6676 sidxMapping = _ref8$sidxMapping === void 0 ? {} : _ref8$sidxMapping,
6677 previousManifest = _ref8.previousManifest;
6678
6679 if (!dashPlaylists.length) {
6680 return {};
6681 } // grab all main manifest attributes
6682
6683
6684 var _dashPlaylists$0$attr = dashPlaylists[0].attributes,
6685 duration = _dashPlaylists$0$attr.sourceDuration,
6686 type = _dashPlaylists$0$attr.type,
6687 suggestedPresentationDelay = _dashPlaylists$0$attr.suggestedPresentationDelay,
6688 minimumUpdatePeriod = _dashPlaylists$0$attr.minimumUpdatePeriod;
6689 var videoPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(videoOnly)).map(formatVideoPlaylist);
6690 var audioPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(audioOnly));
6691 var vttPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(vttOnly));
6692 var captions = dashPlaylists.map(function (playlist) {
6693 return playlist.attributes.captionServices;
6694 }).filter(Boolean);
6695 var manifest = {
6696 allowCache: true,
6697 discontinuityStarts: [],
6698 segments: [],
6699 endList: true,
6700 mediaGroups: (_mediaGroups = {
6701 AUDIO: {},
6702 VIDEO: {}
6703 }, _mediaGroups['CLOSED-CAPTIONS'] = {}, _mediaGroups.SUBTITLES = {}, _mediaGroups),
6704 uri: '',
6705 duration: duration,
6706 playlists: addSidxSegmentsToPlaylists(videoPlaylists, sidxMapping)
6707 };
6708
6709 if (minimumUpdatePeriod >= 0) {
6710 manifest.minimumUpdatePeriod = minimumUpdatePeriod * 1000;
6711 }
6712
6713 if (locations) {
6714 manifest.locations = locations;
6715 }
6716
6717 if (type === 'dynamic') {
6718 manifest.suggestedPresentationDelay = suggestedPresentationDelay;
6719 }
6720
6721 var isAudioOnly = manifest.playlists.length === 0;
6722 var organizedAudioGroup = audioPlaylists.length ? organizeAudioPlaylists(audioPlaylists, sidxMapping, isAudioOnly) : null;
6723 var organizedVttGroup = vttPlaylists.length ? organizeVttPlaylists(vttPlaylists, sidxMapping) : null;
6724 var formattedPlaylists = videoPlaylists.concat(flattenMediaGroupPlaylists(organizedAudioGroup), flattenMediaGroupPlaylists(organizedVttGroup));
6725 var playlistTimelineStarts = formattedPlaylists.map(function (_ref9) {
6726 var timelineStarts = _ref9.timelineStarts;
6727 return timelineStarts;
6728 });
6729 manifest.timelineStarts = getUniqueTimelineStarts(playlistTimelineStarts);
6730 addMediaSequenceValues(formattedPlaylists, manifest.timelineStarts);
6731
6732 if (organizedAudioGroup) {
6733 manifest.mediaGroups.AUDIO.audio = organizedAudioGroup;
6734 }
6735
6736 if (organizedVttGroup) {
6737 manifest.mediaGroups.SUBTITLES.subs = organizedVttGroup;
6738 }
6739
6740 if (captions.length) {
6741 manifest.mediaGroups['CLOSED-CAPTIONS'].cc = organizeCaptionServices(captions);
6742 }
6743
6744 if (previousManifest) {
6745 return positionManifestOnTimeline({
6746 oldManifest: previousManifest,
6747 newManifest: manifest
6748 });
6749 }
6750
6751 return manifest;
6752 };
6753 /**
6754 * Calculates the R (repetition) value for a live stream (for the final segment
6755 * in a manifest where the r value is negative 1)
6756 *
6757 * @param {Object} attributes
6758 * Object containing all inherited attributes from parent elements with attribute
6759 * names as keys
6760 * @param {number} time
6761 * current time (typically the total time up until the final segment)
6762 * @param {number} duration
6763 * duration property for the given <S />
6764 *
6765 * @return {number}
6766 * R value to reach the end of the given period
6767 */
6768
6769
6770 var getLiveRValue = function getLiveRValue(attributes, time, duration) {
6771 var NOW = attributes.NOW,
6772 clientOffset = attributes.clientOffset,
6773 availabilityStartTime = attributes.availabilityStartTime,
6774 _attributes$timescale = attributes.timescale,
6775 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
6776 _attributes$periodSta = attributes.periodStart,
6777 periodStart = _attributes$periodSta === void 0 ? 0 : _attributes$periodSta,
6778 _attributes$minimumUp = attributes.minimumUpdatePeriod,
6779 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp;
6780 var now = (NOW + clientOffset) / 1000;
6781 var periodStartWC = availabilityStartTime + periodStart;
6782 var periodEndWC = now + minimumUpdatePeriod;
6783 var periodDuration = periodEndWC - periodStartWC;
6784 return Math.ceil((periodDuration * timescale - time) / duration);
6785 };
6786 /**
6787 * Uses information provided by SegmentTemplate.SegmentTimeline to determine segment
6788 * timing and duration
6789 *
6790 * @param {Object} attributes
6791 * Object containing all inherited attributes from parent elements with attribute
6792 * names as keys
6793 * @param {Object[]} segmentTimeline
6794 * List of objects representing the attributes of each S element contained within
6795 * the SegmentTimeline element
6796 * @return {{number: number, duration: number, time: number, timeline: number}[]}
6797 * List of Objects with segment timing and duration info
6798 */
6799
6800
6801 var parseByTimeline = function parseByTimeline(attributes, segmentTimeline) {
6802 var type = attributes.type,
6803 _attributes$minimumUp2 = attributes.minimumUpdatePeriod,
6804 minimumUpdatePeriod = _attributes$minimumUp2 === void 0 ? 0 : _attributes$minimumUp2,
6805 _attributes$media = attributes.media,
6806 media = _attributes$media === void 0 ? '' : _attributes$media,
6807 sourceDuration = attributes.sourceDuration,
6808 _attributes$timescale2 = attributes.timescale,
6809 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
6810 _attributes$startNumb = attributes.startNumber,
6811 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb,
6812 timeline = attributes.periodStart;
6813 var segments = [];
6814 var time = -1;
6815
6816 for (var sIndex = 0; sIndex < segmentTimeline.length; sIndex++) {
6817 var S = segmentTimeline[sIndex];
6818 var duration = S.d;
6819 var repeat = S.r || 0;
6820 var segmentTime = S.t || 0;
6821
6822 if (time < 0) {
6823 // first segment
6824 time = segmentTime;
6825 }
6826
6827 if (segmentTime && segmentTime > time) {
6828 // discontinuity
6829 // TODO: How to handle this type of discontinuity
6830 // timeline++ here would treat it like an HLS discontinuity and content would
6831 // get appended without a gap
6832 // E.G.
6833 // <S t="0" d="1" />
6834 // <S d="1" />
6835 // <S d="1" />
6836 // <S t="5" d="1" />
6837 // would have $Time$ values of [0, 1, 2, 5]
6838 // should this be appended at time positions [0, 1, 2, 3] (#EXT-X-DISCONTINUITY)
6839 // or [0, 1, 2, gap, gap, 5]? (#EXT-X-GAP)
6840 // does the value of sourceDuration consider this when calculating arbitrary
6841 // negative @r repeat value?
6842 // E.G. Same elements as above with this added at the end
6843 // <S d="1" r="-1" />
6844 // with a sourceDuration of 10
6845 // Would the 2 gaps be included in the time duration calculations resulting in
6846 // 8 segments with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9] or 10 segments
6847 // with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9, 10, 11] ?
6848 time = segmentTime;
6849 }
6850
6851 var count = void 0;
6852
6853 if (repeat < 0) {
6854 var nextS = sIndex + 1;
6855
6856 if (nextS === segmentTimeline.length) {
6857 // last segment
6858 if (type === 'dynamic' && minimumUpdatePeriod > 0 && media.indexOf('$Number$') > 0) {
6859 count = getLiveRValue(attributes, time, duration);
6860 } else {
6861 // TODO: This may be incorrect depending on conclusion of TODO above
6862 count = (sourceDuration * timescale - time) / duration;
6863 }
6864 } else {
6865 count = (segmentTimeline[nextS].t - time) / duration;
6866 }
6867 } else {
6868 count = repeat + 1;
6869 }
6870
6871 var end = startNumber + segments.length + count;
6872 var number = startNumber + segments.length;
6873
6874 while (number < end) {
6875 segments.push({
6876 number: number,
6877 duration: duration / timescale,
6878 time: time,
6879 timeline: timeline
6880 });
6881 time += duration;
6882 number++;
6883 }
6884 }
6885
6886 return segments;
6887 };
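  // Behavior sketch (not invoked anywhere in this bundle): a SegmentTimeline
  // of <S t="0" d="2" r="1" /><S d="3" /> in a static 7-second presentation
  // expands to three segments.
  var exampleParseByTimeline = function exampleParseByTimeline() {
    return parseByTimeline({
      type: 'static',
      sourceDuration: 7,
      timescale: 1,
      startNumber: 1,
      periodStart: 0
    }, [{ t: 0, d: 2, r: 1 }, { d: 3 }]);
    // => [{ number: 1, duration: 2, time: 0, timeline: 0 },
    //     { number: 2, duration: 2, time: 2, timeline: 0 },
    //     { number: 3, duration: 3, time: 4, timeline: 0 }]
  };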
6888
6889 var identifierPattern = /\$([A-Za-z]*)(?:(%0)([0-9]+)d)?\$/g; // [A-Za-z] avoids the punctuation between 'Z' and 'a' that [A-z] would also match
6890 /**
6891 * Replaces template identifiers with corresponding values. To be used as the callback
6892 * for String.prototype.replace
6893 *
6894 * @name replaceCallback
6895 * @function
6896 * @param {string} match
6897 * Entire match of identifier
6898 * @param {string} identifier
6899 * Name of matched identifier
6900 * @param {string} format
6901 * Format tag string. Its presence indicates that padding is expected
6902 * @param {string} width
6903 * Desired length of the replaced value. Values less than this width shall be left
6904 * zero padded
6905 * @return {string}
6906 * Replacement for the matched identifier
6907 */
6908
6909 /**
6910 * Returns a function to be used as a callback for String.prototype.replace to replace
6911 * template identifiers
6912 *
6913 * @param {Object} values
6914 * Object containing values that shall be used to replace known identifiers
6915 * @param {number} values.RepresentationID
6916 * Value of the Representation@id attribute
6917 * @param {number} values.Number
6918 * Number of the corresponding segment
6919 * @param {number} values.Bandwidth
6920 * Value of the Representation@bandwidth attribute.
6921 * @param {number} values.Time
6922 * Timestamp value of the corresponding segment
6923 * @return {replaceCallback}
6924 * Callback to be used with String.prototype.replace to replace identifiers
6925 */
6926
6927 var identifierReplacement = function identifierReplacement(values) {
6928 return function (match, identifier, format, width) {
6929 if (match === '$$') {
6930 // escape sequence
6931 return '$';
6932 }
6933
6934 if (typeof values[identifier] === 'undefined') {
6935 return match;
6936 }
6937
6938 var value = '' + values[identifier];
6939
6940 if (identifier === 'RepresentationID') {
6941 // Format tag shall not be present with RepresentationID
6942 return value;
6943 }
6944
6945 if (!format) {
6946 width = 1;
6947 } else {
6948 width = parseInt(width, 10);
6949 }
6950
6951 if (value.length >= width) {
6952 return value;
6953 }
6954
6955 return "" + new Array(width - value.length + 1).join('0') + value;
6956 };
6957 };
6958 /**
6959 * Constructs a segment url from a template string
6960 *
6961 * @param {string} url
6962 * Template string to construct url from
6963 * @param {Object} values
6964 * Object containing values that shall be used to replace known identifiers
6965 * @param {number} values.RepresentationID
6966 * Value of the Representation@id attribute
6967 * @param {number} values.Number
6968 * Number of the corresponding segment
6969 * @param {number} values.Bandwidth
6970 * Value of the Representation@bandwidth attribute.
6971 * @param {number} values.Time
6972 * Timestamp value of the corresponding segment
6973 * @return {string}
6974 * Segment url with identifiers replaced
6975 */
6976
6977
6978 var constructTemplateUrl = function constructTemplateUrl(url, values) {
6979 return url.replace(identifierPattern, identifierReplacement(values));
6980 };
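  // Illustrative usage sketch (not invoked anywhere in this bundle):
  // expanding a $Number$ template with zero-padded width formatting. The
  // template string and values are hypothetical.
  var exampleConstructTemplateUrl = function exampleConstructTemplateUrl() {
    return constructTemplateUrl('$RepresentationID$/segment-$Number%03d$.mp4', {
      RepresentationID: 'video-1',
      Number: 7,
      Bandwidth: 5000000,
      Time: 14
    });
    // => 'video-1/segment-007.mp4'
  };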
6981 /**
6982 * Generates a list of objects containing timing and duration information about each
6983 * segment needed to generate segment uris and the complete segment object
6984 *
6985 * @param {Object} attributes
6986 * Object containing all inherited attributes from parent elements with attribute
6987 * names as keys
6988 * @param {Object[]|undefined} segmentTimeline
6989 * List of objects representing the attributes of each S element contained within
6990 * the SegmentTimeline element
6991 * @return {{number: number, duration: number, time: number, timeline: number}[]}
6992 * List of Objects with segment timing and duration info
6993 */
6994
6995
6996 var parseTemplateInfo = function parseTemplateInfo(attributes, segmentTimeline) {
6997 if (!attributes.duration && !segmentTimeline) {
6998 // if neither @duration nor SegmentTimeline is present, then there shall be exactly
6999 // one media segment
7000 return [{
7001 number: attributes.startNumber || 1,
7002 duration: attributes.sourceDuration,
7003 time: 0,
7004 timeline: attributes.periodStart
7005 }];
7006 }
7007
7008 if (attributes.duration) {
7009 return parseByDuration(attributes);
7010 }
7011
7012 return parseByTimeline(attributes, segmentTimeline);
7013 };
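  /*
   * Illustrative example (hypothetical attributes): with neither @duration
   * nor a SegmentTimeline, the entire source duration becomes one segment,
   * per the first branch above.
   *
   *   parseTemplateInfo({ sourceDuration: 600, periodStart: 0 }, undefined);
   *   // => [{ number: 1, duration: 600, time: 0, timeline: 0 }]
   */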
7014 /**
7015 * Generates a list of segments using information provided by the SegmentTemplate element
7016 *
7017 * @param {Object} attributes
7018 * Object containing all inherited attributes from parent elements with attribute
7019 * names as keys
7020 * @param {Object[]|undefined} segmentTimeline
7021 * List of objects representing the attributes of each S element contained within
7022 * the SegmentTimeline element
7023 * @return {Object[]}
7024 * List of segment objects
7025 */
7026
7027
7028 var segmentsFromTemplate = function segmentsFromTemplate(attributes, segmentTimeline) {
7029 var templateValues = {
7030 RepresentationID: attributes.id,
7031 Bandwidth: attributes.bandwidth || 0
7032 };
7033 var _attributes$initializ = attributes.initialization,
7034 initialization = _attributes$initializ === void 0 ? {
7035 sourceURL: '',
7036 range: ''
7037 } : _attributes$initializ;
7038 var mapSegment = urlTypeToSegment({
7039 baseUrl: attributes.baseUrl,
7040 source: constructTemplateUrl(initialization.sourceURL, templateValues),
7041 range: initialization.range
7042 });
7043 var segments = parseTemplateInfo(attributes, segmentTimeline);
7044 return segments.map(function (segment) {
7045 templateValues.Number = segment.number;
7046 templateValues.Time = segment.time;
7047 var uri = constructTemplateUrl(attributes.media || '', templateValues); // See DASH spec section 5.3.9.2.2
7048 // - if timescale isn't present on any level, default to 1.
7049
7050 var timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0
7051
7052 var presentationTimeOffset = attributes.presentationTimeOffset || 0;
7053 var presentationTime = // Even if the @t attribute is not specified for the segment, segment.time is
7054 // calculated in mpd-parser prior to this, so it's assumed to be available.
7055 attributes.periodStart + (segment.time - presentationTimeOffset) / timescale;
7056 var map = {
7057 uri: uri,
7058 timeline: segment.timeline,
7059 duration: segment.duration,
7060 resolvedUri: resolveUrl$1(attributes.baseUrl || '', uri),
7061 map: mapSegment,
7062 number: segment.number,
7063 presentationTime: presentationTime
7064 };
7065 return map;
7066 });
7067 };
7068 /**
7069 * Converts a <SegmentUrl> (of type URLType from the DASH spec 5.3.9.2 Table 14)
7070 * to an object that matches the output of a segment in videojs/mpd-parser
7071 *
7072 * @param {Object} attributes
7073 * Object containing all inherited attributes from parent elements with attribute
7074 * names as keys
7075 * @param {Object} segmentUrl
7076 * <SegmentURL> node to translate into a segment object
7077 * @return {Object} translated segment object
7078 */
7079
7080
7081 var SegmentURLToSegmentObject = function SegmentURLToSegmentObject(attributes, segmentUrl) {
7082 var baseUrl = attributes.baseUrl,
7083 _attributes$initializ = attributes.initialization,
7084 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ;
7085 var initSegment = urlTypeToSegment({
7086 baseUrl: baseUrl,
7087 source: initialization.sourceURL,
7088 range: initialization.range
7089 });
7090 var segment = urlTypeToSegment({
7091 baseUrl: baseUrl,
7092 source: segmentUrl.media,
7093 range: segmentUrl.mediaRange
7094 });
7095 segment.map = initSegment;
7096 return segment;
7097 };
7098 /**
7099 * Generates a list of segments using information provided by the SegmentList element
7100 * SegmentList (DASH spec section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
7101 * node should be translated into a segment.
7102 *
7103 * @param {Object} attributes
7104 * Object containing all inherited attributes from parent elements with attribute
7105 * names as keys
7106 * @param {Object[]|undefined} segmentTimeline
7107 * List of objects representing the attributes of each S element contained within
7108 * the SegmentTimeline element
7109 * @return {Object[]} list of segments
7110 */
7111
7112
7113 var segmentsFromList = function segmentsFromList(attributes, segmentTimeline) {
7114 var duration = attributes.duration,
7115 _attributes$segmentUr = attributes.segmentUrls,
7116 segmentUrls = _attributes$segmentUr === void 0 ? [] : _attributes$segmentUr,
7117 periodStart = attributes.periodStart; // Per spec (5.3.9.2.1), if neither is present there is no way to determine
7118 // segment duration; if both SegmentTimeline and @duration are defined, the manifest is out of spec.
7119
7120 if (!duration && !segmentTimeline || duration && segmentTimeline) {
7121 throw new Error(errors.SEGMENT_TIME_UNSPECIFIED);
7122 }
7123
7124 var segmentUrlMap = segmentUrls.map(function (segmentUrlObject) {
7125 return SegmentURLToSegmentObject(attributes, segmentUrlObject);
7126 });
7127 var segmentTimeInfo;
7128
7129 if (duration) {
7130 segmentTimeInfo = parseByDuration(attributes);
7131 }
7132
7133 if (segmentTimeline) {
7134 segmentTimeInfo = parseByTimeline(attributes, segmentTimeline);
7135 }
7136
7137 var segments = segmentTimeInfo.map(function (segmentTime, index) {
7138 if (segmentUrlMap[index]) {
7139 var segment = segmentUrlMap[index]; // See DASH spec section 5.3.9.2.2
7140 // - if timescale isn't present on any level, default to 1.
7141
7142 var timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0
7143
7144 var presentationTimeOffset = attributes.presentationTimeOffset || 0;
7145 segment.timeline = segmentTime.timeline;
7146 segment.duration = segmentTime.duration;
7147 segment.number = segmentTime.number;
7148 segment.presentationTime = periodStart + (segmentTime.time - presentationTimeOffset) / timescale;
7149 return segment;
7150 } // Since we're mapping we should get rid of any blank segments (in case
7151 // the given SegmentTimeline describes more elements than we have
7152 // SegmentURLs for).
7153
7154 }).filter(function (segment) {
7155 return segment;
7156 });
7157 return segments;
7158 };
7159
7160 var generateSegments = function generateSegments(_ref) {
7161 var attributes = _ref.attributes,
7162 segmentInfo = _ref.segmentInfo;
7163 var segmentAttributes;
7164 var segmentsFn;
7165
7166 if (segmentInfo.template) {
7167 segmentsFn = segmentsFromTemplate;
7168 segmentAttributes = merge(attributes, segmentInfo.template);
7169 } else if (segmentInfo.base) {
7170 segmentsFn = segmentsFromBase;
7171 segmentAttributes = merge(attributes, segmentInfo.base);
7172 } else if (segmentInfo.list) {
7173 segmentsFn = segmentsFromList;
7174 segmentAttributes = merge(attributes, segmentInfo.list);
7175 }
7176
7177 var segmentsInfo = {
7178 attributes: attributes
7179 };
7180
7181 if (!segmentsFn) {
7182 return segmentsInfo;
7183 }
7184
7185 var segments = segmentsFn(segmentAttributes, segmentInfo.segmentTimeline); // The @duration attribute will be used to determine the playlist's targetDuration which
7186 // must be in seconds. Since we've generated the segment list, we no longer need
7187 // @duration to be in @timescale units, so we can convert it here.
7188
7189 if (segmentAttributes.duration) {
7190 var _segmentAttributes = segmentAttributes,
7191 duration = _segmentAttributes.duration,
7192 _segmentAttributes$ti = _segmentAttributes.timescale,
7193 timescale = _segmentAttributes$ti === void 0 ? 1 : _segmentAttributes$ti;
7194 segmentAttributes.duration = duration / timescale;
7195 } else if (segments.length) {
7196 // if there is no @duration attribute, use the largest segment duration as
7197 // the target duration
7198 segmentAttributes.duration = segments.reduce(function (max, segment) {
7199 return Math.max(max, Math.ceil(segment.duration));
7200 }, 0);
7201 } else {
7202 segmentAttributes.duration = 0;
7203 }
7204
7205 segmentsInfo.attributes = segmentAttributes;
7206 segmentsInfo.segments = segments; // This is a sidx box without actual segment information
7207
7208 if (segmentInfo.base && segmentAttributes.indexRange) {
7209 segmentsInfo.sidx = segments[0];
7210 segmentsInfo.segments = [];
7211 }
7212
7213 return segmentsInfo;
7214 };
7215
7216 var toPlaylists = function toPlaylists(representations) {
7217 return representations.map(generateSegments);
7218 };
7219
7220 var findChildren = function findChildren(element, name) {
7221 return from(element.childNodes).filter(function (_ref) {
7222 var tagName = _ref.tagName;
7223 return tagName === name;
7224 });
7225 };
7226
7227 var getContent = function getContent(element) {
7228 return element.textContent.trim();
7229 };
7230 /**
7231 * Converts the provided string that may contain a division operation to a number.
7232 *
7233 * @param {string} value - the provided string value
7234 *
7235 * @return {number} the string parsed as a number
7236 */
7237
7238
7239 var parseDivisionValue = function parseDivisionValue(value) {
7240 return parseFloat(value.split('/').reduce(function (prev, current) {
7241 return prev / current;
7242 }));
7243 };
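  /*
   * Example: frame rates in an mpd may be written as a division, e.g.
   * "30000/1001" for NTSC video.
   *
   *   parseDivisionValue('30000/1001'); // => 29.97002997002997
   *   parseDivisionValue('25');         // => 25
   */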
7244
7245 var parseDuration = function parseDuration(str) {
7246 var SECONDS_IN_YEAR = 365 * 24 * 60 * 60;
7247 var SECONDS_IN_MONTH = 30 * 24 * 60 * 60;
7248 var SECONDS_IN_DAY = 24 * 60 * 60;
7249 var SECONDS_IN_HOUR = 60 * 60;
7250 var SECONDS_IN_MIN = 60; // P10Y10M10DT10H10M10.1S
7251
7252 var durationRegex = /P(?:(\d*)Y)?(?:(\d*)M)?(?:(\d*)D)?(?:T(?:(\d*)H)?(?:(\d*)M)?(?:([\d.]*)S)?)?/;
7253 var match = durationRegex.exec(str);
7254
7255 if (!match) {
7256 return 0;
7257 }
7258
7259 var _match$slice = match.slice(1),
7260 year = _match$slice[0],
7261 month = _match$slice[1],
7262 day = _match$slice[2],
7263 hour = _match$slice[3],
7264 minute = _match$slice[4],
7265 second = _match$slice[5];
7266
7267 return parseFloat(year || 0) * SECONDS_IN_YEAR + parseFloat(month || 0) * SECONDS_IN_MONTH + parseFloat(day || 0) * SECONDS_IN_DAY + parseFloat(hour || 0) * SECONDS_IN_HOUR + parseFloat(minute || 0) * SECONDS_IN_MIN + parseFloat(second || 0);
7268 };
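  /*
   * Example: ISO 8601 durations, parsed with the approximations above
   * (a month counts as 30 days, a year as 365 days).
   *
   *   parseDuration('PT1H30M');  // => 5400
   *   parseDuration('P1DT0.5S'); // => 86400.5
   */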
7269
7270 var parseDate = function parseDate(str) {
7271 // Date format without timezone according to ISO 8601
7272 // YYYY-MM-DDThh:mm:ss.ssssss
7273 var dateRegex = /^\d+-\d+-\d+T\d+:\d+:\d+(\.\d+)?$/; // If the date string does not specify a timezone, we must specify UTC. This is
7274 // expressed by ending with 'Z'
7275
7276 if (dateRegex.test(str)) {
7277 str += 'Z';
7278 }
7279
7280 return Date.parse(str);
7281 };
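  /*
   * Example: a date string without a timezone is treated as UTC by
   * appending 'Z' before parsing; strings with an offset are left as-is.
   *
   *   parseDate('2022-01-01T00:00:00');       // => 1640995200000
   *   parseDate('2022-01-01T00:00:00+01:00'); // => 1640991600000
   */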
7282
7283 var parsers = {
7284 /**
7285 * Specifies the duration of the entire Media Presentation. Format is a duration string
7286 * as specified in ISO 8601
7287 *
7288 * @param {string} value
7289 * value of attribute as a string
7290 * @return {number}
7291 * The duration in seconds
7292 */
7293 mediaPresentationDuration: function mediaPresentationDuration(value) {
7294 return parseDuration(value);
7295 },
7296
7297 /**
7298 * Specifies the Segment availability start time for all Segments referred to in this
7299 * MPD. For a dynamic manifest, it specifies the anchor for the earliest availability
7300 * time. Format is a date string as specified in ISO 8601
7301 *
7302 * @param {string} value
7303 * value of attribute as a string
7304 * @return {number}
7305 * The date as seconds from unix epoch
7306 */
7307 availabilityStartTime: function availabilityStartTime(value) {
7308 return parseDate(value) / 1000;
7309 },
7310
7311 /**
7312 * Specifies the smallest period between potential changes to the MPD. Format is a
7313 * duration string as specified in ISO 8601
7314 *
7315 * @param {string} value
7316 * value of attribute as a string
7317 * @return {number}
7318 * The duration in seconds
7319 */
7320 minimumUpdatePeriod: function minimumUpdatePeriod(value) {
7321 return parseDuration(value);
7322 },
7323
7324 /**
7325 * Specifies the suggested presentation delay. Format is a
7326 * duration string as specified in ISO 8601
7327 *
7328 * @param {string} value
7329 * value of attribute as a string
7330 * @return {number}
7331 * The duration in seconds
7332 */
7333 suggestedPresentationDelay: function suggestedPresentationDelay(value) {
7334 return parseDuration(value);
7335 },
7336
7337 /**
7338 * Specifies the type of the mpd. Can be either "static" or "dynamic"
7339 *
7340 * @param {string} value
7341 * value of attribute as a string
7342 *
7343 * @return {string}
7344 * The type as a string
7345 */
7346 type: function type(value) {
7347 return value;
7348 },
7349
7350 /**
7351 * Specifies the duration of the smallest time shifting buffer for any Representation
7352 * in the MPD. Format is a duration string as specified in ISO 8601
7353 *
7354 * @param {string} value
7355 * value of attribute as a string
7356 * @return {number}
7357 * The duration in seconds
7358 */
7359 timeShiftBufferDepth: function timeShiftBufferDepth(value) {
7360 return parseDuration(value);
7361 },
7362
7363 /**
7364 * Specifies the PeriodStart time of the Period relative to the availabilityStartTime.
7365 * Format is a duration string as specified in ISO 8601
7366 *
7367 * @param {string} value
7368 * value of attribute as a string
7369 * @return {number}
7370 * The duration in seconds
7371 */
7372 start: function start(value) {
7373 return parseDuration(value);
7374 },
7375
7376 /**
7377 * Specifies the width of the visual presentation
7378 *
7379 * @param {string} value
7380 * value of attribute as a string
7381 * @return {number}
7382 * The parsed width
7383 */
7384 width: function width(value) {
7385 return parseInt(value, 10);
7386 },
7387
7388 /**
7389 * Specifies the height of the visual presentation
7390 *
7391 * @param {string} value
7392 * value of attribute as a string
7393 * @return {number}
7394 * The parsed height
7395 */
7396 height: function height(value) {
7397 return parseInt(value, 10);
7398 },
7399
7400 /**
7401 * Specifies the bitrate of the representation
7402 *
7403 * @param {string} value
7404 * value of attribute as a string
7405 * @return {number}
7406 * The parsed bandwidth
7407 */
7408 bandwidth: function bandwidth(value) {
7409 return parseInt(value, 10);
7410 },
7411
7412 /**
7413 * Specifies the frame rate of the representation
7414 *
7415 * @param {string} value
7416 * value of attribute as a string
7417 * @return {number}
7418 * The parsed frame rate
7419 */
7420 frameRate: function frameRate(value) {
7421 return parseDivisionValue(value);
7422 },
7423
7424 /**
7425 * Specifies the number of the first Media Segment in this Representation in the Period
7426 *
7427 * @param {string} value
7428 * value of attribute as a string
7429 * @return {number}
7430 * The parsed number
7431 */
7432 startNumber: function startNumber(value) {
7433 return parseInt(value, 10);
7434 },
7435
7436 /**
7437 * Specifies the timescale in units per second
7438 *
7439 * @param {string} value
7440 * value of attribute as a string
7441 * @return {number}
7442 * The parsed timescale
7443 */
7444 timescale: function timescale(value) {
7445 return parseInt(value, 10);
7446 },
7447
7448 /**
7449 * Specifies the presentationTimeOffset.
7450 *
7451 * @param {string} value
7452 * value of the attribute as a string
7453 *
7454 * @return {number}
7455 * The parsed presentationTimeOffset
7456 */
7457 presentationTimeOffset: function presentationTimeOffset(value) {
7458 return parseInt(value, 10);
7459 },
7460
7461 /**
7462 * Specifies the constant approximate Segment duration
7463 * NOTE: The <Period> element also contains an @duration attribute. This duration
7464 * specifies the duration of the Period. This attribute is currently not
7465 * supported by the rest of the parser; however, we still check for it to prevent
7466 * errors.
7467 *
7468 * @param {string} value
7469 * value of attribute as a string
7470 * @return {number}
7471 * The parsed duration
7472 */
7473 duration: function duration(value) {
7474 var parsedValue = parseInt(value, 10);
7475
7476 if (isNaN(parsedValue)) {
7477 return parseDuration(value);
7478 }
7479
7480 return parsedValue;
7481 },
7482
7483 /**
7484 * Specifies the Segment duration, in units of the value of the @timescale.
7485 *
7486 * @param {string} value
7487 * value of attribute as a string
7488 * @return {number}
7489 * The parsed duration
7490 */
7491 d: function d(value) {
7492 return parseInt(value, 10);
7493 },
7494
7495 /**
7496 * Specifies the MPD start time, in @timescale units, at which the first Segment in
7497 * the series starts, relative to the beginning of the Period
7498 *
7499 * @param {string} value
7500 * value of attribute as a string
7501 * @return {number}
7502 * The parsed time
7503 */
7504 t: function t(value) {
7505 return parseInt(value, 10);
7506 },
7507
7508 /**
7509 * Specifies the repeat count of the number of following contiguous Segments with the
7510 * same duration expressed by the value of @d
7511 *
7512 * @param {string} value
7513 * value of attribute as a string
7514 * @return {number}
7515 * The parsed number
7516 */
7517 r: function r(value) {
7518 return parseInt(value, 10);
7519 },
7520
7521 /**
7522 * Default parser for all other attributes. Acts as a no-op and just returns the value
7523 * as a string
7524 *
7525 * @param {string} value
7526 * value of attribute as a string
7527 * @return {string}
7528 * Unparsed value
7529 */
7530 DEFAULT: function DEFAULT(value) {
7531 return value;
7532 }
7533 };
7534 /**
7535 * Gets all the attributes and values of the provided node, parses attributes with known
7536 * types, and returns an object with attribute names mapped to values.
7537 *
7538 * @param {Node} el
7539 * The node to parse attributes from
7540 * @return {Object}
7541 * Object with all attributes of el parsed
7542 */
7543
7544 var parseAttributes = function parseAttributes(el) {
7545 if (!(el && el.attributes)) {
7546 return {};
7547 }
7548
7549 return from(el.attributes).reduce(function (a, e) {
7550 var parseFn = parsers[e.name] || parsers.DEFAULT;
7551 a[e.name] = parseFn(e.value);
7552 return a;
7553 }, {});
7554 };
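  /*
   * Illustrative example (hypothetical node): for an element such as
   * <Representation id="v1" width="1920" bandwidth="5000000"/>, known
   * attributes get typed values and everything else falls through to
   * parsers.DEFAULT as a string.
   *
   *   parseAttributes(representationNode);
   *   // => { id: 'v1', width: 1920, bandwidth: 5000000 }
   */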
7555
7556 var keySystemsMap = {
7557 'urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b': 'org.w3.clearkey',
7558 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed': 'com.widevine.alpha',
7559 'urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95': 'com.microsoft.playready',
7560 'urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb': 'com.adobe.primetime'
7561 };
7562 /**
7563 * Builds a list of urls that is the product of the reference urls and BaseURL values
7564 *
7565 * @param {string[]} referenceUrls
7566 * List of reference urls to resolve to
7567 * @param {Node[]} baseUrlElements
7568 * List of BaseURL nodes from the mpd
7569 * @return {string[]}
7570 * List of resolved urls
7571 */
7572
7573 var buildBaseUrls = function buildBaseUrls(referenceUrls, baseUrlElements) {
7574 if (!baseUrlElements.length) {
7575 return referenceUrls;
7576 }
7577
7578 return flatten(referenceUrls.map(function (reference) {
7579 return baseUrlElements.map(function (baseUrlElement) {
7580 return resolveUrl$1(reference, getContent(baseUrlElement));
7581 });
7582 }));
7583 };
7584 /**
7585 * Contains all Segment information for its containing AdaptationSet
7586 *
7587 * @typedef {Object} SegmentInformation
7588 * @property {Object|undefined} template
7589 * Contains the attributes for the SegmentTemplate node
7590 * @property {Object[]|undefined} segmentTimeline
7591 * Contains a list of attributes for each S node within the SegmentTimeline node
7592 * @property {Object|undefined} list
7593 * Contains the attributes for the SegmentList node
7594 * @property {Object|undefined} base
7595 * Contains the attributes for the SegmentBase node
7596 */
7597
7598 /**
7599 * Returns all available Segment information contained within the AdaptationSet node
7600 *
7601 * @param {Node} adaptationSet
7602 * The AdaptationSet node to get Segment information from
7603 * @return {SegmentInformation}
7604 * The Segment information contained within the provided AdaptationSet
7605 */
7606
7607
7608 var getSegmentInformation = function getSegmentInformation(adaptationSet) {
7609 var segmentTemplate = findChildren(adaptationSet, 'SegmentTemplate')[0];
7610 var segmentList = findChildren(adaptationSet, 'SegmentList')[0];
7611 var segmentUrls = segmentList && findChildren(segmentList, 'SegmentURL').map(function (s) {
7612 return merge({
7613 tag: 'SegmentURL'
7614 }, parseAttributes(s));
7615 });
7616 var segmentBase = findChildren(adaptationSet, 'SegmentBase')[0];
7617 var segmentTimelineParentNode = segmentList || segmentTemplate;
7618 var segmentTimeline = segmentTimelineParentNode && findChildren(segmentTimelineParentNode, 'SegmentTimeline')[0];
7619 var segmentInitializationParentNode = segmentList || segmentBase || segmentTemplate;
7620 var segmentInitialization = segmentInitializationParentNode && findChildren(segmentInitializationParentNode, 'Initialization')[0]; // SegmentTemplate is handled slightly differently, since it can have both
7621 // @initialization and an <Initialization> node. @initialization can be templated,
7622 // while the node can have a url and range specified. If the <SegmentTemplate> has
7623 // both @initialization and an <Initialization> subelement we opt to override with
7624 // the node, as this interaction is not defined in the spec.
7625
7626 var template = segmentTemplate && parseAttributes(segmentTemplate);
7627
7628 if (template && segmentInitialization) {
7629 template.initialization = segmentInitialization && parseAttributes(segmentInitialization);
7630 } else if (template && template.initialization) {
7631 // If it is @initialization we convert it to an object since this is the format that
7632 // later functions will rely on for the initialization segment. This is only valid
7633 // for <SegmentTemplate>
7634 template.initialization = {
7635 sourceURL: template.initialization
7636 };
7637 }
7638
7639 var segmentInfo = {
7640 template: template,
7641 segmentTimeline: segmentTimeline && findChildren(segmentTimeline, 'S').map(function (s) {
7642 return parseAttributes(s);
7643 }),
7644 list: segmentList && merge(parseAttributes(segmentList), {
7645 segmentUrls: segmentUrls,
7646 initialization: parseAttributes(segmentInitialization)
7647 }),
7648 base: segmentBase && merge(parseAttributes(segmentBase), {
7649 initialization: parseAttributes(segmentInitialization)
7650 })
7651 };
7652 Object.keys(segmentInfo).forEach(function (key) {
7653 if (!segmentInfo[key]) {
7654 delete segmentInfo[key];
7655 }
7656 });
7657 return segmentInfo;
7658 };
7659 /**
7660 * Contains Segment information and attributes needed to construct a Playlist object
7661 * from a Representation
7662 *
7663 * @typedef {Object} RepresentationInformation
7664 * @property {SegmentInformation} segmentInfo
7665 * Segment information for this Representation
7666 * @property {Object} attributes
7667 * Inherited attributes for this Representation
7668 */
7669
7670 /**
7671 * Maps a Representation node to an object containing Segment information and attributes
7672 *
7673 * @name inheritBaseUrlsCallback
7674 * @function
7675 * @param {Node} representation
7676 * Representation node from the mpd
7677 * @return {RepresentationInformation}
7678 * Representation information needed to construct a Playlist object
7679 */
7680
7681 /**
7682 * Returns a callback for Array.prototype.map for mapping Representation nodes to
7683 * Segment information and attributes using inherited BaseURL nodes.
7684 *
7685 * @param {Object} adaptationSetAttributes
7686 * Contains attributes inherited by the AdaptationSet
7687 * @param {string[]} adaptationSetBaseUrls
7688 * Contains list of resolved base urls inherited by the AdaptationSet
7689 * @param {SegmentInformation} adaptationSetSegmentInfo
7690 * Contains Segment information for the AdaptationSet
7691 * @return {inheritBaseUrlsCallback}
7692 * Callback map function
7693 */
7694
7695
7696 var inheritBaseUrls = function inheritBaseUrls(adaptationSetAttributes, adaptationSetBaseUrls, adaptationSetSegmentInfo) {
7697 return function (representation) {
7698 var repBaseUrlElements = findChildren(representation, 'BaseURL');
7699 var repBaseUrls = buildBaseUrls(adaptationSetBaseUrls, repBaseUrlElements);
7700 var attributes = merge(adaptationSetAttributes, parseAttributes(representation));
7701 var representationSegmentInfo = getSegmentInformation(representation);
7702 return repBaseUrls.map(function (baseUrl) {
7703 return {
7704 segmentInfo: merge(adaptationSetSegmentInfo, representationSegmentInfo),
7705 attributes: merge(attributes, {
7706 baseUrl: baseUrl
7707 })
7708 };
7709 });
7710 };
7711 };
7712 /**
7713 * Transforms a series of content protection nodes to
7714 * an object containing pssh data by key system
7715 *
7716 * @param {Node[]} contentProtectionNodes
7717 * Content protection nodes
7718 * @return {Object}
7719 * Object containing pssh data by key system
7720 */
7721
7722
7723 var generateKeySystemInformation = function generateKeySystemInformation(contentProtectionNodes) {
7724 return contentProtectionNodes.reduce(function (acc, node) {
7725 var attributes = parseAttributes(node); // Although it could be argued that according to the UUID RFC spec the UUID string (a-f chars) should be generated
7726 // as a lowercase string, it also mentions it should be treated as case-insensitive on input. Since the key system
7727 // UUIDs in the keySystemsMap are hardcoded as lowercase in the codebase there isn't any reason not to do
7728 // .toLowerCase() on the input UUID string from the manifest (at least I could not think of one).
7729
7730 if (attributes.schemeIdUri) {
7731 attributes.schemeIdUri = attributes.schemeIdUri.toLowerCase();
7732 }
7733
7734 var keySystem = keySystemsMap[attributes.schemeIdUri];
7735
7736 if (keySystem) {
7737 acc[keySystem] = {
7738 attributes: attributes
7739 };
7740 var psshNode = findChildren(node, 'cenc:pssh')[0];
7741
7742 if (psshNode) {
7743 var pssh = getContent(psshNode);
7744 acc[keySystem].pssh = pssh && decodeB64ToUint8Array(pssh);
7745 }
7746 }
7747
7748 return acc;
7749 }, {});
7750 }; // defined in ANSI_SCTE 214-1 2016
7751
7752
7753 var parseCaptionServiceMetadata = function parseCaptionServiceMetadata(service) {
7754 // 608 captions
7755 if (service.schemeIdUri === 'urn:scte:dash:cc:cea-608:2015') {
7756 var values = typeof service.value !== 'string' ? [] : service.value.split(';');
7757 return values.map(function (value) {
7758 var channel;
7759 var language; // default language to value
7760
7761 language = value;
7762
7763 if (/^CC\d=/.test(value)) {
7764 var _value$split = value.split('=');
7765
7766 channel = _value$split[0];
7767 language = _value$split[1];
7768 } else if (/^CC\d$/.test(value)) {
7769 channel = value;
7770 }
7771
7772 return {
7773 channel: channel,
7774 language: language
7775 };
7776 });
7777 } else if (service.schemeIdUri === 'urn:scte:dash:cc:cea-708:2015') {
7778 var _values = typeof service.value !== 'string' ? [] : service.value.split(';');
7779
7780 return _values.map(function (value) {
7781 var flags = {
7782 // service or channel number 1-63
7783 'channel': undefined,
7784 // language is a 3ALPHA per ISO 639.2/B
7785 // field is required
7786 'language': undefined,
7787 // BIT 1/0 or ?
7788 // default value is 1, meaning 16:9 aspect ratio, 0 is 4:3, ? is unknown
7789 'aspectRatio': 1,
7790 // BIT 1/0
7791 // easy reader flag indicates the text is tailored to the needs of beginning readers
7792 // default 0, or off
7793 'easyReader': 0,
7794 // BIT 1/0
7795 // If 3d metadata is present (CEA-708.1) then 1
7796 // default 0
7797 '3D': 0
7798 };
7799
7800 if (/=/.test(value)) {
7801 var _value$split2 = value.split('='),
7802 channel = _value$split2[0],
7803 _value$split2$ = _value$split2[1],
7804 opts = _value$split2$ === void 0 ? '' : _value$split2$;
7805
7806 flags.channel = channel;
7807 flags.language = value;
7808 opts.split(',').forEach(function (opt) {
7809 var _opt$split = opt.split(':'),
7810 name = _opt$split[0],
7811 val = _opt$split[1];
7812
7813 if (name === 'lang') {
7814 flags.language = val; // er for easyReader
7815 } else if (name === 'er') {
7816 flags.easyReader = Number(val); // war for wide aspect ratio
7817 } else if (name === 'war') {
7818 flags.aspectRatio = Number(val);
7819 } else if (name === '3D') {
7820 flags['3D'] = Number(val);
7821 }
7822 });
7823 } else {
7824 flags.language = value;
7825 }
7826
7827 if (flags.channel) {
7828 flags.channel = 'SERVICE' + flags.channel;
7829 }
7830
7831 return flags;
7832 });
7833 }
7834 };
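  /*
   * Illustrative example (hypothetical Accessibility descriptor): a 608
   * service value of 'CC1=eng;CC3=swe' yields one entry per channel.
   *
   *   parseCaptionServiceMetadata({
   *     schemeIdUri: 'urn:scte:dash:cc:cea-608:2015',
   *     value: 'CC1=eng;CC3=swe'
   *   });
   *   // => [{ channel: 'CC1', language: 'eng' },
   *   //     { channel: 'CC3', language: 'swe' }]
   */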
7835 /**
7836 * Maps an AdaptationSet node to a list of Representation information objects
7837 *
7838 * @name toRepresentationsCallback
7839 * @function
7840 * @param {Node} adaptationSet
7841 * AdaptationSet node from the mpd
7842 * @return {RepresentationInformation[]}
7843 * List of objects containing Representation information
7844 */
7845
7846 /**
7847 * Returns a callback for Array.prototype.map for mapping AdaptationSet nodes to a list of
7848 * Representation information objects
7849 *
7850 * @param {Object} periodAttributes
7851 * Contains attributes inherited by the Period
7852 * @param {string[]} periodBaseUrls
7853 * Contains list of resolved base urls inherited by the Period
7854 * @param {SegmentInformation} periodSegmentInfo
7855 * Contains Segment Information at the period level
7856 * @return {toRepresentationsCallback}
7857 * Callback map function
7858 */
7859
7860
7861 var toRepresentations = function toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo) {
7862 return function (adaptationSet) {
7863 var adaptationSetAttributes = parseAttributes(adaptationSet);
7864 var adaptationSetBaseUrls = buildBaseUrls(periodBaseUrls, findChildren(adaptationSet, 'BaseURL'));
7865 var role = findChildren(adaptationSet, 'Role')[0];
7866 var roleAttributes = {
7867 role: parseAttributes(role)
7868 };
7869 var attrs = merge(periodAttributes, adaptationSetAttributes, roleAttributes);
7870 var accessibility = findChildren(adaptationSet, 'Accessibility')[0];
7871 var captionServices = parseCaptionServiceMetadata(parseAttributes(accessibility));
7872
7873 if (captionServices) {
7874 attrs = merge(attrs, {
7875 captionServices: captionServices
7876 });
7877 }
7878
7879 var label = findChildren(adaptationSet, 'Label')[0];
7880
7881 if (label && label.childNodes.length) {
7882 var labelVal = label.childNodes[0].nodeValue.trim();
7883 attrs = merge(attrs, {
7884 label: labelVal
7885 });
7886 }
7887
7888 var contentProtection = generateKeySystemInformation(findChildren(adaptationSet, 'ContentProtection'));
7889
7890 if (Object.keys(contentProtection).length) {
7891 attrs = merge(attrs, {
7892 contentProtection: contentProtection
7893 });
7894 }
7895
7896 var segmentInfo = getSegmentInformation(adaptationSet);
7897 var representations = findChildren(adaptationSet, 'Representation');
7898 var adaptationSetSegmentInfo = merge(periodSegmentInfo, segmentInfo);
7899 return flatten(representations.map(inheritBaseUrls(attrs, adaptationSetBaseUrls, adaptationSetSegmentInfo)));
7900 };
7901 };
7902 /**
7903 * Contains all period information for mapping nodes onto adaptation sets.
7904 *
7905 * @typedef {Object} PeriodInformation
7906 * @property {Node} period.node
7907 * Period node from the mpd
7908 * @property {Object} period.attributes
7909 * Parsed period attributes from node plus any added
7910 */
7911
7912 /**
7913 * Maps a PeriodInformation object to a list of Representation information objects for all
7914 * AdaptationSet nodes contained within the Period.
7915 *
7916 * @name toAdaptationSetsCallback
7917 * @function
7918 * @param {PeriodInformation} period
7919 * Period object containing necessary period information
7920 * @param {number} periodStart
7921 * Start time of the Period within the mpd
7922 * @return {RepresentationInformation[]}
7923 * List of objects containing Representation information
7924 */
7925
7926 /**
7927 * Returns a callback for Array.prototype.map for mapping Period nodes to a list of
7928 * Representation information objects
7929 *
7930 * @param {Object} mpdAttributes
7931 * Contains attributes inherited by the mpd
7932 * @param {string[]} mpdBaseUrls
7933 * Contains list of resolved base urls inherited by the mpd
7934 * @return {toAdaptationSetsCallback}
7935 * Callback map function
7936 */
7937
7938
7939 var toAdaptationSets = function toAdaptationSets(mpdAttributes, mpdBaseUrls) {
7940 return function (period, index) {
7941 var periodBaseUrls = buildBaseUrls(mpdBaseUrls, findChildren(period.node, 'BaseURL'));
7942 var periodAttributes = merge(mpdAttributes, {
7943 periodStart: period.attributes.start
7944 });
7945
7946 if (typeof period.attributes.duration === 'number') {
7947 periodAttributes.periodDuration = period.attributes.duration;
7948 }
7949
7950 var adaptationSets = findChildren(period.node, 'AdaptationSet');
7951 var periodSegmentInfo = getSegmentInformation(period.node);
7952 return flatten(adaptationSets.map(toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo)));
7953 };
7954 };
7955 /**
7956 * Gets Period@start property for a given period.
7957 *
7958 * @param {Object} options
7959 * Options object
7960 * @param {Object} options.attributes
7961 * Period attributes
7962 * @param {Object} [options.priorPeriodAttributes]
7963 * Prior period attributes (if prior period is available)
7964 * @param {string} options.mpdType
7965 * The MPD@type these periods came from
7966 * @return {number|null}
7967 * The period start, or null if it's an early available period or error
7968 */
7969
7970
7971 var getPeriodStart = function getPeriodStart(_ref) {
7972 var attributes = _ref.attributes,
7973 priorPeriodAttributes = _ref.priorPeriodAttributes,
7974 mpdType = _ref.mpdType; // Summary of period start time calculation from DASH spec section 5.3.2.1
7975 //
7976 // A period's start is the first period's start + time elapsed after playing all
7977 // prior periods to this one. Periods continue one after the other in time (without
7978 // gaps) until the end of the presentation.
7979 //
7980 // The value of Period@start should be:
7981 // 1. if Period@start is present: value of Period@start
7982 // 2. if previous period exists and it has @duration: previous Period@start +
7983 // previous Period@duration
7984 // 3. if this is first period and MPD@type is 'static': 0
7985 // 4. in all other cases, consider the period an "early available period" (note: not
7986 // currently supported)
7987 // (1)
7988
7989 if (typeof attributes.start === 'number') {
7990 return attributes.start;
7991 } // (2)
7992
7993
7994 if (priorPeriodAttributes && typeof priorPeriodAttributes.start === 'number' && typeof priorPeriodAttributes.duration === 'number') {
7995 return priorPeriodAttributes.start + priorPeriodAttributes.duration;
7996 } // (3)
7997
7998
7999 if (!priorPeriodAttributes && mpdType === 'static') {
8000 return 0;
8001 } // (4)
8002 // There is currently no logic for calculating the Period@start value if there is
8003 // no Period@start or prior Period@start and Period@duration available. This is not made
8004 // explicit by the DASH interop guidelines or the DASH spec, however, since there's
8005 // nothing about any other resolution strategies, it's implied. Thus, this case should
8006 // be considered an early available period, or error, and null should suffice for both
8007 // of those cases.
8008
8009
8010 return null;
8011 };
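  /*
   * Illustrative example (hypothetical periods): rule (2) above derives a
   * second period's start from the prior period's start + duration.
   *
   *   getPeriodStart({
   *     attributes: {},
   *     priorPeriodAttributes: { start: 0, duration: 30 },
   *     mpdType: 'dynamic'
   *   });
   *   // => 30
   */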
8012 /**
8013 * Traverses the mpd xml tree to generate a list of Representation information objects
8014 * that have inherited attributes from parent nodes
8015 *
8016 * @param {Node} mpd
8017 * The root node of the mpd
8018 * @param {Object} options
8019 * Available options for inheritAttributes
8020 * @param {string} options.manifestUri
8021 * The uri source of the mpd
8022 * @param {number} options.NOW
8023 * Current time per DASH IOP. Default is current time in ms since epoch
8024 * @param {number} options.clientOffset
8025 * Client time difference from NOW (in milliseconds)
8026 * @return {RepresentationInformation[]}
8027 * List of objects containing Representation information
8028 */
8029
8030
8031 var inheritAttributes = function inheritAttributes(mpd, options) {
8032 if (options === void 0) {
8033 options = {};
8034 }
8035
8036 var _options = options,
8037 _options$manifestUri = _options.manifestUri,
8038 manifestUri = _options$manifestUri === void 0 ? '' : _options$manifestUri,
8039 _options$NOW = _options.NOW,
8040 NOW = _options$NOW === void 0 ? Date.now() : _options$NOW,
8041 _options$clientOffset = _options.clientOffset,
8042 clientOffset = _options$clientOffset === void 0 ? 0 : _options$clientOffset;
8043 var periodNodes = findChildren(mpd, 'Period');
8044
8045 if (!periodNodes.length) {
8046 throw new Error(errors.INVALID_NUMBER_OF_PERIOD);
8047 }
8048
8049 var locations = findChildren(mpd, 'Location');
8050 var mpdAttributes = parseAttributes(mpd);
8051 var mpdBaseUrls = buildBaseUrls([manifestUri], findChildren(mpd, 'BaseURL')); // See DASH spec section 5.3.1.2, Semantics of MPD element. Default type to 'static'.
8052
8053 mpdAttributes.type = mpdAttributes.type || 'static';
8054 mpdAttributes.sourceDuration = mpdAttributes.mediaPresentationDuration || 0;
8055 mpdAttributes.NOW = NOW;
8056 mpdAttributes.clientOffset = clientOffset;
8057
8058 if (locations.length) {
8059 mpdAttributes.locations = locations.map(getContent);
8060 }
8061
8062 var periods = []; // Since toAdaptationSets acts on individual periods right now, the simplest approach to
8063 // adding properties that require looking at prior periods is to parse attributes and add
8064 // missing ones before toAdaptationSets is called. If more such properties are added, it
8065 // may be better to refactor toAdaptationSets.
8066
8067 periodNodes.forEach(function (node, index) {
8068 var attributes = parseAttributes(node); // Use the last modified prior period, as it may contain added information necessary
8069 // for this period.
8070
8071 var priorPeriod = periods[index - 1];
8072 attributes.start = getPeriodStart({
8073 attributes: attributes,
8074 priorPeriodAttributes: priorPeriod ? priorPeriod.attributes : null,
8075 mpdType: mpdAttributes.type
8076 });
8077 periods.push({
8078 node: node,
8079 attributes: attributes
8080 });
8081 });
8082 return {
8083 locations: mpdAttributes.locations,
8084 representationInfo: flatten(periods.map(toAdaptationSets(mpdAttributes, mpdBaseUrls)))
8085 };
8086 };
8087
8088 var stringToMpdXml = function stringToMpdXml(manifestString) {
8089 if (manifestString === '') {
8090 throw new Error(errors.DASH_EMPTY_MANIFEST);
8091 }
8092
8093 var parser = new xmldom.DOMParser();
8094 var xml;
8095 var mpd;
8096
8097 try {
8098 xml = parser.parseFromString(manifestString, 'application/xml');
8099 mpd = xml && xml.documentElement.tagName === 'MPD' ? xml.documentElement : null;
8100 } catch (e) {// IE 11 throws on invalid xml
8101 }
8102
8103 if (!mpd || mpd.getElementsByTagName('parsererror').length > 0) {
8104 throw new Error(errors.DASH_INVALID_XML);
8105 }
8106
8107 return mpd;
8108 };
8109 /**
8110 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
8111 *
8112 * @param {Node} mpd
8113 * Root node of the parsed MPD manifest
8114 * @return {Object|null}
8115 * Attributes of UTCTiming node specified in the manifest. Null if none found
8116 */
8117
8118
8119 var parseUTCTimingScheme = function parseUTCTimingScheme(mpd) {
8120 var UTCTimingNode = findChildren(mpd, 'UTCTiming')[0];
8121
8122 if (!UTCTimingNode) {
8123 return null;
8124 }
8125
8126 var attributes = parseAttributes(UTCTimingNode);
8127
8128 switch (attributes.schemeIdUri) {
8129 case 'urn:mpeg:dash:utc:http-head:2014':
8130 case 'urn:mpeg:dash:utc:http-head:2012':
8131 attributes.method = 'HEAD';
8132 break;
8133
8134 case 'urn:mpeg:dash:utc:http-xsdate:2014':
8135 case 'urn:mpeg:dash:utc:http-iso:2014':
8136 case 'urn:mpeg:dash:utc:http-xsdate:2012':
8137 case 'urn:mpeg:dash:utc:http-iso:2012':
8138 attributes.method = 'GET';
8139 break;
8140
8141 case 'urn:mpeg:dash:utc:direct:2014':
8142 case 'urn:mpeg:dash:utc:direct:2012':
8143 attributes.method = 'DIRECT';
8144 attributes.value = Date.parse(attributes.value);
8145 break;
8146
8147 case 'urn:mpeg:dash:utc:http-ntp:2014':
8148 case 'urn:mpeg:dash:utc:ntp:2014':
8149 case 'urn:mpeg:dash:utc:sntp:2014':
8150 default:
8151 throw new Error(errors.UNSUPPORTED_UTC_TIMING_SCHEME);
8152 }
8153
8154 return attributes;
8155 };
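  /*
   * Illustrative example (hypothetical manifest): an xsdate UTCTiming node
   * maps to a GET request per the switch above.
   *
   *   <UTCTiming schemeIdUri="urn:mpeg:dash:utc:http-xsdate:2014"
   *              value="https://time.example.com/now"/>
   *
   *   parseUTCTimingScheme(mpd);
   *   // => { schemeIdUri: 'urn:mpeg:dash:utc:http-xsdate:2014',
   *   //      value: 'https://time.example.com/now', method: 'GET' }
   */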
8156 /*
8157 * Given a DASH manifest string and options, parses the DASH manifest into an object in the
8158 * form output by m3u8-parser and accepted by videojs/http-streaming.
8159 *
8160 * For live DASH manifests, if `previousManifest` is provided in options, then the newly
8161 * parsed DASH manifest will have its media sequence and discontinuity sequence values
8162 * updated to reflect its position relative to the prior manifest.
8163 *
8164 * @param {string} manifestString - the DASH manifest as a string
8165 * @param {options} [options] - any options
8166 *
8167 * @return {Object} the manifest object
8168 */
8169
8170 var parse = function parse(manifestString, options) {
8171 if (options === void 0) {
8172 options = {};
8173 }
8174
8175 var parsedManifestInfo = inheritAttributes(stringToMpdXml(manifestString), options);
8176 var playlists = toPlaylists(parsedManifestInfo.representationInfo);
8177 return toM3u8({
8178 dashPlaylists: playlists,
8179 locations: parsedManifestInfo.locations,
8180 sidxMapping: options.sidxMapping,
8181 previousManifest: options.previousManifest
8182 });
8183 };
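  /*
   * Usage sketch (hypothetical uri and manifest string):
   *
   *   var manifest = parse(mpdString, {
   *     manifestUri: 'https://example.com/dash.mpd'
   *   });
   *   // manifest is in the m3u8-parser-style form described above
   */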
8184 /**
8185 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
8186 *
8187 * @param {string} manifestString
8188 * XML string of the MPD manifest
8189 * @return {Object|null}
8190 * Attributes of UTCTiming node specified in the manifest. Null if none found
8191 */
8192
8193
8194 var parseUTCTiming = function parseUTCTiming(manifestString) {
8195 return parseUTCTimingScheme(stringToMpdXml(manifestString));
8196 };
8197
8198 var MAX_UINT32 = Math.pow(2, 32);
8199
8200 var getUint64$1 = function getUint64(uint8) {
8201 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
8202 var value;
8203
8204 if (dv.getBigUint64) {
8205 value = dv.getBigUint64(0);
8206
8207 if (value < Number.MAX_SAFE_INTEGER) {
8208 return Number(value);
8209 }
8210
8211 return value;
8212 }
8213
8214 return dv.getUint32(0) * MAX_UINT32 + dv.getUint32(4);
8215 };
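  /*
   * Example: reading a 64-bit big-endian value. Values that fit in a safe
   * integer come back as plain numbers; on platforms with
   * DataView.getBigUint64, larger values stay BigInt.
   *
   *   getUint64$1(new Uint8Array([0, 0, 0, 0, 0, 0, 0, 5])); // => 5
   */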
8216
8217 var numbers = {
8218 getUint64: getUint64$1,
8219 MAX_UINT32: MAX_UINT32
8220 };
8221
8222 var getUint64 = numbers.getUint64;
8223
8224 var parseSidx = function parseSidx(data) {
8225 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
8226 result = {
8227 version: data[0],
8228 flags: new Uint8Array(data.subarray(1, 4)),
8229 references: [],
8230 referenceId: view.getUint32(4),
8231 timescale: view.getUint32(8)
8232 },
8233 i = 12;
8234
8235 if (result.version === 0) {
8236 result.earliestPresentationTime = view.getUint32(i);
8237 result.firstOffset = view.getUint32(i + 4);
8238 i += 8;
8239 } else {
8240 // read 64 bits
8241 result.earliestPresentationTime = getUint64(data.subarray(i));
8242 result.firstOffset = getUint64(data.subarray(i + 8));
8243 i += 16;
8244 }
8245
8246 i += 2; // reserved
8247
8248 var referenceCount = view.getUint16(i);
8249 i += 2; // start of references
8250
8251 for (; referenceCount > 0; i += 12, referenceCount--) {
8252 result.references.push({
8253 referenceType: (data[i] & 0x80) >>> 7,
8254 referencedSize: view.getUint32(i) & 0x7FFFFFFF,
8255 subsegmentDuration: view.getUint32(i + 4),
8256 startsWithSap: !!(data[i + 8] & 0x80),
8257 sapType: (data[i + 8] & 0x70) >>> 4,
8258 sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
8259 });
8260 }
8261
8262 return result;
8263 };
8264
8265 var parseSidx_1 = parseSidx;
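  /*
   * Output shape sketch (hypothetical values): parseSidx takes the payload
   * bytes of a sidx box and returns its header fields plus one entry per
   * subsegment reference.
   *
   *   parseSidx(sidxPayloadBytes);
   *   // => { version: 1, flags: <Uint8Array>, referenceId: 1,
   *   //      timescale: 90000, earliestPresentationTime: 0, firstOffset: 0,
   *   //      references: [{ referenceType: 0, referencedSize: 123456,
   *   //                     subsegmentDuration: 180000, startsWithSap: true,
   *   //                     sapType: 1, sapDeltaTime: 0 }] }
   */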
8266
8267 var ID3 = toUint8([0x49, 0x44, 0x33]);
8268 var getId3Size = function getId3Size(bytes, offset) {
8269 if (offset === void 0) {
8270 offset = 0;
8271 }
8272
8273 bytes = toUint8(bytes);
8274 var flags = bytes[offset + 5];
8275 var returnSize = bytes[offset + 6] << 21 | bytes[offset + 7] << 14 | bytes[offset + 8] << 7 | bytes[offset + 9];
8276 var footerPresent = (flags & 16) >> 4;
8277
8278 if (footerPresent) {
8279 return returnSize + 20;
8280 }
8281
8282 return returnSize + 10;
8283 };
8284 var getId3Offset = function getId3Offset(bytes, offset) {
8285 if (offset === void 0) {
8286 offset = 0;
8287 }
8288
8289 bytes = toUint8(bytes);
8290
8291 if (bytes.length - offset < 10 || !bytesMatch(bytes, ID3, {
8292 offset: offset
8293 })) {
8294 return offset;
8295 }
8296
8297 offset += getId3Size(bytes, offset); // recursive check for id3 tags as some files
8298 // have multiple ID3 tag sections even though
8299 // they should not.
8300
8301 return getId3Offset(bytes, offset);
8302 };
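  /*
   * Example: an ID3v2 header with a syncsafe-encoded body size of 10 and no
   * footer occupies 10 (header) + 10 (body) = 20 bytes, so the media data
   * starts at offset 20.
   *
   *   getId3Offset(toUint8([
   *     0x49, 0x44, 0x33,      // 'ID3'
   *     0x04, 0x00,            // version
   *     0x00,                  // flags (no footer)
   *     0x00, 0x00, 0x00, 0x0A // syncsafe size: 10
   *     // ...followed by the 10-byte tag body
   *   ]));
   *   // => 20
   */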
8303
8304 var normalizePath$1 = function normalizePath(path) {
8305 if (typeof path === 'string') {
8306 return stringToBytes(path);
8307 }
8308
8309 if (typeof path === 'number') {
8310 return path;
8311 }
8312
8313 return path;
8314 };
8315
8316 var normalizePaths$1 = function normalizePaths(paths) {
8317 if (!Array.isArray(paths)) {
8318 return [normalizePath$1(paths)];
8319 }
8320
8321 return paths.map(function (p) {
8322 return normalizePath$1(p);
8323 });
8324 };
8325 /**
8326 * find any number of boxes by name given a path to it in an iso bmff
8327 * such as mp4.
8328 *
8329 * @param {TypedArray} bytes
8330 * bytes for the iso bmff to search for boxes in
8331 *
8332 * @param {Uint8Array[]|string[]|string|Uint8Array} name
8333 * An array of paths or a single path representing the name
8334 * of boxes to search through in bytes. Paths may be
8335 * uint8 (character codes) or strings.
8336 *
8337 * @param {boolean} [complete=false]
8338 * Should we search only for complete boxes on the final path.
8339 * This is very useful when you do not want to get back partial boxes
8340 * in the case of streaming files.
8341 *
8342 * @return {Uint8Array[]}
8343 * An array of the end paths that we found.
8344 */
8345
8346 var findBox = function findBox(bytes, paths, complete) {
8347 if (complete === void 0) {
8348 complete = false;
8349 }
8350
8351 paths = normalizePaths$1(paths);
8352 bytes = toUint8(bytes);
8353 var results = [];
8354
8355 if (!paths.length) {
8356 // short-circuit the search for empty paths
8357 return results;
8358 }
8359
8360 var i = 0;
8361
8362 while (i < bytes.length) {
8363 var size = (bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]) >>> 0;
8364 var type = bytes.subarray(i + 4, i + 8); // invalid box format.
8365
8366 if (size === 0) {
8367 break;
8368 }
8369
8370 var end = i + size;
8371
8372 if (end > bytes.length) {
8373 // this box extends past the number of bytes we have;
8374 // if complete is set, we cannot find any more complete boxes.
8375 if (complete) {
8376 break;
8377 }
8378
8379 end = bytes.length;
8380 }
8381
8382 var data = bytes.subarray(i + 8, end);
8383
8384 if (bytesMatch(type, paths[0])) {
8385 if (paths.length === 1) {
8386 // this is the end of the path and we've found the box we were
8387 // looking for
8388 results.push(data);
8389 } else {
8390 // recursively search for the next box along the path
8391 results.push.apply(results, findBox(data, paths.slice(1), complete));
8392 }
8393 }
8394
8395 i = end;
8396 } // we've finished searching all of bytes
8397
8398
8399 return results;
8400 };
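  /*
   * Illustrative example (hypothetical fmp4 bytes): box names can be given
   * as a path; each match descends into that box's payload.
   *
   *   findBox(segmentBytes, ['moof', 'traf', 'tfdt'], true);
   *   // => array of tfdt payloads inside traf boxes inside moof boxes
   */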
8401
8402 // https://matroska-org.github.io/libebml/specs.html
8403 // https://www.matroska.org/technical/elements.html
8404 // https://www.webmproject.org/docs/container/
8405
8406 var EBML_TAGS = {
8407 EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),
8408 DocType: toUint8([0x42, 0x82]),
8409 Segment: toUint8([0x18, 0x53, 0x80, 0x67]),
8410 SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),
8411 Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),
8412 Track: toUint8([0xAE]),
8413 TrackNumber: toUint8([0xd7]),
8414 DefaultDuration: toUint8([0x23, 0xe3, 0x83]),
8415 TrackEntry: toUint8([0xAE]),
8416 TrackType: toUint8([0x83]),
8417 FlagDefault: toUint8([0x88]),
8418 CodecID: toUint8([0x86]),
8419 CodecPrivate: toUint8([0x63, 0xA2]),
8420 VideoTrack: toUint8([0xe0]),
8421 AudioTrack: toUint8([0xe1]),
8422 // Not used yet, but will be used for live webm/mkv
8423 // see https://www.matroska.org/technical/basics.html#block-structure
8424 // see https://www.matroska.org/technical/basics.html#simpleblock-structure
8425 Cluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),
8426 Timestamp: toUint8([0xE7]),
8427 TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),
8428 BlockGroup: toUint8([0xA0]),
8429 BlockDuration: toUint8([0x9B]),
8430 Block: toUint8([0xA1]),
8431 SimpleBlock: toUint8([0xA3])
8432 };
8433 /**
8434 * This is a simple table to determine the length
8435 * of things in ebml. The length is one based (starts at 1,
8436 * rather than zero) and for every zero bit before a one bit
8437 * we add one to length. We also need this table because in some
8438 * cases we have to xor all the length bits from another value.
8439 */
8440
8441 var LENGTH_TABLE = [128, 64, 32, 16, 8, 4, 2, 1];
8442
8443 var getLength = function getLength(byte) {
8444 var len = 1;
8445
8446 for (var i = 0; i < LENGTH_TABLE.length; i++) {
8447 if (byte & LENGTH_TABLE[i]) {
8448 break;
8449 }
8450
8451 len++;
8452 }
8453
8454 return len;
8455 }; // length in ebml is stored in the first 4 to 8 bits
8456 // of the first byte. 4 for the id length and 8 for the
8457 // data size length. Length is measured by converting the number to binary,
8458 // then adding 1 to the number of zeros before a 1 is encountered, starting
8459 // from the left.
8460
8461
8462 var getvint = function getvint(bytes, offset, removeLength, signed) {
8463 if (removeLength === void 0) {
8464 removeLength = true;
8465 }
8466
8467 if (signed === void 0) {
8468 signed = false;
8469 }
8470
8471 var length = getLength(bytes[offset]);
8472 var valueBytes = bytes.subarray(offset, offset + length); // NOTE that we do **not** subarray here because we need to copy these bytes
8473 // as they will be modified below to remove the dataSizeLen bits and we do not
8474 // want to modify the original data. normally we could just call slice on
8475 // uint8array but ie 11 does not support that...
8476
8477 if (removeLength) {
8478 valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
8479 valueBytes[0] ^= LENGTH_TABLE[length - 1];
8480 }
8481
8482 return {
8483 length: length,
8484 value: bytesToNumber(valueBytes, {
8485 signed: signed
8486 }),
8487 bytes: valueBytes
8488 };
8489 };
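  /*
   * Example: 0x81 is a one-byte vint (no zeros before the first set bit);
   * removing the length bit leaves a value of 1.
   *
   *   getvint(toUint8([0x81]), 0);
   *   // => { length: 1, value: 1, bytes: [0x01] }
   */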
8490
8491 var normalizePath = function normalizePath(path) {
8492 if (typeof path === 'string') {
8493 return path.match(/.{1,2}/g).map(function (p) {
8494 return normalizePath(p);
8495 });
8496 }
8497
8498 if (typeof path === 'number') {
8499 return numberToBytes(path);
8500 }
8501
8502 return path;
8503 };
8504
8505 var normalizePaths = function normalizePaths(paths) {
8506 if (!Array.isArray(paths)) {
8507 return [normalizePath(paths)];
8508 }
8509
8510 return paths.map(function (p) {
8511 return normalizePath(p);
8512 });
8513 };
8514
8515 var getInfinityDataSize = function getInfinityDataSize(id, bytes, offset) {
8516 if (offset >= bytes.length) {
8517 return bytes.length;
8518 }
8519
8520 var innerid = getvint(bytes, offset, false);
8521
8522 if (bytesMatch(id.bytes, innerid.bytes)) {
8523 return offset;
8524 }
8525
8526 var dataHeader = getvint(bytes, offset + innerid.length);
8527 return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
8528 };
8529 /**
8530 * Notes on the EBML format.
8531 *
8532 * EBML uses "vints" tags. Every vint tag contains
8533 * two parts
8534 *
8535 * 1. The length from the first byte. You get this by
8536 * converting the byte to binary and counting the zeros
8537 * before a 1. Then you add 1 to that. Examples
8538 * 00011111 = length 4 because there are 3 zeros before a 1.
8539 * 00100000 = length 3 because there are 2 zeros before a 1.
8540 * 00000011 = length 7 because there are 6 zeros before a 1.
8541 *
8542 * 2. The bits used for length are removed from the first byte
8543 * Then all the bytes are merged into a value. NOTE: this
8544 * is not the case for id ebml tags as there id includes
8545 * length bits.
8546 *
8547 */
8548
8549
8550 var findEbml = function findEbml(bytes, paths) {
8551 paths = normalizePaths(paths);
8552 bytes = toUint8(bytes);
8553 var results = [];
8554
8555 if (!paths.length) {
8556 return results;
8557 }
8558
8559 var i = 0;
8560
8561 while (i < bytes.length) {
8562 var id = getvint(bytes, i, false);
8563 var dataHeader = getvint(bytes, i + id.length);
8564 var dataStart = i + id.length + dataHeader.length; // dataSize is unknown or this is a live stream
8565
8566 if (dataHeader.value === 0x7f) {
8567 dataHeader.value = getInfinityDataSize(id, bytes, dataStart);
8568
8569 if (dataHeader.value !== bytes.length) {
8570 dataHeader.value -= dataStart;
8571 }
8572 }
8573
8574 var dataEnd = dataStart + dataHeader.value > bytes.length ? bytes.length : dataStart + dataHeader.value;
8575 var data = bytes.subarray(dataStart, dataEnd);
8576
8577 if (bytesMatch(paths[0], id.bytes)) {
8578 if (paths.length === 1) {
8579 // this is the end of the paths and we've found the tag we were
8580 // looking for
8581 results.push(data);
8582 } else {
8583 // recursively search for the next tag inside of the data
8584 // of this one
8585 results = results.concat(findEbml(data, paths.slice(1)));
8586 }
8587 }
8588
8589 var totalLength = id.length + dataHeader.length + data.length; // move past this tag entirely, we are not looking for it
8590
8591 i += totalLength;
8592 }
8593
8594 return results;
8595 }; // see https://www.matroska.org/technical/basics.html#block-structure
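  /*
   * Example: this is how the container sniffers further below check the
   * DocType of an EBML (webm/mkv) stream.
   *
   *   var docType = findEbml(fileBytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0];
   *   // docType is the raw payload, e.g. the bytes of the string 'webm'
   */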
8596
8597 var NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);
8598 var NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);
8599 var EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);
8600 /**
8601 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
8602 * Sequence Payload"
8603 *
8604 * @param {Uint8Array} data the bytes of a RBSP from a NAL
8605 * unit
8606 * @return {Uint8Array} the RBSP without any Emulation
8607 * Prevention Bytes
8608 */
8609
8610 var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(bytes) {
8611 var positions = [];
8612 var i = 1; // Find all `Emulation Prevention Bytes`
8613
8614 while (i < bytes.length - 2) {
8615 if (bytesMatch(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {
8616 positions.push(i + 2);
8617 i++;
8618 }
8619
8620 i++;
8621 } // If no Emulation Prevention Bytes were found just return the original
8622 // array
8623
8624
8625 if (positions.length === 0) {
8626 return bytes;
8627 } // Create a new array to hold the NAL unit data
8628
8629
8630 var newLength = bytes.length - positions.length;
8631 var newData = new Uint8Array(newLength);
8632 var sourceIndex = 0;
8633
8634 for (i = 0; i < newLength; sourceIndex++, i++) {
8635 if (sourceIndex === positions[0]) {
8636 // Skip this byte
8637 sourceIndex++; // Remove this position index
8638
8639 positions.shift();
8640 }
8641
8642 newData[i] = bytes[sourceIndex];
8643 }
8644
8645 return newData;
8646 };
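  /*
   * Example: the 0x03 in a 0x00 0x00 0x03 sequence is an emulation
   * prevention byte and is dropped; all other bytes are kept.
   *
   *   discardEmulationPreventionBytes(toUint8([0x11, 0x00, 0x00, 0x03, 0x00, 0x22]));
   *   // => [0x11, 0x00, 0x00, 0x00, 0x22]
   */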
8647 var findNal = function findNal(bytes, dataType, types, nalLimit) {
8648 if (nalLimit === void 0) {
8649 nalLimit = Infinity;
8650 }
8651
8652 bytes = toUint8(bytes);
8653 types = [].concat(types);
8654 var i = 0;
8655 var nalStart;
8656 var nalsFound = 0; // keep searching until:
8657 // we reach the end of bytes
8658 // we reach the maximum number of nals they want to search
8659 // NOTE that we disregard nalLimit when we have found the start
8660 // of the nal we want so that we can find the end of the nal we want.
8661
8662 while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
8663 var nalOffset = void 0;
8664
8665 if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {
8666 nalOffset = 4;
8667 } else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {
8668 nalOffset = 3;
8669 } // we are unsynced,
8670 // find the next nal unit
8671
8672
8673 if (!nalOffset) {
8674 i++;
8675 continue;
8676 }
8677
8678 nalsFound++;
8679
8680 if (nalStart) {
8681 return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
8682 }
8683
8684 var nalType = void 0;
8685
8686 if (dataType === 'h264') {
8687 nalType = bytes[i + nalOffset] & 0x1f;
8688 } else if (dataType === 'h265') {
8689 nalType = bytes[i + nalOffset] >> 1 & 0x3f;
8690 }
8691
8692 if (types.indexOf(nalType) !== -1) {
8693 nalStart = i + nalOffset;
8694 } // the nal header is 1 byte long for h264, and 2 bytes for h265
8695
8696
8697 i += nalOffset + (dataType === 'h264' ? 1 : 2);
8698 }
8699
8700 return bytes.subarray(0, 0);
8701 };
8702 var findH264Nal = function findH264Nal(bytes, type, nalLimit) {
8703 return findNal(bytes, 'h264', type, nalLimit);
8704 };
8705 var findH265Nal = function findH265Nal(bytes, type, nalLimit) {
8706 return findNal(bytes, 'h265', type, nalLimit);
8707 };
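// Usage sketch: probe an Annex B elementary stream for a NAL type, limiting
// the scan to the first few NAL units; this mirrors how the isLikely h264/h265
// checks below use these helpers (annexBBytes is a hypothetical input):
//
//   var sps = findH264Nal(annexBBytes, 7, 3); // 7 = seq_parameter_set_rbsp
//   if (sps.length) { /* very likely raw h264 video */ }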
8708
8709 var CONSTANTS = {
8710 // "webm" string literal in hex
8711 'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),
8712 // "matroska" string literal in hex
8713 'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
8714 // "fLaC" string literal in hex
8715 'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),
8716 // "OggS" string literal in hex
8717 'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),
8718 // ac-3 sync byte, which also works for ec-3, as ec-3 is simply an
8719 // extension of the ac-3 codec
8720 'ac3': toUint8([0x0b, 0x77]),
8721 // "RIFF" string literal in hex used for wav and avi
8722 'riff': toUint8([0x52, 0x49, 0x46, 0x46]),
8723 // "AVI" string literal in hex
8724 'avi': toUint8([0x41, 0x56, 0x49]),
8725 // "WAVE" string literal in hex
8726 'wav': toUint8([0x57, 0x41, 0x56, 0x45]),
8727 // "ftyp3g" string literal in hex
8728 '3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
8729 // "ftyp" string literal in hex
8730 'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),
8731 // "styp" string literal in hex
8732 'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),
8733 // "ftypqt" string literal in hex
8734 'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74]),
8735 // moov string literal in hex
8736 'moov': toUint8([0x6D, 0x6F, 0x6F, 0x76]),
8737 // moof string literal in hex
8738 'moof': toUint8([0x6D, 0x6F, 0x6F, 0x66])
8739 };
8740 var _isLikely = {
8741 aac: function aac(bytes) {
8742 var offset = getId3Offset(bytes);
8743 return bytesMatch(bytes, [0xFF, 0x10], {
8744 offset: offset,
8745 mask: [0xFF, 0x16]
8746 });
8747 },
8748 mp3: function mp3(bytes) {
8749 var offset = getId3Offset(bytes);
8750 return bytesMatch(bytes, [0xFF, 0x02], {
8751 offset: offset,
8752 mask: [0xFF, 0x06]
8753 });
8754 },
8755 webm: function webm(bytes) {
8756 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is webm
8757
8758 return bytesMatch(docType, CONSTANTS.webm);
8759 },
8760 mkv: function mkv(bytes) {
8761 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is matroska
8762
8763 return bytesMatch(docType, CONSTANTS.matroska);
8764 },
8765 mp4: function mp4(bytes) {
8766 // if this file is another base media file format, it is not mp4
8767 if (_isLikely['3gp'](bytes) || _isLikely.mov(bytes)) {
8768 return false;
8769 } // if this file starts with an ftyp or styp box it's mp4
8770
8771
8772 if (bytesMatch(bytes, CONSTANTS.mp4, {
8773 offset: 4
8774 }) || bytesMatch(bytes, CONSTANTS.fmp4, {
8775 offset: 4
8776 })) {
8777 return true;
8778 } // if this file starts with a moof or moov box it's mp4
8779
8780
8781 if (bytesMatch(bytes, CONSTANTS.moof, {
8782 offset: 4
8783 }) || bytesMatch(bytes, CONSTANTS.moov, {
8784 offset: 4
8785 })) {
8786 return true;
8787 }
8788 },
8789 mov: function mov(bytes) {
8790 return bytesMatch(bytes, CONSTANTS.mov, {
8791 offset: 4
8792 });
8793 },
8794 '3gp': function gp(bytes) {
8795 return bytesMatch(bytes, CONSTANTS['3gp'], {
8796 offset: 4
8797 });
8798 },
8799 ac3: function ac3(bytes) {
8800 var offset = getId3Offset(bytes);
8801 return bytesMatch(bytes, CONSTANTS.ac3, {
8802 offset: offset
8803 });
8804 },
8805 ts: function ts(bytes) {
8806 if (bytes.length < 189 && bytes.length >= 1) {
8807 return bytes[0] === 0x47;
8808 }
8809
8810 var i = 0; // check the first 376 bytes for two matching sync bytes
8811
8812 while (i + 188 < bytes.length && i < 188) {
8813 if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
8814 return true;
8815 }
8816
8817 i += 1;
8818 }
8819
8820 return false;
8821 },
8822 flac: function flac(bytes) {
8823 var offset = getId3Offset(bytes);
8824 return bytesMatch(bytes, CONSTANTS.flac, {
8825 offset: offset
8826 });
8827 },
8828 ogg: function ogg(bytes) {
8829 return bytesMatch(bytes, CONSTANTS.ogg);
8830 },
8831 avi: function avi(bytes) {
8832 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.avi, {
8833 offset: 8
8834 });
8835 },
8836 wav: function wav(bytes) {
8837 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.wav, {
8838 offset: 8
8839 });
8840 },
8841 'h264': function h264(bytes) {
8842 // find seq_parameter_set_rbsp
8843 return findH264Nal(bytes, 7, 3).length;
8844 },
8845 'h265': function h265(bytes) {
8846 // find video_parameter_set_rbsp or seq_parameter_set_rbsp
8847 return findH265Nal(bytes, [32, 33], 3).length;
8848 }
8849 }; // get all the isLikely functions,
8850 // making sure 'ts' runs before h264 and h265
8851 // but after everything else, as these three checks are the least specific
8852
8853 var isLikelyTypes = Object.keys(_isLikely) // remove ts, h264, h265
8854 .filter(function (t) {
8855 return t !== 'ts' && t !== 'h264' && t !== 'h265';
8856 }) // add it back to the bottom
8857 .concat(['ts', 'h264', 'h265']); // make sure we are dealing with uint8 data.
8858
8859 isLikelyTypes.forEach(function (type) {
8860 var isLikelyFn = _isLikely[type];
8861
8862 _isLikely[type] = function (bytes) {
8863 return isLikelyFn(toUint8(bytes));
8864 };
8865 }); // export after wrapping
8866
8867 var isLikely = _isLikely; // A useful list of file signatures can be found here
8868 // https://en.wikipedia.org/wiki/List_of_file_signatures
8869
8870 var detectContainerForBytes = function detectContainerForBytes(bytes) {
8871 bytes = toUint8(bytes);
8872
8873 for (var i = 0; i < isLikelyTypes.length; i++) {
8874 var type = isLikelyTypes[i];
8875
8876 if (isLikely[type](bytes)) {
8877 return type;
8878 }
8879 }
8880
8881 return '';
8882 }; // fmp4 is not a container
8883
8884 var isLikelyFmp4MediaSegment = function isLikelyFmp4MediaSegment(bytes) {
8885 return findBox(bytes, ['moof']).length > 0;
8886 };
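// Usage sketch: pass the first few hundred bytes of a file or segment in and
// get a container name back ('mp4', 'ts', 'webm', ..., or '' when nothing
// matches); fmp4 media segments are detected separately since they lack a
// file signature of their own:
//
//   detectContainerForBytes(firstBytes); // e.g. 'ts' for MPEG2-TS input
//   isLikelyFmp4MediaSegment(firstBytes); // true when a moof box is present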
8887
8888 // calls the callback only once the request is complete (readyState DONE).
8889
8890 var callbackOnCompleted = function callbackOnCompleted(request, cb) {
8891 if (request.readyState === 4) {
8892 return cb();
8893 }
8894
8895 return;
8896 };
8897
8898 var containerRequest = function containerRequest(uri, xhr, cb) {
8899 var bytes = [];
8900 var id3Offset;
8901 var finished = false;
8902
8903 var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
8904 req.abort();
8905 finished = true;
8906 return cb(err, req, type, _bytes);
8907 };
8908
8909 var progressListener = function progressListener(error, request) {
8910 if (finished) {
8911 return;
8912 }
8913
8914 if (error) {
8915 return endRequestAndCallback(error, request, '', bytes);
8916 } // grab the new part of content that was just downloaded
8917
8918
8919 var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
8920
8921 bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
8922 id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type
8923 // or we need at least two bytes after an id3Offset
8924
8925 if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
8926 return callbackOnCompleted(request, function () {
8927 return endRequestAndCallback(error, request, '', bytes);
8928 });
8929 }
8930
8931 var type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
8932 // to see the second sync byte, wait until we have enough data
8933 // before declaring it ts
8934
8935 if (type === 'ts' && bytes.length < 188) {
8936 return callbackOnCompleted(request, function () {
8937 return endRequestAndCallback(error, request, '', bytes);
8938 });
8939 } // this may be an unsynced ts segment
8940 // wait for 376 bytes before detecting no container
8941
8942
8943 if (!type && bytes.length < 376) {
8944 return callbackOnCompleted(request, function () {
8945 return endRequestAndCallback(error, request, '', bytes);
8946 });
8947 }
8948
8949 return endRequestAndCallback(null, request, type, bytes);
8950 };
8951
8952 var options = {
8953 uri: uri,
8954 beforeSend: function beforeSend(request) {
8955 // this forces the browser to pass the bytes to us unprocessed
8956 request.overrideMimeType('text/plain; charset=x-user-defined');
8957 request.addEventListener('progress', function (_ref) {
8958 _ref.total; // leftover reads from the original destructured
8959 _ref.loaded; // ({ total, loaded }) argument; the values are unused
8960 return callbackWrapper(request, null, {
8961 statusCode: request.status
8962 }, progressListener);
8963 });
8964 }
8965 };
8966 var request = xhr(options, function (error, response) {
8967 return callbackWrapper(request, error, response, progressListener);
8968 });
8969 return request;
8970 };
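// Usage sketch (hedged; segmentUri is a placeholder): containerRequest streams
// a response through the progress listener above and aborts as soon as enough
// bytes have arrived to identify the container, so only a small prefix of the
// segment is ever downloaded:
//
//   containerRequest(segmentUri, vhs.xhr, function (err, req, type, bytes) {
//     // type is '' when undetectable; bytes holds the prefix received so far
//   });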
8971
8972 var EventTarget = videojs__default["default"].EventTarget,
8973 mergeOptions = videojs__default["default"].mergeOptions;
8974
8975 var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
8976 if (!isPlaylistUnchanged(a, b)) {
8977 return false;
8978 } // for dash the above check will often return true in scenarios where
8979 // the playlist has actually changed, because mediaSequence isn't a
8980 // dash concept and we often set it to 1, so playlists with the same
8981 // number of segments will compare as unchanged.
8982 // For dash we therefore need to make sure the underlying segments differ.
8983 // if the sidx changed then the playlists are different.
8984
8985
8986 if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
8987 return false;
8988 } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
8989 return false;
8990 } // if one playlist has segments and the other does not,
8991 // there was a change.
8992
8993
8994 if (a.segments && !b.segments || !a.segments && b.segments) {
8995 return false;
8996 } // neither has segments, so nothing changed
8997
8998
8999 if (!a.segments && !b.segments) {
9000 return true;
9001 } // check segments themselves
9002
9003
9004 for (var i = 0; i < a.segments.length; i++) {
9005 var aSegment = a.segments[i];
9006 var bSegment = b.segments[i]; // if uris are different between segments there was a change
9007
9008 if (aSegment.uri !== bSegment.uri) {
9009 return false;
9010 } // neither segment has a byterange, there will be no byterange change.
9011
9012
9013 if (!aSegment.byterange && !bSegment.byterange) {
9014 continue;
9015 }
9016
9017 var aByterange = aSegment.byterange;
9018 var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
9019
9020 if (aByterange && !bByterange || !aByterange && bByterange) {
9021 return false;
9022 } // if both segments have byterange with different offsets, there was a change.
9023
9024
9025 if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
9026 return false;
9027 }
9028 } // if everything was the same with segments, this is the same playlist.
9029
9030
9031 return true;
9032 };
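// Worked example (hypothetical playlists, assuming the shared
// isPlaylistUnchanged check already passed): a moved byterange on an otherwise
// identical segment list marks the playlists as changed:
//
//   dashPlaylistUnchanged(
//     { segments: [{ uri: 's1.m4s', byterange: { offset: 0, length: 500 } }] },
//     { segments: [{ uri: 's1.m4s', byterange: { offset: 500, length: 500 } }] }
//   ); // => false, so the loader treats this as an update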
9033 /**
9034 * Parses the master XML string and updates playlist URI references.
9035 *
9036 * @param {Object} config
9037 * Object of arguments
9038 * @param {string} config.masterXml
9039 * The mpd XML
9040 * @param {string} config.srcUrl
9041 * The mpd URL
9042 * @param {number} config.clientOffset
9043 * The time difference between server and client, in milliseconds
9044 * @param {Object} config.sidxMapping
9045 * SIDX mappings for moof/mdat URIs and byte ranges
 * @param {Object} [config.previousManifest]
 * The previously parsed manifest, if any, used when merging in live updates
9046 * @return {Object}
9047 * The parsed mpd manifest object
9048 */
9049
9050
9051 var parseMasterXml = function parseMasterXml(_ref) {
9052 var masterXml = _ref.masterXml,
9053 srcUrl = _ref.srcUrl,
9054 clientOffset = _ref.clientOffset,
9055 sidxMapping = _ref.sidxMapping,
9056 previousManifest = _ref.previousManifest;
9057 var manifest = parse(masterXml, {
9058 manifestUri: srcUrl,
9059 clientOffset: clientOffset,
9060 sidxMapping: sidxMapping,
9061 previousManifest: previousManifest
9062 });
9063 addPropertiesToMaster(manifest, srcUrl);
9064 return manifest;
9065 };
9066 /**
9067 * Returns a new master manifest that is the result of merging an updated master manifest
9068 * into the original version.
9069 *
9070 * @param {Object} oldMaster
9071 * The old parsed mpd object
9072 * @param {Object} newMaster
9073 * The updated parsed mpd object
9074 * @return {Object}
9075 * A new object representing the original master manifest with the updated media
9076 * playlists merged in
9077 */
9078
9079 var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
9080 var noChanges = true;
9081 var update = mergeOptions(oldMaster, {
9082 // These are top level properties that can be updated
9083 duration: newMaster.duration,
9084 minimumUpdatePeriod: newMaster.minimumUpdatePeriod,
9085 timelineStarts: newMaster.timelineStarts
9086 }); // First update the playlists in playlist list
9087
9088 for (var i = 0; i < newMaster.playlists.length; i++) {
9089 var playlist = newMaster.playlists[i];
9090
9091 if (playlist.sidx) {
9092 var sidxKey = generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already
9093
9094 if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
9095 addSidxSegmentsToPlaylist$1(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
9096 }
9097 }
9098
9099 var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);
9100
9101 if (playlistUpdate) {
9102 update = playlistUpdate;
9103 noChanges = false;
9104 }
9105 } // Then update media group playlists
9106
9107
9108 forEachMediaGroup$1(newMaster, function (properties, type, group, label) {
9109 if (properties.playlists && properties.playlists.length) {
9110 var id = properties.playlists[0].id;
9111
9112 var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);
9113
9114 if (_playlistUpdate) {
9115 update = _playlistUpdate; // update the playlist reference within media groups
9116
9117 update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
9118 noChanges = false;
9119 }
9120 }
9121 });
9122
9123 if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
9124 noChanges = false;
9125 }
9126
9127 if (noChanges) {
9128 return null;
9129 }
9130
9131 return update;
9132 }; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
9133 // If the SIDXs have maps, the two maps should match,
9134 // both `a` and `b` missing SIDXs is considered matching.
9135 // If `a` or `b` but not both have a map, they aren't matching.
9136
9137 var equivalentSidx = function equivalentSidx(a, b) {
9138 var neitherMap = Boolean(!a.map && !b.map);
9139 var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
9140 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
9141 }; // exported for testing
9142
9143
9144 var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
9145 var newSidxMapping = {};
9146
9147 for (var id in playlists) {
9148 var playlist = playlists[id];
9149 var currentSidxInfo = playlist.sidx;
9150
9151 if (currentSidxInfo) {
9152 var key = generateSidxKey(currentSidxInfo);
9153
9154 if (!oldSidxMapping[key]) {
9155 break;
9156 }
9157
9158 var savedSidxInfo = oldSidxMapping[key].sidxInfo;
9159
9160 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
9161 newSidxMapping[key] = oldSidxMapping[key];
9162 }
9163 }
9164 }
9165
9166 return newSidxMapping;
9167 };
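// Worked example (hypothetical values): two sidx references are equivalent
// only when their URIs and byteranges line up (and their init-segment maps,
// when present, do too), which is what lets compareSidxEntry carry previously
// fetched sidx data across refreshes:
//
//   equivalentSidx(
//     { uri: 'v.mp4', byterange: { offset: 0, length: 600 } },
//     { uri: 'v.mp4', byterange: { offset: 0, length: 600 } }
//   ); // => true, so the cached entry is kept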
9168 /**
9169 * A function that filters out changed items as they need to be requested separately.
9170 *
9171 * The method is exported for testing
9172 *
9173 * @param {Object} master the parsed mpd XML returned via mpd-parser
9174 * @param {Object} oldSidxMapping the SIDX to compare against
9175 */
9176
9177 var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
9178 var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
9179 var mediaGroupSidx = videoSidx;
9180 forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
9181 if (properties.playlists && properties.playlists.length) {
9182 var playlists = properties.playlists;
9183 mediaGroupSidx = mergeOptions(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
9184 }
9185 });
9186 return mediaGroupSidx;
9187 };
9188
9189 var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
9190 inheritsLoose(DashPlaylistLoader, _EventTarget);
9191
9192 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
9193 // playlist loader setups from media groups will expect to be able to pass a playlist
9194 // (since there aren't external URLs to media playlists with DASH)
9195 function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
9196 var _this;
9197
9198 if (options === void 0) {
9199 options = {};
9200 }
9201
9202 _this = _EventTarget.call(this) || this;
9203 _this.masterPlaylistLoader_ = masterPlaylistLoader || assertThisInitialized(_this);
9204
9205 if (!masterPlaylistLoader) {
9206 _this.isMaster_ = true;
9207 }
9208
9209 var _options = options,
9210 _options$withCredenti = _options.withCredentials,
9211 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
9212 _options$handleManife = _options.handleManifestRedirects,
9213 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
9214 _this.vhs_ = vhs;
9215 _this.withCredentials = withCredentials;
9216 _this.handleManifestRedirects = handleManifestRedirects;
9217
9218 if (!srcUrlOrPlaylist) {
9219 throw new Error('A non-empty playlist URL or object is required');
9220 } // event naming?
9221
9222
9223 _this.on('minimumUpdatePeriod', function () {
9224 _this.refreshXml_();
9225 }); // live playlist staleness timeout
9226
9227
9228 _this.on('mediaupdatetimeout', function () {
9229 _this.refreshMedia_(_this.media().id);
9230 });
9231
9232 _this.state = 'HAVE_NOTHING';
9233 _this.loadedPlaylists_ = {};
9234 _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
9235 // The masterPlaylistLoader will be created with a string
9236
9237 if (_this.isMaster_) {
9238 _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
9239 // once multi-period is refactored
9240
9241 _this.masterPlaylistLoader_.sidxMapping_ = {};
9242 } else {
9243 _this.childPlaylist_ = srcUrlOrPlaylist;
9244 }
9245
9246 return _this;
9247 }
9248
9249 var _proto = DashPlaylistLoader.prototype;
9250
9251 _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
9252 // disposed
9253 if (!this.request) {
9254 return true;
9255 } // pending request is cleared
9256
9257
9258 this.request = null;
9259
9260 if (err) {
9261 // use the provided error object or create one
9262 // based on the request/response
9263 this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
9264 status: request.status,
9265 message: 'DASH request error at URL: ' + request.uri,
9266 response: request.response,
9267 // MEDIA_ERR_NETWORK
9268 code: 2
9269 };
9270
9271 if (startingState) {
9272 this.state = startingState;
9273 }
9274
9275 this.trigger('error');
9276 return true;
9277 }
9278 }
9279 /**
9280 * Verify that the container of the sidx segment can be parsed
9281 * and if it can, get and parse that segment.
9282 */
9283 ;
9284
9285 _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
9286 var _this2 = this;
9287
9288 var sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
9289
9290 if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
9291 // keep this function async
9292 this.mediaRequest_ = window.setTimeout(function () {
9293 return cb(false);
9294 }, 0);
9295 return;
9296 } // resolve the segment URL relative to the playlist
9297
9298
9299 var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
9300
9301 var fin = function fin(err, request) {
9302 if (_this2.requestErrored_(err, request, startingState)) {
9303 return;
9304 }
9305
9306 var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
9307 var sidx;
9308
9309 try {
9310 sidx = parseSidx_1(toUint8(request.response).subarray(8));
9311 } catch (e) {
9312 // sidx parsing failed.
9313 _this2.requestErrored_(e, request, startingState);
9314
9315 return;
9316 }
9317
9318 sidxMapping[sidxKey] = {
9319 sidxInfo: playlist.sidx,
9320 sidx: sidx
9321 };
9322 addSidxSegmentsToPlaylist$1(playlist, sidx, playlist.sidx.resolvedUri);
9323 return cb(true);
9324 };
9325
9326 this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
9327 if (err) {
9328 return fin(err, request);
9329 }
9330
9331 if (!container || container !== 'mp4') {
9332 return fin({
9333 status: request.status,
9334 message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
9335 // response is just bytes in this case
9336 // but we really don't want to return that.
9337 response: '',
9338 playlist: playlist,
9339 internal: true,
9340 blacklistDuration: Infinity,
9341 // MEDIA_ERR_NETWORK
9342 code: 2
9343 }, request);
9344 } // if we already downloaded the sidx bytes in the container request, use them
9345
9346
9347 var _playlist$sidx$bytera = playlist.sidx.byterange,
9348 offset = _playlist$sidx$bytera.offset,
9349 length = _playlist$sidx$bytera.length;
9350
9351 if (bytes.length >= length + offset) {
9352 return fin(err, {
9353 response: bytes.subarray(offset, offset + length),
9354 status: request.status,
9355 uri: request.uri
9356 });
9357 } // otherwise request sidx bytes
9358
9359
9360 _this2.request = _this2.vhs_.xhr({
9361 uri: uri,
9362 responseType: 'arraybuffer',
9363 headers: segmentXhrHeaders({
9364 byterange: playlist.sidx.byterange
9365 })
9366 }, fin);
9367 });
9368 };
9369
9370 _proto.dispose = function dispose() {
9371 this.trigger('dispose');
9372 this.stopRequest();
9373 this.loadedPlaylists_ = {};
9374 window.clearTimeout(this.minimumUpdatePeriodTimeout_);
9375 window.clearTimeout(this.mediaRequest_);
9376 window.clearTimeout(this.mediaUpdateTimeout);
9377 this.mediaUpdateTimeout = null;
9378 this.mediaRequest_ = null;
9379 this.minimumUpdatePeriodTimeout_ = null;
9380
9381 if (this.masterPlaylistLoader_.createMupOnMedia_) {
9382 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
9383 this.masterPlaylistLoader_.createMupOnMedia_ = null;
9384 }
9385
9386 this.off();
9387 };
9388
9389 _proto.hasPendingRequest = function hasPendingRequest() {
9390 return this.request || this.mediaRequest_;
9391 };
9392
9393 _proto.stopRequest = function stopRequest() {
9394 if (this.request) {
9395 var oldRequest = this.request;
9396 this.request = null;
9397 oldRequest.onreadystatechange = null;
9398 oldRequest.abort();
9399 }
9400 };
9401
9402 _proto.media = function media(playlist) {
9403 var _this3 = this;
9404
9405 // getter
9406 if (!playlist) {
9407 return this.media_;
9408 } // setter
9409
9410
9411 if (this.state === 'HAVE_NOTHING') {
9412 throw new Error('Cannot switch media playlist from ' + this.state);
9413 }
9414
9415 var startingState = this.state; // find the playlist object if the target playlist has been specified by URI
9416
9417 if (typeof playlist === 'string') {
9418 if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
9419 throw new Error('Unknown playlist URI: ' + playlist);
9420 }
9421
9422 playlist = this.masterPlaylistLoader_.master.playlists[playlist];
9423 }
9424
9425 var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
9426
9427 if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
9428 this.state = 'HAVE_METADATA';
9429 this.media_ = playlist; // trigger media change if the active media has been updated
9430
9431 if (mediaChange) {
9432 this.trigger('mediachanging');
9433 this.trigger('mediachange');
9434 }
9435
9436 return;
9437 } // switching to the active playlist is a no-op
9438
9439
9440 if (!mediaChange) {
9441 return;
9442 } // switching from an already loaded playlist
9443
9444
9445 if (this.media_) {
9446 this.trigger('mediachanging');
9447 }
9448
9449 this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
9450 // everything is ready just continue to haveMetadata
9451 _this3.haveMetadata({
9452 startingState: startingState,
9453 playlist: playlist
9454 });
9455 });
9456 };
9457
9458 _proto.haveMetadata = function haveMetadata(_ref2) {
9459 var startingState = _ref2.startingState,
9460 playlist = _ref2.playlist;
9461 this.state = 'HAVE_METADATA';
9462 this.loadedPlaylists_[playlist.id] = playlist;
9463 this.mediaRequest_ = null; // This will trigger loadedplaylist
9464
9465 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
9466 // to resolve setup of media groups
9467
9468 if (startingState === 'HAVE_MASTER') {
9469 this.trigger('loadedmetadata');
9470 } else {
9471 // trigger media change if the active media has been updated
9472 this.trigger('mediachange');
9473 }
9474 };
9475
9476 _proto.pause = function pause() {
9477 if (this.masterPlaylistLoader_.createMupOnMedia_) {
9478 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
9479 this.masterPlaylistLoader_.createMupOnMedia_ = null;
9480 }
9481
9482 this.stopRequest();
9483 window.clearTimeout(this.mediaUpdateTimeout);
9484 this.mediaUpdateTimeout = null;
9485
9486 if (this.isMaster_) {
9487 window.clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
9488 this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
9489 }
9490
9491 if (this.state === 'HAVE_NOTHING') {
9492 // If we pause the loader before any data has been retrieved, it's as if we never
9493 // started, so reset to an unstarted state.
9494 this.started = false;
9495 }
9496 };
9497
9498 _proto.load = function load(isFinalRendition) {
9499 var _this4 = this;
9500
9501 window.clearTimeout(this.mediaUpdateTimeout);
9502 this.mediaUpdateTimeout = null;
9503 var media = this.media();
9504
9505 if (isFinalRendition) {
9506 var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
9507 this.mediaUpdateTimeout = window.setTimeout(function () {
9508 return _this4.load();
9509 }, delay);
9510 return;
9511 } // because the playlists are internal to the manifest, load should either load the
9512 // main manifest, or do nothing but trigger an event
9513
9514
9515 if (!this.started) {
9516 this.start();
9517 return;
9518 }
9519
9520 if (media && !media.endList) {
9521 // Check to see if this is the master loader and the MUP was cleared (this happens
9522 // when the loader was paused). `media` should be set at this point since one is always
9523 // set during `start()`.
9524 if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
9525 // Trigger minimumUpdatePeriod to refresh the master manifest
9526 this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated
9527
9528 this.updateMinimumUpdatePeriodTimeout_();
9529 }
9530
9531 this.trigger('mediaupdatetimeout');
9532 } else {
9533 this.trigger('loadedplaylist');
9534 }
9535 };
9536
9537 _proto.start = function start() {
9538 var _this5 = this;
9539
9540 this.started = true; // We don't need to request the master manifest again
9541 // Call this asynchronously to match the xhr request behavior below
9542
9543 if (!this.isMaster_) {
9544 this.mediaRequest_ = window.setTimeout(function () {
9545 return _this5.haveMaster_();
9546 }, 0);
9547 return;
9548 }
9549
9550 this.requestMaster_(function (req, masterChanged) {
9551 _this5.haveMaster_();
9552
9553 if (!_this5.hasPendingRequest() && !_this5.media_) {
9554 _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
9555 }
9556 });
9557 };
9558
9559 _proto.requestMaster_ = function requestMaster_(cb) {
9560 var _this6 = this;
9561
9562 this.request = this.vhs_.xhr({
9563 uri: this.masterPlaylistLoader_.srcUrl,
9564 withCredentials: this.withCredentials
9565 }, function (error, req) {
9566 if (_this6.requestErrored_(error, req)) {
9567 if (_this6.state === 'HAVE_NOTHING') {
9568 _this6.started = false;
9569 }
9570
9571 return;
9572 }
9573
9574 var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
9575 _this6.masterPlaylistLoader_.masterXml_ = req.responseText;
9576
9577 if (req.responseHeaders && req.responseHeaders.date) {
9578 _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
9579 } else {
9580 _this6.masterLoaded_ = Date.now();
9581 }
9582
9583 _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);
9584
9585 if (masterChanged) {
9586 _this6.handleMaster_();
9587
9588 _this6.syncClientServerClock_(function () {
9589 return cb(req, masterChanged);
9590 });
9591
9592 return;
9593 }
9594
9595 return cb(req, masterChanged);
9596 });
9597 }
9598 /**
9599 * Parses the master xml for a UTCTiming node to sync the client clock to the server
9600 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
9601 *
9602 * @param {Function} done
9603 * Function to call when clock sync has completed
9604 */
9605 ;
9606
9607 _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
9608 var _this7 = this;
9609
9610 var utcTiming = parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
9611 // server clock
9612
9613 if (utcTiming === null) {
9614 this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
9615 return done();
9616 }
9617
9618 if (utcTiming.method === 'DIRECT') {
9619 this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
9620 return done();
9621 }
9622
9623 this.request = this.vhs_.xhr({
9624 uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
9625 method: utcTiming.method,
9626 withCredentials: this.withCredentials
9627 }, function (error, req) {
9628 // disposed
9629 if (!_this7.request) {
9630 return;
9631 }
9632
9633 if (error) {
9634 // sync request failed, fall back to using date header from mpd
9635 // TODO: log warning
9636 _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
9637 return done();
9638 }
9639
9640 var serverTime;
9641
9642 if (utcTiming.method === 'HEAD') {
9643 if (!req.responseHeaders || !req.responseHeaders.date) {
9644 // expected date header not present, fall back to using date header from mpd
9645 // TODO: log warning
9646 serverTime = _this7.masterLoaded_;
9647 } else {
9648 serverTime = Date.parse(req.responseHeaders.date);
9649 }
9650 } else {
9651 serverTime = Date.parse(req.responseText);
9652 }
9653
9654 _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
9655 done();
9656 });
9657 };
9658
9659 _proto.haveMaster_ = function haveMaster_() {
9660 this.state = 'HAVE_MASTER';
9661
9662 if (this.isMaster_) {
9663 // We have the master playlist at this point, so
9664 // trigger this to allow MasterPlaylistController
9665 // to make an initial playlist selection
9666 this.trigger('loadedplaylist');
9667 } else if (!this.media_) {
9668 // no media playlist was specifically selected so select
9669 // the one the child playlist loader was created with
9670 this.media(this.childPlaylist_);
9671 }
9672 };
9673
9674 _proto.handleMaster_ = function handleMaster_() {
9675 // clear media request
9676 this.mediaRequest_ = null;
9677 var oldMaster = this.masterPlaylistLoader_.master;
9678 var newMaster = parseMasterXml({
9679 masterXml: this.masterPlaylistLoader_.masterXml_,
9680 srcUrl: this.masterPlaylistLoader_.srcUrl,
9681 clientOffset: this.masterPlaylistLoader_.clientOffset_,
9682 sidxMapping: this.masterPlaylistLoader_.sidxMapping_,
9683 previousManifest: oldMaster
9684 }); // if we have an old master to compare the new master against
9685
9686 if (oldMaster) {
9687 newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
9688 } // only update master if we have a new master
9689
9690
9691 this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
9692 var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
9693
9694 if (location && location !== this.masterPlaylistLoader_.srcUrl) {
9695 this.masterPlaylistLoader_.srcUrl = location;
9696 }
9697
9698 if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
9699 this.updateMinimumUpdatePeriodTimeout_();
9700 }
9701
9702 return Boolean(newMaster);
9703 };
9704
9705 _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
9706 var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
9707 // a new one will be added if needed.
9708
9709 if (mpl.createMupOnMedia_) {
9710 mpl.off('loadedmetadata', mpl.createMupOnMedia_);
9711 mpl.createMupOnMedia_ = null;
9712 } // clear any pending timeouts
9713
9714
9715 if (mpl.minimumUpdatePeriodTimeout_) {
9716 window.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
9717 mpl.minimumUpdatePeriodTimeout_ = null;
9718 }
9719
9720 var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
9721 // MPD has no future validity, so a new one will need to be acquired when new
9722 // media segments are to be made available. Thus, we use the target duration
9723 // in this case
9724
9725 if (mup === 0) {
9726 if (mpl.media()) {
9727 mup = mpl.media().targetDuration * 1000;
9728 } else {
9729 mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
9730 mpl.one('loadedmetadata', mpl.createMupOnMedia_);
9731 }
9732 } // if minimumUpdatePeriod is invalid or <= zero, which
9733 // can happen when a live video becomes VOD, skip timeout
9734 // creation.
9735
9736
9737 if (typeof mup !== 'number' || mup <= 0) {
9738 if (mup < 0) {
9739 this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
9740 }
9741
9742 return;
9743 }
9744
9745 this.createMUPTimeout_(mup);
9746 };
9747
9748 _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
9749 var mpl = this.masterPlaylistLoader_;
9750 mpl.minimumUpdatePeriodTimeout_ = window.setTimeout(function () {
9751 mpl.minimumUpdatePeriodTimeout_ = null;
9752 mpl.trigger('minimumUpdatePeriod');
9753 mpl.createMUPTimeout_(mup);
9754 }, mup);
9755 }
9756 /**
9757 * Sends request to refresh the master xml and updates the parsed master manifest
9758 */
9759 ;
9760
9761 _proto.refreshXml_ = function refreshXml_() {
9762 var _this8 = this;
9763
9764 this.requestMaster_(function (req, masterChanged) {
9765 if (!masterChanged) {
9766 return;
9767 }
9768
9769 if (_this8.media_) {
9770 _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
9771 } // This will filter out updated sidx info from the mapping
9772
9773
9774 _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);
9775
9776 _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
9777 // TODO: do we need to reload the current playlist?
9778 _this8.refreshMedia_(_this8.media().id);
9779 });
9780 });
9781 }
9782 /**
9783 * Refreshes the media playlist by re-parsing the master xml and updating playlist
9784 * references. If this is an alternate loader, the updated parsed manifest is retrieved
9785 * from the master loader.
9786 */
9787 ;
9788
9789 _proto.refreshMedia_ = function refreshMedia_(mediaID) {
9790 var _this9 = this;
9791
9792 if (!mediaID) {
9793 throw new Error('refreshMedia_ must take a media id');
9794 } // for master we have to reparse the master xml
9795 // to re-create segments based on current timing values,
9796 // which may change media. We only skip updating master
9797 // if this is the first time this.media_ is being set,
9798 // as master was just parsed in that case.
9799
9800
9801 if (this.media_ && this.isMaster_) {
9802 this.handleMaster_();
9803 }
9804
9805 var playlists = this.masterPlaylistLoader_.master.playlists;
9806 var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
9807
9808 if (mediaChanged) {
9809 this.media_ = playlists[mediaID];
9810 } else {
9811 this.trigger('playlistunchanged');
9812 }
9813
9814 if (!this.mediaUpdateTimeout) {
9815 var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
9816 if (_this9.media().endList) {
9817 return;
9818 }
9819
9820 _this9.mediaUpdateTimeout = window.setTimeout(function () {
9821 _this9.trigger('mediaupdatetimeout');
9822
9823 createMediaUpdateTimeout();
9824 }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
9825 };
9826
9827 createMediaUpdateTimeout();
9828 }
9829
9830 this.trigger('loadedplaylist');
9831 };
9832
9833 return DashPlaylistLoader;
9834 }(EventTarget);
9835
9836 var Config = {
9837 GOAL_BUFFER_LENGTH: 30,
9838 MAX_GOAL_BUFFER_LENGTH: 60,
9839 BACK_BUFFER_LENGTH: 30,
9840 GOAL_BUFFER_LENGTH_RATE: 1,
9841 // 0.5 MB/s
9842 INITIAL_BANDWIDTH: 4194304,
9843 // A fudge factor to apply to advertised playlist bitrates to account for
9844 // temporary fluctuations in client bandwidth
9845 BANDWIDTH_VARIANCE: 1.2,
9846 // How much of the buffer must be filled before we consider upswitching
9847 BUFFER_LOW_WATER_LINE: 0,
9848 MAX_BUFFER_LOW_WATER_LINE: 30,
9849 // TODO: Remove this when experimentalBufferBasedABR is removed
9850 EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
9851 BUFFER_LOW_WATER_LINE_RATE: 1,
9852 // If the buffer is greater than the high water line, we won't switch down
9853 BUFFER_HIGH_WATER_LINE: 30
9854 };
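// Usage sketch (hedged): these defaults are also exposed on the Vhs object, so
// a player can tune them at runtime before playback starts, e.g.
//
//   videojs.Vhs.GOAL_BUFFER_LENGTH = 60; // buffer further ahead of the playhead
//
// (the runtime property names are assumed to mirror the keys above).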
9855
9856 var stringToArrayBuffer = function stringToArrayBuffer(string) {
9857 var view = new Uint8Array(new ArrayBuffer(string.length));
9858
9859 for (var i = 0; i < string.length; i++) {
9860 view[i] = string.charCodeAt(i);
9861 }
9862
9863 return view.buffer;
9864 };
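// Worked example: stringToArrayBuffer('abc') returns an ArrayBuffer whose
// bytes are [0x61, 0x62, 0x63]; one byte per UTF-16 code unit, so code points
// above 0xFF are truncated, which is fine for the binary strings used here.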
9865
9866 /* global Blob, BlobBuilder, Worker */
9867 // unify worker interface
9868 var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
9869 // node only supports on/off
9870 workerObj.on = workerObj.addEventListener;
9871 workerObj.off = workerObj.removeEventListener;
9872 return workerObj;
9873 };
9874
9875 var createObjectURL = function createObjectURL(str) {
9876 try {
9877 return URL.createObjectURL(new Blob([str], {
9878 type: 'application/javascript'
9879 }));
9880 } catch (e) {
9881 var blob = new BlobBuilder();
9882 blob.append(str);
9883 return URL.createObjectURL(blob.getBlob());
9884 }
9885 };
9886
9887 var factory = function factory(code) {
9888 return function () {
9889 var objectUrl = createObjectURL(code);
9890 var worker = browserWorkerPolyFill(new Worker(objectUrl));
9891 worker.objURL = objectUrl;
9892 var terminate = worker.terminate;
9893 worker.on = worker.addEventListener;
9894 worker.off = worker.removeEventListener;
9895
9896 worker.terminate = function () {
9897 URL.revokeObjectURL(objectUrl);
9898 return terminate.call(this);
9899 };
9900
9901 return worker;
9902 };
9903 };
9904 var transform = function transform(code) {
9905 return "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n' + code;
9906 };
9907
9908 var getWorkerString = function getWorkerString(fn) {
9909 return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
9910 };
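// Usage sketch: these helpers combine to turn a plain function into an inline
// web worker backed by a Blob URL, which is exactly how the transmuxer worker
// below is created:
//
//   var createWorker = factory(transform(getWorkerString(function () {
//     self.onmessage = function (event) { /* runs inside the worker */ };
//   })));
//   var worker = createWorker(); // has on/off aliases, revokes its URL on terminate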
9911
9912 /* rollup-plugin-worker-factory start for worker!/Users/ddashkevich/projects/vhs-release/src/transmuxer-worker.js */
9913 var workerCode$1 = transform(getWorkerString(function () {
9914 /**
9915 * mux.js
9916 *
9917 * Copyright (c) Brightcove
9918 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9919 *
9920 * A lightweight readable stream implementation that handles event dispatching.
9921 * Objects that inherit from streams should call init in their constructors.
9922 */
9923
9924 var Stream = function Stream() {
9925 this.init = function () {
9926 var listeners = {};
9927 /**
9928 * Add a listener for a specified event type.
9929 * @param type {string} the event name
9930 * @param listener {function} the callback to be invoked when an event of
9931 * the specified type occurs
9932 */
9933
9934 this.on = function (type, listener) {
9935 if (!listeners[type]) {
9936 listeners[type] = [];
9937 }
9938
9939 listeners[type] = listeners[type].concat(listener);
9940 };
9941 /**
9942 * Remove a listener for a specified event type.
9943 * @param type {string} the event name
9944 * @param listener {function} a function previously registered for this
9945 * type of event through `on`
9946 */
9947
9948
9949 this.off = function (type, listener) {
9950 var index;
9951
9952 if (!listeners[type]) {
9953 return false;
9954 }
9955
9956 index = listeners[type].indexOf(listener);
9957 listeners[type] = listeners[type].slice();
9958 listeners[type].splice(index, 1);
9959 return index > -1;
9960 };
9961 /**
9962 * Trigger an event of the specified type on this stream. Any additional
9963 * arguments to this function are passed as parameters to event listeners.
9964 * @param type {string} the event name
9965 */
9966
9967
9968 this.trigger = function (type) {
9969 var callbacks, i, length, args;
9970 callbacks = listeners[type];
9971
9972 if (!callbacks) {
9973 return;
9974 } // Slicing the arguments on every invocation of this method
9975 // can add a significant amount of overhead. Avoid the
9976 // intermediate object creation for the common case of a
9977 // single callback argument
9978
9979
9980 if (arguments.length === 2) {
9981 length = callbacks.length;
9982
9983 for (i = 0; i < length; ++i) {
9984 callbacks[i].call(this, arguments[1]);
9985 }
9986 } else {
9987 args = [];
9988 i = arguments.length;
9989
9990 for (i = 1; i < arguments.length; ++i) {
9991 args.push(arguments[i]);
9992 }
9993
9994 length = callbacks.length;
9995
9996 for (i = 0; i < length; ++i) {
9997 callbacks[i].apply(this, args);
9998 }
9999 }
10000 };
10001 /**
10002 * Destroys the stream and cleans up.
10003 */
10004
10005
10006 this.dispose = function () {
10007 listeners = {};
10008 };
10009 };
10010 };
10011 /**
10012 * Forwards all `data` events on this stream to the destination stream. The
10013 * destination stream should provide a method `push` to receive the data
10014 * events as they arrive.
10015 * @param destination {stream} the stream that will receive all `data` events
10016 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
10017 * when the current stream emits a 'done' event
10018 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
10019 */
10020
10021
10022 Stream.prototype.pipe = function (destination) {
10023 this.on('data', function (data) {
10024 destination.push(data);
10025 });
10026 this.on('done', function (flushSource) {
10027 destination.flush(flushSource);
10028 });
10029 this.on('partialdone', function (flushSource) {
10030 destination.partialFlush(flushSource);
10031 });
10032 this.on('endedtimeline', function (flushSource) {
10033 destination.endTimeline(flushSource);
10034 });
10035 this.on('reset', function (flushSource) {
10036 destination.reset(flushSource);
10037 });
10038 return destination;
10039 }; // Default stream functions that are expected to be overridden to perform
10040 // actual work. These are provided by the prototype as a sort of no-op
10041 // implementation so that we don't have to check for their existence in the
10042 // `pipe` function above.
10043
10044
10045 Stream.prototype.push = function (data) {
10046 this.trigger('data', data);
10047 };
10048
10049 Stream.prototype.flush = function (flushSource) {
10050 this.trigger('done', flushSource);
10051 };
10052
10053 Stream.prototype.partialFlush = function (flushSource) {
10054 this.trigger('partialdone', flushSource);
10055 };
10056
10057 Stream.prototype.endTimeline = function (flushSource) {
10058 this.trigger('endedtimeline', flushSource);
10059 };
10060
10061 Stream.prototype.reset = function (flushSource) {
10062 this.trigger('reset', flushSource);
10063 };
10064
10065 var stream = Stream;
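// Usage sketch: streams are wired together with pipe() and pass data along via
// events; each instance must call init() before use, since the constructor
// only defines it:
//
//   var source = new Stream(); source.init();
//   var sink = new Stream(); sink.init();
//   sink.push = function (data) { /* consume transmuxed data */ };
//   source.pipe(sink);
//   source.push('sample'); // delivered to sink.push via the 'data' event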
10066 var MAX_UINT32$1 = Math.pow(2, 32);
10067
10068 var getUint64$2 = function getUint64(uint8) {
10069 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
10070 var value;
10071
10072 if (dv.getBigUint64) {
10073 value = dv.getBigUint64(0);
10074
10075 if (value < Number.MAX_SAFE_INTEGER) {
10076 return Number(value);
10077 }
10078
10079 return value;
10080 }
10081
10082 return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);
10083 };
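// Worked example: over the big-endian bytes [0, 0, 0, 0, 0, 0, 1, 0] getUint64
// returns 256; values beyond Number.MAX_SAFE_INTEGER come back as a BigInt
// when DataView.getBigUint64 is available, otherwise precision may be lost in
// the 32-bit fallback arithmetic.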
10084
10085 var numbers = {
10086 getUint64: getUint64$2,
10087 MAX_UINT32: MAX_UINT32$1
10088 };
10089 var MAX_UINT32 = numbers.MAX_UINT32;
10090 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
10091
10092 (function () {
10093 var i;
10094 types = {
10095 avc1: [],
10096 // codingname
10097 avcC: [],
10098 btrt: [],
10099 dinf: [],
10100 dref: [],
10101 esds: [],
10102 ftyp: [],
10103 hdlr: [],
10104 mdat: [],
10105 mdhd: [],
10106 mdia: [],
10107 mfhd: [],
10108 minf: [],
10109 moof: [],
10110 moov: [],
10111 mp4a: [],
10112 // codingname
10113 mvex: [],
10114 mvhd: [],
10115 pasp: [],
10116 sdtp: [],
10117 smhd: [],
10118 stbl: [],
10119 stco: [],
10120 stsc: [],
10121 stsd: [],
10122 stsz: [],
10123 stts: [],
10124 styp: [],
10125 tfdt: [],
10126 tfhd: [],
10127 traf: [],
10128 trak: [],
10129 trun: [],
10130 trex: [],
10131 tkhd: [],
10132 vmhd: []
10133 }; // In environments where Uint8Array is undefined (e.g., IE8), skip setup so that we
10134 // don't throw an error
10135
10136 if (typeof Uint8Array === 'undefined') {
10137 return;
10138 }
10139
10140 for (i in types) {
10141 if (types.hasOwnProperty(i)) {
10142 types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
10143 }
10144 }
10145
10146 MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
10147 AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
10148 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
10149 VIDEO_HDLR = new Uint8Array([0x00, // version 0
10150 0x00, 0x00, 0x00, // flags
10151 0x00, 0x00, 0x00, 0x00, // pre_defined
10152 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
10153 0x00, 0x00, 0x00, 0x00, // reserved
10154 0x00, 0x00, 0x00, 0x00, // reserved
10155 0x00, 0x00, 0x00, 0x00, // reserved
10156 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
10157 ]);
10158 AUDIO_HDLR = new Uint8Array([0x00, // version 0
10159 0x00, 0x00, 0x00, // flags
10160 0x00, 0x00, 0x00, 0x00, // pre_defined
10161 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
10162 0x00, 0x00, 0x00, 0x00, // reserved
10163 0x00, 0x00, 0x00, 0x00, // reserved
10164 0x00, 0x00, 0x00, 0x00, // reserved
10165 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
10166 ]);
10167 HDLR_TYPES = {
10168 video: VIDEO_HDLR,
10169 audio: AUDIO_HDLR
10170 };
10171 DREF = new Uint8Array([0x00, // version 0
10172 0x00, 0x00, 0x00, // flags
10173 0x00, 0x00, 0x00, 0x01, // entry_count
10174 0x00, 0x00, 0x00, 0x0c, // entry_size
10175 0x75, 0x72, 0x6c, 0x20, // 'url' type
10176 0x00, // version 0
10177 0x00, 0x00, 0x01 // entry_flags
10178 ]);
10179 SMHD = new Uint8Array([0x00, // version
10180 0x00, 0x00, 0x00, // flags
10181 0x00, 0x00, // balance, 0 means centered
10182 0x00, 0x00 // reserved
10183 ]);
10184 STCO = new Uint8Array([0x00, // version
10185 0x00, 0x00, 0x00, // flags
10186 0x00, 0x00, 0x00, 0x00 // entry_count
10187 ]);
10188 STSC = STCO;
10189 STSZ = new Uint8Array([0x00, // version
10190 0x00, 0x00, 0x00, // flags
10191 0x00, 0x00, 0x00, 0x00, // sample_size
10192 0x00, 0x00, 0x00, 0x00 // sample_count
10193 ]);
10194 STTS = STCO;
10195 VMHD = new Uint8Array([0x00, // version
10196 0x00, 0x00, 0x01, // flags
10197 0x00, 0x00, // graphicsmode
10198 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
10199 ]);
10200 })();
10201
10202 box = function box(type) {
10203 var payload = [],
10204 size = 0,
10205 i,
10206 result,
10207 view;
10208
10209 for (i = 1; i < arguments.length; i++) {
10210 payload.push(arguments[i]);
10211 }
10212
10213 i = payload.length; // calculate the total size we need to allocate
10214
10215 while (i--) {
10216 size += payload[i].byteLength;
10217 }
10218
10219 result = new Uint8Array(size + 8);
10220 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
10221 view.setUint32(0, result.byteLength);
10222 result.set(type, 4); // copy the payload into the result
10223
10224 for (i = 0, size = 8; i < payload.length; i++) {
10225 result.set(payload[i], size);
10226 size += payload[i].byteLength;
10227 }
10228
10229 return result;
10230 };
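// Worked example: every ISO BMFF box is length-prefixed, so
//
//   box(types.ftyp, MINOR_VERSION)
//
// yields 12 bytes: [0x00, 0x00, 0x00, 0x0c] (size), 'ftyp' (type), then the
// 4-byte payload.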
10231
10232 dinf = function dinf() {
10233 return box(types.dinf, box(types.dref, DREF));
10234 };
10235
10236 esds = function esds(track) {
10237 return box(types.esds, new Uint8Array([0x00, // version
10238 0x00, 0x00, 0x00, // flags
10239 // ES_Descriptor
10240 0x03, // tag, ES_DescrTag
10241 0x19, // length
10242 0x00, 0x00, // ES_ID
10243 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
10244 // DecoderConfigDescriptor
10245 0x04, // tag, DecoderConfigDescrTag
10246 0x11, // length
10247 0x40, // object type
10248 0x15, // streamType
10249 0x00, 0x06, 0x00, // bufferSizeDB
10250 0x00, 0x00, 0xda, 0xc0, // maxBitrate
10251 0x00, 0x00, 0xda, 0xc0, // avgBitrate
10252 // DecoderSpecificInfo
10253 0x05, // tag, DecoderSpecificInfoTag
10254 0x02, // length
10255 // ISO/IEC 14496-3, AudioSpecificConfig
10256 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
10257 track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
10258 ]));
10259 };
10260
10261 ftyp = function ftyp() {
10262 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
10263 };
10264
10265 hdlr = function hdlr(type) {
10266 return box(types.hdlr, HDLR_TYPES[type]);
10267 };
10268
10269 mdat = function mdat(data) {
10270 return box(types.mdat, data);
10271 };
10272
10273 mdhd = function mdhd(track) {
10274 var result = new Uint8Array([0x00, // version 0
10275 0x00, 0x00, 0x00, // flags
10276 0x00, 0x00, 0x00, 0x02, // creation_time
10277 0x00, 0x00, 0x00, 0x03, // modification_time
10278 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
10279 track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
10280 0x55, 0xc4, // 'und' language (undetermined)
10281 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
10282 // defined. The sample rate can be parsed out of an ADTS header, for
10283 // instance.
10284
10285 if (track.samplerate) {
10286 result[12] = track.samplerate >>> 24 & 0xFF;
10287 result[13] = track.samplerate >>> 16 & 0xFF;
10288 result[14] = track.samplerate >>> 8 & 0xFF;
10289 result[15] = track.samplerate & 0xFF;
10290 }
10291
10292 return box(types.mdhd, result);
10293 };
10294
10295 mdia = function mdia(track) {
10296 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
10297 };
10298
10299 mfhd = function mfhd(sequenceNumber) {
10300 return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
10301 (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
10302 ]));
10303 };
10304
10305 minf = function minf(track) {
10306 return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
10307 };
10308
10309 moof = function moof(sequenceNumber, tracks) {
10310 var trackFragments = [],
10311 i = tracks.length; // build traf boxes for each track fragment
10312
10313 while (i--) {
10314 trackFragments[i] = traf(tracks[i]);
10315 }
10316
10317 return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
10318 };
10319 /**
10320 * Returns a movie box.
10321 * @param tracks {array} the tracks associated with this movie
10322 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
10323 */
10324
10325
10326 moov = function moov(tracks) {
10327 var i = tracks.length,
10328 boxes = [];
10329
10330 while (i--) {
10331 boxes[i] = trak(tracks[i]);
10332 }
10333
10334 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
10335 };
10336
10337 mvex = function mvex(tracks) {
10338 var i = tracks.length,
10339 boxes = [];
10340
10341 while (i--) {
10342 boxes[i] = trex(tracks[i]);
10343 }
10344
10345 return box.apply(null, [types.mvex].concat(boxes));
10346 };
10347
10348 mvhd = function mvhd(duration) {
10349 var bytes = new Uint8Array([0x00, // version 0
10350 0x00, 0x00, 0x00, // flags
10351 0x00, 0x00, 0x00, 0x01, // creation_time
10352 0x00, 0x00, 0x00, 0x02, // modification_time
10353 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
10354 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
10355 0x00, 0x01, 0x00, 0x00, // 1.0 rate
10356 0x01, 0x00, // 1.0 volume
10357 0x00, 0x00, // reserved
10358 0x00, 0x00, 0x00, 0x00, // reserved
10359 0x00, 0x00, 0x00, 0x00, // reserved
10360 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
10361 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
10362 0xff, 0xff, 0xff, 0xff // next_track_ID
10363 ]);
10364 return box(types.mvhd, bytes);
10365 };
10366
10367 sdtp = function sdtp(track) {
10368 var samples = track.samples || [],
10369 bytes = new Uint8Array(4 + samples.length),
10370 flags,
10371 i; // leave the full box header (4 bytes) all zero
10372 // write the sample table
10373
10374 for (i = 0; i < samples.length; i++) {
10375 flags = samples[i].flags;
10376 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
10377 }
10378
10379 return box(types.sdtp, bytes);
10380 };
10381
10382 stbl = function stbl(track) {
10383 return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
10384 };
10385
10386 (function () {
10387 var videoSample, audioSample;
10388
10389 stsd = function stsd(track) {
10390 return box(types.stsd, new Uint8Array([0x00, // version 0
10391 0x00, 0x00, 0x00, // flags
10392 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
10393 };
10394
10395 videoSample = function videoSample(track) {
10396 var sps = track.sps || [],
10397 pps = track.pps || [],
10398 sequenceParameterSets = [],
10399 pictureParameterSets = [],
10400 i,
10401 avc1Box; // assemble the SPSs
10402
10403 for (i = 0; i < sps.length; i++) {
10404 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
10405 sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
10406
10407 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
10408 } // assemble the PPSs
10409
10410
10411 for (i = 0; i < pps.length; i++) {
10412 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
10413 pictureParameterSets.push(pps[i].byteLength & 0xFF);
10414 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
10415 }
10416
10417 avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
10418 0x00, 0x01, // data_reference_index
10419 0x00, 0x00, // pre_defined
10420 0x00, 0x00, // reserved
10421 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
10422 (track.width & 0xff00) >> 8, track.width & 0xff, // width
10423 (track.height & 0xff00) >> 8, track.height & 0xff, // height
10424 0x00, 0x48, 0x00, 0x00, // horizresolution
10425 0x00, 0x48, 0x00, 0x00, // vertresolution
10426 0x00, 0x00, 0x00, 0x00, // reserved
10427 0x00, 0x01, // frame_count
10428 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
10429 0x00, 0x18, // depth = 24
10430 0x11, 0x11 // pre_defined (the spec's -1 would encode as 0xff, 0xff)
10431 ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
10432 track.profileIdc, // AVCProfileIndication
10433 track.profileCompatibility, // profile_compatibility
10434 track.levelIdc, // AVCLevelIndication
10435 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
10436 ].concat([sps.length], // numOfSequenceParameterSets
10437 sequenceParameterSets, // "SPS"
10438 [pps.length], // numOfPictureParameterSets
10439 pictureParameterSets // "PPS"
10440 ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
10441 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
10442 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
10443 ]))];
10444
10445 if (track.sarRatio) {
10446 var hSpacing = track.sarRatio[0],
10447 vSpacing = track.sarRatio[1];
10448 avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
10449 }
10450
10451 return box.apply(null, avc1Box);
10452 };
10453
10454 audioSample = function audioSample(track) {
10455 return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
10456 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
10457 0x00, 0x01, // data_reference_index
10458 // AudioSampleEntry, ISO/IEC 14496-12
10459 0x00, 0x00, 0x00, 0x00, // reserved
10460 0x00, 0x00, 0x00, 0x00, // reserved
10461 (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
10462 (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
10463 0x00, 0x00, // pre_defined
10464 0x00, 0x00, // reserved
10465 (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
10466 // MP4AudioSampleEntry, ISO/IEC 14496-14
10467 ]), esds(track));
10468 };
10469 })();
10470
10471 tkhd = function tkhd(track) {
10472 var result = new Uint8Array([0x00, // version 0
10473 0x00, 0x00, 0x07, // flags
10474 0x00, 0x00, 0x00, 0x00, // creation_time
10475 0x00, 0x00, 0x00, 0x00, // modification_time
10476 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
10477 0x00, 0x00, 0x00, 0x00, // reserved
10478 (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
10479 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
10480 0x00, 0x00, // layer
10481 0x00, 0x00, // alternate_group
10482 0x01, 0x00, // non-audio track volume
10483 0x00, 0x00, // reserved
10484 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
10485 (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
10486 (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
10487 ]);
10488 return box(types.tkhd, result);
10489 };
10490 /**
10491 * Generate a track fragment (traf) box. A traf box collects metadata
10492 * about tracks in a movie fragment (moof) box.
10493 */
10494
10495
10496 traf = function traf(track) {
10497 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
10498 trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
10499 0x00, 0x00, 0x3a, // flags
10500 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
10501 0x00, 0x00, 0x00, 0x01, // sample_description_index
10502 0x00, 0x00, 0x00, 0x00, // default_sample_duration
10503 0x00, 0x00, 0x00, 0x00, // default_sample_size
10504 0x00, 0x00, 0x00, 0x00 // default_sample_flags
10505 ]));
10506 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);
10507 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);
10508 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
10509 0x00, 0x00, 0x00, // flags
10510 // baseMediaDecodeTime
10511 upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
10512 // the containing moof to the first payload byte of the associated
10513 // mdat
10514
10515 dataOffset = 32 + // tfhd
10516 20 + // tfdt
10517 8 + // traf header
10518 16 + // mfhd
10519 8 + // moof header
10520 8; // mdat header
10521 // audio tracks require less metadata
10522
10523 if (track.type === 'audio') {
10524 trackFragmentRun = trun$1(track, dataOffset);
10525 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
10526 } // video tracks should contain an independent and disposable samples
10527 // box (sdtp)
10528 // generate one and adjust offsets to match
10529
10530
10531 sampleDependencyTable = sdtp(track);
10532 trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
10533 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
10534 };
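// Worked sum (illustrative): the fixed metadata ahead of the mdat payload is
// 32 (tfhd) + 20 (tfdt) + 8 (traf header) + 16 (mfhd) + 8 (moof header) +
// 8 (mdat header) = 92 bytes, so an audio trun's data_offset starts at 92;
// a video trun additionally adds the length of the sdtp box.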
10535 /**
10536 * Generate a track box.
10537 * @param track {object} a track definition
10538 * @return {Uint8Array} the track box
10539 */
10540
10541
10542 trak = function trak(track) {
10543 track.duration = track.duration || 0xffffffff;
10544 return box(types.trak, tkhd(track), mdia(track));
10545 };
10546
10547 trex = function trex(track) {
10548 var result = new Uint8Array([0x00, // version 0
10549 0x00, 0x00, 0x00, // flags
10550 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
10551 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
10552 0x00, 0x00, 0x00, 0x00, // default_sample_duration
10553 0x00, 0x00, 0x00, 0x00, // default_sample_size
10554 0x00, 0x01, 0x00, 0x01 // default_sample_flags
10555 ]); // the last two bytes of default_sample_flags is the sample
10556 // degradation priority, a hint about the importance of this sample
10557 // relative to others. Lower the degradation priority for all sample
10558 // types other than video.
10559
10560 if (track.type !== 'video') {
10561 result[result.length - 1] = 0x00;
10562 }
10563
10564 return box(types.trex, result);
10565 };
10566
10567 (function () {
10568 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
10569 // duration is present for the first sample, it will be present for
10570 // all subsequent samples.
10571 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
10572
10573 trunHeader = function trunHeader(samples, offset) {
10574 var durationPresent = 0,
10575 sizePresent = 0,
10576 flagsPresent = 0,
10577 compositionTimeOffset = 0; // trun flag constants
10578
10579 if (samples.length) {
10580 if (samples[0].duration !== undefined) {
10581 durationPresent = 0x1;
10582 }
10583
10584 if (samples[0].size !== undefined) {
10585 sizePresent = 0x2;
10586 }
10587
10588 if (samples[0].flags !== undefined) {
10589 flagsPresent = 0x4;
10590 }
10591
10592 if (samples[0].compositionTimeOffset !== undefined) {
10593 compositionTimeOffset = 0x8;
10594 }
10595 }
10596
10597 return [0x00, // version 0
10598 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
10599 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
10600 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
10601 ];
10602 };
10603
10604 videoTrun = function videoTrun(track, offset) {
10605 var bytesOffset, bytes, header, samples, sample, i;
10606 samples = track.samples || [];
10607 offset += 8 + 12 + 16 * samples.length;
10608 header = trunHeader(samples, offset);
10609 bytes = new Uint8Array(header.length + samples.length * 16);
10610 bytes.set(header);
10611 bytesOffset = header.length;
10612
10613 for (i = 0; i < samples.length; i++) {
10614 sample = samples[i];
10615 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
10616 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
10617 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
10618 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
10619
10620 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
10621 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
10622 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
10623 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
10624
10625 bytes[bytesOffset++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
10626 bytes[bytesOffset++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
10627 bytes[bytesOffset++] = (sample.flags.degradationPriority & 0xFF00) >>> 8;
10628 bytes[bytesOffset++] = sample.flags.degradationPriority & 0xFF; // sample_flags
10629
10630 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
10631 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
10632 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
10633 bytes[bytesOffset++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
10634 }
10635
10636 return box(types.trun, bytes);
10637 };
10638
10639 audioTrun = function audioTrun(track, offset) {
10640 var bytes, bytesOffset, header, samples, sample, i;
10641 samples = track.samples || [];
10642 offset += 8 + 12 + 8 * samples.length;
10643 header = trunHeader(samples, offset);
10644 bytes = new Uint8Array(header.length + samples.length * 8);
10645 bytes.set(header);
10646 bytesOffset = header.length;
10647
10648 for (i = 0; i < samples.length; i++) {
10649 sample = samples[i];
10650 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
10651 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
10652 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
10653 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
10654
10655 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
10656 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
10657 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
10658 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
10659 }
10660
10661 return box(types.trun, bytes);
10662 };
10663
10664 trun$1 = function trun(track, offset) {
10665 if (track.type === 'audio') {
10666 return audioTrun(track, offset);
10667 }
10668
10669 return videoTrun(track, offset);
10670 };
10671 })();
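// Worked example (illustrative): for a video track with 2 samples, the trun
// adds 8 (box header) + 12 (version/flags, sample_count, data_offset) +
// 16 * 2 (per-sample duration, size, flags and composition time offset) = 52
// bytes to the data offset; the audio variant writes only 8 bytes per sample
// (duration and size).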
10672
10673 var mp4Generator = {
10674 ftyp: ftyp,
10675 mdat: mdat,
10676 moof: moof,
10677 moov: moov,
10678 initSegment: function initSegment(tracks) {
10679 var fileType = ftyp(),
10680 movie = moov(tracks),
10681 result;
10682 result = new Uint8Array(fileType.byteLength + movie.byteLength);
10683 result.set(fileType);
10684 result.set(movie, fileType.byteLength);
10685 return result;
10686 }
10687 };
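// Illustrative usage (hypothetical track objects): an init segment is just an
// ftyp box followed by a moov box built from the transmuxer's track metadata,
// and would typically be appended ahead of any media segments:
//   var init = mp4Generator.initSegment([videoTrack, audioTrack]);
//   sourceBuffer.appendBuffer(init);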
10688 /**
10689 * mux.js
10690 *
10691 * Copyright (c) Brightcove
10692 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10693 */
10694 // Convert an array of nal units into an array of frames with each frame being
10695 // composed of the nal units that make up that frame
10696 // Also keep track of cumulative data about the frame from the nal units such
10697 // as the frame duration, starting pts, etc.
10698
10699 var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
10700 var i,
10701 currentNal,
10702 currentFrame = [],
10703 frames = []; // TODO added for LHLS, make sure this is OK
10704
10705 frames.byteLength = 0;
10706 frames.nalCount = 0;
10707 frames.duration = 0;
10708 currentFrame.byteLength = 0;
10709
10710 for (i = 0; i < nalUnits.length; i++) {
10711 currentNal = nalUnits[i]; // Split on 'aud'-type nal units
10712
10713 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
10714 // Since the very first nal unit is expected to be an AUD
10715 // only push to the frames array when currentFrame is not empty
10716 if (currentFrame.length) {
10717 currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
10718
10719 frames.byteLength += currentFrame.byteLength;
10720 frames.nalCount += currentFrame.length;
10721 frames.duration += currentFrame.duration;
10722 frames.push(currentFrame);
10723 }
10724
10725 currentFrame = [currentNal];
10726 currentFrame.byteLength = currentNal.data.byteLength;
10727 currentFrame.pts = currentNal.pts;
10728 currentFrame.dts = currentNal.dts;
10729 } else {
10730 // Specifically flag key frames for ease of use later
10731 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
10732 currentFrame.keyFrame = true;
10733 }
10734
10735 currentFrame.duration = currentNal.dts - currentFrame.dts;
10736 currentFrame.byteLength += currentNal.data.byteLength;
10737 currentFrame.push(currentNal);
10738 }
10739 } // For the last frame, use the duration of the previous frame if we
10740 // have nothing better to go on
10741
10742
10743 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
10744 currentFrame.duration = frames[frames.length - 1].duration;
10745 } // Push the final frame
10746 // TODO added for LHLS, make sure this is OK
10747
10748
10749 frames.byteLength += currentFrame.byteLength;
10750 frames.nalCount += currentFrame.length;
10751 frames.duration += currentFrame.duration;
10752 frames.push(currentFrame);
10753 return frames;
10754 }; // Convert an array of frames into an array of Gop with each Gop being composed
10755 // of the frames that make up that Gop
10756 // Also keep track of cumulative data about the Gop from the frames such as the
10757 // Gop duration, starting pts, etc.
10758
10759
10760 var groupFramesIntoGops = function groupFramesIntoGops(frames) {
10761 var i,
10762 currentFrame,
10763 currentGop = [],
10764 gops = []; // We must pre-set some of the values on the Gop since we
10765 // keep running totals of these values
10766
10767 currentGop.byteLength = 0;
10768 currentGop.nalCount = 0;
10769 currentGop.duration = 0;
10770 currentGop.pts = frames[0].pts;
10771 currentGop.dts = frames[0].dts; // store some metadata about all the Gops
10772
10773 gops.byteLength = 0;
10774 gops.nalCount = 0;
10775 gops.duration = 0;
10776 gops.pts = frames[0].pts;
10777 gops.dts = frames[0].dts;
10778
10779 for (i = 0; i < frames.length; i++) {
10780 currentFrame = frames[i];
10781
10782 if (currentFrame.keyFrame) {
10783 // Since the very first frame is expected to be a keyframe
10784 // only push to the gops array when currentGop is not empty
10785 if (currentGop.length) {
10786 gops.push(currentGop);
10787 gops.byteLength += currentGop.byteLength;
10788 gops.nalCount += currentGop.nalCount;
10789 gops.duration += currentGop.duration;
10790 }
10791
10792 currentGop = [currentFrame];
10793 currentGop.nalCount = currentFrame.length;
10794 currentGop.byteLength = currentFrame.byteLength;
10795 currentGop.pts = currentFrame.pts;
10796 currentGop.dts = currentFrame.dts;
10797 currentGop.duration = currentFrame.duration;
10798 } else {
10799 currentGop.duration += currentFrame.duration;
10800 currentGop.nalCount += currentFrame.length;
10801 currentGop.byteLength += currentFrame.byteLength;
10802 currentGop.push(currentFrame);
10803 }
10804 }
10805
10806 if (gops.length && currentGop.duration <= 0) {
10807 currentGop.duration = gops[gops.length - 1].duration;
10808 }
10809
10810 gops.byteLength += currentGop.byteLength;
10811 gops.nalCount += currentGop.nalCount;
10812 gops.duration += currentGop.duration; // push the final Gop
10813
10814 gops.push(currentGop);
10815 return gops;
10816 };
10817 /*
10818 * Search for the first keyframe in the GOPs and throw away all frames
10819 * until that keyframe. Then extend the duration of the pulled keyframe
10820 * and pull the PTS and DTS of the keyframe so that it covers the time
10821 * range of the frames that were disposed.
10822 *
10823 * @param {Array} gops video GOPs
10824 * @returns {Array} modified video GOPs
10825 */
10826
10827
10828 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
10829 var currentGop;
10830
10831 if (!gops[0][0].keyFrame && gops.length > 1) {
10832 // Remove the first GOP
10833 currentGop = gops.shift();
10834 gops.byteLength -= currentGop.byteLength;
10835 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
10836 // first gop to cover the time period of the
10837 // frames we just removed
10838
10839 gops[0][0].dts = currentGop.dts;
10840 gops[0][0].pts = currentGop.pts;
10841 gops[0][0].duration += currentGop.duration;
10842 }
10843
10844 return gops;
10845 };
10846 /**
10847 * Default sample object
10848 * see ISO/IEC 14496-12:2012, section 8.6.4.3
10849 */
10850
10851
10852 var createDefaultSample = function createDefaultSample() {
10853 return {
10854 size: 0,
10855 flags: {
10856 isLeading: 0,
10857 dependsOn: 1,
10858 isDependedOn: 0,
10859 hasRedundancy: 0,
10860 degradationPriority: 0,
10861 isNonSyncSample: 1
10862 }
10863 };
10864 };
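// Note (illustrative): these defaults describe a non-keyframe -- dependsOn = 1
// ("depends on others") and isNonSyncSample = 1. sampleForFrame below flips
// them to dependsOn = 2 and isNonSyncSample = 0 when frame.keyFrame is set,
// e.g. a keyframe of 3 nal units totalling 1500 bytes yields
// size = 4 * 3 + 1500 = 1512 (each nal unit gains a 4-byte length prefix).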
10865 /*
10866 * Collates information from a video frame into an object for eventual
10867 * entry into an MP4 sample table.
10868 *
10869 * @param {Object} frame the video frame
10870 * @param {Number} dataOffset the byte offset to position the sample
10871 * @return {Object} object containing sample table info for a frame
10872 */
10873
10874
10875 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
10876 var sample = createDefaultSample();
10877 sample.dataOffset = dataOffset;
10878 sample.compositionTimeOffset = frame.pts - frame.dts;
10879 sample.duration = frame.duration;
10880 sample.size = 4 * frame.length; // Space for nal unit size
10881
10882 sample.size += frame.byteLength;
10883
10884 if (frame.keyFrame) {
10885 sample.flags.dependsOn = 2;
10886 sample.flags.isNonSyncSample = 0;
10887 }
10888
10889 return sample;
10890 }; // generate the track's sample table from an array of gops
10891
10892
10893 var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
10894 var h,
10895 i,
10896 sample,
10897 currentGop,
10898 currentFrame,
10899 dataOffset = baseDataOffset || 0,
10900 samples = [];
10901
10902 for (h = 0; h < gops.length; h++) {
10903 currentGop = gops[h];
10904
10905 for (i = 0; i < currentGop.length; i++) {
10906 currentFrame = currentGop[i];
10907 sample = sampleForFrame(currentFrame, dataOffset);
10908 dataOffset += sample.size;
10909 samples.push(sample);
10910 }
10911 }
10912
10913 return samples;
10914 }; // generate the track's raw mdat data from an array of gops
10915
10916
10917 var concatenateNalData = function concatenateNalData(gops) {
10918 var h,
10919 i,
10920 j,
10921 currentGop,
10922 currentFrame,
10923 currentNal,
10924 dataOffset = 0,
10925 nalsByteLength = gops.byteLength,
10926 numberOfNals = gops.nalCount,
10927 totalByteLength = nalsByteLength + 4 * numberOfNals,
10928 data = new Uint8Array(totalByteLength),
10929 view = new DataView(data.buffer); // For each Gop..
10930
10931 for (h = 0; h < gops.length; h++) {
10932 currentGop = gops[h]; // For each Frame..
10933
10934 for (i = 0; i < currentGop.length; i++) {
10935 currentFrame = currentGop[i]; // For each NAL..
10936
10937 for (j = 0; j < currentFrame.length; j++) {
10938 currentNal = currentFrame[j];
10939 view.setUint32(dataOffset, currentNal.data.byteLength);
10940 dataOffset += 4;
10941 data.set(currentNal.data, dataOffset);
10942 dataOffset += currentNal.data.byteLength;
10943 }
10944 }
10945 }
10946
10947 return data;
10948 }; // generate the track's sample table from a frame
10949
10950
10951 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
10952 var sample,
10953 dataOffset = baseDataOffset || 0,
10954 samples = [];
10955 sample = sampleForFrame(frame, dataOffset);
10956 samples.push(sample);
10957 return samples;
10958 }; // generate the track's raw mdat data from a frame
10959
10960
10961 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
10962 var i,
10963 currentNal,
10964 dataOffset = 0,
10965 nalsByteLength = frame.byteLength,
10966 numberOfNals = frame.length,
10967 totalByteLength = nalsByteLength + 4 * numberOfNals,
10968 data = new Uint8Array(totalByteLength),
10969 view = new DataView(data.buffer); // For each NAL..
10970
10971 for (i = 0; i < frame.length; i++) {
10972 currentNal = frame[i];
10973 view.setUint32(dataOffset, currentNal.data.byteLength);
10974 dataOffset += 4;
10975 data.set(currentNal.data, dataOffset);
10976 dataOffset += currentNal.data.byteLength;
10977 }
10978
10979 return data;
10980 };
10981
10982 var frameUtils = {
10983 groupNalsIntoFrames: groupNalsIntoFrames,
10984 groupFramesIntoGops: groupFramesIntoGops,
10985 extendFirstKeyFrame: extendFirstKeyFrame,
10986 generateSampleTable: generateSampleTable$1,
10987 concatenateNalData: concatenateNalData,
10988 generateSampleTableForFrame: generateSampleTableForFrame,
10989 concatenateNalDataForFrame: concatenateNalDataForFrame
10990 };
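// Illustrative sketch (not part of mux.js): the transmuxer is assumed to
// drive these helpers roughly as follows, given parsed H.264 nal units of
// the shape { nalUnitType, data, pts, dts }:
var exampleVideoPayload = function exampleVideoPayload(nalUnits) {
var frames = frameUtils.groupNalsIntoFrames(nalUnits);
var gops = frameUtils.extendFirstKeyFrame(frameUtils.groupFramesIntoGops(frames));
return {
samples: frameUtils.generateSampleTable(gops, 0), // entries for the trun box
data: frameUtils.concatenateNalData(gops) // raw bytes for the mdat box
};
};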
10991 /**
10992 * mux.js
10993 *
10994 * Copyright (c) Brightcove
10995 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10996 */
10997
10998 var highPrefix = [33, 16, 5, 32, 164, 27];
10999 var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
11000
11001 var zeroFill = function zeroFill(count) {
11002 var a = [];
11003
11004 while (count--) {
11005 a.push(0);
11006 }
11007
11008 return a;
11009 };
11010
11011 var makeTable = function makeTable(metaTable) {
11012 return Object.keys(metaTable).reduce(function (obj, key) {
11013 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
11014 return arr.concat(part);
11015 }, []));
11016 return obj;
11017 }, {});
11018 };
11019
11020 var silence;
11021
11022 var silence_1 = function silence_1() {
11023 if (!silence) {
11024 // Frames-of-silence to use for filling in missing AAC frames
11025 var coneOfSilence = {
11026 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
11027 88200: [highPrefix, [231], zeroFill(170), [56]],
11028 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
11029 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
11030 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
11031 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
11032 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
11033 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
11034 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
11035 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
11036 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
11037 };
11038 silence = makeTable(coneOfSilence);
11039 }
11040
11041 return silence;
11042 };
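// Illustrative usage: the table is built lazily on first call, then a
// pregenerated silent AAC frame is looked up by sample rate:
//   silence_1()[48000]; // => Uint8Array of one silent frame
//   silence_1()[22050]; // => undefined; callers fall back to a frame taken
//                       //    from the content (see prefixWithSilence below)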
11043 /**
11044 * mux.js
11045 *
11046 * Copyright (c) Brightcove
11047 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11048 */
11049
11050
11051 var ONE_SECOND_IN_TS$4 = 90000,
11052 // 90kHz clock
11053 secondsToVideoTs,
11054 secondsToAudioTs,
11055 videoTsToSeconds,
11056 audioTsToSeconds,
11057 audioTsToVideoTs,
11058 videoTsToAudioTs,
11059 metadataTsToSeconds;
11060
11061 secondsToVideoTs = function secondsToVideoTs(seconds) {
11062 return seconds * ONE_SECOND_IN_TS$4;
11063 };
11064
11065 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
11066 return seconds * sampleRate;
11067 };
11068
11069 videoTsToSeconds = function videoTsToSeconds(timestamp) {
11070 return timestamp / ONE_SECOND_IN_TS$4;
11071 };
11072
11073 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
11074 return timestamp / sampleRate;
11075 };
11076
11077 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
11078 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
11079 };
11080
11081 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
11082 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
11083 };
11084 /**
11085 * Adjust ID3 tag or caption timing information by the timeline pts values
11086 * (if keepOriginalTimestamps is false) and convert to seconds
11087 */
11088
11089
11090 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
11091 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
11092 };
11093
11094 var clock = {
11095 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
11096 secondsToVideoTs: secondsToVideoTs,
11097 secondsToAudioTs: secondsToAudioTs,
11098 videoTsToSeconds: videoTsToSeconds,
11099 audioTsToSeconds: audioTsToSeconds,
11100 audioTsToVideoTs: audioTsToVideoTs,
11101 videoTsToAudioTs: videoTsToAudioTs,
11102 metadataTsToSeconds: metadataTsToSeconds
11103 };
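// Worked examples (illustrative) against the 90kHz clock:
//   clock.secondsToVideoTs(2);                       // => 180000
//   clock.videoTsToAudioTs(90000, 44100);            // => 44100 (one second)
//   clock.metadataTsToSeconds(180000, 90000, false); // => 1, shifted by the
//                                                    //    timeline start pts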
11104 /**
11105 * mux.js
11106 *
11107 * Copyright (c) Brightcove
11108 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11109 */
11110
11111 /**
11112 * Sum the `byteLength` properties of the data in each AAC frame
11113 */
11114
11115 var sumFrameByteLengths = function sumFrameByteLengths(array) {
11116 var i,
11117 currentObj,
11118 sum = 0; // sum the byteLengths of each nal unit in the frame
11119
11120 for (i = 0; i < array.length; i++) {
11121 currentObj = array[i];
11122 sum += currentObj.data.byteLength;
11123 }
11124
11125 return sum;
11126 }; // Possibly pad (prefix) the audio track with silence if appending this track
11127 // would lead to the introduction of a gap in the audio buffer
11128
11129
11130 var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
11131 var baseMediaDecodeTimeTs,
11132 frameDuration = 0,
11133 audioGapDuration = 0,
11134 audioFillFrameCount = 0,
11135 audioFillDuration = 0,
11136 silentFrame,
11137 i,
11138 firstFrame;
11139
11140 if (!frames.length) {
11141 return;
11142 }
11143
11144 baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
11145
11146 frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));
11147
11148 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
11149 // insert the shortest possible amount (audio gap or audio to video gap)
11150 audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
11151
11152 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
11153 audioFillDuration = audioFillFrameCount * frameDuration;
11154 } // don't attempt to fill gaps smaller than a single frame or larger
11155 // than a half second
11156
11157
11158 if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
11159 return;
11160 }
11161
11162 silentFrame = silence_1()[track.samplerate];
11163
11164 if (!silentFrame) {
11165 // we don't have a silent frame pregenerated for the sample rate, so use a frame
11166 // from the content instead
11167 silentFrame = frames[0].data;
11168 }
11169
11170 for (i = 0; i < audioFillFrameCount; i++) {
11171 firstFrame = frames[0];
11172 frames.splice(0, 0, {
11173 data: silentFrame,
11174 dts: firstFrame.dts - frameDuration,
11175 pts: firstFrame.pts - frameDuration
11176 });
11177 }
11178
11179 track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
11180 return audioFillDuration;
11181 }; // If the audio segment extends before the earliest allowed dts
11182 // value, remove AAC frames until the segment starts at or after the
11183 // earliest allowed DTS so that we don't end up with a negative
11184 // baseMediaDecodeTime for the audio track
11185
11186
11187 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
11188 if (track.minSegmentDts >= earliestAllowedDts) {
11189 return adtsFrames;
11190 } // We will need to recalculate the earliest segment Dts
11191
11192
11193 track.minSegmentDts = Infinity;
11194 return adtsFrames.filter(function (currentFrame) {
11195 // If this is an allowed frame, keep it and record its Dts
11196 if (currentFrame.dts >= earliestAllowedDts) {
11197 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
11198 track.minSegmentPts = track.minSegmentDts;
11199 return true;
11200 } // Otherwise, discard it
11201
11202
11203 return false;
11204 });
11205 }; // generate the track's sample table from an array of frames
11206
11207
11208 var generateSampleTable = function generateSampleTable(frames) {
11209 var i,
11210 currentFrame,
11211 samples = [];
11212
11213 for (i = 0; i < frames.length; i++) {
11214 currentFrame = frames[i];
11215 samples.push({
11216 size: currentFrame.data.byteLength,
11217 duration: 1024 // For AAC audio, every frame contains 1024 samples
11218
11219 });
11220 }
11221
11222 return samples;
11223 }; // generate the track's raw mdat data from an array of frames
11224
11225
11226 var concatenateFrameData = function concatenateFrameData(frames) {
11227 var i,
11228 currentFrame,
11229 dataOffset = 0,
11230 data = new Uint8Array(sumFrameByteLengths(frames));
11231
11232 for (i = 0; i < frames.length; i++) {
11233 currentFrame = frames[i];
11234 data.set(currentFrame.data, dataOffset);
11235 dataOffset += currentFrame.data.byteLength;
11236 }
11237
11238 return data;
11239 };
11240
11241 var audioFrameUtils = {
11242 prefixWithSilence: prefixWithSilence,
11243 trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
11244 generateSampleTable: generateSampleTable,
11245 concatenateFrameData: concatenateFrameData
11246 };
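// Worked example (illustrative): at a 44100Hz sample rate, one 1024-sample
// AAC frame spans Math.ceil(90000 / (44100 / 1024)) = 2090 ticks of the 90kHz
// clock, so prefixWithSilence pads a 10000-tick gap with
// Math.floor(10000 / 2090) = 4 silent frames (8360 ticks) and leaves the
// sub-frame remainder unfilled.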
11247 /**
11248 * mux.js
11249 *
11250 * Copyright (c) Brightcove
11251 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11252 */
11253
11254 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
11255 /**
11256 * Store information about the start and end of the track and the
11257 * duration for each frame/sample we process in order to calculate
11258 * the baseMediaDecodeTime
11259 */
11260
11261 var collectDtsInfo = function collectDtsInfo(track, data) {
11262 if (typeof data.pts === 'number') {
11263 if (track.timelineStartInfo.pts === undefined) {
11264 track.timelineStartInfo.pts = data.pts;
11265 }
11266
11267 if (track.minSegmentPts === undefined) {
11268 track.minSegmentPts = data.pts;
11269 } else {
11270 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
11271 }
11272
11273 if (track.maxSegmentPts === undefined) {
11274 track.maxSegmentPts = data.pts;
11275 } else {
11276 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
11277 }
11278 }
11279
11280 if (typeof data.dts === 'number') {
11281 if (track.timelineStartInfo.dts === undefined) {
11282 track.timelineStartInfo.dts = data.dts;
11283 }
11284
11285 if (track.minSegmentDts === undefined) {
11286 track.minSegmentDts = data.dts;
11287 } else {
11288 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
11289 }
11290
11291 if (track.maxSegmentDts === undefined) {
11292 track.maxSegmentDts = data.dts;
11293 } else {
11294 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
11295 }
11296 }
11297 };
11298 /**
11299 * Clear values used to calculate the baseMediaDecodeTime between
11300 * tracks
11301 */
11302
11303
11304 var clearDtsInfo = function clearDtsInfo(track) {
11305 delete track.minSegmentDts;
11306 delete track.maxSegmentDts;
11307 delete track.minSegmentPts;
11308 delete track.maxSegmentPts;
11309 };
11310 /**
11311 * Calculate the track's baseMediaDecodeTime based on the earliest
11312 * DTS the transmuxer has ever seen and the minimum DTS for the
11313 * current track
11314 * @param track {object} track metadata configuration
11315 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
11316 * in the source; false to adjust the first segment to start at 0.
11317 */
11318
11319
11320 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
11321 var baseMediaDecodeTime,
11322 scale,
11323 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
11324
11325 if (!keepOriginalTimestamps) {
11326 minSegmentDts -= track.timelineStartInfo.dts;
11327 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
11328 // we want the start of the first segment to be placed
11329
11330
11331 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
11332
11333 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
11334
11335 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
11336
11337 if (track.type === 'audio') {
11338 // Audio has a different clock equal to the sampling_rate so we need to
11339 // scale the PTS values into the clock rate of the track
11340 scale = track.samplerate / ONE_SECOND_IN_TS$3;
11341 baseMediaDecodeTime *= scale;
11342 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
11343 }
11344
11345 return baseMediaDecodeTime;
11346 };
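// Worked example (illustrative): with keepOriginalTimestamps false,
// timelineStartInfo = { dts: 90000, baseMediaDecodeTime: 0 } and
// minSegmentDts = 180000, a video track gets 180000 - 90000 = 90000.
// A 44100Hz audio track scales that by 44100 / 90000, giving
// Math.floor(90000 * 0.49) = 44100 in the audio clock.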
11347
11348 var trackDecodeInfo = {
11349 clearDtsInfo: clearDtsInfo,
11350 calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
11351 collectDtsInfo: collectDtsInfo
11352 };
11353 /**
11354 * mux.js
11355 *
11356 * Copyright (c) Brightcove
11357 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11358 *
11359 * Reads in-band caption information from a video elementary
11360 * stream. Captions must follow the CEA-708 standard for injection
11361 * into an MPEG-2 transport stream.
11362 * @see https://en.wikipedia.org/wiki/CEA-708
11363 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
11364 */
11365 // Supplemental enhancement information (SEI) NAL units have a payload type field to indicate how they are to be
11366 // interpreted. CEA-708 caption content is always transmitted with
11367 // payload type 0x04.
11368
11369 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
11370 RBSP_TRAILING_BITS = 128;
11371 /**
11372 * Parse a supplemental enhancement information (SEI) NAL unit.
11373 * Stops parsing once a message of type ITU T T35 has been found.
11374 *
11375 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
11376 * @return {object} the parsed SEI payload
11377 * @see Rec. ITU-T H.264, 7.3.2.3.1
11378 */
11379
11380 var parseSei = function parseSei(bytes) {
11381 var i = 0,
11382 result = {
11383 payloadType: -1,
11384 payloadSize: 0
11385 },
11386 payloadType = 0,
11387 payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
11388
11389 while (i < bytes.byteLength) {
11390 // stop once we have hit the end of the sei_rbsp
11391 if (bytes[i] === RBSP_TRAILING_BITS) {
11392 break;
11393 } // Parse payload type
11394
11395
11396 while (bytes[i] === 0xFF) {
11397 payloadType += 255;
11398 i++;
11399 }
11400
11401 payloadType += bytes[i++]; // Parse payload size
11402
11403 while (bytes[i] === 0xFF) {
11404 payloadSize += 255;
11405 i++;
11406 }
11407
11408 payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
11409 // there can only ever be one caption message in a frame's sei
11410
11411 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
11412 var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
11413
11414 if (userIdentifier === 'GA94') {
11415 result.payloadType = payloadType;
11416 result.payloadSize = payloadSize;
11417 result.payload = bytes.subarray(i, i + payloadSize);
11418 break;
11419 } else {
11420 result.payload = void 0;
11421 }
11422 } // skip the payload and parse the next message
11423
11424
11425 i += payloadSize;
11426 payloadType = 0;
11427 payloadSize = 0;
11428 }
11429
11430 return result;
11431 }; // see ANSI/SCTE 128-1 (2013), section 8.1
11432
11433
11434 var parseUserData = function parseUserData(sei) {
11435 // itu_t_t35_country_code must be 181 (United States) for
11436 // captions
11437 if (sei.payload[0] !== 181) {
11438 return null;
11439 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
11440
11441
11442 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
11443 return null;
11444 } // the user_identifier should be "GA94" to indicate ATSC1 data
11445
11446
11447 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
11448 return null;
11449 } // finally, user_data_type_code should be 0x03 for caption data
11450
11451
11452 if (sei.payload[7] !== 0x03) {
11453 return null;
11454 } // return the user_data_type_structure and strip the trailing
11455 // marker bits
11456
11457
11458 return sei.payload.subarray(8, sei.payload.length - 1);
11459 }; // see CEA-708-D, section 4.4
11460
11461
11462 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
11463 var results = [],
11464 i,
11465 count,
11466 offset,
11467 data; // if this is just filler, return immediately
11468
11469 if (!(userData[0] & 0x40)) {
11470 return results;
11471 } // parse out the cc_data_1 and cc_data_2 fields
11472
11473
11474 count = userData[0] & 0x1f;
11475
11476 for (i = 0; i < count; i++) {
11477 offset = i * 3;
11478 data = {
11479 type: userData[offset + 2] & 0x03,
11480 pts: pts
11481 }; // capture cc data when cc_valid is 1
11482
11483 if (userData[offset + 2] & 0x04) {
11484 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
11485 results.push(data);
11486 }
11487 }
11488
11489 return results;
11490 };
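// Worked example (illustrative): userData[0] = 0x43 sets the process flag
// (0x40) with cc_count = 3, so three 3-byte cc constructs follow starting at
// userData[2]; each is kept only when its cc_valid bit (0x04) is set, with
// the two data bytes packed big-endian into ccData.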
11491
11492 var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
11493 var length = data.byteLength,
11494 emulationPreventionBytesPositions = [],
11495 i = 1,
11496 newLength,
11497 newData; // Find all `Emulation Prevention Bytes`
11498
11499 while (i < length - 2) {
11500 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
11501 emulationPreventionBytesPositions.push(i + 2);
11502 i += 2;
11503 } else {
11504 i++;
11505 }
11506 } // If no Emulation Prevention Bytes were found just return the original
11507 // array
11508
11509
11510 if (emulationPreventionBytesPositions.length === 0) {
11511 return data;
11512 } // Create a new array to hold the NAL unit data
11513
11514
11515 newLength = length - emulationPreventionBytesPositions.length;
11516 newData = new Uint8Array(newLength);
11517 var sourceIndex = 0;
11518
11519 for (i = 0; i < newLength; sourceIndex++, i++) {
11520 if (sourceIndex === emulationPreventionBytesPositions[0]) {
11521 // Skip this byte
11522 sourceIndex++; // Remove this position index
11523
11524 emulationPreventionBytesPositions.shift();
11525 }
11526
11527 newData[i] = data[sourceIndex];
11528 }
11529
11530 return newData;
11531 }; // exports
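// Worked example (illustrative): the RBSP byte sequence 0x00 0x00 0x03 0x01
// comes out as 0x00 0x00 0x01 -- the 0x03 emulation prevention byte exists
// only so start-code-like runs never appear inside a nal unit's payload, and
// must be dropped before the SEI payload is parsed.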
11532
11533
11534 var captionPacketParser = {
11535 parseSei: parseSei,
11536 parseUserData: parseUserData,
11537 parseCaptionPackets: parseCaptionPackets,
11538 discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
11539 USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
11540 }; // Link To Transport
11541 // -----------------
11542
11543 var CaptionStream$1 = function CaptionStream(options) {
11544 options = options || {};
11545 CaptionStream.prototype.init.call(this); // parse708captions flag, default to true
11546
11547 this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
11548 this.captionPackets_ = [];
11549 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
11550 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
11551 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
11552 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
11553 ];
11554
11555 if (this.parse708captions_) {
11556 this.cc708Stream_ = new Cea708Stream({
11557 captionServices: options.captionServices
11558 }); // eslint-disable-line no-use-before-define
11559 }
11560
11561 this.reset(); // forward data and done events from CCs to this CaptionStream
11562
11563 this.ccStreams_.forEach(function (cc) {
11564 cc.on('data', this.trigger.bind(this, 'data'));
11565 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
11566 cc.on('done', this.trigger.bind(this, 'done'));
11567 }, this);
11568
11569 if (this.parse708captions_) {
11570 this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
11571 this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
11572 this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
11573 }
11574 };
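// Illustrative construction (option names taken from the checks above):
//   var captionStream = new CaptionStream$1({
//     parse708captions: true,
//     captionServices: { SERVICE1: { encoding: 'utf-8' } }
//   });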
11575
11576 CaptionStream$1.prototype = new stream();
11577
11578 CaptionStream$1.prototype.push = function (event) {
11579 var sei, userData, newCaptionPackets; // only examine SEI NALs
11580
11581 if (event.nalUnitType !== 'sei_rbsp') {
11582 return;
11583 } // parse the sei
11584
11585
11586 sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip
11587
11588 if (!sei.payload) {
11589 return;
11590 } // ignore everything but user_data_registered_itu_t_t35
11591
11592
11593 if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
11594 return;
11595 } // parse out the user data payload
11596
11597
11598 userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData
11599
11600 if (!userData) {
11601 return;
11602 } // Sometimes, the same segment # will be downloaded twice. To stop the
11603 // caption data from being processed twice, we track the latest dts we've
11604 // received and ignore everything with a dts before that. However, since
11605 // data for a specific dts can be split across packets on either side of
11606 // a segment boundary, we need to make sure we *don't* ignore the packets
11607 // from the *next* segment that have dts === this.latestDts_. By constantly
11608 // tracking the number of packets received with dts === this.latestDts_, we
11609 // know how many should be ignored once we start receiving duplicates.
11610
11611
11612 if (event.dts < this.latestDts_) {
11613 // We've started getting older data, so set the flag.
11614 this.ignoreNextEqualDts_ = true;
11615 return;
11616 } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
11617 this.numSameDts_--;
11618
11619 if (!this.numSameDts_) {
11620 // We've received the last duplicate packet, time to start processing again
11621 this.ignoreNextEqualDts_ = false;
11622 }
11623
11624 return;
11625 } // parse out CC data packets and save them for later
11626
11627
11628 newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
11629 this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
11630
11631 if (this.latestDts_ !== event.dts) {
11632 this.numSameDts_ = 0;
11633 }
11634
11635 this.numSameDts_++;
11636 this.latestDts_ = event.dts;
11637 };
11638
11639 CaptionStream$1.prototype.flushCCStreams = function (flushType) {
11640 this.ccStreams_.forEach(function (cc) {
11641 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
11642 }, this);
11643 };
11644
11645 CaptionStream$1.prototype.flushStream = function (flushType) {
11646 // make sure we actually parsed captions before proceeding
11647 if (!this.captionPackets_.length) {
11648 this.flushCCStreams(flushType);
11649 return;
11650 } // In Chrome, the Array#sort function is not stable so add a
11651 // presortIndex that we can use to ensure we get a stable sort
11652
11653
11654 this.captionPackets_.forEach(function (elem, idx) {
11655 elem.presortIndex = idx;
11656 }); // sort caption byte-pairs based on their PTS values
11657
11658 this.captionPackets_.sort(function (a, b) {
11659 if (a.pts === b.pts) {
11660 return a.presortIndex - b.presortIndex;
11661 }
11662
11663 return a.pts - b.pts;
11664 });
11665 this.captionPackets_.forEach(function (packet) {
11666 if (packet.type < 2) {
11667 // Dispatch packet to the right Cea608Stream
11668 this.dispatchCea608Packet(packet);
11669 } else {
11670 // Dispatch packet to the Cea708Stream
11671 this.dispatchCea708Packet(packet);
11672 }
11673 }, this);
11674 this.captionPackets_.length = 0;
11675 this.flushCCStreams(flushType);
11676 };
11677
11678 CaptionStream$1.prototype.flush = function () {
11679 return this.flushStream('flush');
11680 }; // Only called if handling partial data
11681
11682
11683 CaptionStream$1.prototype.partialFlush = function () {
11684 return this.flushStream('partialFlush');
11685 };
11686
11687 CaptionStream$1.prototype.reset = function () {
11688 this.latestDts_ = null;
11689 this.ignoreNextEqualDts_ = false;
11690 this.numSameDts_ = 0;
11691 this.activeCea608Channel_ = [null, null];
11692 this.ccStreams_.forEach(function (ccStream) {
11693 ccStream.reset();
11694 });
11695 }; // From the CEA-608 spec:
11696
11697 /*
11698 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
11699 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
11700 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
11701 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
11702 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
11703 * to switch to captioning or Text.
11704 */
11705 // With that in mind, we ignore any data between an XDS control code and a
11706 // subsequent closed-captioning control code.
11707
11708
11709 CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
11710 // NOTE: packet.type is the CEA608 field
11711 if (this.setsTextOrXDSActive(packet)) {
11712 this.activeCea608Channel_[packet.type] = null;
11713 } else if (this.setsChannel1Active(packet)) {
11714 this.activeCea608Channel_[packet.type] = 0;
11715 } else if (this.setsChannel2Active(packet)) {
11716 this.activeCea608Channel_[packet.type] = 1;
11717 }
11718
11719 if (this.activeCea608Channel_[packet.type] === null) {
11720 // If we haven't received anything to set the active channel, or the
11721 // packets are Text/XDS data, discard the data; we don't want jumbled
11722 // captions
11723 return;
11724 }
11725
11726 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
11727 };
11728
11729 CaptionStream$1.prototype.setsChannel1Active = function (packet) {
11730 return (packet.ccData & 0x7800) === 0x1000;
11731 };
11732
11733 CaptionStream$1.prototype.setsChannel2Active = function (packet) {
11734 return (packet.ccData & 0x7800) === 0x1800;
11735 };
11736
11737 CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
11738 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
11739 };
11740
11741 CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
11742 if (this.parse708captions_) {
11743 this.cc708Stream_.push(packet);
11744 }
11745 }; // ----------------------
11746 // Session to Application
11747 // ----------------------
11748 // This hash maps special and extended character codes to their
11749 // proper Unicode equivalent. The first one-byte key is just a
11750 // non-standard character code. The two-byte keys that follow are
11751 // the extended CEA708 character codes, along with the preceding
11752 // 0x10 extended character byte to distinguish these codes from
11753 // non-extended character codes. Every CEA708 character code that
11754 // is not in this object maps directly to a standard unicode
11755 // character code.
11756 // The transparent space and non-breaking transparent space are
11757 // technically not fully supported since there is no code to
11758 // make them transparent, so they have normal non-transparent
11759 // stand-ins.
11760 // The special closed caption (CC) character isn't a standard
11761 // unicode character, so a fairly similar unicode character was
11762 // chosen in its place.
11763
11764
11765 var CHARACTER_TRANSLATION_708 = {
11766 0x7f: 0x266a,
11767 // ♪
11768 0x1020: 0x20,
11769 // Transparent Space
11770 0x1021: 0xa0,
11771 // Non-breaking Transparent Space
11772 0x1025: 0x2026,
11773 // …
11774 0x102a: 0x0160,
11775 // Š
11776 0x102c: 0x0152,
11777 // Œ
11778 0x1030: 0x2588,
11779 // █
11780 0x1031: 0x2018,
11781 // ‘
11782 0x1032: 0x2019,
11783 // ’
11784 0x1033: 0x201c,
11785 // “
11786 0x1034: 0x201d,
11787 // ”
11788 0x1035: 0x2022,
11789 // •
11790 0x1039: 0x2122,
11791 // ™
11792 0x103a: 0x0161,
11793 // š
11794 0x103c: 0x0153,
11795 // œ
11796 0x103d: 0x2120,
11797 // ℠
11798 0x103f: 0x0178,
11799 // Ÿ
11800 0x1076: 0x215b,
11801 // ⅛
11802 0x1077: 0x215c,
11803 // ⅜
11804 0x1078: 0x215d,
11805 // ⅝
11806 0x1079: 0x215e,
11807 // ⅞
11808 0x107a: 0x23d0,
11809 // ⏐
11810 0x107b: 0x23a4,
11811 // ⎤
11812 0x107c: 0x23a3,
11813 // ⎣
11814 0x107d: 0x23af,
11815 // ⎯
11816 0x107e: 0x23a6,
11817 // ⎦
11818 0x107f: 0x23a1,
11819 // ⎡
11820 0x10a0: 0x3138 // ㄸ (CC char)
11821
11822 };
11823
11824 var get708CharFromCode = function get708CharFromCode(code) {
11825 var newCode = CHARACTER_TRANSLATION_708[code] || code;
11826
11827 if (code & 0x1000 && code === newCode) {
11828 // Invalid extended code
11829 return '';
11830 }
11831
11832 return String.fromCharCode(newCode);
11833 };
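// Illustrative results: get708CharFromCode(0x1025) => '…' (translated
// extended code), get708CharFromCode(0x41) => 'A' (standard code passed
// through), and get708CharFromCode(0x1040) => '' (an extended code with no
// translation is treated as invalid).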
11834
11835 var within708TextBlock = function within708TextBlock(b) {
11836 return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
11837 };
11838
11839 var Cea708Window = function Cea708Window(windowNum) {
11840 this.windowNum = windowNum;
11841 this.reset();
11842 };
11843
11844 Cea708Window.prototype.reset = function () {
11845 this.clearText();
11846 this.pendingNewLine = false;
11847 this.winAttr = {};
11848 this.penAttr = {};
11849 this.penLoc = {};
11850 this.penColor = {}; // These default values are arbitrary,
11851 // defineWindow will usually override them
11852
11853 this.visible = 0;
11854 this.rowLock = 0;
11855 this.columnLock = 0;
11856 this.priority = 0;
11857 this.relativePositioning = 0;
11858 this.anchorVertical = 0;
11859 this.anchorHorizontal = 0;
11860 this.anchorPoint = 0;
11861 this.rowCount = 1;
11862 this.virtualRowCount = this.rowCount + 1;
11863 this.columnCount = 41;
11864 this.windowStyle = 0;
11865 this.penStyle = 0;
11866 };
11867
11868 Cea708Window.prototype.getText = function () {
11869 return this.rows.join('\n');
11870 };
11871
11872 Cea708Window.prototype.clearText = function () {
11873 this.rows = [''];
11874 this.rowIdx = 0;
11875 };
11876
11877 Cea708Window.prototype.newLine = function (pts) {
11878 if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
11879 this.beforeRowOverflow(pts);
11880 }
11881
11882 if (this.rows.length > 0) {
11883 this.rows.push('');
11884 this.rowIdx++;
11885 } // Show all virtual rows since there's no visible scrolling
11886
11887
11888 while (this.rows.length > this.virtualRowCount) {
11889 this.rows.shift();
11890 this.rowIdx--;
11891 }
11892 };
11893
11894 Cea708Window.prototype.isEmpty = function () {
11895 if (this.rows.length === 0) {
11896 return true;
11897 } else if (this.rows.length === 1) {
11898 return this.rows[0] === '';
11899 }
11900
11901 return false;
11902 };
11903
11904 Cea708Window.prototype.addText = function (text) {
11905 this.rows[this.rowIdx] += text;
11906 };
11907
11908 Cea708Window.prototype.backspace = function () {
11909 if (!this.isEmpty()) {
11910 var row = this.rows[this.rowIdx];
11911 this.rows[this.rowIdx] = row.substr(0, row.length - 1);
11912 }
11913 };
11914
11915 var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
11916 this.serviceNum = serviceNum;
11917 this.text = '';
11918 this.currentWindow = new Cea708Window(-1);
11919 this.windows = [];
11920 this.stream = stream; // Try to setup a TextDecoder if an `encoding` value was provided
11921
11922 if (typeof encoding === 'string') {
11923 this.createTextDecoder(encoding);
11924 }
11925 };
11926 /**
11927 * Initialize service windows
11928 * Must be run before service use
11929 *
11930 * @param {Integer} pts PTS value
11931 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
11932 */
11933
11934
11935 Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
11936 this.startPts = pts;
11937
11938 for (var win = 0; win < 8; win++) {
11939 this.windows[win] = new Cea708Window(win);
11940
11941 if (typeof beforeRowOverflow === 'function') {
11942 this.windows[win].beforeRowOverflow = beforeRowOverflow;
11943 }
11944 }
11945 };
11946 /**
11947 * Set current window of service to be affected by commands
11948 *
11949 * @param {Integer} windowNum Window number
11950 */
11951
11952
11953 Cea708Service.prototype.setCurrentWindow = function (windowNum) {
11954 this.currentWindow = this.windows[windowNum];
11955 };
11956 /**
11957 * Try to create a TextDecoder if it is natively supported
11958 */
11959
11960
11961 Cea708Service.prototype.createTextDecoder = function (encoding) {
11962 if (typeof TextDecoder === 'undefined') {
11963 this.stream.trigger('log', {
11964 level: 'warn',
11965 message: 'The `encoding` option is unsupported without TextDecoder support'
11966 });
11967 } else {
11968 try {
11969 this.textDecoder_ = new TextDecoder(encoding);
11970 } catch (error) {
11971 this.stream.trigger('log', {
11972 level: 'warn',
11973 message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
11974 });
11975 }
11976 }
11977 };
11978
11979 var Cea708Stream = function Cea708Stream(options) {
11980 options = options || {};
11981 Cea708Stream.prototype.init.call(this);
11982 var self = this;
11983 var captionServices = options.captionServices || {};
11984 var captionServiceEncodings = {};
11985 var serviceProps; // Get service encodings from captionServices option block
11986
11987 Object.keys(captionServices).forEach(function (serviceName) {
11988 serviceProps = captionServices[serviceName];
11989
11990 if (/^SERVICE/.test(serviceName)) {
11991 captionServiceEncodings[serviceName] = serviceProps.encoding;
11992 }
11993 });
11994 this.serviceEncodings = captionServiceEncodings;
11995 this.current708Packet = null;
11996 this.services = {};
11997
11998 this.push = function (packet) {
11999 if (packet.type === 3) {
12000 // 708 packet start
12001 self.new708Packet();
12002 self.add708Bytes(packet);
12003 } else {
12004 if (self.current708Packet === null) {
12005 // This should only happen at the start of a file if there's no packet start.
12006 self.new708Packet();
12007 }
12008
12009 self.add708Bytes(packet);
12010 }
12011 };
12012 };
12013
12014 Cea708Stream.prototype = new stream();
12015 /**
12016 * Push current 708 packet, create new 708 packet.
12017 */
12018
12019 Cea708Stream.prototype.new708Packet = function () {
12020 if (this.current708Packet !== null) {
12021 this.push708Packet();
12022 }
12023
12024 this.current708Packet = {
12025 data: [],
12026 ptsVals: []
12027 };
12028 };
12029 /**
12030 * Add pts and both bytes from packet into current 708 packet.
12031 */
12032
12033
12034 Cea708Stream.prototype.add708Bytes = function (packet) {
12035 var data = packet.ccData;
12036 var byte0 = data >>> 8;
12037 var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
12038 // that service blocks will always line up with byte pairs.
12039
12040 this.current708Packet.ptsVals.push(packet.pts);
12041 this.current708Packet.data.push(byte0);
12042 this.current708Packet.data.push(byte1);
12043 };
12044 /**
12045 * Parse completed 708 packet into service blocks and push each service block.
12046 */
12047
12048
12049 Cea708Stream.prototype.push708Packet = function () {
12050 var packet708 = this.current708Packet;
12051 var packetData = packet708.data;
12052 var serviceNum = null;
12053 var blockSize = null;
12054 var i = 0;
12055 var b = packetData[i++];
12056 packet708.seq = b >> 6;
12057 packet708.sizeCode = b & 0x3f; // 0b00111111;
12058
12059 for (; i < packetData.length; i++) {
12060 b = packetData[i++];
12061 serviceNum = b >> 5;
12062 blockSize = b & 0x1f; // 0b00011111
12063
12064 if (serviceNum === 7 && blockSize > 0) {
12065 // Extended service num
12066 b = packetData[i++];
12067 serviceNum = b;
12068 }
12069
12070 this.pushServiceBlock(serviceNum, i, blockSize);
12071
12072 if (blockSize > 0) {
12073 i += blockSize - 1;
12074 }
12075 }
12076 };
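// Worked example of the header parsing above: a packet starting with
// 0x62 0x41 unpacks as seq = 0x62 >> 6 = 1 and sizeCode = 0x62 & 0x3f = 0x22;
// the service-block header 0x41 then yields serviceNum = 0x41 >> 5 = 2 and
// blockSize = 0x41 & 0x1f = 1, i.e. one data byte for service 2 follows.
// Only when serviceNum === 7 (all three high bits set) and blockSize is
// non-zero does the next byte carry the real, extended service number.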
12077 /**
12078 * Parse service block, execute commands, read text.
12079 *
12080 * Note: While many of these commands serve important purposes,
12081 * many others just parse out the parameters or attributes, but
12082 * nothing is done with them because this is not a full and complete
12083 * implementation of the entire 708 spec.
12084 *
12085 * @param {Integer} serviceNum Service number
12086 * @param {Integer} start Start index of the 708 packet data
12087 * @param {Integer} size Block size
12088 */
12089
12090
12091 Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
12092 var b;
12093 var i = start;
12094 var packetData = this.current708Packet.data;
12095 var service = this.services[serviceNum];
12096
12097 if (!service) {
12098 service = this.initService(serviceNum, i);
12099 }
12100
12101 for (; i < start + size && i < packetData.length; i++) {
12102 b = packetData[i];
12103
12104 if (within708TextBlock(b)) {
12105 i = this.handleText(i, service);
12106 } else if (b === 0x18) {
12107 i = this.multiByteCharacter(i, service);
12108 } else if (b === 0x10) {
12109 i = this.extendedCommands(i, service);
12110 } else if (0x80 <= b && b <= 0x87) {
12111 i = this.setCurrentWindow(i, service);
12112 } else if (0x98 <= b && b <= 0x9f) {
12113 i = this.defineWindow(i, service);
12114 } else if (b === 0x88) {
12115 i = this.clearWindows(i, service);
12116 } else if (b === 0x8c) {
12117 i = this.deleteWindows(i, service);
12118 } else if (b === 0x89) {
12119 i = this.displayWindows(i, service);
12120 } else if (b === 0x8a) {
12121 i = this.hideWindows(i, service);
12122 } else if (b === 0x8b) {
12123 i = this.toggleWindows(i, service);
12124 } else if (b === 0x97) {
12125 i = this.setWindowAttributes(i, service);
12126 } else if (b === 0x90) {
12127 i = this.setPenAttributes(i, service);
12128 } else if (b === 0x91) {
12129 i = this.setPenColor(i, service);
12130 } else if (b === 0x92) {
12131 i = this.setPenLocation(i, service);
12132 } else if (b === 0x8f) {
12133 service = this.reset(i, service);
12134 } else if (b === 0x08) {
12135 // BS: Backspace
12136 service.currentWindow.backspace();
12137 } else if (b === 0x0c) {
12138 // FF: Form feed
12139 service.currentWindow.clearText();
12140 } else if (b === 0x0d) {
12141 // CR: Carriage return
12142 service.currentWindow.pendingNewLine = true;
12143 } else if (b === 0x0e) {
12144 // HCR: Horizontal carriage return
12145 service.currentWindow.clearText();
12146 } else if (b === 0x8d) {
12147 // DLY: Delay, nothing to do
12148 i++;
12149 } // unrecognized command byte: ignore and continue parsing
12150 }
12151 };
12152 /**
12153 * Execute an extended command
12154 *
12155 * @param {Integer} i Current index in the 708 packet
12156 * @param {Service} service The service object to be affected
12157 * @return {Integer} New index after parsing
12158 */
12159
12160
12161 Cea708Stream.prototype.extendedCommands = function (i, service) {
12162 var packetData = this.current708Packet.data;
12163 var b = packetData[++i];
12164
12165 if (within708TextBlock(b)) {
12166 i = this.handleText(i, service, {
12167 isExtended: true
12168 });
12169 }
12170
12171 return i;
12172 };
12173 /**
12174 * Get PTS value of a given byte index
12175 *
12176 * @param {Integer} byteIndex Index of the byte
12177 * @return {Integer} PTS
12178 */
12179
12180
12181 Cea708Stream.prototype.getPts = function (byteIndex) {
12182 // There's 1 pts value per 2 bytes
12183 return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
12184 };
12185 /**
12186 * Initializes a service
12187 *
12188 * @param {Integer} serviceNum Service number
* @param {Integer} i Current index in the 708 packet data, used to look up the start PTS
12189 * @return {Service} Initialized service object
12190 */
12191
12192
12193 Cea708Stream.prototype.initService = function (serviceNum, i) {
12194 var serviceName = 'SERVICE' + serviceNum;
12195 var self = this;
12197 var encoding;
12198
12199 if (serviceName in this.serviceEncodings) {
12200 encoding = this.serviceEncodings[serviceName];
12201 }
12202
12203 this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
12204 this.services[serviceNum].init(this.getPts(i), function (pts) {
12205 self.flushDisplayed(pts, self.services[serviceNum]);
12206 });
12207 return this.services[serviceNum];
12208 };
12209 /**
12210 * Execute text writing to current window
12211 *
12212 * @param {Integer} i Current index in the 708 packet
12213 * @param {Service} service The service object to be affected
12214 * @return {Integer} New index after parsing
12215 */
12216
12217
12218 Cea708Stream.prototype.handleText = function (i, service, options) {
12219 var isExtended = options && options.isExtended;
12220 var isMultiByte = options && options.isMultiByte;
12221 var packetData = this.current708Packet.data;
12222 var extended = isExtended ? 0x1000 : 0x0000;
12223 var currentByte = packetData[i];
12224 var nextByte = packetData[i + 1];
12225 var win = service.currentWindow;
12226 var char;
12227 var charCodeArray; // Use the TextDecoder if one was created for this service
12228
12229 if (service.textDecoder_ && !isExtended) {
12230 if (isMultiByte) {
12231 charCodeArray = [currentByte, nextByte];
12232 i++;
12233 } else {
12234 charCodeArray = [currentByte];
12235 }
12236
12237 char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
12238 } else {
12239 char = get708CharFromCode(extended | currentByte);
12240 }
12241
12242 if (win.pendingNewLine && !win.isEmpty()) {
12243 win.newLine(this.getPts(i));
12244 }
12245
12246 win.pendingNewLine = false;
12247 win.addText(char);
12248 return i;
12249 };
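// For illustration: a standard (G0) byte is looked up by its own value
// (extended === 0x0000), while a byte that followed the 0x10 EXT1 prefix is
// looked up as 0x1000 | byte, keeping the extended (G2/G3) table entries in
// a separate key range from the G0 characters in get708CharFromCode.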
12250 /**
12251 * Handle decoding of multibyte character
12252 *
12253 * @param {Integer} i Current index in the 708 packet
12254 * @param {Service} service The service object to be affected
12255 * @return {Integer} New index after parsing
12256 */
12257
12258
12259 Cea708Stream.prototype.multiByteCharacter = function (i, service) {
12260 var packetData = this.current708Packet.data;
12261 var firstByte = packetData[i + 1];
12262 var secondByte = packetData[i + 2];
12263
12264 if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
12265 i = this.handleText(++i, service, {
12266 isMultiByte: true
12267 });
12268 }
12269
12270 return i;
12271 };
12272 /**
12273 * Parse and execute the CW# command.
12274 *
12275 * Set the current window.
12276 *
12277 * @param {Integer} i Current index in the 708 packet
12278 * @param {Service} service The service object to be affected
12279 * @return {Integer} New index after parsing
12280 */
12281
12282
12283 Cea708Stream.prototype.setCurrentWindow = function (i, service) {
12284 var packetData = this.current708Packet.data;
12285 var b = packetData[i];
12286 var windowNum = b & 0x07;
12287 service.setCurrentWindow(windowNum);
12288 return i;
12289 };
12290 /**
12291 * Parse and execute the DF# command.
12292 *
12293 * Define a window and set it as the current window.
12294 *
12295 * @param {Integer} i Current index in the 708 packet
12296 * @param {Service} service The service object to be affected
12297 * @return {Integer} New index after parsing
12298 */
12299
12300
12301 Cea708Stream.prototype.defineWindow = function (i, service) {
12302 var packetData = this.current708Packet.data;
12303 var b = packetData[i];
12304 var windowNum = b & 0x07;
12305 service.setCurrentWindow(windowNum);
12306 var win = service.currentWindow;
12307 b = packetData[++i];
12308 win.visible = (b & 0x20) >> 5; // v
12309
12310 win.rowLock = (b & 0x10) >> 4; // rl
12311
12312 win.columnLock = (b & 0x08) >> 3; // cl
12313
12314 win.priority = b & 0x07; // p
12315
12316 b = packetData[++i];
12317 win.relativePositioning = (b & 0x80) >> 7; // rp
12318
12319 win.anchorVertical = b & 0x7f; // av
12320
12321 b = packetData[++i];
12322 win.anchorHorizontal = b; // ah
12323
12324 b = packetData[++i];
12325 win.anchorPoint = (b & 0xf0) >> 4; // ap
12326
12327 win.rowCount = b & 0x0f; // rc
12328
12329 b = packetData[++i];
12330 win.columnCount = b & 0x3f; // cc
12331
12332 b = packetData[++i];
12333 win.windowStyle = (b & 0x38) >> 3; // ws
12334
12335 win.penStyle = b & 0x07; // ps
12336 // The spec says there are (rowCount+1) "virtual rows"
12337
12338 win.virtualRowCount = win.rowCount + 1;
12339 return i;
12340 };
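// Worked example of the first parameter byte above: b = 0x3a (0b00111010)
// unpacks to visible = (b & 0x20) >> 5 = 1, rowLock = (b & 0x10) >> 4 = 1,
// columnLock = (b & 0x08) >> 3 = 1 and priority = b & 0x07 = 2; the remaining
// parameter bytes are unpacked with the same mask-and-shift pattern.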
12341 /**
12342 * Parse and execute the SWA command.
12343 *
12344 * Set attributes of the current window.
12345 *
12346 * @param {Integer} i Current index in the 708 packet
12347 * @param {Service} service The service object to be affected
12348 * @return {Integer} New index after parsing
12349 */
12350
12351
12352 Cea708Stream.prototype.setWindowAttributes = function (i, service) {
12353 var packetData = this.current708Packet.data;
12354 var b;
12355 var winAttr = service.currentWindow.winAttr;
12356 b = packetData[++i];
12357 winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
12358
12359 winAttr.fillRed = (b & 0x30) >> 4; // fr
12360
12361 winAttr.fillGreen = (b & 0x0c) >> 2; // fg
12362
12363 winAttr.fillBlue = b & 0x03; // fb
12364
12365 b = packetData[++i];
12366 winAttr.borderType = (b & 0xc0) >> 6; // bt
12367
12368 winAttr.borderRed = (b & 0x30) >> 4; // br
12369
12370 winAttr.borderGreen = (b & 0x0c) >> 2; // bg
12371
12372 winAttr.borderBlue = b & 0x03; // bb
12373
12374 b = packetData[++i];
12375 winAttr.borderType += (b & 0x80) >> 5; // bt
12376
12377 winAttr.wordWrap = (b & 0x40) >> 6; // ww
12378
12379 winAttr.printDirection = (b & 0x30) >> 4; // pd
12380
12381 winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
12382
12383 winAttr.justify = b & 0x03; // j
12384
12385 b = packetData[++i];
12386 winAttr.effectSpeed = (b & 0xf0) >> 4; // es
12387
12388 winAttr.effectDirection = (b & 0x0c) >> 2; // ed
12389
12390 winAttr.displayEffect = b & 0x03; // de
12391
12392 return i;
12393 };
12394 /**
12395 * Gather text from all displayed windows and push a caption to output.
12396 *
12397 * @param {Integer} pts Current PTS value
12398 * @param {Service} service The service object to be affected
12399 */
12400
12401
12402 Cea708Stream.prototype.flushDisplayed = function (pts, service) {
12403 var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
12404 // display text in the correct order, but sample files so far have not shown any issue.
12405
12406 for (var winId = 0; winId < 8; winId++) {
12407 if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
12408 displayedText.push(service.windows[winId].getText());
12409 }
12410 }
12411
12412 service.endPts = pts;
12413 service.text = displayedText.join('\n\n');
12414 this.pushCaption(service);
12415 service.startPts = pts;
12416 };
12417 /**
12418 * Push a caption to output if the caption contains text.
12419 *
12420 * @param {Service} service The service object to be affected
12421 */
12422
12423
12424 Cea708Stream.prototype.pushCaption = function (service) {
12425 if (service.text !== '') {
12426 this.trigger('data', {
12427 startPts: service.startPts,
12428 endPts: service.endPts,
12429 text: service.text,
12430 stream: 'cc708_' + service.serviceNum
12431 });
12432 service.text = '';
12433 service.startPts = service.endPts;
12434 }
12435 };
12436 /**
12437 * Parse and execute the DSW command.
12438 *
12439 * Set visible property of windows based on the parsed bitmask.
12440 *
12441 * @param {Integer} i Current index in the 708 packet
12442 * @param {Service} service The service object to be affected
12443 * @return {Integer} New index after parsing
12444 */
12445
12446
12447 Cea708Stream.prototype.displayWindows = function (i, service) {
12448 var packetData = this.current708Packet.data;
12449 var b = packetData[++i];
12450 var pts = this.getPts(i);
12451 this.flushDisplayed(pts, service);
12452
12453 for (var winId = 0; winId < 8; winId++) {
12454 if (b & 0x01 << winId) {
12455 service.windows[winId].visible = 1;
12456 }
12457 }
12458
12459 return i;
12460 };
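// Worked example of the window bitmask shared by DSW/HDW/TGW/CLW/DLW: a
// parameter byte of 0x05 (0b00000101) has bits 0 and 2 set, so the loop
// above marks windows 0 and 2 visible and leaves the other six untouched;
// the sibling commands below apply hide/toggle/clear/reset to the same mask.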
12461 /**
12462 * Parse and execute the HDW command.
12463 *
12464 * Set visible property of windows based on the parsed bitmask.
12465 *
12466 * @param {Integer} i Current index in the 708 packet
12467 * @param {Service} service The service object to be affected
12468 * @return {Integer} New index after parsing
12469 */
12470
12471
12472 Cea708Stream.prototype.hideWindows = function (i, service) {
12473 var packetData = this.current708Packet.data;
12474 var b = packetData[++i];
12475 var pts = this.getPts(i);
12476 this.flushDisplayed(pts, service);
12477
12478 for (var winId = 0; winId < 8; winId++) {
12479 if (b & 0x01 << winId) {
12480 service.windows[winId].visible = 0;
12481 }
12482 }
12483
12484 return i;
12485 };
12486 /**
12487 * Parse and execute the TGW command.
12488 *
12489 * Set visible property of windows based on the parsed bitmask.
12490 *
12491 * @param {Integer} i Current index in the 708 packet
12492 * @param {Service} service The service object to be affected
12493 * @return {Integer} New index after parsing
12494 */
12495
12496
12497 Cea708Stream.prototype.toggleWindows = function (i, service) {
12498 var packetData = this.current708Packet.data;
12499 var b = packetData[++i];
12500 var pts = this.getPts(i);
12501 this.flushDisplayed(pts, service);
12502
12503 for (var winId = 0; winId < 8; winId++) {
12504 if (b & 0x01 << winId) {
12505 service.windows[winId].visible ^= 1;
12506 }
12507 }
12508
12509 return i;
12510 };
12511 /**
12512 * Parse and execute the CLW command.
12513 *
12514 * Clear text of windows based on the parsed bitmask.
12515 *
12516 * @param {Integer} i Current index in the 708 packet
12517 * @param {Service} service The service object to be affected
12518 * @return {Integer} New index after parsing
12519 */
12520
12521
12522 Cea708Stream.prototype.clearWindows = function (i, service) {
12523 var packetData = this.current708Packet.data;
12524 var b = packetData[++i];
12525 var pts = this.getPts(i);
12526 this.flushDisplayed(pts, service);
12527
12528 for (var winId = 0; winId < 8; winId++) {
12529 if (b & 0x01 << winId) {
12530 service.windows[winId].clearText();
12531 }
12532 }
12533
12534 return i;
12535 };
12536 /**
12537 * Parse and execute the DLW command.
12538 *
12539 * Re-initialize windows based on the parsed bitmask.
12540 *
12541 * @param {Integer} i Current index in the 708 packet
12542 * @param {Service} service The service object to be affected
12543 * @return {Integer} New index after parsing
12544 */
12545
12546
12547 Cea708Stream.prototype.deleteWindows = function (i, service) {
12548 var packetData = this.current708Packet.data;
12549 var b = packetData[++i];
12550 var pts = this.getPts(i);
12551 this.flushDisplayed(pts, service);
12552
12553 for (var winId = 0; winId < 8; winId++) {
12554 if (b & 0x01 << winId) {
12555 service.windows[winId].reset();
12556 }
12557 }
12558
12559 return i;
12560 };
12561 /**
12562 * Parse and execute the SPA command.
12563 *
12564 * Set pen attributes of the current window.
12565 *
12566 * @param {Integer} i Current index in the 708 packet
12567 * @param {Service} service The service object to be affected
12568 * @return {Integer} New index after parsing
12569 */
12570
12571
12572 Cea708Stream.prototype.setPenAttributes = function (i, service) {
12573 var packetData = this.current708Packet.data;
12574 var b;
12575 var penAttr = service.currentWindow.penAttr;
12576 b = packetData[++i];
12577 penAttr.textTag = (b & 0xf0) >> 4; // tt
12578
12579 penAttr.offset = (b & 0x0c) >> 2; // o
12580
12581 penAttr.penSize = b & 0x03; // s
12582
12583 b = packetData[++i];
12584 penAttr.italics = (b & 0x80) >> 7; // i
12585
12586 penAttr.underline = (b & 0x40) >> 6; // u
12587
12588 penAttr.edgeType = (b & 0x38) >> 3; // et
12589
12590 penAttr.fontStyle = b & 0x07; // fs
12591
12592 return i;
12593 };
12594 /**
12595 * Parse and execute the SPC command.
12596 *
12597 * Set pen color of the current window.
12598 *
12599 * @param {Integer} i Current index in the 708 packet
12600 * @param {Service} service The service object to be affected
12601 * @return {Integer} New index after parsing
12602 */
12603
12604
12605 Cea708Stream.prototype.setPenColor = function (i, service) {
12606 var packetData = this.current708Packet.data;
12607 var b;
12608 var penColor = service.currentWindow.penColor;
12609 b = packetData[++i];
12610 penColor.fgOpacity = (b & 0xc0) >> 6; // fo
12611
12612 penColor.fgRed = (b & 0x30) >> 4; // fr
12613
12614 penColor.fgGreen = (b & 0x0c) >> 2; // fg
12615
12616 penColor.fgBlue = b & 0x03; // fb
12617
12618 b = packetData[++i];
12619 penColor.bgOpacity = (b & 0xc0) >> 6; // bo
12620
12621 penColor.bgRed = (b & 0x30) >> 4; // br
12622
12623 penColor.bgGreen = (b & 0x0c) >> 2; // bg
12624
12625 penColor.bgBlue = b & 0x03; // bb
12626
12627 b = packetData[++i];
12628 penColor.edgeRed = (b & 0x30) >> 4; // er
12629
12630 penColor.edgeGreen = (b & 0x0c) >> 2; // eg
12631
12632 penColor.edgeBlue = b & 0x03; // eb
12633
12634 return i;
12635 };
12636 /**
12637 * Parse and execute the SPL command.
12638 *
12639 * Set pen location of the current window.
12640 *
12641 * @param {Integer} i Current index in the 708 packet
12642 * @param {Service} service The service object to be affected
12643 * @return {Integer} New index after parsing
12644 */
12645
12646
12647 Cea708Stream.prototype.setPenLocation = function (i, service) {
12648 var packetData = this.current708Packet.data;
12649 var b;
12650 var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
12651
12652 service.currentWindow.pendingNewLine = true;
12653 b = packetData[++i];
12654 penLoc.row = b & 0x0f; // r
12655
12656 b = packetData[++i];
12657 penLoc.column = b & 0x3f; // c
12658
12659 return i;
12660 };
12661 /**
12662 * Execute the RST command.
12663 *
12664 * Reset service to a clean slate. Re-initialize.
12665 *
12666 * @param {Integer} i Current index in the 708 packet
12667 * @param {Service} service The service object to be affected
12668 * @return {Service} Re-initialized service
12669 */
12670
12671
12672 Cea708Stream.prototype.reset = function (i, service) {
12673 var pts = this.getPts(i);
12674 this.flushDisplayed(pts, service);
12675 return this.initService(service.serviceNum, i);
12676 }; // This hash maps non-ASCII, special, and extended character codes to their
12677 // proper Unicode equivalent. The first keys that are only a single byte
12678 // are the non-standard ASCII characters, which simply map the CEA608 byte
12679 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
12680 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
12681 // can be performed regardless of the field and data channel on which the
12682 // character code was received.
12683
12684
12685 var CHARACTER_TRANSLATION = {
12686 0x2a: 0xe1,
12687 // á
12688 0x5c: 0xe9,
12689 // é
12690 0x5e: 0xed,
12691 // í
12692 0x5f: 0xf3,
12693 // ó
12694 0x60: 0xfa,
12695 // ú
12696 0x7b: 0xe7,
12697 // ç
12698 0x7c: 0xf7,
12699 // ÷
12700 0x7d: 0xd1,
12701 // Ñ
12702 0x7e: 0xf1,
12703 // ñ
12704 0x7f: 0x2588,
12705 // █
12706 0x0130: 0xae,
12707 // ®
12708 0x0131: 0xb0,
12709 // °
12710 0x0132: 0xbd,
12711 // ½
12712 0x0133: 0xbf,
12713 // ¿
12714 0x0134: 0x2122,
12715 // ™
12716 0x0135: 0xa2,
12717 // ¢
12718 0x0136: 0xa3,
12719 // £
12720 0x0137: 0x266a,
12721 // ♪
12722 0x0138: 0xe0,
12723 // à
12724 0x0139: 0xa0,
12725 //
12726 0x013a: 0xe8,
12727 // è
12728 0x013b: 0xe2,
12729 // â
12730 0x013c: 0xea,
12731 // ê
12732 0x013d: 0xee,
12733 // î
12734 0x013e: 0xf4,
12735 // ô
12736 0x013f: 0xfb,
12737 // û
12738 0x0220: 0xc1,
12739 // Á
12740 0x0221: 0xc9,
12741 // É
12742 0x0222: 0xd3,
12743 // Ó
12744 0x0223: 0xda,
12745 // Ú
12746 0x0224: 0xdc,
12747 // Ü
12748 0x0225: 0xfc,
12749 // ü
12750 0x0226: 0x2018,
12751 // ‘
12752 0x0227: 0xa1,
12753 // ¡
12754 0x0228: 0x2a,
12755 // *
12756 0x0229: 0x27,
12757 // '
12758 0x022a: 0x2014,
12759 // —
12760 0x022b: 0xa9,
12761 // ©
12762 0x022c: 0x2120,
12763 // ℠
12764 0x022d: 0x2022,
12765 // •
12766 0x022e: 0x201c,
12767 // “
12768 0x022f: 0x201d,
12769 // ”
12770 0x0230: 0xc0,
12771 // À
12772 0x0231: 0xc2,
12773 // Â
12774 0x0232: 0xc7,
12775 // Ç
12776 0x0233: 0xc8,
12777 // È
12778 0x0234: 0xca,
12779 // Ê
12780 0x0235: 0xcb,
12781 // Ë
12782 0x0236: 0xeb,
12783 // ë
12784 0x0237: 0xce,
12785 // Î
12786 0x0238: 0xcf,
12787 // Ï
12788 0x0239: 0xef,
12789 // ï
12790 0x023a: 0xd4,
12791 // Ô
12792 0x023b: 0xd9,
12793 // Ù
12794 0x023c: 0xf9,
12795 // ù
12796 0x023d: 0xdb,
12797 // Û
12798 0x023e: 0xab,
12799 // «
12800 0x023f: 0xbb,
12801 // »
12802 0x0320: 0xc3,
12803 // Ã
12804 0x0321: 0xe3,
12805 // ã
12806 0x0322: 0xcd,
12807 // Í
12808 0x0323: 0xcc,
12809 // Ì
12810 0x0324: 0xec,
12811 // ì
12812 0x0325: 0xd2,
12813 // Ò
12814 0x0326: 0xf2,
12815 // ò
12816 0x0327: 0xd5,
12817 // Õ
12818 0x0328: 0xf5,
12819 // õ
12820 0x0329: 0x7b,
12821 // {
12822 0x032a: 0x7d,
12823 // }
12824 0x032b: 0x5c,
12825 // \
12826 0x032c: 0x5e,
12827 // ^
12828 0x032d: 0x5f,
12829 // _
12830 0x032e: 0x7c,
12831 // |
12832 0x032f: 0x7e,
12833 // ~
12834 0x0330: 0xc4,
12835 // Ä
12836 0x0331: 0xe4,
12837 // ä
12838 0x0332: 0xd6,
12839 // Ö
12840 0x0333: 0xf6,
12841 // ö
12842 0x0334: 0xdf,
12843 // ß
12844 0x0335: 0xa5,
12845 // ¥
12846 0x0336: 0xa4,
12847 // ¤
12848 0x0337: 0x2502,
12849 // │
12850 0x0338: 0xc5,
12851 // Å
12852 0x0339: 0xe5,
12853 // å
12854 0x033a: 0xd8,
12855 // Ø
12856 0x033b: 0xf8,
12857 // ø
12858 0x033c: 0x250c,
12859 // ┌
12860 0x033d: 0x2510,
12861 // ┐
12862 0x033e: 0x2514,
12863 // └
12864 0x033f: 0x2518 // ┘
12865
12866 };
12867
12868 var getCharFromCode = function getCharFromCode(code) {
12869 if (code === null) {
12870 return '';
12871 }
12872
12873 code = CHARACTER_TRANSLATION[code] || code;
12874 return String.fromCharCode(code);
12875 }; // the index of the last row in a CEA-608 display buffer
12876
12877
12878 var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
12879 // getting it through bit logic.
12880
12881 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
12882 // cells. The "bottom" row is the last element in the outer array.
12883
12884 var createDisplayBuffer = function createDisplayBuffer() {
12885 var result = [],
12886 i = BOTTOM_ROW + 1;
12887
12888 while (i--) {
12889 result.push('');
12890 }
12891
12892 return result;
12893 };
12894
12895 var Cea608Stream = function Cea608Stream(field, dataChannel) {
12896 Cea608Stream.prototype.init.call(this);
12897 this.field_ = field || 0;
12898 this.dataChannel_ = dataChannel || 0;
12899 this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
12900 this.setConstants();
12901 this.reset();
12902
12903 this.push = function (packet) {
12904 var data, swap, char0, char1, text; // remove the parity bits
12905
12906 data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
12907
12908 if (data === this.lastControlCode_) {
12909 this.lastControlCode_ = null;
12910 return;
12911 } // Store control codes
12912
12913
12914 if ((data & 0xf000) === 0x1000) {
12915 this.lastControlCode_ = data;
12916 } else if (data !== this.PADDING_) {
12917 this.lastControlCode_ = null;
12918 }
12919
12920 char0 = data >>> 8;
12921 char1 = data & 0xff;
12922
12923 if (data === this.PADDING_) {
12924 return;
12925 } else if (data === this.RESUME_CAPTION_LOADING_) {
12926 this.mode_ = 'popOn';
12927 } else if (data === this.END_OF_CAPTION_) {
12928 // If an EOC is received while in paint-on mode, the displayed caption
12929 // text should be swapped to non-displayed memory as if it was a pop-on
12930 // caption. Because of that, we should explicitly switch back to pop-on
12931 // mode
12932 this.mode_ = 'popOn';
12933 this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
12934
12935 this.flushDisplayed(packet.pts); // flip memory
12936
12937 swap = this.displayed_;
12938 this.displayed_ = this.nonDisplayed_;
12939 this.nonDisplayed_ = swap; // start measuring the time to display the caption
12940
12941 this.startPts_ = packet.pts;
12942 } else if (data === this.ROLL_UP_2_ROWS_) {
12943 this.rollUpRows_ = 2;
12944 this.setRollUp(packet.pts);
12945 } else if (data === this.ROLL_UP_3_ROWS_) {
12946 this.rollUpRows_ = 3;
12947 this.setRollUp(packet.pts);
12948 } else if (data === this.ROLL_UP_4_ROWS_) {
12949 this.rollUpRows_ = 4;
12950 this.setRollUp(packet.pts);
12951 } else if (data === this.CARRIAGE_RETURN_) {
12952 this.clearFormatting(packet.pts);
12953 this.flushDisplayed(packet.pts);
12954 this.shiftRowsUp_();
12955 this.startPts_ = packet.pts;
12956 } else if (data === this.BACKSPACE_) {
12957 if (this.mode_ === 'popOn') {
12958 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
12959 } else {
12960 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
12961 }
12962 } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
12963 this.flushDisplayed(packet.pts);
12964 this.displayed_ = createDisplayBuffer();
12965 } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
12966 this.nonDisplayed_ = createDisplayBuffer();
12967 } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
12968 if (this.mode_ !== 'paintOn') {
12969 // NOTE: This should be removed when proper caption positioning is
12970 // implemented
12971 this.flushDisplayed(packet.pts);
12972 this.displayed_ = createDisplayBuffer();
12973 }
12974
12975 this.mode_ = 'paintOn';
12976 this.startPts_ = packet.pts; // Append special characters to caption text
12977 } else if (this.isSpecialCharacter(char0, char1)) {
12978 // Bitmask char0 so that we can apply character transformations
12979 // regardless of field and data channel.
12980 // Then byte-shift to the left and OR with char1 so we can pass the
12981 // entire character code to `getCharFromCode`.
12982 char0 = (char0 & 0x03) << 8;
12983 text = getCharFromCode(char0 | char1);
12984 this[this.mode_](packet.pts, text);
12985 this.column_++; // Append extended characters to caption text
12986 } else if (this.isExtCharacter(char0, char1)) {
12987 // Extended characters always follow their "non-extended" equivalents.
12988 // i.e. if a "è" is desired, you'll always receive "eè"; non-compliant
12989 // decoders are supposed to drop the "è", while compliant decoders
12990 // backspace the "e" and insert "è".
12991 // Delete the previous character
12992 if (this.mode_ === 'popOn') {
12993 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
12994 } else {
12995 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
12996 } // Bitmask char0 so that we can apply character transformations
12997 // regardless of field and data channel.
12998 // Then byte-shift to the left and OR with char1 so we can pass the
12999 // entire character code to `getCharFromCode`.
13000
13001
13002 char0 = (char0 & 0x03) << 8;
13003 text = getCharFromCode(char0 | char1);
13004 this[this.mode_](packet.pts, text);
13005 this.column_++; // Process mid-row codes
13006 } else if (this.isMidRowCode(char0, char1)) {
13007 // Attributes are not additive, so clear all formatting
13008 this.clearFormatting(packet.pts); // According to the standard, mid-row codes
13009 // should be replaced with spaces, so add one now
13010
13011 this[this.mode_](packet.pts, ' ');
13012 this.column_++;
13013
13014 if ((char1 & 0xe) === 0xe) {
13015 this.addFormatting(packet.pts, ['i']);
13016 }
13017
13018 if ((char1 & 0x1) === 0x1) {
13019 this.addFormatting(packet.pts, ['u']);
13020 } // Detect offset control codes and adjust cursor
13021
13022 } else if (this.isOffsetControlCode(char0, char1)) {
13023 // Cursor position is set by indent PAC (see below) in 4-column
13024 // increments, with an additional offset code of 1-3 to reach any
13025 // of the 32 columns specified by CEA-608. So all we need to do
13026 // here is increment the column cursor by the given offset.
13027 this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
13028 } else if (this.isPAC(char0, char1)) {
13029 // There's no logic for PAC -> row mapping, so we have to just
13030 // find the row code in an array and use its index :(
13031 var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
13032
13033 if (this.mode_ === 'rollUp') {
13034 // This implies that the base row is incorrectly set.
13035 // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
13036 // of roll-up rows set.
13037 if (row - this.rollUpRows_ + 1 < 0) {
13038 row = this.rollUpRows_ - 1;
13039 }
13040
13041 this.setRollUp(packet.pts, row);
13042 }
13043
13044 if (row !== this.row_) {
13045 // formatting is only persistent for current row
13046 this.clearFormatting(packet.pts);
13047 this.row_ = row;
13048 } // All PACs can apply underline, so detect and apply
13049 // (All odd-numbered second bytes set underline)
13050
13051
13052 if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
13053 this.addFormatting(packet.pts, ['u']);
13054 }
13055
13056 if ((data & 0x10) === 0x10) {
13057 // We've got an indent level code. Each successive even number
13058 // increments the column cursor by 4, so we can get the desired
13059 // column position by bit-shifting to the right (to get n/2)
13060 // and multiplying by 4.
13061 this.column_ = ((data & 0xe) >> 1) * 4;
13062 }
13063
13064 if (this.isColorPAC(char1)) {
13065 // it's a color code, though we only support white, which
13066 // can be either normal or italicized. white italics can be
13067 // either 0x4e or 0x6e depending on the row, so we just
13068 // bitwise-and with 0xe to see if italics should be turned on
13069 if ((char1 & 0xe) === 0xe) {
13070 this.addFormatting(packet.pts, ['i']);
13071 }
13072 } // We have a normal character in char0, and possibly one in char1
13073
13074 } else if (this.isNormalChar(char0)) {
13075 if (char1 === 0x00) {
13076 char1 = null;
13077 }
13078
13079 text = getCharFromCode(char0);
13080 text += getCharFromCode(char1);
13081 this[this.mode_](packet.pts, text);
13082 this.column_ += text.length;
13083 } // finish data processing
13084
13085 };
13086 };
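// Worked example of the parity stripping above: the on-air byte pair
// 0x94 0x20 carries odd-parity high bits, so (0x9420 & 0x7f7f) === 0x1420,
// which is RESUME_CAPTION_LOADING_ when field_ = 0 and dataChannel_ = 0
// (i.e. CC1), per the constants built in setConstants below.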
13087
13088 Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
13089 // display buffer
13090
13091 Cea608Stream.prototype.flushDisplayed = function (pts) {
13092 var content = this.displayed_ // remove spaces from the start and end of the string
13093 .map(function (row, index) {
13094 try {
13095 return row.trim();
13096 } catch (e) {
13097 // Ordinarily, this shouldn't happen. However, caption
13098 // parsing errors should not throw exceptions and
13099 // break playback.
13100 this.trigger('log', {
13101 level: 'warn',
13102 message: 'Skipping a malformed 608 caption at index ' + index + '.'
13103 });
13104 return '';
13105 }
13106 }, this) // combine all text rows to display in one cue
13107 .join('\n') // and remove blank rows from the start and end, but not the middle
13108 .replace(/^\n+|\n+$/g, '');
13109
13110 if (content.length) {
13111 this.trigger('data', {
13112 startPts: this.startPts_,
13113 endPts: pts,
13114 text: content,
13115 stream: this.name_
13116 });
13117 }
13118 };
13119 /**
13120 * Zero out the data, used for startup and on seek
13121 */
13122
13123
13124 Cea608Stream.prototype.reset = function () {
13125 this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
13126 // actually display captions. If a caption is shifted to a row
13127 // with a lower index than this, it is cleared from the display
13128 // buffer
13129
13130 this.topRow_ = 0;
13131 this.startPts_ = 0;
13132 this.displayed_ = createDisplayBuffer();
13133 this.nonDisplayed_ = createDisplayBuffer();
13134 this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
13135
13136 this.column_ = 0;
13137 this.row_ = BOTTOM_ROW;
13138 this.rollUpRows_ = 2; // This variable holds currently-applied formatting
13139
13140 this.formatting_ = [];
13141 };
13142 /**
13143 * Sets up control code and related constants for this instance
13144 */
13145
13146
13147 Cea608Stream.prototype.setConstants = function () {
13148 // The following attributes have these uses:
13149 // ext_ : char0 for mid-row codes, and the base for extended
13150 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
13151 // extended codes)
13152 // control_: char0 for control codes, except byte-shifted to the
13153 // left so that we can do this.control_ | CONTROL_CODE
13154 // offset_: char0 for tab offset codes
13155 //
13156 // It's also worth noting that control codes, and _only_ control codes,
13157 // differ between field 1 and field 2. Field 2 control codes are always
13158 // their field 1 value plus 1. That's why there's the "| field" on the
13159 // control value.
13160 if (this.dataChannel_ === 0) {
13161 this.BASE_ = 0x10;
13162 this.EXT_ = 0x11;
13163 this.CONTROL_ = (0x14 | this.field_) << 8;
13164 this.OFFSET_ = 0x17;
13165 } else if (this.dataChannel_ === 1) {
13166 this.BASE_ = 0x18;
13167 this.EXT_ = 0x19;
13168 this.CONTROL_ = (0x1c | this.field_) << 8;
13169 this.OFFSET_ = 0x1f;
13170 } // Constants for the LSByte command codes recognized by Cea608Stream. This
13171 // list is not exhaustive. For a more comprehensive listing and semantics see
13172 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
13173 // Padding
13174
13175
13176 this.PADDING_ = 0x0000; // Pop-on Mode
13177
13178 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
13179 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
13180
13181 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
13182 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
13183 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
13184 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
13185
13186 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
13187
13188 this.BACKSPACE_ = this.CONTROL_ | 0x21;
13189 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
13190 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
13191 };
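// Worked example: for field_ = 0 and dataChannel_ = 0 (CC1), CONTROL_ is
// (0x14 | 0) << 8 = 0x1400, so RESUME_CAPTION_LOADING_ = 0x1420,
// CARRIAGE_RETURN_ = 0x142d and END_OF_CAPTION_ = 0x142f; for field_ = 1
// (CC3) each control code is the CC1 value plus 0x100, e.g. RCL = 0x1520.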
13192 /**
13193 * Detects if the 2-byte packet data is a special character
13194 *
13195 * Special characters have a second byte in the range 0x30 to 0x3f,
13196 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
13197 * data channel 2).
13198 *
13199 * @param {Integer} char0 The first byte
13200 * @param {Integer} char1 The second byte
13201 * @return {Boolean} Whether the 2 bytes are a special character
13202 */
13203
13204
13205 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
13206 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
13207 };
13208 /**
13209 * Detects if the 2-byte packet data is an extended character
13210 *
13211 * Extended characters have a second byte in the range 0x20 to 0x3f,
13212 * with the first byte being 0x12 or 0x13 (for data channel 1) or
13213 * 0x1a or 0x1b (for data channel 2).
13214 *
13215 * @param {Integer} char0 The first byte
13216 * @param {Integer} char1 The second byte
13217 * @return {Boolean} Whether the 2 bytes are an extended character
13218 */
13219
13220
13221 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
13222 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
13223 };
13224 /**
13225 * Detects if the 2-byte packet is a mid-row code
13226 *
13227 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
13228 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
13229 * channel 2).
13230 *
13231 * @param {Integer} char0 The first byte
13232 * @param {Integer} char1 The second byte
13233 * @return {Boolean} Whether the 2 bytes are a mid-row code
13234 */
13235
13236
13237 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
13238 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
13239 };
13240 /**
13241 * Detects if the 2-byte packet is an offset control code
13242 *
13243 * Offset control codes have a second byte in the range 0x21 to 0x23,
13244 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
13245 * data channel 2).
13246 *
13247 * @param {Integer} char0 The first byte
13248 * @param {Integer} char1 The second byte
13249 * @return {Boolean} Whether the 2 bytes are an offset control code
13250 */
13251
13252
13253 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
13254 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
13255 };
13256 /**
13257 * Detects if the 2-byte packet is a Preamble Address Code
13258 *
13259 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
13260 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
13261 * range 0x40 to 0x7f.
13262 *
13263 * @param {Integer} char0 The first byte
13264 * @param {Integer} char1 The second byte
13265 * @return {Boolean} Whether the 2 bytes are a PAC
13266 */
13267
13268
13269 Cea608Stream.prototype.isPAC = function (char0, char1) {
13270 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
13271 };
13272 /**
13273 * Detects if a packet's second byte is in the range of a PAC color code
13274 *
13275 * PAC color codes have a second byte in the range 0x40 to 0x4f or
13276 * 0x60 to 0x7f.
13277 *
13278 * @param {Integer} char1 The second byte
13279 * @return {Boolean} Whether the byte is a color PAC
13280 */
13281
13282
13283 Cea608Stream.prototype.isColorPAC = function (char1) {
13284 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
13285 };
13286 /**
13287 * Detects if a single byte is in the range of a normal character
13288 *
13289 * Normal text bytes are in the range 0x20 to 0x7f.
13290 *
13291 * @param {Integer} char The byte
13292 * @return {Boolean} Whether the byte is a normal character
13293 */
13294
13295
13296 Cea608Stream.prototype.isNormalChar = function (char) {
13297 return char >= 0x20 && char <= 0x7f;
13298 };
13299 /**
13300 * Configures roll-up
13301 *
13302 * @param {Integer} pts Current PTS
13303 * @param {Integer} newBaseRow Used by PACs to slide the current window to
13304 * a new position
13305 */
13306
13307
13308 Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
13309 // Reset the base row to the bottom row when switching modes
13310 if (this.mode_ !== 'rollUp') {
13311 this.row_ = BOTTOM_ROW;
13312 this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
13313
13314 this.flushDisplayed(pts);
13315 this.nonDisplayed_ = createDisplayBuffer();
13316 this.displayed_ = createDisplayBuffer();
13317 }
13318
13319 if (newBaseRow !== undefined && newBaseRow !== this.row_) {
13320 // move currently displayed captions (up or down) to the new base row
13321 for (var i = 0; i < this.rollUpRows_; i++) {
13322 this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
13323 this.displayed_[this.row_ - i] = '';
13324 }
13325 }
13326
13327 if (newBaseRow === undefined) {
13328 newBaseRow = this.row_;
13329 }
13330
13331 this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
13332 }; // Adds the opening HTML tag for the passed character to the caption text,
13333 // and keeps track of it for later closing
13334
13335
13336 Cea608Stream.prototype.addFormatting = function (pts, format) {
13337 this.formatting_ = this.formatting_.concat(format);
13338 var text = format.reduce(function (text, format) {
13339 return text + '<' + format + '>';
13340 }, '');
13341 this[this.mode_](pts, text);
13342 }; // Adds HTML closing tags for current formatting to caption text and
13343 // clears remembered formatting
13344
13345
13346 Cea608Stream.prototype.clearFormatting = function (pts) {
13347 if (!this.formatting_.length) {
13348 return;
13349 }
13350
13351 var text = this.formatting_.reverse().reduce(function (text, format) {
13352 return text + '</' + format + '>';
13353 }, '');
13354 this.formatting_ = [];
13355 this[this.mode_](pts, text);
13356 }; // Mode Implementations
13357
13358
13359 Cea608Stream.prototype.popOn = function (pts, text) {
13360 var baseRow = this.nonDisplayed_[this.row_]; // buffer characters
13361
13362 baseRow += text;
13363 this.nonDisplayed_[this.row_] = baseRow;
13364 };
13365
13366 Cea608Stream.prototype.rollUp = function (pts, text) {
13367 var baseRow = this.displayed_[this.row_];
13368 baseRow += text;
13369 this.displayed_[this.row_] = baseRow;
13370 };
13371
13372 Cea608Stream.prototype.shiftRowsUp_ = function () {
13373 var i; // clear out inactive rows
13374
13375 for (i = 0; i < this.topRow_; i++) {
13376 this.displayed_[i] = '';
13377 }
13378
13379 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
13380 this.displayed_[i] = '';
13381 } // shift displayed rows up
13382
13383
13384 for (i = this.topRow_; i < this.row_; i++) {
13385 this.displayed_[i] = this.displayed_[i + 1];
13386 } // clear out the bottom row
13387
13388
13389 this.displayed_[this.row_] = '';
13390 };
13391
13392 Cea608Stream.prototype.paintOn = function (pts, text) {
13393 var baseRow = this.displayed_[this.row_];
13394 baseRow += text;
13395 this.displayed_[this.row_] = baseRow;
13396 }; // exports
13397
13398
13399 var captionStream = {
13400 CaptionStream: CaptionStream$1,
13401 Cea608Stream: Cea608Stream,
13402 Cea708Stream: Cea708Stream
13403 };
13404 /**
13405 * mux.js
13406 *
13407 * Copyright (c) Brightcove
13408 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
13409 */
13410
13411 var streamTypes = {
13412 H264_STREAM_TYPE: 0x1B,
13413 ADTS_STREAM_TYPE: 0x0F,
13414 METADATA_STREAM_TYPE: 0x15
13415 };
13416 var MAX_TS = 8589934592;
13417 var RO_THRESH = 4294967296;
13418 var TYPE_SHARED = 'shared';
13419
13420 var handleRollover$1 = function handleRollover(value, reference) {
13421 var direction = 1;
13422
13423 if (value > reference) {
13424 // If the current timestamp value is greater than our reference timestamp and we detect a
13425 // timestamp rollover, this means the roll over is happening in the opposite direction.
13426 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
13427 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
13428 // rollover point. In loading this segment, the timestamp values will be very large,
13429 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
13430 // the time stamp to be `value - 2^33`.
13431 direction = -1;
13432 } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
13433 // cause an incorrect adjustment.
13434
13435
13436 while (Math.abs(reference - value) > RO_THRESH) {
13437 value += direction * MAX_TS;
13438 }
13439
13440 return value;
13441 };
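// Worked example: with reference = 8589934000 (just under MAX_TS = 2^33) and
// value = 1000 (a timestamp read just after the 33-bit counter wrapped),
// |reference - value| exceeds RO_THRESH, so the loop adds MAX_TS once and
// returns 8589935592, i.e. 1592 ticks after the reference, preserving
// monotonic timestamps across the rollover.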
13442
13443 var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
13444 var lastDTS, referenceDTS;
13445 TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
13446 // video and audio. We could use `undefined` here, but having a string
13447 // makes debugging a little clearer.
13448
13449 this.type_ = type || TYPE_SHARED;
13450
13451 this.push = function (data) {
13452 // Any "shared" rollover streams will accept _all_ data. Otherwise,
13453 // streams will only accept data that matches their type.
13454 if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
13455 return;
13456 }
13457
13458 if (referenceDTS === undefined) {
13459 referenceDTS = data.dts;
13460 }
13461
13462 data.dts = handleRollover$1(data.dts, referenceDTS);
13463 data.pts = handleRollover$1(data.pts, referenceDTS);
13464 lastDTS = data.dts;
13465 this.trigger('data', data);
13466 };
13467
13468 this.flush = function () {
13469 referenceDTS = lastDTS;
13470 this.trigger('done');
13471 };
13472
13473 this.endTimeline = function () {
13474 this.flush();
13475 this.trigger('endedtimeline');
13476 };
13477
13478 this.discontinuity = function () {
13479 referenceDTS = void 0;
13480 lastDTS = void 0;
13481 };
13482
13483 this.reset = function () {
13484 this.discontinuity();
13485 this.trigger('reset');
13486 };
13487 };
13488
13489 TimestampRolloverStream$1.prototype = new stream();
13490 var timestampRolloverStream = {
13491 TimestampRolloverStream: TimestampRolloverStream$1,
13492 handleRollover: handleRollover$1
13493 };
13494
13495 var percentEncode$1 = function percentEncode(bytes, start, end) {
13496 var i,
13497 result = '';
13498
13499 for (i = start; i < end; i++) {
13500 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
13501 }
13502
13503 return result;
13504 },
13505 // return the string representation of the specified byte range,
13506 // interpreted as UTF-8.
13507 parseUtf8 = function parseUtf8(bytes, start, end) {
13508 return decodeURIComponent(percentEncode$1(bytes, start, end));
13509 },
13510 // return the string representation of the specified byte range,
13511 // interpreted as ISO-8859-1.
13512 parseIso88591$1 = function parseIso88591(bytes, start, end) {
13513 return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
13514 },
13515 parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
13516 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
13517 },
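// Worked example: ID3 "syncsafe" integers keep the high bit of every byte
// clear, so each byte contributes 7 bits. The byte sequence
// [0x00, 0x00, 0x02, 0x01] decodes as (0 << 21) | (0 << 14) | (2 << 7) | 1
// = 257, whereas a plain 32-bit big-endian read would give 513.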
13518 tagParsers = {
13519 TXXX: function TXXX(tag) {
13520 var i;
13521
13522 if (tag.data[0] !== 3) {
13523 // ignore frames with unrecognized character encodings
13524 return;
13525 }
13526
13527 for (i = 1; i < tag.data.length; i++) {
13528 if (tag.data[i] === 0) {
13529 // parse the text fields
13530 tag.description = parseUtf8(tag.data, 1, i); // do not include the null terminator in the tag value
13531
13532 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
13533 break;
13534 }
13535 }
13536
13537 tag.data = tag.value;
13538 },
13539 WXXX: function WXXX(tag) {
13540 var i;
13541
13542 if (tag.data[0] !== 3) {
13543 // ignore frames with unrecognized character encodings
13544 return;
13545 }
13546
13547 for (i = 1; i < tag.data.length; i++) {
13548 if (tag.data[i] === 0) {
13549 // parse the description and URL fields
13550 tag.description = parseUtf8(tag.data, 1, i);
13551 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
13552 break;
13553 }
13554 }
13555 },
13556 PRIV: function PRIV(tag) {
13557 var i;
13558
13559 for (i = 0; i < tag.data.length; i++) {
13560 if (tag.data[i] === 0) {
13561 // parse the null-terminated owner identifier
13562 tag.owner = parseIso88591$1(tag.data, 0, i);
13563 break;
13564 }
13565 }
13566
13567 tag.privateData = tag.data.subarray(i + 1);
13568 tag.data = tag.privateData;
13569 }
13570 },
13571 _MetadataStream;
13572
13573 _MetadataStream = function MetadataStream(options) {
13574 var settings = {
13575 // the bytes of the program-level descriptor field in MP2T
13576 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
13577 // program element descriptors"
13578 descriptor: options && options.descriptor
13579 },
13580 // the total size in bytes of the ID3 tag being parsed
13581 tagSize = 0,
13582 // tag data that is not complete enough to be parsed
13583 buffer = [],
13584 // the total number of bytes currently in the buffer
13585 bufferSize = 0,
13586 i;
13587
13588 _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
13589 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
13590
13591
13592 this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
13593
13594 if (settings.descriptor) {
13595 for (i = 0; i < settings.descriptor.length; i++) {
13596 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
13597 }
13598 }
13599
13600 this.push = function (chunk) {
13601 var tag, frameStart, frameSize, frame, i, frameHeader;
13602
13603 if (chunk.type !== 'timed-metadata') {
13604 return;
13605 } // if data_alignment_indicator is set in the PES header,
13606 // we must have the start of a new ID3 tag. Assume anything
13607 // remaining in the buffer was malformed and throw it out
13608
13609
13610 if (chunk.dataAlignmentIndicator) {
13611 bufferSize = 0;
13612 buffer.length = 0;
13613 } // ignore events that don't look like ID3 data
13614
13615
13616 if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
13617 this.trigger('log', {
13618 level: 'warn',
13619 message: 'Skipping unrecognized metadata packet'
13620 });
13621 return;
13622 } // add this chunk to the data we've collected so far
13623
13624
13625 buffer.push(chunk);
13626 bufferSize += chunk.data.byteLength; // grab the size of the entire tag from the ID3 header
13627
13628 if (buffer.length === 1) {
13629 // the tag size is transmitted as a 28-bit syncsafe integer in the
13630 // last four bytes of the ID3 header.
13631 // The most significant bit of each byte is dropped and the
13632 // results concatenated to recover the actual value.
13633 tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
13634 // convenient for our comparisons to include it
13635
13636 tagSize += 10;
13637 } // if the entire frame has not arrived, wait for more data
13638
13639
13640 if (bufferSize < tagSize) {
13641 return;
13642 } // collect the entire frame so it can be parsed
13643
13644
13645 tag = {
13646 data: new Uint8Array(tagSize),
13647 frames: [],
13648 pts: buffer[0].pts,
13649 dts: buffer[0].dts
13650 };
13651
13652 for (i = 0; i < tagSize;) {
13653 tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
13654 i += buffer[0].data.byteLength;
13655 bufferSize -= buffer[0].data.byteLength;
13656 buffer.shift();
13657 } // find the start of the first frame and the end of the tag
13658
13659
13660 frameStart = 10;
13661
13662 if (tag.data[5] & 0x40) {
13663 // advance the frame start past the extended header
13664 frameStart += 4; // header size field
13665
13666 frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end
13667
13668 tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
13669 } // parse one or more ID3 frames
13670 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
13671
13672
13673 do {
13674 // determine the number of bytes in this frame
13675 frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));
13676
13677 if (frameSize < 1) {
13678 this.trigger('log', {
13679 level: 'warn',
13680 message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
13681 });
13682 return;
13683 }
13684
13685 frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
13686 frame = {
13687 id: frameHeader,
13688 data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
13689 };
13690 frame.key = frame.id;
13691
13692 if (tagParsers[frame.id]) {
13693 tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
13694 // time for raw AAC data
13695
13696 if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
13697 var d = frame.data,
13698 size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
13699 size *= 4;
13700 size += d[7] & 0x03;
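// Note on the arithmetic above: the 33-bit timestamp can't be assembled
// with JavaScript's 32-bit bitwise operators alone, so the top 31 bits
// are shifted into place first, the result is scaled by 4 (two bits),
// and the final two bits of d[7] are added back arithmetically.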
13701 frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
13702 // on the value of this frame
13703 // we couldn't have known the appropriate pts and dts before
13704 // parsing this ID3 tag so set those values now
13705
13706 if (tag.pts === undefined && tag.dts === undefined) {
13707 tag.pts = frame.timeStamp;
13708 tag.dts = frame.timeStamp;
13709 }
13710
13711 this.trigger('timestamp', frame);
13712 }
13713 }
13714
13715 tag.frames.push(frame);
13716 frameStart += 10; // advance past the frame header
13717
13718 frameStart += frameSize; // advance past the frame body
13719 } while (frameStart < tagSize);
13720
13721 this.trigger('data', tag);
13722 };
13723 };
13724
13725 _MetadataStream.prototype = new stream();
13726 var metadataStream = _MetadataStream;
13727 var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types
13728
13729 var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants
13730
13731
13732 var MP2T_PACKET_LENGTH$1 = 188,
13733 // bytes
13734 SYNC_BYTE$1 = 0x47;
13735 /**
13736 * Splits an incoming stream of binary data into MPEG-2 Transport
13737 * Stream packets.
13738 */
13739
13740 _TransportPacketStream = function TransportPacketStream() {
13741 var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),
13742 bytesInBuffer = 0;
13743
13744 _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
13745
13746 /**
13747 * Split a stream of data into M2TS packets
13748 **/
13749
13750
13751 this.push = function (bytes) {
13752 var startIndex = 0,
13753 endIndex = MP2T_PACKET_LENGTH$1,
13754 everything; // If there are bytes remaining from the last segment, prepend them to the
13755 // bytes that were pushed in
13756
13757 if (bytesInBuffer) {
13758 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
13759 everything.set(buffer.subarray(0, bytesInBuffer));
13760 everything.set(bytes, bytesInBuffer);
13761 bytesInBuffer = 0;
13762 } else {
13763 everything = bytes;
13764 } // While we have enough data for a packet
13765
13766
13767 while (endIndex < everything.byteLength) {
13768 // Look for a pair of start and end sync bytes in the data.
13769 if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {
13770 // We found a packet so emit it and jump one whole packet forward in
13771 // the stream
13772 this.trigger('data', everything.subarray(startIndex, endIndex));
13773 startIndex += MP2T_PACKET_LENGTH$1;
13774 endIndex += MP2T_PACKET_LENGTH$1;
13775 continue;
13776 } // If we get here, we have somehow become de-synchronized and we need to step
13777 // forward one byte at a time until we find a pair of sync bytes that denote
13778 // a packet
13779
13780
13781 startIndex++;
13782 endIndex++;
13783 } // If there was some data left over at the end of the segment that couldn't
13784 // possibly be a whole packet, keep it because it might be the start of a packet
13785 // that continues in the next segment
13786
13787
13788 if (startIndex < everything.byteLength) {
13789 buffer.set(everything.subarray(startIndex), 0);
13790 bytesInBuffer = everything.byteLength - startIndex;
13791 }
13792 };
13793 /**
13794 * Passes identified M2TS packets to the TransportParseStream to be parsed
13795 **/
13796
13797
13798 this.flush = function () {
13799 // If the buffer contains a whole packet when we are being flushed, emit it
13800 // and empty the buffer. Otherwise hold onto the data because it may be
13801 // important for decoding the next segment
13802 if (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {
13803 this.trigger('data', buffer);
13804 bytesInBuffer = 0;
13805 }
13806
13807 this.trigger('done');
13808 };
13809
13810 this.endTimeline = function () {
13811 this.flush();
13812 this.trigger('endedtimeline');
13813 };
13814
13815 this.reset = function () {
13816 bytesInBuffer = 0;
13817 this.trigger('reset');
13818 };
13819 };
13820
13821 _TransportPacketStream.prototype = new stream();
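// Editor's note: a standalone sketch (not part of the original bundle) of the
// sync-byte scan TransportPacketStream performs above. It slices complete
// 188-byte packets that both begin and end on a 0x47 sync byte and returns
// whatever trailing bytes could not form a whole packet, mirroring the
// buffer-carry behavior of push().
var exampleSplitTsPackets = function (bytes) {
  var SYNC = 0x47, LEN = 188, packets = [], start = 0, end = LEN;
  while (end < bytes.byteLength) {
    if (bytes[start] === SYNC && bytes[end] === SYNC) {
      packets.push(bytes.subarray(start, end));
      start += LEN;
      end += LEN;
    } else {
      // de-synchronized: slide forward one byte until sync bytes line up again
      start++;
      end++;
    }
  }
  return { packets: packets, remainder: bytes.subarray(start) };
};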
13822 /**
13823 * Accepts an MP2T TransportPacketStream and emits data events with parsed
13824 * forms of the individual transport stream packets.
13825 */
13826
13827 _TransportParseStream = function TransportParseStream() {
13828 var parsePsi, parsePat, parsePmt, self;
13829
13830 _TransportParseStream.prototype.init.call(this);
13831
13832 self = this;
13833 this.packetsWaitingForPmt = [];
13834 this.programMapTable = undefined;
13835
13836 parsePsi = function parsePsi(payload, psi) {
13837 var offset = 0; // PSI packets may be split into multiple sections and those
13838 // sections may be split into multiple packets. If a PSI
13839 // section starts in this packet, the payload_unit_start_indicator
13840 // will be true and the first byte of the payload will indicate
13841 // the offset from the current position to the start of the
13842 // section.
13843
13844 if (psi.payloadUnitStartIndicator) {
13845 offset += payload[offset] + 1;
13846 }
13847
13848 if (psi.type === 'pat') {
13849 parsePat(payload.subarray(offset), psi);
13850 } else {
13851 parsePmt(payload.subarray(offset), psi);
13852 }
13853 };
13854
13855 parsePat = function parsePat(payload, pat) {
13856 pat.section_number = payload[7]; // eslint-disable-line camelcase
13857
13858 pat.last_section_number = payload[8]; // eslint-disable-line camelcase
13859 // skip the PSI header and parse the first PMT entry
13860
13861 self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
13862 pat.pmtPid = self.pmtPid;
13863 };
13864 /**
13865 * Parse out the relevant fields of a Program Map Table (PMT).
13866 * @param payload {Uint8Array} the PMT-specific portion of an MP2T
13867 * packet. The first byte in this array should be the table_id
13868 * field.
13869 * @param pmt {object} the object that should be decorated with
13870 * fields parsed from the PMT.
13871 */
13872
13873
13874 parsePmt = function parsePmt(payload, pmt) {
13875 var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
13876 // take effect. We don't believe this should ever be the case
13877 // for HLS but we'll ignore "forward" PMT declarations if we see
13878 // them. Future PMT declarations have the current_next_indicator
13879 // set to zero.
13880
13881 if (!(payload[5] & 0x01)) {
13882 return;
13883 } // overwrite any existing program map table
13884
13885
13886 self.programMapTable = {
13887 video: null,
13888 audio: null,
13889 'timed-metadata': {}
13890 }; // the mapping table ends at the end of the current section
13891
13892 sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
13893 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
13894 // long the program info descriptors are
13895
13896 programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
13897
13898 offset = 12 + programInfoLength;
13899
13900 while (offset < tableEnd) {
13901 var streamType = payload[offset];
13902 var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
13903 // TODO: should this be done for metadata too? for now maintain behavior of
13904 // multiple metadata streams
13905
13906 if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
13907 self.programMapTable.video = pid;
13908 } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
13909 self.programMapTable.audio = pid;
13910 } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
13911 // map pid to stream type for metadata streams
13912 self.programMapTable['timed-metadata'][pid] = streamType;
13913 } // move to the next table entry
13914 // skip past the elementary stream descriptors, if present
13915
13916
13917 offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
13918 } // record the map on the packet as well
13919
13920
13921 pmt.programMapTable = self.programMapTable;
13922 };
13923 /**
13924 * Deliver a new MP2T packet to the next stream in the pipeline.
13925 */
13926
13927
13928 this.push = function (packet) {
13929 var result = {},
13930 offset = 4;
13931 result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
13932
13933 result.pid = packet[1] & 0x1f;
13934 result.pid <<= 8;
13935 result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
13936 // fifth byte of the TS packet header. The adaptation field is
13937 // used to add stuffing to PES packets that don't fill a complete
13938 // TS packet, and to specify some forms of timing and control data
13939 // that we do not currently use.
13940
13941 if ((packet[3] & 0x30) >>> 4 > 0x01) {
13942 offset += packet[offset] + 1;
13943 } // parse the rest of the packet based on the type
13944
13945
13946 if (result.pid === 0) {
13947 result.type = 'pat';
13948 parsePsi(packet.subarray(offset), result);
13949 this.trigger('data', result);
13950 } else if (result.pid === this.pmtPid) {
13951 result.type = 'pmt';
13952 parsePsi(packet.subarray(offset), result);
13953 this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
13954
13955 while (this.packetsWaitingForPmt.length) {
13956 this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
13957 }
13958 } else if (this.programMapTable === undefined) {
13959 // When we have not seen a PMT yet, defer further processing of
13960 // PES packets until one has been parsed
13961 this.packetsWaitingForPmt.push([packet, offset, result]);
13962 } else {
13963 this.processPes_(packet, offset, result);
13964 }
13965 };
13966
13967 this.processPes_ = function (packet, offset, result) {
13968 // set the appropriate stream type
13969 if (result.pid === this.programMapTable.video) {
13970 result.streamType = streamTypes.H264_STREAM_TYPE;
13971 } else if (result.pid === this.programMapTable.audio) {
13972 result.streamType = streamTypes.ADTS_STREAM_TYPE;
13973 } else {
13974 // if not video or audio, it is timed-metadata or unknown
13975 // if unknown, streamType will be undefined
13976 result.streamType = this.programMapTable['timed-metadata'][result.pid];
13977 }
13978
13979 result.type = 'pes';
13980 result.data = packet.subarray(offset);
13981 this.trigger('data', result);
13982 };
13983 };
13984
13985 _TransportParseStream.prototype = new stream();
13986 _TransportParseStream.STREAM_TYPES = {
13987 h264: 0x1b,
13988 adts: 0x0f
13989 };
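// Editor's note: an illustrative sketch (not part of the original bundle) of
// the four-byte TS header fields that TransportParseStream.prototype.push
// reads above; field names follow the MPEG-2 TS spec.
var exampleParseTsHeader = function (packet) {
  var pusi = !!(packet[1] & 0x40); // payload_unit_start_indicator
  var pid = (packet[1] & 0x1f) << 8 | packet[2]; // 13-bit packet identifier
  var adaptationFieldControl = (packet[3] & 0x30) >>> 4;
  var payloadOffset = 4;
  if (adaptationFieldControl > 0x01) {
    // an adaptation field is present: skip its length byte plus its body
    payloadOffset += packet[4] + 1;
  }
  return { payloadUnitStartIndicator: pusi, pid: pid, payloadOffset: payloadOffset };
};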
13990 /**
13991 * Reconstitutes program elementary stream (PES) packets from parsed
13992 * transport stream packets. That is, if you pipe an
13993 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
13994 * events will capture the bytes for individual PES
13995 * packets plus relevant metadata that has been extracted from the
13996 * container.
13997 */
13998
13999 _ElementaryStream = function ElementaryStream() {
14000 var self = this,
14001 segmentHadPmt = false,
14002 // PES packet fragments
14003 video = {
14004 data: [],
14005 size: 0
14006 },
14007 audio = {
14008 data: [],
14009 size: 0
14010 },
14011 timedMetadata = {
14012 data: [],
14013 size: 0
14014 },
14015 programMapTable,
14016 parsePes = function parsePes(payload, pes) {
14017 var ptsDtsFlags;
14018 var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array
14019
14020 pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has ts packets
14021 // that carry frame data continuing from the previous fragment. This
14022 // check ensures the pes data is the start of a new pes payload
14023
14024 if (startPrefix !== 1) {
14025 return;
14026 } // get the packet length, this will be 0 for video
14027
14028
14029 pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
14030
14031 pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
14032 // and a DTS value. Determine what combination of values is
14033 // available to work with.
14034
14035 ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
14036 // performs all bitwise operations on 32-bit integers, but it can
14037 // represent much larger integers (up to 53 bits) exactly using
14038 // standard arithmetic operations.
14039 // We construct a 31-bit value using bitwise operators over the 31
14040 // most significant bits and then multiply by 4 (equal to a left-shift
14041 // of 2) before we add the final 2 least significant bits of the
14042 // timestamp (equal to an OR.)
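// Editor's worked example (illustration only): for the 33-bit timestamp
// 0x1ABCDEF01, the shifts and ORs below assemble the top 31 bits,
// 0x6AF37BC0; multiplying by 4 gives 0x1ABCDEF00, and adding the low
// 2 bits (0x01) recovers 0x1ABCDEF01, beyond what 32-bit bitwise
// operations alone could represent.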
14043
14044 if (ptsDtsFlags & 0xC0) {
14045 // the PTS and DTS are not written out directly. For information
14046 // on how they are encoded, see
14047 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
14048 pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
14049 pes.pts *= 4; // Left shift by 2
14050
14051 pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
14052
14053 pes.dts = pes.pts;
14054
14055 if (ptsDtsFlags & 0x40) {
14056 pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
14057 pes.dts *= 4; // Left shift by 2
14058
14059 pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
14060 }
14061 } // the data section starts immediately after the PES header.
14062 // pes_header_data_length specifies the number of header bytes
14063 // that follow the last byte of the field.
14064
14065
14066 pes.data = payload.subarray(9 + payload[8]);
14067 },
14068
14069 /**
14070 * Pass completely parsed PES packets to the next stream in the pipeline
14071 **/
14072 flushStream = function flushStream(stream, type, forceFlush) {
14073 var packetData = new Uint8Array(stream.size),
14074 event = {
14075 type: type
14076 },
14077 i = 0,
14078 offset = 0,
14079 packetFlushable = false,
14080 fragment; // do nothing if there is not enough buffered data for a complete
14081 // PES header
14082
14083 if (!stream.data.length || stream.size < 9) {
14084 return;
14085 }
14086
14087 event.trackId = stream.data[0].pid; // reassemble the packet
14088
14089 for (i = 0; i < stream.data.length; i++) {
14090 fragment = stream.data[i];
14091 packetData.set(fragment.data, offset);
14092 offset += fragment.data.byteLength;
14093 } // parse assembled packet's PES header
14094
14095
14096 parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
14097 // check that there is enough stream data to fill the packet
14098
14099 packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
14100
14101 if (forceFlush || packetFlushable) {
14102 stream.size = 0;
14103 stream.data.length = 0;
14104 } // only emit packets that are complete. this is to avoid assembling
14105 // incomplete PES packets due to poor segmentation
14106
14107
14108 if (packetFlushable) {
14109 self.trigger('data', event);
14110 }
14111 };
14112
14113 _ElementaryStream.prototype.init.call(this);
14114 /**
14115 * Identifies M2TS packet types and parses PES packets using metadata
14116 * parsed from the PMT
14117 **/
14118
14119
14120 this.push = function (data) {
14121 ({
14122 pat: function pat() {// we have to wait for the PMT to arrive as well before we
14123 // have any meaningful metadata
14124 },
14125 pes: function pes() {
14126 var stream, streamType;
14127
14128 switch (data.streamType) {
14129 case streamTypes.H264_STREAM_TYPE:
14130 stream = video;
14131 streamType = 'video';
14132 break;
14133
14134 case streamTypes.ADTS_STREAM_TYPE:
14135 stream = audio;
14136 streamType = 'audio';
14137 break;
14138
14139 case streamTypes.METADATA_STREAM_TYPE:
14140 stream = timedMetadata;
14141 streamType = 'timed-metadata';
14142 break;
14143
14144 default:
14145 // ignore unknown stream types
14146 return;
14147 } // if a new packet is starting, we can flush the completed
14148 // packet
14149
14150
14151 if (data.payloadUnitStartIndicator) {
14152 flushStream(stream, streamType, true);
14153 } // buffer this fragment until we are sure we've received the
14154 // complete payload
14155
14156
14157 stream.data.push(data);
14158 stream.size += data.data.byteLength;
14159 },
14160 pmt: function pmt() {
14161 var event = {
14162 type: 'metadata',
14163 tracks: []
14164 };
14165 programMapTable = data.programMapTable; // translate audio and video streams to tracks
14166
14167 if (programMapTable.video !== null) {
14168 event.tracks.push({
14169 timelineStartInfo: {
14170 baseMediaDecodeTime: 0
14171 },
14172 id: +programMapTable.video,
14173 codec: 'avc',
14174 type: 'video'
14175 });
14176 }
14177
14178 if (programMapTable.audio !== null) {
14179 event.tracks.push({
14180 timelineStartInfo: {
14181 baseMediaDecodeTime: 0
14182 },
14183 id: +programMapTable.audio,
14184 codec: 'adts',
14185 type: 'audio'
14186 });
14187 }
14188
14189 segmentHadPmt = true;
14190 self.trigger('data', event);
14191 }
14192 })[data.type]();
14193 };
14194
14195 this.reset = function () {
14196 video.size = 0;
14197 video.data.length = 0;
14198 audio.size = 0;
14199 audio.data.length = 0;
14200 this.trigger('reset');
14201 };
14202 /**
14203 * Flush any remaining input. Video PES packets may be of variable
14204 * length. Normally, the start of a new video packet can trigger the
14205 * finalization of the previous packet. That is not possible if no
14206 * more video is forthcoming, however. In that case, some other
14207 * mechanism (like the end of the file) has to be employed. When it is
14208 * clear that no additional data is forthcoming, calling this method
14209 * will flush the buffered packets.
14210 */
14211
14212
14213 this.flushStreams_ = function () {
14214 // !!THIS ORDER IS IMPORTANT!!
14215 // video first then audio
14216 flushStream(video, 'video');
14217 flushStream(audio, 'audio');
14218 flushStream(timedMetadata, 'timed-metadata');
14219 };
14220
14221 this.flush = function () {
14222 // if we haven't emitted a pmt for this segment but have one
14223 // available, emit it on flush so that a trackinfo event is
14224 // triggered downstream.
14225 if (!segmentHadPmt && programMapTable) {
14226 var pmt = {
14227 type: 'metadata',
14228 tracks: []
14229 }; // translate audio and video streams to tracks
14230
14231 if (programMapTable.video !== null) {
14232 pmt.tracks.push({
14233 timelineStartInfo: {
14234 baseMediaDecodeTime: 0
14235 },
14236 id: +programMapTable.video,
14237 codec: 'avc',
14238 type: 'video'
14239 });
14240 }
14241
14242 if (programMapTable.audio !== null) {
14243 pmt.tracks.push({
14244 timelineStartInfo: {
14245 baseMediaDecodeTime: 0
14246 },
14247 id: +programMapTable.audio,
14248 codec: 'adts',
14249 type: 'audio'
14250 });
14251 }
14252
14253 self.trigger('data', pmt);
14254 }
14255
14256 segmentHadPmt = false;
14257 this.flushStreams_();
14258 this.trigger('done');
14259 };
14260 };
14261
14262 _ElementaryStream.prototype = new stream();
14263 var m2ts = {
14264 PAT_PID: 0x0000,
14265 MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
14266 TransportPacketStream: _TransportPacketStream,
14267 TransportParseStream: _TransportParseStream,
14268 ElementaryStream: _ElementaryStream,
14269 TimestampRolloverStream: TimestampRolloverStream,
14270 CaptionStream: captionStream.CaptionStream,
14271 Cea608Stream: captionStream.Cea608Stream,
14272 Cea708Stream: captionStream.Cea708Stream,
14273 MetadataStream: metadataStream
14274 };
14275
14276 for (var type in streamTypes) {
14277 if (streamTypes.hasOwnProperty(type)) {
14278 m2ts[type] = streamTypes[type];
14279 }
14280 }
14281
14282 var m2ts_1 = m2ts;
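// Editor's note: a minimal usage sketch (not part of the original bundle)
// wiring the stages above into the usual demux pipeline, assuming the shared
// Stream base's pipe() chains by returning its destination.
var exampleBuildM2tsPipeline = function () {
  var packetizer = new m2ts.TransportPacketStream();
  var parser = new m2ts.TransportParseStream();
  var elementary = new m2ts.ElementaryStream();
  packetizer.pipe(parser).pipe(elementary);
  elementary.on('data', function (event) {
    // event.type is 'metadata' (tracks from the PMT) or 'video' /
    // 'audio' / 'timed-metadata' for reassembled PES packets
  });
  return packetizer; // push Uint8Array segment bytes into this end, then flush()
};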
14283 var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;
14284
14285 var _AdtsStream;
14286
14287 var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
14288 /*
14289 * Accepts an ElementaryStream and emits data events with parsed
14290 * AAC audio frames from the individual packets. Input audio in ADTS
14291 * format is unpacked and re-emitted as AAC frames.
14292 *
14293 * @see http://wiki.multimedia.cx/index.php?title=ADTS
14294 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
14295 */
14296
14297 _AdtsStream = function AdtsStream(handlePartialSegments) {
14298 var buffer,
14299 frameNum = 0;
14300
14301 _AdtsStream.prototype.init.call(this);
14302
14303 this.skipWarn_ = function (start, end) {
14304 this.trigger('log', {
14305 level: 'warn',
14306 message: "adts skipping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
14307 });
14308 };
14309
14310 this.push = function (packet) {
14311 var i = 0,
14312 frameLength,
14313 protectionSkipBytes,
14314 oldBuffer,
14315 sampleCount,
14316 adtsFrameDuration;
14317
14318 if (!handlePartialSegments) {
14319 frameNum = 0;
14320 }
14321
14322 if (packet.type !== 'audio') {
14323 // ignore non-audio data
14324 return;
14325 } // Prepend any data in the buffer to the input data so that we can parse
14326 // aac frames that cross a PES packet boundary
14327
14328
14329 if (buffer && buffer.length) {
14330 oldBuffer = buffer;
14331 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
14332 buffer.set(oldBuffer);
14333 buffer.set(packet.data, oldBuffer.byteLength);
14334 } else {
14335 buffer = packet.data;
14336 } // unpack any ADTS frames which have been fully received
14337 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
14338
14339
14340 var skip; // We use i + 7 here because we want to be able to parse the entire header.
14341 // If we don't have enough bytes to do that, then we definitely won't have a full frame.
14342
14343 while (i + 7 < buffer.length) {
14344 // Look for the start of an ADTS header.
14345 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
14346 if (typeof skip !== 'number') {
14347 skip = i;
14348 } // If a valid header was not found, jump one forward and attempt to
14349 // find a valid ADTS header starting at the next byte
14350
14351
14352 i++;
14353 continue;
14354 }
14355
14356 if (typeof skip === 'number') {
14357 this.skipWarn_(skip, i);
14358 skip = null;
14359 } // The protection skip bit tells us if we have 2 bytes of CRC data at the
14360 // end of the ADTS header
14361
14362
14363 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
14364 // end of the sync sequence
14365 // NOTE: frame length includes the size of the header
14366
14367 frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
14368 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
14369 adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
14370 // then we have to wait for more data
14371
14372 if (buffer.byteLength - i < frameLength) {
14373 break;
14374 } // Otherwise, deliver the complete AAC frame
14375
14376
14377 this.trigger('data', {
14378 pts: packet.pts + frameNum * adtsFrameDuration,
14379 dts: packet.dts + frameNum * adtsFrameDuration,
14380 sampleCount: sampleCount,
14381 audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
14382 channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
14383 samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
14384 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
14385 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
14386 samplesize: 16,
14387 // data is the frame without its header
14388 data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
14389 });
14390 frameNum++;
14391 i += frameLength;
14392 }
14393
14394 if (typeof skip === 'number') {
14395 this.skipWarn_(skip, i);
14396 skip = null;
14397 } // remove processed bytes from the buffer.
14398
14399
14400 buffer = buffer.subarray(i);
14401 };
14402
14403 this.flush = function () {
14404 frameNum = 0;
14405 this.trigger('done');
14406 };
14407
14408 this.reset = function () {
14409 buffer = void 0;
14410 this.trigger('reset');
14411 };
14412
14413 this.endTimeline = function () {
14414 buffer = void 0;
14415 this.trigger('endedtimeline');
14416 };
14417 };
14418
14419 _AdtsStream.prototype = new stream();
14420 var adts = _AdtsStream;
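// Editor's note: a self-contained sketch (not part of the original bundle)
// decoding the same ADTS header fields as AdtsStream.push above; `i` must
// point at the 0xFF sync byte of a complete seven-byte header.
var exampleParseAdtsHeader = function (buffer, i) {
  var samplingFrequencyIndex = (buffer[i + 2] & 0x3c) >>> 2;
  return {
    hasCrc: (buffer[i + 1] & 0x01) === 0, // protection_absent === 0 means 2 CRC bytes follow
    audioobjecttype: ((buffer[i + 2] >>> 6) & 0x03) + 1,
    samplerate: ADTS_SAMPLING_FREQUENCIES$1[samplingFrequencyIndex],
    channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
    // 13-bit frame length, header bytes included
    frameLength: (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5,
    sampleCount: ((buffer[i + 6] & 0x03) + 1) * 1024
  };
};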
14421 /**
14422 * mux.js
14423 *
14424 * Copyright (c) Brightcove
14425 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
14426 */
14427
14428 var ExpGolomb;
14429 /**
14430 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
14431 * scheme used by h264.
14432 */
14433
14434 ExpGolomb = function ExpGolomb(workingData) {
14435 var // the number of bytes left to examine in workingData
14436 workingBytesAvailable = workingData.byteLength,
14437 // the current word being examined
14438 workingWord = 0,
14439 // :uint
14440 // the number of bits left to examine in the current word
14441 workingBitsAvailable = 0; // :uint;
14442 // ():uint
14443
14444 this.length = function () {
14445 return 8 * workingBytesAvailable;
14446 }; // ():uint
14447
14448
14449 this.bitsAvailable = function () {
14450 return 8 * workingBytesAvailable + workingBitsAvailable;
14451 }; // ():void
14452
14453
14454 this.loadWord = function () {
14455 var position = workingData.byteLength - workingBytesAvailable,
14456 workingBytes = new Uint8Array(4),
14457 availableBytes = Math.min(4, workingBytesAvailable);
14458
14459 if (availableBytes === 0) {
14460 throw new Error('no bytes available');
14461 }
14462
14463 workingBytes.set(workingData.subarray(position, position + availableBytes));
14464 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
14465
14466 workingBitsAvailable = availableBytes * 8;
14467 workingBytesAvailable -= availableBytes;
14468 }; // (count:int):void
14469
14470
14471 this.skipBits = function (count) {
14472 var skipBytes; // :int
14473
14474 if (workingBitsAvailable > count) {
14475 workingWord <<= count;
14476 workingBitsAvailable -= count;
14477 } else {
14478 count -= workingBitsAvailable;
14479 skipBytes = Math.floor(count / 8);
14480 count -= skipBytes * 8;
14481 workingBytesAvailable -= skipBytes;
14482 this.loadWord();
14483 workingWord <<= count;
14484 workingBitsAvailable -= count;
14485 }
14486 }; // (size:int):uint
14487
14488
14489 this.readBits = function (size) {
14490 var bits = Math.min(workingBitsAvailable, size),
14491 // :uint
14492 valu = workingWord >>> 32 - bits; // :uint
14493 // if size > 31, handle error
14494
14495 workingBitsAvailable -= bits;
14496
14497 if (workingBitsAvailable > 0) {
14498 workingWord <<= bits;
14499 } else if (workingBytesAvailable > 0) {
14500 this.loadWord();
14501 }
14502
14503 bits = size - bits;
14504
14505 if (bits > 0) {
14506 return valu << bits | this.readBits(bits);
14507 }
14508
14509 return valu;
14510 }; // ():uint
14511
14512
14513 this.skipLeadingZeros = function () {
14514 var leadingZeroCount; // :uint
14515
14516 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
14517 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
14518 // the first bit of working word is 1
14519 workingWord <<= leadingZeroCount;
14520 workingBitsAvailable -= leadingZeroCount;
14521 return leadingZeroCount;
14522 }
14523 } // we exhausted workingWord and still have not found a 1
14524
14525
14526 this.loadWord();
14527 return leadingZeroCount + this.skipLeadingZeros();
14528 }; // ():void
14529
14530
14531 this.skipUnsignedExpGolomb = function () {
14532 this.skipBits(1 + this.skipLeadingZeros());
14533 }; // ():void
14534
14535
14536 this.skipExpGolomb = function () {
14537 this.skipBits(1 + this.skipLeadingZeros());
14538 }; // ():uint
14539
14540
14541 this.readUnsignedExpGolomb = function () {
14542 var clz = this.skipLeadingZeros(); // :uint
14543
14544 return this.readBits(clz + 1) - 1;
14545 }; // ():int
14546
14547
14548 this.readExpGolomb = function () {
14549 var valu = this.readUnsignedExpGolomb(); // :int
14550
14551 if (0x01 & valu) {
14552 // the number is odd if the low order bit is set
14553 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
14554 }
14555
14556 return -1 * (valu >>> 1); // divide by two then make it negative
14557 }; // Some convenience functions
14558 // :Boolean
14559
14560
14561 this.readBoolean = function () {
14562 return this.readBits(1) === 1;
14563 }; // ():int
14564
14565
14566 this.readUnsignedByte = function () {
14567 return this.readBits(8);
14568 };
14569
14570 this.loadWord();
14571 };
14572
14573 var expGolomb = ExpGolomb;
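// Editor's note: an illustrative sketch (not part of the original bundle).
// Unsigned Exp-Golomb writes ue(v) as <n zero bits><1><n info bits>, and the
// decoded value is that (n + 1)-bit number minus one. The byte 0x28 is
// 00101000 in binary: two leading zeros, then 101 (= 5), so ue(v) = 4.
var exampleReadUe = function () {
  var decoder = new ExpGolomb(new Uint8Array([0x28]));
  return decoder.readUnsignedExpGolomb(); // 4
};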
14574
14575 var _H264Stream, _NalByteStream;
14576
14577 var PROFILES_WITH_OPTIONAL_SPS_DATA;
14578 /**
14579 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
14580 */
14581
14582 _NalByteStream = function NalByteStream() {
14583 var syncPoint = 0,
14584 i,
14585 buffer;
14586
14587 _NalByteStream.prototype.init.call(this);
14588 /*
14589 * Scans a byte stream and triggers a data event with the NAL units found.
14590 * @param {Object} data Event received from H264Stream
14591 * @param {Uint8Array} data.data The h264 byte stream to be scanned
14592 *
14593 * @see H264Stream.push
14594 */
14595
14596
14597 this.push = function (data) {
14598 var swapBuffer;
14599
14600 if (!buffer) {
14601 buffer = data.data;
14602 } else {
14603 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
14604 swapBuffer.set(buffer);
14605 swapBuffer.set(data.data, buffer.byteLength);
14606 buffer = swapBuffer;
14607 }
14608
14609 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
14610 // scan for NAL unit boundaries
14611 // a match looks like this:
14612 // 0 0 1 .. NAL .. 0 0 1
14613 // ^ sync point ^ i
14614 // or this:
14615 // 0 0 1 .. NAL .. 0 0 0
14616 // ^ sync point ^ i
14617 // advance the sync point to a NAL start, if necessary
14618
14619 for (; syncPoint < len - 3; syncPoint++) {
14620 if (buffer[syncPoint + 2] === 1) {
14621 // the sync point is properly aligned
14622 i = syncPoint + 5;
14623 break;
14624 }
14625 }
14626
14627 while (i < len) {
14628 // look at the current byte to determine if we've hit the end of
14629 // a NAL unit boundary
14630 switch (buffer[i]) {
14631 case 0:
14632 // skip past non-sync sequences
14633 if (buffer[i - 1] !== 0) {
14634 i += 2;
14635 break;
14636 } else if (buffer[i - 2] !== 0) {
14637 i++;
14638 break;
14639 } // deliver the NAL unit if it isn't empty
14640
14641
14642 if (syncPoint + 3 !== i - 2) {
14643 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
14644 } // drop trailing zeroes
14645
14646
14647 do {
14648 i++;
14649 } while (buffer[i] !== 1 && i < len);
14650
14651 syncPoint = i - 2;
14652 i += 3;
14653 break;
14654
14655 case 1:
14656 // skip past non-sync sequences
14657 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
14658 i += 3;
14659 break;
14660 } // deliver the NAL unit
14661
14662
14663 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
14664 syncPoint = i - 2;
14665 i += 3;
14666 break;
14667
14668 default:
14669 // the current byte isn't a one or zero, so it cannot be part
14670 // of a sync sequence
14671 i += 3;
14672 break;
14673 }
14674 } // filter out the NAL units that were delivered
14675
14676
14677 buffer = buffer.subarray(syncPoint);
14678 i -= syncPoint;
14679 syncPoint = 0;
14680 };
14681
14682 this.reset = function () {
14683 buffer = null;
14684 syncPoint = 0;
14685 this.trigger('reset');
14686 };
14687
14688 this.flush = function () {
14689 // deliver the last buffered NAL unit
14690 if (buffer && buffer.byteLength > 3) {
14691 this.trigger('data', buffer.subarray(syncPoint + 3));
14692 } // reset the stream state
14693
14694
14695 buffer = null;
14696 syncPoint = 0;
14697 this.trigger('done');
14698 };
14699
14700 this.endTimeline = function () {
14701 this.flush();
14702 this.trigger('endedtimeline');
14703 };
14704 };
14705
14706 _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
14707 // see Recommendation ITU-T H.264 (4/2013),
14708 // 7.3.2.1.1 Sequence parameter set data syntax
14709
14710 PROFILES_WITH_OPTIONAL_SPS_DATA = {
14711 100: true,
14712 110: true,
14713 122: true,
14714 244: true,
14715 44: true,
14716 83: true,
14717 86: true,
14718 118: true,
14719 128: true,
14720 // TODO: the three profiles below don't
14721 // appear to have sps data in the specification anymore?
14722 138: true,
14723 139: true,
14724 134: true
14725 };
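// Editor's note: a simplified sketch (not part of the original bundle) of the
// Annex B scan NalByteStream performs above: it splits on 00 00 01 start
// codes but, unlike the stream version, ignores trailing-zero trimming and
// buffering across pushes.
var exampleSplitAnnexB = function (bytes) {
  var units = [], start = -1, i;
  for (i = 0; i + 2 < bytes.length; i++) {
    if (bytes[i] === 0 && bytes[i + 1] === 0 && bytes[i + 2] === 1) {
      if (start !== -1) {
        units.push(bytes.subarray(start, i));
      }
      start = i + 3; // a NAL unit begins right after the start code
      i += 2;
    }
  }
  if (start !== -1) {
    units.push(bytes.subarray(start));
  }
  return units;
};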
14726 /**
14727 * Accepts input from an ElementaryStream and produces H.264 NAL unit data
14728 * events.
14729 */
14730
14731 _H264Stream = function H264Stream() {
14732 var nalByteStream = new _NalByteStream(),
14733 self,
14734 trackId,
14735 currentPts,
14736 currentDts,
14737 discardEmulationPreventionBytes,
14738 readSequenceParameterSet,
14739 skipScalingList;
14740
14741 _H264Stream.prototype.init.call(this);
14742
14743 self = this;
14744 /*
14745 * Pushes a packet from a stream onto the NalByteStream
14746 *
14747 * @param {Object} packet - A packet received from a stream
14748 * @param {Uint8Array} packet.data - The raw bytes of the packet
14749 * @param {Number} packet.dts - Decode timestamp of the packet
14750 * @param {Number} packet.pts - Presentation timestamp of the packet
14751 * @param {Number} packet.trackId - The id of the h264 track this packet came from
14752 * @param {('video'|'audio')} packet.type - The type of packet
14753 *
14754 */
14755
14756 this.push = function (packet) {
14757 if (packet.type !== 'video') {
14758 return;
14759 }
14760
14761 trackId = packet.trackId;
14762 currentPts = packet.pts;
14763 currentDts = packet.dts;
14764 nalByteStream.push(packet);
14765 };
14766 /*
14767 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
14768 * for the NALUs to the next stream component.
14769 * Also, preprocess caption and sequence parameter NALUs.
14770 *
14771 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
14772 * @see NalByteStream.push
14773 */
14774
14775
14776 nalByteStream.on('data', function (data) {
14777 var event = {
14778 trackId: trackId,
14779 pts: currentPts,
14780 dts: currentDts,
14781 data: data,
14782 nalUnitTypeCode: data[0] & 0x1f
14783 };
14784
14785 switch (event.nalUnitTypeCode) {
14786 case 0x05:
14787 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
14788 break;
14789
14790 case 0x06:
14791 event.nalUnitType = 'sei_rbsp';
14792 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
14793 break;
14794
14795 case 0x07:
14796 event.nalUnitType = 'seq_parameter_set_rbsp';
14797 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
14798 event.config = readSequenceParameterSet(event.escapedRBSP);
14799 break;
14800
14801 case 0x08:
14802 event.nalUnitType = 'pic_parameter_set_rbsp';
14803 break;
14804
14805 case 0x09:
14806 event.nalUnitType = 'access_unit_delimiter_rbsp';
14807 break;
14808 } // This triggers data on the H264Stream
14809
14810
14811 self.trigger('data', event);
14812 });
14813 nalByteStream.on('done', function () {
14814 self.trigger('done');
14815 });
14816 nalByteStream.on('partialdone', function () {
14817 self.trigger('partialdone');
14818 });
14819 nalByteStream.on('reset', function () {
14820 self.trigger('reset');
14821 });
14822 nalByteStream.on('endedtimeline', function () {
14823 self.trigger('endedtimeline');
14824 });
14825
14826 this.flush = function () {
14827 nalByteStream.flush();
14828 };
14829
14830 this.partialFlush = function () {
14831 nalByteStream.partialFlush();
14832 };
14833
14834 this.reset = function () {
14835 nalByteStream.reset();
14836 };
14837
14838 this.endTimeline = function () {
14839 nalByteStream.endTimeline();
14840 };
14841 /**
14842 * Advance the ExpGolomb decoder past a scaling list. The scaling
14843 * list is optionally transmitted as part of a sequence parameter
14844 * set and is not relevant to transmuxing.
14845 * @param count {number} the number of entries in this scaling list
14846 * @param expGolombDecoder {object} an ExpGolomb pointed to the
14847 * start of a scaling list
14848 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
14849 */
14850
14851
14852 skipScalingList = function skipScalingList(count, expGolombDecoder) {
14853 var lastScale = 8,
14854 nextScale = 8,
14855 j,
14856 deltaScale;
14857
14858 for (j = 0; j < count; j++) {
14859 if (nextScale !== 0) {
14860 deltaScale = expGolombDecoder.readExpGolomb();
14861 nextScale = (lastScale + deltaScale + 256) % 256;
14862 }
14863
14864 lastScale = nextScale === 0 ? lastScale : nextScale;
14865 }
14866 };
14867 /**
14868 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
14869 * Sequence Payload"
14870 * @param data {Uint8Array} the bytes of a RBSP from a NAL
14871 * unit
14872 * @return {Uint8Array} the RBSP without any Emulation
14873 * Prevention Bytes
14874 */
14875
14876
14877 discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
14878 var length = data.byteLength,
14879 emulationPreventionBytesPositions = [],
14880 i = 1,
14881 newLength,
14882 newData; // Find all `Emulation Prevention Bytes`
14883
14884 while (i < length - 2) {
14885 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
14886 emulationPreventionBytesPositions.push(i + 2);
14887 i += 2;
14888 } else {
14889 i++;
14890 }
14891 } // If no Emulation Prevention Bytes were found just return the original
14892 // array
14893
14894
14895 if (emulationPreventionBytesPositions.length === 0) {
14896 return data;
14897 } // Create a new array to hold the NAL unit data
14898
14899
14900 newLength = length - emulationPreventionBytesPositions.length;
14901 newData = new Uint8Array(newLength);
14902 var sourceIndex = 0;
14903
14904 for (i = 0; i < newLength; sourceIndex++, i++) {
14905 if (sourceIndex === emulationPreventionBytesPositions[0]) {
14906 // Skip this byte
14907 sourceIndex++; // Remove this position index
14908
14909 emulationPreventionBytesPositions.shift();
14910 }
14911
14912 newData[i] = data[sourceIndex];
14913 }
14914
14915 return newData;
14916 };
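// Editor's illustration: in the escaped RBSP bytes 00 00 03 01, the 0x03 is
// an emulation prevention byte the encoder inserted so the payload can never
// mimic a 00 00 01 start code; the function above strips it, leaving the
// decoded bytes 00 00 01.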
14917 /**
14918 * Read a sequence parameter set and return some interesting video
14919 * properties. A sequence parameter set is the H264 metadata that
14920 * describes the properties of upcoming video frames.
14921 * @param data {Uint8Array} the bytes of a sequence parameter set
14922 * @return {object} an object with configuration parsed from the
14923 * sequence parameter set, including the dimensions of the
14924 * associated video frames.
14925 */
14926
14927
14928 readSequenceParameterSet = function readSequenceParameterSet(data) {
14929 var frameCropLeftOffset = 0,
14930 frameCropRightOffset = 0,
14931 frameCropTopOffset = 0,
14932 frameCropBottomOffset = 0,
14933 expGolombDecoder,
14934 profileIdc,
14935 levelIdc,
14936 profileCompatibility,
14937 chromaFormatIdc,
14938 picOrderCntType,
14939 numRefFramesInPicOrderCntCycle,
14940 picWidthInMbsMinus1,
14941 picHeightInMapUnitsMinus1,
14942 frameMbsOnlyFlag,
14943 scalingListCount,
14944 sarRatio = [1, 1],
14945 aspectRatioIdc,
14946 i;
14947 expGolombDecoder = new expGolomb(data);
14948 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
14949
14950 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
14951
14952 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
14953
14954 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
14955 // some profiles have more optional data we don't need
14956
14957 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
14958 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
14959
14960 if (chromaFormatIdc === 3) {
14961 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
14962 }
14963
14964 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
14965
14966 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
14967
14968 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
14969
14970 if (expGolombDecoder.readBoolean()) {
14971 // seq_scaling_matrix_present_flag
14972 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
14973
14974 for (i = 0; i < scalingListCount; i++) {
14975 if (expGolombDecoder.readBoolean()) {
14976 // seq_scaling_list_present_flag[ i ]
14977 if (i < 6) {
14978 skipScalingList(16, expGolombDecoder);
14979 } else {
14980 skipScalingList(64, expGolombDecoder);
14981 }
14982 }
14983 }
14984 }
14985 }
14986
14987 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
14988
14989 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
14990
14991 if (picOrderCntType === 0) {
14992 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
14993 } else if (picOrderCntType === 1) {
14994 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
14995
14996 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
14997
14998 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
14999
15000 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
15001
15002 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
15003 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
15004 }
15005 }
15006
15007 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
15008
15009 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
15010
15011 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
15012 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
15013 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
15014
15015 if (frameMbsOnlyFlag === 0) {
15016 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
15017 }
15018
15019 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
15020
15021 if (expGolombDecoder.readBoolean()) {
15022 // frame_cropping_flag
15023 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
15024 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
15025 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
15026 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
15027 }
15028
15029 if (expGolombDecoder.readBoolean()) {
15030 // vui_parameters_present_flag
15031 if (expGolombDecoder.readBoolean()) {
15032 // aspect_ratio_info_present_flag
15033 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
15034
15035 switch (aspectRatioIdc) {
15036 case 1:
15037 sarRatio = [1, 1];
15038 break;
15039
15040 case 2:
15041 sarRatio = [12, 11];
15042 break;
15043
15044 case 3:
15045 sarRatio = [10, 11];
15046 break;
15047
15048 case 4:
15049 sarRatio = [16, 11];
15050 break;
15051
15052 case 5:
15053 sarRatio = [40, 33];
15054 break;
15055
15056 case 6:
15057 sarRatio = [24, 11];
15058 break;
15059
15060 case 7:
15061 sarRatio = [20, 11];
15062 break;
15063
15064 case 8:
15065 sarRatio = [32, 11];
15066 break;
15067
15068 case 9:
15069 sarRatio = [80, 33];
15070 break;
15071
15072 case 10:
15073 sarRatio = [18, 11];
15074 break;
15075
15076 case 11:
15077 sarRatio = [15, 11];
15078 break;
15079
15080 case 12:
15081 sarRatio = [64, 33];
15082 break;
15083
15084 case 13:
15085 sarRatio = [160, 99];
15086 break;
15087
15088 case 14:
15089 sarRatio = [4, 3];
15090 break;
15091
15092 case 15:
15093 sarRatio = [3, 2];
15094 break;
15095
15096 case 16:
15097 sarRatio = [2, 1];
15098 break;
15099
15100 case 255:
15101 {
15102 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
15103 break;
15104 }
15105 }
15106
15110 }
15111 }
15112
15113 return {
15114 profileIdc: profileIdc,
15115 levelIdc: levelIdc,
15116 profileCompatibility: profileCompatibility,
15117 width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
15118 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
15119 // sar is sample aspect ratio
15120 sarRatio: sarRatio
15121 };
15122 };
15123 };
15124
15125 _H264Stream.prototype = new stream();
15126 var h264 = {
15127 H264Stream: _H264Stream,
15128 NalByteStream: _NalByteStream
15129 };
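// Editor's note: a minimal sketch (not part of the original bundle) of how
// the SPS fields above map to pixel dimensions, using 1280x720 progressive
// video as the worked example.
var exampleSpsDimensions = function () {
  var picWidthInMbsMinus1 = 79; // (79 + 1) * 16 = 1280
  var picHeightInMapUnitsMinus1 = 44; // (2 - 1) * (44 + 1) * 16 = 720
  var frameMbsOnlyFlag = 1; // progressive content; no field coding
  return {
    width: (picWidthInMbsMinus1 + 1) * 16,
    height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16
  };
};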
15130 /**
15131 * mux.js
15132 *
15133 * Copyright (c) Brightcove
15134 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
15135 *
15136 * Utilities to detect basic properties and metadata about AAC data.
15137 */
15138
15139 var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
15140
15141 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
15142 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
15143 flags = header[byteIndex + 5],
15144 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
15145
15146 returnSize = returnSize >= 0 ? returnSize : 0;
15147
15148 if (footerPresent) {
15149 return returnSize + 20;
15150 }
15151
15152 return returnSize + 10;
15153 };
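// Editor's illustration: ID3 tag sizes are "syncsafe" -- only 7 bits of each
// size byte carry data so no size byte can equal 0xFF. For the size bytes
// 00 00 02 01 the tag body is (2 << 7) | 1 = 257 bytes, so parseId3TagSize
// returns 267 with the 10 header bytes, or 277 when the footer flag is set.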
15154
15155 var getId3Offset = function getId3Offset(data, offset) {
15156 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
15157 return offset;
15158 }
15159
15160 offset += parseId3TagSize(data, offset);
15161 return getId3Offset(data, offset);
15162 }; // TODO: use vhs-utils
15163
15164
15165 var isLikelyAacData$1 = function isLikelyAacData(data) {
15166 var offset = getId3Offset(data, 0);
15167 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, aka this
15168 // is not mp3 data but aac data.
15169 (data[offset + 1] & 0x16) === 0x10;
15170 };
15171
15172 var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
15173 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
15174 }; // return a percent-encoded representation of the specified byte range
15175 // @see http://en.wikipedia.org/wiki/Percent-encoding
15176
15177
15178 var percentEncode = function percentEncode(bytes, start, end) {
15179 var i,
15180 result = '';
15181
15182 for (i = start; i < end; i++) {
15183 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
15184 }
15185
15186 return result;
15187 }; // return the string representation of the specified byte range,
15188 // interpreted as ISO-8859-1.
15189
15190
15191 var parseIso88591 = function parseIso88591(bytes, start, end) {
15192 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
15193 };
15194
15195 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
15196 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
15197 middle = header[byteIndex + 4] << 3,
15198 highTwo = (header[byteIndex + 3] & 0x3) << 11; // parenthesized: << binds tighter than &
15199 return highTwo | middle | lowThree;
15200 };
15201
15202 var parseType$2 = function parseType(header, byteIndex) {
15203 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
15204 return 'timed-metadata';
15205 } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) { // group the mask; === binds tighter than &
15206 return 'audio';
15207 }
15208
15209 return null;
15210 };
15211
15212 var parseSampleRate = function parseSampleRate(packet) {
15213 var i = 0;
15214
15215 while (i + 5 < packet.length) {
15216 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
15217 // If a valid header was not found, jump one forward and attempt to
15218 // find a valid ADTS header starting at the next byte
15219 i++;
15220 continue;
15221 }
15222
15223 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
15224 }
15225
15226 return null;
15227 };
15228
15229 var parseAacTimestamp = function parseAacTimestamp(packet) {
15230 var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
15231
15232 frameStart = 10;
15233
15234 if (packet[5] & 0x40) {
15235 // advance the frame start past the extended header
15236 frameStart += 4; // header size field
15237
15238 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
15239 } // parse one or more ID3 frames
15240 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
15241
15242
15243 do {
15244 // determine the number of bytes in this frame
15245 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
15246
15247 if (frameSize < 1) {
15248 return null;
15249 }
15250
15251 frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
15252
15253 if (frameHeader === 'PRIV') {
15254 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
15255
15256 for (var i = 0; i < frame.byteLength; i++) {
15257 if (frame[i] === 0) {
15258 var owner = parseIso88591(frame, 0, i);
15259
15260 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
15261 var d = frame.subarray(i + 1);
15262 var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
15263 size *= 4;
15264 size += d[7] & 0x03;
15265 return size;
15266 }
15267
15268 break;
15269 }
15270 }
15271 }
15272
15273 frameStart += 10; // advance past the frame header
15274
15275 frameStart += frameSize; // advance past the frame body
15276 } while (frameStart < packet.byteLength);
15277
15278 return null;
15279 };
15280
15281 var utils = {
15282 isLikelyAacData: isLikelyAacData$1,
15283 parseId3TagSize: parseId3TagSize,
15284 parseAdtsSize: parseAdtsSize,
15285 parseType: parseType$2,
15286 parseSampleRate: parseSampleRate,
15287 parseAacTimestamp: parseAacTimestamp
15288 };
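// Editor's note: a small usage sketch (not part of the original bundle) for
// the helpers above, assuming `bytes` is a raw AAC segment that begins with
// its ID3 tag(s).
var exampleProbeAac = function (bytes) {
  if (!utils.isLikelyAacData(bytes)) {
    return null;
  }
  return {
    sampleRate: utils.parseSampleRate(bytes),
    // 90kHz timestamp from the com.apple.streaming ID3 PRIV frame, if present
    timestamp: utils.parseAacTimestamp(bytes)
  };
};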
15289
15290 var _AacStream;
15291 /**
15292 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
15293 */
15294
15295
15296 _AacStream = function AacStream() {
15297 var everything = new Uint8Array(),
15298 timeStamp = 0;
15299
15300 _AacStream.prototype.init.call(this);
15301
15302 this.setTimestamp = function (timestamp) {
15303 timeStamp = timestamp;
15304 };
15305
15306 this.push = function (bytes) {
15307 var frameSize = 0,
15308 byteIndex = 0,
15309 bytesLeft,
15310 chunk,
15311 packet,
15312 tempLength; // If there are bytes remaining from the last segment, prepend them to the
15313 // bytes that were pushed in
15314
15315 if (everything.length) {
15316 tempLength = everything.length;
15317 chunk = everything; everything = new Uint8Array(bytes.byteLength + tempLength);
15318 everything.set(chunk.subarray(0, tempLength)); // copy the old bytes, not the fresh zero-filled buffer
15319 everything.set(bytes, tempLength);
15320 } else {
15321 everything = bytes;
15322 }
15323
15324 while (everything.length - byteIndex >= 3) {
15325 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
15326 // Exit early because we don't have enough to parse
15327 // the ID3 tag header
15328 if (everything.length - byteIndex < 10) {
15329 break;
15330 } // check framesize
15331
15332
15333 frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
15334 // to emit a full packet
15335 // Add to byteIndex to support multiple ID3 tags in sequence
15336
15337 if (byteIndex + frameSize > everything.length) {
15338 break;
15339 }
15340
15341 chunk = {
15342 type: 'timed-metadata',
15343 data: everything.subarray(byteIndex, byteIndex + frameSize)
15344 };
15345 this.trigger('data', chunk);
15346 byteIndex += frameSize;
15347 continue;
15348 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
15349 // Exit early because we don't have enough to parse
15350 // the ADTS frame header
15351 if (everything.length - byteIndex < 7) {
15352 break;
15353 }
15354
15355 frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
15356 // to emit a full packet
15357
15358 if (byteIndex + frameSize > everything.length) {
15359 break;
15360 }
15361
15362 packet = {
15363 type: 'audio',
15364 data: everything.subarray(byteIndex, byteIndex + frameSize),
15365 pts: timeStamp,
15366 dts: timeStamp
15367 };
15368 this.trigger('data', packet);
15369 byteIndex += frameSize;
15370 continue;
15371 }
15372
15373 byteIndex++;
15374 }
15375
15376 bytesLeft = everything.length - byteIndex;
15377
15378 if (bytesLeft > 0) {
15379 everything = everything.subarray(byteIndex);
15380 } else {
15381 everything = new Uint8Array();
15382 }
15383 };
15384
15385 this.reset = function () {
15386 everything = new Uint8Array();
15387 this.trigger('reset');
15388 };
15389
15390 this.endTimeline = function () {
15391 everything = new Uint8Array();
15392 this.trigger('endedtimeline');
15393 };
15394 };
15395
15396 _AacStream.prototype = new stream();
15397 var aac = _AacStream; // constants
15398
15399 var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
15400 var audioProperties = AUDIO_PROPERTIES;
15401 var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
15402 var videoProperties = VIDEO_PROPERTIES;
15403 var H264Stream = h264.H264Stream;
15404 var isLikelyAacData = utils.isLikelyAacData;
15405 var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types
15406
15407 var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
15408
15409 var retriggerForStream = function retriggerForStream(key, event) {
15410 event.stream = key;
15411 this.trigger('log', event);
15412 };
15413
15414 var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
15415 var keys = Object.keys(pipeline);
15416
15417 for (var i = 0; i < keys.length; i++) {
15418 var key = keys[i]; // skip non-stream keys and headOfPipeline
15419 // which is just a duplicate
15420
15421 if (key === 'headOfPipeline' || !pipeline[key].on) {
15422 continue;
15423 }
15424
15425 pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
15426 }
15427 };
15428 /**
15429 * Compare two arrays (even typed) for same-ness
15430 */
15431
15432
15433 var arrayEquals = function arrayEquals(a, b) {
15434 var i;
15435
15436 if (a.length !== b.length) {
15437 return false;
15438 } // compare the value of each element in the array
15439
15440
15441 for (i = 0; i < a.length; i++) {
15442 if (a[i] !== b[i]) {
15443 return false;
15444 }
15445 }
15446
15447 return true;
15448 };
15449
15450 var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
15451 var ptsOffsetFromDts = startPts - startDts,
15452 decodeDuration = endDts - startDts,
15453 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
15454 // however, the player time values will reflect a start from the baseMediaDecodeTime.
15455 // In order to provide relevant values for the player times, base timing info on the
15456 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
15457
15458 return {
15459 start: {
15460 dts: baseMediaDecodeTime,
15461 pts: baseMediaDecodeTime + ptsOffsetFromDts
15462 },
15463 end: {
15464 dts: baseMediaDecodeTime + decodeDuration,
15465 pts: baseMediaDecodeTime + presentationDuration
15466 },
15467 prependedContentDuration: prependedContentDuration,
15468 baseMediaDecodeTime: baseMediaDecodeTime
15469 };
15470 };
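// Editor's illustration: with baseMediaDecodeTime = 0, startDts = 90000,
// startPts = 93000, endDts = 270000 and endPts = 273000 (90kHz clock), the
// function above reports start { dts: 0, pts: 3000 } and
// end { dts: 180000, pts: 180000 } -- durations preserved, rebased onto the
// baseMediaDecodeTime.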
15471 /**
15472 * Constructs a single-track, ISO BMFF media segment from AAC data
15473 * events. The output of this stream can be fed to a SourceBuffer
15474 * configured with a suitable initialization segment.
15475 * @param track {object} track metadata configuration
15476 * @param options {object} transmuxer options object
15477 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
15478 * in the source; false to adjust the first segment to start at 0.
15479 */
15480
15481
15482 _AudioSegmentStream = function AudioSegmentStream(track, options) {
15483 var adtsFrames = [],
15484 sequenceNumber,
15485 earliestAllowedDts = 0,
15486 audioAppendStartTs = 0,
15487 videoBaseMediaDecodeTime = Infinity;
15488 options = options || {};
15489 sequenceNumber = options.firstSequenceNumber || 0;
15490
15491 _AudioSegmentStream.prototype.init.call(this);
15492
15493 this.push = function (data) {
15494 trackDecodeInfo.collectDtsInfo(track, data);
15495
15496 if (track) {
15497 audioProperties.forEach(function (prop) {
15498 track[prop] = data[prop];
15499 });
15500 } // buffer audio data until end() is called
15501
15502
15503 adtsFrames.push(data);
15504 };
15505
15506 this.setEarliestDts = function (earliestDts) {
15507 earliestAllowedDts = earliestDts;
15508 };
15509
15510 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
15511 videoBaseMediaDecodeTime = baseMediaDecodeTime;
15512 };
15513
15514 this.setAudioAppendStart = function (timestamp) {
15515 audioAppendStartTs = timestamp;
15516 };
15517
15518 this.flush = function () {
15519 var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
15520
15521 if (adtsFrames.length === 0) {
15522 this.trigger('done', 'AudioSegmentStream');
15523 return;
15524 }
15525
15526 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
15527 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // the amount of silence prefixed below is measured in the video clock rather than the audio clock
15528
15529 videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
15530 // samples (that is, adts frames) in the audio data
15531
15532 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
15533
15534 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
15535 adtsFrames = [];
15536 moof = mp4Generator.moof(sequenceNumber, [track]);
15537 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
15538
15539 sequenceNumber++;
15540 boxes.set(moof);
15541 boxes.set(mdat, moof.byteLength);
15542 trackDecodeInfo.clearDtsInfo(track);
15543 frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
15544 // tests) on adding the timingInfo event. However, it seems unlikely that there's a
15545 // valid use-case where an init segment/data should be triggered without associated
15546 // frames. Leaving for now, but should be looked into.
15547
15548 if (frames.length) {
15549 segmentDuration = frames.length * frameDuration;
15550 this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
15551 // frame info is in video clock cycles. Convert to match expectation of
15552 // listeners (that all timestamps will be based on video clock cycles).
15553 clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
15554 frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
15555 this.trigger('timingInfo', {
15556 start: frames[0].pts,
15557 end: frames[0].pts + segmentDuration
15558 });
15559 }
15560
15561 this.trigger('data', {
15562 track: track,
15563 boxes: boxes
15564 });
15565 this.trigger('done', 'AudioSegmentStream');
15566 };
15567
15568 this.reset = function () {
15569 trackDecodeInfo.clearDtsInfo(track);
15570 adtsFrames = [];
15571 this.trigger('reset');
15572 };
15573 };
15574
15575 _AudioSegmentStream.prototype = new stream();
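  // A minimal usage sketch (illustration only, never invoked): drive an
  // AudioSegmentStream directly by pushing parsed ADTS frame events and then
  // flushing. The track shape below is a hypothetical example based on the
  // properties this file reads; in the real pipeline the demuxer supplies it.
  function exampleAudioSegmentStreamUsage(adtsFrameEvents) {
    var track = {
      id: 257, // hypothetical track id
      type: 'audio',
      codec: 'adts',
      samplerate: 44100, // hypothetical values
      channelcount: 2,
      samplesize: 16,
      timelineStartInfo: {
        baseMediaDecodeTime: 0
      }
    };
    var audioStream = new _AudioSegmentStream(track, {
      keepOriginalTimestamps: false
    });
    audioStream.on('data', function (segment) {
      // segment.boxes is a Uint8Array holding the moof + mdat pair
    });
    adtsFrameEvents.forEach(function (frame) {
      audioStream.push(frame); // frames are buffered until flush()
    });
    audioStream.flush(); // emits 'data' (if frames exist) and then 'done'
  }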
15576 /**
15577 * Constructs a single-track, ISO BMFF media segment from H264 data
15578 * events. The output of this stream can be fed to a SourceBuffer
15579 * configured with a suitable initialization segment.
15580 * @param track {object} track metadata configuration
15581 * @param options {object} transmuxer options object
15582 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
15583 * gopsToAlignWith list when attempting to align gop pts
15584 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
15585 * in the source; false to adjust the first segment to start at 0.
15586 */
15587
15588 _VideoSegmentStream = function VideoSegmentStream(track, options) {
15589 var sequenceNumber,
15590 nalUnits = [],
15591 gopsToAlignWith = [],
15592 config,
15593 pps;
15594 options = options || {};
15595 sequenceNumber = options.firstSequenceNumber || 0;
15596
15597 _VideoSegmentStream.prototype.init.call(this);
15598
15599 delete track.minPTS;
15600 this.gopCache_ = [];
15601 /**
15602 * Constructs an ISO BMFF segment given H264 nalUnits
15603 * @param {Object} nalUnit A data event representing a nalUnit
15604 * @param {String} nalUnit.nalUnitType
15605 * @param {Object} nalUnit.config Properties for a mp4 track
15606 * @param {Uint8Array} nalUnit.data The nalUnit bytes
15607 * @see lib/codecs/h264.js
15608 **/
15609
15610 this.push = function (nalUnit) {
15611 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
15612
15613 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
15614 config = nalUnit.config;
15615 track.sps = [nalUnit.data];
15616 videoProperties.forEach(function (prop) {
15617 track[prop] = config[prop];
15618 }, this);
15619 }
15620
15621 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
15622 pps = nalUnit.data;
15623 track.pps = [nalUnit.data];
15624 } // buffer video until flush() is called
15625
15626
15627 nalUnits.push(nalUnit);
15628 };
15629 /**
15630 * Pass constructed ISO BMFF track and boxes on to the
15631 * next stream in the pipeline
15632 **/
15633
15634
15635 this.flush = function () {
15636 var frames,
15637 gopForFusion,
15638 gops,
15639 moof,
15640 mdat,
15641 boxes,
15642 prependedContentDuration = 0,
15643 firstGop,
15644 lastGop; // Throw away nalUnits at the start of the byte stream until
15645 // we find the first AUD
15646
15647 while (nalUnits.length) {
15648 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
15649 break;
15650 }
15651
15652 nalUnits.shift();
15653 } // Return early if no video data has been observed
15654
15655
15656 if (nalUnits.length === 0) {
15657 this.resetStream_();
15658 this.trigger('done', 'VideoSegmentStream');
15659 return;
15660 } // Organize the raw nal-units into arrays that represent
15661 // higher-level constructs such as frames and gops
15662 // (group-of-pictures)
15663
15664
15665 frames = frameUtils.groupNalsIntoFrames(nalUnits);
15666 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
15667 // a problem since MSE (on Chrome) requires a leading keyframe.
15668 //
15669 // We have two approaches to repairing this situation:
15670 // 1) GOP-FUSION:
15671 // This is where we keep track of the GOPS (group-of-pictures)
15672 // from previous fragments and attempt to find one that we can
15673 // prepend to the current fragment in order to create a valid
15674 // fragment.
15675 // 2) KEYFRAME-PULLING:
15676 // Here we search for the first keyframe in the fragment and
15677 // throw away all the frames between the start of the fragment
15678 // and that keyframe. We then extend the duration and pull the
15679 // PTS of the keyframe forward so that it covers the time range
15680 // of the frames that were disposed of.
15681 //
15682 // #1 is far preferable to #2, which can cause "stuttering", but
15683 // requires more things to be just right.
15684
15685 if (!gops[0][0].keyFrame) {
15686 // Search for a gop for fusion from our gopCache
15687 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
15688
15689 if (gopForFusion) {
15690 // in order to provide more accurate timing information about the segment, save
15691 // the number of seconds prepended to the original segment due to GOP fusion
15692 prependedContentDuration = gopForFusion.duration;
15693 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
15694 // new gop at the beginning
15695
15696 gops.byteLength += gopForFusion.byteLength;
15697 gops.nalCount += gopForFusion.nalCount;
15698 gops.pts = gopForFusion.pts;
15699 gops.dts = gopForFusion.dts;
15700 gops.duration += gopForFusion.duration;
15701 } else {
15702 // If we didn't find a candidate gop fall back to keyframe-pulling
15703 gops = frameUtils.extendFirstKeyFrame(gops);
15704 }
15705 } // Trim gops to align with gopsToAlignWith
15706
15707
15708 if (gopsToAlignWith.length) {
15709 var alignedGops;
15710
15711 if (options.alignGopsAtEnd) {
15712 alignedGops = this.alignGopsAtEnd_(gops);
15713 } else {
15714 alignedGops = this.alignGopsAtStart_(gops);
15715 }
15716
15717 if (!alignedGops) {
15718 // save all the nals in the last GOP into the gop cache
15719 this.gopCache_.unshift({
15720 gop: gops.pop(),
15721 pps: track.pps,
15722 sps: track.sps
15723 }); // Keep a maximum of 6 GOPs in the cache
15724
15725 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
15726
15727 nalUnits = []; // return early; no gops can be aligned with the desired gopsToAlignWith
15728
15729 this.resetStream_();
15730 this.trigger('done', 'VideoSegmentStream');
15731 return;
15732 } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
15733 // when recalculated before sending off to CoalesceStream
15734
15735
15736 trackDecodeInfo.clearDtsInfo(track);
15737 gops = alignedGops;
15738 }
15739
15740 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
15741 // samples (that is, frames) in the video data
15742
15743 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
15744
15745 mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
15746 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
15747 this.trigger('processedGopsInfo', gops.map(function (gop) {
15748 return {
15749 pts: gop.pts,
15750 dts: gop.dts,
15751 byteLength: gop.byteLength
15752 };
15753 }));
15754 firstGop = gops[0];
15755 lastGop = gops[gops.length - 1];
15756 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
15757 this.trigger('timingInfo', {
15758 start: gops[0].pts,
15759 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
15760 }); // save all the nals in the last GOP into the gop cache
15761
15762 this.gopCache_.unshift({
15763 gop: gops.pop(),
15764 pps: track.pps,
15765 sps: track.sps
15766 }); // Keep a maximum of 6 GOPs in the cache
15767
15768 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
15769
15770 nalUnits = [];
15771 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
15772 this.trigger('timelineStartInfo', track.timelineStartInfo);
15773 moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
15774 // throwing away hundreds of media segment fragments
15775
15776 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
15777
15778 sequenceNumber++;
15779 boxes.set(moof);
15780 boxes.set(mdat, moof.byteLength);
15781 this.trigger('data', {
15782 track: track,
15783 boxes: boxes
15784 });
15785 this.resetStream_(); // Continue with the flush process now
15786
15787 this.trigger('done', 'VideoSegmentStream');
15788 };
15789
15790 this.reset = function () {
15791 this.resetStream_();
15792 nalUnits = [];
15793 this.gopCache_.length = 0;
15794 gopsToAlignWith.length = 0;
15795 this.trigger('reset');
15796 };
15797
15798 this.resetStream_ = function () {
15799 trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
15800 // for instance, when we are rendition switching
15801
15802 config = undefined;
15803 pps = undefined;
15804 }; // Search for a candidate Gop for gop-fusion from the gop cache and
15805 // return it or return null if no good candidate was found
15806
15807
15808 this.getGopForFusion_ = function (nalUnit) {
15809 var halfSecond = 45000,
15810 // Half a second in a 90kHz clock
15811 allowableOverlap = 10000,
15812 // About 3 frames @ 30fps
15813 nearestDistance = Infinity,
15814 dtsDistance,
15815 nearestGopObj,
15816 currentGop,
15817 currentGopObj,
15818 i; // Search for the GOP nearest to the beginning of this nal unit
15819
15820 for (i = 0; i < this.gopCache_.length; i++) {
15821 currentGopObj = this.gopCache_[i];
15822 currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
15823
15824 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
15825 continue;
15826 } // Reject Gops that would require a negative baseMediaDecodeTime
15827
15828
15829 if (currentGop.dts < track.timelineStartInfo.dts) {
15830 continue;
15831 } // The distance between the end of the gop and the start of the nalUnit
15832
15833
15834 dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
15835 // a half-second of the nal unit
15836
15837 if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
15838 // Always use the closest GOP we found if there is more than
15839 // one candidate
15840 if (!nearestGopObj || nearestDistance > dtsDistance) {
15841 nearestGopObj = currentGopObj;
15842 nearestDistance = dtsDistance;
15843 }
15844 }
15845 }
15846
15847 if (nearestGopObj) {
15848 return nearestGopObj.gop;
15849 }
15850
15851 return null;
15852 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
15853 // of gopsToAlignWith starting from the START of the list
15854
15855
15856 this.alignGopsAtStart_ = function (gops) {
15857 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
15858 byteLength = gops.byteLength;
15859 nalCount = gops.nalCount;
15860 duration = gops.duration;
15861 alignIndex = gopIndex = 0;
15862
15863 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
15864 align = gopsToAlignWith[alignIndex];
15865 gop = gops[gopIndex];
15866
15867 if (align.pts === gop.pts) {
15868 break;
15869 }
15870
15871 if (gop.pts > align.pts) {
15872 // this current gop starts after the current gop we want to align on, so increment
15873 // align index
15874 alignIndex++;
15875 continue;
15876 } // current gop starts before the current gop we want to align on, so increment gop
15877 // index
15878
15879
15880 gopIndex++;
15881 byteLength -= gop.byteLength;
15882 nalCount -= gop.nalCount;
15883 duration -= gop.duration;
15884 }
15885
15886 if (gopIndex === 0) {
15887 // no gops to trim
15888 return gops;
15889 }
15890
15891 if (gopIndex === gops.length) {
15892 // all gops trimmed, skip appending all gops
15893 return null;
15894 }
15895
15896 alignedGops = gops.slice(gopIndex);
15897 alignedGops.byteLength = byteLength;
15898 alignedGops.duration = duration;
15899 alignedGops.nalCount = nalCount;
15900 alignedGops.pts = alignedGops[0].pts;
15901 alignedGops.dts = alignedGops[0].dts;
15902 return alignedGops;
15903 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
15904 // of gopsToAlignWith starting from the END of the list
15905
15906
15907 this.alignGopsAtEnd_ = function (gops) {
15908 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
15909 alignIndex = gopsToAlignWith.length - 1;
15910 gopIndex = gops.length - 1;
15911 alignEndIndex = null;
15912 matchFound = false;
15913
15914 while (alignIndex >= 0 && gopIndex >= 0) {
15915 align = gopsToAlignWith[alignIndex];
15916 gop = gops[gopIndex];
15917
15918 if (align.pts === gop.pts) {
15919 matchFound = true;
15920 break;
15921 }
15922
15923 if (align.pts > gop.pts) {
15924 alignIndex--;
15925 continue;
15926 }
15927
15928 if (alignIndex === gopsToAlignWith.length - 1) {
15929 // gop.pts is greater than the last alignment candidate. If no match is found
15930 // by the end of this loop, we still want to append gops that come after this
15931 // point
15932 alignEndIndex = gopIndex;
15933 }
15934
15935 gopIndex--;
15936 }
15937
15938 if (!matchFound && alignEndIndex === null) {
15939 return null;
15940 }
15941
15942 var trimIndex;
15943
15944 if (matchFound) {
15945 trimIndex = gopIndex;
15946 } else {
15947 trimIndex = alignEndIndex;
15948 }
15949
15950 if (trimIndex === 0) {
15951 return gops;
15952 }
15953
15954 var alignedGops = gops.slice(trimIndex);
15955 var metadata = alignedGops.reduce(function (total, gop) {
15956 total.byteLength += gop.byteLength;
15957 total.duration += gop.duration;
15958 total.nalCount += gop.nalCount;
15959 return total;
15960 }, {
15961 byteLength: 0,
15962 duration: 0,
15963 nalCount: 0
15964 });
15965 alignedGops.byteLength = metadata.byteLength;
15966 alignedGops.duration = metadata.duration;
15967 alignedGops.nalCount = metadata.nalCount;
15968 alignedGops.pts = alignedGops[0].pts;
15969 alignedGops.dts = alignedGops[0].dts;
15970 return alignedGops;
15971 };
15972
15973 this.alignGopsWith = function (newGopsToAlignWith) {
15974 gopsToAlignWith = newGopsToAlignWith;
15975 };
15976 };
15977
15978 _VideoSegmentStream.prototype = new stream();
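  // A minimal usage sketch (illustration only, never invoked): drive a
  // VideoSegmentStream by pushing nalUnit events of the shape documented
  // above (as produced by the H264Stream) and then flushing. The track
  // object here is a hypothetical example; the demuxer supplies real tracks.
  function exampleVideoSegmentStreamUsage(nalUnitEvents) {
    var track = {
      id: 256, // hypothetical track id
      type: 'video',
      timelineStartInfo: {
        baseMediaDecodeTime: 0
      }
    };
    var videoStream = new _VideoSegmentStream(track, {
      keepOriginalTimestamps: false
    });
    videoStream.on('data', function (segment) {
      // segment.boxes is a Uint8Array holding the moof + mdat pair
    });
    videoStream.on('segmentTimingInfo', function (timingInfo) {
      // DTS/PTS bounds of the emitted fragment, plus any gop-fusion prefix
    });
    nalUnitEvents.forEach(function (nalUnit) {
      videoStream.push(nalUnit); // buffered until flush()
    });
    videoStream.flush();
  }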
15979 /**
15980 * A Stream that can combine multiple streams (ie. audio & video)
15981 * into a single output segment for MSE. Also supports audio-only
15982 * and video-only streams.
15983 * @param options {object} transmuxer options object
15984 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
15985 * in the source; false to adjust the first segment to start at media timeline start.
15986 */
15987
15988 _CoalesceStream = function CoalesceStream(options, metadataStream) {
15989 // Number of Tracks per output segment
15990 // If greater than 1, we combine multiple
15991 // tracks into a single segment
15992 this.numberOfTracks = 0;
15993 this.metadataStream = metadataStream;
15994 options = options || {};
15995
15996 if (typeof options.remux !== 'undefined') {
15997 this.remuxTracks = !!options.remux;
15998 } else {
15999 this.remuxTracks = true;
16000 }
16001
16002 if (typeof options.keepOriginalTimestamps === 'boolean') {
16003 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
16004 } else {
16005 this.keepOriginalTimestamps = false;
16006 }
16007
16008 this.pendingTracks = [];
16009 this.videoTrack = null;
16010 this.pendingBoxes = [];
16011 this.pendingCaptions = [];
16012 this.pendingMetadata = [];
16013 this.pendingBytes = 0;
16014 this.emittedTracks = 0;
16015
16016 _CoalesceStream.prototype.init.call(this); // Take output from multiple
16017
16018
16019 this.push = function (output) {
16020 // buffer incoming captions until the associated video segment
16021 // finishes
16022 if (output.text) {
16023 return this.pendingCaptions.push(output);
16024 } // buffer incoming id3 tags until the final flush
16025
16026
16027 if (output.frames) {
16028 return this.pendingMetadata.push(output);
16029 } // Add this track to the list of pending tracks and store
16030 // important information required for the construction of
16031 // the final segment
16032
16033
16034 this.pendingTracks.push(output.track);
16035 this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
16036 // We unshift audio and push video because,
16037 // as of Chrome 75, when switching from
16038 // one init segment to another, if the video
16039 // mdat does not appear after the audio mdat,
16040 // only audio will play for the duration of our transmux.
16041
16042 if (output.track.type === 'video') {
16043 this.videoTrack = output.track;
16044 this.pendingBoxes.push(output.boxes);
16045 }
16046
16047 if (output.track.type === 'audio') {
16048 this.audioTrack = output.track;
16049 this.pendingBoxes.unshift(output.boxes);
16050 }
16051 };
16052 };
16053
16054 _CoalesceStream.prototype = new stream();
16055
16056 _CoalesceStream.prototype.flush = function (flushSource) {
16057 var offset = 0,
16058 event = {
16059 captions: [],
16060 captionStreams: {},
16061 metadata: [],
16062 info: {}
16063 },
16064 caption,
16065 id3,
16066 initSegment,
16067 timelineStartPts = 0,
16068 i;
16069
16070 if (this.pendingTracks.length < this.numberOfTracks) {
16071 if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
16072 // Return because we haven't received a flush from a data-generating
16073 // portion of the segment (meaning that we have only received meta-data
16074 // or captions.)
16075 return;
16076 } else if (this.remuxTracks) {
16077 // Return until we have enough tracks from the pipeline to remux (if we
16078 // are remuxing audio and video into a single MP4)
16079 return;
16080 } else if (this.pendingTracks.length === 0) {
16081 // In the case where we receive a flush without any data having been
16082 // received we consider it an emitted track for the purposes of coalescing
16083 // `done` events.
16084 // We do this for the case where there is an audio and video track in the
16085 // segment but no audio data. (seen in several playlists with alternate
16086 // audio tracks and no audio present in the main TS segments.)
16087 this.emittedTracks++;
16088
16089 if (this.emittedTracks >= this.numberOfTracks) {
16090 this.trigger('done');
16091 this.emittedTracks = 0;
16092 }
16093
16094 return;
16095 }
16096 }
16097
16098 if (this.videoTrack) {
16099 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
16100 videoProperties.forEach(function (prop) {
16101 event.info[prop] = this.videoTrack[prop];
16102 }, this);
16103 } else if (this.audioTrack) {
16104 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
16105 audioProperties.forEach(function (prop) {
16106 event.info[prop] = this.audioTrack[prop];
16107 }, this);
16108 }
16109
16110 if (this.videoTrack || this.audioTrack) {
16111 if (this.pendingTracks.length === 1) {
16112 event.type = this.pendingTracks[0].type;
16113 } else {
16114 event.type = 'combined';
16115 }
16116
16117 this.emittedTracks += this.pendingTracks.length;
16118 initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
16119
16120 event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
16121 // and track definitions
16122
16123 event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
16124
16125 event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
16126
16127 for (i = 0; i < this.pendingBoxes.length; i++) {
16128 event.data.set(this.pendingBoxes[i], offset);
16129 offset += this.pendingBoxes[i].byteLength;
16130 } // Translate caption PTS times into second offsets to match the
16131 // video timeline for the segment, and add track info
16132
16133
16134 for (i = 0; i < this.pendingCaptions.length; i++) {
16135 caption = this.pendingCaptions[i];
16136 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
16137 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
16138 event.captionStreams[caption.stream] = true;
16139 event.captions.push(caption);
16140 } // Translate ID3 frame PTS times into second offsets to match the
16141 // video timeline for the segment
16142
16143
16144 for (i = 0; i < this.pendingMetadata.length; i++) {
16145 id3 = this.pendingMetadata[i];
16146 id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
16147 event.metadata.push(id3);
16148 } // We add this to every single emitted segment even though we only need
16149 // it for the first
16150
16151
16152 event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
16153
16154 this.pendingTracks.length = 0;
16155 this.videoTrack = null;
16156 this.pendingBoxes.length = 0;
16157 this.pendingCaptions.length = 0;
16158 this.pendingBytes = 0;
16159 this.pendingMetadata.length = 0; // Emit the built segment
16160 // We include captions and ID3 tags for backwards compatibility,
16161 // ideally we should send only video and audio in the data event
16162
16163 this.trigger('data', event); // Emit each caption to the outside world
16164 // Ideally, this would happen immediately on parsing captions,
16165 // but we need to ensure that video data is sent back first
16166 // so that caption timing can be adjusted to match video timing
16167
16168 for (i = 0; i < event.captions.length; i++) {
16169 caption = event.captions[i];
16170 this.trigger('caption', caption);
16171 } // Emit each id3 tag to the outside world
16172 // Ideally, this would happen immediately on parsing the tag,
16173 // but we need to ensure that video data is sent back first
16174 // so that ID3 frame timing can be adjusted to match video timing
16175
16176
16177 for (i = 0; i < event.metadata.length; i++) {
16178 id3 = event.metadata[i];
16179 this.trigger('id3Frame', id3);
16180 }
16181 } // Only emit `done` if all tracks have been flushed and emitted
16182
16183
16184 if (this.emittedTracks >= this.numberOfTracks) {
16185 this.trigger('done');
16186 this.emittedTracks = 0;
16187 }
16188 };
16189
16190 _CoalesceStream.prototype.setRemux = function (val) {
16191 this.remuxTracks = val;
16192 };
16193 /**
16194 * A Stream that expects MP2T binary data as input and produces
16195 * corresponding media segments, suitable for use with Media Source
16196 * Extension (MSE) implementations that support the ISO BMFF byte
16197 * stream format, like Chrome.
16198 */
16199
16200
16201 _Transmuxer = function Transmuxer(options) {
16202 var self = this,
16203 hasFlushed = true,
16204 videoTrack,
16205 audioTrack;
16206
16207 _Transmuxer.prototype.init.call(this);
16208
16209 options = options || {};
16210 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
16211 this.transmuxPipeline_ = {};
16212
16213 this.setupAacPipeline = function () {
16214 var pipeline = {};
16215 this.transmuxPipeline_ = pipeline;
16216 pipeline.type = 'aac';
16217 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
16218
16219 pipeline.aacStream = new aac();
16220 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
16221 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
16222 pipeline.adtsStream = new adts();
16223 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
16224 pipeline.headOfPipeline = pipeline.aacStream;
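 // The pipe() calls below build the following topology (audioSegmentStream,
 // in brackets, is attached lazily in the 'data' handler once aac data is seen):
 //
 //   aacStream -> audioTimestampRolloverStream -> adtsStream -> [audioSegmentStream] -> coalesceStream
 //   aacStream -> timedMetadataTimestampRolloverStream -> metadataStream -> coalesceStream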
16225 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
16226 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
16227 pipeline.metadataStream.on('timestamp', function (frame) {
16228 pipeline.aacStream.setTimestamp(frame.timeStamp);
16229 });
16230 pipeline.aacStream.on('data', function (data) {
16231 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
16232 return;
16233 }
16234
16235 audioTrack = audioTrack || {
16236 timelineStartInfo: {
16237 baseMediaDecodeTime: self.baseMediaDecodeTime
16238 },
16239 codec: 'adts',
16240 type: 'audio'
16241 }; // hook up the audio segment stream to the first track with aac data
16242
16243 pipeline.coalesceStream.numberOfTracks++;
16244 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
16245 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
16246 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
16247
16248 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
16249
16250 self.trigger('trackinfo', {
16251 hasAudio: !!audioTrack,
16252 hasVideo: !!videoTrack
16253 });
16254 }); // Re-emit any data coming from the coalesce stream to the outside world
16255
16256 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
16257
16258 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
16259 addPipelineLogRetriggers(this, pipeline);
16260 };
16261
16262 this.setupTsPipeline = function () {
16263 var pipeline = {};
16264 this.transmuxPipeline_ = pipeline;
16265 pipeline.type = 'ts';
16266 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
16267
16268 pipeline.packetStream = new m2ts_1.TransportPacketStream();
16269 pipeline.parseStream = new m2ts_1.TransportParseStream();
16270 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
16271 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
16272 pipeline.adtsStream = new adts();
16273 pipeline.h264Stream = new H264Stream();
16274 pipeline.captionStream = new m2ts_1.CaptionStream(options);
16275 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
16276 pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
16277
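 // The pipe() calls below build the following topology (the segment streams
 // in brackets are attached lazily once track metadata arrives):
 //
 //   packetStream -> parseStream -> elementaryStream -> timestampRolloverStream
 //   timestampRolloverStream -> h264Stream -> [videoSegmentStream] -> coalesceStream
 //   timestampRolloverStream -> adtsStream -> [audioSegmentStream] -> coalesceStream
 //   timestampRolloverStream -> metadataStream -> coalesceStream
 //   h264Stream -> captionStream -> coalesceStream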
16278 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
16279 // demux the streams
16280
16281 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
16282 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
16283 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
16284
16285 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
16286 pipeline.elementaryStream.on('data', function (data) {
16287 var i;
16288
16289 if (data.type === 'metadata') {
16290 i = data.tracks.length; // scan the tracks listed in the metadata
16291
16292 while (i--) {
16293 if (!videoTrack && data.tracks[i].type === 'video') {
16294 videoTrack = data.tracks[i];
16295 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
16296 } else if (!audioTrack && data.tracks[i].type === 'audio') {
16297 audioTrack = data.tracks[i];
16298 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
16299 }
16300 } // hook up the video segment stream to the first track with h264 data
16301
16302
16303 if (videoTrack && !pipeline.videoSegmentStream) {
16304 pipeline.coalesceStream.numberOfTracks++;
16305 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
16306 pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
16307 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
16308 // When video emits timelineStartInfo data after a flush, we forward that
16309 // info to the AudioSegmentStream, if it exists, because video timeline
16310 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
16311 // because this is a particularly subtle form of timestamp alteration.
16312 if (audioTrack && !options.keepOriginalTimestamps) {
16313 audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
16314 // very earliest DTS we have seen in video because Chrome will
16315 // interpret any video track with a baseMediaDecodeTime that is
16316 // non-zero as a gap.
16317
16318 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
16319 }
16320 });
16321 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
16322 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
16323 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
16324 if (audioTrack) {
16325 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
16326 }
16327 });
16328 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
16329
16330 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
16331 }
16332
16333 if (audioTrack && !pipeline.audioSegmentStream) {
16334 // hook up the audio segment stream to the first track with aac data
16335 pipeline.coalesceStream.numberOfTracks++;
16336 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
16337 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
16338 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
16339 pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
16340
16341 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
16342 } // emit pmt info
16343
16344
16345 self.trigger('trackinfo', {
16346 hasAudio: !!audioTrack,
16347 hasVideo: !!videoTrack
16348 });
16349 }
16350 }); // Re-emit any data coming from the coalesce stream to the outside world
16351
16352 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
16353 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
16354 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
16355 self.trigger('id3Frame', id3Frame);
16356 });
16357 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
16358
16359 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
16360 addPipelineLogRetriggers(this, pipeline);
16361 }; // hook up the segment streams once track metadata is delivered
16362
16363
16364 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
16365 var pipeline = this.transmuxPipeline_;
16366
16367 if (!options.keepOriginalTimestamps) {
16368 this.baseMediaDecodeTime = baseMediaDecodeTime;
16369 }
16370
16371 if (audioTrack) {
16372 audioTrack.timelineStartInfo.dts = undefined;
16373 audioTrack.timelineStartInfo.pts = undefined;
16374 trackDecodeInfo.clearDtsInfo(audioTrack);
16375
16376 if (pipeline.audioTimestampRolloverStream) {
16377 pipeline.audioTimestampRolloverStream.discontinuity();
16378 }
16379 }
16380
16381 if (videoTrack) {
16382 if (pipeline.videoSegmentStream) {
16383 pipeline.videoSegmentStream.gopCache_ = [];
16384 }
16385
16386 videoTrack.timelineStartInfo.dts = undefined;
16387 videoTrack.timelineStartInfo.pts = undefined;
16388 trackDecodeInfo.clearDtsInfo(videoTrack);
16389 pipeline.captionStream.reset();
16390 }
16391
16392 if (pipeline.timestampRolloverStream) {
16393 pipeline.timestampRolloverStream.discontinuity();
16394 }
16395 };
16396
16397 this.setAudioAppendStart = function (timestamp) {
16398 if (audioTrack) {
16399 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
16400 }
16401 };
16402
16403 this.setRemux = function (val) {
16404 var pipeline = this.transmuxPipeline_;
16405 options.remux = val;
16406
16407 if (pipeline && pipeline.coalesceStream) {
16408 pipeline.coalesceStream.setRemux(val);
16409 }
16410 };
16411
16412 this.alignGopsWith = function (gopsToAlignWith) {
16413 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
16414 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
16415 }
16416 };
16417
16418 this.getLogTrigger_ = function (key) {
16419 var self = this;
16420 return function (event) {
16421 event.stream = key;
16422 self.trigger('log', event);
16423 };
16424 }; // feed incoming data to the front of the parsing pipeline
16425
16426
16427 this.push = function (data) {
16428 if (hasFlushed) {
16429 var isAac = isLikelyAacData(data);
16430
16431 if (isAac && this.transmuxPipeline_.type !== 'aac') {
16432 this.setupAacPipeline();
16433 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
16434 this.setupTsPipeline();
16435 }
16436
16437 hasFlushed = false;
16438 }
16439
16440 this.transmuxPipeline_.headOfPipeline.push(data);
16441 }; // flush any buffered data
16442
16443
16444 this.flush = function () {
16445 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
16446
16447 this.transmuxPipeline_.headOfPipeline.flush();
16448 };
16449
16450 this.endTimeline = function () {
16451 this.transmuxPipeline_.headOfPipeline.endTimeline();
16452 };
16453
16454 this.reset = function () {
16455 if (this.transmuxPipeline_.headOfPipeline) {
16456 this.transmuxPipeline_.headOfPipeline.reset();
16457 }
16458 }; // Caption data has to be reset when seeking outside buffered range
16459
16460
16461 this.resetCaptions = function () {
16462 if (this.transmuxPipeline_.captionStream) {
16463 this.transmuxPipeline_.captionStream.reset();
16464 }
16465 };
16466 };
16467
16468 _Transmuxer.prototype = new stream();
16469 var transmuxer = {
16470 Transmuxer: _Transmuxer,
16471 VideoSegmentStream: _VideoSegmentStream,
16472 AudioSegmentStream: _AudioSegmentStream,
16473 AUDIO_PROPERTIES: audioProperties,
16474 VIDEO_PROPERTIES: videoProperties,
16475 // exported for testing
16476 generateSegmentTimingInfo: generateSegmentTimingInfo
16477 };
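  // A minimal end-to-end sketch (illustration only, never invoked): feed raw
  // MPEG2-TS bytes to a Transmuxer and collect the fMP4 output. The
  // tsSegmentBytes parameter is a hypothetical Uint8Array, e.g. the response
  // body of a segment request; it is not defined in this file.
  function exampleTransmux(tsSegmentBytes) {
    var muxer = new transmuxer.Transmuxer({
      remux: true // combine audio and video into a single output segment
    });
    muxer.on('data', function (event) {
      // event.initSegment: ftyp/moov bytes used to initialize a SourceBuffer
      // event.data: the moof + mdat fragment bytes
      // event.type: 'video', 'audio' or 'combined'
    });
    muxer.on('done', function () {
      // the whole pipeline has been flushed
    });
    muxer.push(tsSegmentBytes);
    muxer.flush();
  }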
16478 /**
16479 * mux.js
16480 *
16481 * Copyright (c) Brightcove
16482 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
16483 */
16484
16485 var toUnsigned$3 = function toUnsigned(value) {
16486 return value >>> 0;
16487 };
16488
16489 var toHexString$1 = function toHexString(value) {
16490 return ('00' + value.toString(16)).slice(-2);
16491 };
16492
16493 var bin = {
16494 toUnsigned: toUnsigned$3,
16495 toHexString: toHexString$1
16496 };
16497
16498 var parseType$1 = function parseType(buffer) {
16499 var result = '';
16500 result += String.fromCharCode(buffer[0]);
16501 result += String.fromCharCode(buffer[1]);
16502 result += String.fromCharCode(buffer[2]);
16503 result += String.fromCharCode(buffer[3]);
16504 return result;
16505 };
16506
16507 var parseType_1 = parseType$1;
16508 var toUnsigned$2 = bin.toUnsigned;
16509
16510 var findBox = function findBox(data, path) {
16511 var results = [],
16512 i,
16513 size,
16514 type,
16515 end,
16516 subresults;
16517
16518 if (!path.length) {
16519 // short-circuit the search for empty paths
16520 return null;
16521 }
16522
16523 for (i = 0; i < data.byteLength;) {
16524 size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
16525 type = parseType_1(data.subarray(i + 4, i + 8));
16526 end = size > 1 ? i + size : data.byteLength;
16527
16528 if (type === path[0]) {
16529 if (path.length === 1) {
16530 // this is the end of the path and we've found the box we were
16531 // looking for
16532 results.push(data.subarray(i + 8, end));
16533 } else {
16534 // recursively search for the next box along the path
16535 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
16536
16537 if (subresults.length) {
16538 results = results.concat(subresults);
16539 }
16540 }
16541 }
16542
16543 i = end;
16544 } // we've finished searching all of data
16545
16546
16547 return results;
16548 };
16549
16550 var findBox_1 = findBox;
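  // A small worked example (illustration only, never invoked): findBox walks
  // nested boxes by path. These 16 bytes hand-assemble a 'moov' box whose
  // payload is a single empty 'trak' box; real input comes from segment data.
  function exampleFindBox() {
    var bytes = new Uint8Array([
      0x00, 0x00, 0x00, 0x10, 0x6d, 0x6f, 0x6f, 0x76, // size 16, type 'moov'
      0x00, 0x00, 0x00, 0x08, 0x74, 0x72, 0x61, 0x6b  // size 8, type 'trak'
    ]);
    // returns one match: the (empty) payload of the nested trak box
    return findBox_1(bytes, ['moov', 'trak']);
  }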
16551 var toUnsigned$1 = bin.toUnsigned;
16552 var getUint64$1 = numbers.getUint64;
16553
16554 var tfdt = function tfdt(data) {
16555 var result = {
16556 version: data[0],
16557 flags: new Uint8Array(data.subarray(1, 4))
16558 };
16559
16560 if (result.version === 1) {
16561 result.baseMediaDecodeTime = getUint64$1(data.subarray(4));
16562 } else {
16563 result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);
16564 }
16565
16566 return result;
16567 };
16568
16569 var parseTfdt = tfdt;
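  // A small worked example (illustration only, never invoked): parseTfdt
  // operates on the tfdt *payload* (everything after the 8-byte size/type
  // header). Version 0 stores baseMediaDecodeTime as a 32-bit value,
  // version 1 as a 64-bit value (returned via getUint64).
  function exampleParseTfdt() {
    var payload = new Uint8Array([
      0x00,                  // version 0
      0x00, 0x00, 0x00,      // flags
      0x00, 0x01, 0x5f, 0x90 // baseMediaDecodeTime = 90000
    ]);
    return parseTfdt(payload).baseMediaDecodeTime; // 90000
  }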
16570
16571 var parseSampleFlags = function parseSampleFlags(flags) {
16572 return {
16573 isLeading: (flags[0] & 0x0c) >>> 2,
16574 dependsOn: flags[0] & 0x03,
16575 isDependedOn: (flags[1] & 0xc0) >>> 6,
16576 hasRedundancy: (flags[1] & 0x30) >>> 4,
16577 paddingValue: (flags[1] & 0x0e) >>> 1,
16578 isNonSyncSample: flags[1] & 0x01,
16579 degradationPriority: flags[2] << 8 | flags[3]
16580 };
16581 };
16582
16583 var parseSampleFlags_1 = parseSampleFlags;
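  // A small worked example (illustration only, never invoked): the flag bytes
  // 0x01 0x01 0x00 0x00 decode as dependsOn = 1 and isNonSyncSample = 1, a
  // pattern commonly used for non-keyframe video samples.
  function exampleParseSampleFlags() {
    return parseSampleFlags_1(new Uint8Array([0x01, 0x01, 0x00, 0x00]));
  }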
16584
16585 var trun = function trun(data) {
16586 var result = {
16587 version: data[0],
16588 flags: new Uint8Array(data.subarray(1, 4)),
16589 samples: []
16590 },
16591 view = new DataView(data.buffer, data.byteOffset, data.byteLength),
16592 // Flag interpretation
16593 dataOffsetPresent = result.flags[2] & 0x01,
16594 // compare with 2nd byte of 0x1
16595 firstSampleFlagsPresent = result.flags[2] & 0x04,
16596 // compare with 2nd byte of 0x4
16597 sampleDurationPresent = result.flags[1] & 0x01,
16598 // compare with 2nd byte of 0x100
16599 sampleSizePresent = result.flags[1] & 0x02,
16600 // compare with 2nd byte of 0x200
16601 sampleFlagsPresent = result.flags[1] & 0x04,
16602 // compare with 2nd byte of 0x400
16603 sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
16604 // compare with 2nd byte of 0x800
16605 sampleCount = view.getUint32(4),
16606 offset = 8,
16607 sample;
16608
16609 if (dataOffsetPresent) {
16610 // 32 bit signed integer
16611 result.dataOffset = view.getInt32(offset);
16612 offset += 4;
16613 } // Overrides the flags for the first sample only. The order of
16614 // optional values will be: duration, size, compositionTimeOffset
16615
16616
16617 if (firstSampleFlagsPresent && sampleCount) {
16618 sample = {
16619 flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
16620 };
16621 offset += 4;
16622
16623 if (sampleDurationPresent) {
16624 sample.duration = view.getUint32(offset);
16625 offset += 4;
16626 }
16627
16628 if (sampleSizePresent) {
16629 sample.size = view.getUint32(offset);
16630 offset += 4;
16631 }
16632
16633 if (sampleCompositionTimeOffsetPresent) {
16634 if (result.version === 1) {
16635 sample.compositionTimeOffset = view.getInt32(offset);
16636 } else {
16637 sample.compositionTimeOffset = view.getUint32(offset);
16638 }
16639
16640 offset += 4;
16641 }
16642
16643 result.samples.push(sample);
16644 sampleCount--;
16645 }
16646
16647 while (sampleCount--) {
16648 sample = {};
16649
16650 if (sampleDurationPresent) {
16651 sample.duration = view.getUint32(offset);
16652 offset += 4;
16653 }
16654
16655 if (sampleSizePresent) {
16656 sample.size = view.getUint32(offset);
16657 offset += 4;
16658 }
16659
16660 if (sampleFlagsPresent) {
16661 sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
16662 offset += 4;
16663 }
16664
16665 if (sampleCompositionTimeOffsetPresent) {
16666 if (result.version === 1) {
16667 sample.compositionTimeOffset = view.getInt32(offset);
16668 } else {
16669 sample.compositionTimeOffset = view.getUint32(offset);
16670 }
16671
16672 offset += 4;
16673 }
16674
16675 result.samples.push(sample);
16676 }
16677
16678 return result;
16679 };
16680
16681 var parseTrun = trun;
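  // A small worked example (illustration only, never invoked): a hand-built
  // trun payload with flags 0x000301 (data-offset, sample-duration and
  // sample-size present) and a single sample. parseTrun should yield
  // dataOffset 16 and one sample entry.
  function exampleParseTrun() {
    var payload = new Uint8Array([
      0x00,                   // version 0
      0x00, 0x03, 0x01,       // flags: duration, size, data offset present
      0x00, 0x00, 0x00, 0x01, // sample count = 1
      0x00, 0x00, 0x00, 0x10, // data offset = 16
      0x00, 0x00, 0x0b, 0xb8, // sample duration = 3000
      0x00, 0x00, 0x04, 0x00  // sample size = 1024
    ]);
    return parseTrun(payload); // { dataOffset: 16, samples: [{ duration: 3000, size: 1024 }], ... }
  }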
16682
16683 var tfhd = function tfhd(data) {
16684 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
16685 result = {
16686 version: data[0],
16687 flags: new Uint8Array(data.subarray(1, 4)),
16688 trackId: view.getUint32(4)
16689 },
16690 baseDataOffsetPresent = result.flags[2] & 0x01,
16691 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
16692 defaultSampleDurationPresent = result.flags[2] & 0x08,
16693 defaultSampleSizePresent = result.flags[2] & 0x10,
16694 defaultSampleFlagsPresent = result.flags[2] & 0x20,
16695 durationIsEmpty = result.flags[0] & 0x010000,
16696 defaultBaseIsMoof = result.flags[0] & 0x020000,
16697 i;
16698 i = 8;
16699
16700 if (baseDataOffsetPresent) {
16701 i += 4; // truncate top 4 bytes
16702 // FIXME: should we read the full 64 bits?
16703
16704 result.baseDataOffset = view.getUint32(12);
16705 i += 4;
16706 }
16707
16708 if (sampleDescriptionIndexPresent) {
16709 result.sampleDescriptionIndex = view.getUint32(i);
16710 i += 4;
16711 }
16712
16713 if (defaultSampleDurationPresent) {
16714 result.defaultSampleDuration = view.getUint32(i);
16715 i += 4;
16716 }
16717
16718 if (defaultSampleSizePresent) {
16719 result.defaultSampleSize = view.getUint32(i);
16720 i += 4;
16721 }
16722
16723 if (defaultSampleFlagsPresent) {
16724 result.defaultSampleFlags = view.getUint32(i);
16725 }
16726
16727 if (durationIsEmpty) {
16728 result.durationIsEmpty = true;
16729 }
16730
16731 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
16732 result.baseDataOffsetIsMoof = true;
16733 }
16734
16735 return result;
16736 };
16737
16738 var parseTfhd = tfhd;
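  // A small worked example (illustration only, never invoked): a minimal tfhd
  // payload declaring only a default sample duration. parseTfhd reads the
  // trackId from bytes 4-7, then consumes the optional fields the flag bits
  // announce.
  function exampleParseTfhd() {
    var payload = new Uint8Array([
      0x00,                   // version 0
      0x00, 0x00, 0x08,       // flags: default-sample-duration-present
      0x00, 0x00, 0x00, 0x01, // trackId = 1
      0x00, 0x00, 0x0b, 0xb8  // defaultSampleDuration = 3000
    ]);
    return parseTfhd(payload); // { trackId: 1, defaultSampleDuration: 3000, ... }
  }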
16739 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
16740 var win;
16741
16742 if (typeof window !== "undefined") {
16743 win = window;
16744 } else if (typeof commonjsGlobal !== "undefined") {
16745 win = commonjsGlobal;
16746 } else if (typeof self !== "undefined") {
16747 win = self;
16748 } else {
16749 win = {};
16750 }
16751
16752 var window_1 = win;
16753 var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
16754 var CaptionStream = captionStream.CaptionStream;
16755 /**
16756 * Maps an offset in the mdat to a sample based on the size of the samples.
16757 * Assumes that `parseSamples` has been called first.
16758 *
16759 * @param {Number} offset - The offset into the mdat
16760 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
16761 * @return {?Object} The matching sample, or null if no match was found.
16762 *
16763 * @see ISO-BMFF-12/2015, Section 8.8.8
16764 **/
16765
16766 var mapToSample = function mapToSample(offset, samples) {
16767 var approximateOffset = offset;
16768
16769 for (var i = 0; i < samples.length; i++) {
16770 var sample = samples[i];
16771
16772 if (approximateOffset < sample.size) {
16773 return sample;
16774 }
16775
16776 approximateOffset -= sample.size;
16777 }
16778
16779 return null;
16780 };
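  // A small worked example (illustration only, never invoked): with samples
  // of 100 and 200 bytes, an mdat offset of 150 falls inside the second
  // sample.
  function exampleMapToSample() {
    return mapToSample(150, [{ size: 100 }, { size: 200 }]); // -> { size: 200 }
  }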
16781 /**
16782 * Finds SEI nal units contained in a Media Data Box.
16783 * Assumes that `parseSamples` has been called first.
16784 *
16785 * @param {Uint8Array} avcStream - The bytes of the mdat
16786 * @param {Object[]} samples - The samples parsed out by `parseSamples`
16787 * @param {Number} trackId - The trackId of this video track
16788 * @return {Object[]} seiNals - the parsed SEI NALUs found.
16789 * The contents of the seiNal should match what is expected by
16790 * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
16791 *
16792 * @see ISO-BMFF-12/2015, Section 8.1.1
16793 * @see Rec. ITU-T H.264, 7.3.2.3.1
16794 **/
16795
16796
16797 var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
16798 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
16799 result = {
16800 logs: [],
16801 seiNals: []
16802 },
16803 seiNal,
16804 i,
16805 length,
16806 lastMatchedSample;
16807
16808 for (i = 0; i + 4 < avcStream.length; i += length) {
16809 length = avcView.getUint32(i);
16810 i += 4; // Bail if this doesn't appear to be an H264 stream
16811
16812 if (length <= 0) {
16813 continue;
16814 }
16815
16816 switch (avcStream[i] & 0x1F) {
16817 case 0x06:
16818 var data = avcStream.subarray(i + 1, i + 1 + length);
16819 var matchingSample = mapToSample(i, samples);
16820 seiNal = {
16821 nalUnitType: 'sei_rbsp',
16822 size: length,
16823 data: data,
16824 escapedRBSP: discardEmulationPreventionBytes(data),
16825 trackId: trackId
16826 };
16827
16828 if (matchingSample) {
16829 seiNal.pts = matchingSample.pts;
16830 seiNal.dts = matchingSample.dts;
16831 lastMatchedSample = matchingSample;
16832 } else if (lastMatchedSample) {
16833 // If a matching sample cannot be found, use the last
16834 // sample's values as they should be as close as possible
16835 seiNal.pts = lastMatchedSample.pts;
16836 seiNal.dts = lastMatchedSample.dts;
16837 } else {
16838 result.logs.push({
16839 level: 'warn',
16840 message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
16841 });
16842 break;
16843 }
16844
16845 result.seiNals.push(seiNal);
16846 break;
16847 }
16848 }
16849
16850 return result;
16851 };
16852 /**
16853 * Parses sample information out of Track Run Boxes and calculates
16854 * the absolute presentation and decode timestamps of each sample.
16855 *
16856 * @param {Array<Uint8Array>} truns - The Track Run boxes to be parsed
16857 * @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt
16858 * @see ISO-BMFF-12/2015, Section 8.8.12
16859 * @param {Object} tfhd - The parsed Track Fragment Header
16860 * @see inspect.parseTfhd
16861 * @return {Object[]} the parsed samples
16862 *
16863 * @see ISO-BMFF-12/2015, Section 8.8.8
16864 **/
16865
16866
16867 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
16868 var currentDts = baseMediaDecodeTime;
16869 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
16870 var defaultSampleSize = tfhd.defaultSampleSize || 0;
16871 var trackId = tfhd.trackId;
16872 var allSamples = [];
16873 truns.forEach(function (trun) {
16874 // Note: We currently do not parse the sample table as well
16875 // as the trun. It's possible some sources will require this.
16876 // moov > trak > mdia > minf > stbl
16877 var trackRun = parseTrun(trun);
16878 var samples = trackRun.samples;
16879 samples.forEach(function (sample) {
16880 if (sample.duration === undefined) {
16881 sample.duration = defaultSampleDuration;
16882 }
16883
16884 if (sample.size === undefined) {
16885 sample.size = defaultSampleSize;
16886 }
16887
16888 sample.trackId = trackId;
16889 sample.dts = currentDts;
16890
16891 if (sample.compositionTimeOffset === undefined) {
16892 sample.compositionTimeOffset = 0;
16893 }
16894
16895 if (typeof currentDts === 'bigint') {
16896 sample.pts = currentDts + window_1.BigInt(sample.compositionTimeOffset);
16897 currentDts += window_1.BigInt(sample.duration);
16898 } else {
16899 sample.pts = currentDts + sample.compositionTimeOffset;
16900 currentDts += sample.duration;
16901 }
16902 });
16903 allSamples = allSamples.concat(samples);
16904 });
16905 return allSamples;
16906 };
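  // A small worked example (illustration only, never invoked): combining a
  // trun payload (same hypothetical bytes as the parseTrun sketch above) with
  // a tfdt baseMediaDecodeTime of 90000. The single sample comes out with
  // dts = 90000 and pts = dts + compositionTimeOffset (0 here).
  function exampleParseSamples() {
    var trunPayload = new Uint8Array([
      0x00, 0x00, 0x03, 0x01, // version 0, flags: duration, size, data offset
      0x00, 0x00, 0x00, 0x01, // sample count = 1
      0x00, 0x00, 0x00, 0x10, // data offset = 16
      0x00, 0x00, 0x0b, 0xb8, // sample duration = 3000
      0x00, 0x00, 0x04, 0x00  // sample size = 1024
    ]);
    return parseSamples([trunPayload], 90000, {
      trackId: 1
    });
    // -> [{ trackId: 1, dts: 90000, pts: 90000, duration: 3000, size: 1024, compositionTimeOffset: 0 }]
  }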
16907 /**
16908 * Parses out caption nals from an FMP4 segment's video tracks.
16909 *
16910 * @param {Uint8Array} segment - The bytes of a single segment
16911 * @param {Number} videoTrackId - The trackId of a video track in the segment
16912 * @return {Object.<Number, Object[]>} A mapping of video trackId to
16913 * a list of seiNals found in that track
16914 **/
16915
16916
16917 var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
16918 // To get the samples
16919 var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
16920
16921 var mdats = findBox_1(segment, ['mdat']);
16922 var captionNals = {};
16923 var mdatTrafPairs = []; // Pair up each traf with an mdat, as moofs and mdats come in pairs
16924
16925 mdats.forEach(function (mdat, index) {
16926 var matchingTraf = trafs[index];
16927 mdatTrafPairs.push({
16928 mdat: mdat,
16929 traf: matchingTraf
16930 });
16931 });
16932 mdatTrafPairs.forEach(function (pair) {
16933 var mdat = pair.mdat;
16934 var traf = pair.traf;
16935 var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
16936
16937 var headerInfo = parseTfhd(tfhd[0]);
16938 var trackId = headerInfo.trackId;
16939 var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
16940
16941 var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
16942 var truns = findBox_1(traf, ['trun']);
16943 var samples;
16944 var result; // Only parse video data for the chosen video track
16945
16946 if (videoTrackId === trackId && truns.length > 0) {
16947 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
16948 result = findSeiNals(mdat, samples, trackId);
16949
16950 if (!captionNals[trackId]) {
16951 captionNals[trackId] = {
16952 seiNals: [],
16953 logs: []
16954 };
16955 }
16956
16957 captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
16958 captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
16959 }
16960 });
16961 return captionNals;
16962 };
16963 /**
16964 * Parses out inband captions from an MP4 container and returns
16965 * caption objects that can be used by WebVTT and the TextTrack API.
16966 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
16967 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
16968 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
16969 *
16970 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
16971 * @param {Number} trackId - The id of the video track to parse
16972 * @param {Number} timescale - The timescale for the video track from the init segment
16973 *
16974 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
16975 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
16976 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
16977 * @return {String} parsedCaptions[].text - The visible content of the caption
16978 **/
16979
16980
16981 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
16982 var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
16983
16984 if (trackId === null) {
16985 return null;
16986 }
16987
16988 captionNals = parseCaptionNals(segment, trackId);
16989 var trackNals = captionNals[trackId] || {};
16990 return {
16991 seiNals: trackNals.seiNals,
16992 logs: trackNals.logs,
16993 timescale: timescale
16994 };
16995 };
16996 /**
16997 * Converts SEI NALUs into captions that can be used by video.js
16998 **/
16999
17000
17001 var CaptionParser = function CaptionParser() {
17002 var isInitialized = false;
17003 var captionStream; // Stores segments seen before trackId and timescale are set
17004
17005 var segmentCache; // Stores video track ID of the track being parsed
17006
17007 var trackId; // Stores the timescale of the track being parsed
17008
17009 var timescale; // Stores captions parsed so far
17010
17011 var parsedCaptions; // Stores whether we are receiving partial data or not
17012
17013 var parsingPartial;
17014 /**
17015 * A method to indicate whether a CaptionParser has been initialized
17016 * @returns {Boolean}
17017 **/
17018
17019 this.isInitialized = function () {
17020 return isInitialized;
17021 };
17022 /**
17023 * Initializes the underlying CaptionStream, SEI NAL parsing
17024 * and management, and caption collection
17025 **/
17026
17027
17028 this.init = function (options) {
17029 captionStream = new CaptionStream();
17030 isInitialized = true;
17031 parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
17032
17033 captionStream.on('data', function (event) {
17034 // Convert to seconds in the source's timescale
17035 event.startTime = event.startPts / timescale;
17036 event.endTime = event.endPts / timescale;
17037 parsedCaptions.captions.push(event);
17038 parsedCaptions.captionStreams[event.stream] = true;
17039 });
17040 captionStream.on('log', function (log) {
17041 parsedCaptions.logs.push(log);
17042 });
17043 };
17044 /**
17045 * Determines if a new video track will be selected
17046 * or if the timescale changed
17047 * @return {Boolean}
17048 **/
17049
17050
17051 this.isNewInit = function (videoTrackIds, timescales) {
17052 if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
17053 return false;
17054 }
17055
17056 return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
17057 };
17058 /**
17059 * Parses out SEI captions and interacts with underlying
17060 * CaptionStream to return dispatched captions
17061 *
17062 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
17063 * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
17064 * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
17065 * @see parseEmbeddedCaptions
17066 * @see m2ts/caption-stream.js
17067 **/
17068
17069
17070 this.parse = function (segment, videoTrackIds, timescales) {
17071 var parsedData;
17072
17073 if (!this.isInitialized()) {
17074 return null; // This is not likely to be a video segment
17075 } else if (!videoTrackIds || !timescales) {
17076 return null;
17077 } else if (this.isNewInit(videoTrackIds, timescales)) {
17078 // Use the first video track only as there is no
17079 // mechanism to switch to other video tracks
17080 trackId = videoTrackIds[0];
17081 timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
17082 // data until we have one.
17083 // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
17084 } else if (trackId === null || !timescale) {
17085 segmentCache.push(segment);
17086 return null;
17087 } // Now that a timescale and trackId is set, parse cached segments
17088
17089
17090 while (segmentCache.length > 0) {
17091 var cachedSegment = segmentCache.shift();
17092 this.parse(cachedSegment, videoTrackIds, timescales);
17093 }
17094
17095 parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
17096
17097 if (parsedData && parsedData.logs) {
17098 parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
17099 }
17100
17101 if (parsedData === null || !parsedData.seiNals) {
17102 if (parsedCaptions.logs.length) {
17103 return {
17104 logs: parsedCaptions.logs,
17105 captions: [],
17106 captionStreams: []
17107 };
17108 }
17109
17110 return null;
17111 }
17112
17113 this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
17114
17115 this.flushStream();
17116 return parsedCaptions;
17117 };
17118 /**
17119 * Pushes SEI NALUs onto CaptionStream
17120 * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
17121 * Assumes that `parseCaptionNals` has been called first
17122 * @see m2ts/caption-stream.js
17123 **/
17124
17125
17126 this.pushNals = function (nals) {
17127 if (!this.isInitialized() || !nals || nals.length === 0) {
17128 return null;
17129 }
17130
17131 nals.forEach(function (nal) {
17132 captionStream.push(nal);
17133 });
17134 };
17135 /**
17136 * Flushes underlying CaptionStream to dispatch processed, displayable captions
17137 * @see m2ts/caption-stream.js
17138 **/
17139
17140
17141 this.flushStream = function () {
17142 if (!this.isInitialized()) {
17143 return null;
17144 }
17145
17146 if (!parsingPartial) {
17147 captionStream.flush();
17148 } else {
17149 captionStream.partialFlush();
17150 }
17151 };
17152 /**
17153 * Reset caption buckets for new data
17154 **/
17155
17156
17157 this.clearParsedCaptions = function () {
17158 parsedCaptions.captions = [];
17159 parsedCaptions.captionStreams = {};
17160 parsedCaptions.logs = [];
17161 };
17162 /**
17163 * Resets underlying CaptionStream
17164 * @see m2ts/caption-stream.js
17165 **/
17166
17167
17168 this.resetCaptionStream = function () {
17169 if (!this.isInitialized()) {
17170 return null;
17171 }
17172
17173 captionStream.reset();
17174 };
17175 /**
17176 * Convenience method to clear all captions flushed from the
17177 * CaptionStream and still being parsed
17178 * @see m2ts/caption-stream.js
17179 **/
17180
17181
17182 this.clearAllCaptions = function () {
17183 this.clearParsedCaptions();
17184 this.resetCaptionStream();
17185 };
17186 /**
17187 * Reset caption parser
17188 **/
17189
17190
17191 this.reset = function () {
17192 segmentCache = [];
17193 trackId = null;
17194 timescale = null;
17195
17196 if (!parsedCaptions) {
17197 parsedCaptions = {
17198 captions: [],
17199 // CC1, CC2, CC3, CC4
17200 captionStreams: {},
17201 logs: []
17202 };
17203 } else {
17204 this.clearParsedCaptions();
17205 }
17206
17207 this.resetCaptionStream();
17208 };
17209
17210 this.reset();
17211 };
17212
17213 var captionParser = CaptionParser;
17214 var toUnsigned = bin.toUnsigned;
17215 var toHexString = bin.toHexString;
17216 var getUint64 = numbers.getUint64;
17217 var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
17218 /**
17219 * Parses an MP4 initialization segment and extracts the timescale
17220 * values for any declared tracks. Timescale values indicate the
17221 * number of clock ticks per second to assume for time-based values
17222 * elsewhere in the MP4.
17223 *
17224 * To determine the start time of an MP4, you need two pieces of
17225 * information: the timescale unit and the earliest base media decode
17226 * time. Multiple timescales can be specified within an MP4 but the
17227 * base media decode time is always expressed in the timescale from
17228 * the media header box for the track:
17229 * ```
17230 * moov > trak > mdia > mdhd.timescale
17231 * ```
17232 * @param init {Uint8Array} the bytes of the init segment
17233 * @return {object} a hash of track ids to timescale values or null if
17234 * the init segment is malformed.
17235 */
17236
17237 timescale = function timescale(init) {
17238 var result = {},
17239 traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale
17240
17241 return traks.reduce(function (result, trak) {
17242 var tkhd, version, index, id, mdhd;
17243 tkhd = findBox_1(trak, ['tkhd'])[0];
17244
17245 if (!tkhd) {
17246 return null;
17247 }
17248
17249 version = tkhd[0];
17250 index = version === 0 ? 12 : 20;
17251 id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
17252 mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
17253
17254 if (!mdhd) {
17255 return null;
17256 }
17257
17258 version = mdhd[0];
17259 index = version === 0 ? 12 : 20;
17260 result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
17261 return result;
17262 }, result);
17263 };
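  // Example (a minimal sketch with hypothetical values; `initSegmentBytes` is
  // assumed to be a Uint8Array of an init segment with two tracks):
  //
  //   timescale(initSegmentBytes);
  //   // => { 1: 90000, 2: 48000 }  (track id -> clock ticks per second)
  //
  // A trak missing its tkhd or mdhd box makes the reducer return null instead.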
17264 /**
17265 * Determine the base media decode start time, in seconds, for an MP4
17266 * fragment. If multiple fragments are specified, the earliest time is
17267 * returned.
17268 *
17269 * The base media decode time can be parsed from track fragment
17270 * metadata:
17271 * ```
17272 * moof > traf > tfdt.baseMediaDecodeTime
17273 * ```
17274 * It requires the timescale value from the mdhd to interpret.
17275 *
17276 * @param timescale {object} a hash of track ids to timescale values.
17277 * @return {number} the earliest base media decode start time for the
17278 * fragment, in seconds
17279 */
17280
17281
17282 startTime = function startTime(timescale, fragment) {
17283 var trafs; // we need info from two children of each track fragment box
17284
17285 trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track
17286
17287 var lowestTime = trafs.reduce(function (acc, traf) {
17288 var tfhd = findBox_1(traf, ['tfhd'])[0]; // get the track id from the tfhd
17289
17290 var id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified
17291
17292 var scale = timescale[id] || 90e3; // get the base media decode time from the tfdt
17293
17294 var tfdt = findBox_1(traf, ['tfdt'])[0];
17295 var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);
17296 var baseTime; // version 1 is 64 bit
17297
17298 if (tfdt[0] === 1) {
17299 baseTime = getUint64(tfdt.subarray(4, 12));
17300 } else {
17301 baseTime = dv.getUint32(4);
17302 } // convert base time to seconds if it is a valid number.
17303
17304
17305 var seconds;
17306
17307 if (typeof baseTime === 'bigint') {
17308 seconds = baseTime / window_1.BigInt(scale);
17309 } else if (typeof baseTime === 'number' && !isNaN(baseTime)) {
17310 seconds = baseTime / scale;
17311 }
17312
17313 if (seconds < Number.MAX_SAFE_INTEGER) {
17314 seconds = Number(seconds);
17315 }
17316
17317 if (seconds < acc) {
17318 acc = seconds;
17319 }
17320
17321 return acc;
17322 }, Infinity);
17323 return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;
17324 };
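  // Worked example (hypothetical values): with a version-0 tfdt carrying a
  // baseMediaDecodeTime of 900000 and an mdhd timescale of 90000 for track 1,
  //
  //   startTime({ 1: 90000 }, fragmentBytes); // => 900000 / 90000 = 10 seconds
  //
  // When no timescale is known for a track id, a 90kHz clock is assumed.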
17325 /**
17326 * Determine the composition start, in seconds, for an MP4
17327 * fragment.
17328 *
17329 * The composition start time of a fragment can be calculated using the base
17330 * media decode time, composition time offset, and timescale, as follows:
17331 *
17332 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
17333 *
17334 * All of the aforementioned information is contained within a media fragment's
17335 * `traf` box, except for timescale info, which comes from the initialization
17336 * segment, so a track id (also contained within a `traf`) is also necessary to
17337 * associate it with a timescale
17338 *
17339 *
17340 * @param timescales {object} - a hash of track ids to timescale values.
17341 * @param fragment {Uint8Array} - the bytes of a media segment
17342 * @return {number} the composition start time for the fragment, in seconds
17343 **/
17344
17345
17346 compositionStartTime = function compositionStartTime(timescales, fragment) {
17347 var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
17348 var baseMediaDecodeTime = 0;
17349 var compositionTimeOffset = 0;
17350 var trackId;
17351
17352 if (trafBoxes && trafBoxes.length) {
17353 // The spec states that track run samples contained within a `traf` box are contiguous, but
17354 // it does not explicitly state whether the `traf` boxes themselves are contiguous.
17355 // We will assume that they are, so we only need the first to calculate start time.
17356 var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
17357 var trun = findBox_1(trafBoxes[0], ['trun'])[0];
17358 var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];
17359
17360 if (tfhd) {
17361 var parsedTfhd = parseTfhd(tfhd);
17362 trackId = parsedTfhd.trackId;
17363 }
17364
17365 if (tfdt) {
17366 var parsedTfdt = parseTfdt(tfdt);
17367 baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
17368 }
17369
17370 if (trun) {
17371 var parsedTrun = parseTrun(trun);
17372
17373 if (parsedTrun.samples && parsedTrun.samples.length) {
17374 compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
17375 }
17376 }
17377 } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
17378 // specified.
17379
17380
17381 var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds
17382
17383 if (typeof baseMediaDecodeTime === 'bigint') {
17384 compositionTimeOffset = window_1.BigInt(compositionTimeOffset);
17385 timescale = window_1.BigInt(timescale);
17386 }
17387
17388 var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;
17389
17390 if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {
17391 result = Number(result);
17392 }
17393
17394 return result;
17395 };
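  // Worked example (hypothetical values): a baseMediaDecodeTime of 900000, a
  // first-sample compositionTimeOffset of 3003, and a 90000 timescale give
  //
  //   (900000 + 3003) / 90000 // => ~10.0334 seconds
  //
  // When parseTfdt yields a BigInt, the offset and timescale are coerced to
  // BigInt as well before the division.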
17396 /**
17397 * Find the trackIds of the video tracks in this source.
17398 * Found by parsing the Handler Reference and Track Header Boxes:
17399 * moov > trak > mdia > hdlr
17400 * moov > trak > tkhd
17401 *
17402 * @param {Uint8Array} init - The bytes of the init segment for this source
17403 * @return {Number[]} A list of trackIds
17404 *
17405 * @see ISO-BMFF-12/2015, Section 8.4.3
17406 **/
17407
17408
17409 getVideoTrackIds = function getVideoTrackIds(init) {
17410 var traks = findBox_1(init, ['moov', 'trak']);
17411 var videoTrackIds = [];
17412 traks.forEach(function (trak) {
17413 var hdlrs = findBox_1(trak, ['mdia', 'hdlr']);
17414 var tkhds = findBox_1(trak, ['tkhd']);
17415 hdlrs.forEach(function (hdlr, index) {
17416 var handlerType = parseType_1(hdlr.subarray(8, 12));
17417 var tkhd = tkhds[index];
17418 var view;
17419 var version;
17420 var trackId;
17421
17422 if (handlerType === 'vide') {
17423 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
17424 version = view.getUint8(0);
17425 trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
17426 videoTrackIds.push(trackId);
17427 }
17428 });
17429 });
17430 return videoTrackIds;
17431 };
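  // Usage sketch (`initSegmentBytes` is a hypothetical Uint8Array of an init
  // segment whose only trak has a 'vide' handler):
  //
  //   getVideoTrackIds(initSegmentBytes); // => [1]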
17432
17433 getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
17434 // mdhd is a FullBox, meaning it will have its own version as the first byte
17435 var version = mdhd[0];
17436 var index = version === 0 ? 12 : 20;
17437 return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
17438 };
17439 /**
17440 * Get all the video, audio, and hint tracks from a non fragmented
17441 * mp4 segment
17442 */
17443
17444
17445 getTracks = function getTracks(init) {
17446 var traks = findBox_1(init, ['moov', 'trak']);
17447 var tracks = [];
17448 traks.forEach(function (trak) {
17449 var track = {};
17450 var tkhd = findBox_1(trak, ['tkhd'])[0];
17451 var view, tkhdVersion; // id
17452
17453 if (tkhd) {
17454 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
17455 tkhdVersion = view.getUint8(0);
17456 track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
17457 }
17458
17459 var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type
17460
17461 if (hdlr) {
17462 var type = parseType_1(hdlr.subarray(8, 12));
17463
17464 if (type === 'vide') {
17465 track.type = 'video';
17466 } else if (type === 'soun') {
17467 track.type = 'audio';
17468 } else {
17469 track.type = type;
17470 }
17471 } // codec
17472
17473
17474 var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
17475
17476 if (stsd) {
17477 var sampleDescriptions = stsd.subarray(8); // gives the codec type string
17478
17479 track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
17480 var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
17481 var codecConfig, codecConfigType;
17482
17483 if (codecBox) {
17484 // https://tools.ietf.org/html/rfc6381#section-3.3
17485 if (/^[asm]vc[1-9]$/i.test(track.codec)) {
17486 // we don't need anything but the "config" parameter of the
17487 // avc1 codecBox
17488 codecConfig = codecBox.subarray(78);
17489 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
17490
17491 if (codecConfigType === 'avcC' && codecConfig.length > 11) {
17492 track.codec += '.'; // left padded with zeroes for single digit hex
17493 // profile idc
17494
17495 track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags
17496
17497 track.codec += toHexString(codecConfig[10]); // level idc
17498
17499 track.codec += toHexString(codecConfig[11]);
17500 } else {
17501 // TODO: show a warning that we couldn't parse the codec
17502 // and are using the default
17503 track.codec = 'avc1.4d400d';
17504 }
17505 } else if (/^mp4[a,v]$/i.test(track.codec)) {
17506 // we do not need anything but the streamDescriptor of the mp4a codecBox
17507 codecConfig = codecBox.subarray(28);
17508 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
17509
17510 if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
17511 track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit
17512
17513 track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
17514 } else {
17515 // TODO: show a warning that we couldn't parse the codec
17516 // and are using the default
17517 track.codec = 'mp4a.40.2';
17518 }
17519 } else {
17520 // flac, opus, etc
17521 track.codec = track.codec.toLowerCase();
17522 }
17523 }
17524 }
17525
17526 var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
17527
17528 if (mdhd) {
17529 track.timescale = getTimescaleFromMediaHeader(mdhd);
17530 }
17531
17532 tracks.push(track);
17533 });
17534 return tracks;
17535 };
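  // Sketch of the returned track shape (hypothetical values):
  //
  //   getTracks(initSegmentBytes);
  //   // => [{ id: 1, type: 'video', codec: 'avc1.4d400d', timescale: 90000 },
  //   //     { id: 2, type: 'audio', codec: 'mp4a.40.2', timescale: 48000 }]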
17536
17537 var probe$2 = {
17538 // export mp4 inspector's findBox and parseType for backwards compatibility
17539 findBox: findBox_1,
17540 parseType: parseType_1,
17541 timescale: timescale,
17542 startTime: startTime,
17543 compositionStartTime: compositionStartTime,
17544 videoTrackIds: getVideoTrackIds,
17545 tracks: getTracks,
17546 getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
17547 };
17548
17549 var parsePid = function parsePid(packet) {
17550 var pid = packet[1] & 0x1f;
17551 pid <<= 8;
17552 pid |= packet[2];
17553 return pid;
17554 };
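  // Worked example: for a TS packet beginning 0x47 0x41 0x00, the 13-bit PID is
  //
  //   ((0x41 & 0x1f) << 8) | 0x00 // => 0x100 (256)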
17555
17556 var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
17557 return !!(packet[1] & 0x40);
17558 };
17559
17560 var parseAdaptionField = function parseAdaptionField(packet) {
17561 var offset = 0; // if an adaptation field is present, its length is specified by the
17562 // fifth byte of the TS packet header. The adaptation field is
17563 // used to add stuffing to PES packets that don't fill a complete
17564 // TS packet, and to specify some forms of timing and control data
17565 // that we do not currently use.
17566
17567 if ((packet[3] & 0x30) >>> 4 > 0x01) {
17568 offset += packet[4] + 1;
17569 }
17570
17571 return offset;
17572 };
17573
17574 var parseType = function parseType(packet, pmtPid) {
17575 var pid = parsePid(packet);
17576
17577 if (pid === 0) {
17578 return 'pat';
17579 } else if (pid === pmtPid) {
17580 return 'pmt';
17581 } else if (pmtPid) {
17582 return 'pes';
17583 }
17584
17585 return null;
17586 };
17587
17588 var parsePat = function parsePat(packet) {
17589 var pusi = parsePayloadUnitStartIndicator(packet);
17590 var offset = 4 + parseAdaptionField(packet);
17591
17592 if (pusi) {
17593 offset += packet[offset] + 1;
17594 }
17595
17596 return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
17597 };
17598
17599 var parsePmt = function parsePmt(packet) {
17600 var programMapTable = {};
17601 var pusi = parsePayloadUnitStartIndicator(packet);
17602 var payloadOffset = 4 + parseAdaptionField(packet);
17603
17604 if (pusi) {
17605 payloadOffset += packet[payloadOffset] + 1;
17606 } // PMTs can be sent ahead of the time when they should actually
17607 // take effect. We don't believe this should ever be the case
17608 // for HLS but we'll ignore "forward" PMT declarations if we see
17609 // them. Future PMT declarations have the current_next_indicator
17610 // set to zero.
17611
17612
17613 if (!(packet[payloadOffset + 5] & 0x01)) {
17614 return;
17615 }
17616
17617 var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section
17618
17619 sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
17620 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
17621 // long the program info descriptors are
17622
17623 programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table
17624
17625 var offset = 12 + programInfoLength;
17626
17627 while (offset < tableEnd) {
17628 var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type
17629
17630 programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
17631 // skip past the elementary stream descriptors, if present
17632
17633 offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
17634 }
17635
17636 return programMapTable;
17637 };
17638
17639 var parsePesType = function parsePesType(packet, programMapTable) {
17640 var pid = parsePid(packet);
17641 var type = programMapTable[pid];
17642
17643 switch (type) {
17644 case streamTypes.H264_STREAM_TYPE:
17645 return 'video';
17646
17647 case streamTypes.ADTS_STREAM_TYPE:
17648 return 'audio';
17649
17650 case streamTypes.METADATA_STREAM_TYPE:
17651 return 'timed-metadata';
17652
17653 default:
17654 return null;
17655 }
17656 };
17657
17658 var parsePesTime = function parsePesTime(packet) {
17659 var pusi = parsePayloadUnitStartIndicator(packet);
17660
17661 if (!pusi) {
17662 return null;
17663 }
17664
17665 var offset = 4 + parseAdaptionField(packet);
17666
17667 if (offset >= packet.byteLength) {
17668 // From the H 222.0 MPEG-TS spec
17669 // "For transport stream packets carrying PES packets, stuffing is needed when there
17670 // is insufficient PES packet data to completely fill the transport stream packet
17671 // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
17672 // the sum of the lengths of the data elements in it, so that the payload bytes
17673 // remaining after the adaptation field exactly accommodates the available PES packet
17674 // data."
17675 //
17676 // If the offset is >= the length of the packet, then the packet contains no data
17677 // and instead is just adaptation field stuffing bytes
17678 return null;
17679 }
17680
17681 var pes = null;
17682 var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
17683 // and a DTS value. Determine what combination of values is
17684 // available to work with.
17685
17686 ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
17687 // performs all bitwise operations on 32-bit integers but JavaScript
17688 // supports a much greater range (52 bits) of integers using standard
17689 // mathematical operations.
17690 // We construct a 31-bit value using bitwise operators over the 31
17691 // most significant bits and then multiply by 4 (equal to a left-shift
17692 // of 2) before we add the final 2 least significant bits of the
17693 // timestamp (equal to an OR.)
17694
17695 if (ptsDtsFlags & 0xC0) {
17696 pes = {}; // the PTS and DTS are not written out directly. For information
17697 // on how they are encoded, see
17698 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
17699
17700 pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
17701 pes.pts *= 4; // Left shift by 2
17702
17703 pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
17704
17705 pes.dts = pes.pts;
17706
17707 if (ptsDtsFlags & 0x40) {
17708 pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
17709 pes.dts *= 4; // Left shift by 2
17710
17711 pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
17712 }
17713 }
17714
17715 return pes;
17716 };
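  // Sketch of the returned shape (hypothetical values): for a PES header whose
  // PTS decodes to 900000 ticks of the 90kHz clock and whose DTS flag (0x40) is
  // unset,
  //
  //   parsePesTime(packet); // => { pts: 900000, dts: 900000 }
  //
  // i.e. the DTS is copied from the PTS, and dividing by 90000 gives 10 seconds.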
17717
17718 var parseNalUnitType = function parseNalUnitType(type) {
17719 switch (type) {
17720 case 0x05:
17721 return 'slice_layer_without_partitioning_rbsp_idr';
17722
17723 case 0x06:
17724 return 'sei_rbsp';
17725
17726 case 0x07:
17727 return 'seq_parameter_set_rbsp';
17728
17729 case 0x08:
17730 return 'pic_parameter_set_rbsp';
17731
17732 case 0x09:
17733 return 'access_unit_delimiter_rbsp';
17734
17735 default:
17736 return null;
17737 }
17738 };
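  // Worked example: an IDR slice NAL with header byte 0x65 masks to
  //
  //   parseNalUnitType(0x65 & 0x1f); // => 'slice_layer_without_partitioning_rbsp_idr'
  //
  // which is the keyframe signal used by videoPacketContainsKeyFrame below.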
17739
17740 var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
17741 var offset = 4 + parseAdaptionField(packet);
17742 var frameBuffer = packet.subarray(offset);
17743 var frameI = 0;
17744 var frameSyncPoint = 0;
17745 var foundKeyFrame = false;
17746 var nalType; // advance the sync point to a NAL start, if necessary
17747
17748 for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
17749 if (frameBuffer[frameSyncPoint + 2] === 1) {
17750 // the sync point is properly aligned
17751 frameI = frameSyncPoint + 5;
17752 break;
17753 }
17754 }
17755
17756 while (frameI < frameBuffer.byteLength) {
17757 // look at the current byte to determine if we've hit the end of
17758 // a NAL unit boundary
17759 switch (frameBuffer[frameI]) {
17760 case 0:
17761 // skip past non-sync sequences
17762 if (frameBuffer[frameI - 1] !== 0) {
17763 frameI += 2;
17764 break;
17765 } else if (frameBuffer[frameI - 2] !== 0) {
17766 frameI++;
17767 break;
17768 }
17769
17770 if (frameSyncPoint + 3 !== frameI - 2) {
17771 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17772
17773 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17774 foundKeyFrame = true;
17775 }
17776 } // drop trailing zeroes
17777
17778
17779 do {
17780 frameI++;
17781 } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
17782
17783 frameSyncPoint = frameI - 2;
17784 frameI += 3;
17785 break;
17786
17787 case 1:
17788 // skip past non-sync sequences
17789 if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
17790 frameI += 3;
17791 break;
17792 }
17793
17794 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17795
17796 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17797 foundKeyFrame = true;
17798 }
17799
17800 frameSyncPoint = frameI - 2;
17801 frameI += 3;
17802 break;
17803
17804 default:
17805 // the current byte isn't a one or zero, so it cannot be part
17806 // of a sync sequence
17807 frameI += 3;
17808 break;
17809 }
17810 }
17811
17812 frameBuffer = frameBuffer.subarray(frameSyncPoint);
17813 frameI -= frameSyncPoint;
17814 frameSyncPoint = 0; // parse the final nal
17815
17816 if (frameBuffer && frameBuffer.byteLength > 3) {
17817 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17818
17819 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17820 foundKeyFrame = true;
17821 }
17822 }
17823
17824 return foundKeyFrame;
17825 };
17826
17827 var probe$1 = {
17828 parseType: parseType,
17829 parsePat: parsePat,
17830 parsePmt: parsePmt,
17831 parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
17832 parsePesType: parsePesType,
17833 parsePesTime: parsePesTime,
17834 videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
17835 };
17836 var handleRollover = timestampRolloverStream.handleRollover;
17837 var probe = {};
17838 probe.ts = probe$1;
17839 probe.aac = utils;
17840 var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
17841 var MP2T_PACKET_LENGTH = 188,
17842 // bytes
17843 SYNC_BYTE = 0x47;
17844 /**
17845 * walks through segment data looking for pat and pmt packets to parse out
17846 * program map table information
17847 */
17848
17849 var parsePsi_ = function parsePsi_(bytes, pmt) {
17850 var startIndex = 0,
17851 endIndex = MP2T_PACKET_LENGTH,
17852 packet,
17853 type;
17854
17855 while (endIndex < bytes.byteLength) {
17856 // Look for a pair of start and end sync bytes in the data.
17857 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
17858 // We found a packet
17859 packet = bytes.subarray(startIndex, endIndex);
17860 type = probe.ts.parseType(packet, pmt.pid);
17861
17862 switch (type) {
17863 case 'pat':
17864 pmt.pid = probe.ts.parsePat(packet);
17865 break;
17866
17867 case 'pmt':
17868 var table = probe.ts.parsePmt(packet);
17869 pmt.table = pmt.table || {};
17870 Object.keys(table).forEach(function (key) {
17871 pmt.table[key] = table[key];
17872 });
17873 break;
17874 }
17875
17876 startIndex += MP2T_PACKET_LENGTH;
17877 endIndex += MP2T_PACKET_LENGTH;
17878 continue;
17879 } // If we get here, we have somehow become de-synchronized and we need to step
17880 // forward one byte at a time until we find a pair of sync bytes that denote
17881 // a packet
17882
17883
17884 startIndex++;
17885 endIndex++;
17886 }
17887 };
17888 /**
17889 * walks through the segment data from the start and end to get timing information
17890 * for the first and last audio pes packets
17891 */
17892
17893
17894 var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
17895 var startIndex = 0,
17896 endIndex = MP2T_PACKET_LENGTH,
17897 packet,
17898 type,
17899 pesType,
17900 pusi,
17901 parsed;
17902 var endLoop = false; // Start walking from start of segment to get first audio packet
17903
17904 while (endIndex <= bytes.byteLength) {
17905 // Look for a pair of start and end sync bytes in the data.
17906 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
17907 // We found a packet
17908 packet = bytes.subarray(startIndex, endIndex);
17909 type = probe.ts.parseType(packet, pmt.pid);
17910
17911 switch (type) {
17912 case 'pes':
17913 pesType = probe.ts.parsePesType(packet, pmt.table);
17914 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
17915
17916 if (pesType === 'audio' && pusi) {
17917 parsed = probe.ts.parsePesTime(packet);
17918
17919 if (parsed) {
17920 parsed.type = 'audio';
17921 result.audio.push(parsed);
17922 endLoop = true;
17923 }
17924 }
17925
17926 break;
17927 }
17928
17929 if (endLoop) {
17930 break;
17931 }
17932
17933 startIndex += MP2T_PACKET_LENGTH;
17934 endIndex += MP2T_PACKET_LENGTH;
17935 continue;
17936 } // If we get here, we have somehow become de-synchronized and we need to step
17937 // forward one byte at a time until we find a pair of sync bytes that denote
17938 // a packet
17939
17940
17941 startIndex++;
17942 endIndex++;
17943 } // Start walking from end of segment to get last audio packet
17944
17945
17946 endIndex = bytes.byteLength;
17947 startIndex = endIndex - MP2T_PACKET_LENGTH;
17948 endLoop = false;
17949
17950 while (startIndex >= 0) {
17951 // Look for a pair of start and end sync bytes in the data.
17952 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
17953 // We found a packet
17954 packet = bytes.subarray(startIndex, endIndex);
17955 type = probe.ts.parseType(packet, pmt.pid);
17956
17957 switch (type) {
17958 case 'pes':
17959 pesType = probe.ts.parsePesType(packet, pmt.table);
17960 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
17961
17962 if (pesType === 'audio' && pusi) {
17963 parsed = probe.ts.parsePesTime(packet);
17964
17965 if (parsed) {
17966 parsed.type = 'audio';
17967 result.audio.push(parsed);
17968 endLoop = true;
17969 }
17970 }
17971
17972 break;
17973 }
17974
17975 if (endLoop) {
17976 break;
17977 }
17978
17979 startIndex -= MP2T_PACKET_LENGTH;
17980 endIndex -= MP2T_PACKET_LENGTH;
17981 continue;
17982 } // If we get here, we have somehow become de-synchronized and we need to step
17983 // backward one byte at a time until we find a pair of sync bytes that denote
17984 // a packet
17985
17986
17987 startIndex--;
17988 endIndex--;
17989 }
17990 };
17991 /**
17992 * walks through the segment data from the start and end to get timing information
17993 * for the first and last video pes packets as well as timing information for the first
17994 * key frame.
17995 */
17996
17997
17998 var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
17999 var startIndex = 0,
18000 endIndex = MP2T_PACKET_LENGTH,
18001 packet,
18002 type,
18003 pesType,
18004 pusi,
18005 parsed,
18006 frame,
18007 i,
18008 pes;
18009 var endLoop = false;
18010 var currentFrame = {
18011 data: [],
18012 size: 0
18013 }; // Start walking from start of segment to get first video packet
18014
18015 while (endIndex < bytes.byteLength) {
18016 // Look for a pair of start and end sync bytes in the data.
18017 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
18018 // We found a packet
18019 packet = bytes.subarray(startIndex, endIndex);
18020 type = probe.ts.parseType(packet, pmt.pid);
18021
18022 switch (type) {
18023 case 'pes':
18024 pesType = probe.ts.parsePesType(packet, pmt.table);
18025 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
18026
18027 if (pesType === 'video') {
18028 if (pusi && !endLoop) {
18029 parsed = probe.ts.parsePesTime(packet);
18030
18031 if (parsed) {
18032 parsed.type = 'video';
18033 result.video.push(parsed);
18034 endLoop = true;
18035 }
18036 }
18037
18038 if (!result.firstKeyFrame) {
18039 if (pusi) {
18040 if (currentFrame.size !== 0) {
18041 frame = new Uint8Array(currentFrame.size);
18042 i = 0;
18043
18044 while (currentFrame.data.length) {
18045 pes = currentFrame.data.shift();
18046 frame.set(pes, i);
18047 i += pes.byteLength;
18048 }
18049
18050 if (probe.ts.videoPacketContainsKeyFrame(frame)) {
18051 var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
18052 // the keyframe seems to work fine with HLS playback
18053 // and is definitely preferable to a crash with a TypeError...
18054
18055 if (firstKeyFrame) {
18056 result.firstKeyFrame = firstKeyFrame;
18057 result.firstKeyFrame.type = 'video';
18058 } else {
18059 // eslint-disable-next-line
18060 console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
18061 }
18062 }
18063
18064 currentFrame.size = 0;
18065 }
18066 }
18067
18068 currentFrame.data.push(packet);
18069 currentFrame.size += packet.byteLength;
18070 }
18071 }
18072
18073 break;
18074 }
18075
18076 if (endLoop && result.firstKeyFrame) {
18077 break;
18078 }
18079
18080 startIndex += MP2T_PACKET_LENGTH;
18081 endIndex += MP2T_PACKET_LENGTH;
18082 continue;
18083 } // If we get here, we have somehow become de-synchronized and we need to step
18084 // forward one byte at a time until we find a pair of sync bytes that denote
18085 // a packet
18086
18087
18088 startIndex++;
18089 endIndex++;
18090 } // Start walking from end of segment to get last video packet
18091
18092
18093 endIndex = bytes.byteLength;
18094 startIndex = endIndex - MP2T_PACKET_LENGTH;
18095 endLoop = false;
18096
18097 while (startIndex >= 0) {
18098 // Look for a pair of start and end sync bytes in the data.
18099 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
18100 // We found a packet
18101 packet = bytes.subarray(startIndex, endIndex);
18102 type = probe.ts.parseType(packet, pmt.pid);
18103
18104 switch (type) {
18105 case 'pes':
18106 pesType = probe.ts.parsePesType(packet, pmt.table);
18107 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
18108
18109 if (pesType === 'video' && pusi) {
18110 parsed = probe.ts.parsePesTime(packet);
18111
18112 if (parsed) {
18113 parsed.type = 'video';
18114 result.video.push(parsed);
18115 endLoop = true;
18116 }
18117 }
18118
18119 break;
18120 }
18121
18122 if (endLoop) {
18123 break;
18124 }
18125
18126 startIndex -= MP2T_PACKET_LENGTH;
18127 endIndex -= MP2T_PACKET_LENGTH;
18128 continue;
18129 } // If we get here, we have somehow become de-synchronized and we need to step
18130 // backward one byte at a time until we find a pair of sync bytes that denote
18131 // a packet
18132
18133
18134 startIndex--;
18135 endIndex--;
18136 }
18137 };
18138 /**
18139 * Adjusts the timestamp information for the segment to account for
18140 * rollover and converts to seconds based on the PES packet timescale (90kHz clock)
18141 */
18142
18143
18144 var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
18145 if (segmentInfo.audio && segmentInfo.audio.length) {
18146 var audioBaseTimestamp = baseTimestamp;
18147
18148 if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
18149 audioBaseTimestamp = segmentInfo.audio[0].dts;
18150 }
18151
18152 segmentInfo.audio.forEach(function (info) {
18153 info.dts = handleRollover(info.dts, audioBaseTimestamp);
18154 info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
18155
18156 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
18157 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
18158 });
18159 }
18160
18161 if (segmentInfo.video && segmentInfo.video.length) {
18162 var videoBaseTimestamp = baseTimestamp;
18163
18164 if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
18165 videoBaseTimestamp = segmentInfo.video[0].dts;
18166 }
18167
18168 segmentInfo.video.forEach(function (info) {
18169 info.dts = handleRollover(info.dts, videoBaseTimestamp);
18170 info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
18171
18172 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
18173 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
18174 });
18175
18176 if (segmentInfo.firstKeyFrame) {
18177 var frame = segmentInfo.firstKeyFrame;
18178 frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
18179 frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
18180
18181 frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
18182 frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
18183 }
18184 }
18185 };
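  // Example (hypothetical values): after handleRollover corrects any wrap of
  // the 33-bit timestamp counter, a dts of 900000 becomes
  //
  //   info.dtsTime === 900000 / ONE_SECOND_IN_TS; // => 10 seconds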
18186 /**
18187 * inspects the aac data stream for start and end time information
18188 */
18189
18190
18191 var inspectAac_ = function inspectAac_(bytes) {
18192 var endLoop = false,
18193 audioCount = 0,
18194 sampleRate = null,
18195 timestamp = null,
18196 frameSize = 0,
18197 byteIndex = 0,
18198 packet;
18199
18200 while (bytes.length - byteIndex >= 3) {
18201 var type = probe.aac.parseType(bytes, byteIndex);
18202
18203 switch (type) {
18204 case 'timed-metadata':
18205 // Exit early because we don't have enough to parse
18206 // the ID3 tag header
18207 if (bytes.length - byteIndex < 10) {
18208 endLoop = true;
18209 break;
18210 }
18211
18212 frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
18213 // to emit a full packet
18214
18215 if (frameSize > bytes.length) {
18216 endLoop = true;
18217 break;
18218 }
18219
18220 if (timestamp === null) {
18221 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
18222 timestamp = probe.aac.parseAacTimestamp(packet);
18223 }
18224
18225 byteIndex += frameSize;
18226 break;
18227
18228 case 'audio':
18229 // Exit early because we don't have enough to parse
18230 // the ADTS frame header
18231 if (bytes.length - byteIndex < 7) {
18232 endLoop = true;
18233 break;
18234 }
18235
18236 frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
18237 // to emit a full packet
18238
18239 if (frameSize > bytes.length) {
18240 endLoop = true;
18241 break;
18242 }
18243
18244 if (sampleRate === null) {
18245 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
18246 sampleRate = probe.aac.parseSampleRate(packet);
18247 }
18248
18249 audioCount++;
18250 byteIndex += frameSize;
18251 break;
18252
18253 default:
18254 byteIndex++;
18255 break;
18256 }
18257
18258 if (endLoop) {
18259 return null;
18260 }
18261 }
18262
18263 if (sampleRate === null || timestamp === null) {
18264 return null;
18265 }
18266
18267 var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
18268 var result = {
18269 audio: [{
18270 type: 'audio',
18271 dts: timestamp,
18272 pts: timestamp
18273 }, {
18274 type: 'audio',
18275 dts: timestamp + audioCount * 1024 * audioTimescale,
18276 pts: timestamp + audioCount * 1024 * audioTimescale
18277 }]
18278 };
18279 return result;
18280 };
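  // Worked example (hypothetical values): at a 44100 Hz sample rate, each
  // 1024-sample ADTS frame spans 1024 * (90000 / 44100) ~= 2090 ticks, so with
  // a first timestamp of 0 and audioCount === 100 the end entry is roughly
  //
  //   { type: 'audio', dts: 208980, pts: 208980 } // ~2.32 seconds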
18281 /**
18282 * inspects the transport stream segment data for start and end time information
18283 * of the audio and video tracks (when present) as well as the first key frame's
18284 * start time.
18285 */
18286
18287
18288 var inspectTs_ = function inspectTs_(bytes) {
18289 var pmt = {
18290 pid: null,
18291 table: null
18292 };
18293 var result = {};
18294 parsePsi_(bytes, pmt);
18295
18296 for (var pid in pmt.table) {
18297 if (pmt.table.hasOwnProperty(pid)) {
18298 var type = pmt.table[pid];
18299
18300 switch (type) {
18301 case streamTypes.H264_STREAM_TYPE:
18302 result.video = [];
18303 parseVideoPes_(bytes, pmt, result);
18304
18305 if (result.video.length === 0) {
18306 delete result.video;
18307 }
18308
18309 break;
18310
18311 case streamTypes.ADTS_STREAM_TYPE:
18312 result.audio = [];
18313 parseAudioPes_(bytes, pmt, result);
18314
18315 if (result.audio.length === 0) {
18316 delete result.audio;
18317 }
18318
18319 break;
18320 }
18321 }
18322 }
18323
18324 return result;
18325 };
18326 /**
18327 * Inspects segment byte data and returns an object with start and end timing information
18328 *
18329 * @param {Uint8Array} bytes The segment byte data
18330 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
18331 * timestamps for rollover. This value must be in the 90kHz clock.
18332 * @return {Object} Object containing start and end frame timing info of segment.
18333 */
18334
18335
18336 var inspect = function inspect(bytes, baseTimestamp) {
18337 var isAacData = probe.aac.isLikelyAacData(bytes);
18338 var result;
18339
18340 if (isAacData) {
18341 result = inspectAac_(bytes);
18342 } else {
18343 result = inspectTs_(bytes);
18344 }
18345
18346 if (!result || !result.audio && !result.video) {
18347 return null;
18348 }
18349
18350 adjustTimestamp_(result, baseTimestamp);
18351 return result;
18352 };
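  // Usage sketch (`segmentBytes` and `baseTimestamp` are hypothetical):
  //
  //   inspect(segmentBytes, baseTimestamp);
  //   // => { video: [first, last], audio: [first, last], firstKeyFrame: {...} }
  //
  // or null when neither an audio nor a video track could be parsed.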
18353
18354 var tsInspector = {
18355 inspect: inspect,
18356 parseAudioPes_: parseAudioPes_
18357 };
18358 /* global self */
18359
18360 /**
18361 * Re-emits transmuxer events by converting them into messages to the
18362 * world outside the worker.
18363 *
18364 * @param {Object} transmuxer the transmuxer to wire events on
18365 * @private
18366 */
18367
18368 var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
18369 transmuxer.on('data', function (segment) {
18370 // transfer ownership of the underlying ArrayBuffer
18371 // instead of doing a copy to save memory
18372 // ArrayBuffers are transferable but generic TypedArrays are not
18373 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
18374 var initArray = segment.initSegment;
18375 segment.initSegment = {
18376 data: initArray.buffer,
18377 byteOffset: initArray.byteOffset,
18378 byteLength: initArray.byteLength
18379 };
18380 var typedArray = segment.data;
18381 segment.data = typedArray.buffer;
18382 self.postMessage({
18383 action: 'data',
18384 segment: segment,
18385 byteOffset: typedArray.byteOffset,
18386 byteLength: typedArray.byteLength
18387 }, [segment.data]);
18388 });
18389 transmuxer.on('done', function (data) {
18390 self.postMessage({
18391 action: 'done'
18392 });
18393 });
18394 transmuxer.on('gopInfo', function (gopInfo) {
18395 self.postMessage({
18396 action: 'gopInfo',
18397 gopInfo: gopInfo
18398 });
18399 });
18400 transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
18401 var videoSegmentTimingInfo = {
18402 start: {
18403 decode: clock.videoTsToSeconds(timingInfo.start.dts),
18404 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
18405 },
18406 end: {
18407 decode: clock.videoTsToSeconds(timingInfo.end.dts),
18408 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
18409 },
18410 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
18411 };
18412
18413 if (timingInfo.prependedContentDuration) {
18414 videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
18415 }
18416
18417 self.postMessage({
18418 action: 'videoSegmentTimingInfo',
18419 videoSegmentTimingInfo: videoSegmentTimingInfo
18420 });
18421 });
18422 transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
18423 // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
18424 var audioSegmentTimingInfo = {
18425 start: {
18426 decode: clock.videoTsToSeconds(timingInfo.start.dts),
18427 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
18428 },
18429 end: {
18430 decode: clock.videoTsToSeconds(timingInfo.end.dts),
18431 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
18432 },
18433 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
18434 };
18435
18436 if (timingInfo.prependedContentDuration) {
18437 audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
18438 }
18439
18440 self.postMessage({
18441 action: 'audioSegmentTimingInfo',
18442 audioSegmentTimingInfo: audioSegmentTimingInfo
18443 });
18444 });
18445 transmuxer.on('id3Frame', function (id3Frame) {
18446 self.postMessage({
18447 action: 'id3Frame',
18448 id3Frame: id3Frame
18449 });
18450 });
18451 transmuxer.on('caption', function (caption) {
18452 self.postMessage({
18453 action: 'caption',
18454 caption: caption
18455 });
18456 });
18457 transmuxer.on('trackinfo', function (trackInfo) {
18458 self.postMessage({
18459 action: 'trackinfo',
18460 trackInfo: trackInfo
18461 });
18462 });
18463 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
18464 // convert to video TS since we prioritize video time over audio
18465 self.postMessage({
18466 action: 'audioTimingInfo',
18467 audioTimingInfo: {
18468 start: clock.videoTsToSeconds(audioTimingInfo.start),
18469 end: clock.videoTsToSeconds(audioTimingInfo.end)
18470 }
18471 });
18472 });
18473 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
18474 self.postMessage({
18475 action: 'videoTimingInfo',
18476 videoTimingInfo: {
18477 start: clock.videoTsToSeconds(videoTimingInfo.start),
18478 end: clock.videoTsToSeconds(videoTimingInfo.end)
18479 }
18480 });
18481 });
18482 transmuxer.on('log', function (log) {
18483 self.postMessage({
18484 action: 'log',
18485 log: log
18486 });
18487 });
18488 };
18489 /**
18490 * All incoming messages route through this hash. If no function exists
18491 * to handle an incoming message, then we ignore the message.
18492 *
18493 * @class MessageHandlers
18494 * @param {Object} options the options to initialize with
18495 */
18496
18497
18498 var MessageHandlers = /*#__PURE__*/function () {
18499 function MessageHandlers(self, options) {
18500 this.options = options || {};
18501 this.self = self;
18502 this.init();
18503 }
18504 /**
18505 * initialize our web worker and wire all the events.
18506 */
18507
18508
18509 var _proto = MessageHandlers.prototype;
18510
18511 _proto.init = function init() {
18512 if (this.transmuxer) {
18513 this.transmuxer.dispose();
18514 }
18515
18516 this.transmuxer = new transmuxer.Transmuxer(this.options);
18517 wireTransmuxerEvents(this.self, this.transmuxer);
18518 };
18519
18520 _proto.pushMp4Captions = function pushMp4Captions(data) {
18521 if (!this.captionParser) {
18522 this.captionParser = new captionParser();
18523 this.captionParser.init();
18524 }
18525
18526 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
18527 var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
18528 this.self.postMessage({
18529 action: 'mp4Captions',
18530 captions: parsed && parsed.captions || [],
18531 logs: parsed && parsed.logs || [],
18532 data: segment.buffer
18533 }, [segment.buffer]);
18534 };
18535
18536 _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
18537 var timescales = _ref.timescales,
18538 data = _ref.data;
18539 var startTime = probe$2.startTime(timescales, data);
18540 this.self.postMessage({
18541 action: 'probeMp4StartTime',
18542 startTime: startTime,
18543 data: data
18544 }, [data.buffer]);
18545 };
18546
18547 _proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
18548 var data = _ref2.data;
18549 var tracks = probe$2.tracks(data);
18550 this.self.postMessage({
18551 action: 'probeMp4Tracks',
18552 tracks: tracks,
18553 data: data
18554 }, [data.buffer]);
18555 }
18556 /**
18557 * Probe an mpeg2-ts segment to determine the start time of the segment in its
18558 * internal "media time," as well as whether it contains video and/or audio.
18559 *
18560 * @private
18561 * @param {Uint8Array} bytes - segment bytes
18562 * @param {number} baseStartTime
18563 * Relative reference timestamp used when adjusting frame timestamps for rollover.
18564 * This value should be in seconds, as it's converted to a 90kHz clock within the
18565 * function body.
18566 * @return {Object} The start time of the current segment in "media time" as well as
18567 * whether it contains video and/or audio
18568 */
18569 ;
18570
18571 _proto.probeTs = function probeTs(_ref3) {
18572 var data = _ref3.data,
18573 baseStartTime = _ref3.baseStartTime;
18574 var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
18575 var timeInfo = tsInspector.inspect(data, tsStartTime);
18576 var result = null;
18577
18578 if (timeInfo) {
18579 result = {
18580 // each type's time info comes back as an array of 2 times, start and end
18581 hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
18582 hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
18583 };
18584
18585 if (result.hasVideo) {
18586 result.videoStart = timeInfo.video[0].ptsTime;
18587 }
18588
18589 if (result.hasAudio) {
18590 result.audioStart = timeInfo.audio[0].ptsTime;
18591 }
18592 }
18593
18594 this.self.postMessage({
18595 action: 'probeTs',
18596 result: result,
18597 data: data
18598 }, [data.buffer]);
18599 };
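  // Sketch of the posted result (hypothetical values):
  //
  //   { hasVideo: true, hasAudio: true, videoStart: 10.01, audioStart: 10 }
  //
  // The start times are the first parsed ptsTime of each stream, in seconds.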
18600
18601 _proto.clearAllMp4Captions = function clearAllMp4Captions() {
18602 if (this.captionParser) {
18603 this.captionParser.clearAllCaptions();
18604 }
18605 };
18606
18607 _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
18608 if (this.captionParser) {
18609 this.captionParser.clearParsedCaptions();
18610 }
18611 }
18612 /**
18613 * Adds data (a ts segment) to the start of the transmuxer pipeline for
18614 * processing.
18615 *
18616 * @param {ArrayBuffer} data data to push into the muxer
18617 */
18618 ;
18619
18620 _proto.push = function push(data) {
18621 // Cast array buffer to correct type for transmuxer
18622 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
18623 this.transmuxer.push(segment);
18624 }
18625 /**
18626 * Recreate the transmuxer so that the next segment added via `push`
18627 * starts with a fresh transmuxer.
18628 */
18629 ;
18630
18631 _proto.reset = function reset() {
18632 this.transmuxer.reset();
18633 }
18634 /**
18635 * Set the value that will be used as the `baseMediaDecodeTime` time for the
18636 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
18637 * set relative to the first based on the PTS values.
18638 *
18639 * @param {Object} data used to set the timestamp offset in the muxer
18640 */
18641 ;
18642
18643 _proto.setTimestampOffset = function setTimestampOffset(data) {
18644 var timestampOffset = data.timestampOffset || 0;
18645 this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
18646 };
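  // Worked example: clock.secondsToVideoTs scales by the 90kHz clock, so a
  // timestampOffset of 10 seconds sets a baseMediaDecodeTime of
  //
  //   Math.round(10 * 90000); // => 900000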
18647
18648 _proto.setAudioAppendStart = function setAudioAppendStart(data) {
18649 this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
18650 };
18651
18652 _proto.setRemux = function setRemux(data) {
18653 this.transmuxer.setRemux(data.remux);
18654 }
18655 /**
18656 * Forces the pipeline to finish processing the last segment and emit its
18657 * results.
18658 *
18659 * @param {Object} data event data, not really used
18660 */
18661 ;
18662
18663 _proto.flush = function flush(data) {
18664 this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
18665
18666 self.postMessage({
18667 action: 'done',
18668 type: 'transmuxed'
18669 });
18670 };
18671
18672 _proto.endTimeline = function endTimeline() {
18673 this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
18674 // timelines
18675
18676 self.postMessage({
18677 action: 'endedtimeline',
18678 type: 'transmuxed'
18679 });
18680 };
18681
18682 _proto.alignGopsWith = function alignGopsWith(data) {
18683 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
18684 };
18685
18686 return MessageHandlers;
18687 }();
18688 /**
18689 * Our web worker interface so that things can talk to mux.js
18690 * that will be running in a web worker. The scope is passed to this by
18691 * webworkify.
18692 *
18693 * @param {Object} self the scope for the web worker
18694 */
18695
18696
18697 self.onmessage = function (event) {
18698 if (event.data.action === 'init' && event.data.options) {
18699 this.messageHandlers = new MessageHandlers(self, event.data.options);
18700 return;
18701 }
18702
18703 if (!this.messageHandlers) {
18704 this.messageHandlers = new MessageHandlers(self);
18705 }
18706
18707 if (event.data && event.data.action && event.data.action !== 'init') {
18708 if (this.messageHandlers[event.data.action]) {
18709 this.messageHandlers[event.data.action](event.data);
18710 }
18711 }
18712 };
18713 }));
18714 var TransmuxWorker = factory(workerCode$1);
18715 /* rollup-plugin-worker-factory end for worker!/Users/ddashkevich/projects/vhs-release/src/transmuxer-worker.js */
18716
18717 var handleData_ = function handleData_(event, transmuxedData, callback) {
18718 var _event$data$segment = event.data.segment,
18719 type = _event$data$segment.type,
18720 initSegment = _event$data$segment.initSegment,
18721 captions = _event$data$segment.captions,
18722 captionStreams = _event$data$segment.captionStreams,
18723 metadata = _event$data$segment.metadata,
18724 videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
18725 videoFramePtsTime = _event$data$segment.videoFramePtsTime;
18726 transmuxedData.buffer.push({
18727 captions: captions,
18728 captionStreams: captionStreams,
18729 metadata: metadata
18730 });
18731 var boxes = event.data.segment.boxes || {
18732 data: event.data.segment.data
18733 };
18734 var result = {
18735 type: type,
18736 // cast ArrayBuffer to TypedArray
18737 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
18738 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
18739 };
18740
18741 if (typeof videoFrameDtsTime !== 'undefined') {
18742 result.videoFrameDtsTime = videoFrameDtsTime;
18743 }
18744
18745 if (typeof videoFramePtsTime !== 'undefined') {
18746 result.videoFramePtsTime = videoFramePtsTime;
18747 }
18748
18749 callback(result);
18750 };
18751 var handleDone_ = function handleDone_(_ref) {
18752 var transmuxedData = _ref.transmuxedData,
18753 callback = _ref.callback;
18754 // Previously we only returned data on data events,
18755 // not on done events. Clear out the buffer to keep that consistent.
18756 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
18757 // have received
18758
18759 callback(transmuxedData);
18760 };
18761 var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
18762 transmuxedData.gopInfo = event.data.gopInfo;
18763 };
18764 var processTransmux = function processTransmux(options) {
18765 var transmuxer = options.transmuxer,
18766 bytes = options.bytes,
18767 audioAppendStart = options.audioAppendStart,
18768 gopsToAlignWith = options.gopsToAlignWith,
18769 remux = options.remux,
18770 onData = options.onData,
18771 onTrackInfo = options.onTrackInfo,
18772 onAudioTimingInfo = options.onAudioTimingInfo,
18773 onVideoTimingInfo = options.onVideoTimingInfo,
18774 onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
18775 onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
18776 onId3 = options.onId3,
18777 onCaptions = options.onCaptions,
18778 onDone = options.onDone,
18779 onEndedTimeline = options.onEndedTimeline,
18780 onTransmuxerLog = options.onTransmuxerLog,
18781 isEndOfTimeline = options.isEndOfTimeline;
18782 var transmuxedData = {
18783 buffer: []
18784 };
18785 var waitForEndedTimelineEvent = isEndOfTimeline;
18786
18787 var handleMessage = function handleMessage(event) {
18788 if (transmuxer.currentTransmux !== options) {
18789 // disposed
18790 return;
18791 }
18792
18793 if (event.data.action === 'data') {
18794 handleData_(event, transmuxedData, onData);
18795 }
18796
18797 if (event.data.action === 'trackinfo') {
18798 onTrackInfo(event.data.trackInfo);
18799 }
18800
18801 if (event.data.action === 'gopInfo') {
18802 handleGopInfo_(event, transmuxedData);
18803 }
18804
18805 if (event.data.action === 'audioTimingInfo') {
18806 onAudioTimingInfo(event.data.audioTimingInfo);
18807 }
18808
18809 if (event.data.action === 'videoTimingInfo') {
18810 onVideoTimingInfo(event.data.videoTimingInfo);
18811 }
18812
18813 if (event.data.action === 'videoSegmentTimingInfo') {
18814 onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
18815 }
18816
18817 if (event.data.action === 'audioSegmentTimingInfo') {
18818 onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
18819 }
18820
18821 if (event.data.action === 'id3Frame') {
18822 onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
18823 }
18824
18825 if (event.data.action === 'caption') {
18826 onCaptions(event.data.caption);
18827 }
18828
18829 if (event.data.action === 'endedtimeline') {
18830 waitForEndedTimelineEvent = false;
18831 onEndedTimeline();
18832 }
18833
18834 if (event.data.action === 'log') {
18835 onTransmuxerLog(event.data.log);
18836 } // wait for the transmuxed event since we may have audio and video
18837
18838
18839 if (event.data.type !== 'transmuxed') {
18840 return;
18841 } // If the "endedtimeline" event has not yet fired, and this segment represents the end
18842 // of a timeline, that means there may still be data events before the segment
18843 // processing can be considered complete. In that case, the final event should be
18844 // an "endedtimeline" event with the type "transmuxed."
18845
18846
18847 if (waitForEndedTimelineEvent) {
18848 return;
18849 }
18850
18851 transmuxer.onmessage = null;
18852 handleDone_({
18853 transmuxedData: transmuxedData,
18854 callback: onDone
18855 });
18856 /* eslint-disable no-use-before-define */
18857
18858 dequeue(transmuxer);
18859 /* eslint-enable */
18860 };
18861
18862 transmuxer.onmessage = handleMessage;
18863
18864 if (audioAppendStart) {
18865 transmuxer.postMessage({
18866 action: 'setAudioAppendStart',
18867 appendStart: audioAppendStart
18868 });
18869 } // allow empty arrays to be passed to clear out GOPs
18870
18871
18872 if (Array.isArray(gopsToAlignWith)) {
18873 transmuxer.postMessage({
18874 action: 'alignGopsWith',
18875 gopsToAlignWith: gopsToAlignWith
18876 });
18877 }
18878
18879 if (typeof remux !== 'undefined') {
18880 transmuxer.postMessage({
18881 action: 'setRemux',
18882 remux: remux
18883 });
18884 }
18885
18886 if (bytes.byteLength) {
18887 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
18888 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
18889 transmuxer.postMessage({
18890 action: 'push',
18891 // Send the typed-array of data as an ArrayBuffer so that
18892 // it can be sent as a "Transferable" and avoid the costly
18893 // memory copy
18894 data: buffer,
18895 // To recreate the original typed-array, we need information
18896 // about what portion of the ArrayBuffer it was a view into
18897 byteOffset: byteOffset,
18898 byteLength: bytes.byteLength
18899 }, [buffer]);
18900 }
18901
18902 if (isEndOfTimeline) {
18903 transmuxer.postMessage({
18904 action: 'endTimeline'
18905 });
18906 } // even if we didn't push any bytes, we have to make sure we flush in case we reached
18907 // the end of the segment
18908
18909
18910 transmuxer.postMessage({
18911 action: 'flush'
18912 });
18913 };
18914 var dequeue = function dequeue(transmuxer) {
18915 transmuxer.currentTransmux = null;
18916
18917 if (transmuxer.transmuxQueue.length) {
18918 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
18919
18920 if (typeof transmuxer.currentTransmux === 'function') {
18921 transmuxer.currentTransmux();
18922 } else {
18923 processTransmux(transmuxer.currentTransmux);
18924 }
18925 }
18926 };
18927 var processAction = function processAction(transmuxer, action) {
18928 transmuxer.postMessage({
18929 action: action
18930 });
18931 dequeue(transmuxer);
18932 };
18933 var enqueueAction = function enqueueAction(action, transmuxer) {
18934 if (!transmuxer.currentTransmux) {
18935 transmuxer.currentTransmux = action;
18936 processAction(transmuxer, action);
18937 return;
18938 }
18939
18940 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
18941 };
18942 var reset = function reset(transmuxer) {
18943 enqueueAction('reset', transmuxer);
18944 };
18945 var endTimeline = function endTimeline(transmuxer) {
18946 enqueueAction('endTimeline', transmuxer);
18947 };
18948 var transmux = function transmux(options) {
18949 if (!options.transmuxer.currentTransmux) {
18950 options.transmuxer.currentTransmux = options;
18951 processTransmux(options);
18952 return;
18953 }
18954
18955 options.transmuxer.transmuxQueue.push(options);
18956 };
18957 var createTransmuxer = function createTransmuxer(options) {
18958 var transmuxer = new TransmuxWorker();
18959 transmuxer.currentTransmux = null;
18960 transmuxer.transmuxQueue = [];
18961 var term = transmuxer.terminate;
18962
18963 transmuxer.terminate = function () {
18964 transmuxer.currentTransmux = null;
18965 transmuxer.transmuxQueue.length = 0;
18966 return term.call(transmuxer);
18967 };
18968
18969 transmuxer.postMessage({
18970 action: 'init',
18971 options: options
18972 });
18973 return transmuxer;
18974 };
18975 var segmentTransmuxer = {
18976 reset: reset,
18977 endTimeline: endTimeline,
18978 transmux: transmux,
18979 createTransmuxer: createTransmuxer
18980 };
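  /*
   * Illustrative sketch (not part of the library; options abbreviated): the queue
   * above guarantees that only one transmux operation or action is in flight per
   * worker. A call made while another is pending is parked on transmuxQueue and
   * picked up by dequeue() once the current operation signals it is done.
   *
   *   var worker = segmentTransmuxer.createTransmuxer({ remux: false });
   *   segmentTransmuxer.transmux({ transmuxer: worker, bytes: segA, onDone: doneA }); // runs now
   *   segmentTransmuxer.transmux({ transmuxer: worker, bytes: segB, onDone: doneB }); // queued
   *   segmentTransmuxer.endTimeline(worker); // also queued, order preserved
   */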
18981
18982 var workerCallback = function workerCallback(options) {
18983 var transmuxer = options.transmuxer;
18984 var endAction = options.endAction || options.action;
18985 var callback = options.callback;
18986
18987 var message = _extends_1({}, options, {
18988 endAction: null,
18989 transmuxer: null,
18990 callback: null
18991 });
18992
18993 var listenForEndEvent = function listenForEndEvent(event) {
18994 if (event.data.action !== endAction) {
18995 return;
18996 }
18997
18998 transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
18999
19000 if (event.data.data) {
19001 event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
19002
19003 if (options.data) {
19004 options.data = event.data.data;
19005 }
19006 }
19007
19008 callback(event.data);
19009 };
19010
19011 transmuxer.addEventListener('message', listenForEndEvent);
19012
19013 if (options.data) {
19014 var isArrayBuffer = options.data instanceof ArrayBuffer;
19015 message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
19016 message.byteLength = options.data.byteLength;
19017 var transfers = [isArrayBuffer ? options.data : options.data.buffer];
19018 transmuxer.postMessage(message, transfers);
19019 } else {
19020 transmuxer.postMessage(message);
19021 }
19022 };
19023
19024 var REQUEST_ERRORS = {
19025 FAILURE: 2,
19026 TIMEOUT: -101,
19027 ABORTED: -102
19028 };
19029 /**
19030 * Abort all requests
19031 *
19032 * @param {Object[]} activeXhrs - an array that tracks all active XHR requests
19033 */
19034
19035 var abortAll = function abortAll(activeXhrs) {
19036 activeXhrs.forEach(function (xhr) {
19037 xhr.abort();
19038 });
19039 };
19040 /**
19041 * Gather important bandwidth stats once a request has completed
19042 *
19043 * @param {Object} request - the XHR request from which to gather stats
19044 */
19045
19046
19047 var getRequestStats = function getRequestStats(request) {
19048 return {
19049 bandwidth: request.bandwidth,
19050 bytesReceived: request.bytesReceived || 0,
19051 roundTripTime: request.roundTripTime || 0
19052 };
19053 };
19054 /**
19055 * If possible gather bandwidth stats as a request is in
19056 * progress
19057 *
19058 * @param {Event} progressEvent - an event object from an XHR's progress event
19059 */
19060
19061
19062 var getProgressStats = function getProgressStats(progressEvent) {
19063 var request = progressEvent.target;
19064 var roundTripTime = Date.now() - request.requestTime;
19065 var stats = {
19066 bandwidth: Infinity,
19067 bytesReceived: 0,
19068 roundTripTime: roundTripTime || 0
19069 };
19070 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
19071 // because we should only use bandwidth stats on progress to determine when
19072 // to abort a request early due to insufficient bandwidth
19073
19074 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
19075 return stats;
19076 };
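  /*
   * Worked example (illustrative): 512000 bytes received 250 milliseconds after the
   * request started gives
   *
   *   bandwidth = Math.floor(512000 / 250 * 8 * 1000); // 16384000 bits/sec
   *
   * i.e. bytes-per-millisecond converted to bits (* 8) and scaled to per-second (* 1000).
   */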
19077 /**
19078 * Handle all error conditions in one place and return an object
19079 * with all the information
19080 *
19081 * @param {Error|null} error - if non-null signals an error occurred with the XHR
19082 * @param {Object} request - the XHR request that possibly generated the error
19083 */
19084
19085
19086 var handleErrors = function handleErrors(error, request) {
19087 if (request.timedout) {
19088 return {
19089 status: request.status,
19090 message: 'HLS request timed-out at URL: ' + request.uri,
19091 code: REQUEST_ERRORS.TIMEOUT,
19092 xhr: request
19093 };
19094 }
19095
19096 if (request.aborted) {
19097 return {
19098 status: request.status,
19099 message: 'HLS request aborted at URL: ' + request.uri,
19100 code: REQUEST_ERRORS.ABORTED,
19101 xhr: request
19102 };
19103 }
19104
19105 if (error) {
19106 return {
19107 status: request.status,
19108 message: 'HLS request errored at URL: ' + request.uri,
19109 code: REQUEST_ERRORS.FAILURE,
19110 xhr: request
19111 };
19112 }
19113
19114 if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
19115 return {
19116 status: request.status,
19117 message: 'Empty HLS response at URL: ' + request.uri,
19118 code: REQUEST_ERRORS.FAILURE,
19119 xhr: request
19120 };
19121 }
19122
19123 return null;
19124 };
19125 /**
19126 * Handle responses for key data and convert the key data to the correct format
19127 * for the decryption step later
19128 *
19129 * @param {Object} segment - a simplified copy of the segmentInfo object
19130 * from SegmentLoader
19131 * @param {Array} objects - objects to add the key bytes to.
19132 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19133 * this request
19134 */
19135
19136
19137 var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
19138 return function (error, request) {
19139 var response = request.response;
19140 var errorObj = handleErrors(error, request);
19141
19142 if (errorObj) {
19143 return finishProcessingFn(errorObj, segment);
19144 }
19145
19146 if (response.byteLength !== 16) {
19147 return finishProcessingFn({
19148 status: request.status,
19149 message: 'Invalid HLS key at URL: ' + request.uri,
19150 code: REQUEST_ERRORS.FAILURE,
19151 xhr: request
19152 }, segment);
19153 }
19154
19155 var view = new DataView(response);
19156 var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
19157
19158 for (var i = 0; i < objects.length; i++) {
19159 objects[i].bytes = bytes;
19160 }
19161
19162 return finishProcessingFn(null, segment);
19163 };
19164 };
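  /*
   * Illustrative sketch (not part of the library): an HLS AES-128 key is exactly 16
   * bytes, and the handler above reads it as four big-endian 32-bit words because
   * DataView#getUint32 defaults to big-endian regardless of platform endianness.
   *
   *   var raw = new Uint8Array([0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4]).buffer;
   *   var view = new DataView(raw);
   *   new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
   *   // -> Uint32Array [1, 2, 3, 4] on both little- and big-endian machines
   */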
19165
19166 var parseInitSegment = function parseInitSegment(segment, _callback) {
19167 var type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
19168 // only know how to parse mp4 init segments at the moment
19169
19170 if (type !== 'mp4') {
19171 var uri = segment.map.resolvedUri || segment.map.uri;
19172 return _callback({
19173 internal: true,
19174 message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
19175 code: REQUEST_ERRORS.FAILURE
19176 });
19177 }
19178
19179 workerCallback({
19180 action: 'probeMp4Tracks',
19181 data: segment.map.bytes,
19182 transmuxer: segment.transmuxer,
19183 callback: function callback(_ref) {
19184 var tracks = _ref.tracks,
19185 data = _ref.data;
19186 // transfer bytes back to us
19187 segment.map.bytes = data;
19188 tracks.forEach(function (track) {
19189 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
19190
19191 if (segment.map.tracks[track.type]) {
19192 return;
19193 }
19194
19195 segment.map.tracks[track.type] = track;
19196
19197 if (typeof track.id === 'number' && track.timescale) {
19198 segment.map.timescales = segment.map.timescales || {};
19199 segment.map.timescales[track.id] = track.timescale;
19200 }
19201 });
19202 return _callback(null);
19203 }
19204 });
19205 };
19206 /**
19207 * Handle init-segment responses
19208 *
19209 * @param {Object} segment - a simplified copy of the segmentInfo object
19210 * from SegmentLoader
19211 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19212 * this request
19213 */
19214
19215
19216 var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
19217 var segment = _ref2.segment,
19218 finishProcessingFn = _ref2.finishProcessingFn;
19219 return function (error, request) {
19220 var errorObj = handleErrors(error, request);
19221
19222 if (errorObj) {
19223 return finishProcessingFn(errorObj, segment);
19224 }
19225
19226 var bytes = new Uint8Array(request.response); // if the init segment is encrypted, we will have to wait
19227 // until the key request is done to decrypt.
19228
19229 if (segment.map.key) {
19230 segment.map.encryptedBytes = bytes;
19231 return finishProcessingFn(null, segment);
19232 }
19233
19234 segment.map.bytes = bytes;
19235 parseInitSegment(segment, function (parseError) {
19236 if (parseError) {
19237 parseError.xhr = request;
19238 parseError.status = request.status;
19239 return finishProcessingFn(parseError, segment);
19240 }
19241
19242 finishProcessingFn(null, segment);
19243 });
19244 };
19245 };
19246 /**
19247 * Response handler for segment requests, being sure to set the correct
19248 * property depending on whether the segment is encrypted or not.
19249 * Also records and keeps track of stats that are used for ABR purposes
19250 *
19251 * @param {Object} segment - a simplified copy of the segmentInfo object
19252 * from SegmentLoader
19253 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19254 * this request
19255 */
19256
19257
19258 var handleSegmentResponse = function handleSegmentResponse(_ref3) {
19259 var segment = _ref3.segment,
19260 finishProcessingFn = _ref3.finishProcessingFn,
19261 responseType = _ref3.responseType;
19262 return function (error, request) {
19263 var errorObj = handleErrors(error, request);
19264
19265 if (errorObj) {
19266 return finishProcessingFn(errorObj, segment);
19267 }
19268
19269 var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
19270 // thrown for two primary cases:
19271 // 1. the mime type override stops working, or is not implemented for a specific
19272 // browser
19273 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
19274 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
19275 segment.stats = getRequestStats(request);
19276
19277 if (segment.key) {
19278 segment.encryptedBytes = new Uint8Array(newBytes);
19279 } else {
19280 segment.bytes = new Uint8Array(newBytes);
19281 }
19282
19283 return finishProcessingFn(null, segment);
19284 };
19285 };
19286
19287 var transmuxAndNotify = function transmuxAndNotify(_ref4) {
19288 var segment = _ref4.segment,
19289 bytes = _ref4.bytes,
19290 trackInfoFn = _ref4.trackInfoFn,
19291 timingInfoFn = _ref4.timingInfoFn,
19292 videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
19293 audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
19294 id3Fn = _ref4.id3Fn,
19295 captionsFn = _ref4.captionsFn,
19296 isEndOfTimeline = _ref4.isEndOfTimeline,
19297 endedTimelineFn = _ref4.endedTimelineFn,
19298 dataFn = _ref4.dataFn,
19299 doneFn = _ref4.doneFn,
19300 onTransmuxerLog = _ref4.onTransmuxerLog;
19301 var fmp4Tracks = segment.map && segment.map.tracks || {};
19302 var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
19303 // One reason for this is that in the case of full segments, we want to trust start
19304 // times from the probe, rather than the transmuxer.
19305
19306 var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
19307 var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
19308 var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
19309 var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');
19310
19311 var finish = function finish() {
19312 return transmux({
19313 bytes: bytes,
19314 transmuxer: segment.transmuxer,
19315 audioAppendStart: segment.audioAppendStart,
19316 gopsToAlignWith: segment.gopsToAlignWith,
19317 remux: isMuxed,
19318 onData: function onData(result) {
19319 result.type = result.type === 'combined' ? 'video' : result.type;
19320 dataFn(segment, result);
19321 },
19322 onTrackInfo: function onTrackInfo(trackInfo) {
19323 if (trackInfoFn) {
19324 if (isMuxed) {
19325 trackInfo.isMuxed = true;
19326 }
19327
19328 trackInfoFn(segment, trackInfo);
19329 }
19330 },
19331 onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
19332 // we only want the first start value we encounter
19333 if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
19334 audioStartFn(audioTimingInfo.start);
19335 audioStartFn = null;
19336 } // we want to continually update the end time
19337
19338
19339 if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
19340 audioEndFn(audioTimingInfo.end);
19341 }
19342 },
19343 onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
19344 // we only want the first start value we encounter
19345 if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
19346 videoStartFn(videoTimingInfo.start);
19347 videoStartFn = null;
19348 } // we want to continually update the end time
19349
19350
19351 if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
19352 videoEndFn(videoTimingInfo.end);
19353 }
19354 },
19355 onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
19356 videoSegmentTimingInfoFn(videoSegmentTimingInfo);
19357 },
19358 onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
19359 audioSegmentTimingInfoFn(audioSegmentTimingInfo);
19360 },
19361 onId3: function onId3(id3Frames, dispatchType) {
19362 id3Fn(segment, id3Frames, dispatchType);
19363 },
19364 onCaptions: function onCaptions(captions) {
19365 captionsFn(segment, [captions]);
19366 },
19367 isEndOfTimeline: isEndOfTimeline,
19368 onEndedTimeline: function onEndedTimeline() {
19369 endedTimelineFn();
19370 },
19371 onTransmuxerLog: onTransmuxerLog,
19372 onDone: function onDone(result) {
19373 if (!doneFn) {
19374 return;
19375 }
19376
19377 result.type = result.type === 'combined' ? 'video' : result.type;
19378 doneFn(null, segment, result);
19379 }
19380 });
19381 }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
19382 // This means cached frame data may corrupt our notion of where this segment
19383 // really starts. To get around this, probe for the info needed.
19384
19385
19386 workerCallback({
19387 action: 'probeTs',
19388 transmuxer: segment.transmuxer,
19389 data: bytes,
19390 baseStartTime: segment.baseStartTime,
19391 callback: function callback(data) {
19392 segment.bytes = bytes = data.data;
19393 var probeResult = data.result;
19394
19395 if (probeResult) {
19396 trackInfoFn(segment, {
19397 hasAudio: probeResult.hasAudio,
19398 hasVideo: probeResult.hasVideo,
19399 isMuxed: isMuxed
19400 });
19401 trackInfoFn = null;
19402
19403 if (probeResult.hasAudio && !isMuxed) {
19404 audioStartFn(probeResult.audioStart);
19405 }
19406
19407 if (probeResult.hasVideo) {
19408 videoStartFn(probeResult.videoStart);
19409 }
19410
19411 audioStartFn = null;
19412 videoStartFn = null;
19413 }
19414
19415 finish();
19416 }
19417 });
19418 };
19419
19420 var handleSegmentBytes = function handleSegmentBytes(_ref5) {
19421 var segment = _ref5.segment,
19422 bytes = _ref5.bytes,
19423 trackInfoFn = _ref5.trackInfoFn,
19424 timingInfoFn = _ref5.timingInfoFn,
19425 videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
19426 audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
19427 id3Fn = _ref5.id3Fn,
19428 captionsFn = _ref5.captionsFn,
19429 isEndOfTimeline = _ref5.isEndOfTimeline,
19430 endedTimelineFn = _ref5.endedTimelineFn,
19431 dataFn = _ref5.dataFn,
19432 doneFn = _ref5.doneFn,
19433 onTransmuxerLog = _ref5.onTransmuxerLog;
19434 var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
19435 // We should have a handler that fetches the number of bytes required
19436 // to check if something is fmp4. This will allow us to save bandwidth
19437 // because we can only blacklist a playlist and abort requests
19438 // by codec after trackinfo triggers.
19439
19440 if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
19441 segment.isFmp4 = true;
19442 var tracks = segment.map.tracks;
19443 var trackInfo = {
19444 isFmp4: true,
19445 hasVideo: !!tracks.video,
19446 hasAudio: !!tracks.audio
19447 }; // if we have an audio track with a codec that is not set to
19448 // encrypted audio
19449
19450 if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
19451 trackInfo.audioCodec = tracks.audio.codec;
19452 } // if we have a video track with a codec that is not set to
19453 // encrypted video
19454
19455
19456 if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
19457 trackInfo.videoCodec = tracks.video.codec;
19458 }
19459
19460 if (tracks.video && tracks.audio) {
19461 trackInfo.isMuxed = true;
19462 } // since we don't support appending fmp4 data on progress, we know we have the full
19463 // segment here
19464
19465
19466 trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
19467 // time. The end time can be roughly calculated by the receiver using the duration.
19468 //
19469 // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
19470 // that is the true start of the segment (where the playback engine should begin
19471 // decoding).
19472
19473 var finishLoading = function finishLoading(captions) {
19474 // if the track still has audio at this point it is only possible
19475 // for it to be audio only. See `tracks.video && tracks.audio` if statement
19476 // above.
19477 // we make sure to use segment.bytes here, as it is kept in sync with the bytes transferred back from the worker
19478 dataFn(segment, {
19479 data: bytesAsUint8Array,
19480 type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
19481 });
19482
19483 if (captions && captions.length) {
19484 captionsFn(segment, captions);
19485 }
19486
19487 doneFn(null, segment, {});
19488 };
19489
19490 workerCallback({
19491 action: 'probeMp4StartTime',
19492 timescales: segment.map.timescales,
19493 data: bytesAsUint8Array,
19494 transmuxer: segment.transmuxer,
19495 callback: function callback(_ref6) {
19496 var data = _ref6.data,
19497 startTime = _ref6.startTime;
19498 // transfer bytes back to us
19499 bytes = data.buffer;
19500 segment.bytes = bytesAsUint8Array = data;
19501
19502 if (trackInfo.hasAudio && !trackInfo.isMuxed) {
19503 timingInfoFn(segment, 'audio', 'start', startTime);
19504 }
19505
19506 if (trackInfo.hasVideo) {
19507 timingInfoFn(segment, 'video', 'start', startTime);
19508 } // Run through the CaptionParser in case there are captions.
19509 // Initialize CaptionParser if it hasn't been yet
19510
19511
19512 if (!tracks.video || !data.byteLength || !segment.transmuxer) {
19513 finishLoading();
19514 return;
19515 }
19516
19517 workerCallback({
19518 action: 'pushMp4Captions',
19519 endAction: 'mp4Captions',
19520 transmuxer: segment.transmuxer,
19521 data: bytesAsUint8Array,
19522 timescales: segment.map.timescales,
19523 trackIds: [tracks.video.id],
19524 callback: function callback(message) {
19525 // transfer bytes back to us
19526 bytes = message.data.buffer;
19527 segment.bytes = bytesAsUint8Array = message.data;
19528 message.logs.forEach(function (log) {
19529 onTransmuxerLog(videojs__default["default"].mergeOptions(log, {
19530 stream: 'mp4CaptionParser'
19531 }));
19532 });
19533 finishLoading(message.captions);
19534 }
19535 });
19536 }
19537 });
19538 return;
19539 } // VTT or other segments that don't need processing
19540
19541
19542 if (!segment.transmuxer) {
19543 doneFn(null, segment, {});
19544 return;
19545 }
19546
19547 if (typeof segment.container === 'undefined') {
19548 segment.container = detectContainerForBytes(bytesAsUint8Array);
19549 }
19550
19551 if (segment.container !== 'ts' && segment.container !== 'aac') {
19552 trackInfoFn(segment, {
19553 hasAudio: false,
19554 hasVideo: false
19555 });
19556 doneFn(null, segment, {});
19557 return;
19558 } // ts or aac
19559
19560
19561 transmuxAndNotify({
19562 segment: segment,
19563 bytes: bytes,
19564 trackInfoFn: trackInfoFn,
19565 timingInfoFn: timingInfoFn,
19566 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19567 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19568 id3Fn: id3Fn,
19569 captionsFn: captionsFn,
19570 isEndOfTimeline: isEndOfTimeline,
19571 endedTimelineFn: endedTimelineFn,
19572 dataFn: dataFn,
19573 doneFn: doneFn,
19574 onTransmuxerLog: onTransmuxerLog
19575 });
19576 };
19577
19578 var decrypt = function decrypt(_ref7, callback) {
19579 var id = _ref7.id,
19580 key = _ref7.key,
19581 encryptedBytes = _ref7.encryptedBytes,
19582 decryptionWorker = _ref7.decryptionWorker;
19583
19584 var decryptionHandler = function decryptionHandler(event) {
19585 if (event.data.source === id) {
19586 decryptionWorker.removeEventListener('message', decryptionHandler);
19587 var decrypted = event.data.decrypted;
19588 callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
19589 }
19590 };
19591
19592 decryptionWorker.addEventListener('message', decryptionHandler);
19593 var keyBytes;
19594
19595 if (key.bytes.slice) {
19596 keyBytes = key.bytes.slice();
19597 } else {
19598 keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
19599 } // incrementally decrypt the bytes
19600
19601
19602 decryptionWorker.postMessage(createTransferableMessage({
19603 source: id,
19604 encrypted: encryptedBytes,
19605 key: keyBytes,
19606 iv: key.iv
19607 }), [encryptedBytes.buffer, keyBytes.buffer]);
19608 };
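  /*
   * Illustrative sketch (not part of the library): the message protocol decrypt()
   * assumes for the decryption worker. Both directions transfer their buffers to
   * avoid copies, and the `source` id is what ties a response to its request.
   *
   *   // main thread -> worker:
   *   //   { source: 'req-1', encrypted: Uint8Array, key: Uint32Array, iv: key.iv }
   *   // worker -> main thread:
   *   //   { source: 'req-1', decrypted: { bytes: ArrayBuffer, byteOffset: 0, byteLength: n } }
   */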
19609 /**
19610 * Decrypt the segment via the decryption web worker
19611 *
19612 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
19613 * routines
19614 * @param {Object} segment - a simplified copy of the segmentInfo object
19615 * from SegmentLoader
19616 * @param {Function} trackInfoFn - a callback that receives track info
19617 * @param {Function} timingInfoFn - a callback that receives timing info
19618 * @param {Function} videoSegmentTimingInfoFn
19619 * a callback that receives video timing info based on media times and
19620 * any adjustments made by the transmuxer
19621 * @param {Function} audioSegmentTimingInfoFn
19622 * a callback that receives audio timing info based on media times and
19623 * any adjustments made by the transmuxer
19624 * @param {boolean} isEndOfTimeline
19625 * true if this segment represents the last segment in a timeline
19626 * @param {Function} endedTimelineFn
19627 * a callback made when a timeline is ended, will only be called if
19628 * isEndOfTimeline is true
19629 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19630 * and ready to use
19631 * @param {Function} doneFn - a callback that is executed after decryption has completed
19632 */
19633
19634
19635 var decryptSegment = function decryptSegment(_ref8) {
19636 var decryptionWorker = _ref8.decryptionWorker,
19637 segment = _ref8.segment,
19638 trackInfoFn = _ref8.trackInfoFn,
19639 timingInfoFn = _ref8.timingInfoFn,
19640 videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
19641 audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
19642 id3Fn = _ref8.id3Fn,
19643 captionsFn = _ref8.captionsFn,
19644 isEndOfTimeline = _ref8.isEndOfTimeline,
19645 endedTimelineFn = _ref8.endedTimelineFn,
19646 dataFn = _ref8.dataFn,
19647 doneFn = _ref8.doneFn,
19648 onTransmuxerLog = _ref8.onTransmuxerLog;
19649 decrypt({
19650 id: segment.requestId,
19651 key: segment.key,
19652 encryptedBytes: segment.encryptedBytes,
19653 decryptionWorker: decryptionWorker
19654 }, function (decryptedBytes) {
19655 segment.bytes = decryptedBytes;
19656 handleSegmentBytes({
19657 segment: segment,
19658 bytes: segment.bytes,
19659 trackInfoFn: trackInfoFn,
19660 timingInfoFn: timingInfoFn,
19661 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19662 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19663 id3Fn: id3Fn,
19664 captionsFn: captionsFn,
19665 isEndOfTimeline: isEndOfTimeline,
19666 endedTimelineFn: endedTimelineFn,
19667 dataFn: dataFn,
19668 doneFn: doneFn,
19669 onTransmuxerLog: onTransmuxerLog
19670 });
19671 });
19672 };
19673 /**
19674 * This function waits for all XHRs to finish (with either success or failure)
19675 * before continuing processing via its callback. The function gathers errors
19676 * from each request into a single errors array so that the error status for
19677 * each request can be examined later.
19678 *
19679 * @param {Object[]} activeXhrs - an array that tracks all active XHR requests
19680 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
19681 * routines
19682 * @param {Function} trackInfoFn - a callback that receives track info
19683 * @param {Function} timingInfoFn - a callback that receives timing info
19684 * @param {Function} videoSegmentTimingInfoFn
19685 * a callback that receives video timing info based on media times and
19686 * any adjustments made by the transmuxer
19687 * @param {Function} audioSegmentTimingInfoFn
19688 * a callback that receives audio timing info based on media times and
19689 * any adjustments made by the transmuxer
19690 * @param {Function} id3Fn - a callback that receives ID3 metadata
19691 * @param {Function} captionsFn - a callback that receives captions
19692 * @param {boolean} isEndOfTimeline
19693 * true if this segment represents the last segment in a timeline
19694 * @param {Function} endedTimelineFn
19695 * a callback made when a timeline is ended, will only be called if
19696 * isEndOfTimeline is true
19697 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19698 * and ready to use
19699 * @param {Function} doneFn - a callback that is executed after all resources have been
19700 * downloaded and any decryption completed
19701 */
19702
19703
19704 var waitForCompletion = function waitForCompletion(_ref9) {
19705 var activeXhrs = _ref9.activeXhrs,
19706 decryptionWorker = _ref9.decryptionWorker,
19707 trackInfoFn = _ref9.trackInfoFn,
19708 timingInfoFn = _ref9.timingInfoFn,
19709 videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
19710 audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
19711 id3Fn = _ref9.id3Fn,
19712 captionsFn = _ref9.captionsFn,
19713 isEndOfTimeline = _ref9.isEndOfTimeline,
19714 endedTimelineFn = _ref9.endedTimelineFn,
19715 dataFn = _ref9.dataFn,
19716 doneFn = _ref9.doneFn,
19717 onTransmuxerLog = _ref9.onTransmuxerLog;
19718 var count = 0;
19719 var didError = false;
19720 return function (error, segment) {
19721 if (didError) {
19722 return;
19723 }
19724
19725 if (error) {
19726 didError = true; // If there are errors, we have to abort any outstanding requests
19727
19728 abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
19729 // handle the aborted events from those requests, there are some cases where we may
19730 // never get an aborted event. For instance, if the network connection is lost and
19731 // there were two requests, the first may have triggered an error immediately, while
19732 // the second request remains unsent. In that case, the aborted algorithm will not
19733 // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
19734 //
19735 // We also can't rely on the ready state of the XHR, since the request that
19736 // triggered the connection error may also show as a ready state of 0 (unsent).
19737 // Therefore, we have to finish this group of requests immediately after the first
19738 // seen error.
19739
19740 return doneFn(error, segment);
19741 }
19742
19743 count += 1;
19744
19745 if (count === activeXhrs.length) {
19746 var segmentFinish = function segmentFinish() {
19747 if (segment.encryptedBytes) {
19748 return decryptSegment({
19749 decryptionWorker: decryptionWorker,
19750 segment: segment,
19751 trackInfoFn: trackInfoFn,
19752 timingInfoFn: timingInfoFn,
19753 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19754 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19755 id3Fn: id3Fn,
19756 captionsFn: captionsFn,
19757 isEndOfTimeline: isEndOfTimeline,
19758 endedTimelineFn: endedTimelineFn,
19759 dataFn: dataFn,
19760 doneFn: doneFn,
19761 onTransmuxerLog: onTransmuxerLog
19762 });
19763 } // Otherwise, everything is ready; just continue
19764
19765
19766 handleSegmentBytes({
19767 segment: segment,
19768 bytes: segment.bytes,
19769 trackInfoFn: trackInfoFn,
19770 timingInfoFn: timingInfoFn,
19771 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19772 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19773 id3Fn: id3Fn,
19774 captionsFn: captionsFn,
19775 isEndOfTimeline: isEndOfTimeline,
19776 endedTimelineFn: endedTimelineFn,
19777 dataFn: dataFn,
19778 doneFn: doneFn,
19779 onTransmuxerLog: onTransmuxerLog
19780 });
19781 }; // Keep track of when *all* of the requests have completed
19782
19783
19784 segment.endOfAllRequests = Date.now();
19785
19786 if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
19787 return decrypt({
19788 decryptionWorker: decryptionWorker,
19789 // add -init to the "id" to differentiate between segment
19790 // and init segment decryption, just in case they happen
19791 // at the same time at some point in the future.
19792 id: segment.requestId + '-init',
19793 encryptedBytes: segment.map.encryptedBytes,
19794 key: segment.map.key
19795 }, function (decryptedBytes) {
19796 segment.map.bytes = decryptedBytes;
19797 parseInitSegment(segment, function (parseError) {
19798 if (parseError) {
19799 abortAll(activeXhrs);
19800 return doneFn(parseError, segment);
19801 }
19802
19803 segmentFinish();
19804 });
19805 });
19806 }
19807
19808 segmentFinish();
19809 }
19810 };
19811 };
19812 /**
19813 * Calls the abort callback if any request within the batch was aborted. Will only call
19814 * the callback once per batch of requests, even if multiple were aborted.
19815 *
19816 * @param {Object} loadendState - state to check to see if the abort function was called
19817 * @param {Function} abortFn - callback to call for abort
19818 */
19819
19820
19821 var handleLoadEnd = function handleLoadEnd(_ref10) {
19822 var loadendState = _ref10.loadendState,
19823 abortFn = _ref10.abortFn;
19824 return function (event) {
19825 var request = event.target;
19826
19827 if (request.aborted && abortFn && !loadendState.calledAbortFn) {
19828 abortFn();
19829 loadendState.calledAbortFn = true;
19830 }
19831 };
19832 };
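  /*
   * Illustrative sketch (not part of the library; names assumed): every XHR in a
   * batch shares one loadendState object, so aborting several in-flight requests
   * still invokes the abort callback exactly once.
   *
   *   var loadendState = {};
   *   var onLoadEnd = handleLoadEnd({ loadendState: loadendState, abortFn: notifyAbort });
   *   [xhrA, xhrB, xhrC].forEach(function (x) {
   *     x.addEventListener('loadend', onLoadEnd);
   *   });
   *   // aborting all three fires notifyAbort() once; loadendState.calledAbortFn
   *   // suppresses the remaining loadend events
   */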
19833 /**
19834 * Simple progress event callback handler that gathers some stats before
19835 * executing a provided callback with the `segment` object
19836 *
19837 * @param {Object} segment - a simplified copy of the segmentInfo object
19838 * from SegmentLoader
19839 * @param {Function} progressFn - a callback that is executed each time a progress event
19840 * is received
19841 * @param {Function} trackInfoFn - a callback that receives track info
19842 * @param {Function} timingInfoFn - a callback that receives timing info
19843 * @param {Function} videoSegmentTimingInfoFn
19844 * a callback that receives video timing info based on media times and
19845 * any adjustments made by the transmuxer
19846 * @param {Function} audioSegmentTimingInfoFn
19847 * a callback that receives audio timing info based on media times and
19848 * any adjustments made by the transmuxer
19849 * @param {boolean} isEndOfTimeline
19850 * true if this segment represents the last segment in a timeline
19851 * @param {Function} endedTimelineFn
19852 * a callback made when a timeline is ended, will only be called if
19853 * isEndOfTimeline is true
19854 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19855 * and ready to use
19856 * @param {Event} event - the progress event object from XMLHttpRequest
19857 */
19858
19859
19860 var handleProgress = function handleProgress(_ref11) {
19861 var segment = _ref11.segment,
19862 progressFn = _ref11.progressFn;
19863 _ref11.trackInfoFn;
19864 _ref11.timingInfoFn;
19865 _ref11.videoSegmentTimingInfoFn;
19866 _ref11.audioSegmentTimingInfoFn;
19867 _ref11.id3Fn;
19868 _ref11.captionsFn;
19869 _ref11.isEndOfTimeline;
19870 _ref11.endedTimelineFn;
19871 _ref11.dataFn;
19872 return function (event) {
19873 var request = event.target;
19874
19875 if (request.aborted) {
19876 return;
19877 }
19878
19879 segment.stats = videojs__default["default"].mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
19880
19881 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
19882 segment.stats.firstBytesReceivedAt = Date.now();
19883 }
19884
19885 return progressFn(event, segment);
19886 };
19887 };
19888 /**
19889 * Loads all resources and does any processing necessary for a media segment
19890 *
19891 * Features:
19892 * decrypts the media-segment if it has a key uri and an iv
19893 * aborts *all* requests if *any* one request fails
19894 *
19895 * The segment object, at minimum, has the following format:
19896 * {
19897 * resolvedUri: String,
19898 * [transmuxer]: Object,
19899 * [byterange]: {
19900 * offset: Number,
19901 * length: Number
19902 * },
19903 * [key]: {
19904 * resolvedUri: String,
19905 * [byterange]: {
19906 * offset: Number,
19907 * length: Number
19908 * },
19909 * iv: {
19910 * bytes: Uint32Array
19911 * }
19912 * },
19913 * [map]: {
19914 * resolvedUri: String,
19915 * [byterange]: {
19916 * offset: Number,
19917 * length: Number
19918 * },
19919 * [bytes]: Uint8Array
19920 * }
19921 * }
19922 * ...where [name] denotes optional properties
19923 *
19924 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
19925 * @param {Object} xhrOptions - the base options to provide to all xhr requests
19926 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
19927 * decryption routines
19928 * @param {Object} segment - a simplified copy of the segmentInfo object
19929 * from SegmentLoader
19930 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
19931 * aborted
19932 * @param {Function} progressFn - a callback that receives progress events from the main
19933 * segment's xhr request
19934 * @param {Function} trackInfoFn - a callback that receives track info
19935 * @param {Function} timingInfoFn - a callback that receives timing info
19936 * @param {Function} videoSegmentTimingInfoFn
19937 * a callback that receives video timing info based on media times and
19938 * any adjustments made by the transmuxer
19939 * @param {Function} audioSegmentTimingInfoFn
19940 * a callback that receives audio timing info based on media times and
19941 * any adjustments made by the transmuxer
19942 * @param {Function} id3Fn - a callback that receives ID3 metadata
19943 * @param {Function} captionsFn - a callback that receives captions
19944 * @param {boolean} isEndOfTimeline
19945 * true if this segment represents the last segment in a timeline
19946 * @param {Function} endedTimelineFn
19947 * a callback made when a timeline is ended, will only be called if
19948 * isEndOfTimeline is true
19949 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
19950 * request, transmuxed if needed
19951 * @param {Function} doneFn - a callback that is executed only once all requests have
19952 * succeeded or failed
19953 * @return {Function} a function that, when invoked, immediately aborts all
19954 * outstanding requests
19955 */
19956
19957
19958 var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
19959 var xhr = _ref12.xhr,
19960 xhrOptions = _ref12.xhrOptions,
19961 decryptionWorker = _ref12.decryptionWorker,
19962 segment = _ref12.segment,
19963 abortFn = _ref12.abortFn,
19964 progressFn = _ref12.progressFn,
19965 trackInfoFn = _ref12.trackInfoFn,
19966 timingInfoFn = _ref12.timingInfoFn,
19967 videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
19968 audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
19969 id3Fn = _ref12.id3Fn,
19970 captionsFn = _ref12.captionsFn,
19971 isEndOfTimeline = _ref12.isEndOfTimeline,
19972 endedTimelineFn = _ref12.endedTimelineFn,
19973 dataFn = _ref12.dataFn,
19974 doneFn = _ref12.doneFn,
19975 onTransmuxerLog = _ref12.onTransmuxerLog;
19976 var activeXhrs = [];
19977 var finishProcessingFn = waitForCompletion({
19978 activeXhrs: activeXhrs,
19979 decryptionWorker: decryptionWorker,
19980 trackInfoFn: trackInfoFn,
19981 timingInfoFn: timingInfoFn,
19982 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19983 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19984 id3Fn: id3Fn,
19985 captionsFn: captionsFn,
19986 isEndOfTimeline: isEndOfTimeline,
19987 endedTimelineFn: endedTimelineFn,
19988 dataFn: dataFn,
19989 doneFn: doneFn,
19990 onTransmuxerLog: onTransmuxerLog
19991 }); // optionally, request the decryption key
19992
19993 if (segment.key && !segment.key.bytes) {
19994 var objects = [segment.key];
19995
19996 if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
19997 objects.push(segment.map.key);
19998 }
19999
20000 var keyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20001 uri: segment.key.resolvedUri,
20002 responseType: 'arraybuffer'
20003 });
20004 var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
20005 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
20006 activeXhrs.push(keyXhr);
20007 } // optionally, request the associated media init segment
20008
20009
20010 if (segment.map && !segment.map.bytes) {
20011 var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);
20012
20013 if (differentMapKey) {
20014 var mapKeyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20015 uri: segment.map.key.resolvedUri,
20016 responseType: 'arraybuffer'
20017 });
20018 var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
20019 var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
20020 activeXhrs.push(mapKeyXhr);
20021 }
20022
20023 var initSegmentOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20024 uri: segment.map.resolvedUri,
20025 responseType: 'arraybuffer',
20026 headers: segmentXhrHeaders(segment.map)
20027 });
20028 var initSegmentRequestCallback = handleInitSegmentResponse({
20029 segment: segment,
20030 finishProcessingFn: finishProcessingFn
20031 });
20032 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
20033 activeXhrs.push(initSegmentXhr);
20034 }
20035
20036 var segmentRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20037 uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
20038 responseType: 'arraybuffer',
20039 headers: segmentXhrHeaders(segment)
20040 });
20041 var segmentRequestCallback = handleSegmentResponse({
20042 segment: segment,
20043 finishProcessingFn: finishProcessingFn,
20044 responseType: segmentRequestOptions.responseType
20045 });
20046 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
20047 segmentXhr.addEventListener('progress', handleProgress({
20048 segment: segment,
20049 progressFn: progressFn,
20050 trackInfoFn: trackInfoFn,
20051 timingInfoFn: timingInfoFn,
20052 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
20053 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
20054 id3Fn: id3Fn,
20055 captionsFn: captionsFn,
20056 isEndOfTimeline: isEndOfTimeline,
20057 endedTimelineFn: endedTimelineFn,
20058 dataFn: dataFn
20059 }));
20060 activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but callbacks should not be
20061 // made multiple times, provide a shared state object
20062
20063 var loadendState = {};
20064 activeXhrs.forEach(function (activeXhr) {
20065 activeXhr.addEventListener('loadend', handleLoadEnd({
20066 loadendState: loadendState,
20067 abortFn: abortFn
20068 }));
20069 });
20070 return function () {
20071 return abortAll(activeXhrs);
20072 };
20073 };
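  /*
   * Usage sketch (illustrative, not part of the library; the xhr wrapper, worker and
   * callback names are assumed): mediaSegmentRequest returns an abort function, so a
   * caller that no longer needs the segment can cancel the key, init-segment and
   * media-segment requests in one call.
   *
   *   var abort = mediaSegmentRequest({
   *     xhr: vhsXhr,
   *     xhrOptions: { timeout: 45000 },
   *     decryptionWorker: decrypter,
   *     segment: { resolvedUri: 'https://example.com/seg-1.ts', transmuxer: worker },
   *     abortFn: onAbort,
   *     progressFn: onProgress,
   *     trackInfoFn: onTrackInfo,
   *     timingInfoFn: onTimingInfo,
   *     videoSegmentTimingInfoFn: onVideoTiming,
   *     audioSegmentTimingInfoFn: onAudioTiming,
   *     id3Fn: onId3,
   *     captionsFn: onCaptions,
   *     isEndOfTimeline: false,
   *     endedTimelineFn: onEndedTimeline,
   *     dataFn: onData,
   *     doneFn: onDone,
   *     onTransmuxerLog: onLog
   *   });
   *   abort(); // e.g. on a rendition switch, aborts all outstanding requests
   */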
20074
20075 /**
20076 * @file - codecs.js - Handles tasks regarding codec strings, such as translating
20077 * them between formats, or parsing codec strings into objects that can be examined.
20078 */
20079 var logFn$1 = logger('CodecUtils');
20080 /**
20081 * Returns a set of codec strings parsed from the playlist, or undefined if
20082 * no codecs were specified in the playlist
20083 *
20084 * @param {Playlist} media the current media playlist
20085 * @return {Object} an object with the video and audio codecs
20086 */
20087
20088 var getCodecs = function getCodecs(media) {
20089 // if the codecs were explicitly specified, use them instead of the
20090 // defaults
20091 var mediaAttributes = media.attributes || {};
20092
20093 if (mediaAttributes.CODECS) {
20094 return parseCodecs(mediaAttributes.CODECS);
20095 }
20096 };
20097
20098 var isMaat = function isMaat(master, media) {
20099 var mediaAttributes = media.attributes || {};
20100 return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
20101 };
20102 var isMuxed = function isMuxed(master, media) {
20103 if (!isMaat(master, media)) {
20104 return true;
20105 }
20106
20107 var mediaAttributes = media.attributes || {};
20108 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
20109
20110 for (var groupId in audioGroup) {
20111 // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
20112 // or there are listed playlists (the case for DASH, as the manifest will have already
20113 // provided all of the details necessary to generate the audio playlist, as opposed to
20114 // HLS' externally requested playlists), then the content is demuxed.
20115 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
20116 return true;
20117 }
20118 }
20119
20120 return false;
20121 };
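  /*
   * Illustrative sketch (not part of the library): a master playlist shaped like the
   * one below is treated as demuxed because the selected AUDIO group entry carries
   * its own uri (the HLS case); an entry with neither uri nor playlists would mean
   * the audio is muxed into the main rendition.
   *
   *   var master = {
   *     mediaGroups: { AUDIO: { aud1: { en: { uri: 'audio-en.m3u8' } } } }
   *   };
   *   var media = { attributes: { AUDIO: 'aud1' } };
   *   isMaat(master, media);  // truthy: an alternate audio group is in play
   *   isMuxed(master, media); // false: the group entry has a uri, so audio is demuxed
   */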
20122 var unwrapCodecList = function unwrapCodecList(codecList) {
20123 var codecs = {};
20124 codecList.forEach(function (_ref) {
20125 var mediaType = _ref.mediaType,
20126 type = _ref.type,
20127 details = _ref.details;
20128 codecs[mediaType] = codecs[mediaType] || [];
20129 codecs[mediaType].push(translateLegacyCodec("" + type + details));
20130 });
20131 Object.keys(codecs).forEach(function (mediaType) {
20132 if (codecs[mediaType].length > 1) {
20133 logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
20134 codecs[mediaType] = null;
20135 return;
20136 }
20137
20138 codecs[mediaType] = codecs[mediaType][0];
20139 });
20140 return codecs;
20141 };
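  /*
   * Worked example (illustrative, not part of the library): unwrapCodecList collapses
   * a parsed codec list to one string per media type, and nulls a type out when it is
   * ambiguous so that mux.js can probe segments for the real codec.
   *
   *   unwrapCodecList([
   *     { mediaType: 'video', type: 'avc1', details: '.640028' },
   *     { mediaType: 'audio', type: 'mp4a', details: '.40.2' }
   *   ]);
   *   // -> { video: 'avc1.640028', audio: 'mp4a.40.2' }
   *   // two video entries instead would yield { video: null, audio: 'mp4a.40.2' }
   */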
20142 var codecCount = function codecCount(codecObj) {
20143 var count = 0;
20144
20145 if (codecObj.audio) {
20146 count++;
20147 }
20148
20149 if (codecObj.video) {
20150 count++;
20151 }
20152
20153 return count;
20154 };
20155 /**
20156 * Calculates the codec strings for a working configuration of
20157 * SourceBuffers to play variant streams in a master playlist. If
20158 * there is no possible working configuration, an empty object will be
20159 * returned.
20160 *
20161 * @param {Object} master the m3u8 object for the master playlist
20162 * @param {Object} media the m3u8 object for the variant playlist
20163 * @return {Object} the codec strings.
20164 *
20165 * @private
20166 */
20167
20168 var codecsForPlaylist = function codecsForPlaylist(master, media) {
20169 var mediaAttributes = media.attributes || {};
20170 var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
20171 // Put another way, there is no way to have a video-only multiple-audio HLS!
20172
20173 if (isMaat(master, media) && !codecInfo.audio) {
20174 if (!isMuxed(master, media)) {
20175 // It is possible for codecs to be specified on the audio media group playlist but
20176 // not on the rendition playlist. This is mostly the case for DASH, where audio and
20177 // video are always separate (and separately specified).
20178 var defaultCodecs = unwrapCodecList(codecsFromDefault(master, mediaAttributes.AUDIO) || []);
20179
20180 if (defaultCodecs.audio) {
20181 codecInfo.audio = defaultCodecs.audio;
20182 }
20183 }
20184 }
20185
20186 return codecInfo;
20187 };
20188
20189 var logFn = logger('PlaylistSelector');
20190
20191 var representationToString = function representationToString(representation) {
20192 if (!representation || !representation.playlist) {
20193 return;
20194 }
20195
20196 var playlist = representation.playlist;
20197 return JSON.stringify({
20198 id: playlist.id,
20199 bandwidth: representation.bandwidth,
20200 width: representation.width,
20201 height: representation.height,
20202 codecs: playlist.attributes && playlist.attributes.CODECS || ''
20203 });
20204 }; // Utilities
20205
20206 /**
20207 * Returns the CSS value for the specified property on an element
20208 * using `getComputedStyle`. Firefox has a long-standing issue where
20209 * getComputedStyle() may return null when running in an iframe with
20210 * `display: none`.
20211 *
20212 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
20213 * @param {HTMLElement} el the HTMLElement to work on
20214 * @param {string} property the property to get the style for
20215 */
20216
20217
20218 var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
20219 if (!el) {
20220 return '';
20221 }
20222
20223 var result = window.getComputedStyle(el);
20224
20225 if (!result) {
20226 return '';
20227 }
20228
20229 return result[property];
20230 };
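  /*
   * Usage sketch (illustrative, not part of the library; `player` is assumed):
   * because of the Firefox iframe issue referenced above, callers should treat ''
   * as "unknown" rather than assume a computed style is always available.
   *
   *   var width = parseInt(safeGetComputedStyle(player.el(), 'width'), 10);
   *   if (isNaN(width)) {
   *     // getComputedStyle returned null (or el was missing); fall back to a default
   *   }
   */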
20231 /**
20232 * Reusable stable sort function
20233 *
20234 * @param {Playlists} array
20235 * @param {Function} sortFn Different comparators
20236 * @function stableSort
20237 */
20238
20239
20240 var stableSort = function stableSort(array, sortFn) {
20241 var newArray = array.slice();
20242 array.sort(function (left, right) {
20243 var cmp = sortFn(left, right);
20244
20245 if (cmp === 0) {
20246 return newArray.indexOf(left) - newArray.indexOf(right);
20247 }
20248
20249 return cmp;
20250 });
20251 };
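  /*
   * Worked example (illustrative, not part of the library): stableSort preserves the
   * original relative order of entries the comparator reports as equal, which plain
   * Array#sort historically did not guarantee. Note that it sorts in place.
   *
   *   var reps = [
   *     { id: 'a', bandwidth: 2e6 },
   *     { id: 'b', bandwidth: 1e6 },
   *     { id: 'c', bandwidth: 1e6 }
   *   ];
   *   stableSort(reps, function (left, right) { return left.bandwidth - right.bandwidth; });
   *   // -> b, c, a ('b' stays ahead of 'c' because it appeared first)
   */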
20252 /**
20253 * A comparator function to sort two playlist object by bandwidth.
20254 *
20255 * @param {Object} left a media playlist object
20256 * @param {Object} right a media playlist object
20257 * @return {number} Greater than zero if the bandwidth attribute of
20258 * left is greater than the corresponding attribute of right. Less
20259 * than zero if the bandwidth of right is greater than left and
20260 * exactly zero if the two are equal.
20261 */
20262
20263
20264 var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
20265 var leftBandwidth;
20266 var rightBandwidth;
20267
20268 if (left.attributes.BANDWIDTH) {
20269 leftBandwidth = left.attributes.BANDWIDTH;
20270 }
20271
20272 leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;
20273
20274 if (right.attributes.BANDWIDTH) {
20275 rightBandwidth = right.attributes.BANDWIDTH;
20276 }
20277
20278 rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;
20279 return leftBandwidth - rightBandwidth;
20280 };
20281 /**
20282 * A comparator function to sort two playlist object by resolution (width).
20283 *
20284 * @param {Object} left a media playlist object
20285 * @param {Object} right a media playlist object
20286 * @return {number} Greater than zero if the resolution.width attribute of
20287 * left is greater than the corresponding attribute of right. Less
20288 * than zero if the resolution.width of right is greater than left and
20289 * exactly zero if the two are equal.
20290 */
20291
20292 var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
20293 var leftWidth;
20294 var rightWidth;
20295
20296 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
20297 leftWidth = left.attributes.RESOLUTION.width;
20298 }
20299
20300 leftWidth = leftWidth || window.Number.MAX_VALUE;
20301
20302 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
20303 rightWidth = right.attributes.RESOLUTION.width;
20304 }
20305
20306 rightWidth = rightWidth || window.Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
20307 // have the same media dimensions/resolution
20308
20309 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
20310 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
20311 }
20312
20313 return leftWidth - rightWidth;
20314 };
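  /*
   * Illustrative sketch (not part of the library): both comparators treat a missing
   * attribute as Number.MAX_VALUE, which pushes playlists without BANDWIDTH or
   * RESOLUTION to the end of an ascending sort.
   *
   *   var withBandwidth = { attributes: { BANDWIDTH: 4e6 } };
   *   var withoutBandwidth = { attributes: {} };
   *   comparePlaylistBandwidth(withBandwidth, withoutBandwidth) < 0; // true
   */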
20315 /**
20316 * Chooses the appropriate media playlist based on bandwidth and player size
20317 *
20318 * @param {Object} master
20319 * Object representation of the master manifest
20320 * @param {number} playerBandwidth
20321 * Current calculated bandwidth of the player
20322 * @param {number} playerWidth
20323 * Current width of the player element (should account for the device pixel ratio)
20324 * @param {number} playerHeight
20325 * Current height of the player element (should account for the device pixel ratio)
20326 * @param {boolean} limitRenditionByPlayerDimensions
20327 * True if the player width and height should be used during the selection, false otherwise
20328 * @param {Object} masterPlaylistController
20329 * the current masterPlaylistController object
20330 * @return {Playlist} the highest bitrate playlist less than the
20331 * currently detected bandwidth, accounting for some amount of
20332 * bandwidth variance
20333 */
20334
20335 var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
20336 // If we end up getting called before `master` is available, exit early
20337 if (!master) {
20338 return;
20339 }
20340
20341 var options = {
20342 bandwidth: playerBandwidth,
20343 width: playerWidth,
20344 height: playerHeight,
20345 limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
20346 };
20347 var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.
20348
20349 if (Playlist.isAudioOnly(master)) {
20350 playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
20351 // at the bottom of this function for debugging.
20352
20353 options.audioOnly = true;
20354 } // convert the playlists to an intermediary representation to make comparisons easier
20355
20356
20357 var sortedPlaylistReps = playlists.map(function (playlist) {
20358 var bandwidth;
20359 var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
20360 var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
20361 bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
20362 bandwidth = bandwidth || window.Number.MAX_VALUE;
20363 return {
20364 bandwidth: bandwidth,
20365 width: width,
20366 height: height,
20367 playlist: playlist
20368 };
20369 });
20370 stableSort(sortedPlaylistReps, function (left, right) {
20371 return left.bandwidth - right.bandwidth;
20372 }); // filter out any playlists that have been excluded due to
20373 // incompatible configurations
20374
20375 sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
20376 return !Playlist.isIncompatible(rep.playlist);
20377 }); // filter out any playlists that have been disabled manually through the representations
20378 // api or blacklisted temporarily due to playback errors.
20379
20380 var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
20381 return Playlist.isEnabled(rep.playlist);
20382 });
20383
20384 if (!enabledPlaylistReps.length) {
20385 // if there are no enabled playlists, then they have all been blacklisted or disabled
20386 // by the user through the representations api. In this case, ignore blacklisting and
20387 // fallback to what the user wants by using playlists the user has not disabled.
20388 enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
20389 return !Playlist.isDisabled(rep.playlist);
20390 });
20391 } // filter out any variant that has greater effective bitrate
20392 // than the current estimated bandwidth
20393
20394
20395 var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
20396 return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
20397 });
20398 var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
20399 // and then take the very first element
20400
20401 var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
20402 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
20403 })[0]; // if we're not going to limit renditions by player size, make an early decision.
20404
20405 if (limitRenditionByPlayerDimensions === false) {
20406 var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
20407
20408 if (_chosenRep && _chosenRep.playlist) {
20409 var type = 'sortedPlaylistReps';
20410
20411 if (bandwidthBestRep) {
20412 type = 'bandwidthBestRep';
20413 }
20414
20415 if (enabledPlaylistReps[0]) {
20416 type = 'enabledPlaylistReps';
20417 }
20418
20419 logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
20420 return _chosenRep.playlist;
20421 }
20422
20423 logFn('could not choose a playlist with options', options);
20424 return null;
20425 } // filter out playlists without resolution information
20426
20427
20428 var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
20429 return rep.width && rep.height;
20430 }); // sort variants by resolution
20431
20432 stableSort(haveResolution, function (left, right) {
20433 return left.width - right.width;
20434 }); // if we have the exact resolution as the player, use it
20435
20436 var resolutionBestRepList = haveResolution.filter(function (rep) {
20437 return rep.width === playerWidth && rep.height === playerHeight;
20438 });
20439 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
20440
20441 var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
20442 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
20443 })[0];
20444 var resolutionPlusOneList;
20445 var resolutionPlusOneSmallest;
20446 var resolutionPlusOneRep; // find the smallest variant that is larger than the player
20447 // if there is no match of exact resolution
20448
20449 if (!resolutionBestRep) {
20450 resolutionPlusOneList = haveResolution.filter(function (rep) {
20451 return rep.width > playerWidth || rep.height > playerHeight;
20452 }); // find all the variants that have the same smallest resolution
20453
20454 resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
20455 return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
20456 }); // ensure that we also pick the highest bandwidth variant that
20457 // is just-larger-than the video player
20458
20459 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
20460 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
20461 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
20462 })[0];
20463 }
20464
20465 var leastPixelDiffRep; // If this selector proves to be better than others,
20466 // resolutionPlusOneRep and resolutionBestRep and all
20467 // the code involving them should be removed.
20468
20469 if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
20470 // find the variant that is closest to the player's pixel size
20471 var leastPixelDiffList = haveResolution.map(function (rep) {
20472 rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
20473 return rep;
20474 }); // get the highest bandwidth, closest resolution playlist
20475
20476 stableSort(leastPixelDiffList, function (left, right) {
20477 // sort by highest bandwidth if pixelDiff is the same
20478 if (left.pixelDiff === right.pixelDiff) {
20479 return right.bandwidth - left.bandwidth;
20480 }
20481
20482 return left.pixelDiff - right.pixelDiff;
20483 });
20484 leastPixelDiffRep = leastPixelDiffList[0];
20485 } // fallback chain of variants
20486
20487
20488 var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
20489
20490 if (chosenRep && chosenRep.playlist) {
20491 var _type = 'sortedPlaylistReps';
20492
20493 if (leastPixelDiffRep) {
20494 _type = 'leastPixelDiffRep';
20495 } else if (resolutionPlusOneRep) {
20496 _type = 'resolutionPlusOneRep';
20497 } else if (resolutionBestRep) {
20498 _type = 'resolutionBestRep';
20499 } else if (bandwidthBestRep) {
20500 _type = 'bandwidthBestRep';
20501 } else if (enabledPlaylistReps[0]) {
20502 _type = 'enabledPlaylistReps';
20503 }
20504
20505 logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
20506 return chosenRep.playlist;
20507 }
20508
20509 logFn('could not choose a playlist with options', options);
20510 return null;
20511 };
20512
20513 /**
20514 * Chooses the appropriate media playlist based on the most recent
20515 * bandwidth estimate and the player size.
20516 *
20517 * Expects to be called within the context of an instance of VhsHandler
20518 *
20519 * @return {Playlist} the highest bitrate playlist less than the
20520 * currently detected bandwidth, accounting for some amount of
20521 * bandwidth variance
20522 */
20523
20524 var lastBandwidthSelector = function lastBandwidthSelector() {
20525 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
20526 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
20527 };
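// Illustrative sketch (not part of the original bundle): selectors like
// lastBandwidthSelector are called with `this` bound to the VhsHandler, so a
// custom selector is typically installed on the handler's selectPlaylist
// property. The player id and the tech access path below are assumptions for
// the example only.
//
// var player = videojs('example-player');
// player.ready(function () {
//   var vhs = player.tech().vhs;
//   // always choose based on the most recent bandwidth estimate
//   vhs.selectPlaylist = lastBandwidthSelector;
// });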
20528 /**
20529 * Chooses the appropriate media playlist based on an
20530 * exponential-weighted moving average of the bandwidth after
20531 * filtering for player size.
20532 *
20533 * Expects to be called within the context of an instance of VhsHandler
20534 *
20535 * @param {number} decay - a number between 0 and 1. Higher values of
20536 * this parameter will cause previous bandwidth estimates to lose
20537 * significance more quickly.
20538 * @return {Function} a function which can be invoked to create a new
20539 * playlist selector function.
20540 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
20541 */
20542
20543 var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
20544 var average = -1;
20545 var lastSystemBandwidth = -1;
20546
20547 if (decay < 0 || decay > 1) {
20548 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
20549 }
20550
20551 return function () {
20552 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
20553
20554 if (average < 0) {
20555 average = this.systemBandwidth;
20556 lastSystemBandwidth = this.systemBandwidth;
} // stop the average value from decaying every 250ms
20558 // when the systemBandwidth is constant
20559 // and
20560 // stop average from setting to a very low value when the
20561 // systemBandwidth becomes 0 in case of chunk cancellation
20562
20563
20564 if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
20565 average = decay * this.systemBandwidth + (1 - decay) * average;
20566 lastSystemBandwidth = this.systemBandwidth;
20567 }
20568
20569 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
20570 };
20571 };
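// Illustrative sketch (not part of the original bundle): the update above is a
// standard exponential moving average. Worked through with hypothetical numbers
// and decay = 0.5:
//
//   average = 0.5 * systemBandwidth + (1 - 0.5) * average
//
// var average = 4e6;                    // previous estimate: 4 Mbps
// average = 0.5 * 2e6 + 0.5 * average;  // one 2 Mbps sample moves it to 3 Mbps
//
// The factory would be wired up the same way as lastBandwidthSelector, e.g.
// vhs.selectPlaylist = movingAverageBandwidthSelector(0.5) for a hypothetical
// handler reference.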
20572 /**
20573 * Chooses the appropriate media playlist based on the potential to rebuffer
20574 *
20575 * @param {Object} settings
20576 * Object of information required to use this selector
20577 * @param {Object} settings.master
20578 * Object representation of the master manifest
20579 * @param {number} settings.currentTime
20580 * The current time of the player
20581 * @param {number} settings.bandwidth
20582 * Current measured bandwidth
20583 * @param {number} settings.duration
20584 * Duration of the media
20585 * @param {number} settings.segmentDuration
20586 * Segment duration to be used in round trip time calculations
20587 * @param {number} settings.timeUntilRebuffer
20588 * Time left in seconds until the player has to rebuffer
20589 * @param {number} settings.currentTimeline
20590 * The current timeline segments are being loaded from
20591 * @param {SyncController} settings.syncController
20592 * SyncController for determining if we have a sync point for a given playlist
20593 * @return {Object|null}
20594 * {Object} return.playlist
20595 * The highest bandwidth playlist with the least amount of rebuffering
* {number} return.rebufferingImpact
* The amount of time in seconds that switching to this playlist will rebuffer. A
20598 * negative value means that switching will cause zero rebuffering.
20599 */
20600
20601 var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
20602 var master = settings.master,
20603 currentTime = settings.currentTime,
20604 bandwidth = settings.bandwidth,
20605 duration = settings.duration,
20606 segmentDuration = settings.segmentDuration,
20607 timeUntilRebuffer = settings.timeUntilRebuffer,
20608 currentTimeline = settings.currentTimeline,
20609 syncController = settings.syncController; // filter out any playlists that have been excluded due to
20610 // incompatible configurations
20611
20612 var compatiblePlaylists = master.playlists.filter(function (playlist) {
20613 return !Playlist.isIncompatible(playlist);
20614 }); // filter out any playlists that have been disabled manually through the representations
20615 // api or blacklisted temporarily due to playback errors.
20616
20617 var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
20618
20619 if (!enabledPlaylists.length) {
20620 // if there are no enabled playlists, then they have all been blacklisted or disabled
20621 // by the user through the representations api. In this case, ignore blacklisting and
// fall back to what the user wants by using playlists the user has not disabled.
20623 enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
20624 return !Playlist.isDisabled(playlist);
20625 });
20626 }
20627
20628 var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
20629 var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
20630 var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
20631 // sync request first. This will double the request time
20632
20633 var numRequests = syncPoint ? 1 : 2;
20634 var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
20635 var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
20636 return {
20637 playlist: playlist,
20638 rebufferingImpact: rebufferingImpact
20639 };
20640 });
20641 var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
20642 return estimate.rebufferingImpact <= 0;
20643 }); // Sort by bandwidth DESC
20644
20645 stableSort(noRebufferingPlaylists, function (a, b) {
20646 return comparePlaylistBandwidth(b.playlist, a.playlist);
20647 });
20648
20649 if (noRebufferingPlaylists.length) {
20650 return noRebufferingPlaylists[0];
20651 }
20652
20653 stableSort(rebufferingEstimates, function (a, b) {
20654 return a.rebufferingImpact - b.rebufferingImpact;
20655 });
20656 return rebufferingEstimates[0] || null;
20657 };
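// Illustrative sketch (not part of the original bundle), with hypothetical
// numbers: if a playlist's estimated segment request time is 4s and it has no
// sync point (numRequests = 2), then with 5s of buffer remaining:
//
//   rebufferingImpact = 4 * 2 - 5 = 3
//
// meaning switching to it is expected to rebuffer for ~3 seconds. A playlist
// with a sync point (numRequests = 1) and the same 4s estimate would yield
// 4 * 1 - 5 = -1, i.e. no expected rebuffering, and would be preferred.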
20658 /**
20659 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
20660 * one with video. If no renditions with video exist, return the lowest audio rendition.
20661 *
20662 * Expects to be called within the context of an instance of VhsHandler
20663 *
20664 * @return {Object|null}
20665 * {Object} return.playlist
20666 * The lowest bitrate playlist that contains a video codec. If no such rendition
20667 * exists pick the lowest audio rendition.
20668 */
20669
20670 var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
20671 var _this = this;
20672
20673 // filter out any playlists that have been excluded due to
20674 // incompatible configurations or playback errors
20675 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
20676
20677 stableSort(playlists, function (a, b) {
20678 return comparePlaylistBandwidth(a, b);
20679 }); // Parse and assume that playlists with no video codec have no video
20680 // (this is not necessarily true, although it is generally true).
20681 //
20682 // If an entire manifest has no valid videos everything will get filtered
20683 // out.
20684
20685 var playlistsWithVideo = playlists.filter(function (playlist) {
20686 return !!codecsForPlaylist(_this.playlists.master, playlist).video;
20687 });
20688 return playlistsWithVideo[0] || null;
20689 };
20690
20691 /**
20692 * Combine all segments into a single Uint8Array
20693 *
20694 * @param {Object} segmentObj
20695 * @return {Uint8Array} concatenated bytes
20696 * @private
20697 */
20698 var concatSegments = function concatSegments(segmentObj) {
20699 var offset = 0;
20700 var tempBuffer;
20701
20702 if (segmentObj.bytes) {
20703 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
20704
20705 segmentObj.segments.forEach(function (segment) {
20706 tempBuffer.set(segment, offset);
20707 offset += segment.byteLength;
20708 });
20709 }
20710
20711 return tempBuffer;
20712 };
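// Illustrative sketch (not part of the original bundle): `bytes` is the total
// byte length and `segments` are the individual payloads laid out back to back.
//
// concatSegments({
//   bytes: 5,
//   segments: [new Uint8Array([1, 2, 3]), new Uint8Array([4, 5])]
// }); // => Uint8Array [1, 2, 3, 4, 5]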
20713
20714 /**
20715 * @file text-tracks.js
20716 */
20717 /**
20718 * Create captions text tracks on video.js if they do not exist
20719 *
20720 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20721 * @param {Object} tech the video.js tech
20722 * @param {Object} captionStream the caption stream to create
20723 * @private
20724 */
20725
20726 var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
20727 if (!inbandTextTracks[captionStream]) {
20728 tech.trigger({
20729 type: 'usage',
20730 name: 'vhs-608'
20731 });
20732 tech.trigger({
20733 type: 'usage',
20734 name: 'hls-608'
20735 });
20736 var instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them
20737
20738 if (/^cc708_/.test(captionStream)) {
20739 instreamId = 'SERVICE' + captionStream.split('_')[1];
20740 }
20741
20742 var track = tech.textTracks().getTrackById(instreamId);
20743
20744 if (track) {
// Reuse an existing track with a CC# id because this was
20746 // very likely created by videojs-contrib-hls from information
20747 // in the m3u8 for us to use
20748 inbandTextTracks[captionStream] = track;
20749 } else {
20750 // This section gets called when we have caption services that aren't specified in the manifest.
20751 // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
20752 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
20753 var label = captionStream;
20754 var language = captionStream;
20755 var def = false;
20756 var captionService = captionServices[instreamId];
20757
20758 if (captionService) {
20759 label = captionService.label;
20760 language = captionService.language;
20761 def = captionService.default;
20762 } // Otherwise, create a track with the default `CC#` label and
20763 // without a language
20764
20765
20766 inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
20767 kind: 'captions',
20768 id: instreamId,
20769 // TODO: investigate why this doesn't seem to turn the caption on by default
20770 default: def,
20771 label: label,
20772 language: language
20773 }, false).track;
20774 }
20775 }
20776 };
20777 /**
20778 * Add caption text track data to a source handler given an array of captions
20779 *
* @param {Object} options an object containing:
20781 * @param {Object} inbandTextTracks the inband text tracks
20782 * @param {number} timestampOffset the timestamp offset of the source buffer
20783 * @param {Array} captionArray an array of caption data
20784 * @private
20785 */
20786
20787 var addCaptionData = function addCaptionData(_ref) {
20788 var inbandTextTracks = _ref.inbandTextTracks,
20789 captionArray = _ref.captionArray,
20790 timestampOffset = _ref.timestampOffset;
20791
20792 if (!captionArray) {
20793 return;
20794 }
20795
20796 var Cue = window.WebKitDataCue || window.VTTCue;
20797 captionArray.forEach(function (caption) {
20798 var track = caption.stream;
20799 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
20800 });
20801 };
20802 /**
* Define properties on a cue for backward compatibility,
* but warn the user that the way that they are using it
* is deprecated and will be removed at a later date.
20806 *
20807 * @param {Cue} cue the cue to add the properties on
20808 * @private
20809 */
20810
20811 var deprecateOldCue = function deprecateOldCue(cue) {
20812 Object.defineProperties(cue.frame, {
20813 id: {
20814 get: function get() {
20815 videojs__default["default"].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
20816 return cue.value.key;
20817 }
20818 },
20819 value: {
20820 get: function get() {
20821 videojs__default["default"].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
20822 return cue.value.data;
20823 }
20824 },
20825 privateData: {
20826 get: function get() {
20827 videojs__default["default"].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
20828 return cue.value.data;
20829 }
20830 }
20831 });
20832 };
20833 /**
20834 * Add metadata text track data to a source handler given an array of metadata
20835 *
* @param {Object} options an object containing:
* @param {Object} inbandTextTracks the inband text tracks
* @param {Array} metadataArray an array of metadata
20839 * @param {number} timestampOffset the timestamp offset of the source buffer
20840 * @param {number} videoDuration the duration of the video
20841 * @private
20842 */
20843
20844
20845 var addMetadata = function addMetadata(_ref2) {
20846 var inbandTextTracks = _ref2.inbandTextTracks,
20847 metadataArray = _ref2.metadataArray,
20848 timestampOffset = _ref2.timestampOffset,
20849 videoDuration = _ref2.videoDuration;
20850
20851 if (!metadataArray) {
20852 return;
20853 }
20854
20855 var Cue = window.WebKitDataCue || window.VTTCue;
20856 var metadataTrack = inbandTextTracks.metadataTrack_;
20857
20858 if (!metadataTrack) {
20859 return;
20860 }
20861
20862 metadataArray.forEach(function (metadata) {
20863 var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
20864 // ignore this bit of metadata.
// This likely occurs when you have a non-timed ID3 tag like TIT2,
20866 // which is the "Title/Songname/Content description" frame
20867
20868 if (typeof time !== 'number' || window.isNaN(time) || time < 0 || !(time < Infinity)) {
20869 return;
20870 }
20871
20872 metadata.frames.forEach(function (frame) {
20873 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
20874 cue.frame = frame;
20875 cue.value = frame;
20876 deprecateOldCue(cue);
20877 metadataTrack.addCue(cue);
20878 });
20879 });
20880
20881 if (!metadataTrack.cues || !metadataTrack.cues.length) {
20882 return;
} // Update the metadata cues so that
20884 // the endTime of each cue is the startTime of the next cue
20885 // the endTime of last cue is the duration of the video
20886
20887
20888 var cues = metadataTrack.cues;
20889 var cuesArray = []; // Create a copy of the TextTrackCueList...
// ...disregarding cues with a falsy value
20891
20892 for (var i = 0; i < cues.length; i++) {
20893 if (cues[i]) {
20894 cuesArray.push(cues[i]);
20895 }
20896 } // Group cues by their startTime value
20897
20898
20899 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
20900 var timeSlot = obj[cue.startTime] || [];
20901 timeSlot.push(cue);
20902 obj[cue.startTime] = timeSlot;
20903 return obj;
20904 }, {}); // Sort startTimes by ascending order
20905
20906 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
20907 return Number(a) - Number(b);
20908 }); // Map each cue group's endTime to the next group's startTime
20909
20910 sortedStartTimes.forEach(function (startTime, idx) {
20911 var cueGroup = cuesGroupedByStartTime[startTime];
var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime to the next group's startTime
20913
20914 cueGroup.forEach(function (cue) {
20915 cue.endTime = nextTime;
20916 });
20917 });
20918 };
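// Illustrative sketch (not part of the original bundle): given ID3 cues starting
// at 0s, 5s and 9s and a videoDuration of 12, the chaining above sets their
// endTimes to 5, 9 and 12 respectively, so each metadata cue remains "active"
// until the next one begins (or the video ends).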
20919 /**
20920 * Create metadata text track on video.js if it does not exist
20921 *
20922 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20923 * @param {string} dispatchType the inband metadata track dispatch type
20924 * @param {Object} tech the video.js tech
20925 * @private
20926 */
20927
20928 var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
20929 if (inbandTextTracks.metadataTrack_) {
20930 return;
20931 }
20932
20933 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
20934 kind: 'metadata',
20935 label: 'Timed Metadata'
20936 }, false).track;
20937 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
20938 };
20939 /**
20940 * Remove cues from a track on video.js.
20941 *
20942 * @param {Double} start start of where we should remove the cue
* @param {Double} end end of where we should remove the cue
20944 * @param {Object} track the text track to remove the cues from
20945 * @private
20946 */
20947
20948 var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
20949 var i;
20950 var cue;
20951
20952 if (!track) {
20953 return;
20954 }
20955
20956 if (!track.cues) {
20957 return;
20958 }
20959
20960 i = track.cues.length;
20961
20962 while (i--) {
20963 cue = track.cues[i]; // Remove any cue within the provided start and end time
20964
20965 if (cue.startTime >= start && cue.endTime <= end) {
20966 track.removeCue(cue);
20967 }
20968 }
20969 };
20970 /**
20971 * Remove duplicate cues from a track on video.js (a cue is considered a
20972 * duplicate if it has the same time interval and text as another)
20973 *
20974 * @param {Object} track the text track to remove the duplicate cues from
20975 * @private
20976 */
20977
20978 var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
20979 var cues = track.cues;
20980
20981 if (!cues) {
20982 return;
20983 }
20984
20985 for (var i = 0; i < cues.length; i++) {
20986 var duplicates = [];
20987 var occurrences = 0;
20988
20989 for (var j = 0; j < cues.length; j++) {
20990 if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
20991 occurrences++;
20992
20993 if (occurrences > 1) {
20994 duplicates.push(cues[j]);
20995 }
20996 }
20997 }
20998
20999 if (duplicates.length) {
21000 duplicates.forEach(function (dupe) {
21001 return track.removeCue(dupe);
21002 });
21003 }
21004 }
21005 };
21006
21007 /**
21008 * mux.js
21009 *
21010 * Copyright (c) Brightcove
21011 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
21012 */
21013 var ONE_SECOND_IN_TS = 90000,
21014 // 90kHz clock
21015 secondsToVideoTs,
21016 secondsToAudioTs,
21017 videoTsToSeconds,
21018 audioTsToSeconds,
21019 audioTsToVideoTs,
21020 videoTsToAudioTs,
21021 metadataTsToSeconds;
21022
21023 secondsToVideoTs = function secondsToVideoTs(seconds) {
21024 return seconds * ONE_SECOND_IN_TS;
21025 };
21026
21027 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
21028 return seconds * sampleRate;
21029 };
21030
21031 videoTsToSeconds = function videoTsToSeconds(timestamp) {
21032 return timestamp / ONE_SECOND_IN_TS;
21033 };
21034
21035 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
21036 return timestamp / sampleRate;
21037 };
21038
21039 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
21040 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
21041 };
21042
21043 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
21044 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
21045 };
21046 /**
21047 * Adjust ID3 tag or caption timing information by the timeline pts values
21048 * (if keepOriginalTimestamps is false) and convert to seconds
21049 */
21050
21051
21052 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
21053 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
21054 };
21055
21056 var clock = {
21057 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,
21058 secondsToVideoTs: secondsToVideoTs,
21059 secondsToAudioTs: secondsToAudioTs,
21060 videoTsToSeconds: videoTsToSeconds,
21061 audioTsToSeconds: audioTsToSeconds,
21062 audioTsToVideoTs: audioTsToVideoTs,
21063 videoTsToAudioTs: videoTsToAudioTs,
21064 metadataTsToSeconds: metadataTsToSeconds
21065 };
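// Illustrative sketch (not part of the original bundle): a few sample
// conversions against the 90kHz MPEG-TS clock.
//
// clock.secondsToVideoTs(2);            // => 180000 (2s * 90000 ticks/s)
// clock.videoTsToSeconds(45000);        // => 0.5
// clock.audioTsToVideoTs(44100, 44100); // => 90000 (1s of 44.1kHz audio)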
21066
21067 /**
21068 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
21069 * front of current time.
21070 *
21071 * @param {Array} buffer
21072 * The current buffer of gop information
21073 * @param {number} currentTime
21074 * The current time
21075 * @param {Double} mapping
21076 * Offset to map display time to stream presentation time
21077 * @return {Array}
21078 * List of gops considered safe to append over
21079 */
21080
21081 var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
21082 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
21083 return [];
21084 } // pts value for current time + 3 seconds to give a bit more wiggle room
21085
21086
21087 var currentTimePts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);
21088 var i;
21089
21090 for (i = 0; i < buffer.length; i++) {
21091 if (buffer[i].pts > currentTimePts) {
21092 break;
21093 }
21094 }
21095
21096 return buffer.slice(i);
21097 };
21098 /**
21099 * Appends gop information (timing and byteLength) received by the transmuxer for the
21100 * gops appended in the last call to appendBuffer
21101 *
21102 * @param {Array} buffer
21103 * The current buffer of gop information
21104 * @param {Array} gops
21105 * List of new gop information
21106 * @param {boolean} replace
21107 * If true, replace the buffer with the new gop information. If false, append the
21108 * new gop information to the buffer in the right location of time.
21109 * @return {Array}
21110 * Updated list of gop information
21111 */
21112
21113 var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
21114 if (!gops.length) {
21115 return buffer;
21116 }
21117
21118 if (replace) {
21119 // If we are in safe append mode, then completely overwrite the gop buffer
// with the most recent appended data. This will make sure that when appending
21121 // future segments, we only try to align with gops that are both ahead of current
21122 // time and in the last segment appended.
21123 return gops.slice();
21124 }
21125
21126 var start = gops[0].pts;
21127 var i = 0;
21128
21129 for (i; i < buffer.length; i++) {
21130 if (buffer[i].pts >= start) {
21131 break;
21132 }
21133 }
21134
21135 return buffer.slice(0, i).concat(gops);
21136 };
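// Illustrative sketch (not part of the original bundle): in append mode
// (replace = false), buffered gop info at or after the first new gop's pts is
// dropped in favor of the new information.
//
// updateGopBuffer(
//   [{ pts: 0 }, { pts: 3000 }, { pts: 6000 }],
//   [{ pts: 3000 }, { pts: 9000 }],
//   false
// ); // => [{ pts: 0 }, { pts: 3000 }, { pts: 9000 }]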
21137 /**
21138 * Removes gop information in buffer that overlaps with provided start and end
21139 *
21140 * @param {Array} buffer
21141 * The current buffer of gop information
21142 * @param {Double} start
21143 * position to start the remove at
21144 * @param {Double} end
21145 * position to end the remove at
21146 * @param {Double} mapping
21147 * Offset to map display time to stream presentation time
21148 */
21149
21150 var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
21151 var startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
21152 var endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
21153 var updatedBuffer = buffer.slice();
21154 var i = buffer.length;
21155
21156 while (i--) {
21157 if (buffer[i].pts <= endPts) {
21158 break;
21159 }
21160 }
21161
21162 if (i === -1) {
21163 // no removal because end of remove range is before start of buffer
21164 return updatedBuffer;
21165 }
21166
21167 var j = i + 1;
21168
21169 while (j--) {
21170 if (buffer[j].pts <= startPts) {
21171 break;
21172 }
21173 } // clamp remove range start to 0 index
21174
21175
21176 j = Math.max(j, 0);
21177 updatedBuffer.splice(j, i - j + 1);
21178 return updatedBuffer;
21179 };
21180
21181 var shallowEqual = function shallowEqual(a, b) {
// if both are falsy (e.g. undefined)
// or one or the other is falsy
// they are not equal
21185 if (!a && !b || !a && b || a && !b) {
21186 return false;
21187 } // they are the same object and thus, equal
21188
21189
21190 if (a === b) {
21191 return true;
21192 } // sort keys so we can make sure they have
21193 // all the same keys later.
21194
21195
21196 var akeys = Object.keys(a).sort();
21197 var bkeys = Object.keys(b).sort(); // different number of keys, not equal
21198
21199 if (akeys.length !== bkeys.length) {
21200 return false;
21201 }
21202
21203 for (var i = 0; i < akeys.length; i++) {
21204 var key = akeys[i]; // different sorted keys, not equal
21205
21206 if (key !== bkeys[i]) {
21207 return false;
21208 } // different values, not equal
21209
21210
21211 if (a[key] !== b[key]) {
21212 return false;
21213 }
21214 }
21215
21216 return true;
21217 };
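// Illustrative sketch (not part of the original bundle): values are compared
// with !==, so equality is only one level deep, and two undefined inputs are
// deliberately not equal.
//
// shallowEqual({ a: 1, b: 2 }, { b: 2, a: 1 }); // => true (keys are sorted first)
// shallowEqual({ a: {} }, { a: {} });           // => false (different references)
// shallowEqual(undefined, undefined);           // => false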
21218
21219 // https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
21220 var QUOTA_EXCEEDED_ERR = 22;
21221
21222 /**
21223 * The segment loader has no recourse except to fetch a segment in the
21224 * current playlist and use the internal timestamps in that segment to
21225 * generate a syncPoint. This function returns a good candidate index
21226 * for that process.
21227 *
* @param {number} currentTimeline - the timeline the loader is currently following
* @param {Array} segments - the segments array from a playlist
* @param {number} targetTime - the cumulative segment duration (within the current
* timeline) to reach before a candidate is chosen
* @return {number} An index of a segment from the playlist to load
21230 */
21231
21232 var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
21233 segments = segments || [];
21234 var timelineSegments = [];
21235 var time = 0;
21236
21237 for (var i = 0; i < segments.length; i++) {
21238 var segment = segments[i];
21239
21240 if (currentTimeline === segment.timeline) {
21241 timelineSegments.push(i);
21242 time += segment.duration;
21243
21244 if (time > targetTime) {
21245 return i;
21246 }
21247 }
21248 }
21249
21250 if (timelineSegments.length === 0) {
21251 return 0;
21252 } // default to the last timeline segment
21253
21254
21255 return timelineSegments[timelineSegments.length - 1];
21256 }; // In the event of a quota exceeded error, keep at least one second of back buffer. This
21257 // number was arbitrarily chosen and may be updated in the future, but seemed reasonable
21258 // as a start to prevent any potential issues with removing content too close to the
21259 // playhead.
21260
var MIN_BACK_BUFFER = 1; // in seconds

var CHECK_BUFFER_DELAY = 500; // in ms
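// Illustrative sketch (not part of the original bundle) for
// getSyncSegmentCandidate above: with four 10s segments all on timeline 1 and a
// targetTime of 25, the cumulative duration first exceeds 25 at index 2.
//
// getSyncSegmentCandidate(1, [
//   { timeline: 1, duration: 10 },
//   { timeline: 1, duration: 10 },
//   { timeline: 1, duration: 10 },
//   { timeline: 1, duration: 10 }
// ], 25); // => 2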
21264
21265 var finite = function finite(num) {
21266 return typeof num === 'number' && isFinite(num);
21267 }; // With most content hovering around 30fps, if a segment has a duration less than a half
21268 // frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
21269 // not accurately reflect the rest of the content.
21270
21271
21272 var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
21273 var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
// Although these checks would most likely also apply to non-'main' loader types, for
// now they are restricted to narrow the scope of our checks.
21276 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
21277 return null;
21278 }
21279
21280 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
21281 return 'Neither audio nor video found in segment.';
21282 }
21283
21284 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
21285 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
21286 }
21287
21288 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
21289 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
21290 }
21291
21292 return null;
21293 };
21294 /**
21295 * Calculates a time value that is safe to remove from the back buffer without interrupting
21296 * playback.
21297 *
21298 * @param {TimeRange} seekable
21299 * The current seekable range
21300 * @param {number} currentTime
21301 * The current time of the player
21302 * @param {number} targetDuration
21303 * The target duration of the current playlist
21304 * @return {number}
21305 * Time that is safe to remove from the back buffer without interrupting playback
21306 */
21307
21308 var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
21309 // 30 seconds before the playhead provides a safe default for trimming.
21310 //
21311 // Choosing a reasonable default is particularly important for high bitrate content and
21312 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
21313 // throw an APPEND_BUFFER_ERR.
21314 var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
21315
21316 if (seekable.length) {
21317 // Some live playlists may have a shorter window of content than the full allowed back
21318 // buffer. For these playlists, don't save content that's no longer within the window.
21319 trimTime = Math.max(trimTime, seekable.start(0));
21320 } // Don't remove within target duration of the current time to avoid the possibility of
21321 // removing the GOP currently being played, as removing it can cause playback stalls.
21322
21323
21324 var maxTrimTime = currentTime - targetDuration;
21325 return Math.min(maxTrimTime, trimTime);
21326 };
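// Illustrative sketch (not part of the original bundle), with hypothetical
// numbers and the default Config.BACK_BUFFER_LENGTH of 30: at currentTime 100
// with a live window starting at 80 and a targetDuration of 10:
//
//   trimTime    = max(100 - 30, 80) = 80
//   maxTrimTime = 100 - 10          = 90
//   result      = min(90, 80)       = 80
//
// so only content before the live window's start is removed.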
21327 var segmentInfoString = function segmentInfoString(segmentInfo) {
21328 var startOfSegment = segmentInfo.startOfSegment,
21329 duration = segmentInfo.duration,
21330 segment = segmentInfo.segment,
21331 part = segmentInfo.part,
21332 _segmentInfo$playlist = segmentInfo.playlist,
21333 seq = _segmentInfo$playlist.mediaSequence,
21334 id = _segmentInfo$playlist.id,
21335 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
21336 segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
21337 index = segmentInfo.mediaIndex,
21338 partIndex = segmentInfo.partIndex,
21339 timeline = segmentInfo.timeline;
21340 var segmentLen = segments.length - 1;
21341 var selection = 'mediaIndex/partIndex increment';
21342
21343 if (segmentInfo.getMediaInfoForTime) {
21344 selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
21345 } else if (segmentInfo.isSyncRequest) {
21346 selection = 'getSyncSegmentCandidate (isSyncRequest)';
21347 }
21348
21349 if (segmentInfo.independent) {
21350 selection += " with independent " + segmentInfo.independent;
21351 }
21352
21353 var hasPartIndex = typeof partIndex === 'number';
21354 var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
21355 var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
21356 preloadSegment: segment
21357 }) - 1 : 0;
21358 return name + " [" + (seq + index) + "/" + (seq + segmentLen) + "]" + (hasPartIndex ? " part [" + partIndex + "/" + zeroBasedPartCount + "]" : '') + (" segment start/end [" + segment.start + " => " + segment.end + "]") + (hasPartIndex ? " part start/end [" + part.start + " => " + part.end + "]" : '') + (" startOfSegment [" + startOfSegment + "]") + (" duration [" + duration + "]") + (" timeline [" + timeline + "]") + (" selected by [" + selection + "]") + (" playlist [" + id + "]");
21359 };
21360
21361 var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
21362 return mediaType + "TimingInfo";
21363 };
21364 /**
21365 * Returns the timestamp offset to use for the segment.
21366 *
21367 * @param {number} segmentTimeline
21368 * The timeline of the segment
21369 * @param {number} currentTimeline
21370 * The timeline currently being followed by the loader
21371 * @param {number} startOfSegment
21372 * The estimated segment start
21373 * @param {TimeRange[]} buffered
21374 * The loader's buffer
21375 * @param {boolean} overrideCheck
21376 * If true, no checks are made to see if the timestamp offset value should be set,
21377 * but sets it directly to a value.
21378 *
21379 * @return {number|null}
21380 * Either a number representing a new timestamp offset, or null if the segment is
21381 * part of the same timeline
21382 */
21383
21384
21385 var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
21386 var segmentTimeline = _ref.segmentTimeline,
21387 currentTimeline = _ref.currentTimeline,
21388 startOfSegment = _ref.startOfSegment,
21389 buffered = _ref.buffered,
21390 overrideCheck = _ref.overrideCheck;
21391
21392 // Check to see if we are crossing a discontinuity to see if we need to set the
21393 // timestamp offset on the transmuxer and source buffer.
21394 //
21395 // Previously, we changed the timestampOffset if the start of this segment was less than
21396 // the currently set timestampOffset, but this isn't desirable as it can produce bad
21397 // behavior, especially around long running live streams.
21398 if (!overrideCheck && segmentTimeline === currentTimeline) {
21399 return null;
21400 } // When changing renditions, it's possible to request a segment on an older timeline. For
21401 // instance, given two renditions with the following:
21402 //
21403 // #EXTINF:10
21404 // segment1
21405 // #EXT-X-DISCONTINUITY
21406 // #EXTINF:10
21407 // segment2
21408 // #EXTINF:10
21409 // segment3
21410 //
21411 // And the current player state:
21412 //
21413 // current time: 8
21414 // buffer: 0 => 20
21415 //
21416 // The next segment on the current rendition would be segment3, filling the buffer from
21417 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
21418 // then the next segment to be requested will be segment1 from the new rendition in
21419 // order to fill time 8 and onwards. Using the buffered end would result in repeated
21420 // content (since it would position segment1 of the new rendition starting at 20s). This
21421 // case can be identified when the new segment's timeline is a prior value. Instead of
21422 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
21423 // more accurate to the actual start time of the segment.
21424
21425
21426 if (segmentTimeline < currentTimeline) {
21427 return startOfSegment;
21428 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
21429 // value uses the end of the last segment if it is available. While this value
21430 // should often be correct, it's better to rely on the buffered end, as the new
21431 // content post discontinuity should line up with the buffered end as if it were
21432 // time 0 for the new content.
21433
21434
21435 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
21436 };
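// Illustrative sketch (not part of the original bundle), with hypothetical values:
//
// // same timeline, no override: no new offset
// timestampOffsetForSegment({
//   segmentTimeline: 1, currentTimeline: 1, overrideCheck: false,
//   startOfSegment: 8, buffered: videojs.createTimeRanges()
// }); // => null
//
// // older timeline (rendition switch backwards): use the estimated segment start
// timestampOffsetForSegment({
//   segmentTimeline: 0, currentTimeline: 1, startOfSegment: 8,
//   buffered: videojs.createTimeRanges([[0, 20]])
// }); // => 8
//
// // newer timeline: line the new content up with the buffered end
// timestampOffsetForSegment({
//   segmentTimeline: 2, currentTimeline: 1, startOfSegment: 20,
//   buffered: videojs.createTimeRanges([[0, 20]])
// }); // => 20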
21437 /**
21438 * Returns whether or not the loader should wait for a timeline change from the timeline
21439 * change controller before processing the segment.
21440 *
21441 * Primary timing in VHS goes by video. This is different from most media players, as
21442 * audio is more often used as the primary timing source. For the foreseeable future, VHS
21443 * will continue to use video as the primary timing source, due to the current logic and
21444 * expectations built around it.
*
21446 * Since the timing follows video, in order to maintain sync, the video loader is
21447 * responsible for setting both audio and video source buffer timestamp offsets.
21448 *
21449 * Setting different values for audio and video source buffers could lead to
21450 * desyncing. The following examples demonstrate some of the situations where this
21451 * distinction is important. Note that all of these cases involve demuxed content. When
21452 * content is muxed, the audio and video are packaged together, therefore syncing
21453 * separate media playlists is not an issue.
21454 *
21455 * CASE 1: Audio prepares to load a new timeline before video:
21456 *
21457 * Timeline: 0 1
21458 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21459 * Audio Loader: ^
21460 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21461 * Video Loader ^
21462 *
21463 * In the above example, the audio loader is preparing to load the 6th segment, the first
21464 * after a discontinuity, while the video loader is still loading the 5th segment, before
21465 * the discontinuity.
21466 *
21467 * If the audio loader goes ahead and loads and appends the 6th segment before the video
21468 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
21469 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
21470 * the audio loader must provide the audioAppendStart value to trim the content in the
21471 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
21472 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
21473 * segment until that value is provided.
21474 *
21475 * CASE 2: Video prepares to load a new timeline before audio:
21476 *
21477 * Timeline: 0 1
21478 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21479 * Audio Loader: ^
21480 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21481 * Video Loader ^
21482 *
21483 * In the above example, the video loader is preparing to load the 6th segment, the first
21484 * after a discontinuity, while the audio loader is still loading the 5th segment, before
21485 * the discontinuity.
21486 *
21487 * If the video loader goes ahead and loads and appends the 6th segment, then once the
21488 * segment is loaded and processed, both the video and audio timestamp offsets will be
21489 * set, since video is used as the primary timing source. This is to ensure content lines
21490 * up appropriately, as any modifications to the video timing are reflected by audio when
21491 * the video loader sets the audio and video timestamp offsets to the same value. However,
21492 * setting the timestamp offset for audio before audio has had a chance to change
21493 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
21494 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
21495 *
21496 * CASE 3: When seeking, audio prepares to load a new timeline before video
21497 *
21498 * Timeline: 0 1
21499 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21500 * Audio Loader: ^
21501 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21502 * Video Loader ^
21503 *
21504 * In the above example, both audio and video loaders are loading segments from timeline
21505 * 0, but imagine that the seek originated from timeline 1.
21506 *
21507 * When seeking to a new timeline, the timestamp offset will be set based on the expected
21508 * segment start of the loaded video segment. In order to maintain sync, the audio loader
21509 * must wait for the video loader to load its segment and update both the audio and video
21510 * timestamp offsets before it may load and append its own segment. This is the case
21511 * whether the seek results in a mismatched segment request (e.g., the audio loader
21512 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
21513 * loaders choose to load the same segment index from each playlist, as the segments may
21514 * not be aligned perfectly, even for matching segment indexes.
21515 *
* @param {Object} timelineChangeController
21517 * @param {number} currentTimeline
21518 * The timeline currently being followed by the loader
21519 * @param {number} segmentTimeline
21520 * The timeline of the segment being loaded
21521 * @param {('main'|'audio')} loaderType
21522 * The loader type
21523 * @param {boolean} audioDisabled
21524 * Whether the audio is disabled for the loader. This should only be true when the
21525 * loader may have muxed audio in its segment, but should not append it, e.g., for
21526 * the main loader when an alternate audio playlist is active.
21527 *
21528 * @return {boolean}
21529 * Whether the loader should wait for a timeline change from the timeline change
21530 * controller before processing the segment
21531 */
21532
21533 var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
21534 var timelineChangeController = _ref2.timelineChangeController,
21535 currentTimeline = _ref2.currentTimeline,
21536 segmentTimeline = _ref2.segmentTimeline,
21537 loaderType = _ref2.loaderType,
21538 audioDisabled = _ref2.audioDisabled;
21539
21540 if (currentTimeline === segmentTimeline) {
21541 return false;
21542 }
21543
21544 if (loaderType === 'audio') {
21545 var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
21546 type: 'main'
21547 }); // Audio loader should wait if:
21548 //
21549 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
21550 // * main hasn't yet changed to the timeline audio is looking to load
21551
21552 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
21553 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
21554 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
21555 // loader's segments (or the content is audio/video only and handled by the main
21556 // loader).
21557
21558
21559 if (loaderType === 'main' && audioDisabled) {
21560 var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
21561 type: 'audio'
21562 }); // Main loader should wait for the audio loader if audio is not pending a timeline
21563 // change to the current timeline.
21564 //
21565 // Since the main loader is responsible for setting the timestamp offset for both
21566 // audio and video, the main loader must wait for audio to be about to change to its
21567 // timeline before setting the offset, otherwise, if audio is behind in loading,
21568 // segments from the previous timeline would be adjusted by the new timestamp offset.
21569 //
21570 // This requirement means that video will not cross a timeline until the audio is
21571 // about to cross to it, so that way audio and video will always cross the timeline
21572 // together.
21573 //
21574 // In addition to normal timeline changes, these rules also apply to the start of a
21575 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
21576 // that these rules apply to the first timeline change because if they did not, it's
21577 // possible that the main loader will cross two timelines before the audio loader has
21578 // crossed one. Logic may be implemented to handle the startup as a special case, but
21579 // it's easier to simply treat all timeline changes the same.
21580
21581 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
21582 return false;
21583 }
21584
21585 return true;
21586 }
21587
21588 return false;
21589 };
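// Illustrative sketch (not part of the original bundle): an audio loader on
// timeline 0 that wants a timeline 1 segment only proceeds once main has
// already crossed to timeline 1 (the `controller` value below is hypothetical).
//
// shouldWaitForTimelineChange({
//   timelineChangeController: controller, // lastTimelineChange({ type: 'main' })
//                                         // returns { from: 0, to: 1 }
//   currentTimeline: 0,
//   segmentTimeline: 1,
//   loaderType: 'audio'
// }); // => false, main already changed, so audio may load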
21590 var mediaDuration = function mediaDuration(timingInfos) {
21591 var maxDuration = 0;
21592 ['video', 'audio'].forEach(function (type) {
21593 var typeTimingInfo = timingInfos[type + "TimingInfo"];
21594
21595 if (!typeTimingInfo) {
21596 return;
21597 }
21598
21599 var start = typeTimingInfo.start,
21600 end = typeTimingInfo.end;
21601 var duration;
21602
21603 if (typeof start === 'bigint' || typeof end === 'bigint') {
21604 duration = window.BigInt(end) - window.BigInt(start);
21605 } else if (typeof start === 'number' && typeof end === 'number') {
21606 duration = end - start;
21607 }
21608
21609 if (typeof duration !== 'undefined' && duration > maxDuration) {
21610 maxDuration = duration;
21611 }
21612 }); // convert back to a number if it is lower than MAX_SAFE_INTEGER
21613 // as we only need BigInt when we are above that.
21614
21615 if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
21616 maxDuration = Number(maxDuration);
21617 }
21618
21619 return maxDuration;
21620 };
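// Illustrative sketch (not part of the original bundle): the longer of the two
// track durations wins.
//
// mediaDuration({
//   videoTimingInfo: { start: 0, end: 4.5 },
//   audioTimingInfo: { start: 0.1, end: 4.3 }
// }); // => 4.5 (video: 4.5s, audio: 4.2s)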
21621 var segmentTooLong = function segmentTooLong(_ref3) {
21622 var segmentDuration = _ref3.segmentDuration,
21623 maxDuration = _ref3.maxDuration;
21624
21625 // 0 duration segments are most likely due to metadata only segments or a lack of
21626 // information.
21627 if (!segmentDuration) {
21628 return false;
21629 } // For HLS:
21630 //
21631 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
21632 // The EXTINF duration of each Media Segment in the Playlist
21633 // file, when rounded to the nearest integer, MUST be less than or equal
21634 // to the target duration; longer segments can trigger playback stalls
21635 // or other errors.
21636 //
21637 // For DASH, the mpd-parser uses the largest reported segment duration as the target
21638 // duration. Although that reported duration is occasionally approximate (i.e., not
21639 // exact), a strict check may report that a segment is too long more often in DASH.
21640
21641
21642 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
21643 };
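// Illustrative sketch (not part of the original bundle): durations are rounded
// to the nearest integer per the HLS spec, with TIME_FUDGE_FACTOR (1/30 of a
// second in this bundle) of slack.
//
// segmentTooLong({ segmentDuration: 10.6, maxDuration: 10 }); // => true  (11 > ~10.03)
// segmentTooLong({ segmentDuration: 10.4, maxDuration: 10 }); // => false (10 <= ~10.03)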
21644 var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
21645 // Right now we aren't following DASH's timing model exactly, so only perform
21646 // this check for HLS content.
21647 if (sourceType !== 'hls') {
21648 return null;
21649 }
21650
21651 var segmentDuration = mediaDuration({
21652 audioTimingInfo: segmentInfo.audioTimingInfo,
21653 videoTimingInfo: segmentInfo.videoTimingInfo
21654 }); // Don't report if we lack information.
21655 //
21656 // If the segment has a duration of 0 it is either a lack of information or a
21657 // metadata only segment and shouldn't be reported here.
21658
21659 if (!segmentDuration) {
21660 return null;
21661 }
21662
21663 var targetDuration = segmentInfo.playlist.targetDuration;
21664 var isSegmentWayTooLong = segmentTooLong({
21665 segmentDuration: segmentDuration,
21666 maxDuration: targetDuration * 2
21667 });
21668 var isSegmentSlightlyTooLong = segmentTooLong({
21669 segmentDuration: segmentDuration,
21670 maxDuration: targetDuration
21671 });
21672 var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
21673
21674 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
21675 return {
21676 severity: isSegmentWayTooLong ? 'warn' : 'info',
21677 message: segmentTooLongMessage
21678 };
21679 }
21680
21681 return null;
21682 };
21683 /**
21684 * An object that manages segment loading and appending.
21685 *
21686 * @class SegmentLoader
21687 * @param {Object} options required and optional options
21688 * @extends videojs.EventTarget
21689 */
21690
21691 var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
21692 inheritsLoose(SegmentLoader, _videojs$EventTarget);
21693
21694 function SegmentLoader(settings, options) {
21695 var _this;
21696
21697 _this = _videojs$EventTarget.call(this) || this; // check pre-conditions
21698
21699 if (!settings) {
21700 throw new TypeError('Initialization settings are required');
21701 }
21702
21703 if (typeof settings.currentTime !== 'function') {
21704 throw new TypeError('No currentTime getter specified');
21705 }
21706
21707 if (!settings.mediaSource) {
21708 throw new TypeError('No MediaSource specified');
21709 } // public properties
21710
21711
21712 _this.bandwidth = settings.bandwidth;
21713 _this.throughput = {
21714 rate: 0,
21715 count: 0
21716 };
21717 _this.roundTrip = NaN;
21718
21719 _this.resetStats_();
21720
21721 _this.mediaIndex = null;
21722 _this.partIndex = null; // private settings
21723
21724 _this.hasPlayed_ = settings.hasPlayed;
21725 _this.currentTime_ = settings.currentTime;
21726 _this.seekable_ = settings.seekable;
21727 _this.seeking_ = settings.seeking;
21728 _this.duration_ = settings.duration;
21729 _this.mediaSource_ = settings.mediaSource;
21730 _this.vhs_ = settings.vhs;
21731 _this.loaderType_ = settings.loaderType;
21732 _this.currentMediaInfo_ = void 0;
21733 _this.startingMediaInfo_ = void 0;
21734 _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
21735 _this.goalBufferLength_ = settings.goalBufferLength;
21736 _this.sourceType_ = settings.sourceType;
21737 _this.sourceUpdater_ = settings.sourceUpdater;
21738 _this.inbandTextTracks_ = settings.inbandTextTracks;
21739 _this.state_ = 'INIT';
21740 _this.timelineChangeController_ = settings.timelineChangeController;
21741 _this.shouldSaveSegmentTimingInfo_ = true;
21742 _this.parse708captions_ = settings.parse708captions;
21743 _this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
21744 _this.captionServices_ = settings.captionServices;
21745 _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables
21746
21747 _this.checkBufferTimeout_ = null;
21748 _this.error_ = void 0;
21749 _this.currentTimeline_ = -1;
21750 _this.pendingSegment_ = null;
21751 _this.xhrOptions_ = null;
21752 _this.pendingSegments_ = [];
21753 _this.audioDisabled_ = false;
21754 _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
21755
21756 _this.gopBuffer_ = [];
21757 _this.timeMapping_ = 0;
21758 _this.safeAppend_ = videojs__default["default"].browser.IE_VERSION >= 11;
21759 _this.appendInitSegment_ = {
21760 audio: true,
21761 video: true
21762 };
21763 _this.playlistOfLastInitSegment_ = {
21764 audio: null,
21765 video: null
21766 };
21767 _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
21768 // information yet to start the loading process (e.g., if the audio loader wants to
21769 // load a segment from the next timeline but the main loader hasn't yet crossed that
21770 // timeline), then the load call will be added to the queue until it is ready to be
21771 // processed.
21772
21773 _this.loadQueue_ = [];
21774 _this.metadataQueue_ = {
21775 id3: [],
21776 caption: []
21777 };
21778 _this.waitingOnRemove_ = false;
21779 _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
21780
21781 _this.activeInitSegmentId_ = null;
21782 _this.initSegments_ = {}; // HLSe playback
21783
21784 _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
21785 _this.keyCache_ = {};
21786 _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
21787 // between a time in the display time and a segment index within
21788 // a playlist
21789
21790 _this.syncController_ = settings.syncController;
21791 _this.syncPoint_ = {
21792 segmentIndex: 0,
21793 time: 0
21794 };
21795 _this.transmuxer_ = _this.createTransmuxer_();
21796
21797 _this.triggerSyncInfoUpdate_ = function () {
21798 return _this.trigger('syncinfoupdate');
21799 };
21800
21801 _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
21802
21803 _this.mediaSource_.addEventListener('sourceopen', function () {
21804 if (!_this.isEndOfStream_()) {
21805 _this.ended_ = false;
21806 }
21807 }); // ...for determining the fetch location
21808
21809
21810 _this.fetchAtBuffer_ = false;
21811 _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
21812 Object.defineProperty(assertThisInitialized(_this), 'state', {
21813 get: function get() {
21814 return this.state_;
21815 },
21816 set: function set(newState) {
21817 if (newState !== this.state_) {
21818 this.logger_(this.state_ + " -> " + newState);
21819 this.state_ = newState;
21820 this.trigger('statechange');
21821 }
21822 }
21823 });
21824
21825 _this.sourceUpdater_.on('ready', function () {
21826 if (_this.hasEnoughInfoToAppend_()) {
21827 _this.processCallQueue_();
21828 }
21829 }); // Only the main loader needs to listen for pending timeline changes, as the main
21830 // loader should wait for audio to be ready to change its timeline so that both main
21831 // and audio timelines change together. For more details, see the
21832 // shouldWaitForTimelineChange function.
21833
21834
21835 if (_this.loaderType_ === 'main') {
21836 _this.timelineChangeController_.on('pendingtimelinechange', function () {
21837 if (_this.hasEnoughInfoToAppend_()) {
21838 _this.processCallQueue_();
21839 }
21840 });
21841 } // The main loader only listens on pending timeline changes, but the audio loader,
21842 // since its loads follow main, needs to listen on timeline changes. For more details,
21843 // see the shouldWaitForTimelineChange function.
21844
21845
21846 if (_this.loaderType_ === 'audio') {
21847 _this.timelineChangeController_.on('timelinechange', function () {
21848 if (_this.hasEnoughInfoToLoad_()) {
21849 _this.processLoadQueue_();
21850 }
21851
21852 if (_this.hasEnoughInfoToAppend_()) {
21853 _this.processCallQueue_();
21854 }
21855 });
21856 }
21857
21858 return _this;
21859 }
21860
21861 var _proto = SegmentLoader.prototype;
21862
21863 _proto.createTransmuxer_ = function createTransmuxer_() {
21864 return segmentTransmuxer.createTransmuxer({
21865 remux: false,
21866 alignGopsAtEnd: this.safeAppend_,
21867 keepOriginalTimestamps: true,
21868 parse708captions: this.parse708captions_,
21869 captionServices: this.captionServices_
21870 });
21871 }
21872 /**
21873 * reset all of our media stats
21874 *
21875 * @private
21876 */
21877 ;
21878
21879 _proto.resetStats_ = function resetStats_() {
21880 this.mediaBytesTransferred = 0;
21881 this.mediaRequests = 0;
21882 this.mediaRequestsAborted = 0;
21883 this.mediaRequestsTimedout = 0;
21884 this.mediaRequestsErrored = 0;
21885 this.mediaTransferDuration = 0;
21886 this.mediaSecondsLoaded = 0;
21887 this.mediaAppends = 0;
21888 }
21889 /**
21890 * dispose of the SegmentLoader and reset to the default state
21891 */
21892 ;
21893
21894 _proto.dispose = function dispose() {
21895 this.trigger('dispose');
21896 this.state = 'DISPOSED';
21897 this.pause();
21898 this.abort_();
21899
21900 if (this.transmuxer_) {
21901 this.transmuxer_.terminate();
21902 }
21903
21904 this.resetStats_();
21905
21906 if (this.checkBufferTimeout_) {
21907 window.clearTimeout(this.checkBufferTimeout_);
21908 }
21909
21910 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
21911 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
21912 }
21913
21914 this.off();
21915 };
21916
21917 _proto.setAudio = function setAudio(enable) {
21918 this.audioDisabled_ = !enable;
21919
21920 if (enable) {
21921 this.appendInitSegment_.audio = true;
21922 } else {
21923 // remove current track audio if it gets disabled
21924 this.sourceUpdater_.removeAudio(0, this.duration_());
21925 }
21926 }
21927 /**
* abort anything that is currently going on with the SegmentLoader
21929 * and reset to a default state
21930 */
21931 ;
21932
21933 _proto.abort = function abort() {
21934 if (this.state !== 'WAITING') {
21935 if (this.pendingSegment_) {
21936 this.pendingSegment_ = null;
21937 }
21938
21939 return;
21940 }
21941
21942 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
21943 // since we are no longer "waiting" on any requests. XHR callback is not always run
21944 // when the request is aborted. This will prevent the loader from being stuck in the
21945 // WAITING state indefinitely.
21946
21947 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
21948 // next segment
21949
21950 if (!this.paused()) {
21951 this.monitorBuffer_();
21952 }
21953 }
21954 /**
21955 * abort all pending xhr requests and null any pending segments
21956 *
21957 * @private
21958 */
21959 ;
21960
21961 _proto.abort_ = function abort_() {
21962 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
21963 this.pendingSegment_.abortRequests();
21964 } // clear out the segment being processed
21965
21966
21967 this.pendingSegment_ = null;
21968 this.callQueue_ = [];
21969 this.loadQueue_ = [];
21970 this.metadataQueue_.id3 = [];
21971 this.metadataQueue_.caption = [];
21972 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
21973 this.waitingOnRemove_ = false;
21974 window.clearTimeout(this.quotaExceededErrorRetryTimeout_);
21975 this.quotaExceededErrorRetryTimeout_ = null;
21976 };
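  // A reading aid for the two aborts above (not from the source): the public
  // abort() only acts while WAITING on a request and resets the state to
  // READY, whereas abort_() unconditionally cancels the pending request and
  // empties the call/load/metadata queues.
  //
  //   loader.abort(); // no-op outside WAITING, aside from clearing pendingSegment_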
21977
21978 _proto.checkForAbort_ = function checkForAbort_(requestId) {
21979 // If the state is APPENDING, then aborts will not modify the state, meaning the first
21980 // callback that happens should reset the state to READY so that loading can continue.
21981 if (this.state === 'APPENDING' && !this.pendingSegment_) {
21982 this.state = 'READY';
21983 return true;
21984 }
21985
21986 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
21987 return true;
21988 }
21989
21990 return false;
21991 }
21992 /**
21993 * set an error on the segment loader and null out any pending segments
21994 *
21995 * @param {Error} error the error to set on the SegmentLoader
21996 * @return {Error} the error that was set or that is currently set
21997 */
21998 ;
21999
22000 _proto.error = function error(_error) {
22001 if (typeof _error !== 'undefined') {
22002 this.logger_('error occurred:', _error);
22003 this.error_ = _error;
22004 }
22005
22006 this.pendingSegment_ = null;
22007 return this.error_;
22008 };
22009
22010 _proto.endOfStream = function endOfStream() {
22011 this.ended_ = true;
22012
22013 if (this.transmuxer_) {
22014 // need to clear out any cached data to prepare for the new segment
22015 segmentTransmuxer.reset(this.transmuxer_);
22016 }
22017
22018 this.gopBuffer_.length = 0;
22019 this.pause();
22020 this.trigger('ended');
22021 }
22022 /**
22023 * Indicates which time ranges are buffered
22024 *
22025 * @return {TimeRange}
22026 * TimeRange object representing the current buffered ranges
22027 */
22028 ;
22029
22030 _proto.buffered_ = function buffered_() {
22031 var trackInfo = this.getMediaInfo_();
22032
22033 if (!this.sourceUpdater_ || !trackInfo) {
22034 return videojs__default["default"].createTimeRanges();
22035 }
22036
22037 if (this.loaderType_ === 'main') {
22038 var hasAudio = trackInfo.hasAudio,
22039 hasVideo = trackInfo.hasVideo,
22040 isMuxed = trackInfo.isMuxed;
22041
22042 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
22043 return this.sourceUpdater_.buffered();
22044 }
22045
22046 if (hasVideo) {
22047 return this.sourceUpdater_.videoBuffered();
22048 }
22049 } // One case that can be ignored for now is audio only with alt audio,
22050 // as we don't yet have proper support for that.
22051
22052
22053 return this.sourceUpdater_.audioBuffered();
22054 }
22055 /**
22056 * Gets and sets init segment for the provided map
22057 *
22058 * @param {Object} map
22059 * The map object representing the init segment to get or set
22060 * @param {boolean=} set
22061 * If true, the init segment for the provided map should be saved
22062 * @return {Object}
22063 * map object for desired init segment
22064 */
22065 ;
22066
22067 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
22068 if (set === void 0) {
22069 set = false;
22070 }
22071
22072 if (!map) {
22073 return null;
22074 }
22075
22076 var id = initSegmentId(map);
22077 var storedMap = this.initSegments_[id];
22078
22079 if (set && !storedMap && map.bytes) {
22080 this.initSegments_[id] = storedMap = {
22081 resolvedUri: map.resolvedUri,
22082 byterange: map.byterange,
22083 bytes: map.bytes,
22084 tracks: map.tracks,
22085 timescales: map.timescales
22086 };
22087 }
22088
22089 return storedMap || map;
22090 }
22091 /**
22092 * Gets and sets key for the provided key
22093 *
22094 * @param {Object} key
22095 * The key object representing the key to get or set
22096 * @param {boolean=} set
22097 * If true, the key for the provided key should be saved
22098 * @return {Object}
22099 * Key object for desired key
22100 */
22101 ;
22102
22103 _proto.segmentKey = function segmentKey(key, set) {
22104 if (set === void 0) {
22105 set = false;
22106 }
22107
22108 if (!key) {
22109 return null;
22110 }
22111
22112 var id = segmentKeyId(key);
22113 var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
22114 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
22115
22116 if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
22117 this.keyCache_[id] = storedKey = {
22118 resolvedUri: key.resolvedUri,
22119 bytes: key.bytes
22120 };
22121 }
22122
22123 var result = {
22124 resolvedUri: (storedKey || key).resolvedUri
22125 };
22126
22127 if (storedKey) {
22128 result.bytes = storedKey.bytes;
22129 }
22130
22131 return result;
22132 }
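  // Hedged example of the key cache above (names and URIs hypothetical, and
  // assuming segmentKeyId derives its id from the key's resolvedUri):
  //
  //   loader.cacheEncryptionKeys_ = true;
  //   loader.segmentKey({ resolvedUri: keyUri, bytes: keyBytes }, true); // caches bytes
  //   loader.segmentKey({ resolvedUri: keyUri }); // -> { resolvedUri: keyUri, bytes: keyBytes }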
22133 /**
22134 * Returns true if all configuration required for loading is present, otherwise false.
22135 *
22136 * @return {boolean} True if all the configuration is ready for loading
22137 * @private
22138 */
22139 ;
22140
22141 _proto.couldBeginLoading_ = function couldBeginLoading_() {
22142 return this.playlist_ && !this.paused();
22143 }
22144 /**
22145 * load a playlist and start to fill the buffer
22146 */
22147 ;
22148
22149 _proto.load = function load() {
22150 // un-pause
22151 this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
22152 // specified
22153
22154 if (!this.playlist_) {
22155 return;
22156 } // if all the configuration is ready, initialize and begin loading
22157
22158
22159 if (this.state === 'INIT' && this.couldBeginLoading_()) {
22160 return this.init_();
22161 } // if we're in the middle of processing a segment already, don't
22162 // kick off an additional segment request
22163
22164
22165 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
22166 return;
22167 }
22168
22169 this.state = 'READY';
22170 }
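  // Typical call order implied by load() above (sketch, options hypothetical):
  //
  //   loader.playlist(mediaPlaylist, { withCredentials: true });
  //   loader.load(); // un-pauses; runs init_() once a playlist exists and state is INIT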
22171 /**
22172 * Once all the starting parameters have been specified, begin
22173 * operation. This method should only be invoked from the INIT
22174 * state.
22175 *
22176 * @private
22177 */
22178 ;
22179
22180 _proto.init_ = function init_() {
22181 this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
22182 // audio data from the muxed content should be removed
22183
22184 this.resetEverything();
22185 return this.monitorBuffer_();
22186 }
22187 /**
22188 * set a playlist on the segment loader
22189 *
22190 * @param {PlaylistLoader} media the playlist to set on the segment loader
22191 */
22192 ;
22193
22194 _proto.playlist = function playlist(newPlaylist, options) {
22195 if (options === void 0) {
22196 options = {};
22197 }
22198
22199 if (!newPlaylist) {
22200 return;
22201 }
22202
22203 var oldPlaylist = this.playlist_;
22204 var segmentInfo = this.pendingSegment_;
22205 this.playlist_ = newPlaylist;
22206 this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
22207 // is always our zero-time so force a sync update each time the playlist
22208 // is refreshed from the server
22209 //
22210 // Use the INIT state to determine if playback has started, as the playlist sync info
22211 // should be fixed once requests begin (as sync points are generated based on sync
22212 // info), but not before then.
22213
22214 if (this.state === 'INIT') {
22215 newPlaylist.syncInfo = {
22216 mediaSequence: newPlaylist.mediaSequence,
22217 time: 0
22218 }; // Setting the date time mapping means mapping the program date time (if available)
22219 // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
22220 // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
22221 // be updated as the playlist is refreshed before the loader starts loading, the
22222 // program date time mapping needs to be updated as well.
22223 //
22224 // This mapping is only done for the main loader because a program date time should
22225 // map equivalently between playlists.
22226
22227 if (this.loaderType_ === 'main') {
22228 this.syncController_.setDateTimeMappingForStart(newPlaylist);
22229 }
22230 }
22231
22232 var oldId = null;
22233
22234 if (oldPlaylist) {
22235 if (oldPlaylist.id) {
22236 oldId = oldPlaylist.id;
22237 } else if (oldPlaylist.uri) {
22238 oldId = oldPlaylist.uri;
22239 }
22240 }
22241
22242 this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
22243 // in LIVE, we always want to update with new playlists (including refreshes)
22244
22245 this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
22246 // buffering now
22247
22248 if (this.state === 'INIT' && this.couldBeginLoading_()) {
22249 return this.init_();
22250 }
22251
22252 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
22253 if (this.mediaIndex !== null) {
22254 // we must reset/resync the segment loader when we switch renditions and
22255 // the segment loader is already synced to the previous rendition
22256 // on playlist changes we want it to be possible to fetch
22257 // at the buffer for vod but not for live. So we use resetLoader
22258 // for live and resyncLoader for vod. We want this because
22259 // if a playlist uses independent and non-independent segments/parts the
22260 // buffer may not accurately reflect the next segment that we should try
22261 // downloading.
22262 if (!newPlaylist.endList) {
22263 this.resetLoader();
22264 } else {
22265 this.resyncLoader();
22266 }
22267 }
22268
22269 this.currentMediaInfo_ = void 0;
22270 this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
22271
22272 return;
22273 } // we reloaded the same playlist so we are in a live scenario
22274 // and we will likely need to adjust the mediaIndex
22275
22276
22277 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
22278 this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
22279 // this is important because we can abort a request and this value must be
22280 // equal to the last appended mediaIndex
22281
22282 if (this.mediaIndex !== null) {
22283 this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
22284 // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
22285 // new playlist was incremented by 1.
22286
22287 if (this.mediaIndex < 0) {
22288 this.mediaIndex = null;
22289 this.partIndex = null;
22290 } else {
22291 var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
22292 // unless parts fell off of the playlist for this segment.
22293 // In that case we need to reset partIndex and resync
22294
22295 if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
22296 var mediaIndex = this.mediaIndex;
22297 this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
22298 this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
22299 // as the part was dropped from our current playlist's segment.
22300 // The mediaIndex will still be valid so keep that around.
22301
22302 this.mediaIndex = mediaIndex;
22303 }
22304 }
22305 } // update the mediaIndex on the SegmentInfo object
22306 // this is important because we will update this.mediaIndex with this value
22307 // in `handleAppendsDone_` after the segment has been successfully appended
22308
22309
22310 if (segmentInfo) {
22311 segmentInfo.mediaIndex -= mediaSequenceDiff;
22312
22313 if (segmentInfo.mediaIndex < 0) {
22314 segmentInfo.mediaIndex = null;
22315 segmentInfo.partIndex = null;
22316 } else {
22317 // we need to update the referenced segment so that timing information is
22318 // saved for the new playlist's segment, however, if the segment fell off the
22319 // playlist, we can leave the old reference and just lose the timing info
22320 if (segmentInfo.mediaIndex >= 0) {
22321 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
22322 }
22323
22324 if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
22325 segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
22326 }
22327 }
22328 }
22329
22330 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
22331 }
22332 /**
22333 * Prevent the loader from fetching additional segments. If there
22334 * is a segment request outstanding, it will finish processing
22335 * before the loader halts. A segment loader can be unpaused by
22336 * calling load().
22337 */
22338 ;
22339
22340 _proto.pause = function pause() {
22341 if (this.checkBufferTimeout_) {
22342 window.clearTimeout(this.checkBufferTimeout_);
22343 this.checkBufferTimeout_ = null;
22344 }
22345 }
22346 /**
22347 * Returns whether the segment loader is fetching additional
22348 * segments when given the opportunity. This property can be
22349 * modified through calls to pause() and load().
22350 */
22351 ;
22352
22353 _proto.paused = function paused() {
22354 return this.checkBufferTimeout_ === null;
22355 }
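  // The buffer-check timeout doubles as the pause flag, so pause state can be
  // observed directly (illustrative):
  //
  //   loader.pause();
  //   loader.paused(); // true, checkBufferTimeout_ was cleared
  //   loader.load();
  //   loader.paused(); // false, monitorBuffer_() re-armed the timeout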
22356 /**
22357 * Delete all the buffered data and reset the SegmentLoader
22358 *
22359 * @param {Function} [done] an optional callback to be executed when the remove
22360 * operation is complete
22361 */
22362 ;
22363
22364 _proto.resetEverything = function resetEverything(done) {
22365 this.ended_ = false;
22366 this.appendInitSegment_ = {
22367 audio: true,
22368 video: true
22369 };
22370 this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
22371 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
22372 // we then clamp the value to duration if necessary.
22373
22374 this.remove(0, Infinity, done); // clears fmp4 captions
22375
22376 if (this.transmuxer_) {
22377 this.transmuxer_.postMessage({
22378 action: 'clearAllMp4Captions'
22379 }); // reset the cache in the transmuxer
22380
22381 this.transmuxer_.postMessage({
22382 action: 'reset'
22383 });
22384 }
22385 }
22386 /**
22387 * Force the SegmentLoader to resync and start loading around the currentTime instead
22388 * of starting at the end of the buffer
22389 *
22390 * Useful for fast quality changes
22391 */
22392 ;
22393
22394 _proto.resetLoader = function resetLoader() {
22395 this.fetchAtBuffer_ = false;
22396 this.resyncLoader();
22397 }
22398 /**
22399 * Force the SegmentLoader to restart synchronization and make a conservative guess
22400 * before returning to the simple walk-forward method
22401 */
22402 ;
22403
22404 _proto.resyncLoader = function resyncLoader() {
22405 if (this.transmuxer_) {
22406 // need to clear out any cached data to prepare for the new segment
22407 segmentTransmuxer.reset(this.transmuxer_);
22408 }
22409
22410 this.mediaIndex = null;
22411 this.partIndex = null;
22412 this.syncPoint_ = null;
22413 this.isPendingTimestampOffset_ = false;
22414 this.callQueue_ = [];
22415 this.loadQueue_ = [];
22416 this.metadataQueue_.id3 = [];
22417 this.metadataQueue_.caption = [];
22418 this.abort();
22419
22420 if (this.transmuxer_) {
22421 this.transmuxer_.postMessage({
22422 action: 'clearParsedMp4Captions'
22423 });
22424 }
22425 }
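  // Rough hierarchy of the reset helpers above, as a reading aid:
  //
  //   resetEverything() -> remove(0, Infinity) + re-arm init segments + resetLoader()
  //   resetLoader()     -> fetchAtBuffer_ = false + resyncLoader()
  //   resyncLoader()    -> drop mediaIndex/partIndex/syncPoint_, clear queues, abort()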
22426 /**
22427 * Remove any data in the source buffer between start and end times
22428 *
22429 * @param {number} start - the start time of the region to remove from the buffer
22430 * @param {number} end - the end time of the region to remove from the buffer
22431 * @param {Function} [done] - an optional callback to be executed when the remove
22432 *        operation is complete
22433 * @param {boolean} [force] - force all remove operations to happen
22434 */
22435 ;
22436
22437 _proto.remove = function remove(start, end, done, force) {
22438 if (done === void 0) {
22439 done = function done() {};
22440 }
22441
22442 if (force === void 0) {
22443 force = false;
22444 }
22445
22446 // clamp end to duration if we need to remove everything.
22447 // This is due to a browser bug that causes issues if we remove to Infinity.
22448 // videojs/videojs-contrib-hls#1225
22449 if (end === Infinity) {
22450 end = this.duration_();
22451 } // skip removes that would throw an error
22452 // commonly happens during a rendition switch at the start of a video
22453 // from start 0 to end 0
22454
22455
22456 if (end <= start) {
22457 this.logger_("skipping remove because end " + end + " is <= start " + start);
22458 return;
22459 }
22460
22461 if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
22462 this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
22463
22464 return;
22465 } // set it to one to complete this function's removes
22466
22467
22468 var removesRemaining = 1;
22469
22470 var removeFinished = function removeFinished() {
22471 removesRemaining--;
22472
22473 if (removesRemaining === 0) {
22474 done();
22475 }
22476 };
22477
22478 if (force || !this.audioDisabled_) {
22479 removesRemaining++;
22480 this.sourceUpdater_.removeAudio(start, end, removeFinished);
22481 } // While it would be better to only remove video if the main loader has video, this
22482 // should be safe with audio only as removeVideo will call back even if there's no
22483 // video buffer.
22484 //
22485 // In theory we can check to see if there's video before calling the remove, but in
22486 // the event that we're switching between renditions and from video to audio only
22487 // (when we add support for that), we may need to clear the video contents despite
22488 // what the new media will contain.
22489
22490
22491 if (force || this.loaderType_ === 'main') {
22492 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
22493 removesRemaining++;
22494 this.sourceUpdater_.removeVideo(start, end, removeFinished);
22495 } // remove any captions and ID3 tags
22496
22497
22498 for (var track in this.inbandTextTracks_) {
22499 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
22500 }
22501
22502 removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
22503
22504 removeFinished();
22505 }
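  // The removesRemaining counting above is a small join-counter: start at one
  // for the function's own slot, add one per async removal, and invoke `done`
  // when the count returns to zero. A minimal standalone sketch of the same
  // pattern (generic, not library API):
  //
  //   function removeAll(ops, done) {
  //     var remaining = 1;
  //     var finish = function () { if (--remaining === 0) { done(); } };
  //     ops.forEach(function (op) { remaining++; op(finish); });
  //     finish(); // release this function's own slot
  //   }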
22506 /**
22507 * (re-)schedule monitorBufferTick_ to run as soon as possible
22508 *
22509 * @private
22510 */
22511 ;
22512
22513 _proto.monitorBuffer_ = function monitorBuffer_() {
22514 if (this.checkBufferTimeout_) {
22515 window.clearTimeout(this.checkBufferTimeout_);
22516 }
22517
22518 this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), 1);
22519 }
22520 /**
22521 * As long as the SegmentLoader is in the READY state, periodically
22522 * invoke fillBuffer_().
22523 *
22524 * @private
22525 */
22526 ;
22527
22528 _proto.monitorBufferTick_ = function monitorBufferTick_() {
22529 if (this.state === 'READY') {
22530 this.fillBuffer_();
22531 }
22532
22533 if (this.checkBufferTimeout_) {
22534 window.clearTimeout(this.checkBufferTimeout_);
22535 }
22536
22537 this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
22538 }
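  // Net effect of the two monitors above (sketch): monitorBuffer_() schedules
  // a near-immediate (1ms) tick, and each tick calls fillBuffer_() while
  // READY, then re-arms itself on the fixed CHECK_BUFFER_DELAY interval until
  // pause() clears the timeout.
  //
  //   loader.monitorBuffer_(); // 1ms kick-off, then steady CHECK_BUFFER_DELAY polling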
22539 /**
22540 * fill the buffer with segments unless the sourceBuffers are
22541 * currently updating
22542 *
22543 * Note: this function should only ever be called by monitorBuffer_
22544 * and never directly
22545 *
22546 * @private
22547 */
22548 ;
22549
22550 _proto.fillBuffer_ = function fillBuffer_() {
22551 // TODO since the source buffer maintains a queue, and we shouldn't call this function
22552 // except when we're ready for the next segment, this check can most likely be removed
22553 if (this.sourceUpdater_.updating()) {
22554 return;
22555 } // see if we need to begin loading immediately
22556
22557
22558 var segmentInfo = this.chooseNextRequest_();
22559
22560 if (!segmentInfo) {
22561 return;
22562 }
22563
22564 if (typeof segmentInfo.timestampOffset === 'number') {
22565 this.isPendingTimestampOffset_ = false;
22566 this.timelineChangeController_.pendingTimelineChange({
22567 type: this.loaderType_,
22568 from: this.currentTimeline_,
22569 to: segmentInfo.timeline
22570 });
22571 }
22572
22573 this.loadSegment_(segmentInfo);
22574 }
22575 /**
22576 * Determines if we should call endOfStream on the media source based
22577 * on the state of the buffer or if the appended segment was the final
22578 * segment in the playlist.
22579 *
22580 * @param {number} [mediaIndex] the media index of the segment we last appended
22581 * @param {Object} [playlist] a media playlist object
* @param {number} [partIndex] the part index of the part we last appended
22582 * @return {boolean} do we need to call endOfStream on the MediaSource
22583 */
22584 ;
22585
22586 _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
22587 if (mediaIndex === void 0) {
22588 mediaIndex = this.mediaIndex;
22589 }
22590
22591 if (playlist === void 0) {
22592 playlist = this.playlist_;
22593 }
22594
22595 if (partIndex === void 0) {
22596 partIndex = this.partIndex;
22597 }
22598
22599 if (!playlist || !this.mediaSource_) {
22600 return false;
22601 }
22602
22603 var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero-based but length is one-based
22604
22605 var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.
22606
22607 var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
22608 // so that MediaSources can trigger the `ended` event when it runs out of
22609 // buffered data instead of waiting for more data
22610
22611 return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
22612 }
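  // Worked example of the check above (hypothetical numbers): for a 4-segment
  // VOD playlist (endList true) with mediaIndex 3 and no parts:
  //
  //   appendedLastSegment = (3 + 1 === 4); // true
  //   appendedLastPart    = true;          // the segment has no parts
  //
  // so endOfStream should be called once the media source is 'open'.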
22613 /**
22614 * Determines what request should be made given current segment loader state.
22615 *
22616 * @return {Object} a request object that describes the segment/part to load
22617 */
22618 ;
22619
22620 _proto.chooseNextRequest_ = function chooseNextRequest_() {
22621 var buffered = this.buffered_();
22622 var bufferedEnd = lastBufferedEnd(buffered) || 0;
22623 var bufferedTime = timeAheadOf(buffered, this.currentTime_());
22624 var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
22625 var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
22626 var segments = this.playlist_.segments; // return no segment if:
22627 // 1. we don't have segments
22628 // 2. The video has not yet played and we already downloaded a segment
22629 // 3. we already have enough buffered time
22630
22631 if (!segments.length || preloaded || haveEnoughBuffer) {
22632 return null;
22633 }
22634
22635 this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
22636 var next = {
22637 partIndex: null,
22638 mediaIndex: null,
22639 startOfSegment: null,
22640 playlist: this.playlist_,
22641 isSyncRequest: Boolean(!this.syncPoint_)
22642 };
22643
22644 if (next.isSyncRequest) {
22645 next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
22646 } else if (this.mediaIndex !== null) {
22647 var segment = segments[this.mediaIndex];
22648 var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
22649 next.startOfSegment = segment.end ? segment.end : bufferedEnd;
22650
22651 if (segment.parts && segment.parts[partIndex + 1]) {
22652 next.mediaIndex = this.mediaIndex;
22653 next.partIndex = partIndex + 1;
22654 } else {
22655 next.mediaIndex = this.mediaIndex + 1;
22656 }
22657 } else {
22658 // Find the segment containing the end of the buffer or current time.
22659 var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
22660 experimentalExactManifestTimings: this.experimentalExactManifestTimings,
22661 playlist: this.playlist_,
22662 currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
22663 startingPartIndex: this.syncPoint_.partIndex,
22664 startingSegmentIndex: this.syncPoint_.segmentIndex,
22665 startTime: this.syncPoint_.time
22666 }),
22667 segmentIndex = _Playlist$getMediaInf.segmentIndex,
22668 startTime = _Playlist$getMediaInf.startTime,
22669 _partIndex = _Playlist$getMediaInf.partIndex;
22670
22671 next.getMediaInfoForTime = this.fetchAtBuffer_ ? "bufferedEnd " + bufferedEnd : "currentTime " + this.currentTime_();
22672 next.mediaIndex = segmentIndex;
22673 next.startOfSegment = startTime;
22674 next.partIndex = _partIndex;
22675 }
22676
22677 var nextSegment = segments[next.mediaIndex];
22678 var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
22679 // the next partIndex is invalid do not choose a next segment.
22680
22681 if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
22682 return null;
22683 } // if the next segment has parts, and we don't have a partIndex.
22684 // Set partIndex to 0
22685
22686
22687 if (typeof next.partIndex !== 'number' && nextSegment.parts) {
22688 next.partIndex = 0;
22689 nextPart = nextSegment.parts[0];
22690 } // if we have no buffered data then we need to make sure
22691 // that the next part we append is "independent" if possible.
22692 // So we check if the previous part is independent, and request
22693 // it if it is.
22694
22695
22696 if (!bufferedTime && nextPart && !nextPart.independent) {
22697 if (next.partIndex === 0) {
22698 var lastSegment = segments[next.mediaIndex - 1];
22699 var lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];
22700
22701 if (lastSegmentLastPart && lastSegmentLastPart.independent) {
22702 next.mediaIndex -= 1;
22703 next.partIndex = lastSegment.parts.length - 1;
22704 next.independent = 'previous segment';
22705 }
22706 } else if (nextSegment.parts[next.partIndex - 1].independent) {
22707 next.partIndex -= 1;
22708 next.independent = 'previous part';
22709 }
22710 }
22711
22712 var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
22713 // 1. this is the last segment in the playlist
22714 // 2. end of stream has been called on the media source already
22715 // 3. the player is not seeking
22716
22717 if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
22718 return null;
22719 }
22720
22721 return this.generateSegmentInfo_(next);
22722 };
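  // Sketch of the "independent" walk-back above (indices hypothetical): with
  // an empty buffer, if segment 10 / part 2 is not flagged independent, the
  // loader retreats to the nearest independent entry point:
  //
  //   parts[1].independent -> request { mediaIndex: 10, partIndex: 1, independent: 'previous part' }
  //   partIndex === 0      -> try the last part of segment 9 instead ('previous segment')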
22723
22724 _proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
22725 var independent = options.independent,
22726 playlist = options.playlist,
22727 mediaIndex = options.mediaIndex,
22728 startOfSegment = options.startOfSegment,
22729 isSyncRequest = options.isSyncRequest,
22730 partIndex = options.partIndex,
22731 forceTimestampOffset = options.forceTimestampOffset,
22732 getMediaInfoForTime = options.getMediaInfoForTime;
22733 var segment = playlist.segments[mediaIndex];
22734 var part = typeof partIndex === 'number' && segment.parts[partIndex];
22735 var segmentInfo = {
22736 requestId: 'segment-loader-' + Math.random(),
22737 // resolve the segment URL relative to the playlist
22738 uri: part && part.resolvedUri || segment.resolvedUri,
22739 // the segment's mediaIndex at the time it was requested
22740 mediaIndex: mediaIndex,
22741 partIndex: part ? partIndex : null,
22742 // whether or not to update the SegmentLoader's state with this
22743 // segment's mediaIndex
22744 isSyncRequest: isSyncRequest,
22745 startOfSegment: startOfSegment,
22746 // the segment's playlist
22747 playlist: playlist,
22748 // unencrypted bytes of the segment
22749 bytes: null,
22750 // when a key is defined for this segment, the encrypted bytes
22751 encryptedBytes: null,
22752 // The target timestampOffset for this segment when we append it
22753 // to the source buffer
22754 timestampOffset: null,
22755 // The timeline that the segment is in
22756 timeline: segment.timeline,
22757 // The expected duration of the segment in seconds
22758 duration: part && part.duration || segment.duration,
22759 // retain the segment in case the playlist updates while doing an async process
22760 segment: segment,
22761 part: part,
22762 byteLength: 0,
22763 transmuxer: this.transmuxer_,
22764 // type of getMediaInfoForTime that was used to get this segment
22765 getMediaInfoForTime: getMediaInfoForTime,
22766 independent: independent
22767 };
22768 var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
22769 segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
22770 segmentTimeline: segment.timeline,
22771 currentTimeline: this.currentTimeline_,
22772 startOfSegment: startOfSegment,
22773 buffered: this.buffered_(),
22774 overrideCheck: overrideCheck
22775 });
22776 var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());
22777
22778 if (typeof audioBufferedEnd === 'number') {
22779 // since the transmuxer is using the actual timing values, but the buffer is
22780 // adjusted by the timestamp offset, we must adjust the value here
22781 segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
22782 }
22783
22784 if (this.sourceUpdater_.videoBuffered().length) {
22785 segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
22786 // adjusted by the timestamp offset, we must adjust the value here
22787 this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
22788 }
22789
22790 return segmentInfo;
22791 } // get the timestamp offset for a segment,
22792 // added so that vtt segment loader can override and prevent
22793 // adding timestamp offsets.
22794 ;
22795
22796 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
22797 return timestampOffsetForSegment(options);
22798 }
22799 /**
22800 * Determines if the network has enough bandwidth to complete the current segment
22801 * request in a timely manner. If not, the request will be aborted early and bandwidth
22802 * updated to trigger a playlist switch.
22803 *
22804 * @param {Object} stats
22805 * Object containing stats about the request timing and size
22806 * @private
22807 */
22808 ;
22809
22810 _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
22811 if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
22812 // TODO: Replace using timeout with a boolean indicating whether this playlist is
22813 // the lowestEnabledRendition.
22814 !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
22815 !this.playlist_.attributes.BANDWIDTH) {
22816 return;
22817 } // Wait at least 1 second since the first byte of data has been received before
22818 // using the calculated bandwidth from the progress event to allow the bitrate
22819 // to stabilize
22820
22821
22822 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
22823 return;
22824 }
22825
22826 var currentTime = this.currentTime_();
22827 var measuredBandwidth = stats.bandwidth;
22828 var segmentDuration = this.pendingSegment_.duration;
22829 var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
22830 // if we are only left with less than 1 second when the request completes.
22831 // A negative timeUntilRebuffering indicates we are already rebuffering
22832
22833 var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
22834 // is larger than the estimated time until the player runs out of forward buffer
22835
22836 if (requestTimeRemaining <= timeUntilRebuffer$1) {
22837 return;
22838 }
22839
22840 var switchCandidate = minRebufferMaxBandwidthSelector({
22841 master: this.vhs_.playlists.master,
22842 currentTime: currentTime,
22843 bandwidth: measuredBandwidth,
22844 duration: this.duration_(),
22845 segmentDuration: segmentDuration,
22846 timeUntilRebuffer: timeUntilRebuffer$1,
22847 currentTimeline: this.currentTimeline_,
22848 syncController: this.syncController_
22849 });
22850
22851 if (!switchCandidate) {
22852 return;
22853 }
22854
22855 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
22856 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
22857 var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
22858 // potential round trip time of the new request so that we are not too aggressive
22859 // with switching to a playlist that might save us a fraction of a second.
22860
22861 if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
22862 minimumTimeSaving = 1;
22863 }
22864
22865 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
22866 return;
22867 } // set the bandwidth to that of the desired playlist being sure to scale by
22868 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
22869 // don't trigger a bandwidthupdate as the bandwidth is artificial
22870
22871
22872 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
22873 this.trigger('earlyabort');
22874 };
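  // Worked numbers for the early-abort math above (all hypothetical): say the
  // request needs 6s more and 4s of forward buffer remain at 1x playback:
  //
  //   timeUntilRebuffer    = 4 - 1;   // 3, after the 1s safety margin
  //   rebufferingImpact    = 6 - 3;   // 3
  //   timeSavedBySwitching = 3 - 0.5; // candidate impact 0.5 -> saves 2.5s
  //
  // 2.5s clears the 0.5s minimum, so the loader sets the new bandwidth and
  // triggers 'earlyabort'.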
22875
22876 _proto.handleAbort_ = function handleAbort_(segmentInfo) {
22877 this.logger_("Aborting " + segmentInfoString(segmentInfo));
22878 this.mediaRequestsAborted += 1;
22879 }
22880 /**
22881 * XHR `progress` event handler
22882 *
22883 * @param {Event} event
22884 * The XHR `progress` event
22885 * @param {Object} simpleSegment
22886 * A simplified segment object copy
22887 * @private
22888 */
22889 ;
22890
22891 _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
22892 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22893
22894 if (this.checkForAbort_(simpleSegment.requestId)) {
22895 return;
22896 }
22897
22898 this.trigger('progress');
22899 };
22900
22901 _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
22902 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22903
22904 if (this.checkForAbort_(simpleSegment.requestId)) {
22905 return;
22906 }
22907
22908 if (this.checkForIllegalMediaSwitch(trackInfo)) {
22909 return;
22910 }
22911
22912 trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
22913 // Guard against cases where we're not getting track info at all until we are
22914 // certain that all streams will provide it.
22915
22916 if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
22917 this.appendInitSegment_ = {
22918 audio: true,
22919 video: true
22920 };
22921 this.startingMediaInfo_ = trackInfo;
22922 this.currentMediaInfo_ = trackInfo;
22923 this.logger_('trackinfo update', trackInfo);
22924 this.trigger('trackinfo');
22925 } // trackinfo may cause an abort if it results in a codec change
22926 // to an unsupported codec.
22927
22928
22929 if (this.checkForAbort_(simpleSegment.requestId)) {
22930 return;
22931 } // set trackinfo on the pending segment so that
22932 // it can append.
22933
22934
22935 this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
22936
22937 if (this.hasEnoughInfoToAppend_()) {
22938 this.processCallQueue_();
22939 }
22940 };
22941
22942 _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
22943 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22944
22945 if (this.checkForAbort_(simpleSegment.requestId)) {
22946 return;
22947 }
22948
22949 var segmentInfo = this.pendingSegment_;
22950 var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
22951 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
22952 segmentInfo[timingInfoProperty][timeType] = time;
22953 this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
22954
22955 if (this.hasEnoughInfoToAppend_()) {
22956 this.processCallQueue_();
22957 }
22958 };
22959
22960 _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
22961 var _this2 = this;
22962
22963 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22964
22965 if (this.checkForAbort_(simpleSegment.requestId)) {
22966 return;
22967 } // This could only happen with fmp4 segments, but
22968 // should still not happen in general
22969
22970
22971 if (captionData.length === 0) {
22972 this.logger_('SegmentLoader received no captions from a caption event');
22973 return;
22974 }
22975
22976 var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
22977 // can be adjusted by the timestamp offset
22978
22979 if (!segmentInfo.hasAppendedData_) {
22980 this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
22981 return;
22982 }
22983
22984 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
22985 var captionTracks = {}; // get total start/end and captions for each track/stream
22986
22987 captionData.forEach(function (caption) {
22988 // caption.stream is actually a track name...
22989 // set to the existing values in tracks or default values
22990 captionTracks[caption.stream] = captionTracks[caption.stream] || {
22991 // Infinity, as any other value will be less than this
22992 startTime: Infinity,
22993 captions: [],
22994 // 0, as any other value will be more than this
22995 endTime: 0
22996 };
22997 var captionTrack = captionTracks[caption.stream];
22998 captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
22999 captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
23000 captionTrack.captions.push(caption);
23001 });
23002 Object.keys(captionTracks).forEach(function (trackName) {
23003 var _captionTracks$trackN = captionTracks[trackName],
23004 startTime = _captionTracks$trackN.startTime,
23005 endTime = _captionTracks$trackN.endTime,
23006 captions = _captionTracks$trackN.captions;
23007 var inbandTextTracks = _this2.inbandTextTracks_;
23008
23009 _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);
23010
23011 createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
23012 // We do this because a rendition change that also changes the timescale for captions
23013 // will result in captions being re-parsed for certain segments. If we add them again
23014 // without clearing we will have two of the same captions visible.
23015
23016 removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
23017 addCaptionData({
23018 captionArray: captions,
23019 inbandTextTracks: inbandTextTracks,
23020 timestampOffset: timestampOffset
23021 });
23022 }); // Reset stored captions since we added parsed
23023 // captions to a text track at this point
23024
23025 if (this.transmuxer_) {
23026 this.transmuxer_.postMessage({
23027 action: 'clearParsedMp4Captions'
23028 });
23029 }
23030 };
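  // Example shape of the per-track grouping above (hypothetical cues): two
  // cues on stream "CC1" collapse into one entry before that window is
  // cleared and the cues are re-added:
  //
  //   { CC1: { startTime: 10.2, endTime: 12.8, captions: [cue1, cue2] } }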
23031
23032 _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
23033 this.earlyAbortWhenNeeded_(simpleSegment.stats);
23034
23035 if (this.checkForAbort_(simpleSegment.requestId)) {
23036 return;
23037 }
23038
23039 var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
23040
23041 if (!segmentInfo.hasAppendedData_) {
23042 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
23043 return;
23044 }
23045
23046 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
23047 // audio/video source with a metadata track, and an alt audio with a metadata track.
23048 // However, this probably won't happen, and if it does it can be handled then.
23049
23050 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
23051 addMetadata({
23052 inbandTextTracks: this.inbandTextTracks_,
23053 metadataArray: id3Frames,
23054 timestampOffset: timestampOffset,
23055 videoDuration: this.duration_()
23056 });
23057 };
23058
23059 _proto.processMetadataQueue_ = function processMetadataQueue_() {
23060 this.metadataQueue_.id3.forEach(function (fn) {
23061 return fn();
23062 });
23063 this.metadataQueue_.caption.forEach(function (fn) {
23064 return fn();
23065 });
23066 this.metadataQueue_.id3 = [];
23067 this.metadataQueue_.caption = [];
23068 };
23069
23070 _proto.processCallQueue_ = function processCallQueue_() {
23071 var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
23072 // functions may check the length of the load queue and default to pushing themselves
23073 // back onto the queue.
23074
23075 this.callQueue_ = [];
23076 callQueue.forEach(function (fun) {
23077 return fun();
23078 });
23079 };
23080
23081 _proto.processLoadQueue_ = function processLoadQueue_() {
23082 var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
23083 // functions may check the length of the load queue and default to pushing themselves
23084 // back onto the queue.
23085
23086 this.loadQueue_ = [];
23087 loadQueue.forEach(function (fun) {
23088 return fun();
23089 });
23090 }
23091 /**
23092 * Determines whether the loader has enough info to load the next segment.
23093 *
23094 * @return {boolean}
23095 * Whether or not the loader has enough info to load the next segment
23096 */
23097 ;
23098
23099 _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
23100 // Since primary timing goes by video, only the audio loader potentially needs to wait
23101 // to load.
23102 if (this.loaderType_ !== 'audio') {
23103 return true;
23104 }
23105
23106 var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
23107 // enough info to load.
23108
23109 if (!segmentInfo) {
23110 return false;
23111 } // The first segment can and should be loaded immediately so that source buffers are
23112 // created together (before appending). Source buffer creation uses the presence of
23113 // audio and video data to determine whether to create audio/video source buffers, and
23114 // uses processed (transmuxed or parsed) media to determine the types required.
23115
23116
23117 if (!this.getCurrentMediaInfo_()) {
23118 return true;
23119 }
23120
23121 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
23122 // can be requested and downloaded and only wait before it is transmuxed or parsed.
23123 // But in practice, there are a few reasons why it is better to wait until a loader
23124 // is ready to append that segment before requesting and downloading:
23125 //
23126 // 1. Because audio and main loaders cross discontinuities together, if this loader
23127 // is waiting for the other to catch up, then instead of requesting another
23128 // segment and using up more bandwidth, by not yet loading, more bandwidth is
23129 // allotted to the loader currently behind.
23130 // 2. media-segment-request doesn't have to have logic to consider whether a segment
23131 // is ready to be processed or not, isolating the queueing behavior to the loader.
23132 // 3. The audio loader bases some of its segment properties on timing information
23133 // provided by the main loader, meaning that, if the logic for waiting on
23134 // processing was in media-segment-request, then it would also need to know how
23135 // to re-generate the segment information after the main loader caught up.
23136 shouldWaitForTimelineChange({
23137 timelineChangeController: this.timelineChangeController_,
23138 currentTimeline: this.currentTimeline_,
23139 segmentTimeline: segmentInfo.timeline,
23140 loaderType: this.loaderType_,
23141 audioDisabled: this.audioDisabled_
23142 })) {
23143 return false;
23144 }
23145
23146 return true;
23147 };
23148
23149 _proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
23150 if (segmentInfo === void 0) {
23151 segmentInfo = this.pendingSegment_;
23152 }
23153
23154 return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
23155 };
23156
23157 _proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
23158 if (segmentInfo === void 0) {
23159 segmentInfo = this.pendingSegment_;
23160 }
23161
23162 return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
23163 };
23164
23165 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
23166 if (!this.sourceUpdater_.ready()) {
23167 return false;
23168 } // If content needs to be removed or the loader is waiting on an append reattempt,
23169 // then no additional content should be appended until the prior append is resolved.
23170
23171
23172 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
23173 return false;
23174 }
23175
23176 var segmentInfo = this.pendingSegment_;
23177 var trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
23178 // we do not have information on this specific
23179 // segment yet
23180
23181 if (!segmentInfo || !trackInfo) {
23182 return false;
23183 }
23184
23185 var hasAudio = trackInfo.hasAudio,
23186 hasVideo = trackInfo.hasVideo,
23187 isMuxed = trackInfo.isMuxed;
23188
23189 if (hasVideo && !segmentInfo.videoTimingInfo) {
23190 return false;
23191 } // muxed content only relies on video timing information for now.
23192
23193
23194 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
23195 return false;
23196 }
23197
23198 if (shouldWaitForTimelineChange({
23199 timelineChangeController: this.timelineChangeController_,
23200 currentTimeline: this.currentTimeline_,
23201 segmentTimeline: segmentInfo.timeline,
23202 loaderType: this.loaderType_,
23203 audioDisabled: this.audioDisabled_
23204 })) {
23205 return false;
23206 }
23207
23208 return true;
23209 };
23210
23211 _proto.handleData_ = function handleData_(simpleSegment, result) {
23212 this.earlyAbortWhenNeeded_(simpleSegment.stats);
23213
23214 if (this.checkForAbort_(simpleSegment.requestId)) {
23215 return;
23216 } // If there's anything in the call queue, then this data came later and should be
23217 // executed after the calls currently queued.
23218
23219
23220 if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
23221 this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
23222 return;
23223 }
23224
23225 var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
23226
23227 this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
23228
23229 this.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
23230 // logic may change behavior depending on the state, and changing state too early may
23231 // inflate our estimates of bandwidth. In the future this should be re-examined to
23232 // note more granular states.
23233 // don't process and append data if the mediaSource is closed
23234
23235 if (this.mediaSource_.readyState === 'closed') {
23236 return;
23237 } // if this request included an initialization segment, save that data
23238 // to the initSegment cache
23239
23240
23241 if (simpleSegment.map) {
23242 simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
23243
23244 segmentInfo.segment.map = simpleSegment.map;
23245 } // if this request included a segment key, save that data in the cache
23246
23247
23248 if (simpleSegment.key) {
23249 this.segmentKey(simpleSegment.key, true);
23250 }
23251
23252 segmentInfo.isFmp4 = simpleSegment.isFmp4;
23253 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
23254
23255 if (segmentInfo.isFmp4) {
23256 this.trigger('fmp4');
23257 segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
23258 } else {
23259 var trackInfo = this.getCurrentMediaInfo_();
23260 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
23261 var firstVideoFrameTimeForData;
23262
23263 if (useVideoTimingInfo) {
23264 firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
23265 } // Segment loader knows more about segment timing than the transmuxer (in certain
23266 // aspects), so make any changes required for a more accurate start time.
23267 // Don't set the end time yet, as the segment may not be finished processing.
23268
23269
23270 segmentInfo.timingInfo.start = this.trueSegmentStart_({
23271 currentStart: segmentInfo.timingInfo.start,
23272 playlist: segmentInfo.playlist,
23273 mediaIndex: segmentInfo.mediaIndex,
23274 currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
23275 useVideoTimingInfo: useVideoTimingInfo,
23276 firstVideoFrameTimeForData: firstVideoFrameTimeForData,
23277 videoTimingInfo: segmentInfo.videoTimingInfo,
23278 audioTimingInfo: segmentInfo.audioTimingInfo
23279 });
23280 } // Init segments for audio and video only need to be appended in certain cases. Now
23281 // that data is about to be appended, we can check the final cases to determine
23282 // whether we should append an init segment.
23283
23284
23285 this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
23286 // as we use the start of the segment to offset the best guess (playlist provided)
23287 // timestamp offset.
23288
23289 this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
23290 // be appended or not.
23291
23292 if (segmentInfo.isSyncRequest) {
23293 // first save/update our timing info for this segment.
23294 // this is what allows us to choose an accurate segment
23295 // and the main reason we make a sync request.
23296 this.updateTimingInfoEnd_(segmentInfo);
23297 this.syncController_.saveSegmentTimingInfo({
23298 segmentInfo: segmentInfo,
23299 shouldSaveTimelineMapping: this.loaderType_ === 'main'
23300 });
23301 var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
23302 // after taking into account its timing info, do not append it.
23303
23304 if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
23305 this.logger_('sync segment was incorrect, not appending');
23306 return;
23307 } // otherwise append it like any other segment as our guess was correct.
23308
23309
23310 this.logger_('sync segment was correct, appending');
23311 } // Save some state so that in the future anything waiting on first append (and/or
23312 // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
23313 // we need some notion of whether the timestamp offset or other relevant information
23314 // has had a chance to be set.
23315
23316
23317 segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
23318
23319 this.processMetadataQueue_();
23320 this.appendData_(segmentInfo, result);
23321 };
23322
23323 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
23324 // alt audio doesn't manage timestamp offset
23325 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
23326 // segment for each chunk
23327 !segmentInfo.changedTimestampOffset) {
23328 // if the timestamp offset changed, the timeline may have changed, so we have to re-
23329 // append init segments
23330 this.appendInitSegment_ = {
23331 audio: true,
23332 video: true
23333 };
23334 }
23335
23336 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
23337 // make sure we append init segment on playlist changes, in case the media config
23338 // changed
23339 this.appendInitSegment_[type] = true;
23340 }
23341 };
23342
23343 _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
23344 var type = _ref4.type,
23345 initSegment = _ref4.initSegment,
23346 map = _ref4.map,
23347 playlist = _ref4.playlist;
23348
23349 // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
23350 // (Section 3) required to parse the applicable Media Segments. It applies to every
23351 // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
23352 // or until the end of the playlist."
23353 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
23354 if (map) {
23355 var id = initSegmentId(map);
23356
23357 if (this.activeInitSegmentId_ === id) {
23358 // don't need to re-append the init segment if the ID matches
23359 return null;
23360 } // a map-specified init segment takes priority over any transmuxed (or otherwise
23361 // obtained) init segment
23362 //
23363 // this also caches the init segment for later use
23364
23365
23366 initSegment = this.initSegmentForMap(map, true).bytes;
23367 this.activeInitSegmentId_ = id;
23368 } // We used to always prepend init segments for video, however, that shouldn't be
23369 // necessary. Instead, we should only append on changes, similar to what we've always
23370 // done for audio. This is more important (though may not be that important) for
23371 // frame-by-frame appending for LHLS, simply because of the increased quantity of
23372 // appends.
23373
23374
23375 if (initSegment && this.appendInitSegment_[type]) {
23376 // Make sure we track the playlist that we last used for the init segment, so that
23377 // we can re-append the init segment in the event that we get data from a new
23378 // playlist. Discontinuities and track changes are handled in other sections.
23379 this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type. Until a change is necessary.
23380
23381 this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
23382 // we are appending the muxer init segment
23383
23384 this.activeInitSegmentId_ = null;
23385 return initSegment;
23386 }
23387
23388 return null;
23389 };
23390
23391 _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
23392 var _this3 = this;
23393
23394 var segmentInfo = _ref5.segmentInfo,
23395 type = _ref5.type,
23396 bytes = _ref5.bytes;
23397 var audioBuffered = this.sourceUpdater_.audioBuffered();
23398 var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
23399 // should be cleared out during the buffer removals. However, log in case it helps
23400 // debug.
23401
23402 if (audioBuffered.length > 1) {
23403 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
23404 }
23405
23406 if (videoBuffered.length > 1) {
23407 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
23408 }
23409
23410 var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
23411 var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
23412 var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
23413 var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
23414
23415 if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
23416 // Can't remove enough buffer to make room for new segment (or the browser doesn't
23417 // allow for appends of segments this size). In the future, it may be possible to
23418 // split up the segment and append in pieces, but for now, error out this playlist
23419 // in an attempt to switch to a more manageable rendition.
23420 this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ')));
23421 this.error({
23422 message: 'Quota exceeded error with append of a single segment of content',
23423 excludeUntil: Infinity
23424 });
23425 this.trigger('error');
23426 return;
23427 } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
23428 // that the segment-loader should block on future events until this one is handled, so
23429 // that it doesn't keep moving onto further segments. Adding the call to the call
23430 // queue will prevent further appends until waitingOnRemove_ and
23431 // quotaExceededErrorRetryTimeout_ are cleared.
23432 //
23433 // Note that this will only block the current loader. In the case of demuxed content,
23434 // the other load may keep filling as fast as possible. In practice, this should be
23435 // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
23436 // source buffer, or video fills without enough room for audio to append (and without
23437 // the availability of clearing out seconds of back buffer to make room for audio).
23438 // But it might still be good to handle this case in the future as a TODO.
23439
23440
23441 this.waitingOnRemove_ = true;
23442 this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
23443 segmentInfo: segmentInfo,
23444 type: type,
23445 bytes: bytes
23446 }));
23447 var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
23448 // before retrying.
23449
23450 var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
23451 this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
23452 this.remove(0, timeToRemoveUntil, function () {
23453 _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");
23454
23455 _this3.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
23456 // attempts (since we can't clear less than the minimum)
23457
23458 _this3.quotaExceededErrorRetryTimeout_ = window.setTimeout(function () {
23459 _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
23460
23461 _this3.quotaExceededErrorRetryTimeout_ = null;
23462
23463 _this3.processCallQueue_();
23464 }, MIN_BACK_BUFFER * 1000);
23465 }, true);
23466 };
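  // Worked illustration of the recovery above (MIN_BACK_BUFFER is assumed to be
  // a small constant number of seconds defined earlier in this bundle):
  //
  //   // if currentTime = 95 and MIN_BACK_BUFFER = 30,
  //   // remove(0, 95 - 30) frees [0, 65), then the queued append is retried
  //   // after a 30s timeout
  //
  // The timeout matches MIN_BACK_BUFFER because removals smaller than the
  // minimum are not attempted, so retrying sooner would be wasted work.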
23467
23468 _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
23469 var segmentInfo = _ref6.segmentInfo,
23470 type = _ref6.type,
23471 bytes = _ref6.bytes;
23472
23473 // if there's no error, nothing to do
23474 if (!error) {
23475 return;
23476 }
23477
23478 if (error.code === QUOTA_EXCEEDED_ERR) {
23479 this.handleQuotaExceededError_({
23480 segmentInfo: segmentInfo,
23481 type: type,
23482 bytes: bytes
23483 }); // A quota exceeded error should be recoverable with a future re-append, so no need
23484 // to trigger an append error.
23485
23486 return;
23487 }
23488
23489 this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
23490 this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
23491 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
23492 //
23493 // Trigger a special error so that it can be handled separately from normal,
23494 // recoverable errors.
23495
23496 this.trigger('appenderror');
23497 };
23498
23499 _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
23500 var segmentInfo = _ref7.segmentInfo,
23501 type = _ref7.type,
23502 initSegment = _ref7.initSegment,
23503 data = _ref7.data,
23504 bytes = _ref7.bytes;
23505
23506 // If this is a re-append, bytes were already created and don't need to be recreated
23507 if (!bytes) {
23508 var segments = [data];
23509 var byteLength = data.byteLength;
23510
23511 if (initSegment) {
23512 // if the media initialization segment is changing, append it before the content
23513 // segment
23514 segments.unshift(initSegment);
23515 byteLength += initSegment.byteLength;
23516 } // Technically we should be OK appending the init segment separately, however, we
23517 // haven't yet tested that, and prepending is how we have always done things.
23518
23519
23520 bytes = concatSegments({
23521 bytes: byteLength,
23522 segments: segments
23523 });
23524 }
23525
23526 this.sourceUpdater_.appendBuffer({
23527 segmentInfo: segmentInfo,
23528 type: type,
23529 bytes: bytes
23530 }, this.handleAppendError_.bind(this, {
23531 segmentInfo: segmentInfo,
23532 type: type,
23533 bytes: bytes
23534 }));
23535 };
23536
23537 _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
23538 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
23539 return;
23540 }
23541
23542 var segment = this.pendingSegment_.segment;
23543 var timingInfoProperty = type + "TimingInfo";
23544
23545 if (!segment[timingInfoProperty]) {
23546 segment[timingInfoProperty] = {};
23547 }
23548
23549 segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
23550 segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
23551 segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
23552 segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
23553 segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
23554
23555 segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
23556 };
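  // After this handler runs, segment[type + "TimingInfo"] has roughly the shape
  // below (sketch with illustrative values; times are in seconds of media time,
  // while baseMediaDecodeTime's units come from the transmuxer):
  //
  //   {
  //     transmuxerPrependedSeconds: 0,
  //     transmuxedPresentationStart: 10.01,
  //     transmuxedDecodeStart: 10.0,
  //     transmuxedPresentationEnd: 16.01,
  //     transmuxedDecodeEnd: 16.0,
  //     baseMediaDecodeTime: 900000
  //   }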
23557
23558 _proto.appendData_ = function appendData_(segmentInfo, result) {
23559 var type = result.type,
23560 data = result.data;
23561
23562 if (!data || !data.byteLength) {
23563 return;
23564 }
23565
23566 if (type === 'audio' && this.audioDisabled_) {
23567 return;
23568 }
23569
23570 var initSegment = this.getInitSegmentAndUpdateState_({
23571 type: type,
23572 initSegment: result.initSegment,
23573 playlist: segmentInfo.playlist,
23574 map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
23575 });
23576 this.appendToSourceBuffer_({
23577 segmentInfo: segmentInfo,
23578 type: type,
23579 initSegment: initSegment,
23580 data: data
23581 });
23582 }
23583 /**
23584 * load a specific segment from a request into the buffer
23585 *
23586 * @private
23587 */
23588 ;
23589
23590 _proto.loadSegment_ = function loadSegment_(segmentInfo) {
23591 var _this4 = this;
23592
23593 this.state = 'WAITING';
23594 this.pendingSegment_ = segmentInfo;
23595 this.trimBackBuffer_(segmentInfo);
23596
23597 if (typeof segmentInfo.timestampOffset === 'number') {
23598 if (this.transmuxer_) {
23599 this.transmuxer_.postMessage({
23600 action: 'clearAllMp4Captions'
23601 });
23602 }
23603 }
23604
23605 if (!this.hasEnoughInfoToLoad_()) {
23606 this.loadQueue_.push(function () {
23607 // regenerate the audioAppendStart, timestampOffset, etc as they
23608 // may have changed since this function was added to the queue.
23609 var options = _extends_1({}, segmentInfo, {
23610 forceTimestampOffset: true
23611 });
23612
23613 _extends_1(segmentInfo, _this4.generateSegmentInfo_(options));
23614
23615 _this4.isPendingTimestampOffset_ = false;
23616
23617 _this4.updateTransmuxerAndRequestSegment_(segmentInfo);
23618 });
23619 return;
23620 }
23621
23622 this.updateTransmuxerAndRequestSegment_(segmentInfo);
23623 };
23624
23625 _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
23626 var _this5 = this;
23627
23628 // We'll update the source buffer's timestamp offset once we have transmuxed data, but
23629 // the transmuxer still needs to be updated before then.
23630 //
23631 // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
23632 // offset must be passed to the transmuxer for stream correcting adjustments.
23633 if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
23634 this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
23635
23636 segmentInfo.gopsToAlignWith = [];
23637 this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
23638
23639 this.transmuxer_.postMessage({
23640 action: 'reset'
23641 });
23642 this.transmuxer_.postMessage({
23643 action: 'setTimestampOffset',
23644 timestampOffset: segmentInfo.timestampOffset
23645 });
23646 }
23647
23648 var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
23649 var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
23650 var isWalkingForward = this.mediaIndex !== null;
23651 var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
23652 // the first timeline
23653 segmentInfo.timeline > 0;
23654 var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
23655 this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
23656 // then this init segment has never been seen before and should be appended.
23657 //
23658 // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
23659 // both to true and leave the decision of whether to append the init segment to append time.
23660
23661 if (simpleSegment.map && !simpleSegment.map.bytes) {
23662 this.logger_('going to request init segment.');
23663 this.appendInitSegment_ = {
23664 video: true,
23665 audio: true
23666 };
23667 }
23668
23669 segmentInfo.abortRequests = mediaSegmentRequest({
23670 xhr: this.vhs_.xhr,
23671 xhrOptions: this.xhrOptions_,
23672 decryptionWorker: this.decrypter_,
23673 segment: simpleSegment,
23674 abortFn: this.handleAbort_.bind(this, segmentInfo),
23675 progressFn: this.handleProgress_.bind(this),
23676 trackInfoFn: this.handleTrackInfo_.bind(this),
23677 timingInfoFn: this.handleTimingInfo_.bind(this),
23678 videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
23679 audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
23680 captionsFn: this.handleCaptions_.bind(this),
23681 isEndOfTimeline: isEndOfTimeline,
23682 endedTimelineFn: function endedTimelineFn() {
23683 _this5.logger_('received endedtimeline callback');
23684 },
23685 id3Fn: this.handleId3_.bind(this),
23686 dataFn: this.handleData_.bind(this),
23687 doneFn: this.segmentRequestFinished_.bind(this),
23688 onTransmuxerLog: function onTransmuxerLog(_ref8) {
23689 var message = _ref8.message,
23690 level = _ref8.level,
23691 stream = _ref8.stream;
23692
23693 _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
23694 }
23695 });
23696 }
23697 /**
23698 * trim the back buffer so that we don't have too much data
23699 * in the source buffer
23700 *
23701 * @private
23702 *
23703 * @param {Object} segmentInfo - the current segment
23704 */
23705 ;
23706
23707 _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
23708 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
23709 // buffer and a very conservative "garbage collector"
23710 // We manually clear out the old buffer to ensure
23711 // we don't trigger the QuotaExceeded error
23712 // on the source buffer during subsequent appends
23713
23714 if (removeToTime > 0) {
23715 this.remove(0, removeToTime);
23716 }
23717 }
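  // Illustration (assumed behavior of safeBackBufferTrimTime, defined earlier
  // in this bundle): it picks a point safely behind both the playhead and the
  // seekable start, e.g. with currentTime = 120 and targetDuration = 10 the
  // trim point stays at least one target duration behind playback, so
  // remove(0, removeToTime) never deletes buffer near the playhead.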
23718 /**
23719 * create a simplified copy of the segment object with just the
23720 * information necessary to perform the XHR and decryption
23721 *
23722 * @private
23723 *
23724 * @param {Object} segmentInfo - the current segment
23725 * @return {Object} a simplified segment object copy
23726 */
23727 ;
23728
23729 _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
23730 var segment = segmentInfo.segment;
23731 var part = segmentInfo.part;
23732 var simpleSegment = {
23733 resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
23734 byterange: part ? part.byterange : segment.byterange,
23735 requestId: segmentInfo.requestId,
23736 transmuxer: segmentInfo.transmuxer,
23737 audioAppendStart: segmentInfo.audioAppendStart,
23738 gopsToAlignWith: segmentInfo.gopsToAlignWith,
23739 part: segmentInfo.part
23740 };
23741 var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
23742
23743 if (previousSegment && previousSegment.timeline === segment.timeline) {
23744 // The baseStartTime of a segment is used to handle rollover when probing the TS
23745 // segment to retrieve timing information. Since the probe only looks at the media's
23746 // times (e.g., PTS and DTS values of the segment), and doesn't consider the
23747 // player's time (e.g., player.currentTime()), baseStartTime should reflect the
23748 // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
23749 // seconds of media time, so should be used here. The previous segment is used since
23750 // the end of the previous segment should represent the beginning of the current
23751 // segment, so long as they are on the same timeline.
23752 if (previousSegment.videoTimingInfo) {
23753 simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
23754 } else if (previousSegment.audioTimingInfo) {
23755 simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
23756 }
23757 }
23758
23759 if (segment.key) {
23760 // if the media sequence is greater than 2^32, the IV will be incorrect
23761 // assuming 10s segments, that would be about 1300 years
23762 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
23763 simpleSegment.key = this.segmentKey(segment.key);
23764 simpleSegment.key.iv = iv;
23765 }
23766
23767 if (segment.map) {
23768 simpleSegment.map = this.initSegmentForMap(segment.map);
23769 }
23770
23771 return simpleSegment;
23772 };
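  // Per the HLS spec, when an EXT-X-KEY tag omits the IV attribute, the
  // segment's media sequence number is used as a 128-bit big-endian IV.
  // Illustration for the 5th segment of a playlist with
  // #EXT-X-MEDIA-SEQUENCE:100:
  //
  //   // mediaIndex 4 + mediaSequence 100 => sequence number 104 in the last
  //   // Uint32 slot, the other 96 bits zero
  //   var iv = new Uint32Array([0, 0, 0, 104]);
  //
  // which is what the fallback above produces; the 2^32 rollover noted in the
  // comment is the cost of packing the number into a single Uint32 slot.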
23773
23774 _proto.saveTransferStats_ = function saveTransferStats_(stats) {
23775 // every request counts as a media request even if it has been aborted
23776 // or canceled due to a timeout
23777 this.mediaRequests += 1;
23778
23779 if (stats) {
23780 this.mediaBytesTransferred += stats.bytesReceived;
23781 this.mediaTransferDuration += stats.roundTripTime;
23782 }
23783 };
23784
23785 _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
23786 // byteLength will be used for throughput, and should be based on bytes received,
23787 // which we only know at the end of the request and should reflect total bytes
23788 // downloaded rather than just bytes processed from components of the segment
23789 this.pendingSegment_.byteLength = stats.bytesReceived;
23790
23791 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
23792 this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
23793 return;
23794 }
23795
23796 this.bandwidth = stats.bandwidth;
23797 this.roundTrip = stats.roundTripTime;
23798 };
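  // The stats object is produced by the media segment request layer;
  // stats.bandwidth is expressed in bits per second. A sketch of how such a
  // value can be derived from the transfer stats tracked in saveTransferStats_
  // (assumed formula, for illustration only):
  //
  //   // bytesReceived / roundTripTime(ms) = bytes per ms; * 8 * 1000 = bits/s
  //   var bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);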
23799
23800 _proto.handleTimeout_ = function handleTimeout_() {
23801 // although the VTT segment loader bandwidth isn't really used, it's good to
23802 // maintain functionality between segment loaders
23803 this.mediaRequestsTimedout += 1;
23804 this.bandwidth = 1;
23805 this.roundTrip = NaN;
23806 this.trigger('bandwidthupdate');
23807 this.trigger('timeout');
23808 }
23809 /**
23810 * Handle the callback from the segmentRequest function and set the
23811 * associated SegmentLoader state and errors if necessary
23812 *
23813 * @private
23814 */
23815 ;
23816
23817 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
23818 // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
23819 // check the call queue directly since this function doesn't need to deal with any
23820 // data, and can continue even if the source buffers are not set up and we didn't get
23821 // any data from the segment
23822 if (this.callQueue_.length) {
23823 this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
23824 return;
23825 }
23826
23827 this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
23828
23829 if (!this.pendingSegment_) {
23830 return;
23831 } // the request was aborted and the SegmentLoader has already started
23832 // another request. this can happen when the timeout for an aborted
23833 // request triggers due to a limitation in the XHR library.
23834 // do not count this as any sort of request or we risk double-counting
23835
23836
23837 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
23838 return;
23839 } // an error occurred from the active pendingSegment_ so reset everything
23840
23841
23842 if (error) {
23843 this.pendingSegment_ = null;
23844 this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
23845
23846 if (error.code === REQUEST_ERRORS.ABORTED) {
23847 return;
23848 }
23849
23850 this.pause(); // the error is really just that at least one of the requests timed-out
23851 // set the bandwidth to a very low value and trigger an ABR switch to
23852 // take emergency action
23853
23854 if (error.code === REQUEST_ERRORS.TIMEOUT) {
23855 this.handleTimeout_();
23856 return;
23857 } // if control-flow has arrived here, then the error is real
23858 // emit an error event to blacklist the current playlist
23859
23860
23861 this.mediaRequestsErrored += 1;
23862 this.error(error);
23863 this.trigger('error');
23864 return;
23865 }
23866
23867 var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
23868 // generated for ABR purposes
23869
23870 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
23871 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
23872
23873 if (result.gopInfo) {
23874 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
23875 } // Although we may have already started appending on progress, we shouldn't switch the
23876 // state away from loading until we are officially done loading the segment data.
23877
23878
23879 this.state = 'APPENDING'; // used for testing
23880
23881 this.trigger('appending');
23882 this.waitForAppendsToComplete_(segmentInfo);
23883 };
23884
23885 _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
23886 var timelineMapping = this.syncController_.mappingForTimeline(timeline);
23887
23888 if (timelineMapping !== null) {
23889 this.timeMapping_ = timelineMapping;
23890 }
23891 };
23892
23893 _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
23894 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
23895 this.mediaSecondsLoaded += segment.end - segment.start;
23896 } else {
23897 this.mediaSecondsLoaded += segment.duration;
23898 }
23899 };
23900
23901 _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
23902 if (timestampOffset === null) {
23903 return false;
23904 } // note that we're potentially using the same timestamp offset for both video and
23905 // audio
23906
23907
23908 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
23909 return true;
23910 }
23911
23912 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
23913 return true;
23914 }
23915
23916 return false;
23917 };
23918
23919 _proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
23920 var currentStart = _ref9.currentStart,
23921 playlist = _ref9.playlist,
23922 mediaIndex = _ref9.mediaIndex,
23923 firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData,
23924 currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset,
23925 useVideoTimingInfo = _ref9.useVideoTimingInfo,
23926 videoTimingInfo = _ref9.videoTimingInfo,
23927 audioTimingInfo = _ref9.audioTimingInfo;
23928
23929 if (typeof currentStart !== 'undefined') {
23930 // if start was set once, keep using it
23931 return currentStart;
23932 }
23933
23934 if (!useVideoTimingInfo) {
23935 return audioTimingInfo.start;
23936 }
23937
23938 var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
23939 // within that segment. Since the transmuxer maintains a cache of incomplete data
23940 // from the last frame seen, the start time may reflect a frame that starts
23941 // in the previous segment. Check for that case and ensure the start time is
23942 // accurate for the segment.
23943
23944 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
23945 return firstVideoFrameTimeForData;
23946 }
23947
23948 return videoTimingInfo.start;
23949 };
23950
23951 _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
23952 var trackInfo = this.getCurrentMediaInfo_(segmentInfo);
23953
23954 if (!trackInfo) {
23955 this.error({
23956 message: 'No starting media returned, likely due to an unsupported media format.',
23957 blacklistDuration: Infinity
23958 });
23959 this.trigger('error');
23960 return;
23961 } // Although transmuxing is done, appends may not yet be finished. Throw a marker
23962 // on each queue this loader is responsible for to ensure that the appends are
23963 // complete.
23964
23965
23966 var hasAudio = trackInfo.hasAudio,
23967 hasVideo = trackInfo.hasVideo,
23968 isMuxed = trackInfo.isMuxed;
23969 var waitForVideo = this.loaderType_ === 'main' && hasVideo;
23970 var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
23971 segmentInfo.waitingOnAppends = 0; // segments with no data
23972
23973 if (!segmentInfo.hasAppendedData_) {
23974 if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
23975 // When there's no audio or video data in the segment, there's no audio or video
23976 // timing information.
23977 //
23978 // If there's no audio or video timing information, then the timestamp offset
23979 // can't be adjusted to the appropriate value for the transmuxer and source
23980 // buffers.
23981 //
23982 // Therefore, the next segment should be used to set the timestamp offset.
23983 this.isPendingTimestampOffset_ = true;
23984 } // override settings for metadata only segments
23985
23986
23987 segmentInfo.timingInfo = {
23988 start: 0
23989 };
23990 segmentInfo.waitingOnAppends++;
23991
23992 if (!this.isPendingTimestampOffset_) {
23993 // update the timestampoffset
23994 this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
23995 // no video/audio data.
23996
23997 this.processMetadataQueue_();
23998 } // append is "done" instantly with no data.
23999
24000
24001 this.checkAppendsDone_(segmentInfo);
24002 return;
24003 } // Since source updater could call back synchronously, do the increments first.
24004
24005
24006 if (waitForVideo) {
24007 segmentInfo.waitingOnAppends++;
24008 }
24009
24010 if (waitForAudio) {
24011 segmentInfo.waitingOnAppends++;
24012 }
24013
24014 if (waitForVideo) {
24015 this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
24016 }
24017
24018 if (waitForAudio) {
24019 this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
24020 }
24021 };
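  // Counting sketch for the logic above: a demuxed segment carrying both tracks
  // sets waitingOnAppends to 2 (one per source buffer queue); each queued
  // checkAppendsDone_ callback decrements it, and handleAppendsDone_ only runs
  // once the count reaches 0. Metadata-only segments take the early path and
  // complete after a single synthetic count.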
24022
24023 _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
24024 if (this.checkForAbort_(segmentInfo.requestId)) {
24025 return;
24026 }
24027
24028 segmentInfo.waitingOnAppends--;
24029
24030 if (segmentInfo.waitingOnAppends === 0) {
24031 this.handleAppendsDone_();
24032 }
24033 };
24034
24035 _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
24036 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
24037
24038 if (illegalMediaSwitchError) {
24039 this.error({
24040 message: illegalMediaSwitchError,
24041 blacklistDuration: Infinity
24042 });
24043 this.trigger('error');
24044 return true;
24045 }
24046
24047 return false;
24048 };
24049
24050 _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
24051 if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
24052 // priority, timing-wise, so we must wait
24053 typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
24054 segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
24055 this.loaderType_ !== 'main') {
24056 return;
24057 }
24058
24059 var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
24060 // the timing info here comes from video. In the event that the audio is longer than
24061 // the video, this will trim the start of the audio.
24062 // This also trims any offset from 0 at the beginning of the media
24063
24064 segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
24065 videoTimingInfo: segmentInfo.segment.videoTimingInfo,
24066 audioTimingInfo: segmentInfo.segment.audioTimingInfo,
24067 timingInfo: segmentInfo.timingInfo
24068 }); // In the event that there are part segment downloads, each will try to update the
24069 // timestamp offset. Retaining this bit of state prevents us from updating in the
24070 // future (within the same segment), however, there may be a better way to handle it.
24071
24072 segmentInfo.changedTimestampOffset = true;
24073
24074 if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
24075 this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
24076 didChange = true;
24077 }
24078
24079 if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
24080 this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
24081 didChange = true;
24082 }
24083
24084 if (didChange) {
24085 this.trigger('timestampoffset');
24086 }
24087 };
24088
24089 _proto.getSegmentStartTimeForTimestampOffsetCalculation_ = function getSegmentStartTimeForTimestampOffsetCalculation_(_ref10) {
24090 var videoTimingInfo = _ref10.videoTimingInfo,
24091 audioTimingInfo = _ref10.audioTimingInfo,
24092 timingInfo = _ref10.timingInfo;
24093
24094 if (!this.useDtsForTimestampOffset_) {
24095 return timingInfo.start;
24096 }
24097
24098 if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {
24099 return videoTimingInfo.transmuxedDecodeStart;
24100 } // handle audio only
24101
24102
24103 if (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {
24104 return audioTimingInfo.transmuxedDecodeStart;
24105 } // handle content not transmuxed (e.g., MP4)
24106
24107
24108 return timingInfo.start;
24109 };
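  // Illustration: with useDtsForTimestampOffset_ enabled and
  //
  //   videoTimingInfo = { transmuxedDecodeStart: 9.97 }  // DTS
  //   timingInfo = { start: 10.01 }                      // PTS
  //
  // this returns 9.97, so the timestamp offset is based on decode time and a
  // PTS/DTS gap at the start of the stream is not carried into the buffer.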
24110
24111 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
24112 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
24113 var trackInfo = this.getMediaInfo_();
24114 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
24115 var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
24116
24117 if (!prioritizedTimingInfo) {
24118 return;
24119 }
24120
24121 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
24122 // current example is the case of fmp4), so use the rough duration to calculate an
24123 // end time.
24124 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
24125 }
24126 /**
24127 * callback to run when appendBuffer is finished. detects if we are
24128 * in a good state to do things with the data we got, or if we need
24129 * to wait for more
24130 *
24131 * @private
24132 */
24133 ;
24134
24135 _proto.handleAppendsDone_ = function handleAppendsDone_() {
24136 // appendsdone can cause an abort
24137 if (this.pendingSegment_) {
24138 this.trigger('appendsdone');
24139 }
24140
24141 if (!this.pendingSegment_) {
24142 this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
24143 // all appending cases?
24144
24145 if (!this.paused()) {
24146 this.monitorBuffer_();
24147 }
24148
24149 return;
24150 }
24151
24152 var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
24153 // best to wait until all appends are done so we're sure that the primary media is
24154 // finished (and we have its end time).
24155
24156 this.updateTimingInfoEnd_(segmentInfo);
24157
24158 if (this.shouldSaveSegmentTimingInfo_) {
24159 // Timeline mappings should only be saved for the main loader. This is for multiple
24160 // reasons:
24161 //
24162 // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
24163 // and the main loader try to save the timeline mapping, whichever comes later
24164 // will overwrite the first. In theory this is OK, as the mappings should be the
24165 // same, however, it breaks for (2)
24166 // 2) In the event of a live stream, the initial live point will make for a somewhat
24167 // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
24168 // the mapping will be off for one of the streams, dependent on which one was
24169 // first saved (see (1)).
24170 // 3) Primary timing goes by video in VHS, so the mapping should be video.
24171 //
24172 // Since the audio loader will wait for the main loader to load the first segment,
24173 // the main loader will save the first timeline mapping, and ensure that there won't
24174 // be a case where audio loads two segments without saving a mapping (thus leading
24175 // to missing segment timing info).
24176 this.syncController_.saveSegmentTimingInfo({
24177 segmentInfo: segmentInfo,
24178 shouldSaveTimelineMapping: this.loaderType_ === 'main'
24179 });
24180 }
24181
24182 var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
24183
24184 if (segmentDurationMessage) {
24185 if (segmentDurationMessage.severity === 'warn') {
24186 videojs__default["default"].log.warn(segmentDurationMessage.message);
24187 } else {
24188 this.logger_(segmentDurationMessage.message);
24189 }
24190 }
24191
24192 this.recordThroughput_(segmentInfo);
24193 this.pendingSegment_ = null;
24194 this.state = 'READY';
24195
24196 if (segmentInfo.isSyncRequest) {
24197 this.trigger('syncinfoupdate'); // if the sync request was not appended
24198 // then it was not the correct segment.
24199 // throw it away and use the data it gave us
24200 // to get the correct one.
24201
24202 if (!segmentInfo.hasAppendedData_) {
24203 this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
24204 return;
24205 }
24206 }
24207
24208 this.logger_("Appended " + segmentInfoString(segmentInfo));
24209 this.addSegmentMetadataCue_(segmentInfo);
24210 this.fetchAtBuffer_ = true;
24211
24212 if (this.currentTimeline_ !== segmentInfo.timeline) {
24213 this.timelineChangeController_.lastTimelineChange({
24214 type: this.loaderType_,
24215 from: this.currentTimeline_,
24216 to: segmentInfo.timeline
24217 }); // If audio is not disabled, the main segment loader is responsible for updating
24218 // the audio timeline as well. If the content is video only, this won't have any
24219 // impact.
24220
24221 if (this.loaderType_ === 'main' && !this.audioDisabled_) {
24222 this.timelineChangeController_.lastTimelineChange({
24223 type: 'audio',
24224 from: this.currentTimeline_,
24225 to: segmentInfo.timeline
24226 });
24227 }
24228 }
24229
24230 this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
24231 // the following conditional otherwise it may consider this a bad "guess"
24232 // and attempt to resync when the post-update seekable window and live
24233 // point would mean that this was the perfect segment to fetch
24234
24235 this.trigger('syncinfoupdate');
24236 var segment = segmentInfo.segment;
24237 var part = segmentInfo.part;
24238 var badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;
24239 var badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before
24240 // the currentTime_ that means that our conservative guess was too conservative.
24241 // In that case, reset the loader state so that we try to use any information gained
24242 // from the previous request to create a new, more accurate, sync-point.
24243
24244 if (badSegmentGuess || badPartGuess) {
24245 this.logger_("bad " + (badSegmentGuess ? 'segment' : 'part') + " " + segmentInfoString(segmentInfo));
24246 this.resetEverything();
24247 return;
24248 }
24249
24250 var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
24251 // and conservatively guess
24252
24253 if (isWalkingForward) {
24254 this.trigger('bandwidthupdate');
24255 }
24256
24257 this.trigger('progress');
24258 this.mediaIndex = segmentInfo.mediaIndex;
24259 this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
24260 // buffer, end the stream. this ensures the "ended" event will
24261 // fire if playback reaches that point.
24262
24263 if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
24264 this.endOfStream();
24265 } // used for testing
24266
24267
24268 this.trigger('appended');
24269
24270 if (segmentInfo.hasAppendedData_) {
24271 this.mediaAppends++;
24272 }
24273
24274 if (!this.paused()) {
24275 this.monitorBuffer_();
24276 }
24277 }
24278 /**
24279 * Records the current throughput of the decrypt, transmux, and append
24280 * portion of the segment pipeline. `throughput.rate` is the cumulative
24281 * moving average of the throughput. `throughput.count` is the number of
24282 * data points in the average.
24283 *
24284 * @private
24285 * @param {Object} segmentInfo the object returned by loadSegment
24286 */
24287 ;
24288
24289 _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
24290 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
24291 this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
24292 return;
24293 }
24294
24295 var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
24296 // by zero in the case where the throughput is ridiculously high
24297
24298 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
24299
24300 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
24301 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
24302
24303 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
24304 }
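  // The cumulative moving average above, worked through with illustrative
  // numbers:
  //
  //   // oldAvg = 4e6 bits/s over count = 3 samples; new sample = 8e6 bits/s
  //   // newAvg = 4e6 + (8e6 - 4e6) / (3 + 1) = 5e6 bits/s; count becomes 4
  //
  // Note that ++this.throughput.count pre-increments, supplying the
  // (sampleCount + 1) divisor from the formula in the comment.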
24305 /**
24306 * Adds a cue to the segment-metadata track with some metadata information about the
24307 * segment
24308 *
24309 * @private
24310 * @param {Object} segmentInfo
24311 * the object returned by loadSegment
24312 * @method addSegmentMetadataCue_
24313 */
24314 ;
24315
24316 _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
24317 if (!this.segmentMetadataTrack_) {
24318 return;
24319 }
24320
24321 var segment = segmentInfo.segment;
24322 var start = segment.start;
24323 var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
24324
24325 if (!finite(start) || !finite(end)) {
24326 return;
24327 }
24328
24329 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
24330 var Cue = window.WebKitDataCue || window.VTTCue;
24331 var value = {
24332 custom: segment.custom,
24333 dateTimeObject: segment.dateTimeObject,
24334 dateTimeString: segment.dateTimeString,
24335 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
24336 resolution: segmentInfo.playlist.attributes.RESOLUTION,
24337 codecs: segmentInfo.playlist.attributes.CODECS,
24338 byteLength: segmentInfo.byteLength,
24339 uri: segmentInfo.uri,
24340 timeline: segmentInfo.timeline,
24341 playlist: segmentInfo.playlist.id,
24342 start: start,
24343 end: end
24344 };
24345 var data = JSON.stringify(value);
24346 var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to smooth over the
24347 // differences between WebKitDataCue in Safari and VTTCue in other browsers
24348
24349 cue.value = value;
24350 this.segmentMetadataTrack_.addCue(cue);
24351 };
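  // Hedged usage sketch: consumers can read these cues back off the player's
  // segment-metadata text track (track label assumed per the VHS docs):
  //
  //   var tracks = player.textTracks();
  //
  //   for (var i = 0; i < tracks.length; i++) {
  //     if (tracks[i].label === 'segment-metadata') {
  //       var cue = tracks[i].activeCues && tracks[i].activeCues[0];
  //
  //       if (cue) {
  //         console.log(cue.value.playlist, cue.value.bandwidth);
  //       }
  //     }
  //   }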
24352
24353 return SegmentLoader;
24354 }(videojs__default["default"].EventTarget);
24355
24356 function noop() {}
24357
24358 var toTitleCase = function toTitleCase(string) {
24359 if (typeof string !== 'string') {
24360 return string;
24361 }
24362
24363 return string.replace(/./, function (w) {
24364 return w.toUpperCase();
24365 });
24366 };
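  // Note: the non-global /./ only matches the first character, so
  // toTitleCase('video') returns 'Video'; this is used below to build per-type
  // handler names such as 'onVideoUpdateEnd_'.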
24367
24368 var bufferTypes = ['video', 'audio'];
24369
24370 var _updating = function updating(type, sourceUpdater) {
24371 var sourceBuffer = sourceUpdater[type + "Buffer"];
24372 return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
24373 };
24374
24375 var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
24376 for (var i = 0; i < queue.length; i++) {
24377 var queueEntry = queue[i];
24378
24379 if (queueEntry.type === 'mediaSource') {
24380 // If the next entry is a media source entry (uses multiple source buffers), block
24381 // processing to allow it to go through first.
24382 return null;
24383 }
24384
24385 if (queueEntry.type === type) {
24386 return i;
24387 }
24388 }
24389
24390 return null;
24391 };
24392
24393 var shiftQueue = function shiftQueue(type, sourceUpdater) {
24394 if (sourceUpdater.queue.length === 0) {
24395 return;
24396 }
24397
24398 var queueIndex = 0;
24399 var queueEntry = sourceUpdater.queue[queueIndex];
24400
24401 if (queueEntry.type === 'mediaSource') {
24402 if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
24403 sourceUpdater.queue.shift();
24404 queueEntry.action(sourceUpdater);
24405
24406 if (queueEntry.doneFn) {
24407 queueEntry.doneFn();
24408 } // Only specific source buffer actions must wait for async updateend events. Media
24409 // Source actions process synchronously. Therefore, both audio and video source
24410 // buffers are now clear to process the next queue entries.
24411
24412
24413 shiftQueue('audio', sourceUpdater);
24414 shiftQueue('video', sourceUpdater);
24415 } // Media Source actions require both source buffers, so if the media source action
24416 // couldn't process yet (because one or both source buffers are busy), block other
24417 // queue actions until both are available and the media source action can process.
24418
24419
24420 return;
24421 }
24422
24423 if (type === 'mediaSource') {
24424 // If the queue was shifted by a media source action (this happens when pushing a
24425 // media source action onto the queue), then it wasn't from an updateend event from an
24426 // audio or video source buffer, so there's no change from previous state, and no
24427 // processing should be done.
24428 return;
24429 } // Media source queue entries don't need to consider whether the source updater is
24430 // started (i.e., source buffers are created) as they don't need the source buffers, but
24431 // source buffer queue entries do.
24432
24433
24434 if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
24435 return;
24436 }
24437
24438 if (queueEntry.type !== type) {
24439 queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
24440
24441 if (queueIndex === null) {
24442 // Either there's no queue entry that uses this source buffer type in the queue, or
24443 // there's a media source queue entry before the next entry of this type, in which
24444 // case wait for that action to process first.
24445 return;
24446 }
24447
24448 queueEntry = sourceUpdater.queue[queueIndex];
24449 }
24450
24451 sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
24452 //
24453 // The queue pending operation must be set before the action is performed in the event
24454 // that the action results in a synchronous event that is acted upon. For instance, if
24455 // an exception is thrown that can be handled, it's possible that new actions will be
24456 // appended to an empty queue and immediately executed, but would not have the correct
24457 // pending information if this property was set after the action was performed.
24458
24459 sourceUpdater.queuePending[type] = queueEntry;
24460 queueEntry.action(type, sourceUpdater);
24461
24462 if (!queueEntry.doneFn) {
24463 // synchronous operation, process next entry
24464 sourceUpdater.queuePending[type] = null;
24465 shiftQueue(type, sourceUpdater);
24466 return;
24467 }
24468 };
24469
24470 var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
24471 var buffer = sourceUpdater[type + "Buffer"];
24472 var titleType = toTitleCase(type);
24473
24474 if (!buffer) {
24475 return;
24476 }
24477
24478 buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
24479 buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
24480 sourceUpdater.codecs[type] = null;
24481 sourceUpdater[type + "Buffer"] = null;
24482 };
24483
24484 var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
24485 return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
24486 };
24487
24488 var actions = {
24489 appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
24490 return function (type, sourceUpdater) {
24491 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
24492 // or the media source does not contain this source buffer.
24493
24494 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24495 return;
24496 }
24497
24498 sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
24499
24500 try {
24501 sourceBuffer.appendBuffer(bytes);
24502 } catch (e) {
24503 sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
24504 sourceUpdater.queuePending[type] = null;
24505 onError(e);
24506 }
24507 };
24508 },
24509 remove: function remove(start, end) {
24510 return function (type, sourceUpdater) {
24511 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
24512 // or the media source does not contain this source buffer.
24513
24514 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24515 return;
24516 }
24517
24518 sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
24519
24520 try {
24521 sourceBuffer.remove(start, end);
24522 } catch (e) {
24523 sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
24524 }
24525 };
24526 },
24527 timestampOffset: function timestampOffset(offset) {
24528 return function (type, sourceUpdater) {
24529 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
24530 // or the media source does not contain this source buffer.
24531
24532 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24533 return;
24534 }
24535
24536 sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
24537 sourceBuffer.timestampOffset = offset;
24538 };
24539 },
24540 callback: function callback(_callback) {
24541 return function (type, sourceUpdater) {
24542 _callback();
24543 };
24544 },
24545 endOfStream: function endOfStream(error) {
24546 return function (sourceUpdater) {
24547 if (sourceUpdater.mediaSource.readyState !== 'open') {
24548 return;
24549 }
24550
24551 sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
24552
24553 try {
24554 sourceUpdater.mediaSource.endOfStream(error);
24555 } catch (e) {
24556 videojs__default["default"].log.warn('Failed to call media source endOfStream', e);
24557 }
24558 };
24559 },
24560 duration: function duration(_duration) {
24561 return function (sourceUpdater) {
24562 sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
24563
24564 try {
24565 sourceUpdater.mediaSource.duration = _duration;
24566 } catch (e) {
24567 videojs__default["default"].log.warn('Failed to set media source duration', e);
24568 }
24569 };
24570 },
24571 abort: function abort() {
24572 return function (type, sourceUpdater) {
24573 if (sourceUpdater.mediaSource.readyState !== 'open') {
24574 return;
24575 }
24576
24577 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
24578 // or the media source does not contain this source buffer.
24579
24580 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24581 return;
24582 }
24583
24584 sourceUpdater.logger_("calling abort on " + type + "Buffer");
24585
24586 try {
24587 sourceBuffer.abort();
24588 } catch (e) {
24589 videojs__default["default"].log.warn("Failed to abort on " + type + "Buffer", e);
24590 }
24591 };
24592 },
24593 addSourceBuffer: function addSourceBuffer(type, codec) {
24594 return function (sourceUpdater) {
24595 var titleType = toTitleCase(type);
24596 var mime = getMimeForCodec(codec);
24597 sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
24598 var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
24599 sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
24600 sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
24601 sourceUpdater.codecs[type] = codec;
24602 sourceUpdater[type + "Buffer"] = sourceBuffer;
24603 };
24604 },
24605 removeSourceBuffer: function removeSourceBuffer(type) {
24606 return function (sourceUpdater) {
24607 var sourceBuffer = sourceUpdater[type + "Buffer"];
24608 cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
24609 // or the media source does not contain this source buffer.
24610
24611 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24612 return;
24613 }
24614
24615 sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
24616
24617 try {
24618 sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
24619 } catch (e) {
24620 videojs__default["default"].log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
24621 }
24622 };
24623 },
24624 changeType: function changeType(codec) {
24625 return function (type, sourceUpdater) {
24626 var sourceBuffer = sourceUpdater[type + "Buffer"];
24627 var mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
24628 // or the media source does not contain this source buffer.
24629
24630 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24631 return;
24632 } // do not update codec if we don't need to.
24633
24634
24635 if (sourceUpdater.codecs[type] === codec) {
24636 return;
24637 }
24638
24639 sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
24640 sourceBuffer.changeType(mime);
24641 sourceUpdater.codecs[type] = codec;
24642 };
24643 }
24644 };
24645
24646 var pushQueue = function pushQueue(_ref) {
24647 var type = _ref.type,
24648 sourceUpdater = _ref.sourceUpdater,
24649 action = _ref.action,
24650 doneFn = _ref.doneFn,
24651 name = _ref.name;
24652 sourceUpdater.queue.push({
24653 type: type,
24654 action: action,
24655 doneFn: doneFn,
24656 name: name
24657 });
24658 shiftQueue(type, sourceUpdater);
24659 };
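  // Sketch of how the queue is used (mirrors the appendBuffer method further
  // down): one entry per source buffer type, processed only when that buffer
  // is not updating, with 'mediaSource' entries blocking both queues:
  //
  //   pushQueue({
  //     type: 'video',
  //     sourceUpdater: sourceUpdater,
  //     action: actions.appendBuffer(bytes, segmentInfo, onError),
  //     doneFn: doneFn,
  //     name: 'appendBuffer'
  //   });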
24660
24661 var onUpdateend = function onUpdateend(type, sourceUpdater) {
24662 return function (e) {
24663 // Although there should, in theory, be a pending action for any updateend received,
24664 // there are some actions that may trigger updateend events without set definitions in
24665 // the w3c spec. For instance, setting the duration on the media source may trigger
24666 // updateend events on source buffers. This does not appear to be in the spec. As such,
24667 // if we encounter an updateend without a corresponding pending action from our queue
24668 // for that source buffer type, process the next action.
24669 if (sourceUpdater.queuePending[type]) {
24670 var doneFn = sourceUpdater.queuePending[type].doneFn;
24671 sourceUpdater.queuePending[type] = null;
24672
24673 if (doneFn) {
24674 // if there's an error, report it
24675 doneFn(sourceUpdater[type + "Error_"]);
24676 }
24677 }
24678
24679 shiftQueue(type, sourceUpdater);
24680 };
24681 };
24682 /**
24683 * A queue of callbacks to be serialized and applied when a
24684 * MediaSource and its associated SourceBuffers are not in the
24685 * updating state. It is used by the segment loader to update the
24686 * underlying SourceBuffers when new data is loaded, for instance.
24687 *
24688 * @class SourceUpdater
24689 * @param {MediaSource} mediaSource the MediaSource to create SourceBuffers from
24691 */
24692
24693
24694 var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
24695 inheritsLoose(SourceUpdater, _videojs$EventTarget);
24696
24697 function SourceUpdater(mediaSource) {
24698 var _this;
24699
24700 _this = _videojs$EventTarget.call(this) || this;
24701 _this.mediaSource = mediaSource;
24702
24703 _this.sourceopenListener_ = function () {
24704 return shiftQueue('mediaSource', assertThisInitialized(_this));
24705 };
24706
24707 _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);
24708
24709 _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
24710
24711 _this.audioTimestampOffset_ = 0;
24712 _this.videoTimestampOffset_ = 0;
24713 _this.queue = [];
24714 _this.queuePending = {
24715 audio: null,
24716 video: null
24717 };
24718 _this.delayedAudioAppendQueue_ = [];
24719 _this.videoAppendQueued_ = false;
24720 _this.codecs = {};
24721 _this.onVideoUpdateEnd_ = onUpdateend('video', assertThisInitialized(_this));
24722 _this.onAudioUpdateEnd_ = onUpdateend('audio', assertThisInitialized(_this));
24723
24724 _this.onVideoError_ = function (e) {
24725 // used for debugging
24726 _this.videoError_ = e;
24727 };
24728
24729 _this.onAudioError_ = function (e) {
24730 // used for debugging
24731 _this.audioError_ = e;
24732 };
24733
24734 _this.createdSourceBuffers_ = false;
24735 _this.initializedEme_ = false;
24736 _this.triggeredReady_ = false;
24737 return _this;
24738 }
24739
24740 var _proto = SourceUpdater.prototype;
24741
24742 _proto.initializedEme = function initializedEme() {
24743 this.initializedEme_ = true;
24744 this.triggerReady();
24745 };
24746
24747 _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
24748 // if false, likely waiting on one of the segment loaders to get enough data to create
24749 // source buffers
24750 return this.createdSourceBuffers_;
24751 };
24752
24753 _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
24754 return this.initializedEme_;
24755 };
24756
24757 _proto.ready = function ready() {
24758 return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
24759 };
24760
24761 _proto.createSourceBuffers = function createSourceBuffers(codecs) {
24762 if (this.hasCreatedSourceBuffers()) {
24763 // already created them before
24764 return;
24765 } // the initial addOrChangeSourceBuffers will always be
24766 // two add buffers.
24767
24768
24769 this.addOrChangeSourceBuffers(codecs);
24770 this.createdSourceBuffers_ = true;
24771 this.trigger('createdsourcebuffers');
24772 this.triggerReady();
24773 };
24774
24775 _proto.triggerReady = function triggerReady() {
24776 // only allow ready to be triggered once, this prevents the case
24777 // where:
24778 // 1. we trigger createdsourcebuffers
24779 // 2. IE 11 synchronously initializes EME
24780 // 3. the synchronous initialization causes us to trigger ready
24781 // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
24782 if (this.ready() && !this.triggeredReady_) {
24783 this.triggeredReady_ = true;
24784 this.trigger('ready');
24785 }
24786 }
24787 /**
24788 * Add a type of source buffer to the media source.
24789 *
24790 * @param {string} type
24791 * The type of source buffer to add.
24792 *
24793 * @param {string} codec
24794 * The codec to add the source buffer with.
24795 */
24796 ;
24797
24798 _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
24799 pushQueue({
24800 type: 'mediaSource',
24801 sourceUpdater: this,
24802 action: actions.addSourceBuffer(type, codec),
24803 name: 'addSourceBuffer'
24804 });
24805 }
24806 /**
24807 * call abort on a source buffer.
24808 *
24809 * @param {string} type
24810 * The type of source buffer to call abort on.
24811 */
24812 ;
24813
24814 _proto.abort = function abort(type) {
24815 pushQueue({
24816 type: type,
24817 sourceUpdater: this,
24818 action: actions.abort(type),
24819 name: 'abort'
24820 });
24821 }
24822 /**
24823 * Call removeSourceBuffer and remove a specific type
24824 * of source buffer on the mediaSource.
24825 *
24826 * @param {string} type
24827 * The type of source buffer to remove.
24828 */
24829 ;
24830
24831 _proto.removeSourceBuffer = function removeSourceBuffer(type) {
24832 if (!this.canRemoveSourceBuffer()) {
24833 videojs__default["default"].log.error('removeSourceBuffer is not supported!');
24834 return;
24835 }
24836
24837 pushQueue({
24838 type: 'mediaSource',
24839 sourceUpdater: this,
24840 action: actions.removeSourceBuffer(type),
24841 name: 'removeSourceBuffer'
24842 });
24843 }
24844 /**
24845 * Whether or not the removeSourceBuffer function is supported
24846 * on the mediaSource.
24847 *
24848 * @return {boolean}
24849 * if removeSourceBuffer can be called.
24850 */
24851 ;
24852
24853 _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
24854 // IE reports that it supports removeSourceBuffer, but often throws
24855 // errors when attempting to use the function. So we report that it
24856 // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
24857 // throws errors, so we report that it does not support this as well.
24858 return !videojs__default["default"].browser.IE_VERSION && !videojs__default["default"].browser.IS_FIREFOX && window.MediaSource && window.MediaSource.prototype && typeof window.MediaSource.prototype.removeSourceBuffer === 'function';
24859 }
24860 /**
24861 * Whether or not the changeType function is supported
24862 * on our SourceBuffers.
24863 *
24864 * @return {boolean}
24865 * if changeType can be called.
24866 */
24867 ;
24868
24869 SourceUpdater.canChangeType = function canChangeType() {
24870 return window.SourceBuffer && window.SourceBuffer.prototype && typeof window.SourceBuffer.prototype.changeType === 'function';
24871 }
24872 /**
24873 * Whether or not the changeType function is supported
24874 * on our SourceBuffers.
24875 *
24876 * @return {boolean}
24877 * if changeType can be called.
24878 */
24879 ;
24880
24881 _proto.canChangeType = function canChangeType() {
24882 return this.constructor.canChangeType();
24883 }
24884 /**
24885 * Call the changeType function on a source buffer, given the code and type.
24886 *
24887 * @param {string} type
24888 * The type of source buffer to call changeType on.
24889 *
24890 * @param {string} codec
24891 * The codec string to change type with on the source buffer.
24892 */
24893 ;
24894
24895 _proto.changeType = function changeType(type, codec) {
24896 if (!this.canChangeType()) {
24897 videojs__default["default"].log.error('changeType is not supported!');
24898 return;
24899 }
24900
24901 pushQueue({
24902 type: type,
24903 sourceUpdater: this,
24904 action: actions.changeType(codec),
24905 name: 'changeType'
24906 });
24907 }
24908 /**
24909 * Add source buffers with a codec or, if they are already created,
24910 * call changeType on the existing source buffers.
24911 *
24912 * @param {Object} codecs
24913 * Codecs to switch to
24914 */
24915 ;
24916
24917 _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
24918 var _this2 = this;
24919
24920 if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
24921 throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
24922 }
24923
24924 Object.keys(codecs).forEach(function (type) {
24925 var codec = codecs[type];
24926
24927 if (!_this2.hasCreatedSourceBuffers()) {
24928 return _this2.addSourceBuffer(type, codec);
24929 }
24930
24931 if (_this2.canChangeType()) {
24932 _this2.changeType(type, codec);
24933 }
24934 });
24935 }
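// A minimal sketch of the add-or-change flow above, assuming a constructed
// SourceUpdater (codec strings illustrative):
//
//   // first call: no source buffers exist yet, so both types are added
//   sourceUpdater.addOrChangeSourceBuffers({
//     video: 'avc1.4d401e',
//     audio: 'mp4a.40.2'
//   });
//   // later call: buffers exist, so changeType is used where supported
//   sourceUpdater.addOrChangeSourceBuffers({ video: 'avc1.640028' });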
24936 /**
24937 * Queue an update to append an ArrayBuffer.
24938 *
24939 * @param {Object} options - object containing segmentInfo, type, and bytes
24940 * @param {Function} doneFn - the function to call when the append has finished
24941 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
24942 */
24943 ;
24944
24945 _proto.appendBuffer = function appendBuffer(options, doneFn) {
24946 var _this3 = this;
24947
24948 var segmentInfo = options.segmentInfo,
24949 type = options.type,
24950 bytes = options.bytes;
24951 this.processedAppend_ = true;
24952
24953 if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
24954 this.delayedAudioAppendQueue_.push([options, doneFn]);
24955 this.logger_("delayed audio append of " + bytes.length + " until video append");
24956 return;
24957 } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
24958 // not be fired. This means that the queue will be blocked until the next action
24959 // taken by the segment-loader. Provide a mechanism for segment-loader to handle
24960 // these errors by calling the doneFn with the specific error.
24961
24962
24963 var onError = doneFn;
24964 pushQueue({
24965 type: type,
24966 sourceUpdater: this,
24967 action: actions.appendBuffer(bytes, segmentInfo || {
24968 mediaIndex: -1
24969 }, onError),
24970 doneFn: doneFn,
24971 name: 'appendBuffer'
24972 });
24973
24974 if (type === 'video') {
24975 this.videoAppendQueued_ = true;
24976
24977 if (!this.delayedAudioAppendQueue_.length) {
24978 return;
24979 }
24980
24981 var queue = this.delayedAudioAppendQueue_.slice();
24982 this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
24983 this.delayedAudioAppendQueue_.length = 0;
24984 queue.forEach(function (que) {
24985 _this3.appendBuffer.apply(_this3, que);
24986 });
24987 }
24988 }
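// A sketch of the append ordering above (names and values illustrative):
// when a video buffer exists, audio appends queued before the first video
// append are held back so the video timestamp offset is established first.
//
//   sourceUpdater.appendBuffer({ type: 'audio', bytes: audioBytes, segmentInfo: info }, onDone);
//   // held in delayedAudioAppendQueue_ until...
//   sourceUpdater.appendBuffer({ type: 'video', bytes: videoBytes, segmentInfo: info }, onDone);
//   // ...after which the delayed audio append is replayed.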
24989 /**
24990 * Get the audio buffer's buffered timerange.
24991 *
24992 * @return {TimeRange}
24993 * The audio buffer's buffered time range
24994 */
24995 ;
24996
24997 _proto.audioBuffered = function audioBuffered() {
24998 // no media source/source buffer or it isn't in the media sources
24999 // source buffer list
25000 if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
25001 return videojs__default["default"].createTimeRange();
25002 }
25003
25004 return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs__default["default"].createTimeRange();
25005 }
25006 /**
25007 * Get the video buffer's buffered timerange.
25008 *
25009 * @return {TimeRange}
25010 * The video buffer's buffered time range
25011 */
25012 ;
25013
25014 _proto.videoBuffered = function videoBuffered() {
25015 // no media source/source buffer or it isn't in the media sources
25016 // source buffer list
25017 if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
25018 return videojs__default["default"].createTimeRange();
25019 }
25020
25021 return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs__default["default"].createTimeRange();
25022 }
25023 /**
25024 * Get a combined video/audio buffer's buffered timerange.
25025 *
25026 * @return {TimeRange}
25027 * the combined time range
25028 */
25029 ;
25030
25031 _proto.buffered = function buffered() {
25032 var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
25033 var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
25034
25035 if (audio && !video) {
25036 return this.audioBuffered();
25037 }
25038
25039 if (video && !audio) {
25040 return this.videoBuffered();
25041 }
25042
25043 return bufferIntersection(this.audioBuffered(), this.videoBuffered());
25044 }
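// A worked sketch of the combined range above (values illustrative): with
// audio buffered over [0, 10] and video over [2, 12], the intersection is
// [2, 10], the span playable by both tracks.
//
//   sourceUpdater.audioBuffered(); // ~ [[0, 10]]
//   sourceUpdater.videoBuffered(); // ~ [[2, 12]]
//   sourceUpdater.buffered();      // ~ [[2, 10]]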
25045 /**
25046 * Add a callback to the queue that will set duration on the mediaSource.
25047 *
25048 * @param {number} duration
25049 * The duration to set
25050 *
25051 * @param {Function} [doneFn]
25052 * function to run after duration has been set.
25053 */
25054 ;
25055
25056 _proto.setDuration = function setDuration(duration, doneFn) {
25057 if (doneFn === void 0) {
25058 doneFn = noop;
25059 }
25060
25061 // In order to set the duration on the media source, it's necessary to wait for all
25062 // source buffers to no longer be updating. "If the updating attribute equals true on
25063 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
25064 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
25065 pushQueue({
25066 type: 'mediaSource',
25067 sourceUpdater: this,
25068 action: actions.duration(duration),
25069 name: 'duration',
25070 doneFn: doneFn
25071 });
25072 }
25073 /**
25074 * Add a mediaSource endOfStream call to the queue
25075 *
25076 * @param {Error} [error]
25077 * Call endOfStream with an error
25078 *
25079 * @param {Function} [doneFn]
25080 * A function that should be called when the
25081 * endOfStream call has finished.
25082 */
25083 ;
25084
25085 _proto.endOfStream = function endOfStream(error, doneFn) {
25086 if (error === void 0) {
25087 error = null;
25088 }
25089
25090 if (doneFn === void 0) {
25091 doneFn = noop;
25092 }
25093
25094 if (typeof error !== 'string') {
25095 error = undefined;
25096 } // In order to set the duration on the media source, it's necessary to wait for all
25097 // source buffers to no longer be updating. "If the updating attribute equals true on
25098 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
25099 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
25100
25101
25102 pushQueue({
25103 type: 'mediaSource',
25104 sourceUpdater: this,
25105 action: actions.endOfStream(error),
25106 name: 'endOfStream',
25107 doneFn: doneFn
25108 });
25109 }
25110 /**
25111 * Queue an update to remove a time range from the buffer.
25112 *
25113 * @param {number} start where to start the removal
25114 * @param {number} end where to end the removal
25115 * @param {Function} [done=noop] optional callback to be executed when the remove
25116 * operation is complete
25117 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
25118 */
25119 ;
25120
25121 _proto.removeAudio = function removeAudio(start, end, done) {
25122 if (done === void 0) {
25123 done = noop;
25124 }
25125
25126 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
25127 done();
25128 return;
25129 }
25130
25131 pushQueue({
25132 type: 'audio',
25133 sourceUpdater: this,
25134 action: actions.remove(start, end),
25135 doneFn: done,
25136 name: 'remove'
25137 });
25138 }
25139 /**
25140 * Queue an update to remove a time range from the buffer.
25141 *
25142 * @param {number} start where to start the removal
25143 * @param {number} end where to end the removal
25144 * @param {Function} [done=noop] optional callback to be executed when the remove
25145 * operation is complete
25146 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
25147 */
25148 ;
25149
25150 _proto.removeVideo = function removeVideo(start, end, done) {
25151 if (done === void 0) {
25152 done = noop;
25153 }
25154
25155 if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
25156 done();
25157 return;
25158 }
25159
25160 pushQueue({
25161 type: 'video',
25162 sourceUpdater: this,
25163 action: actions.remove(start, end),
25164 doneFn: done,
25165 name: 'remove'
25166 });
25167 }
25168 /**
25169 * Whether the underlying sourceBuffer is updating or not
25170 *
25171 * @return {boolean} the updating status of the SourceBuffer
25172 */
25173 ;
25174
25175 _proto.updating = function updating() {
25176 // the audio/video source buffer is updating
25177 if (_updating('audio', this) || _updating('video', this)) {
25178 return true;
25179 }
25180
25181 return false;
25182 }
25183 /**
25184 * Set/get the timestampoffset on the audio SourceBuffer
25185 *
25186 * @return {number} the timestamp offset
25187 */
25188 ;
25189
25190 _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
25191 if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
25192 this.audioTimestampOffset_ !== offset) {
25193 pushQueue({
25194 type: 'audio',
25195 sourceUpdater: this,
25196 action: actions.timestampOffset(offset),
25197 name: 'timestampOffset'
25198 });
25199 this.audioTimestampOffset_ = offset;
25200 }
25201
25202 return this.audioTimestampOffset_;
25203 }
25204 /**
25205 * Set/get the timestampoffset on the video SourceBuffer
25206 *
25207 * @return {number} the timestamp offset
25208 */
25209 ;
25210
25211 _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
25212 if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
25213 this.videoTimestampOffset_ !== offset) {
25214 pushQueue({
25215 type: 'video',
25216 sourceUpdater: this,
25217 action: actions.timestampOffset(offset),
25218 name: 'timestampOffset'
25219 });
25220 this.videoTimestampOffset_ = offset;
25221 }
25222
25223 return this.videoTimestampOffset_;
25224 }
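// A minimal sketch of the getter/setter pattern above (value illustrative):
//
//   sourceUpdater.videoTimestampOffset(30); // queues a timestampOffset action
//   sourceUpdater.videoTimestampOffset();   // returns the cached 30
//   sourceUpdater.videoTimestampOffset(30); // same value, nothing re-queued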
25225 /**
25226 * Add a function to the queue that will be called
25227 * when it is its turn to run in the audio queue.
25228 *
25229 * @param {Function} callback
25230 * The callback to queue.
25231 */
25232 ;
25233
25234 _proto.audioQueueCallback = function audioQueueCallback(callback) {
25235 if (!this.audioBuffer) {
25236 return;
25237 }
25238
25239 pushQueue({
25240 type: 'audio',
25241 sourceUpdater: this,
25242 action: actions.callback(callback),
25243 name: 'callback'
25244 });
25245 }
25246 /**
25247 * Add a function to the queue that will be called
25248 * when it is its turn to run in the video queue.
25249 *
25250 * @param {Function} callback
25251 * The callback to queue.
25252 */
25253 ;
25254
25255 _proto.videoQueueCallback = function videoQueueCallback(callback) {
25256 if (!this.videoBuffer) {
25257 return;
25258 }
25259
25260 pushQueue({
25261 type: 'video',
25262 sourceUpdater: this,
25263 action: actions.callback(callback),
25264 name: 'callback'
25265 });
25266 }
25267 /**
25268 * dispose of the source updater and the underlying sourceBuffer
25269 */
25270 ;
25271
25272 _proto.dispose = function dispose() {
25273 var _this4 = this;
25274
25275 this.trigger('dispose');
25276 bufferTypes.forEach(function (type) {
25277 _this4.abort(type);
25278
25279 if (_this4.canRemoveSourceBuffer()) {
25280 _this4.removeSourceBuffer(type);
25281 } else {
25282 _this4[type + "QueueCallback"](function () {
25283 return cleanupBuffer(type, _this4);
25284 });
25285 }
25286 });
25287 this.videoAppendQueued_ = false;
25288 this.delayedAudioAppendQueue_.length = 0;
25289
25290 if (this.sourceopenListener_) {
25291 this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
25292 }
25293
25294 this.off();
25295 };
25296
25297 return SourceUpdater;
25298 }(videojs__default["default"].EventTarget);
25299
25300 var getPrototypeOf = createCommonjsModule(function (module) {
25301 function _getPrototypeOf(o) {
25302 module.exports = _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) {
25303 return o.__proto__ || Object.getPrototypeOf(o);
25304 };
25305 module.exports["default"] = module.exports, module.exports.__esModule = true;
25306 return _getPrototypeOf(o);
25307 }
25308
25309 module.exports = _getPrototypeOf;
25310 module.exports["default"] = module.exports, module.exports.__esModule = true;
25311 });
25312
25313 var isNativeFunction = createCommonjsModule(function (module) {
25314 function _isNativeFunction(fn) {
25315 return Function.toString.call(fn).indexOf("[native code]") !== -1;
25316 }
25317
25318 module.exports = _isNativeFunction;
25319 module.exports["default"] = module.exports, module.exports.__esModule = true;
25320 });
25321
25322 var isNativeReflectConstruct = createCommonjsModule(function (module) {
25323 function _isNativeReflectConstruct() {
25324 if (typeof Reflect === "undefined" || !Reflect.construct) return false;
25325 if (Reflect.construct.sham) return false;
25326 if (typeof Proxy === "function") return true;
25327
25328 try {
25329 Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {}));
25330 return true;
25331 } catch (e) {
25332 return false;
25333 }
25334 }
25335
25336 module.exports = _isNativeReflectConstruct;
25337 module.exports["default"] = module.exports, module.exports.__esModule = true;
25338 });
25339
25340 var construct = createCommonjsModule(function (module) {
25341 function _construct(Parent, args, Class) {
25342 if (isNativeReflectConstruct()) {
25343 module.exports = _construct = Reflect.construct;
25344 module.exports["default"] = module.exports, module.exports.__esModule = true;
25345 } else {
25346 module.exports = _construct = function _construct(Parent, args, Class) {
25347 var a = [null];
25348 a.push.apply(a, args);
25349 var Constructor = Function.bind.apply(Parent, a);
25350 var instance = new Constructor();
25351 if (Class) setPrototypeOf(instance, Class.prototype);
25352 return instance;
25353 };
25354
25355 module.exports["default"] = module.exports, module.exports.__esModule = true;
25356 }
25357
25358 return _construct.apply(null, arguments);
25359 }
25360
25361 module.exports = _construct;
25362 module.exports["default"] = module.exports, module.exports.__esModule = true;
25363 });
25364
25365 var wrapNativeSuper = createCommonjsModule(function (module) {
25366 function _wrapNativeSuper(Class) {
25367 var _cache = typeof Map === "function" ? new Map() : undefined;
25368
25369 module.exports = _wrapNativeSuper = function _wrapNativeSuper(Class) {
25370 if (Class === null || !isNativeFunction(Class)) return Class;
25371
25372 if (typeof Class !== "function") {
25373 throw new TypeError("Super expression must either be null or a function");
25374 }
25375
25376 if (typeof _cache !== "undefined") {
25377 if (_cache.has(Class)) return _cache.get(Class);
25378
25379 _cache.set(Class, Wrapper);
25380 }
25381
25382 function Wrapper() {
25383 return construct(Class, arguments, getPrototypeOf(this).constructor);
25384 }
25385
25386 Wrapper.prototype = Object.create(Class.prototype, {
25387 constructor: {
25388 value: Wrapper,
25389 enumerable: false,
25390 writable: true,
25391 configurable: true
25392 }
25393 });
25394 return setPrototypeOf(Wrapper, Class);
25395 };
25396
25397 module.exports["default"] = module.exports, module.exports.__esModule = true;
25398 return _wrapNativeSuper(Class);
25399 }
25400
25401 module.exports = _wrapNativeSuper;
25402 module.exports["default"] = module.exports, module.exports.__esModule = true;
25403 });
25404
25405 var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
25406 return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
25407 };
25408
25409 var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
25410 return char.charCodeAt(0);
25411 }));
25412
25413 var NoVttJsError = /*#__PURE__*/function (_Error) {
25414 inheritsLoose(NoVttJsError, _Error);
25415
25416 function NoVttJsError() {
25417 return _Error.call(this, 'Trying to parse received VTT cues, but there is no WebVTT. Make sure vtt.js is loaded.') || this;
25418 }
25419
25420 return NoVttJsError;
25421 }( /*#__PURE__*/wrapNativeSuper(Error));
25422 /**
25423 * An object that manages segment loading and appending.
25424 *
25425 * @class VTTSegmentLoader
25426 * @param {Object} options required and optional options
25427 * @extends videojs.EventTarget
25428 */
25429
25430
25431 var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
25432 inheritsLoose(VTTSegmentLoader, _SegmentLoader);
25433
25434 function VTTSegmentLoader(settings, options) {
25435 var _this;
25436
25437 if (options === void 0) {
25438 options = {};
25439 }
25440
25441 _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
25442 // however, VTTSegmentLoader has no need of a media source, so delete the reference
25443
25444 _this.mediaSource_ = null;
25445 _this.subtitlesTrack_ = null;
25446 _this.loaderType_ = 'subtitle';
25447 _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
25448 _this.loadVttJs = settings.loadVttJs; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
25449 // the sync controller leads to improper behavior.
25450
25451 _this.shouldSaveSegmentTimingInfo_ = false;
25452 return _this;
25453 }
25454
25455 var _proto = VTTSegmentLoader.prototype;
25456
25457 _proto.createTransmuxer_ = function createTransmuxer_() {
25458 // don't need to transmux any subtitles
25459 return null;
25460 }
25461 /**
25462 * Indicates which time ranges are buffered
25463 *
25464 * @return {TimeRange}
25465 * TimeRange object representing the current buffered ranges
25466 */
25467 ;
25468
25469 _proto.buffered_ = function buffered_() {
25470 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
25471 return videojs__default["default"].createTimeRanges();
25472 }
25473
25474 var cues = this.subtitlesTrack_.cues;
25475 var start = cues[0].startTime;
25476 var end = cues[cues.length - 1].startTime;
25477 return videojs__default["default"].createTimeRanges([[start, end]]);
25478 }
25479 /**
25480 * Gets and sets init segment for the provided map
25481 *
25482 * @param {Object} map
25483 * The map object representing the init segment to get or set
25484 * @param {boolean=} set
25485 * If true, the init segment for the provided map should be saved
25486 * @return {Object}
25487 * map object for desired init segment
25488 */
25489 ;
25490
25491 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
25492 if (set === void 0) {
25493 set = false;
25494 }
25495
25496 if (!map) {
25497 return null;
25498 }
25499
25500 var id = initSegmentId(map);
25501 var storedMap = this.initSegments_[id];
25502
25503 if (set && !storedMap && map.bytes) {
25504 // append WebVTT line terminators to the media initialization segment if it exists
25505 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
25506 // requires two or more WebVTT line terminators between the WebVTT header and the
25507 // rest of the file
25508 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
25509 var combinedSegment = new Uint8Array(combinedByteLength);
25510 combinedSegment.set(map.bytes);
25511 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
25512 this.initSegments_[id] = storedMap = {
25513 resolvedUri: map.resolvedUri,
25514 byterange: map.byterange,
25515 bytes: combinedSegment
25516 };
25517 }
25518
25519 return storedMap || map;
25520 }
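// A worked sketch of the concatenation above (bytes illustrative): given an
// init segment whose bytes spell "WEBVTT", the stored map bytes are the
// header followed by two '\n' terminators (char code 10), per the spec.
//
//   // map.bytes:        [87, 69, 66, 86, 84, 84]          // "WEBVTT"
//   // stored map.bytes: [87, 69, 66, 86, 84, 84, 10, 10]  // "WEBVTT\n\n"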
25521 /**
25522 * Returns true if all configuration required for loading is present, otherwise false.
25523 *
25524 * @return {boolean} True if the all configuration is ready for loading
25525 * @private
25526 */
25527 ;
25528
25529 _proto.couldBeginLoading_ = function couldBeginLoading_() {
25530 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
25531 }
25532 /**
25533 * Once all the starting parameters have been specified, begin
25534 * operation. This method should only be invoked from the INIT
25535 * state.
25536 *
25537 * @private
25538 */
25539 ;
25540
25541 _proto.init_ = function init_() {
25542 this.state = 'READY';
25543 this.resetEverything();
25544 return this.monitorBuffer_();
25545 }
25546 /**
25547 * Set a subtitle track on the segment loader to add subtitles to
25548 *
25549 * @param {TextTrack=} track
25550 * The text track to add loaded subtitles to
25551 * @return {TextTrack}
25552 * Returns the subtitles track
25553 */
25554 ;
25555
25556 _proto.track = function track(_track) {
25557 if (typeof _track === 'undefined') {
25558 return this.subtitlesTrack_;
25559 }
25560
25561 this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
25562 // buffering now
25563
25564 if (this.state === 'INIT' && this.couldBeginLoading_()) {
25565 this.init_();
25566 }
25567
25568 return this.subtitlesTrack_;
25569 }
25570 /**
25571 * Remove any data in the source buffer between start and end times
25572 *
25573 * @param {number} start - the start time of the region to remove from the buffer
25574 * @param {number} end - the end time of the region to remove from the buffer
25575 */
25576 ;
25577
25578 _proto.remove = function remove(start, end) {
25579 removeCuesFromTrack(start, end, this.subtitlesTrack_);
25580 }
25581 /**
25582 * fill the buffer with segments unless the sourceBuffers are
25583 * currently updating
25584 *
25585 * Note: this function should only ever be called by monitorBuffer_
25586 * and never directly
25587 *
25588 * @private
25589 */
25590 ;
25591
25592 _proto.fillBuffer_ = function fillBuffer_() {
25593 var _this2 = this;
25594
25595 // see if we need to begin loading immediately
25596 var segmentInfo = this.chooseNextRequest_();
25597
25598 if (!segmentInfo) {
25599 return;
25600 }
25601
25602 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
25603 // We don't have the timestamp offset that we need to sync subtitles.
25604 // Rerun on a timestamp offset or user interaction.
25605 var checkTimestampOffset = function checkTimestampOffset() {
25606 _this2.state = 'READY';
25607
25608 if (!_this2.paused()) {
25609 // if not paused, queue a buffer check as soon as possible
25610 _this2.monitorBuffer_();
25611 }
25612 };
25613
25614 this.syncController_.one('timestampoffset', checkTimestampOffset);
25615 this.state = 'WAITING_ON_TIMELINE';
25616 return;
25617 }
25618
25619 this.loadSegment_(segmentInfo);
25620 } // never set a timestamp offset for vtt segments.
25621 ;
25622
25623 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
25624 return null;
25625 };
25626
25627 _proto.chooseNextRequest_ = function chooseNextRequest_() {
25628 return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
25629 }
25630 /**
25631 * Prevents the segment loader from requesting segments we know contain no subtitles
25632 * by walking forward until we find the next segment that is not known to be
25633 * empty.
25634 *
25635 * @param {Object} segmentInfo
25636 * a segment info object that describes the current segment
25637 * @return {Object}
25638 * a segment info object that describes the current segment
25639 */
25640 ;
25641
25642 _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
25643 while (segmentInfo && segmentInfo.segment.empty) {
25644 // stop at the last possible segmentInfo
25645 if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
25646 segmentInfo = null;
25647 break;
25648 }
25649
25650 segmentInfo = this.generateSegmentInfo_({
25651 playlist: segmentInfo.playlist,
25652 mediaIndex: segmentInfo.mediaIndex + 1,
25653 startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
25654 isSyncRequest: segmentInfo.isSyncRequest
25655 });
25656 }
25657
25658 return segmentInfo;
25659 };
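// A worked trace of the walk above (indices illustrative): if the segments at
// mediaIndex 3 and 4 are known-empty, a request targeting mediaIndex 3
// advances to mediaIndex 5; if every remaining segment is empty, null is
// returned and no request is made.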
25660
25661 _proto.stopForError = function stopForError(error) {
25662 this.error(error);
25663 this.state = 'READY';
25664 this.pause();
25665 this.trigger('error');
25666 }
25667 /**
25668 * append a decrypted segment to the SourceBuffer through a SourceUpdater
25669 *
25670 * @private
25671 */
25672 ;
25673
25674 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
25675 var _this3 = this;
25676
25677 if (!this.subtitlesTrack_) {
25678 this.state = 'READY';
25679 return;
25680 }
25681
25682 this.saveTransferStats_(simpleSegment.stats); // the request was aborted
25683
25684 if (!this.pendingSegment_) {
25685 this.state = 'READY';
25686 this.mediaRequestsAborted += 1;
25687 return;
25688 }
25689
25690 if (error) {
25691 if (error.code === REQUEST_ERRORS.TIMEOUT) {
25692 this.handleTimeout_();
25693 }
25694
25695 if (error.code === REQUEST_ERRORS.ABORTED) {
25696 this.mediaRequestsAborted += 1;
25697 } else {
25698 this.mediaRequestsErrored += 1;
25699 }
25700
25701 this.stopForError(error);
25702 return;
25703 }
25704
25705 var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
25706 // maintain functionality between segment loaders
25707
25708 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats); // if this request included a segment key, save that data in the cache
25709
25710 if (simpleSegment.key) {
25711 this.segmentKey(simpleSegment.key, true);
25712 }
25713
25714 this.state = 'APPENDING'; // used for tests
25715
25716 this.trigger('appending');
25717 var segment = segmentInfo.segment;
25718
25719 if (segment.map) {
25720 segment.map.bytes = simpleSegment.map.bytes;
25721 }
25722
25723 segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded, otherwise, load it and wait till it finished loading
25724
25725 if (typeof window.WebVTT !== 'function' && typeof this.loadVttJs === 'function') {
25726 this.state = 'WAITING_ON_VTTJS'; // should be fine to call multiple times
25727 // script will be loaded once but multiple listeners will be added to the queue, which is expected.
25728
25729 this.loadVttJs().then(function () {
25730 return _this3.segmentRequestFinished_(error, simpleSegment, result);
25731 }, function () {
25732 return _this3.stopForError({
25733 message: 'Error loading vtt.js'
25734 });
25735 });
25736 return;
25737 }
25738
25739 segment.requested = true;
25740
25741 try {
25742 this.parseVTTCues_(segmentInfo);
25743 } catch (e) {
25744 this.stopForError({
25745 message: e.message
25746 });
25747 return;
25748 }
25749
25750 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
25751
25752 if (segmentInfo.cues.length) {
25753 segmentInfo.timingInfo = {
25754 start: segmentInfo.cues[0].startTime,
25755 end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
25756 };
25757 } else {
25758 segmentInfo.timingInfo = {
25759 start: segmentInfo.startOfSegment,
25760 end: segmentInfo.startOfSegment + segmentInfo.duration
25761 };
25762 }
25763
25764 if (segmentInfo.isSyncRequest) {
25765 this.trigger('syncinfoupdate');
25766 this.pendingSegment_ = null;
25767 this.state = 'READY';
25768 return;
25769 }
25770
25771 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
25772 this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
25773 // the subtitle track
25774
25775 segmentInfo.cues.forEach(function (cue) {
25776 _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
25777 }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
25778 // cues to have identical time-intervals, but if the text is also identical
25779 // we can safely assume it is a duplicate that can be removed (ex. when a cue
25780 // "overlaps" VTT segments)
25781
25782 removeDuplicateCuesFromTrack(this.subtitlesTrack_);
25783 this.handleAppendsDone_();
25784 };
25785
25786 _proto.handleData_ = function handleData_() {// noop as we shouldn't be getting video/audio data captions
25787 // that we do not support here.
25788 };
25789
25790 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
25791 }
25792 /**
25793 * Uses the WebVTT parser to parse the segment response
25794 *
25795 * @throws NoVttJsError
25796 *
25797 * @param {Object} segmentInfo
25798 * a segment info object that describes the current segment
25799 * @private
25800 */
25801 ;
25802
25803 _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
25804 var decoder;
25805 var decodeBytesToString = false;
25806
25807 if (typeof window.WebVTT !== 'function') {
25808 // caller is responsible for exception handling.
25809 throw new NoVttJsError();
25810 }
25811
25812 if (typeof window.TextDecoder === 'function') {
25813 decoder = new window.TextDecoder('utf8');
25814 } else {
25815 decoder = window.WebVTT.StringDecoder();
25816 decodeBytesToString = true;
25817 }
25818
25819 var parser = new window.WebVTT.Parser(window, window.vttjs, decoder);
25820 segmentInfo.cues = [];
25821 segmentInfo.timestampmap = {
25822 MPEGTS: 0,
25823 LOCAL: 0
25824 };
25825 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
25826
25827 parser.ontimestampmap = function (map) {
25828 segmentInfo.timestampmap = map;
25829 };
25830
25831 parser.onparsingerror = function (error) {
25832 videojs__default["default"].log.warn('Error encountered when parsing cues: ' + error.message);
25833 };
25834
25835 if (segmentInfo.segment.map) {
25836 var mapData = segmentInfo.segment.map.bytes;
25837
25838 if (decodeBytesToString) {
25839 mapData = uint8ToUtf8(mapData);
25840 }
25841
25842 parser.parse(mapData);
25843 }
25844
25845 var segmentData = segmentInfo.bytes;
25846
25847 if (decodeBytesToString) {
25848 segmentData = uint8ToUtf8(segmentData);
25849 }
25850
25851 parser.parse(segmentData);
25852 parser.flush();
25853 }
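// A minimal sketch of the parse flow above, assuming vtt.js is loaded and
// TextDecoder is available (variable names illustrative):
//
//   var cues = [];
//   var parser = new window.WebVTT.Parser(window, window.vttjs,
//     new window.TextDecoder('utf8'));
//   parser.oncue = cues.push.bind(cues);
//   parser.parse(segmentBytes);
//   parser.flush(); // cues now holds the parsed VTTCue objects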
25854 /**
25855 * Updates the start and end times of any cues parsed by the WebVTT parser using
25856 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
25857 * from the SyncController
25858 *
25859 * @param {Object} segmentInfo
25860 * a segment info object that describes the current segment
25861 * @param {Object} mappingObj
25862 * object containing a mapping from TS to media time
25863 * @param {Object} playlist
25864 * the playlist object containing the segment
25865 * @private
25866 */
25867 ;
25868
25869 _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
25870 var segment = segmentInfo.segment;
25871
25872 if (!mappingObj) {
25873 // If the sync controller does not have a mapping of TS to Media Time for the
25874 // timeline, then we don't have enough information to update the cue
25875 // start/end times
25876 return;
25877 }
25878
25879 if (!segmentInfo.cues.length) {
25880 // If there are no cues, we also do not have enough information to figure out
25881 // segment timing. Mark that the segment contains no cues so we don't re-request
25882 // an empty segment.
25883 segment.empty = true;
25884 return;
25885 }
25886
25887 var timestampmap = segmentInfo.timestampmap;
25888 var diff = timestampmap.MPEGTS / clock.ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
25889 segmentInfo.cues.forEach(function (cue) {
25890 // First convert cue time to TS time using the timestamp-map provided within the vtt
25891 cue.startTime += diff;
25892 cue.endTime += diff;
25893 });
25894
25895 if (!playlist.syncInfo) {
25896 var firstStart = segmentInfo.cues[0].startTime;
25897 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
25898 playlist.syncInfo = {
25899 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
25900 time: Math.min(firstStart, lastStart - segment.duration)
25901 };
25902 }
25903 };
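// A worked example of the offset math above (values illustrative, assuming
// the usual 90kHz TS clock where ONE_SECOND_IN_TS is 90000): with an
// X-TIMESTAMP-MAP of MPEGTS=900000, LOCAL=0 and a timeline mapping of -10,
// diff = 900000 / 90000 - 0 + (-10) = 0, so cue times pass through
// unchanged; with MPEGTS=1800000 the same cues would shift forward by 10s.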
25904
25905 return VTTSegmentLoader;
25906 }(SegmentLoader);
25907
25908 /**
25909 * @file ad-cue-tags.js
25910 */
25911
25912 /**
25913 * Searches for an ad cue that overlaps with the given mediaTime
25914 *
25915 * @param {Object} track
25916 * the track to find the cue for
25917 *
25918 * @param {number} mediaTime
25919 * the time to find the cue at
25920 *
25921 * @return {Object|null}
25922 * the found cue or null
25923 */
25924 var findAdCue = function findAdCue(track, mediaTime) {
25925 var cues = track.cues;
25926
25927 for (var i = 0; i < cues.length; i++) {
25928 var cue = cues[i];
25929
25930 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
25931 return cue;
25932 }
25933 }
25934
25935 return null;
25936 };
25937 var updateAdCues = function updateAdCues(media, track, offset) {
25938 if (offset === void 0) {
25939 offset = 0;
25940 }
25941
25942 if (!media.segments) {
25943 return;
25944 }
25945
25946 var mediaTime = offset;
25947 var cue;
25948
25949 for (var i = 0; i < media.segments.length; i++) {
25950 var segment = media.segments[i];
25951
25952 if (!cue) {
25953 // Since the cues will span for at least the segment duration, adding a fudge
25954 // factor of half segment duration will prevent duplicate cues from being
25955 // created when timing info is not exact (e.g. cue start time initialized
25956 // at 10.006677, but next call mediaTime is 10.003332)
25957 cue = findAdCue(track, mediaTime + segment.duration / 2);
25958 }
25959
25960 if (cue) {
25961 if ('cueIn' in segment) {
25962 // Found a CUE-IN so end the cue
25963 cue.endTime = mediaTime;
25964 cue.adEndTime = mediaTime;
25965 mediaTime += segment.duration;
25966 cue = null;
25967 continue;
25968 }
25969
25970 if (mediaTime < cue.endTime) {
25971 // Already processed this mediaTime for this cue
25972 mediaTime += segment.duration;
25973 continue;
25974 } // otherwise extend cue until a CUE-IN is found
25975
25976
25977 cue.endTime += segment.duration;
25978 } else {
25979 if ('cueOut' in segment) {
25980 cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
25981 cue.adStartTime = mediaTime; // Assumes tag format to be
25982 // #EXT-X-CUE-OUT:30
25983
25984 cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
25985 track.addCue(cue);
25986 }
25987
25988 if ('cueOutCont' in segment) {
25989 // Entered into the middle of an ad cue
25990 // Assumes tag format to be
25991 // #EXT-X-CUE-OUT-CONT:10/30
25992 var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
25993 adOffset = _segment$cueOutCont$s[0],
25994 adTotal = _segment$cueOutCont$s[1];
25995
25996 cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, '');
25997 cue.adStartTime = mediaTime - adOffset;
25998 cue.adEndTime = cue.adStartTime + adTotal;
25999 track.addCue(cue);
26000 }
26001 }
26002
26003 mediaTime += segment.duration;
26004 }
26005 };
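// A worked sketch of the CUE-OUT-CONT branch above (values illustrative):
// joining mid-ad at mediaTime 100 with #EXT-X-CUE-OUT-CONT:10/30 gives
//
//   cue.adStartTime = 100 - 10; // 90, the ad began 10 seconds ago
//   cue.adEndTime   = 90 + 30;  // 120, for a 30 second total ad break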
26006
26007 // synchronize expired playlist segments.
26008 // the max media sequence diff is 48 hours of live stream
26009 // content with two second segments. Anything larger than that
26010 // will likely be invalid.
26011
26012 var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
26013 var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
26014 // the equivalence display-time 0 === segment-index 0
26015 {
26016 name: 'VOD',
26017 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
26018 if (duration !== Infinity) {
26019 var syncPoint = {
26020 time: 0,
26021 segmentIndex: 0,
26022 partIndex: null
26023 };
26024 return syncPoint;
26025 }
26026
26027 return null;
26028 }
26029 }, // Stategy "ProgramDateTime": We have a program-date-time tag in this playlist
26030 {
26031 name: 'ProgramDateTime',
26032 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
26033 if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
26034 return null;
26035 }
26036
26037 var syncPoint = null;
26038 var lastDistance = null;
26039 var partsAndSegments = getPartsAndSegments(playlist);
26040 currentTime = currentTime || 0;
26041
26042 for (var i = 0; i < partsAndSegments.length; i++) {
26043 // start from the end and loop backwards for live
26044 // or start from the front and loop forwards for non-live
26045 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
26046 var partAndSegment = partsAndSegments[index];
26047 var segment = partAndSegment.segment;
26048 var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
26049
26050 if (!datetimeMapping || !segment.dateTimeObject) {
26051 continue;
26052 }
26053
26054 var segmentTime = segment.dateTimeObject.getTime() / 1000;
26055 var start = segmentTime + datetimeMapping; // take part duration into account.
26056
26057 if (segment.parts && typeof partAndSegment.partIndex === 'number') {
26058 for (var z = 0; z < partAndSegment.partIndex; z++) {
26059 start += segment.parts[z].duration;
26060 }
26061 }
26062
26063 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
26064 // currentTime and can stop looking for better candidates
26065
26066 if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
26067 break;
26068 }
26069
26070 lastDistance = distance;
26071 syncPoint = {
26072 time: start,
26073 segmentIndex: partAndSegment.segmentIndex,
26074 partIndex: partAndSegment.partIndex
26075 };
26076 }
26077
26078 return syncPoint;
26079 }
26080 }, // Stategy "Segment": We have a known time mapping for a timeline and a
26081 // segment in the current timeline with timing data
26082 {
26083 name: 'Segment',
26084 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
26085 var syncPoint = null;
26086 var lastDistance = null;
26087 currentTime = currentTime || 0;
26088 var partsAndSegments = getPartsAndSegments(playlist);
26089
26090 for (var i = 0; i < partsAndSegments.length; i++) {
26091 // start from the end and loop backwards for live
26092 // or start from the front and loop forwards for non-live
26093 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
26094 var partAndSegment = partsAndSegments[index];
26095 var segment = partAndSegment.segment;
26096 var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;
26097
26098 if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
26099 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
26100 // currentTime and can stop looking for better candidates
26101
26102 if (lastDistance !== null && lastDistance < distance) {
26103 break;
26104 }
26105
26106 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
26107 lastDistance = distance;
26108 syncPoint = {
26109 time: start,
26110 segmentIndex: partAndSegment.segmentIndex,
26111 partIndex: partAndSegment.partIndex
26112 };
26113 }
26114 }
26115 }
26116
26117 return syncPoint;
26118 }
26119 }, // Stategy "Discontinuity": We have a discontinuity with a known
26120 // display-time
26121 {
26122 name: 'Discontinuity',
26123 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
26124 var syncPoint = null;
26125 currentTime = currentTime || 0;
26126
26127 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
26128 var lastDistance = null;
26129
26130 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
26131 var segmentIndex = playlist.discontinuityStarts[i];
26132 var discontinuity = playlist.discontinuitySequence + i + 1;
26133 var discontinuitySync = syncController.discontinuities[discontinuity];
26134
26135 if (discontinuitySync) {
26136 var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
26137 // currentTime and can stop looking for better candidates
26138
26139 if (lastDistance !== null && lastDistance < distance) {
26140 break;
26141 }
26142
26143 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
26144 lastDistance = distance;
26145 syncPoint = {
26146 time: discontinuitySync.time,
26147 segmentIndex: segmentIndex,
26148 partIndex: null
26149 };
26150 }
26151 }
26152 }
26153 }
26154
26155 return syncPoint;
26156 }
26157 }, // Stategy "Playlist": We have a playlist with a known mapping of
26158 // segment index to display time
26159 {
26160 name: 'Playlist',
26161 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
26162 if (playlist.syncInfo) {
26163 var syncPoint = {
26164 time: playlist.syncInfo.time,
26165 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
26166 partIndex: null
26167 };
26168 return syncPoint;
26169 }
26170
26171 return null;
26172 }
26173 }];
26174
26175 var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
26176 inheritsLoose(SyncController, _videojs$EventTarget);
26177
26178 function SyncController(options) {
26179 var _this;
26180
26181 _this = _videojs$EventTarget.call(this) || this; // ...for syncing across variants
26182
26183 _this.timelines = [];
26184 _this.discontinuities = [];
26185 _this.timelineToDatetimeMappings = {};
26186 _this.logger_ = logger('SyncController');
26187 return _this;
26188 }
26189 /**
26190 * Find a sync-point for the playlist specified
26191 *
26192 * A sync-point is defined as a known mapping from display-time to
26193 * a segment-index in the current playlist.
26194 *
26195 * @param {Playlist} playlist
26196 * The playlist that needs a sync-point
26197 * @param {number} duration
26198 * Duration of the MediaSource (Infinite if playing a live source)
26199 * @param {number} currentTimeline
26200 * The last timeline from which a segment was loaded
26201 * @return {Object}
26202 * A sync-point object
26203 */
26204
26205
26206 var _proto = SyncController.prototype;
26207
26208 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
26209 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
26210
26211 if (!syncPoints.length) {
26212 // Signal that we need to attempt to get a sync-point manually
26213 // by fetching a segment in the playlist and constructing
26214 // a sync-point from that information
26215 return null;
26216 } // Now find the sync-point that is closest to the currentTime because
26217 // that should result in the most accurate guess about which segment
26218 // to fetch
26219
26220
26221 return this.selectSyncPoint_(syncPoints, {
26222 key: 'time',
26223 value: currentTime
26224 });
26225 }
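// A minimal sketch of sync-point selection above (values illustrative): each
// strategy may contribute a candidate, and the candidate whose time is
// nearest currentTime wins.
//
//   syncController.getSyncPoint(playlist, Infinity, currentTimeline, 42);
//   // => e.g. { time: 40, segmentIndex: 20, partIndex: null, strategy: 'Segment' }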
26226 /**
26227 * Calculate the amount of time that has expired off the playlist during playback
26228 *
26229 * @param {Playlist} playlist
26230 * Playlist object to calculate expired from
26231 * @param {number} duration
26232 * Duration of the MediaSource (Infinity if playing a live source)
26233 * @return {number|null}
26234 * The amount of time that has expired off the playlist during playback. Null
26235 * if no sync-points for the playlist can be found.
26236 */
26237 ;
26238
26239 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
26240 if (!playlist || !playlist.segments) {
26241 return null;
26242 }
26243
26244 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
26245
26246 if (!syncPoints.length) {
26247 return null;
26248 }
26249
26250 var syncPoint = this.selectSyncPoint_(syncPoints, {
26251 key: 'segmentIndex',
26252 value: 0
26253 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
26254 // duration from index 0 to syncPoint.segmentIndex instead of adding.
26255
26256 if (syncPoint.segmentIndex > 0) {
26257 syncPoint.time *= -1;
26258 }
26259
26260 return Math.abs(syncPoint.time + sumDurations({
26261 defaultDuration: playlist.targetDuration,
26262 durationList: playlist.segments,
26263 startIndex: syncPoint.segmentIndex,
26264 endIndex: 0
26265 }));
26266 }
26267 /**
26268 * Runs each sync-point strategy and returns a list of sync-points returned by the
26269 * strategies
26270 *
26271 * @private
26272 * @param {Playlist} playlist
26273 * The playlist that needs a sync-point
26274 * @param {number} duration
26275 * Duration of the MediaSource (Infinity if playing a live source)
26276 * @param {number} currentTimeline
26277 * The last timeline from which a segment was loaded
26278 * @return {Array}
26279 * A list of sync-point objects
26280 */
26281 ;
26282
26283 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
26284 var syncPoints = []; // Try to find a sync-point by utilizing various strategies...
26285
26286 for (var i = 0; i < syncPointStrategies.length; i++) {
26287 var strategy = syncPointStrategies[i];
26288 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
26289
26290 if (syncPoint) {
26291 syncPoint.strategy = strategy.name;
26292 syncPoints.push({
26293 strategy: strategy.name,
26294 syncPoint: syncPoint
26295 });
26296 }
26297 }
26298
26299 return syncPoints;
26300 }
26301 /**
26302 * Selects the sync-point nearest the specified target
26303 *
26304 * @private
26305 * @param {Array} syncPoints
26306 * List of sync-points to select from
26307 * @param {Object} target
26308 * Object specifying the property and value we are targeting
26309 * @param {string} target.key
26310 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
26311 * @param {number} target.value
26312 * The value to target for the specified key.
26313 * @return {Object}
26314 * The sync-point nearest the target
26315 */
26316 ;
26317
26318 _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
26319 var bestSyncPoint = syncPoints[0].syncPoint;
26320 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
26321 var bestStrategy = syncPoints[0].strategy;
26322
26323 for (var i = 1; i < syncPoints.length; i++) {
26324 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
26325
26326 if (newDistance < bestDistance) {
26327 bestDistance = newDistance;
26328 bestSyncPoint = syncPoints[i].syncPoint;
26329 bestStrategy = syncPoints[i].strategy;
26330 }
26331 }
26332
26333 this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex) + (typeof bestSyncPoint.partIndex === 'number' ? ",partIndex:" + bestSyncPoint.partIndex : '') + ']');
26334 return bestSyncPoint;
26335 }
26336 /**
26337 * Save any meta-data present on the segments when segments leave
26338 * the live window to the playlist to allow for synchronization at the
26339 * playlist level later.
26340 *
26341 * @param {Playlist} oldPlaylist - The previous active playlist
26342 * @param {Playlist} newPlaylist - The updated and most current playlist
26343 */
26344 ;
26345
26346 _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
26347 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps
26348
26349 if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
26350 videojs__default["default"].log.warn("Not saving expired segment info. Media sequence gap " + mediaSequenceDiff + " is too large.");
26351 return;
26352 } // When a segment expires from the playlist and it has a start time
26353 // save that information as a possible sync-point reference in future
26354
26355
26356 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
26357 var lastRemovedSegment = oldPlaylist.segments[i];
26358
26359 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
26360 newPlaylist.syncInfo = {
26361 mediaSequence: oldPlaylist.mediaSequence + i,
26362 time: lastRemovedSegment.start
26363 };
26364 this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
26365 this.trigger('syncinfoupdate');
26366 break;
26367 }
26368 }
26369 }
26370 /**
26371 * Save the mapping from playlist's ProgramDateTime to display. This should only happen
26372 * before segments start to load.
26373 *
26374 * @param {Playlist} playlist - The currently active playlist
26375 */
26376 ;
26377
26378 _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
26379 // It's possible for the playlist to be updated before playback starts, meaning time
26380 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
26381 // crossed, then the old time zero mapping (for the prior timeline) would be retained
26382 // unless the mappings are cleared.
26383 this.timelineToDatetimeMappings = {};
26384
26385 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
26386 var firstSegment = playlist.segments[0];
26387 var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
26388 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
26389 }
26390 }
26391 /**
26392 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
26393 * based on the latest timing information.
26394 *
26395 * @param {Object} options
26396 * Options object
26397 * @param {SegmentInfo} options.segmentInfo
26398 * The current active request information
26399 * @param {boolean} options.shouldSaveTimelineMapping
26400 * If there's a timeline change, determines if the timeline mapping should be
26401 * saved for timeline mapping and program date time mappings.
26402 */
26403 ;
26404
26405 _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
26406 var segmentInfo = _ref.segmentInfo,
26407 shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
26408 var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
26409 var segment = segmentInfo.segment;
26410
26411 if (didCalculateSegmentTimeMapping) {
26412 this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
26413 // now with segment timing information
26414
26415 if (!segmentInfo.playlist.syncInfo) {
26416 segmentInfo.playlist.syncInfo = {
26417 mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
26418 time: segment.start
26419 };
26420 }
26421 }
26422
26423 var dateTime = segment.dateTimeObject;
26424
26425 if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
26426 this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
26427 }
26428 };
26429
26430 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
26431 if (typeof this.timelines[timeline] === 'undefined') {
26432 return null;
26433 }
26434
26435 return this.timelines[timeline].time;
26436 };
26437
26438 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
26439 if (typeof this.timelines[timeline] === 'undefined') {
26440 return null;
26441 }
26442
26443 return this.timelines[timeline].mapping;
26444 }
26445 /**
26446 * Use the "media time" for a segment to generate a mapping to "display time" and
26447 * save that display time to the segment.
26448 *
26449 * @private
26450 * @param {SegmentInfo} segmentInfo
26451 * The current active request information
26452 * @param {Object} timingInfo
26453 * The start and end time of the current segment in "media time"
26454 * @param {boolean} shouldSaveTimelineMapping
26455 * If there's a timeline change, determines if the timeline mapping should be
26456 * saved in timelines.
26457 * @return {boolean}
26458 * Returns false if segment time mapping could not be calculated
26459 */
26460 ;
26461
26462 _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
26463 // TODO: remove side effects
26464 var segment = segmentInfo.segment;
26465 var part = segmentInfo.part;
26466 var mappingObj = this.timelines[segmentInfo.timeline];
26467 var start;
26468 var end;
26469
26470 if (typeof segmentInfo.timestampOffset === 'number') {
26471 mappingObj = {
26472 time: segmentInfo.startOfSegment,
26473 mapping: segmentInfo.startOfSegment - timingInfo.start
26474 };
26475
26476 if (shouldSaveTimelineMapping) {
26477 this.timelines[segmentInfo.timeline] = mappingObj;
26478 this.trigger('timestampoffset');
26479 this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
26480 }
26481
26482 start = segmentInfo.startOfSegment;
26483 end = timingInfo.end + mappingObj.mapping;
26484 } else if (mappingObj) {
26485 start = timingInfo.start + mappingObj.mapping;
26486 end = timingInfo.end + mappingObj.mapping;
26487 } else {
26488 return false;
26489 }
26490
26491 if (part) {
26492 part.start = start;
26493 part.end = end;
26494 } // If we don't have a segment start yet or the start value we got
26495 // is less than our current segment.start value, save a new start value.
26496 // We have to do this because parts will have segment timing info saved
26497 // multiple times and we want segment start to be the earliest part start
26498 // value for that segment.
26499
26500
26501 if (!segment.start || start < segment.start) {
26502 segment.start = start;
26503 }
26504
26505 segment.end = end;
26506 return true;
26507 }
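// A worked example of the mapping above (values illustrative): a segment with
// startOfSegment = 30 and media-time timingInfo { start: 100, end: 106 }
// yields mapping = 30 - 100 = -70, so display start = 30 and
// display end = 106 + (-70) = 36; later segments on the same timeline reuse
// the -70 mapping to convert media time to display time.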
26508 /**
26509 * Each time we have discontinuity in the playlist, attempt to calculate the location
26510 * in display of the start of the discontinuity and save that. We also save an accuracy
26511 * value so that we save values with the most accuracy (closest to 0).
26512 *
26513 * @private
26514 * @param {SegmentInfo} segmentInfo - The current active request information
26515 */
26516 ;
26517
26518 _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
26519 var playlist = segmentInfo.playlist;
26520 var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
26521 // the range starts and its accuracy is 0 (greater accuracy values
26522 // mean more approximation)
26523
26524 if (segment.discontinuity) {
26525 this.discontinuities[segment.timeline] = {
26526 time: segment.start,
26527 accuracy: 0
26528 };
26529 } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
26530 // Search for future discontinuities that we can provide better timing
26531 // information for and save that information for sync purposes
26532 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
26533 var segmentIndex = playlist.discontinuityStarts[i];
26534 var discontinuity = playlist.discontinuitySequence + i + 1;
26535 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
26536 var accuracy = Math.abs(mediaIndexDiff);
26537
26538 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
26539 var time = void 0;
26540
26541 if (mediaIndexDiff < 0) {
26542 time = segment.start - sumDurations({
26543 defaultDuration: playlist.targetDuration,
26544 durationList: playlist.segments,
26545 startIndex: segmentInfo.mediaIndex,
26546 endIndex: segmentIndex
26547 });
26548 } else {
26549 time = segment.end + sumDurations({
26550 defaultDuration: playlist.targetDuration,
26551 durationList: playlist.segments,
26552 startIndex: segmentInfo.mediaIndex + 1,
26553 endIndex: segmentIndex
26554 });
26555 }
26556
26557 this.discontinuities[discontinuity] = {
26558 time: time,
26559 accuracy: accuracy
26560 };
26561 }
26562 }
26563 }
26564 };
26565
26566 _proto.dispose = function dispose() {
26567 this.trigger('dispose');
26568 this.off();
26569 };
26570
26571 return SyncController;
26572 }(videojs__default["default"].EventTarget);
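  // Editor's note: an illustrative sketch only -- not part of the library and
  // never invoked. It restates the arithmetic calculateSegmentTimeMapping_
  // performs above: a timeline's mapping converts "media time" into "display
  // time" via displayTime = mediaTime + mapping, where the mapping is derived
  // from the first segment of the timeline. The helper name is hypothetical.
  function exampleApplyTimeMapping(timingInfo, startOfSegment) {
    // the offset that converts media time into display time
    var mapping = startOfSegment - timingInfo.start;

    return {
      start: timingInfo.start + mapping, // === startOfSegment
      end: timingInfo.end + mapping
    };
  }
  // e.g. exampleApplyTimeMapping({start: 100, end: 106}, 0) returns
  // {start: 0, end: 6}: a segment whose media time begins at 100s displays
  // starting at 0s.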
26573
26574 /**
26575 * The TimelineChangeController acts as a source for segment loaders to listen for and
26576 * keep track of latest and pending timeline changes. This is useful to ensure proper
26577 * sync, as each loader may need to make a consideration for what timeline the other
26578 * loader is on before making changes which could impact the other loader's media.
26579 *
26580 * @class TimelineChangeController
26581 * @extends videojs.EventTarget
26582 */
26583
26584 var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
26585 inheritsLoose(TimelineChangeController, _videojs$EventTarget);
26586
26587 function TimelineChangeController() {
26588 var _this;
26589
26590 _this = _videojs$EventTarget.call(this) || this;
26591 _this.pendingTimelineChanges_ = {};
26592 _this.lastTimelineChanges_ = {};
26593 return _this;
26594 }
26595
26596 var _proto = TimelineChangeController.prototype;
26597
26598 _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
26599 this.pendingTimelineChanges_[type] = null;
26600 this.trigger('pendingtimelinechange');
26601 };
26602
26603 _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
26604 var type = _ref.type,
26605 from = _ref.from,
26606 to = _ref.to;
26607
26608 if (typeof from === 'number' && typeof to === 'number') {
26609 this.pendingTimelineChanges_[type] = {
26610 type: type,
26611 from: from,
26612 to: to
26613 };
26614 this.trigger('pendingtimelinechange');
26615 }
26616
26617 return this.pendingTimelineChanges_[type];
26618 };
26619
26620 _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
26621 var type = _ref2.type,
26622 from = _ref2.from,
26623 to = _ref2.to;
26624
26625 if (typeof from === 'number' && typeof to === 'number') {
26626 this.lastTimelineChanges_[type] = {
26627 type: type,
26628 from: from,
26629 to: to
26630 };
26631 delete this.pendingTimelineChanges_[type];
26632 this.trigger('timelinechange');
26633 }
26634
26635 return this.lastTimelineChanges_[type];
26636 };
26637
26638 _proto.dispose = function dispose() {
26639 this.trigger('dispose');
26640 this.pendingTimelineChanges_ = {};
26641 this.lastTimelineChanges_ = {};
26642 this.off();
26643 };
26644
26645 return TimelineChangeController;
26646 }(videojs__default["default"].EventTarget);
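  // Editor's note: an illustrative sketch only -- not part of the library and
  // never invoked. It shows the intended call pattern for the
  // TimelineChangeController defined above: a loader records a pending change
  // before crossing a discontinuity, then promotes it to the "last" change
  // once the crossing completes, at which point the pending entry is cleared.
  function exampleTimelineChangeFlow() {
    var controller = new TimelineChangeController();

    controller.on('timelinechange', function () {
      // e.g. the other loader can now safely follow to the new timeline
    });

    // a loader announces it is about to move from timeline 0 to timeline 1
    controller.pendingTimelineChange({ type: 'main', from: 0, to: 1 });

    // ...after content from the new timeline has been appended...
    controller.lastTimelineChange({ type: 'main', from: 0, to: 1 });

    // called with only a type, lastTimelineChange acts as a getter
    return controller.lastTimelineChange({ type: 'main' }); // {type, from: 0, to: 1}
  }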
26647
26648 /* rollup-plugin-worker-factory start for worker!/Users/ddashkevich/projects/vhs-release/src/decrypter-worker.js */
26649 var workerCode = transform(getWorkerString(function () {
26650
26651 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
26652
26653 function createCommonjsModule(fn, basedir, module) {
26654 return module = {
26655 path: basedir,
26656 exports: {},
26657 require: function require(path, base) {
26658 return commonjsRequire(path, base === undefined || base === null ? module.path : base);
26659 }
26660 }, fn(module, module.exports), module.exports;
26661 }
26662
26663 function commonjsRequire() {
26664 throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
26665 }
26666
26667 var createClass = createCommonjsModule(function (module) {
26668 function _defineProperties(target, props) {
26669 for (var i = 0; i < props.length; i++) {
26670 var descriptor = props[i];
26671 descriptor.enumerable = descriptor.enumerable || false;
26672 descriptor.configurable = true;
26673 if ("value" in descriptor) descriptor.writable = true;
26674 Object.defineProperty(target, descriptor.key, descriptor);
26675 }
26676 }
26677
26678 function _createClass(Constructor, protoProps, staticProps) {
26679 if (protoProps) _defineProperties(Constructor.prototype, protoProps);
26680 if (staticProps) _defineProperties(Constructor, staticProps);
26681 return Constructor;
26682 }
26683
26684 module.exports = _createClass;
26685 module.exports["default"] = module.exports, module.exports.__esModule = true;
26686 });
26687 var setPrototypeOf = createCommonjsModule(function (module) {
26688 function _setPrototypeOf(o, p) {
26689 module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
26690 o.__proto__ = p;
26691 return o;
26692 };
26693
26694 module.exports["default"] = module.exports, module.exports.__esModule = true;
26695 return _setPrototypeOf(o, p);
26696 }
26697
26698 module.exports = _setPrototypeOf;
26699 module.exports["default"] = module.exports, module.exports.__esModule = true;
26700 });
26701 var inheritsLoose = createCommonjsModule(function (module) {
26702 function _inheritsLoose(subClass, superClass) {
26703 subClass.prototype = Object.create(superClass.prototype);
26704 subClass.prototype.constructor = subClass;
26705 setPrototypeOf(subClass, superClass);
26706 }
26707
26708 module.exports = _inheritsLoose;
26709 module.exports["default"] = module.exports, module.exports.__esModule = true;
26710 });
26711 /**
26712 * @file stream.js
26713 */
26714
26715 /**
26716 * A lightweight readable stream implementation that handles event dispatching.
26717 *
26718 * @class Stream
26719 */
26720
26721 var Stream = /*#__PURE__*/function () {
26722 function Stream() {
26723 this.listeners = {};
26724 }
26725 /**
26726 * Add a listener for a specified event type.
26727 *
26728 * @param {string} type the event name
26729 * @param {Function} listener the callback to be invoked when an event of
26730 * the specified type occurs
26731 */
26732
26733
26734 var _proto = Stream.prototype;
26735
26736 _proto.on = function on(type, listener) {
26737 if (!this.listeners[type]) {
26738 this.listeners[type] = [];
26739 }
26740
26741 this.listeners[type].push(listener);
26742 }
26743 /**
26744 * Remove a listener for a specified event type.
26745 *
26746 * @param {string} type the event name
26747 * @param {Function} listener a function previously registered for this
26748 * type of event through `on`
26749 * @return {boolean} if we could turn it off or not
26750 */
26751 ;
26752
26753 _proto.off = function off(type, listener) {
26754 if (!this.listeners[type]) {
26755 return false;
26756 }
26757
26758 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
26759 // In Video.js we slice listener functions
26760 // on trigger so that it does not mess up the order
26761 // while we loop through.
26762 //
26763 // Here we slice on off so that the loop in trigger
26764 // can continue using its old reference to loop without
26765 // messing up the order.
26766
26767 this.listeners[type] = this.listeners[type].slice(0);
26768 this.listeners[type].splice(index, 1);
26769 return index > -1;
26770 }
26771 /**
26772 * Trigger an event of the specified type on this stream. Any additional
26773 * arguments to this function are passed as parameters to event listeners.
26774 *
26775 * @param {string} type the event name
26776 */
26777 ;
26778
26779 _proto.trigger = function trigger(type) {
26780 var callbacks = this.listeners[type];
26781
26782 if (!callbacks) {
26783 return;
26784 } // Slicing the arguments on every invocation of this method
26785 // can add a significant amount of overhead. Avoid the
26786 // intermediate object creation for the common case of a
26787 // single callback argument
26788
26789
26790 if (arguments.length === 2) {
26791 var length = callbacks.length;
26792
26793 for (var i = 0; i < length; ++i) {
26794 callbacks[i].call(this, arguments[1]);
26795 }
26796 } else {
26797 var args = Array.prototype.slice.call(arguments, 1);
26798 var _length = callbacks.length;
26799
26800 for (var _i = 0; _i < _length; ++_i) {
26801 callbacks[_i].apply(this, args);
26802 }
26803 }
26804 }
26805 /**
26806 * Destroys the stream and cleans up.
26807 */
26808 ;
26809
26810 _proto.dispose = function dispose() {
26811 this.listeners = {};
26812 }
26813 /**
26814 * Forwards all `data` events on this stream to the destination stream. The
26815 * destination stream should provide a method `push` to receive the data
26816 * events as they arrive.
26817 *
26818 * @param {Stream} destination the stream that will receive all `data` events
26819 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
26820 */
26821 ;
26822
26823 _proto.pipe = function pipe(destination) {
26824 this.on('data', function (data) {
26825 destination.push(data);
26826 });
26827 };
26828
26829 return Stream;
26830 }();
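    // Editor's note: an illustrative sketch only -- not part of the library
    // and never invoked. It demonstrates the Stream class above: listeners
    // registered with on() receive the arguments passed to trigger(), and
    // pipe() forwards 'data' events to any object exposing a push() method.
    function exampleStreamUsage() {
      var source = new Stream();
      var received = [];
      var sink = {
        push: function push(data) {
          received.push(data);
        }
      };

      source.pipe(sink);
      source.trigger('data', 1);
      source.trigger('data', 2);
      source.dispose();
      return received; // [1, 2]
    }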
26831 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
26832
26833 /**
26834 * Returns the subarray of a Uint8Array without PKCS#7 padding.
26835 *
26836 * @param padded {Uint8Array} unencrypted bytes that have been padded
26837 * @return {Uint8Array} the unpadded bytes
26838 * @see http://tools.ietf.org/html/rfc5652
26839 */
26840
26841
26842 function unpad(padded) {
26843 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
26844 }
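    // Editor's note: an illustrative sketch only -- not part of the library
    // and never invoked. PKCS#7 fills out the final 16-byte AES block with N
    // padding bytes that each hold the value N, so unpad() above can read the
    // last byte to learn how much to strip.
    function exampleUnpad() {
      // 13 content bytes padded with three 0x03 bytes to fill a block
      var padded = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 0x03, 0x03, 0x03]);

      return unpad(padded); // Uint8Array of the first 13 bytes
    }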
26845 /*! @name aes-decrypter @version 3.1.3 @license Apache-2.0 */
26846
26847 /**
26848 * @file aes.js
26849 *
26850 * This file contains an adaptation of the AES decryption algorithm
26851 * from the Stanford Javascript Cryptography Library. That work is
26852 * covered by the following copyright and permissions notice:
26853 *
26854 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
26855 * All rights reserved.
26856 *
26857 * Redistribution and use in source and binary forms, with or without
26858 * modification, are permitted provided that the following conditions are
26859 * met:
26860 *
26861 * 1. Redistributions of source code must retain the above copyright
26862 * notice, this list of conditions and the following disclaimer.
26863 *
26864 * 2. Redistributions in binary form must reproduce the above
26865 * copyright notice, this list of conditions and the following
26866 * disclaimer in the documentation and/or other materials provided
26867 * with the distribution.
26868 *
26869 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
26870 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
26871 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
26872 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
26873 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
26874 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
26875 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
26876 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
26877 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26878 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26879 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26880 *
26881 * The views and conclusions contained in the software and documentation
26882 * are those of the authors and should not be interpreted as representing
26883 * official policies, either expressed or implied, of the authors.
26884 */
26885
26886 /**
26887 * Expand the S-box tables.
26888 *
26889 * @private
26890 */
26891
26892
26893 var precompute = function precompute() {
26894 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
26895 var encTable = tables[0];
26896 var decTable = tables[1];
26897 var sbox = encTable[4];
26898 var sboxInv = decTable[4];
26899 var i;
26900 var x;
26901 var xInv;
26902 var d = [];
26903 var th = [];
26904 var x2;
26905 var x4;
26906 var x8;
26907 var s;
26908 var tEnc;
26909 var tDec; // Compute double and third tables
26910
26911 for (i = 0; i < 256; i++) {
26912 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
26913 }
26914
26915 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
26916 // Compute sbox
26917 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
26918 s = s >> 8 ^ s & 255 ^ 99;
26919 sbox[x] = s;
26920 sboxInv[s] = x; // Compute MixColumns
26921
26922 x8 = d[x4 = d[x2 = d[x]]];
26923 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
26924 tEnc = d[s] * 0x101 ^ s * 0x1010100;
26925
26926 for (i = 0; i < 4; i++) {
26927 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
26928 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
26929 }
26930 } // Compactify. Considerable speedup on Firefox.
26931
26932
26933 for (i = 0; i < 5; i++) {
26934 encTable[i] = encTable[i].slice(0);
26935 decTable[i] = decTable[i].slice(0);
26936 }
26937
26938 return tables;
26939 };
26940
26941 var aesTables = null;
26942 /**
26943 * Schedule out an AES key for both encryption and decryption. This
26944 * is a low-level class. Use a cipher mode to do bulk encryption.
26945 *
26946 * @class AES
26947 * @param key {Array} The key as an array of 4, 6 or 8 words.
26948 */
26949
26950 var AES = /*#__PURE__*/function () {
26951 function AES(key) {
26952 /**
26953 * The expanded S-box and inverse S-box tables. These will be computed
26954 * on the client so that we don't have to send them down the wire.
26955 *
26956 * There are two tables, _tables[0] is for encryption and
26957 * _tables[1] is for decryption.
26958 *
26959 * The first 4 sub-tables are the expanded S-box with MixColumns. The
26960 * last sub-table (_tables[0][4] and _tables[1][4]) is the S-box itself.
26961 *
26962 * @private
26963 */
26964 // if we have yet to precompute the S-box tables
26965 // do so now
26966 if (!aesTables) {
26967 aesTables = precompute();
26968 } // then make a copy of that object for use
26969
26970
26971 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
26972 var i;
26973 var j;
26974 var tmp;
26975 var sbox = this._tables[0][4];
26976 var decTable = this._tables[1];
26977 var keyLen = key.length;
26978 var rcon = 1;
26979
26980 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
26981 throw new Error('Invalid aes key size');
26982 }
26983
26984 var encKey = key.slice(0);
26985 var decKey = [];
26986 this._key = [encKey, decKey]; // schedule encryption keys
26987
26988 for (i = keyLen; i < 4 * keyLen + 28; i++) {
26989 tmp = encKey[i - 1]; // apply sbox
26990
26991 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
26992 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
26993
26994 if (i % keyLen === 0) {
26995 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
26996 rcon = rcon << 1 ^ (rcon >> 7) * 283;
26997 }
26998 }
26999
27000 encKey[i] = encKey[i - keyLen] ^ tmp;
27001 } // schedule decryption keys
27002
27003
27004 for (j = 0; i; j++, i--) {
27005 tmp = encKey[j & 3 ? i : i - 4];
27006
27007 if (i <= 4 || j < 4) {
27008 decKey[j] = tmp;
27009 } else {
27010 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
27011 }
27012 }
27013 }
27014 /**
27015 * Decrypt 16 bytes, specified as four 32-bit words.
27016 *
27017 * @param {number} encrypted0 the first word to decrypt
27018 * @param {number} encrypted1 the second word to decrypt
27019 * @param {number} encrypted2 the third word to decrypt
27020 * @param {number} encrypted3 the fourth word to decrypt
27021 * @param {Int32Array} out the array to write the decrypted words
27022 * into
27023 * @param {number} offset the offset into the output array to start
27024 * writing results
27025 * @return {Array} The plaintext.
27026 */
27027
27028
27029 var _proto = AES.prototype;
27030
27031 _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
27032 var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
27033
27034 var a = encrypted0 ^ key[0];
27035 var b = encrypted3 ^ key[1];
27036 var c = encrypted2 ^ key[2];
27037 var d = encrypted1 ^ key[3];
27038 var a2;
27039 var b2;
27040 var c2; // key.length === 2 ?
27041
27042 var nInnerRounds = key.length / 4 - 2;
27043 var i;
27044 var kIndex = 4;
27045 var table = this._tables[1]; // load up the tables
27046
27047 var table0 = table[0];
27048 var table1 = table[1];
27049 var table2 = table[2];
27050 var table3 = table[3];
27051 var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
27052
27053 for (i = 0; i < nInnerRounds; i++) {
27054 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
27055 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
27056 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
27057 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
27058 kIndex += 4;
27059 a = a2;
27060 b = b2;
27061 c = c2;
27062 } // Last round.
27063
27064
27065 for (i = 0; i < 4; i++) {
27066 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
27067 a2 = a;
27068 a = b;
27069 b = c;
27070 c = d;
27071 d = a2;
27072 }
27073 };
27074
27075 return AES;
27076 }();
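    // Editor's note: an illustrative sketch only -- not part of the library
    // and never invoked. It shows the shapes the AES class above works with:
    // the key is an array of 32-bit words (4 words for AES-128) and decrypt()
    // writes the four plaintext words of one 16-byte block into an Int32Array
    // at the given offset.
    function exampleAesBlockDecrypt(keyWords, c0, c1, c2, c3) {
      var aes = new AES(keyWords); // throws unless keyWords.length is 4, 6 or 8
      var out = new Int32Array(4);

      aes.decrypt(c0, c1, c2, c3, out, 0);
      return out; // one decrypted block as four 32-bit words
    }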
27077 /**
27078 * A wrapper around the Stream class to use setTimeout
27079 * and run stream "jobs" asynchronously.
27080 *
27081 * @class AsyncStream
27082 * @extends Stream
27083 */
27084
27085
27086 var AsyncStream = /*#__PURE__*/function (_Stream) {
27087 inheritsLoose(AsyncStream, _Stream);
27088
27089 function AsyncStream() {
27090 var _this;
27091
27092 _this = _Stream.call(this) || this;
27093 _this.jobs = [];
27094 _this.delay = 1;
27095 _this.timeout_ = null;
27096 return _this;
27097 }
27098 /**
27099 * process an async job
27100 *
27101 * @private
27102 */
27103
27104
27105 var _proto = AsyncStream.prototype;
27106
27107 _proto.processJob_ = function processJob_() {
27108 this.jobs.shift()();
27109
27110 if (this.jobs.length) {
27111 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
27112 } else {
27113 this.timeout_ = null;
27114 }
27115 }
27116 /**
27117 * push a job into the stream
27118 *
27119 * @param {Function} job the job to push into the stream
27120 */
27121 ;
27122
27123 _proto.push = function push(job) {
27124 this.jobs.push(job);
27125
27126 if (!this.timeout_) {
27127 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
27128 }
27129 };
27130
27131 return AsyncStream;
27132 }(Stream);
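    // Editor's note: an illustrative sketch only -- not part of the library
    // and never invoked. AsyncStream runs one queued job per setTimeout tick,
    // which is how the Decrypter below chops a large decryption into chunks
    // without blocking the thread for the whole duration.
    function exampleAsyncStream(done) {
      var stream = new AsyncStream();

      stream.push(function () {
        // first chunk of work, runs on its own timer tick
      });
      stream.push(function () {
        // second chunk, runs on the following tick
        done();
      });
    }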
27133 /**
27134 * Convert network-order (big-endian) bytes into their little-endian
27135 * representation.
27136 */
27137
27138
27139 var ntoh = function ntoh(word) {
27140 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
27141 };
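    // Editor's note: an illustrative sketch only -- not part of the library
    // and never invoked. ntoh() above reverses the byte order of a 32-bit
    // word; because JavaScript bitwise operators yield signed 32-bit results,
    // words with the high bit set come back negative, which is harmless when
    // stored into typed arrays.
    function exampleNtoh() {
      return ntoh(0x01020304); // 0x04030201, i.e. 67305985
    }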
27142 /**
27143 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
27144 *
27145 * @param {Uint8Array} encrypted the encrypted bytes
27146 * @param {Uint32Array} key the bytes of the decryption key
27147 * @param {Uint32Array} initVector the initialization vector (IV) to
27148 * use for the first round of CBC.
27149 * @return {Uint8Array} the decrypted bytes
27150 *
27151 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
27152 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
27153 * @see https://tools.ietf.org/html/rfc2315
27154 */
27155
27156
27157 var decrypt = function decrypt(encrypted, key, initVector) {
27158 // word-level access to the encrypted bytes
27159 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
27160 var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
27161
27162 var decrypted = new Uint8Array(encrypted.byteLength);
27163 var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
27164 // decrypted data
27165
27166 var init0;
27167 var init1;
27168 var init2;
27169 var init3;
27170 var encrypted0;
27171 var encrypted1;
27172 var encrypted2;
27173 var encrypted3; // iteration variable
27174
27175 var wordIx; // pull out the words of the IV to ensure we don't modify the
27176 // passed-in reference and to allow easier access
27177
27178 init0 = initVector[0];
27179 init1 = initVector[1];
27180 init2 = initVector[2];
27181 init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
27182 // to each decrypted block
27183
27184 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
27185 // convert big-endian (network order) words into little-endian
27186 // (javascript order)
27187 encrypted0 = ntoh(encrypted32[wordIx]);
27188 encrypted1 = ntoh(encrypted32[wordIx + 1]);
27189 encrypted2 = ntoh(encrypted32[wordIx + 2]);
27190 encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
27191
27192 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
27193 // plaintext
27194
27195 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
27196 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
27197 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
27198 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
27199
27200 init0 = encrypted0;
27201 init1 = encrypted1;
27202 init2 = encrypted2;
27203 init3 = encrypted3;
27204 }
27205
27206 return decrypted;
27207 };
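    // Editor's note: an illustrative sketch only -- not part of the library
    // and never invoked. It shows the typed-array shapes decrypt() above
    // expects: ciphertext as a whole number of 16-byte blocks, plus a 4-word
    // key and a 4-word IV. The zeroed inputs are placeholders, so the output
    // is not meaningful plaintext.
    function exampleDecryptShapes() {
      var encrypted = new Uint8Array(32); // two placeholder blocks
      var key = new Uint32Array(4); // placeholder AES-128 key words
      var iv = new Uint32Array(4); // placeholder IV words

      return decrypt(encrypted, key, iv); // Uint8Array of 32 decrypted bytes
    }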
27208 /**
27209 * The `Decrypter` class that manages decryption of AES
27210 * data through `AsyncStream` objects and the `decrypt`
27211 * function
27212 *
27213 * @param {Uint8Array} encrypted the encrypted bytes
27214 * @param {Uint32Array} key the bytes of the decryption key
27215 * @param {Uint32Array} initVector the initialization vector (IV) to use for the first round of CBC
27216 * @param {Function} done the function to run when done
27217 * @class Decrypter
27218 */
27219
27220
27221 var Decrypter = /*#__PURE__*/function () {
27222 function Decrypter(encrypted, key, initVector, done) {
27223 var step = Decrypter.STEP;
27224 var encrypted32 = new Int32Array(encrypted.buffer);
27225 var decrypted = new Uint8Array(encrypted.byteLength);
27226 var i = 0;
27227 this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously
27228
27229 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
27230
27231 for (i = step; i < encrypted32.length; i += step) {
27232 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
27233 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
27234 } // invoke the done() callback when everything is finished
27235
27236
27237 this.asyncStream_.push(function () {
27238 // remove pkcs#7 padding from the decrypted bytes
27239 done(null, unpad(decrypted));
27240 });
27241 }
27242 /**
27243 * A getter for STEP, the maximum number of bytes to process at one time.
27244 *
27245 * @return {number} the value of STEP (32000)
27246 */
27247
27248
27249 var _proto = Decrypter.prototype;
27250 /**
27251 * @private
27252 */
27253
27254 _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
27255 return function () {
27256 var bytes = decrypt(encrypted, key, initVector);
27257 decrypted.set(bytes, encrypted.byteOffset);
27258 };
27259 };
27260
27261 createClass(Decrypter, null, [{
27262 key: "STEP",
27263 get: function get() {
27264 // 4 * 8000;
27265 return 32000;
27266 }
27267 }]);
27268 return Decrypter;
27269 }();
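    // Editor's note: an illustrative sketch only -- not part of the library
    // and never invoked. The Decrypter class above queues STEP-sized chunks on
    // an AsyncStream, then unpads the result and passes it to the done
    // callback. The zeroed inputs are placeholders, so the "plaintext" is not
    // meaningful.
    function exampleDecrypterUsage(done) {
      var encrypted = new Uint8Array(64); // placeholder ciphertext
      var key = new Uint32Array(4); // placeholder AES-128 key words
      var iv = new Uint32Array(4); // placeholder IV words

      // eslint-disable-next-line no-new
      new Decrypter(encrypted, key, iv, function (err, bytes) {
        done(err, bytes); // bytes is the unpadded Uint8Array
      });
    }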
27270
27271 var win;
27272
27273 if (typeof window !== "undefined") {
27274 win = window;
27275 } else if (typeof commonjsGlobal !== "undefined") {
27276 win = commonjsGlobal;
27277 } else if (typeof self !== "undefined") {
27278 win = self;
27279 } else {
27280 win = {};
27281 }
27282
27283 var window_1 = win;
27284
27285 var isArrayBufferView = function isArrayBufferView(obj) {
27286 if (typeof ArrayBuffer.isView === 'function') {
27287 return ArrayBuffer.isView(obj);
27288 }
27289
27290 return obj && obj.buffer instanceof ArrayBuffer;
27291 };
27292
27293 var BigInt = window_1.BigInt || Number;
27294 [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
27295
27296 (function () {
27297 var a = new Uint16Array([0xFFCC]);
27298 var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
27299
27300 if (b[0] === 0xFF) {
27301 return 'big';
27302 }
27303
27304 if (b[0] === 0xCC) {
27305 return 'little';
27306 }
27307
27308 return 'unknown';
27309 })();
27310 /**
27311 * Creates an object for sending to a web worker, converting properties that are TypedArrays
27312 * into plain objects with separated properties for the buffer, byteOffset, and byteLength.
27313 *
27314 * @param {Object} message
27315 * Object of properties and values to send to the web worker
27316 * @return {Object}
27317 * Modified message with TypedArray values expanded
27318 * @function createTransferableMessage
27319 */
27320
27321
27322 var createTransferableMessage = function createTransferableMessage(message) {
27323 var transferable = {};
27324 Object.keys(message).forEach(function (key) {
27325 var value = message[key];
27326
27327 if (isArrayBufferView(value)) {
27328 transferable[key] = {
27329 bytes: value.buffer,
27330 byteOffset: value.byteOffset,
27331 byteLength: value.byteLength
27332 };
27333 } else {
27334 transferable[key] = value;
27335 }
27336 });
27337 return transferable;
27338 };
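    // Editor's note: an illustrative sketch only -- not part of the library
    // and never invoked. createTransferableMessage() above flattens TypedArray
    // values into plain {bytes, byteOffset, byteLength} records so the
    // underlying ArrayBuffers can be listed as transferables in postMessage.
    function exampleTransferableMessage() {
      var message = createTransferableMessage({
        source: 'segment-0', // hypothetical identifier, passes through as-is
        encrypted: new Uint8Array([1, 2, 3, 4])
      });

      // message.encrypted is now {bytes: ArrayBuffer, byteOffset: 0, byteLength: 4}
      return message;
    }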
27339 /* global self */
27340
27341 /**
27342 * Our web worker interface so that things can talk to aes-decrypter
27343 * that will be running in a web worker. The scope is passed to this by
27344 * webworkify.
27345 */
27346
27347
27348 self.onmessage = function (event) {
27349 var data = event.data;
27350 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
27351 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
27352 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
27353 /* eslint-disable no-new, handle-callback-err */
27354
27355 new Decrypter(encrypted, key, iv, function (err, bytes) {
27356 self.postMessage(createTransferableMessage({
27357 source: data.source,
27358 decrypted: bytes
27359 }), [bytes.buffer]);
27360 });
27361 /* eslint-enable */
27362 };
27363 }));
27364 var Decrypter = factory(workerCode);
27365 /* rollup-plugin-worker-factory end for worker!/Users/ddashkevich/projects/vhs-release/src/decrypter-worker.js */
27366
27367 /**
27368 * Convert the properties of an HLS track into an audioTrackKind.
27369 *
27370 * @private
27371 */
27372
27373 var audioTrackKind_ = function audioTrackKind_(properties) {
27374 var kind = properties.default ? 'main' : 'alternative';
27375
27376 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
27377 kind = 'main-desc';
27378 }
27379
27380 return kind;
27381 };
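  // Editor's note: an illustrative sketch only -- not part of the library and
  // never invoked. It lists the kinds audioTrackKind_ above derives from
  // typical HLS rendition properties.
  function exampleAudioTrackKinds() {
    return [
      audioTrackKind_({ default: true }), // 'main'
      audioTrackKind_({ default: false }), // 'alternative'
      audioTrackKind_({
        default: true,
        characteristics: 'public.accessibility.describes-video'
      }) // 'main-desc'
    ];
  }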
27382 /**
27383 * Pause provided segment loader and playlist loader if active
27384 *
27385 * @param {SegmentLoader} segmentLoader
27386 * SegmentLoader to pause
27387 * @param {Object} mediaType
27388 * Active media type
27389 * @function stopLoaders
27390 */
27391
27392
27393 var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
27394 segmentLoader.abort();
27395 segmentLoader.pause();
27396
27397 if (mediaType && mediaType.activePlaylistLoader) {
27398 mediaType.activePlaylistLoader.pause();
27399 mediaType.activePlaylistLoader = null;
27400 }
27401 };
27402 /**
27403 * Start loading provided segment loader and playlist loader
27404 *
27405 * @param {PlaylistLoader} playlistLoader
27406 * PlaylistLoader to start loading
27407 * @param {Object} mediaType
27408 * Active media type
27409 * @function startLoaders
27410 */
27411
27412 var startLoaders = function startLoaders(playlistLoader, mediaType) {
27413 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
27414 // playlist loader
27415 mediaType.activePlaylistLoader = playlistLoader;
27416 playlistLoader.load();
27417 };
27418 /**
27419 * Returns a function to be called when the media group changes. It performs a
27420 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
27421 * change of group is merely a rendition switch of the same content at another encoding,
27422 * rather than a change of content, such as switching audio from English to Spanish.
27423 *
27424 * @param {string} type
27425 * MediaGroup type
27426 * @param {Object} settings
27427 * Object containing required information for media groups
27428 * @return {Function}
27429 * Handler for a non-destructive resync of SegmentLoader when the active media
27430 * group changes.
27431 * @function onGroupChanged
27432 */
27433
27434 var onGroupChanged = function onGroupChanged(type, settings) {
27435 return function () {
27436 var _settings$segmentLoad = settings.segmentLoaders,
27437 segmentLoader = _settings$segmentLoad[type],
27438 mainSegmentLoader = _settings$segmentLoad.main,
27439 mediaType = settings.mediaTypes[type];
27440 var activeTrack = mediaType.activeTrack();
27441 var activeGroup = mediaType.getActiveGroup();
27442 var previousActiveLoader = mediaType.activePlaylistLoader;
27443 var lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
27444
27445 if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
27446 return;
27447 }
27448
27449 mediaType.lastGroup_ = activeGroup;
27450 mediaType.lastTrack_ = activeTrack;
27451 stopLoaders(segmentLoader, mediaType);
27452
27453 if (!activeGroup || activeGroup.isMasterPlaylist) {
27454 // there is no group active or active group is a main playlist and won't change
27455 return;
27456 }
27457
27458 if (!activeGroup.playlistLoader) {
27459 if (previousActiveLoader) {
27460 // The previous group had a playlist loader but the new active group does not
27461 // this means we are switching from demuxed to muxed audio. In this case we want to
27462 // do a destructive reset of the main segment loader and not restart the audio
27463 // loaders.
27464 mainSegmentLoader.resetEverything();
27465 }
27466
27467 return;
27468 } // Non-destructive resync
27469
27470
27471 segmentLoader.resyncLoader();
27472 startLoaders(activeGroup.playlistLoader, mediaType);
27473 };
27474 };
27475 var onGroupChanging = function onGroupChanging(type, settings) {
27476 return function () {
27477 var segmentLoader = settings.segmentLoaders[type],
27478 mediaType = settings.mediaTypes[type];
27479 mediaType.lastGroup_ = null;
27480 segmentLoader.abort();
27481 segmentLoader.pause();
27482 };
27483 };
27484 /**
27485 * Returns a function to be called when the media track changes. It performs a
27486 * destructive reset of the SegmentLoader to ensure we start loading as close to
27487 * currentTime as possible.
27488 *
27489 * @param {string} type
27490 * MediaGroup type
27491 * @param {Object} settings
27492 * Object containing required information for media groups
27493 * @return {Function}
27494 * Handler for a destructive reset of SegmentLoader when the active media
27495 * track changes.
27496 * @function onTrackChanged
27497 */
27498
27499 var onTrackChanged = function onTrackChanged(type, settings) {
27500 return function () {
27501 var masterPlaylistLoader = settings.masterPlaylistLoader,
27502 _settings$segmentLoad2 = settings.segmentLoaders,
27503 segmentLoader = _settings$segmentLoad2[type],
27504 mainSegmentLoader = _settings$segmentLoad2.main,
27505 mediaType = settings.mediaTypes[type];
27506 var activeTrack = mediaType.activeTrack();
27507 var activeGroup = mediaType.getActiveGroup();
27508 var previousActiveLoader = mediaType.activePlaylistLoader;
27509 var lastTrack = mediaType.lastTrack_; // track did not change, do nothing
27510
27511 if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
27512 return;
27513 }
27514
27515 mediaType.lastGroup_ = activeGroup;
27516 mediaType.lastTrack_ = activeTrack;
27517 stopLoaders(segmentLoader, mediaType);
27518
27519 if (!activeGroup) {
27520 // there is no group active so we do not want to restart loaders
27521 return;
27522 }
27523
27524 if (activeGroup.isMasterPlaylist) {
27525 // track did not change, do nothing
27526 if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
27527 return;
27528 }
27529
27530 var mpc = settings.vhs.masterPlaylistController_;
27531 var newPlaylist = mpc.selectPlaylist(); // media will not change, do nothing
27532
27533 if (mpc.media() === newPlaylist) {
27534 return;
27535 }
27536
27537 mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
27538 masterPlaylistLoader.pause();
27539 mainSegmentLoader.resetEverything();
27540 mpc.fastQualityChange_(newPlaylist);
27541 return;
27542 }
27543
27544 if (type === 'AUDIO') {
27545 if (!activeGroup.playlistLoader) {
27546 // when switching from demuxed audio/video to muxed audio/video (noted by no
27547 // playlist loader for the audio group), we want to do a destructive reset of the
27548 // main segment loader and not restart the audio loaders
27549 mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
27550 // it should be stopped
27551
27552 mainSegmentLoader.resetEverything();
27553 return;
27554 } // although the segment loader is an audio segment loader, call the setAudio
27555 // function to ensure it is prepared to re-append the init segment (or handle other
27556 // config changes)
27557
27558
27559 segmentLoader.setAudio(true);
27560 mainSegmentLoader.setAudio(false);
27561 }
27562
27563 if (previousActiveLoader === activeGroup.playlistLoader) {
27564 // Nothing has actually changed. This can happen because track change events can fire
27565 // multiple times for a "single" change. One for enabling the new active track, and
27566 // one for disabling the track that was active
27567 startLoaders(activeGroup.playlistLoader, mediaType);
27568 return;
27569 }
27570
27571 if (segmentLoader.track) {
27572 // For WebVTT, set the new text track in the segmentloader
27573 segmentLoader.track(activeTrack);
27574 } // destructive reset
27575
27576
27577 segmentLoader.resetEverything();
27578 startLoaders(activeGroup.playlistLoader, mediaType);
27579 };
27580 };
27581 var onError = {
27582 /**
27583 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
27584 * an error.
27585 *
27586 * @param {string} type
27587 * MediaGroup type
27588 * @param {Object} settings
27589 * Object containing required information for media groups
27590 * @return {Function}
27591 * Error handler. Logs warning (or error if the playlist is blacklisted) to
27592 * console and switches back to default audio track.
27593 * @function onError.AUDIO
27594 */
27595 AUDIO: function AUDIO(type, settings) {
27596 return function () {
27597 var segmentLoader = settings.segmentLoaders[type],
27598 mediaType = settings.mediaTypes[type],
27599 blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
27600 stopLoaders(segmentLoader, mediaType); // switch back to default audio track
27601
27602 var activeTrack = mediaType.activeTrack();
27603 var activeGroup = mediaType.activeGroup();
27604 var id = (activeGroup.filter(function (group) {
27605 return group.default;
27606 })[0] || activeGroup[0]).id;
27607 var defaultTrack = mediaType.tracks[id];
27608
27609 if (activeTrack === defaultTrack) {
27610 // Default track encountered an error. All we can do now is blacklist the current
27611 // rendition and hope another will switch audio groups
27612 blacklistCurrentPlaylist({
27613 message: 'Problem encountered loading the default audio track.'
27614 });
27615 return;
27616 }
27617
27618 videojs__default["default"].log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
27619
27620 for (var trackId in mediaType.tracks) {
27621 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
27622 }
27623
27624 mediaType.onTrackChanged();
27625 };
27626 },
27627
27628 /**
27629 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
27630 * an error.
27631 *
27632 * @param {string} type
27633 * MediaGroup type
27634 * @param {Object} settings
27635 * Object containing required information for media groups
27636 * @return {Function}
27637 * Error handler. Logs warning to console and disables the active subtitle track
27638 * @function onError.SUBTITLES
27639 */
27640 SUBTITLES: function SUBTITLES(type, settings) {
27641 return function () {
27642 var segmentLoader = settings.segmentLoaders[type],
27643 mediaType = settings.mediaTypes[type];
27644 videojs__default["default"].log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
27645 stopLoaders(segmentLoader, mediaType);
27646 var track = mediaType.activeTrack();
27647
27648 if (track) {
27649 track.mode = 'disabled';
27650 }
27651
27652 mediaType.onTrackChanged();
27653 };
27654 }
27655 };
27656 var setupListeners = {
27657 /**
27658 * Setup event listeners for audio playlist loader
27659 *
27660 * @param {string} type
27661 * MediaGroup type
27662 * @param {PlaylistLoader|null} playlistLoader
27663 * PlaylistLoader to register listeners on
27664 * @param {Object} settings
27665 * Object containing required information for media groups
27666 * @function setupListeners.AUDIO
27667 */
27668 AUDIO: function AUDIO(type, playlistLoader, settings) {
27669 if (!playlistLoader) {
27670 // no playlist loader means audio will be muxed with the video
27671 return;
27672 }
27673
27674 var tech = settings.tech,
27675 requestOptions = settings.requestOptions,
27676 segmentLoader = settings.segmentLoaders[type];
27677 playlistLoader.on('loadedmetadata', function () {
27678 var media = playlistLoader.media();
27679 segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
27680 // permits, start downloading segments
27681
27682 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
27683 segmentLoader.load();
27684 }
27685 });
27686 playlistLoader.on('loadedplaylist', function () {
27687 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
27688
27689 if (!tech.paused()) {
27690 segmentLoader.load();
27691 }
27692 });
27693 playlistLoader.on('error', onError[type](type, settings));
27694 },
27695
27696 /**
27697 * Setup event listeners for subtitle playlist loader
27698 *
27699 * @param {string} type
27700 * MediaGroup type
27701 * @param {PlaylistLoader|null} playlistLoader
27702 * PlaylistLoader to register listeners on
27703 * @param {Object} settings
27704 * Object containing required information for media groups
27705 * @function setupListeners.SUBTITLES
27706 */
27707 SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
27708 var tech = settings.tech,
27709 requestOptions = settings.requestOptions,
27710 segmentLoader = settings.segmentLoaders[type],
27711 mediaType = settings.mediaTypes[type];
27712 playlistLoader.on('loadedmetadata', function () {
27713 var media = playlistLoader.media();
27714 segmentLoader.playlist(media, requestOptions);
27715 segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
27716 // permits, start downloading segments
27717
27718 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
27719 segmentLoader.load();
27720 }
27721 });
27722 playlistLoader.on('loadedplaylist', function () {
27723 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
27724
27725 if (!tech.paused()) {
27726 segmentLoader.load();
27727 }
27728 });
27729 playlistLoader.on('error', onError[type](type, settings));
27730 }
27731 };
27732 var initialize = {
27733 /**
27734 * Setup PlaylistLoaders and AudioTracks for the audio groups
27735 *
27736 * @param {string} type
27737 * MediaGroup type
27738 * @param {Object} settings
27739 * Object containing required information for media groups
27740 * @function initialize.AUDIO
27741 */
27742 'AUDIO': function AUDIO(type, settings) {
27743 var vhs = settings.vhs,
27744 sourceType = settings.sourceType,
27745 segmentLoader = settings.segmentLoaders[type],
27746 requestOptions = settings.requestOptions,
27747 mediaGroups = settings.master.mediaGroups,
27748 _settings$mediaTypes$ = settings.mediaTypes[type],
27749 groups = _settings$mediaTypes$.groups,
27750 tracks = _settings$mediaTypes$.tracks,
27751 logger_ = _settings$mediaTypes$.logger_,
27752 masterPlaylistLoader = settings.masterPlaylistLoader;
27753 var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none
27754
27755 if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
27756 mediaGroups[type] = {
27757 main: {
27758 default: {
27759 default: true
27760 }
27761 }
27762 };
27763
27764 if (audioOnlyMaster) {
27765 mediaGroups[type].main.default.playlists = masterPlaylistLoader.master.playlists;
27766 }
27767 }
27768
27769 for (var groupId in mediaGroups[type]) {
27770 if (!groups[groupId]) {
27771 groups[groupId] = [];
27772 }
27773
27774 for (var variantLabel in mediaGroups[type][groupId]) {
27775 var properties = mediaGroups[type][groupId][variantLabel];
27776 var playlistLoader = void 0;
27777
27778 if (audioOnlyMaster) {
27779 logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
27780 properties.isMasterPlaylist = true;
27781 playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
27782 // use the resolved media playlist object
27783 } else if (sourceType === 'vhs-json' && properties.playlists) {
27784 playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
27785 } else if (properties.resolvedUri) {
27786 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
27787 // should we even have properties.playlists in this check?
27788 } else if (properties.playlists && sourceType === 'dash') {
27789 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
27790 } else {
27791 // no resolvedUri means the audio is muxed with the video when using this
27792 // audio track
27793 playlistLoader = null;
27794 }
27795
27796 properties = videojs__default["default"].mergeOptions({
27797 id: variantLabel,
27798 playlistLoader: playlistLoader
27799 }, properties);
27800 setupListeners[type](type, properties.playlistLoader, settings);
27801 groups[groupId].push(properties);
27802
27803 if (typeof tracks[variantLabel] === 'undefined') {
27804 var track = new videojs__default["default"].AudioTrack({
27805 id: variantLabel,
27806 kind: audioTrackKind_(properties),
27807 enabled: false,
27808 language: properties.language,
27809 default: properties.default,
27810 label: variantLabel
27811 });
27812 tracks[variantLabel] = track;
27813 }
27814 }
27815 } // setup single error event handler for the segment loader
27816
27817
27818 segmentLoader.on('error', onError[type](type, settings));
27819 },
27820
27821 /**
27822 * Setup PlaylistLoaders and TextTracks for the subtitle groups
27823 *
27824 * @param {string} type
27825 * MediaGroup type
27826 * @param {Object} settings
27827 * Object containing required information for media groups
27828 * @function initialize.SUBTITLES
27829 */
27830 'SUBTITLES': function SUBTITLES(type, settings) {
27831 var tech = settings.tech,
27832 vhs = settings.vhs,
27833 sourceType = settings.sourceType,
27834 segmentLoader = settings.segmentLoaders[type],
27835 requestOptions = settings.requestOptions,
27836 mediaGroups = settings.master.mediaGroups,
27837 _settings$mediaTypes$2 = settings.mediaTypes[type],
27838 groups = _settings$mediaTypes$2.groups,
27839 tracks = _settings$mediaTypes$2.tracks,
27840 masterPlaylistLoader = settings.masterPlaylistLoader;
27841
27842 for (var groupId in mediaGroups[type]) {
27843 if (!groups[groupId]) {
27844 groups[groupId] = [];
27845 }
27846
27847 for (var variantLabel in mediaGroups[type][groupId]) {
27848 if (mediaGroups[type][groupId][variantLabel].forced) {
27849 // Subtitle playlists with the forced attribute are not selectable in Safari.
27850 // According to Apple's HLS Authoring Specification:
27851 // If content has forced subtitles and regular subtitles in a given language,
27852 // the regular subtitles track in that language MUST contain both the forced
27853 // subtitles and the regular subtitles for that language.
27854 // Because of this requirement and that Safari does not add forced subtitles,
27855 // forced subtitles are skipped here to maintain consistent experience across
27856 // all platforms
27857 continue;
27858 }
27859
27860 var properties = mediaGroups[type][groupId][variantLabel];
27861 var playlistLoader = void 0;
27862
27863 if (sourceType === 'hls') {
27864 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
27865 } else if (sourceType === 'dash') {
27866 var playlists = properties.playlists.filter(function (p) {
27867 return p.excludeUntil !== Infinity;
27868 });
27869
27870 if (!playlists.length) {
27871 return;
27872 }
27873
27874 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
27875 } else if (sourceType === 'vhs-json') {
27876 playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
27877 // as provided, otherwise use the resolved URI to load the playlist
27878 properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
27879 }
27880
27881 properties = videojs__default["default"].mergeOptions({
27882 id: variantLabel,
27883 playlistLoader: playlistLoader
27884 }, properties);
27885 setupListeners[type](type, properties.playlistLoader, settings);
27886 groups[groupId].push(properties);
27887
27888 if (typeof tracks[variantLabel] === 'undefined') {
27889 var track = tech.addRemoteTextTrack({
27890 id: variantLabel,
27891 kind: 'subtitles',
27892 default: properties.default && properties.autoselect,
27893 language: properties.language,
27894 label: variantLabel
27895 }, false).track;
27896 tracks[variantLabel] = track;
27897 }
27898 }
27899 } // setup single error event handler for the segment loader
27900
27901
27902 segmentLoader.on('error', onError[type](type, settings));
27903 },
27904
27905 /**
27906 * Setup TextTracks for the closed-caption groups
27907 *
27908 * @param {String} type
27909 * MediaGroup type
27910 * @param {Object} settings
27911 * Object containing required information for media groups
27912 * @function initialize['CLOSED-CAPTIONS']
27913 */
27914 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
27915 var tech = settings.tech,
27916 mediaGroups = settings.master.mediaGroups,
27917 _settings$mediaTypes$3 = settings.mediaTypes[type],
27918 groups = _settings$mediaTypes$3.groups,
27919 tracks = _settings$mediaTypes$3.tracks;
27920
27921 for (var groupId in mediaGroups[type]) {
27922 if (!groups[groupId]) {
27923 groups[groupId] = [];
27924 }
27925
27926 for (var variantLabel in mediaGroups[type][groupId]) {
27927 var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services
27928
27929 if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
27930 continue;
27931 }
27932
27933 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
27934 var newProps = {
27935 label: variantLabel,
27936 language: properties.language,
27937 instreamId: properties.instreamId,
27938 default: properties.default && properties.autoselect
27939 };
27940
27941 if (captionServices[newProps.instreamId]) {
27942 newProps = videojs__default["default"].mergeOptions(newProps, captionServices[newProps.instreamId]);
27943 }
27944
27945 if (newProps.default === undefined) {
27946 delete newProps.default;
27947 } // No PlaylistLoader is required for Closed-Captions because the captions are
27948 // embedded within the video stream
27949
27950
27951 groups[groupId].push(videojs__default["default"].mergeOptions({
27952 id: variantLabel
27953 }, properties));
27954
27955 if (typeof tracks[variantLabel] === 'undefined') {
27956 var track = tech.addRemoteTextTrack({
27957 id: newProps.instreamId,
27958 kind: 'captions',
27959 default: newProps.default,
27960 language: newProps.language,
27961 label: newProps.label
27962 }, false).track;
27963 tracks[variantLabel] = track;
27964 }
27965 }
27966 }
27967 }
27968 };
27969
27970 var groupMatch = function groupMatch(list, media) {
27971 for (var i = 0; i < list.length; i++) {
27972 if (playlistMatch(media, list[i])) {
27973 return true;
27974 }
27975
27976 if (list[i].playlists && groupMatch(list[i].playlists, media)) {
27977 return true;
27978 }
27979 }
27980
27981 return false;
27982 };
27983 /**
27984 * Returns a function used to get the active group of the provided type
27985 *
27986 * @param {string} type
27987 * MediaGroup type
27988 * @param {Object} settings
27989 * Object containing required information for media groups
27990 * @return {Function}
27991 * Function that returns the active media group for the provided type. Takes an
27992 * optional parameter {TextTrack} track. If no track is provided, a list of all
27993 * variants in the group, otherwise the variant corresponding to the provided
27994 * track is returned.
27995 * @function activeGroup
27996 */
27997
27998
27999 var activeGroup = function activeGroup(type, settings) {
28000 return function (track) {
28001 var masterPlaylistLoader = settings.masterPlaylistLoader,
28002 groups = settings.mediaTypes[type].groups;
28003 var media = masterPlaylistLoader.media();
28004
28005 if (!media) {
28006 return null;
28007 }
28008
28009 var variants = null; // set variants to the main media active group
28010
28011 if (media.attributes[type]) {
28012 variants = groups[media.attributes[type]];
28013 }
28014
28015 var groupKeys = Object.keys(groups);
28016
28017 if (!variants) {
28018 // find the masterPlaylistLoader media
28019 // that is in a media group if we are dealing
28020 // with audio only
28021 if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
28022 for (var i = 0; i < groupKeys.length; i++) {
28023 var groupPropertyList = groups[groupKeys[i]];
28024
28025 if (groupMatch(groupPropertyList, media)) {
28026 variants = groupPropertyList;
28027 break;
28028 }
28029 } // use the main group if it exists
28030
28031 } else if (groups.main) {
28032 variants = groups.main; // only one group, use that one
28033 } else if (groupKeys.length === 1) {
28034 variants = groups[groupKeys[0]];
28035 }
28036 }
28037
28038 if (typeof track === 'undefined') {
28039 return variants;
28040 }
28041
28042 if (track === null || !variants) {
28043 // An active track was specified so a corresponding group is expected. track === null
28044 // means no track is currently active so there is no corresponding group
28045 return null;
28046 }
28047
28048 return variants.filter(function (props) {
28049 return props.id === track.id;
28050 })[0] || null;
28051 };
28052 };
28053 var activeTrack = {
28054 /**
28055 * Returns a function used to get the active track of type provided
28056 *
28057 * @param {string} type
28058 * MediaGroup type
28059 * @param {Object} settings
28060 * Object containing required information for media groups
28061 * @return {Function}
28062 * Function that returns the active media track for the provided type. Returns
28063 * null if no track is active
28064 * @function activeTrack.AUDIO
28065 */
28066 AUDIO: function AUDIO(type, settings) {
28067 return function () {
28068 var tracks = settings.mediaTypes[type].tracks;
28069
28070 for (var id in tracks) {
28071 if (tracks[id].enabled) {
28072 return tracks[id];
28073 }
28074 }
28075
28076 return null;
28077 };
28078 },
28079
28080 /**
28081 * Returns a function used to get the active track of type provided
28082 *
28083 * @param {string} type
28084 * MediaGroup type
28085 * @param {Object} settings
28086 * Object containing required information for media groups
28087 * @return {Function}
28088 * Function that returns the active media track for the provided type. Returns
28089 * null if no track is active
28090 * @function activeTrack.SUBTITLES
28091 */
28092 SUBTITLES: function SUBTITLES(type, settings) {
28093 return function () {
28094 var tracks = settings.mediaTypes[type].tracks;
28095
28096 for (var id in tracks) {
28097 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
28098 return tracks[id];
28099 }
28100 }
28101
28102 return null;
28103 };
28104 }
28105 };
28106 var getActiveGroup = function getActiveGroup(type, _ref) {
28107 var mediaTypes = _ref.mediaTypes;
28108 return function () {
28109 var activeTrack_ = mediaTypes[type].activeTrack();
28110
28111 if (!activeTrack_) {
28112 return null;
28113 }
28114
28115 return mediaTypes[type].activeGroup(activeTrack_);
28116 };
28117 };
28118 /**
28119 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
28120 * Closed-Captions) specified in the master manifest.
28121 *
28122 * @param {Object} settings
28123 * Object containing required information for setting up the media groups
28124 * @param {Tech} settings.tech
28125 * The tech of the player
28126 * @param {Object} settings.requestOptions
28127 * XHR request options used by the segment loaders
28128 * @param {PlaylistLoader} settings.masterPlaylistLoader
28129 * PlaylistLoader for the master source
28130 * @param {VhsHandler} settings.vhs
28131 * VHS SourceHandler
28132 * @param {Object} settings.master
28133 * The parsed master manifest
28134 * @param {Object} settings.mediaTypes
28135 * Object to store the loaders, tracks, and utility methods for each media type
28136 * @param {Function} settings.blacklistCurrentPlaylist
28137 * Blacklists the current rendition and forces a rendition switch.
28138 * @function setupMediaGroups
28139 */
28140
28141 var setupMediaGroups = function setupMediaGroups(settings) {
28142 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
28143 initialize[type](type, settings);
28144 });
28145 var mediaTypes = settings.mediaTypes,
28146 masterPlaylistLoader = settings.masterPlaylistLoader,
28147 tech = settings.tech,
28148 vhs = settings.vhs,
28149 _settings$segmentLoad3 = settings.segmentLoaders,
28150 audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
28151 mainSegmentLoader = _settings$segmentLoad3.main; // setup active group and track getters and change event handlers
28152
28153 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
28154 mediaTypes[type].activeGroup = activeGroup(type, settings);
28155 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
28156 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
28157 mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
28158 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
28159 mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
28160 }); // DO NOT enable the default subtitle or caption track.
28161 // DO enable the default audio track
28162
28163 var audioGroup = mediaTypes.AUDIO.activeGroup();
28164
28165 if (audioGroup) {
28166 var groupId = (audioGroup.filter(function (group) {
28167 return group.default;
28168 })[0] || audioGroup[0]).id;
28169 mediaTypes.AUDIO.tracks[groupId].enabled = true;
28170 mediaTypes.AUDIO.onGroupChanged();
28171 mediaTypes.AUDIO.onTrackChanged();
28172 var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
28173 // track is changed, but needs to be handled here since the track may not be considered
28174 // changed on the first call to onTrackChanged
28175
28176 if (!activeAudioGroup.playlistLoader) {
28177 // either audio is muxed with video or the stream is audio only
28178 mainSegmentLoader.setAudio(true);
28179 } else {
28180 // audio is demuxed
28181 mainSegmentLoader.setAudio(false);
28182 audioSegmentLoader.setAudio(true);
28183 }
28184 }
28185
28186 masterPlaylistLoader.on('mediachange', function () {
28187 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
28188 return mediaTypes[type].onGroupChanged();
28189 });
28190 });
28191 masterPlaylistLoader.on('mediachanging', function () {
28192 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
28193 return mediaTypes[type].onGroupChanging();
28194 });
28195 }); // custom audio track change event handler for usage event
28196
28197 var onAudioTrackChanged = function onAudioTrackChanged() {
28198 mediaTypes.AUDIO.onTrackChanged();
28199 tech.trigger({
28200 type: 'usage',
28201 name: 'vhs-audio-change'
28202 });
28203 tech.trigger({
28204 type: 'usage',
28205 name: 'hls-audio-change'
28206 });
28207 };
28208
28209 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
28210 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
28211 vhs.on('dispose', function () {
28212 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
28213 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
28214 }); // clear existing audio tracks and add the ones we just created
28215
28216 tech.clearTracks('audio');
28217
28218 for (var id in mediaTypes.AUDIO.tracks) {
28219 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
28220 }
28221 };
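// A minimal sketch of the `settings` object setupMediaGroups consumes; the variable
// names are hypothetical, but the keys mirror the real call made from
// setupMasterPlaylistLoaderListeners_ further down in this file:
//
//   setupMediaGroups({
//     sourceType: 'hls',
//     segmentLoaders: { AUDIO: audioSegmentLoader, SUBTITLES: subtitleSegmentLoader, main: mainSegmentLoader },
//     tech: tech,
//     requestOptions: { withCredentials: false, timeout: 45000 },
//     masterPlaylistLoader: masterPlaylistLoader,
//     vhs: vhs,
//     master: masterPlaylistLoader.master,
//     mediaTypes: createMediaTypes(),
//     blacklistCurrentPlaylist: mpc.blacklistCurrentPlaylist.bind(mpc)
//   });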
28222 /**
28223 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
28224 * media type
28225 *
28226 * @return {Object}
28227 * Object to store the loaders, tracks, and utility methods for each media type
28228 * @function createMediaTypes
28229 */
28230
28231 var createMediaTypes = function createMediaTypes() {
28232 var mediaTypes = {};
28233 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
28234 mediaTypes[type] = {
28235 groups: {},
28236 tracks: {},
28237 activePlaylistLoader: null,
28238 activeGroup: noop,
28239 activeTrack: noop,
28240 getActiveGroup: noop,
28241 onGroupChanged: noop,
28242 onTrackChanged: noop,
28243 lastTrack_: null,
28244 logger_: logger("MediaGroups[" + type + "]")
28245 };
28246 });
28247 return mediaTypes;
28248 };
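// The skeleton returned above looks like this for each of the three types (sketch only):
//
//   {
//     AUDIO: { groups: {}, tracks: {}, activePlaylistLoader: null, activeGroup: noop, activeTrack: noop, ... },
//     SUBTITLES: { /* same shape */ },
//     'CLOSED-CAPTIONS': { /* same shape */ }
//   }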
28249
28250 var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
28251 var Vhs$1; // SegmentLoader stats that need to have each loader's
28252 // values summed to calculate the final value
28253
28254 var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
28255
28256 var sumLoaderStat = function sumLoaderStat(stat) {
28257 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
28258 };
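// sumLoaderStat is bound per-stat in the MasterPlaylistController constructor below, so,
// for example, mpc.mediaBytesTransferred_() returns
// audioSegmentLoader_.mediaBytesTransferred + mainSegmentLoader_.mediaBytesTransferred.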
28259
28260 var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
28261 var currentPlaylist = _ref.currentPlaylist,
28262 buffered = _ref.buffered,
28263 currentTime = _ref.currentTime,
28264 nextPlaylist = _ref.nextPlaylist,
28265 bufferLowWaterLine = _ref.bufferLowWaterLine,
28266 bufferHighWaterLine = _ref.bufferHighWaterLine,
28267 duration = _ref.duration,
28268 experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
28269 log = _ref.log;
28270
28271 // we have no other playlist to switch to
28272 if (!nextPlaylist) {
28273 videojs__default["default"].log.warn('We received no playlist to switch to. Please check your stream.');
28274 return false;
28275 }
28276
28277 var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id;
28278
28279 if (!currentPlaylist) {
28280 log(sharedLogLine + " as current playlist is not set");
28281 return true;
28282 } // no need to switch if playlist is the same
28283
28284
28285 if (nextPlaylist.id === currentPlaylist.id) {
28286 return false;
28287 } // determine if current time is in a buffered range.
28288
28289
28290 var isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, then we don't want to take the low water line into account.
28291 // This is because in LIVE, the player plays 3 segments from the end of the
28292 // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
28293 // in those segments, a viewer will never experience a rendition upswitch.
28294
28295 if (!currentPlaylist.endList) {
28296 // For LLHLS live streams, don't switch renditions before playback has started, as it almost
28297 // doubles the time to first playback.
28298 if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {
28299 log("not " + sharedLogLine + " as current playlist is live llhls, but currentTime isn't in buffered.");
28300 return false;
28301 }
28302
28303 log(sharedLogLine + " as current playlist is live");
28304 return true;
28305 }
28306
28307 var forwardBuffer = timeAheadOf(buffered, currentTime);
28308 var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
28309 // duration is below the max potential low water line
28310
28311 if (duration < maxBufferLowWaterLine) {
28312 log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
28313 return true;
28314 }
28315
28316 var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
28317 var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
28318 // we can switch down
28319
28320 if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
28321 var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
28322
28323 if (experimentalBufferBasedABR) {
28324 logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
28325 }
28326
28327 log(logLine);
28328 return true;
28329 } // and if our buffer is higher than the low water line,
28330 // we can switch up
28331
28332
28333 if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
28334 var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
28335
28336 if (experimentalBufferBasedABR) {
28337 _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
28338 }
28339
28340 log(_logLine);
28341 return true;
28342 }
28343
28344 log("not " + sharedLogLine + " as no switching criteria met");
28345 return false;
28346 };
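// A worked example of the decision above, with hypothetical numbers: a 60 second VOD,
// buffered = [[10, 40]] and currentTime = 12, so forwardBuffer is 28 seconds. A switch
// down is allowed immediately, while a switch up would additionally require
// forwardBuffer >= bufferLowWaterLine:
//
//   shouldSwitchToMedia({
//     currentPlaylist: { id: 'hi', endList: true, attributes: { BANDWIDTH: 2e6 } },
//     nextPlaylist: { id: 'lo', attributes: { BANDWIDTH: 1e6 } },
//     buffered: videojs.createTimeRanges([[10, 40]]),
//     currentTime: 12,
//     bufferLowWaterLine: 30,
//     bufferHighWaterLine: 52.5,
//     duration: 60,
//     experimentalBufferBasedABR: false,
//     log: console.log.bind(console)
//   }); // => true (next bandwidth < current bandwidth)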
28347 /**
28348 * the master playlist controller controls all interactions
28349 * between playlists and segment loaders. At this time this mainly
28350 * involves a master playlist and a series of audio playlists,
28351 * if they are available
28352 *
28353 * @class MasterPlaylistController
28354 * @extends videojs.EventTarget
28355 */
28356
28357
28358 var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
28359 inheritsLoose(MasterPlaylistController, _videojs$EventTarget);
28360
28361 function MasterPlaylistController(options) {
28362 var _this;
28363
28364 _this = _videojs$EventTarget.call(this) || this;
28365 var src = options.src,
28366 handleManifestRedirects = options.handleManifestRedirects,
28367 withCredentials = options.withCredentials,
28368 tech = options.tech,
28369 bandwidth = options.bandwidth,
28370 externVhs = options.externVhs,
28371 useCueTags = options.useCueTags,
28372 blacklistDuration = options.blacklistDuration,
28373 enableLowInitialPlaylist = options.enableLowInitialPlaylist,
28374 sourceType = options.sourceType,
28375 cacheEncryptionKeys = options.cacheEncryptionKeys,
28376 experimentalBufferBasedABR = options.experimentalBufferBasedABR,
28377 experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
28378 captionServices = options.captionServices;
28379
28380 if (!src) {
28381 throw new Error('A non-empty playlist URL or JSON manifest string is required');
28382 }
28383
28384 var maxPlaylistRetries = options.maxPlaylistRetries;
28385
28386 if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
28387 maxPlaylistRetries = Infinity;
28388 }
28389
28390 Vhs$1 = externVhs;
28391 _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
28392 _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
28393 _this.withCredentials = withCredentials;
28394 _this.tech_ = tech;
28395 _this.vhs_ = tech.vhs;
28396 _this.sourceType_ = sourceType;
28397 _this.useCueTags_ = useCueTags;
28398 _this.blacklistDuration = blacklistDuration;
28399 _this.maxPlaylistRetries = maxPlaylistRetries;
28400 _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
28401
28402 if (_this.useCueTags_) {
28403 _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
28404 _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
28405 }
28406
28407 _this.requestOptions_ = {
28408 withCredentials: withCredentials,
28409 handleManifestRedirects: handleManifestRedirects,
28410 maxPlaylistRetries: maxPlaylistRetries,
28411 timeout: null
28412 };
28413
28414 _this.on('error', _this.pauseLoading);
28415
28416 _this.mediaTypes_ = createMediaTypes();
28417 _this.mediaSource = new window.MediaSource();
28418 _this.handleDurationChange_ = _this.handleDurationChange_.bind(assertThisInitialized(_this));
28419 _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(assertThisInitialized(_this));
28420 _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(assertThisInitialized(_this));
28421
28422 _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player
28423
28424
28425 _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);
28426
28427 _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
28428 // everything, and the MediaSource should not be detached without a proper disposal
28429
28430
28431 _this.seekable_ = videojs__default["default"].createTimeRanges();
28432 _this.hasPlayed_ = false;
28433 _this.syncController_ = new SyncController(options);
28434 _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
28435 kind: 'metadata',
28436 label: 'segment-metadata'
28437 }, false).track;
28438 _this.decrypter_ = new Decrypter();
28439 _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
28440 _this.inbandTextTracks_ = {};
28441 _this.timelineChangeController_ = new TimelineChangeController();
28442 var segmentLoaderSettings = {
28443 vhs: _this.vhs_,
28444 parse708captions: options.parse708captions,
28445 useDtsForTimestampOffset: options.useDtsForTimestampOffset,
28446 captionServices: captionServices,
28447 mediaSource: _this.mediaSource,
28448 currentTime: _this.tech_.currentTime.bind(_this.tech_),
28449 seekable: function seekable() {
28450 return _this.seekable();
28451 },
28452 seeking: function seeking() {
28453 return _this.tech_.seeking();
28454 },
28455 duration: function duration() {
28456 return _this.duration();
28457 },
28458 hasPlayed: function hasPlayed() {
28459 return _this.hasPlayed_;
28460 },
28461 goalBufferLength: function goalBufferLength() {
28462 return _this.goalBufferLength();
28463 },
28464 bandwidth: bandwidth,
28465 syncController: _this.syncController_,
28466 decrypter: _this.decrypter_,
28467 sourceType: _this.sourceType_,
28468 inbandTextTracks: _this.inbandTextTracks_,
28469 cacheEncryptionKeys: cacheEncryptionKeys,
28470 sourceUpdater: _this.sourceUpdater_,
28471 timelineChangeController: _this.timelineChangeController_,
28472 experimentalExactManifestTimings: options.experimentalExactManifestTimings
28473 }; // The source type check not only determines whether a special DASH playlist loader
28474 // should be used, but also covers the case where the provided src is a vhs-json
28475 // manifest object (instead of a URL). In the case of vhs-json, the default
28476 // PlaylistLoader should be used.
28477
28478 _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);
28479
28480 _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
28481 // combined audio/video or just video when alternate audio track is selected
28482
28483
28484 _this.mainSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
28485 segmentMetadataTrack: _this.segmentMetadataTrack_,
28486 loaderType: 'main'
28487 }), options); // alternate audio track
28488
28489 _this.audioSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
28490 loaderType: 'audio'
28491 }), options);
28492 _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
28493 loaderType: 'vtt',
28494 featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks,
28495 loadVttJs: function loadVttJs() {
28496 return new Promise(function (resolve, reject) {
28497 function onLoad() {
28498 tech.off('vttjserror', onError);
28499 resolve();
28500 }
28501
28502 function onError() {
28503 tech.off('vttjsloaded', onLoad);
28504 reject();
28505 }
28506
28507 tech.one('vttjsloaded', onLoad);
28508 tech.one('vttjserror', onError); // safe to call multiple times, script will be loaded only once:
28509
28510 tech.addWebVttScript_();
28511 });
28512 }
28513 }), options);
28514
28515 _this.setupSegmentLoaderListeners_();
28516
28517 if (_this.experimentalBufferBasedABR) {
28518 _this.masterPlaylistLoader_.one('loadedplaylist', function () {
28519 return _this.startABRTimer_();
28520 });
28521
28522 _this.tech_.on('pause', function () {
28523 return _this.stopABRTimer_();
28524 });
28525
28526 _this.tech_.on('play', function () {
28527 return _this.startABRTimer_();
28528 });
28529 } // Create SegmentLoader stat-getters
28530 // mediaRequests_
28531 // mediaRequestsAborted_
28532 // mediaRequestsTimedout_
28533 // mediaRequestsErrored_
28534 // mediaTransferDuration_
28535 // mediaBytesTransferred_
28536 // mediaAppends_
28537
28538
28539 loaderStats.forEach(function (stat) {
28540 _this[stat + '_'] = sumLoaderStat.bind(assertThisInitialized(_this), stat);
28541 });
28542 _this.logger_ = logger('MPC');
28543 _this.triggeredFmp4Usage = false;
28544
28545 if (_this.tech_.preload() === 'none') {
28546 _this.loadOnPlay_ = function () {
28547 _this.loadOnPlay_ = null;
28548
28549 _this.masterPlaylistLoader_.load();
28550 };
28551
28552 _this.tech_.one('play', _this.loadOnPlay_);
28553 } else {
28554 _this.masterPlaylistLoader_.load();
28555 }
28556
28557 _this.timeToLoadedData__ = -1;
28558 _this.mainAppendsToLoadedData__ = -1;
28559 _this.audioAppendsToLoadedData__ = -1;
28560 var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)
28561
28562 _this.tech_.one(event, function () {
28563 var timeToLoadedDataStart = Date.now();
28564
28565 _this.tech_.one('loadeddata', function () {
28566 _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
28567 _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
28568 _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
28569 });
28570 });
28571
28572 return _this;
28573 }
28574
28575 var _proto = MasterPlaylistController.prototype;
28576
28577 _proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
28578 return this.mainAppendsToLoadedData__;
28579 };
28580
28581 _proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
28582 return this.audioAppendsToLoadedData__;
28583 };
28584
28585 _proto.appendsToLoadedData_ = function appendsToLoadedData_() {
28586 var main = this.mainAppendsToLoadedData_();
28587 var audio = this.audioAppendsToLoadedData_();
28588
28589 if (main === -1 || audio === -1) {
28590 return -1;
28591 }
28592
28593 return main + audio;
28594 };
28595
28596 _proto.timeToLoadedData_ = function timeToLoadedData_() {
28597 return this.timeToLoadedData__;
28598 }
28599 /**
28600 * Run selectPlaylist and switch to the new playlist if we should
28601 *
28602 * @param {string} [reason=abr] a reason for why the ABR check is made
28603 * @private
28604 */
28605 ;
28606
28607 _proto.checkABR_ = function checkABR_(reason) {
28608 if (reason === void 0) {
28609 reason = 'abr';
28610 }
28611
28612 var nextPlaylist = this.selectPlaylist();
28613
28614 if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
28615 this.switchMedia_(nextPlaylist, reason);
28616 }
28617 };
28618
28619 _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
28620 var oldMedia = this.media();
28621 var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
28622 var newId = playlist.id || playlist.uri;
28623
28624 if (oldId && oldId !== newId) {
28625 this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
28626 this.tech_.trigger({
28627 type: 'usage',
28628 name: "vhs-rendition-change-" + cause
28629 });
28630 }
28631
28632 this.masterPlaylistLoader_.media(playlist, delay);
28633 }
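// Usage sketch; the causes used elsewhere in this file include 'initial', 'fast-quality',
// 'exclude' and the default 'abr':
//
//   this.switchMedia_(nextPlaylist, 'abr');
//   // when the playlist id actually changes, this also fires a 'usage' event
//   // named "vhs-rendition-change-abr"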
28634 /**
28635 * Start a timer that periodically calls checkABR_
28636 *
28637 * @private
28638 */
28639 ;
28640
28641 _proto.startABRTimer_ = function startABRTimer_() {
28642 var _this2 = this;
28643
28644 this.stopABRTimer_();
28645 this.abrTimer_ = window.setInterval(function () {
28646 return _this2.checkABR_();
28647 }, 250);
28648 }
28649 /**
28650 * Stop the timer that periodically calls checkABR_
28651 *
28652 * @private
28653 */
28654 ;
28655
28656 _proto.stopABRTimer_ = function stopABRTimer_() {
28657 // a 'pause' fired while the user is scrubbing is transient, so don't stop the ABR timer for it.
28658 // The scrubbing getter will be added to Video.js in version 7.11.
28659 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
28660 return;
28661 }
28662
28663 window.clearInterval(this.abrTimer_);
28664 this.abrTimer_ = null;
28665 }
28666 /**
28667 * Get a list of playlists for the currently selected audio playlist
28668 *
28669 * @return {Array} the array of audio playlists
28670 */
28671 ;
28672
28673 _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
28674 var master = this.master();
28675 var defaultPlaylists = master && master.playlists || []; // if we don't have any audio groups then we can only
28676 // assume that the audio tracks are contained in the master's
28677 // playlist array, so use that or an empty array.
28678
28679 if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
28680 return defaultPlaylists;
28681 }
28682
28683 var AUDIO = master.mediaGroups.AUDIO;
28684 var groupKeys = Object.keys(AUDIO);
28685 var track; // get the current active track
28686
28687 if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
28688 track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't set up yet
28689 } else {
28690 // default group is `main` or just the first group.
28691 var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
28692
28693 for (var label in defaultGroup) {
28694 if (defaultGroup[label].default) {
28695 track = {
28696 label: label
28697 };
28698 break;
28699 }
28700 }
28701 } // no active track means no playlists.
28702
28703
28704 if (!track) {
28705 return defaultPlaylists;
28706 }
28707
28708 var playlists = []; // get all of the playlists that are possible for the
28709 // active track.
28710
28711 for (var group in AUDIO) {
28712 if (AUDIO[group][track.label]) {
28713 var properties = AUDIO[group][track.label];
28714
28715 if (properties.playlists && properties.playlists.length) {
28716 playlists.push.apply(playlists, properties.playlists);
28717 } else if (properties.uri) {
28718 playlists.push(properties);
28719 } else if (master.playlists.length) {
28720 // if an audio group does not have a uri
28721 // see if we have main playlists that use it as a group.
28722 // if we do then add those to the playlists list.
28723 for (var i = 0; i < master.playlists.length; i++) {
28724 var playlist = master.playlists[i];
28725
28726 if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
28727 playlists.push(playlist);
28728 }
28729 }
28730 }
28731 }
28732 }
28733
28734 if (!playlists.length) {
28735 return defaultPlaylists;
28736 }
28737
28738 return playlists;
28739 }
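// Resolution order sketch for getAudioTrackPlaylists_, using a hypothetical manifest:
//
//   // mediaGroups.AUDIO = {
//   //   'aud-lo': { en: { default: true, uri: 'en-lo.m3u8' } },
//   //   'aud-hi': { en: { uri: 'en-hi.m3u8' } }
//   // }
//   // getAudioTrackPlaylists_() => [{ ...uri: 'en-lo.m3u8' }, { ...uri: 'en-hi.m3u8' }]
//
// Group entries with a `playlists` array win over bare `uri` entries; groups with
// neither fall back to main playlists whose attributes.AUDIO names the group; and when
// nothing matches at all, master.playlists is returned unchanged.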
28740 /**
28741 * Register event handlers on the master playlist loader. A helper
28742 * function for construction time.
28743 *
28744 * @private
28745 */
28746 ;
28747
28748 _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
28749 var _this3 = this;
28750
28751 this.masterPlaylistLoader_.on('loadedmetadata', function () {
28752 var media = _this3.masterPlaylistLoader_.media();
28753
28754 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
28755 // timeout the request.
28756
28757 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
28758 _this3.requestOptions_.timeout = 0;
28759 } else {
28760 _this3.requestOptions_.timeout = requestTimeout;
28761 } // if this isn't a live video and preload permits, start
28762 // downloading segments
28763
28764
28765 if (media.endList && _this3.tech_.preload() !== 'none') {
28766 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
28767
28768 _this3.mainSegmentLoader_.load();
28769 }
28770
28771 setupMediaGroups({
28772 sourceType: _this3.sourceType_,
28773 segmentLoaders: {
28774 AUDIO: _this3.audioSegmentLoader_,
28775 SUBTITLES: _this3.subtitleSegmentLoader_,
28776 main: _this3.mainSegmentLoader_
28777 },
28778 tech: _this3.tech_,
28779 requestOptions: _this3.requestOptions_,
28780 masterPlaylistLoader: _this3.masterPlaylistLoader_,
28781 vhs: _this3.vhs_,
28782 master: _this3.master(),
28783 mediaTypes: _this3.mediaTypes_,
28784 blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
28785 });
28786
28787 _this3.triggerPresenceUsage_(_this3.master(), media);
28788
28789 _this3.setupFirstPlay();
28790
28791 if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
28792 _this3.trigger('selectedinitialmedia');
28793 } else {
28794 // We must wait for the active audio playlist loader to
28795 // finish setting up before triggering this event so the
28796 // representations API and EME setup is correct
28797 _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
28798 _this3.trigger('selectedinitialmedia');
28799 });
28800 }
28801 });
28802 this.masterPlaylistLoader_.on('loadedplaylist', function () {
28803 if (_this3.loadOnPlay_) {
28804 _this3.tech_.off('play', _this3.loadOnPlay_);
28805 }
28806
28807 var updatedPlaylist = _this3.masterPlaylistLoader_.media();
28808
28809 if (!updatedPlaylist) {
28810 // exclude any variants that are not supported by the browser before selecting
28811 // an initial media as the playlist selectors do not consider browser support
28812 _this3.excludeUnsupportedVariants_();
28813
28814 var selectedMedia;
28815
28816 if (_this3.enableLowInitialPlaylist) {
28817 selectedMedia = _this3.selectInitialPlaylist();
28818 }
28819
28820 if (!selectedMedia) {
28821 selectedMedia = _this3.selectPlaylist();
28822 }
28823
28824 if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
28825 return;
28826 }
28827
28828 _this3.initialMedia_ = selectedMedia;
28829
28830 _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
28831 // fire again since the playlist will be requested. In the case of vhs-json
28832 // (where the manifest object is provided as the source), when the media
28833 // playlist's `segments` list is already available, a media playlist won't be
28834 // requested, and loadedplaylist won't fire again, so the playlist handler must be
28835 // called on its own here.
28836
28837
28838 var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
28839
28840 if (!haveJsonSource) {
28841 return;
28842 }
28843
28844 updatedPlaylist = _this3.initialMedia_;
28845 }
28846
28847 _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
28848 });
28849 this.masterPlaylistLoader_.on('error', function () {
28850 _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
28851 });
28852 this.masterPlaylistLoader_.on('mediachanging', function () {
28853 _this3.mainSegmentLoader_.abort();
28854
28855 _this3.mainSegmentLoader_.pause();
28856 });
28857 this.masterPlaylistLoader_.on('mediachange', function () {
28858 var media = _this3.masterPlaylistLoader_.media();
28859
28860 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
28861 // timeout the request.
28862
28863 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
28864 _this3.requestOptions_.timeout = 0;
28865 } else {
28866 _this3.requestOptions_.timeout = requestTimeout;
28867 }
28868
28869 _this3.masterPlaylistLoader_.load(); // TODO: Create a new event on the PlaylistLoader that signals
28870 // that the segments have changed in some way and use that to
28871 // update the SegmentLoader instead of doing it twice here and
28872 // on `loadedplaylist`
28873
28874
28875 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
28876
28877 _this3.mainSegmentLoader_.load();
28878
28879 _this3.tech_.trigger({
28880 type: 'mediachange',
28881 bubbles: true
28882 });
28883 });
28884 this.masterPlaylistLoader_.on('playlistunchanged', function () {
28885 var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
28886 // excluded for not-changing. We likely just have a really slowly updating
28887 // playlist.
28888
28889
28890 if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
28891 return;
28892 }
28893
28894 var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
28895
28896 if (playlistOutdated) {
28897 // Playlist has stopped updating and we're stuck at its end. Try to
28898 // blacklist it and switch to another playlist in the hope that that
28899 // one is updating (and give the player a chance to re-adjust to the
28900 // safe live point).
28901 _this3.blacklistCurrentPlaylist({
28902 message: 'Playlist no longer updating.',
28903 reason: 'playlist-unchanged'
28904 }); // useful for monitoring QoS
28905
28906
28907 _this3.tech_.trigger('playliststuck');
28908 }
28909 });
28910 this.masterPlaylistLoader_.on('renditiondisabled', function () {
28911 _this3.tech_.trigger({
28912 type: 'usage',
28913 name: 'vhs-rendition-disabled'
28914 });
28915
28916 _this3.tech_.trigger({
28917 type: 'usage',
28918 name: 'hls-rendition-disabled'
28919 });
28920 });
28921 this.masterPlaylistLoader_.on('renditionenabled', function () {
28922 _this3.tech_.trigger({
28923 type: 'usage',
28924 name: 'vhs-rendition-enabled'
28925 });
28926
28927 _this3.tech_.trigger({
28928 type: 'usage',
28929 name: 'hls-rendition-enabled'
28930 });
28931 });
28932 }
28933 /**
28934 * Given an updated media playlist (whether it was loaded for the first time, or
28935 * refreshed for live playlists), update any relevant properties and state to reflect
28936 * changes in the media that should be accounted for (e.g., cues and duration).
28937 *
28938 * @param {Object} updatedPlaylist the updated media playlist object
28939 *
28940 * @private
28941 */
28942 ;
28943
28944 _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
28945 if (this.useCueTags_) {
28946 this.updateAdCues_(updatedPlaylist);
28947 } // TODO: Create a new event on the PlaylistLoader that signals
28948 // that the segments have changed in some way and use that to
28949 // update the SegmentLoader instead of doing it twice here and
28950 // on `mediachange`
28951
28952
28953 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
28954 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
28955 // as it is possible that it was temporarily stopped while waiting for
28956 // a playlist (e.g., in case the playlist errored and we re-requested it).
28957
28958 if (!this.tech_.paused()) {
28959 this.mainSegmentLoader_.load();
28960
28961 if (this.audioSegmentLoader_) {
28962 this.audioSegmentLoader_.load();
28963 }
28964 }
28965 }
28966 /**
28967 * A helper function for triggering presence usage events once per source
28968 *
28969 * @private
28970 */
28971 ;
28972
28973 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
28974 var mediaGroups = master.mediaGroups || {};
28975 var defaultDemuxed = true;
28976 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
28977
28978 for (var mediaGroup in mediaGroups.AUDIO) {
28979 for (var label in mediaGroups.AUDIO[mediaGroup]) {
28980 var properties = mediaGroups.AUDIO[mediaGroup][label];
28981
28982 if (!properties.uri) {
28983 defaultDemuxed = false;
28984 }
28985 }
28986 }
28987
28988 if (defaultDemuxed) {
28989 this.tech_.trigger({
28990 type: 'usage',
28991 name: 'vhs-demuxed'
28992 });
28993 this.tech_.trigger({
28994 type: 'usage',
28995 name: 'hls-demuxed'
28996 });
28997 }
28998
28999 if (Object.keys(mediaGroups.SUBTITLES).length) {
29000 this.tech_.trigger({
29001 type: 'usage',
29002 name: 'vhs-webvtt'
29003 });
29004 this.tech_.trigger({
29005 type: 'usage',
29006 name: 'hls-webvtt'
29007 });
29008 }
29009
29010 if (Vhs$1.Playlist.isAes(media)) {
29011 this.tech_.trigger({
29012 type: 'usage',
29013 name: 'vhs-aes'
29014 });
29015 this.tech_.trigger({
29016 type: 'usage',
29017 name: 'hls-aes'
29018 });
29019 }
29020
29021 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
29022 this.tech_.trigger({
29023 type: 'usage',
29024 name: 'vhs-alternate-audio'
29025 });
29026 this.tech_.trigger({
29027 type: 'usage',
29028 name: 'hls-alternate-audio'
29029 });
29030 }
29031
29032 if (this.useCueTags_) {
29033 this.tech_.trigger({
29034 type: 'usage',
29035 name: 'vhs-playlist-cue-tags'
29036 });
29037 this.tech_.trigger({
29038 type: 'usage',
29039 name: 'hls-playlist-cue-tags'
29040 });
29041 }
29042 };
29043
29044 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
29045 var currentPlaylist = this.masterPlaylistLoader_.media() || this.masterPlaylistLoader_.pendingMedia_;
29046 var currentTime = this.tech_.currentTime();
29047 var bufferLowWaterLine = this.bufferLowWaterLine();
29048 var bufferHighWaterLine = this.bufferHighWaterLine();
29049 var buffered = this.tech_.buffered();
29050 return shouldSwitchToMedia({
29051 buffered: buffered,
29052 currentTime: currentTime,
29053 currentPlaylist: currentPlaylist,
29054 nextPlaylist: nextPlaylist,
29055 bufferLowWaterLine: bufferLowWaterLine,
29056 bufferHighWaterLine: bufferHighWaterLine,
29057 duration: this.duration(),
29058 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
29059 log: this.logger_
29060 });
29061 }
29062 /**
29063 * Register event handlers on the segment loaders. A helper function
29064 * for construction time.
29065 *
29066 * @private
29067 */
29068 ;
29069
29070 _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
29071 var _this4 = this;
29072
29073 this.mainSegmentLoader_.on('bandwidthupdate', function () {
29074 // Whether buffer-based ABR or another ABR strategy is used, on a bandwidth change it's
29075 // useful to check whether a rendition switch should be made.
29076 _this4.checkABR_('bandwidthupdate');
29077
29078 _this4.tech_.trigger('bandwidthupdate');
29079 });
29080 this.mainSegmentLoader_.on('timeout', function () {
29081 if (_this4.experimentalBufferBasedABR) {
29082 // If a rendition change is needed, then it would've been done on `bandwidthupdate`.
29083 // Here the only consideration is that for buffer based ABR there's no guarantee
29084 // of an immediate switch (since the bandwidth is averaged with a timeout
29085 // bandwidth value of 1), so force a load on the segment loader to keep it going.
29086 _this4.mainSegmentLoader_.load();
29087 }
29088 }); // `progress` events are not a reliable enough bandwidth measure to trigger buffer-
29089 // based ABR.
29090
29091 if (!this.experimentalBufferBasedABR) {
29092 this.mainSegmentLoader_.on('progress', function () {
29093 _this4.trigger('progress');
29094 });
29095 }
29096
29097 this.mainSegmentLoader_.on('error', function () {
29098 _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
29099 });
29100 this.mainSegmentLoader_.on('appenderror', function () {
29101 _this4.error = _this4.mainSegmentLoader_.error_;
29102
29103 _this4.trigger('error');
29104 });
29105 this.mainSegmentLoader_.on('syncinfoupdate', function () {
29106 _this4.onSyncInfoUpdate_();
29107 });
29108 this.mainSegmentLoader_.on('timestampoffset', function () {
29109 _this4.tech_.trigger({
29110 type: 'usage',
29111 name: 'vhs-timestamp-offset'
29112 });
29113
29114 _this4.tech_.trigger({
29115 type: 'usage',
29116 name: 'hls-timestamp-offset'
29117 });
29118 });
29119 this.audioSegmentLoader_.on('syncinfoupdate', function () {
29120 _this4.onSyncInfoUpdate_();
29121 });
29122 this.audioSegmentLoader_.on('appenderror', function () {
29123 _this4.error = _this4.audioSegmentLoader_.error_;
29124
29125 _this4.trigger('error');
29126 });
29127 this.mainSegmentLoader_.on('ended', function () {
29128 _this4.logger_('main segment loader ended');
29129
29130 _this4.onEndOfStream();
29131 });
29132 this.mainSegmentLoader_.on('earlyabort', function (event) {
29133 // never try to early abort with the new ABR algorithm
29134 if (_this4.experimentalBufferBasedABR) {
29135 return;
29136 }
29137
29138 _this4.delegateLoaders_('all', ['abort']);
29139
29140 _this4.blacklistCurrentPlaylist({
29141 message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
29142 }, ABORT_EARLY_BLACKLIST_SECONDS);
29143 });
29144
29145 var updateCodecs = function updateCodecs() {
29146 if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
29147 return _this4.tryToCreateSourceBuffers_();
29148 }
29149
29150 var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
29151
29152
29153 if (!codecs) {
29154 return;
29155 }
29156
29157 _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
29158 };
29159
29160 this.mainSegmentLoader_.on('trackinfo', updateCodecs);
29161 this.audioSegmentLoader_.on('trackinfo', updateCodecs);
29162 this.mainSegmentLoader_.on('fmp4', function () {
29163 if (!_this4.triggeredFmp4Usage) {
29164 _this4.tech_.trigger({
29165 type: 'usage',
29166 name: 'vhs-fmp4'
29167 });
29168
29169 _this4.tech_.trigger({
29170 type: 'usage',
29171 name: 'hls-fmp4'
29172 });
29173
29174 _this4.triggeredFmp4Usage = true;
29175 }
29176 });
29177 this.audioSegmentLoader_.on('fmp4', function () {
29178 if (!_this4.triggeredFmp4Usage) {
29179 _this4.tech_.trigger({
29180 type: 'usage',
29181 name: 'vhs-fmp4'
29182 });
29183
29184 _this4.tech_.trigger({
29185 type: 'usage',
29186 name: 'hls-fmp4'
29187 });
29188
29189 _this4.triggeredFmp4Usage = true;
29190 }
29191 });
29192 this.audioSegmentLoader_.on('ended', function () {
29193 _this4.logger_('audioSegmentLoader ended');
29194
29195 _this4.onEndOfStream();
29196 });
29197 };
29198
29199 _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
29200 return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded; // summed across both loaders
29201 }
29202 /**
29203 * Call load on our SegmentLoaders
29204 */
29205 ;
29206
29207 _proto.load = function load() {
29208 this.mainSegmentLoader_.load();
29209
29210 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29211 this.audioSegmentLoader_.load();
29212 }
29213
29214 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
29215 this.subtitleSegmentLoader_.load();
29216 }
29217 }
29218 /**
29219 * Re-tune playback quality level for the current player
29220 * conditions without performing destructive actions, like
29221 * removing already buffered content
29222 *
29223 * @private
29224 * @deprecated
29225 */
29226 ;
29227
29228 _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
29229 if (media === void 0) {
29230 media = this.selectPlaylist();
29231 }
29232
29233 this.fastQualityChange_(media);
29234 }
29235 /**
29236 * Re-tune playback quality level for the current player
29237 * conditions. This method will perform destructive actions like removing
29238 * already buffered content in order to readjust the currently active
29239 * playlist quickly. This is good for manual quality changes
29240 *
29241 * @private
29242 */
29243 ;
29244
29245 _proto.fastQualityChange_ = function fastQualityChange_(media) {
29246 var _this5 = this;
29247
29248 if (media === void 0) {
29249 media = this.selectPlaylist();
29250 }
29251
29252 if (media === this.masterPlaylistLoader_.media()) {
29253 this.logger_('skipping fastQualityChange because new media is same as old');
29254 return;
29255 }
29256
29257 this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
29258 // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
29259 // ahead is roughly the minimum that will accomplish this across a variety of content
29260 // in IE and Edge, but seeking in place is sufficient on all other browsers)
29261 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
29262 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
29263
29264 this.mainSegmentLoader_.resetEverything(function () {
29265 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
29266 // from the previously enabled rendition to load before the new playlist has finished loading
29267 if (videojs__default["default"].browser.IE_VERSION || videojs__default["default"].browser.IS_EDGE) {
29268 _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
29269 } else {
29270 _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
29271 }
29272 }); // don't need to reset audio as it is reset when media changes
29273 }
29274 /**
29275 * Begin playback.
29276 */
29277 ;
29278
29279 _proto.play = function play() {
29280 if (this.setupFirstPlay()) {
29281 return;
29282 }
29283
29284 if (this.tech_.ended()) {
29285 this.tech_.setCurrentTime(0);
29286 }
29287
29288 if (this.hasPlayed_) {
29289 this.load();
29290 }
29291
29292 var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
29293 // seek forward to the live point
29294
29295 if (this.tech_.duration() === Infinity) {
29296 if (this.tech_.currentTime() < seekable.start(0)) {
29297 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
29298 }
29299 }
29300 }
29301 /**
29302 * Seek to the latest media position if this is a live video and the
29303 * player and video are loaded and initialized.
29304 */
29305 ;
29306
29307 _proto.setupFirstPlay = function setupFirstPlay() {
29308 var _this6 = this;
29309
29310 var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
29311 // If 1) there is no active media
29312 // 2) the player is paused
29313 // 3) the first play has already been setup
29314 // then exit early
29315
29316 if (!media || this.tech_.paused() || this.hasPlayed_) {
29317 return false;
29318 } // when the video is a live stream
29319
29320
29321 if (!media.endList) {
29322 var seekable = this.seekable();
29323
29324 if (!seekable.length) {
29325 // without a seekable range, the player cannot seek to begin buffering at the live
29326 // point
29327 return false;
29328 }
29329
29330 if (videojs__default["default"].browser.IE_VERSION && this.tech_.readyState() === 0) {
29331 // IE11 throws an InvalidStateError if you try to set currentTime while the
29332 // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
29333 this.tech_.one('loadedmetadata', function () {
29334 _this6.trigger('firstplay');
29335
29336 _this6.tech_.setCurrentTime(seekable.end(0));
29337
29338 _this6.hasPlayed_ = true;
29339 });
29340 return false;
29341 } // trigger firstplay to inform the source handler to ignore the next seek event
29342
29343
29344 this.trigger('firstplay'); // seek to the live point
29345
29346 this.tech_.setCurrentTime(seekable.end(0));
29347 }
29348
29349 this.hasPlayed_ = true; // we can begin loading now that everything is ready
29350
29351 this.load();
29352 return true;
29353 }
29354 /**
29355 * handle the sourceopen event on the MediaSource
29356 *
29357 * @private
29358 */
29359 ;
29360
29361 _proto.handleSourceOpen_ = function handleSourceOpen_() {
29362 // Only attempt to create the source buffer if none already exist.
29363 // handleSourceOpen is also called when we are "re-opening" a source buffer
29364 // after `endOfStream` has been called (in response to a seek for instance)
29365 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
29366 // code in video.js but is required because play() must be invoked
29367 // *after* the media source has opened.
29368
29369 if (this.tech_.autoplay()) {
29370 var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
29371 // on browsers which return a promise
29372
29373 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
29374 playPromise.then(null, function (e) {});
29375 }
29376 }
29377
29378 this.trigger('sourceopen');
29379 }
29380 /**
29381 * handle the sourceended event on the MediaSource
29382 *
29383 * @private
29384 */
29385 ;
29386
29387 _proto.handleSourceEnded_ = function handleSourceEnded_() {
29388 if (!this.inbandTextTracks_.metadataTrack_) {
29389 return;
29390 }
29391
29392 var cues = this.inbandTextTracks_.metadataTrack_.cues;
29393
29394 if (!cues || !cues.length) {
29395 return;
29396 }
29397
29398 var duration = this.duration();
29399 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
29400 }
29401 /**
29402 * handle the durationchange event on the MediaSource
29403 *
29404 * @private
29405 */
29406 ;
29407
29408 _proto.handleDurationChange_ = function handleDurationChange_() {
29409 this.tech_.trigger('durationchange');
29410 }
29411 /**
29412 * Calls endOfStream on the media source when all active stream types have called
29413 * endOfStream
29414 *
29415 * @param {string} streamType
29416 * Stream type of the segment loader that called endOfStream
29417 * @private
29418 */
29419 ;
29420
29421 _proto.onEndOfStream = function onEndOfStream() {
29422 var isEndOfStream = this.mainSegmentLoader_.ended_;
29423
29424 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29425 var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
29426
29427 if (!mainMediaInfo || mainMediaInfo.hasVideo) {
29428 // if we do not know if the main segment loader contains video yet or if we
29429 // definitively know the main segment loader contains video, then we need to wait
29430 // for both main and audio segment loaders to call endOfStream
29431 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
29432 } else {
29433 // otherwise just rely on the audio loader
29434 isEndOfStream = this.audioSegmentLoader_.ended_;
29435 }
29436 }
29437
29438 if (!isEndOfStream) {
29439 return;
29440 }
29441
29442 this.stopABRTimer_();
29443 this.sourceUpdater_.endOfStream();
29444 }
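// End-of-stream decision sketch for the method above:
//
//   no alternate audio active    -> endOfStream once mainSegmentLoader_.ended_
//   main has video (or unknown)  -> endOfStream once main AND audio loaders have ended
//   main is audio-only           -> endOfStream once audioSegmentLoader_.ended_ alone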
29445 /**
29446 * Check if a playlist has stopped being updated
29447 *
29448 * @param {Object} playlist the media playlist object
29449 * @return {boolean} whether the playlist has stopped being updated or not
29450 */
29451 ;
29452
29453 _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
29454 var seekable = this.seekable();
29455
29456 if (!seekable.length) {
29457 // playlist doesn't have enough information to determine whether we are stuck
29458 return false;
29459 }
29460
29461 var expired = this.syncController_.getExpiredTime(playlist, this.duration());
29462
29463 if (expired === null) {
29464 return false;
29465 } // does not use the safe live end to calculate playlist end, since we
29466 // don't want to say we are stuck while there is still content
29467
29468
29469 var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
29470 var currentTime = this.tech_.currentTime();
29471 var buffered = this.tech_.buffered();
29472
29473 if (!buffered.length) {
29474 // return true if the playhead reached the absolute end of the playlist
29475 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
29476 }
29477
29478 var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
29479 // end of playlist
29480
29481 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
29482 }
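// Hypothetical numbers for the check above: if SAFE_TIME_DELTA were 0.1 seconds,
// absolutePlaylistEnd = 120, bufferedEnd = 119.95 and currentTime = 119.9, then both
// (119.95 - 119.9) and (120 - 119.95) fall within the delta and the playlist is
// considered stuck; with currentTime = 100 it is not.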
29483 /**
29484 * Blacklists a playlist when an error occurs for a set amount of time
29485 * making it unavailable for selection by the rendition selection algorithm
29486 * and then forces a new playlist (rendition) selection.
29487 *
29488 * @param {Object=} error an optional error that may include the playlist
29489 * to blacklist
29490 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
29491 * playlist
29492 */
29493 ;
29494
29495 _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
29496 if (error === void 0) {
29497 error = {};
29498 }
29499
29500 // If the `error` was generated by the playlist loader, it will contain
29501 // the playlist we were trying to load (but failed) and that should be
29502 // blacklisted instead of the currently selected playlist which is likely
29503 // out-of-date in this scenario
29504 var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
29505 blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
29506 // trying to load the master OR while we were disposing of the tech
29507
29508 if (!currentPlaylist) {
29509 this.error = error;
29510
29511 if (this.mediaSource.readyState !== 'open') {
29512 this.trigger('error');
29513 } else {
29514 this.sourceUpdater_.endOfStream('network');
29515 }
29516
29517 return;
29518 }
29519
29520 currentPlaylist.playlistErrors_++;
29521 var playlists = this.masterPlaylistLoader_.master.playlists;
29522 var enabledPlaylists = playlists.filter(isEnabled);
29523 var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
29524 // forever
29525
29526 if (playlists.length === 1 && blacklistDuration !== Infinity) {
29527 videojs__default["default"].log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
29528 this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
29529
29530 return this.masterPlaylistLoader_.load(isFinalRendition);
29531 }
29532
29533 if (isFinalRendition) {
29534 // Since we're on the final non-blacklisted playlist, and we're about to blacklist
29535 // it, instead of erring the player or retrying this playlist, clear out the current
29536 // blacklist. This allows other playlists to be attempted in case any have been
29537 // fixed.
29538 var reincluded = false;
29539 playlists.forEach(function (playlist) {
29540 // skip current playlist which is about to be blacklisted
29541 if (playlist === currentPlaylist) {
29542 return;
29543 }
29544
29545 var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
29546
29547 if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
29548 reincluded = true;
29549 delete playlist.excludeUntil;
29550 }
29551 });
29552
29553 if (reincluded) {
29554 videojs__default["default"].log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
29555 // playlist. This is needed for users relying on the retryplaylist event to catch a
29556 // case where the player might be stuck and looping through "dead" playlists.
29557
29558 this.tech_.trigger('retryplaylist');
29559 }
29560 } // Blacklist this playlist
29561
29562
29563 var excludeUntil;
29564
29565 if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
29566 excludeUntil = Infinity;
29567 } else {
29568 excludeUntil = Date.now() + blacklistDuration * 1000;
29569 }
29570
29571 currentPlaylist.excludeUntil = excludeUntil;
29572
29573 if (error.reason) {
29574 currentPlaylist.lastExcludeReason_ = error.reason;
29575 }
29576
29577 this.tech_.trigger('blacklistplaylist');
29578 this.tech_.trigger({
29579 type: 'usage',
29580 name: 'vhs-rendition-blacklisted'
29581 });
29582 this.tech_.trigger({
29583 type: 'usage',
29584 name: 'hls-rendition-blacklisted'
29585 }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
29586 // Would be something like media().id !== currentPlaylist.id and we would need something
29587 // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
29588 // from loading a new playlist on any blacklist.
29589 // Select a new playlist
29590
29591 var nextPlaylist = this.selectPlaylist();
29592
29593 if (!nextPlaylist) {
29594 this.error = 'Playback cannot continue. No available working or supported playlists.';
29595 this.trigger('error');
29596 return;
29597 }
29598
29599 var logFn = error.internal ? this.logger_ : videojs__default["default"].log.warn;
29600 var errorMessage = error.message ? ' ' + error.message : '';
29601 logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders
29602
29603 if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
29604 this.delegateLoaders_('audio', ['abort', 'pause']);
29605 } // if subtitle group changed reset subtitle loaders
29606
29607
29608 if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
29609 this.delegateLoaders_('subtitle', ['abort', 'pause']);
29610 }
29611
29612 this.delegateLoaders_('main', ['abort', 'pause']);
29613 var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
29614 var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay the switch if it's the final rendition or if the last refresh was within half the target duration
29615
29616 return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
29617 }
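// Exclusion arithmetic sketch, with hypothetical settings blacklistDuration = 300 and
// maxPlaylistRetries = 2: the first two errors on a playlist set
// excludeUntil = Date.now() + 300 * 1000 (five minutes), while the third error, since
// playlistErrors_ (3) > maxPlaylistRetries (2), sets excludeUntil = Infinity.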
29618 /**
29619 * Pause all segment/playlist loaders
29620 */
29621 ;
29622
29623 _proto.pauseLoading = function pauseLoading() {
29624 this.delegateLoaders_('all', ['abort', 'pause']);
29625 this.stopABRTimer_();
29626 }
29627 /**
29628 * Call a set of functions in order on playlist loaders, segment loaders,
29629 * or both types of loaders.
29630 *
29631 * @param {string} filter
29632 * Filter loaders that should call fnNames using a string. Can be:
29633 * * all - run on all loaders
29634 * * audio - run on all audio loaders
29635 * * subtitle - run on all subtitle loaders
29636 * * main - run on the main/master loaders
29637 *
29638 * @param {Array|string} fnNames
29639 * A string or array of function names to call.
29640 */
29641 ;
29642
29643 _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
29644 var _this7 = this;
29645
29646 var loaders = [];
29647 var dontFilterPlaylist = filter === 'all';
29648
29649 if (dontFilterPlaylist || filter === 'main') {
29650 loaders.push(this.masterPlaylistLoader_);
29651 }
29652
29653 var mediaTypes = [];
29654
29655 if (dontFilterPlaylist || filter === 'audio') {
29656 mediaTypes.push('AUDIO');
29657 }
29658
29659 if (dontFilterPlaylist || filter === 'subtitle') {
29660 mediaTypes.push('CLOSED-CAPTIONS');
29661 mediaTypes.push('SUBTITLES');
29662 }
29663
29664 mediaTypes.forEach(function (mediaType) {
29665 var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
29666
29667 if (loader) {
29668 loaders.push(loader);
29669 }
29670 });
29671 ['main', 'audio', 'subtitle'].forEach(function (name) {
29672 var loader = _this7[name + "SegmentLoader_"];
29673
29674 if (loader && (filter === name || filter === 'all')) {
29675 loaders.push(loader);
29676 }
29677 });
29678 loaders.forEach(function (loader) {
29679 return fnNames.forEach(function (fnName) {
29680 if (typeof loader[fnName] === 'function') {
29681 loader[fnName]();
29682 }
29683 });
29684 });
29685 }
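// Usage sketch, mirroring calls made elsewhere in this class:
//
//   this.delegateLoaders_('all', ['abort', 'pause']);      // pauseLoading
//   this.delegateLoaders_('audio', ['abort', 'pause']);    // audio group changed
//   this.delegateLoaders_('subtitle', ['abort', 'pause']); // subtitle group changed
//   this.delegateLoaders_('main', ['abort', 'pause']);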
29686 /**
29687 * set the current time on all segment loaders
29688 *
29689 * @param {TimeRange} currentTime the current time to set
29690 * @return {TimeRange} the current time
29691 */
29692 ;
29693
29694 _proto.setCurrentTime = function setCurrentTime(currentTime) {
29695 var buffered = findRange(this.tech_.buffered(), currentTime);
29696
29697 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
29698 // return immediately if the metadata is not ready yet
29699 return 0;
29700 } // it's clearly an edge case, but don't throw an error if asked to
29701 // seek within an empty playlist
29702
29703
29704 if (!this.masterPlaylistLoader_.media().segments) {
29705 return 0;
29706 } // if the seek location is already buffered, continue buffering as usual
29707
29708
29709 if (buffered && buffered.length) {
29710 return currentTime;
29711 } // cancel outstanding requests so we begin buffering at the new
29712 // location
29713
29714
29715 this.mainSegmentLoader_.resetEverything();
29716 this.mainSegmentLoader_.abort();
29717
29718 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29719 this.audioSegmentLoader_.resetEverything();
29720 this.audioSegmentLoader_.abort();
29721 }
29722
29723 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
29724 this.subtitleSegmentLoader_.resetEverything();
29725 this.subtitleSegmentLoader_.abort();
29726 } // start the segment loaders loading in case they are paused
29727
29728
29729 this.load();
29730 }
29731 /**
29732 * get the current duration
29733 *
29734 * @return {TimeRange} the duration
29735 */
29736 ;
29737
29738 _proto.duration = function duration() {
29739 if (!this.masterPlaylistLoader_) {
29740 return 0;
29741 }
29742
29743 var media = this.masterPlaylistLoader_.media();
29744
29745 if (!media) {
29746 // no playlists loaded yet, so can't determine a duration
29747 return 0;
29748 } // Don't rely on the media source for duration in the case of a live playlist since
29749 // setting the native MediaSource's duration to infinity ends up with consequences to
29750 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
29751 //
29752 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
29753 // however, few browsers have support for setLiveSeekableRange()
29754 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
29755 //
29756 // Until a time when the duration of the media source can be set to infinity, and a
29757 // seekable range specified across browsers, just return Infinity.
29758
29759
29760 if (!media.endList) {
29761 return Infinity;
29762 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
29763 // available). If it's not available, fall back to a playlist-calculated estimate.
29764
29765
29766 if (this.mediaSource) {
29767 return this.mediaSource.duration;
29768 }
29769
29770 return Vhs$1.Playlist.duration(media);
29771 }
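// Sketch of the duration decision above (illustrative):
//
//   live playlist (no endList):        mpc.duration() === Infinity
//   VOD with an attached MediaSource:  mpc.duration() === mpc.mediaSource.duration
//   VOD before a MediaSource exists:   mpc.duration() === Vhs.Playlist.duration(media)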
29772 /**
29773 * check the seekable range
29774 *
29775 * @return {TimeRange} the seekable range
29776 */
29777 ;
29778
29779 _proto.seekable = function seekable() {
29780 return this.seekable_;
29781 };
29782
29783 _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
29784 var audioSeekable; // TODO check for creation of both source buffers before updating seekable
29785 //
29786 // A fix was made to this function where a check for
29787 // this.sourceUpdater_.hasCreatedSourceBuffers
29788 // was added to ensure that both source buffers were created before seekable was
29789 // updated. However, it originally had a bug: it returned early when the check was
29790 // truthy instead of when it was falsy. Flipping it to return early on false,
29791 // though, created other issues. A call to play() would check for a seekable end
29792 // without verifying that a seekable range was present. In addition, even checking
29793 // for that didn't solve some issues, as handleFirstPlay is sometimes bypassed when
29794 // a media update calls load on the segment loaders, skipping a seek to live and
29795 // thereby starting live streams at the beginning of the stream rather than at the end.
29796 //
29797 // This conditional should be fixed to wait for the creation of two source buffers at
29798 // the same time as the other sections of code are fixed to properly seek to live and
29799 // not throw an error due to checking for a seekable end when no seekable range exists.
29800 //
29801 // For now, fall back to the older behavior, with the understanding that the seekable
29802 // range may not be completely correct, leading to a suboptimal initial live point.
29803
29804 if (!this.masterPlaylistLoader_) {
29805 return;
29806 }
29807
29808 var media = this.masterPlaylistLoader_.media();
29809
29810 if (!media) {
29811 return;
29812 }
29813
29814 var expired = this.syncController_.getExpiredTime(media, this.duration());
29815
29816 if (expired === null) {
29817 // not enough information to update seekable
29818 return;
29819 }
29820
29821 var master = this.masterPlaylistLoader_.master;
29822 var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
29823
29824 if (mainSeekable.length === 0) {
29825 return;
29826 }
29827
29828 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29829 media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
29830 expired = this.syncController_.getExpiredTime(media, this.duration());
29831
29832 if (expired === null) {
29833 return;
29834 }
29835
29836 audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
29837
29838 if (audioSeekable.length === 0) {
29839 return;
29840 }
29841 }
29842
29843 var oldEnd;
29844 var oldStart;
29845
29846 if (this.seekable_ && this.seekable_.length) {
29847 oldEnd = this.seekable_.end(0);
29848 oldStart = this.seekable_.start(0);
29849 }
29850
29851 if (!audioSeekable) {
29852 // seekable has been calculated based on buffering video data so it
29853 // can be returned directly
29854 this.seekable_ = mainSeekable;
29855 } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
29856 // seekables are pretty far off, rely on main
29857 this.seekable_ = mainSeekable;
29858 } else {
29859 this.seekable_ = videojs__default["default"].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
29860 } // seekable is the same as last time
29861
29862
29863 if (this.seekable_ && this.seekable_.length) {
29864 if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
29865 return;
29866 }
29867 }
29868
29869 this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
29870 this.tech_.trigger('seekablechanged');
29871 }
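// Worked example of the seekable merge above (assumed numbers): with main
// seekable [10, 100] and audio seekable [15, 95], the ranges overlap, so the
// result is their intersection:
//
//   videojs.createTimeRanges([[Math.max(10, 15), Math.min(100, 95)]]); // [15, 95]
//
// If the two ranges do not overlap at all, mainSeekable is used as-is.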
29872 /**
29873 * Update the player duration
29874 */
29875 ;
29876
29877 _proto.updateDuration = function updateDuration(isLive) {
29878 if (this.updateDuration_) {
29879 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
29880 this.updateDuration_ = null;
29881 }
29882
29883 if (this.mediaSource.readyState !== 'open') {
29884 this.updateDuration_ = this.updateDuration.bind(this, isLive);
29885 this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
29886 return;
29887 }
29888
29889 if (isLive) {
29890 var seekable = this.seekable();
29891
29892 if (!seekable.length) {
29893 return;
29894 } // Even in the case of a live playlist, the native MediaSource's duration should not
29895 // be set to Infinity (even though this would be expected for a live playlist), since
29896 // setting the native MediaSource's duration to infinity ends up with consequences to
29897 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
29898 //
29899 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
29900 // however, few browsers have support for setLiveSeekableRange()
29901 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
29902 //
29903 // Until a time when the duration of the media source can be set to infinity, and a
29904 // seekable range specified across browsers, the duration should be greater than or
29905 // equal to the last possible seekable value.
29906 // MediaSource duration starts as NaN
29907 // It is possible (and probable) that this case will never be reached for many
29908 // sources, since the MediaSource reports duration as the highest value without
29909 // accounting for timestamp offset. For example, if the timestamp offset is -100 and
29910 // we buffered times 0 to 100 with real times of 100 to 200, even though current
29911 // time will be between 0 and 100, the native media source may report the duration
29912 // as 200. However, since we report duration separate from the media source (as
29913 // Infinity), and as long as the native media source duration value is greater than
29914 // our reported seekable range, seeks will work as expected. The large number as
29915 // duration for live is actually a strategy used by some players to work around the
29916 // issue of live seekable ranges cited above.
29917
29918
29919 if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
29920 this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
29921 }
29922
29923 return;
29924 }
29925
29926 var buffered = this.tech_.buffered();
29927 var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());
29928
29929 if (buffered.length > 0) {
29930 duration = Math.max(duration, buffered.end(buffered.length - 1));
29931 }
29932
29933 if (this.mediaSource.duration !== duration) {
29934 this.sourceUpdater_.setDuration(duration);
29935 }
29936 }
29937 /**
29938 * dispose of the MasterPlaylistController and everything
29939 * that it controls
29940 */
29941 ;
29942
29943 _proto.dispose = function dispose() {
29944 var _this8 = this;
29945
29946 this.trigger('dispose');
29947 this.decrypter_.terminate();
29948 this.masterPlaylistLoader_.dispose();
29949 this.mainSegmentLoader_.dispose();
29950
29951 if (this.loadOnPlay_) {
29952 this.tech_.off('play', this.loadOnPlay_);
29953 }
29954
29955 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
29956 var groups = _this8.mediaTypes_[type].groups;
29957
29958 for (var id in groups) {
29959 groups[id].forEach(function (group) {
29960 if (group.playlistLoader) {
29961 group.playlistLoader.dispose();
29962 }
29963 });
29964 }
29965 });
29966 this.audioSegmentLoader_.dispose();
29967 this.subtitleSegmentLoader_.dispose();
29968 this.sourceUpdater_.dispose();
29969 this.timelineChangeController_.dispose();
29970 this.stopABRTimer_();
29971
29972 if (this.updateDuration_) {
29973 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
29974 }
29975
29976 this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_);
29977
29978 this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
29979 this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
29980 this.off();
29981 }
29982 /**
29983 * return the master playlist object if we have one
29984 *
29985 * @return {Object} the master playlist object that we parsed
29986 */
29987 ;
29988
29989 _proto.master = function master() {
29990 return this.masterPlaylistLoader_.master;
29991 }
29992 /**
29993 * return the currently selected playlist
29994 *
29995 * @return {Object} the currently selected playlist object that we parsed
29996 */
29997 ;
29998
29999 _proto.media = function media() {
30000 // playlist loader will not return media if it has not been fully loaded
30001 return this.masterPlaylistLoader_.media() || this.initialMedia_;
30002 };
30003
30004 _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
30005 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
30006 var hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using a separate audio loader, the main loader provides the
30007 // audio media info; otherwise check the audio segment loader.
30008
30009 var hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders have not loaded sufficiently to get codecs
30010
30011 if (!hasMainMediaInfo || !hasAudioMediaInfo) {
30012 return false;
30013 }
30014
30015 return true;
30016 };
30017
30018 _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
30019 var _this9 = this;
30020
30021 var media = {
30022 main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
30023 audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
30024 }; // set "main" media equal to video
30025
30026 media.video = media.main;
30027 var playlistCodecs = codecsForPlaylist(this.master(), this.media());
30028 var codecs = {};
30029 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
30030
30031 if (media.main.hasVideo) {
30032 codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
30033 }
30034
30035 if (media.main.isMuxed) {
30036 codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC);
30037 }
30038
30039 if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
30040 codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
30041
30042 media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
30043 } // no codecs, no playback.
30044
30045
30046 if (!codecs.audio && !codecs.video) {
30047 this.blacklistCurrentPlaylist({
30048 playlist: this.media(),
30049 message: 'Could not determine codecs for playlist.',
30050 blacklistDuration: Infinity
30051 });
30052 return;
30053 } // fmp4 relies on browser support, while ts relies on muxer support
30054
30055
30056 var supportFunction = function supportFunction(isFmp4, codec) {
30057 return isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
30058 };
30059
30060 var unsupportedCodecs = {};
30061 var unsupportedAudio;
30062 ['video', 'audio'].forEach(function (type) {
30063 if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
30064 var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
30065 unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
30066 unsupportedCodecs[supporter].push(codecs[type]);
30067
30068 if (type === 'audio') {
30069 unsupportedAudio = supporter;
30070 }
30071 }
30072 });
30073
30074 if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
30075 var audioGroup = this.media().attributes.AUDIO;
30076 this.master().playlists.forEach(function (variant) {
30077 var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
30078
30079 if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
30080 variant.excludeUntil = Infinity;
30081 }
30082 });
30083 this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
30084 } // if we have any unsupported codecs blacklist this playlist.
30085
30086
30087 if (Object.keys(unsupportedCodecs).length) {
30088 var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
30089 if (acc) {
30090 acc += ', ';
30091 }
30092
30093 acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
30094 return acc;
30095 }, '') + '.';
30096 this.blacklistCurrentPlaylist({
30097 playlist: this.media(),
30098 internal: true,
30099 message: message,
30100 blacklistDuration: Infinity
30101 });
30102 return;
30103 } // check if codec switching is happening
30104
30105
30106 if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
30107 var switchMessages = [];
30108 ['video', 'audio'].forEach(function (type) {
30109 var newCodec = (parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
30110 var oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;
30111
30112 if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
30113 switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
30114 }
30115 });
30116
30117 if (switchMessages.length) {
30118 this.blacklistCurrentPlaylist({
30119 playlist: this.media(),
30120 message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
30121 blacklistDuration: Infinity,
30122 internal: true
30123 });
30124 return;
30125 }
30126 } // TODO: when using the muxer shouldn't we just return
30127 // the codecs that the muxer outputs?
30128
30129
30130 return codecs;
30131 }
30132 /**
30133 * Create source buffers and exclude any incompatible renditions.
30134 *
30135 * @private
30136 */
30137 ;
30138
30139 _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
30140 // media source is not ready yet or sourceBuffers are already
30141 // created.
30142 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
30143 return;
30144 }
30145
30146 if (!this.areMediaTypesKnown_()) {
30147 return;
30148 }
30149
30150 var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
30151
30152 if (!codecs) {
30153 return;
30154 }
30155
30156 this.sourceUpdater_.createSourceBuffers(codecs);
30157 var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
30158 this.excludeIncompatibleVariants_(codecString);
30159 }
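// Illustrative result (assumed codec strings): if getCodecsOrExclude_ returns
// { video: 'avc1.4d401e', audio: 'mp4a.40.2' }, source buffers are created for
// both and the variant exclusion check runs against the combined string:
//
//   this.excludeIncompatibleVariants_('avc1.4d401e,mp4a.40.2');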
30160 /**
30161 * Excludes playlists with codecs that are unsupported by the muxer and browser.
30162 */
30163 ;
30164
30165 _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
30166 var _this10 = this;
30167
30168 var playlists = this.master().playlists;
30169 var ids = []; // TODO: why don't we have a property to loop through all
30170 // playlists? Why did we ever mix indexes and keys?
30171
30172 Object.keys(playlists).forEach(function (key) {
30173 var variant = playlists[key]; // check if we already processed this playlist.
30174
30175 if (ids.indexOf(variant.id) !== -1) {
30176 return;
30177 }
30178
30179 ids.push(variant.id);
30180 var codecs = codecsForPlaylist(_this10.master(), variant); // master() returns the parsed master playlist object
30181 var unsupported = [];
30182
30183 if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
30184 unsupported.push("audio codec " + codecs.audio);
30185 }
30186
30187 if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
30188 unsupported.push("video codec " + codecs.video);
30189 }
30190
30191 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
30192 unsupported.push("text codec " + codecs.text);
30193 }
30194
30195 if (unsupported.length) {
30196 variant.excludeUntil = Infinity;
30197
30198 _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
30199 }
30200 });
30201 }
30202 /**
30203 * Blacklist playlists that are known to be codec or
30204 * stream-incompatible with the SourceBuffer configuration. For
30205 * instance, Media Source Extensions would cause the video element to
30206 * stall waiting for video data if you switched from a variant with
30207 * video and audio to an audio-only one.
30208 *
30209 * @param {Object} media a media playlist compatible with the current
30210 * set of SourceBuffers. Variants in the current master playlist that
30211 * do not appear to have compatible codec or stream configurations
30212 * will be excluded from the default playlist selection algorithm
30213 * indefinitely.
30214 * @private
30215 */
30216 ;
30217
30218 _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
30219 var _this11 = this;
30220
30221 var ids = [];
30222 var playlists = this.master().playlists;
30223 var codecs = unwrapCodecList(parseCodecs(codecString));
30224 var codecCount_ = codecCount(codecs);
30225 var videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
30226 var audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
30227 Object.keys(playlists).forEach(function (key) {
30228 var variant = playlists[key]; // check if we already processed this playlist.
30229 // or if it is already excluded forever.
30230
30231 if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
30232 return;
30233 }
30234
30235 ids.push(variant.id);
30236 var blacklistReasons = []; // get codecs from the playlist for this variant
30237
30238 var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
30239 var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
30240 // variant is incompatible. Wait for mux.js to probe
30241
30242 if (!variantCodecs.audio && !variantCodecs.video) {
30243 return;
30244 } // TODO: we can support this by removing the
30245 // old media source and creating a new one, but it will take some work.
30246 // The number of streams cannot change
30247
30248
30249 if (variantCodecCount !== codecCount_) {
30250 blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
30251 } // only exclude playlists by codec change, if codecs cannot switch
30252 // during playback.
30253
30254
30255 if (!_this11.sourceUpdater_.canChangeType()) {
30256 var variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
30257 var variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
30258
30259 if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
30260 blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
30261 } // the audio codec cannot change
30262
30263
30264 if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
30265 blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
30266 }
30267 }
30268
30269 if (blacklistReasons.length) {
30270 variant.excludeUntil = Infinity;
30271
30272 _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
30273 }
30274 });
30275 };
30276
30277 _proto.updateAdCues_ = function updateAdCues_(media) {
30278 var offset = 0;
30279 var seekable = this.seekable();
30280
30281 if (seekable.length) {
30282 offset = seekable.start(0);
30283 }
30284
30285 updateAdCues(media, this.cueTagsTrack_, offset);
30286 }
30287 /**
30288 * Calculates the desired forward buffer length based on current time
30289 *
30290 * @return {number} Desired forward buffer length in seconds
30291 */
30292 ;
30293
30294 _proto.goalBufferLength = function goalBufferLength() {
30295 var currentTime = this.tech_.currentTime();
30296 var initial = Config.GOAL_BUFFER_LENGTH;
30297 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
30298 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
30299 return Math.min(initial + currentTime * rate, max);
30300 }
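// Worked example (assuming the default Config values of GOAL_BUFFER_LENGTH = 30,
// GOAL_BUFFER_LENGTH_RATE = 1 and MAX_GOAL_BUFFER_LENGTH = 60): at
// currentTime = 10 the goal is Math.min(30 + 10 * 1, 60) === 40 seconds, and it
// stays capped at 60 seconds once currentTime reaches 30.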
30301 /**
30302 * Calculates the desired buffer low water line based on current time
30303 *
30304 * @return {number} Desired buffer low water line in seconds
30305 */
30306 ;
30307
30308 _proto.bufferLowWaterLine = function bufferLowWaterLine() {
30309 var currentTime = this.tech_.currentTime();
30310 var initial = Config.BUFFER_LOW_WATER_LINE;
30311 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
30312 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
30313 var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
30314 return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
30315 };
30316
30317 _proto.bufferHighWaterLine = function bufferHighWaterLine() {
30318 return Config.BUFFER_HIGH_WATER_LINE;
30319 };
30320
30321 return MasterPlaylistController;
30322 }(videojs__default["default"].EventTarget);
30323
30324 /**
30325 * Returns a function that acts as the Enable/disable playlist function.
30326 *
30327 * @param {PlaylistLoader} loader - The master playlist loader
30328 * @param {string} playlistID - id of the playlist
30329 * @param {Function} changePlaylistFn - A function to be called after a
30330 * playlist's enabled-state has been changed. Will NOT be called if a
30331 * playlist's enabled-state is unchanged
30332 * @param {boolean=} enable - Value to set the playlist enabled-state to
30333 * or if undefined returns the current enabled-state for the playlist
30334 * @return {Function} Function for setting/getting enabled
30335 */
30336
30337 var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
30338 return function (enable) {
30339 var playlist = loader.master.playlists[playlistID];
30340 var incompatible = isIncompatible(playlist);
30341 var currentlyEnabled = isEnabled(playlist);
30342
30343 if (typeof enable === 'undefined') {
30344 return currentlyEnabled;
30345 }
30346
30347 if (enable) {
30348 delete playlist.disabled;
30349 } else {
30350 playlist.disabled = true;
30351 }
30352
30353 if (enable !== currentlyEnabled && !incompatible) {
30354 // Ensure the outside world knows about our changes
30355 changePlaylistFn();
30356
30357 if (enable) {
30358 loader.trigger('renditionenabled');
30359 } else {
30360 loader.trigger('renditiondisabled');
30361 }
30362 }
30363
30364 return enable;
30365 };
30366 };
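// Usage sketch (illustrative): the returned function acts as both getter and
// setter for a rendition's enabled state.
//
//   var isEnabled = representation.enabled(); // read the current state
//   representation.enabled(false);            // disable; triggers 'renditiondisabled'
//
// Here `representation` is assumed to come from the representations() API below.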
30367 /**
30368 * The representation object encapsulates the publicly visible information
30369 * in a media playlist along with a setter/getter-type function (enabled)
30370 * for changing the enabled-state of a particular playlist entry
30371 *
30372 * @class Representation
30373 */
30374
30375
30376 var Representation = function Representation(vhsHandler, playlist, id) {
30377 var mpc = vhsHandler.masterPlaylistController_,
30378 smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
30379
30380 var changeType = smoothQualityChange ? 'smooth' : 'fast';
30381 var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
30382
30383 if (playlist.attributes) {
30384 var resolution = playlist.attributes.RESOLUTION;
30385 this.width = resolution && resolution.width;
30386 this.height = resolution && resolution.height;
30387 this.bandwidth = playlist.attributes.BANDWIDTH;
30388 this.frameRate = playlist.attributes['FRAME-RATE'];
30389 }
30390
30391 this.codecs = codecsForPlaylist(mpc.master(), playlist);
30392 this.playlist = playlist; // The id is simply the ordinality of the media playlist
30393 // within the master playlist
30394
30395 this.id = id; // Partially-apply the enableFunction to create a playlist-
30396 // specific variant
30397
30398 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
30399 };
30400 /**
30401 * A mixin function that adds the `representations` api to an instance
30402 * of the VhsHandler class
30403 *
30404 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
30405 * representation API into
30406 */
30407
30408
30409 var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
30410 // Add a single API-specific function to the VhsHandler instance
30411 vhsHandler.representations = function () {
30412 var master = vhsHandler.masterPlaylistController_.master();
30413 var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
30414
30415 if (!playlists) {
30416 return [];
30417 }
30418
30419 return playlists.filter(function (media) {
30420 return !isIncompatible(media);
30421 }).map(function (e, i) {
30422 return new Representation(vhsHandler, e, e.id);
30423 });
30424 };
30425 };
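// Illustrative usage (assumes a video.js player whose current source is handled
// by VHS):
//
//   var vhs = player.tech({ IWillNotUseThisInPlugins: true }).vhs;
//   vhs.representations().forEach(function (rep) {
//     // e.g. keep only renditions at or above 720p enabled
//     rep.enabled(typeof rep.height === 'number' && rep.height >= 720);
//   });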
30426
30427 /**
30428 * @file playback-watcher.js
30429 *
30430 * Playback starts, and now my watch begins. It shall not end until my death. I shall
30431 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
30432 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
30433 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
30434 * my life and honor to the Playback Watch, for this Player and all the Players to come.
30435 */
30436
30437 var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
30438 /**
30439 * @class PlaybackWatcher
30440 */
30441
30442 var PlaybackWatcher = /*#__PURE__*/function () {
30443 /**
30444 * Represents a PlaybackWatcher object.
30445 *
30446 * @class
30447 * @param {Object} options an object that includes the tech and settings
30448 */
30449 function PlaybackWatcher(options) {
30450 var _this = this;
30451
30452 this.masterPlaylistController_ = options.masterPlaylistController;
30453 this.tech_ = options.tech;
30454 this.seekable = options.seekable;
30455 this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
30456 this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
30457 this.media = options.media;
30458 this.consecutiveUpdates = 0;
30459 this.lastRecordedTime = null;
30460 this.timer_ = null;
30461 this.checkCurrentTimeTimeout_ = null;
30462 this.logger_ = logger('PlaybackWatcher');
30463 this.logger_('initialize');
30464
30465 var playHandler = function playHandler() {
30466 return _this.monitorCurrentTime_();
30467 };
30468
30469 var canPlayHandler = function canPlayHandler() {
30470 return _this.monitorCurrentTime_();
30471 };
30472
30473 var waitingHandler = function waitingHandler() {
30474 return _this.techWaiting_();
30475 };
30476
30477 var cancelTimerHandler = function cancelTimerHandler() {
30478 return _this.cancelTimer_();
30479 };
30480
30481 var mpc = this.masterPlaylistController_;
30482 var loaderTypes = ['main', 'subtitle', 'audio'];
30483 var loaderChecks = {};
30484 loaderTypes.forEach(function (type) {
30485 loaderChecks[type] = {
30486 reset: function reset() {
30487 return _this.resetSegmentDownloads_(type);
30488 },
30489 updateend: function updateend() {
30490 return _this.checkSegmentDownloads_(type);
30491 }
30492 };
30493 mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
30494 // isn't changing, we want to reset. We cannot assume that the new rendition
30495 // will also be stalled until we see new appends.
30496
30497 mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
30498 // This prevents one-segment playlists (single vtt or single segment content)
30499 // from being detected as stalling, since the buffer will not change in those
30500 // cases; it already spans the entire video duration.
30501
30502 _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
30503 });
30504 /**
30505 * We check if a seek was into a gap through the following steps:
30506 * 1. We get a seeking event and we do not get a seeked event. This means that
30507 * a seek was attempted but not completed.
30508 * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
30509 * removed everything from our buffer and appended a segment, and should be ready
30510 * to check for gaps.
30511 */
30512
30513 var setSeekingHandlers = function setSeekingHandlers(fn) {
30514 ['main', 'audio'].forEach(function (type) {
30515 mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
30516 });
30517 };
30518
30519 this.seekingAppendCheck_ = function () {
30520 if (_this.fixesBadSeeks_()) {
30521 _this.consecutiveUpdates = 0;
30522 _this.lastRecordedTime = _this.tech_.currentTime();
30523 setSeekingHandlers('off');
30524 }
30525 };
30526
30527 this.clearSeekingAppendCheck_ = function () {
30528 return setSeekingHandlers('off');
30529 };
30530
30531 this.watchForBadSeeking_ = function () {
30532 _this.clearSeekingAppendCheck_();
30533
30534 setSeekingHandlers('on');
30535 };
30536
30537 this.tech_.on('seeked', this.clearSeekingAppendCheck_);
30538 this.tech_.on('seeking', this.watchForBadSeeking_);
30539 this.tech_.on('waiting', waitingHandler);
30540 this.tech_.on(timerCancelEvents, cancelTimerHandler);
30541 this.tech_.on('canplay', canPlayHandler);
30542 /*
30543 An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
30544 is surfaced in one of two ways:
30545 1) The `waiting` event is fired before the player has buffered content, making it impossible
30546 to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
30547 we can check if playback is stalled due to a gap, and skip the gap if necessary.
30548 2) A source with a gap at the beginning of the stream is loaded programmatically while the player
30549 is in a playing state. To catch this case, it's important that our one-time play listener is set up
30550 even if the player is in a playing state.
30551 */
30552
30553 this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
30554
30555 this.dispose = function () {
30556 _this.clearSeekingAppendCheck_();
30557
30558 _this.logger_('dispose');
30559
30560 _this.tech_.off('waiting', waitingHandler);
30561
30562 _this.tech_.off(timerCancelEvents, cancelTimerHandler);
30563
30564 _this.tech_.off('canplay', canPlayHandler);
30565
30566 _this.tech_.off('play', playHandler);
30567
30568 _this.tech_.off('seeking', _this.watchForBadSeeking_);
30569
30570 _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);
30571
30572 loaderTypes.forEach(function (type) {
30573 mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
30574 mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);
30575
30576 _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
30577 });
30578
30579 if (_this.checkCurrentTimeTimeout_) {
30580 window.clearTimeout(_this.checkCurrentTimeTimeout_);
30581 }
30582
30583 _this.cancelTimer_();
30584 };
30585 }
30586 /**
30587 * Periodically check current time to see if playback stopped
30588 *
30589 * @private
30590 */
30591
30592
30593 var _proto = PlaybackWatcher.prototype;
30594
30595 _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
30596 this.checkCurrentTime_();
30597
30598 if (this.checkCurrentTimeTimeout_) {
30599 window.clearTimeout(this.checkCurrentTimeTimeout_);
30600 } // poll every 250ms: 250 is what WebKit uses for its progress checks (Firefox uses 15, and 42 would correspond to 24 fps)
30601
30602
30603 this.checkCurrentTimeTimeout_ = window.setTimeout(this.monitorCurrentTime_.bind(this), 250);
30604 }
30605 /**
30606 * Reset stalled download stats for a specific type of loader
30607 *
30608 * @param {string} type
30609 * The segment loader type to check.
30610 *
30611 * @listens SegmentLoader#playlistupdate
30612 * @listens Tech#seeking
30613 * @listens Tech#seeked
30614 */
30615 ;
30616
30617 _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
30618 var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
30619
30620 if (this[type + "StalledDownloads_"] > 0) {
30621 this.logger_("resetting possible stalled download count for " + type + " loader");
30622 }
30623
30624 this[type + "StalledDownloads_"] = 0;
30625 this[type + "Buffered_"] = loader.buffered_();
30626 }
30627 /**
30628 * Checks on every segment `appendsdone` to see
30629 * if segment appends are making progress. If they are not,
30630 * and we are still downloading bytes, we blacklist the playlist.
30631 *
30632 * @param {string} type
30633 * The segment loader type to check.
30634 *
30635 * @listens SegmentLoader#appendsdone
30636 */
30637 ;
30638
30639 _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
30640 var mpc = this.masterPlaylistController_;
30641 var loader = mpc[type + "SegmentLoader_"];
30642 var buffered = loader.buffered_();
30643 var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
30644 this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue, or
30645 // the buffered value for this loader changed,
30646 // then appends are working
30647
30648 if (isBufferedDifferent) {
30649 this.resetSegmentDownloads_(type);
30650 return;
30651 }
30652
30653 this[type + "StalledDownloads_"]++;
30654 this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
30655 playlistId: loader.playlist_ && loader.playlist_.id,
30656 buffered: timeRangesToArray(buffered)
30657 }); // after 10 possibly stalled appends with no reset, exclude
30658
30659 if (this[type + "StalledDownloads_"] < 10) {
30660 return;
30661 }
30662
30663 this.logger_(type + " loader stalled download exclusion");
30664 this.resetSegmentDownloads_(type);
30665 this.tech_.trigger({
30666 type: 'usage',
30667 name: "vhs-" + type + "-download-exclusion"
30668 });
30669
30670 if (type === 'subtitle') {
30671 return;
30672 } // TODO: should we exclude audio tracks rather than main tracks
30673 // when type is audio?
30674
30675
30676 mpc.blacklistCurrentPlaylist({
30677 message: "Excessive " + type + " segment downloading detected."
30678 }, Infinity);
30679 }
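// Sketch of the counting above (illustrative): consecutive 'appendsdone' events
// that leave the loader's buffered ranges unchanged increment the stall counter,
// while any change in buffered resets it:
//
//   appends #1..#9 with unchanged buffered -> counter climbs, only logging
//   append #10 with unchanged buffered     -> exclusion triggers, counter resets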
30680 /**
30681 * The purpose of this function is to emulate the "waiting" event on
30682 * browsers that do not emit it when they are waiting for more
30683 * data to continue playback
30684 *
30685 * @private
30686 */
30687 ;
30688
30689 _proto.checkCurrentTime_ = function checkCurrentTime_() {
30690 if (this.tech_.paused() || this.tech_.seeking()) {
30691 return;
30692 }
30693
30694 var currentTime = this.tech_.currentTime();
30695 var buffered = this.tech_.buffered();
30696
30697 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
30698 // If current time is at the end of the final buffered region, then any playback
30699 // stall is most likely caused by buffering in a low bandwidth environment. The tech
30700 // should fire a `waiting` event in this scenario, but browser and tech
30701 // inconsistencies mean it may not. Calling `techWaiting_` here allows us to simulate
30702 // responding to a native `waiting` event when the tech fails to emit one.
30703 return this.techWaiting_();
30704 }
30705
30706 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
30707 this.consecutiveUpdates++;
30708 this.waiting_();
30709 } else if (currentTime === this.lastRecordedTime) {
30710 this.consecutiveUpdates++;
30711 } else {
30712 this.consecutiveUpdates = 0;
30713 this.lastRecordedTime = currentTime;
30714 }
30715 }
30716 /**
30717 * Cancels any pending timers and resets the 'timeupdate' mechanism
30718 * designed to detect that we are stalled
30719 *
30720 * @private
30721 */
30722 ;
30723
30724 _proto.cancelTimer_ = function cancelTimer_() {
30725 this.consecutiveUpdates = 0;
30726
30727 if (this.timer_) {
30728 this.logger_('cancelTimer_');
30729 clearTimeout(this.timer_);
30730 }
30731
30732 this.timer_ = null;
30733 }
30734 /**
30735 * Fixes situations where there's a bad seek
30736 *
30737 * @return {boolean} whether an action was taken to fix the seek
30738 * @private
30739 */
30740 ;
30741
30742 _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
30743 var seeking = this.tech_.seeking();
30744
30745 if (!seeking) {
30746 return false;
30747 } // TODO: It's possible that these seekable checks should be moved out of this function
30748 // and into a function that runs on seekablechange. It's also possible that we only need
30749 // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
30750 // seekable range.
30751
30752
30753 var seekable = this.seekable();
30754 var currentTime = this.tech_.currentTime();
30755 var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
30756 var seekTo;
30757
30758 if (isAfterSeekableRange) {
30759 var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
30760
30761 seekTo = seekableEnd;
30762 }
30763
30764 if (this.beforeSeekableWindow_(seekable, currentTime)) {
30765 var seekableStart = seekable.start(0); // sync to the beginning of the live window
30766 // provide a buffer of .1 seconds to handle rounding/imprecise numbers
30767
30768 seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
30769 // happen in live with a 3 segment playlist), then don't use a time delta
30770 seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
30771 }
30772
30773 if (typeof seekTo !== 'undefined') {
30774 this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
30775 this.tech_.setCurrentTime(seekTo);
30776 return true;
30777 }
30778
30779 var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
30780 var buffered = this.tech_.buffered();
30781 var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
30782 var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
30783 var media = this.media(); // verify that at least two segment durations or one part duration have been
30784 // appended before checking for a gap.
30785
30786 var minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2;
30788
30789 var bufferedToCheck = [audioBuffered, videoBuffered];
30790
30791 for (var i = 0; i < bufferedToCheck.length; i++) {
30792 // skip null buffered
30793 if (!bufferedToCheck[i]) {
30794 continue;
30795 }
30796
30797 var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations or one part
30798 // duration behind we haven't appended enough to call this a bad seek.
30799
30800 if (timeAhead < minAppendedDuration) {
30801 return false;
30802 }
30803 }
30804
30805 var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
30806 // to seek over the gap
30807
30808 if (nextRange.length === 0) {
30809 return false;
30810 }
30811
30812 seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
30813 this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
30814 this.tech_.setCurrentTime(seekTo);
30815 return true;
30816 }
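// Worked example (assumed numbers): seeking to t = 12 when enough data has been
// appended but the buffer only covers [12.5, 30] yields
// nextRange.start(0) === 12.5, so the watcher seeks to 12.5 + SAFE_TIME_DELTA to
// land just inside the buffered region instead of stalling in the gap.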
30817 /**
30818 * Handler for situations when we determine the player is waiting.
30819 *
30820 * @private
30821 */
30822 ;
30823
30824 _proto.waiting_ = function waiting_() {
30825 if (this.techWaiting_()) {
30826 return;
30827 } // All tech waiting checks failed. Use last resort correction
30828
30829
30830 var currentTime = this.tech_.currentTime();
30831 var buffered = this.tech_.buffered();
30832 var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
30833 // region with no indication that anything is amiss (seen in Firefox). Seeking to
30834 // currentTime is usually enough to kickstart the player. This checks that the player
30835 // is currently within a buffered region before attempting a corrective seek.
30836 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
30837 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
30838 // make sure there is ~3 seconds of forward buffer before taking any corrective action
30839 // to avoid triggering an `unknownwaiting` event when the network is slow.
30840
30841 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
30842 this.cancelTimer_();
30843 this.tech_.setCurrentTime(currentTime);
30844 this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
30845
30846 this.tech_.trigger({
30847 type: 'usage',
30848 name: 'vhs-unknown-waiting'
30849 });
30850 this.tech_.trigger({
30851 type: 'usage',
30852 name: 'hls-unknown-waiting'
30853 });
30854 return;
30855 }
30856 }
30857 /**
30858 * Handler for situations when the tech fires a `waiting` event
30859 *
30860 * @return {boolean}
30861 * True if an action was taken (or none was needed) to correct the waiting.
30862 * False if no checks passed.
30863 * @private
30864 */
30865 ;
30866
30867 _proto.techWaiting_ = function techWaiting_() {
30868 var seekable = this.seekable();
30869 var currentTime = this.tech_.currentTime();
30870
30871 if (this.tech_.seeking() || this.timer_ !== null) {
30872 // Tech is seeking or already waiting on another action, no action needed
30873 return true;
30874 }
30875
30876 if (this.beforeSeekableWindow_(seekable, currentTime)) {
30877 var livePoint = seekable.end(seekable.length - 1);
30878 this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
30879 this.cancelTimer_();
30880 this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
30881
30882 this.tech_.trigger({
30883 type: 'usage',
30884 name: 'vhs-live-resync'
30885 });
30886 this.tech_.trigger({
30887 type: 'usage',
30888 name: 'hls-live-resync'
30889 });
30890 return true;
30891 }
30892
30893 var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
30894 var buffered = this.tech_.buffered();
30895 var videoUnderflow = this.videoUnderflow_({
30896 audioBuffered: sourceUpdater.audioBuffered(),
30897 videoBuffered: sourceUpdater.videoBuffered(),
30898 currentTime: currentTime
30899 });
30900
30901 if (videoUnderflow) {
30902 // Even though the video underflowed and was stuck in a gap, the audio overplayed
30903 // the gap, leading currentTime into a buffered range. Seeking to currentTime
30904 // allows the video to catch up to the audio position without losing any audio
30905 // (only suffering ~3 seconds of frozen video and a pause in audio playback).
30906 this.cancelTimer_();
30907 this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
30908
30909 this.tech_.trigger({
30910 type: 'usage',
30911 name: 'vhs-video-underflow'
30912 });
30913 this.tech_.trigger({
30914 type: 'usage',
30915 name: 'hls-video-underflow'
30916 });
30917 return true;
30918 }
30919
30920 var nextRange = findNextRange(buffered, currentTime); // check for gap
30921
30922 if (nextRange.length > 0) {
30923 var difference = nextRange.start(0) - currentTime;
30924 this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
30925 this.cancelTimer_();
30926 this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
30927 return true;
30928 } // All checks failed. Returning false to indicate failure to correct waiting
30929
30930
30931 return false;
30932 };
30933
30934 _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
30935 if (allowSeeksWithinUnsafeLiveWindow === void 0) {
30936 allowSeeksWithinUnsafeLiveWindow = false;
30937 }
30938
30939 if (!seekable.length) {
30940 // we can't make a solid case if there's no seekable, default to false
30941 return false;
30942 }
30943
30944 var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
30945 var isLive = !playlist.endList;
30946
30947 if (isLive && allowSeeksWithinUnsafeLiveWindow) {
30948 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
30949 }
30950
30951 if (currentTime > allowedEnd) {
30952 return true;
30953 }
30954
30955 return false;
30956 };
30957
30958 _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
30959 if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
30960 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
30961 return true;
30962 }
30963
30964 return false;
30965 };
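// Worked example (assumed values): with seekable [20, 80] and a live playlist
// with targetDuration = 6:
//
//   afterSeekableWindow_(seekable, 85, playlist)        // true: 85 > 80 + SAFE_TIME_DELTA
//   afterSeekableWindow_(seekable, 85, playlist, true)  // false: allowed end becomes 80 + 18
//   beforeSeekableWindow_(seekable, 19)                 // true when 19 < 20 - liveRangeSafeTimeDelta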
30966
30967 _proto.videoUnderflow_ = function videoUnderflow_(_ref) {
30968 var videoBuffered = _ref.videoBuffered,
30969 audioBuffered = _ref.audioBuffered,
30970 currentTime = _ref.currentTime;
30971
30972 // audio only content will not have video underflow :)
30973 if (!videoBuffered) {
30974 return;
30975 }
30976
30977 var gap; // find a gap in demuxed content.
30978
30979 if (videoBuffered.length && audioBuffered.length) {
30980 // in Chrome audio will continue to play for ~3s when we run out of video
30981 // so we have to check that the video buffer did have some buffer in the
30982 // past.
30983 var lastVideoRange = findRange(videoBuffered, currentTime - 3);
30984 var videoRange = findRange(videoBuffered, currentTime);
30985 var audioRange = findRange(audioBuffered, currentTime);
30986
30987 if (audioRange.length && !videoRange.length && lastVideoRange.length) {
30988 gap = {
30989 start: lastVideoRange.end(0),
30990 end: audioRange.end(0)
30991 };
30992 } // find a gap in muxed content.
30993
30994 } else {
30995 var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
30996 // stuck in a gap due to video underflow.
30997
30998 if (!nextRange.length) {
30999 gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
31000 }
31001 }
31002
31003 if (gap) {
31004 this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
31005 return true;
31006 }
31007
31008 return false;
31009 }
31010 /**
31011 * Timer callback. If playback still has not proceeded, then we seek
31012 * to the start of the next buffered region.
31013 *
31014 * @private
31015 */
31016 ;
31017
31018 _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
31019 var buffered = this.tech_.buffered();
31020 var currentTime = this.tech_.currentTime();
31021 var nextRange = findNextRange(buffered, currentTime);
31022 this.cancelTimer_();
31023
31024 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
31025 return;
31026 }
31027
31028 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
31029
31030 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
31031 this.tech_.trigger({
31032 type: 'usage',
31033 name: 'vhs-gap-skip'
31034 });
31035 this.tech_.trigger({
31036 type: 'usage',
31037 name: 'hls-gap-skip'
31038 });
31039 };
31040
31041 _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
31042 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
31043 // playing for ~3 seconds after the video gap starts. This is done to account for
31044 // video buffer underflow/underrun (note that this is not done when there is audio
31045 // buffer underflow/underrun -- in that case the video will stop as soon as it
31046 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
31047 // video stalls). The player's time will reflect the playthrough of audio, so the
31048 // time will appear as if we are in a buffered region, even if we are stuck in a
31049 // "gap."
31050 //
31051 // Example:
31052 // video buffer: 0 => 10.1, 10.2 => 20
31053 // audio buffer: 0 => 20
31054 // overall buffer: 0 => 10.1, 10.2 => 20
31055 // current time: 13
31056 //
31057 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
31058 // however, the audio continued playing until it reached ~3 seconds past the gap
31059 // (13 seconds), at which point it stops as well. Since current time is past the
31060 // gap, findNextRange will return no ranges.
31061 //
31062 // To check for this issue, we see if there is a gap that starts somewhere within
31063 // a 3 second range (3 seconds +/- 1 second) back from our current time.
31064 var gaps = findGaps(buffered);
31065
31066 for (var i = 0; i < gaps.length; i++) {
31067 var start = gaps.start(i);
31068 var end = gaps.end(i); // gap starts no more than 4 seconds back
31069
31070 if (currentTime - start < 4 && currentTime - start > 2) {
31071 return {
31072 start: start,
31073 end: end
31074 };
31075 }
31076 }
31077
31078 return null;
31079 };
31080
31081 return PlaybackWatcher;
31082 }();
31083
31084 var defaultOptions = {
31085 errorInterval: 30,
31086 getSource: function getSource(next) {
31087 var tech = this.tech({
31088 IWillNotUseThisInPlugins: true
31089 });
31090 var sourceObj = tech.currentSource_ || this.currentSource();
31091 return next(sourceObj);
31092 }
31093 };
31094 /**
31095 * Main entry point for the plugin
31096 *
31097 * @param {Player} player a reference to a videojs Player instance
31098 * @param {Object} [options] an object with plugin options
31099 * @private
31100 */
31101
31102 var initPlugin = function initPlugin(player, options) {
31103 var lastCalled = 0;
31104 var seekTo = 0;
31105 var localOptions = videojs__default["default"].mergeOptions(defaultOptions, options);
31106 player.ready(function () {
31107 player.trigger({
31108 type: 'usage',
31109 name: 'vhs-error-reload-initialized'
31110 });
31111 player.trigger({
31112 type: 'usage',
31113 name: 'hls-error-reload-initialized'
31114 });
31115 });
31116 /**
31117 * Player modifications to perform that must wait until `loadedmetadata`
31118 * has been triggered
31119 *
31120 * @private
31121 */
31122
31123 var loadedMetadataHandler = function loadedMetadataHandler() {
31124 if (seekTo) {
31125 player.currentTime(seekTo);
31126 }
31127 };
31128 /**
31129 * Set the source on the player element, play, and seek if necessary
31130 *
31131 * @param {Object} sourceObj An object specifying the source url and mime-type to play
31132 * @private
31133 */
31134
31135
31136 var setSource = function setSource(sourceObj) {
31137 if (sourceObj === null || sourceObj === undefined) {
31138 return;
31139 }
31140
31141 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
31142 player.one('loadedmetadata', loadedMetadataHandler);
31143 player.src(sourceObj);
31144 player.trigger({
31145 type: 'usage',
31146 name: 'vhs-error-reload'
31147 });
31148 player.trigger({
31149 type: 'usage',
31150 name: 'hls-error-reload'
31151 });
31152 player.play();
31153 };
31154 /**
31155 * Attempt to get a source from either the built-in getSource function
31156 * or a custom function provided via the options
31157 *
31158 * @private
31159 */
31160
31161
31162 var errorHandler = function errorHandler() {
31163 // Do not attempt to reload the source if a source-reload occurred before
31164 // 'errorInterval' time has elapsed since the last source-reload
31165 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
31166 player.trigger({
31167 type: 'usage',
31168 name: 'vhs-error-reload-canceled'
31169 });
31170 player.trigger({
31171 type: 'usage',
31172 name: 'hls-error-reload-canceled'
31173 });
31174 return;
31175 }
31176
31177 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
31178 videojs__default["default"].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
31179 return;
31180 }
31181
31182 lastCalled = Date.now();
31183 return localOptions.getSource.call(player, setSource);
31184 };
31185 /**
31186 * Unbind any event handlers that were bound by the plugin
31187 *
31188 * @private
31189 */
31190
31191
31192 var cleanupEvents = function cleanupEvents() {
31193 player.off('loadedmetadata', loadedMetadataHandler);
31194 player.off('error', errorHandler);
31195 player.off('dispose', cleanupEvents);
31196 };
31197 /**
31198 * Cleanup before re-initializing the plugin
31199 *
31200 * @param {Object} [newOptions] an object with plugin options
31201 * @private
31202 */
31203
31204
31205 var reinitPlugin = function reinitPlugin(newOptions) {
31206 cleanupEvents();
31207 initPlugin(player, newOptions);
31208 };
31209
31210 player.on('error', errorHandler);
31211 player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
31212 // initializing the plugin
31213
31214 player.reloadSourceOnError = reinitPlugin;
31215 };
31216 /**
31217 * Reload the source when an error is detected as long as there
31218 * wasn't an error previously within the last 30 seconds
31219 *
31220 * @param {Object} [options] an object with plugin options
31221 */
31222
31223
31224 var reloadSourceOnError = function reloadSourceOnError(options) {
31225 initPlugin(this, options);
31226 };
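// Usage sketch (illustrative): retry the current source on error at most once
// every 10 seconds instead of the default 30:
//
//   player.reloadSourceOnError({ errorInterval: 10 });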
31227
31228 var version$4 = "2.16.0";
31229
31230 var version$3 = "6.0.1";
31231
31232 var version$2 = "0.22.1";
31233
31234 var version$1 = "4.8.0";
31235
31236 var version = "3.1.3";
31237
31238 var Vhs = {
31239 PlaylistLoader: PlaylistLoader,
31240 Playlist: Playlist,
31241 utils: utils,
31242 STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
31243 INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
31244 lastBandwidthSelector: lastBandwidthSelector,
31245 movingAverageBandwidthSelector: movingAverageBandwidthSelector,
31246 comparePlaylistBandwidth: comparePlaylistBandwidth,
31247 comparePlaylistResolution: comparePlaylistResolution,
31248 xhr: xhrFactory()
31249 }; // Define getter/setters for config properties
31250
31251 Object.keys(Config).forEach(function (prop) {
31252 Object.defineProperty(Vhs, prop, {
31253 get: function get() {
31254 videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
31255 return Config[prop];
31256 },
31257 set: function set(value) {
31258 videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
31259
31260 if (typeof value !== 'number' || value < 0) {
31261 videojs__default["default"].log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
31262 return;
31263 }
31264
31265 Config[prop] = value;
31266 }
31267 });
31268 });
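  // Editor's note: an illustrative sketch of the guarded accessors generated
  // above, using Config.INITIAL_BANDWIDTH (referenced later in this file).
  var exampleConfigOverride = function () {
    Vhs.INITIAL_BANDWIDTH = 2000000; // accepted (a non-negative number), logs the UNSAFE warning
    Vhs.INITIAL_BANDWIDTH = -1; // rejected with a warning; the value is left unchanged
  };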
31269 var LOCAL_STORAGE_KEY = 'videojs-vhs';
31270 /**
31271 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
31272 *
31273 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
31274 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
31275 * @function handleVhsMediaChange
31276 */
31277
31278 var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
31279 var newPlaylist = playlistLoader.media();
31280 var selectedIndex = -1;
31281
31282 for (var i = 0; i < qualityLevels.length; i++) {
31283 if (qualityLevels[i].id === newPlaylist.id) {
31284 selectedIndex = i;
31285 break;
31286 }
31287 }
31288
31289 qualityLevels.selectedIndex_ = selectedIndex;
31290 qualityLevels.trigger({
31291 selectedIndex: selectedIndex,
31292 type: 'change'
31293 });
31294 };
31295 /**
31296 * Adds quality levels to list once playlist metadata is available
31297 *
31298 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
31299 * @param {Object} vhs Vhs object to listen to for media events.
31300 * @function handleVhsLoadedMetadata
31301 */
31302
31303
31304 var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
31305 vhs.representations().forEach(function (rep) {
31306 qualityLevels.addQualityLevel(rep);
31307 });
31308 handleVhsMediaChange(qualityLevels, vhs.playlists);
31309 }; // HLS is a source handler, not a tech. Make sure attempts to use it
31310 // as one do not cause exceptions.
31311
31312
31313 Vhs.canPlaySource = function () {
31314 return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
31315 };
31316
31317 var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
31318 if (!keySystemOptions) {
31319 return keySystemOptions;
31320 }
31321
31322 var codecs = {};
31323
31324 if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
31325 codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
31326 }
31327
31328 if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
31329 codecs.audio = audioPlaylist.attributes.CODECS;
31330 }
31331
31332 var videoContentType = getMimeForCodec(codecs.video);
31333 var audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
31334
31335 var keySystemContentTypes = {};
31336
31337 for (var keySystem in keySystemOptions) {
31338 keySystemContentTypes[keySystem] = {};
31339
31340 if (audioContentType) {
31341 keySystemContentTypes[keySystem].audioContentType = audioContentType;
31342 }
31343
31344 if (videoContentType) {
31345 keySystemContentTypes[keySystem].videoContentType = videoContentType;
31346 } // Default to using the video playlist's PSSH even though they may be different, as
31347 // videojs-contrib-eme will only accept one in the options.
31348 //
31349 // This shouldn't be an issue in most cases, as early initialization will handle all
31350 // unique PSSH values; if any aren't handled, encrypted events should carry the
31351 // specific information needed for the unique license.
31352
31353
31354 if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
31355 keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
31356 } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
31357 // so we need to prevent overwriting the URL entirely
31358
31359
31360 if (typeof keySystemOptions[keySystem] === 'string') {
31361 keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
31362 }
31363 }
31364
31365 return videojs__default["default"].mergeOptions(keySystemOptions, keySystemContentTypes);
31366 };
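  // Editor's note: an illustrative sketch of emeKeySystems. The license URL and
  // CODECS attribute are assumptions; the result shape shown is approximate.
  var exampleEmeKeySystems = function () {
    return emeKeySystems({
      'com.widevine.alpha': 'https://license.example.com/widevine'
    }, {
      attributes: {
        CODECS: 'avc1.4d400d,mp4a.40.2'
      }
    });
    // => roughly { 'com.widevine.alpha': {
    //      url: 'https://license.example.com/widevine',
    //      videoContentType: 'video/mp4;codecs="avc1.4d400d"',
    //      audioContentType: 'audio/mp4;codecs="mp4a.40.2"'
    //    } }
  };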
31367 /**
31368 * @typedef {Object} KeySystems
31369 *
31370 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
31371 * Note: not all options are listed here.
31372 *
31373 * @property {Uint8Array} [pssh]
31374 * Protection System Specific Header
31375 */
31376
31377 /**
31378 * Goes through all the playlists and collects an array of KeySystems options objects
31379 * containing each playlist's keySystems and their pssh values, if available.
31380 *
31381 * @param {Object[]} playlists
31382 * The playlists to look through
31383 * @param {string[]} keySystems
31384 * The keySystems to collect pssh values for
31385 *
31386 * @return {KeySystems[]}
31387 * An array of KeySystems objects containing available key systems and their
31388 * pssh values
31389 */
31390
31391
31392 var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
31393 return playlists.reduce(function (keySystemsArr, playlist) {
31394 if (!playlist.contentProtection) {
31395 return keySystemsArr;
31396 }
31397
31398 var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
31399 var keySystemOptions = playlist.contentProtection[keySystem];
31400
31401 if (keySystemOptions && keySystemOptions.pssh) {
31402 keySystemsObj[keySystem] = {
31403 pssh: keySystemOptions.pssh
31404 };
31405 }
31406
31407 return keySystemsObj;
31408 }, {});
31409
31410 if (Object.keys(keySystemsOptions).length) {
31411 keySystemsArr.push(keySystemsOptions);
31412 }
31413
31414 return keySystemsArr;
31415 }, []);
31416 };
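  // Editor's note: an illustrative sketch of the collected shape; the
  // Uint8Array stands in for real PSSH bytes.
  var exampleGetAllPsshKeySystemsOptions = function (playlists) {
    return getAllPsshKeySystemsOptions(playlists, ['com.widevine.alpha']);
    // => e.g. [{ 'com.widevine.alpha': { pssh: new Uint8Array([/* ... */]) } }]
  };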
31417 /**
31418 * Returns a promise that waits for the
31419 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
31420 *
31421 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
31422 * browsers.
31423 *
31424 * As per the above ticket, this is particularly important for Chrome, where, if
31425 * unencrypted content is appended before encrypted content and the key session has not
31426 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
31427 * during playback.
31428 *
31429 * @param {Object} player
31430 * The player instance
31431 * @param {Object[]} sourceKeySystems
31432 * The key systems options from the player source
31433 * @param {Object} [audioMedia]
31434 * The active audio media playlist (optional)
31435 * @param {Object[]} mainPlaylists
31436 * The playlists found on the master playlist object
31437 *
31438 * @return {Object}
31439 * Promise that resolves when the key session has been created
31440 */
31441
31442
31443 var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
31444 var player = _ref.player,
31445 sourceKeySystems = _ref.sourceKeySystems,
31446 audioMedia = _ref.audioMedia,
31447 mainPlaylists = _ref.mainPlaylists;
31448
31449 if (!player.eme.initializeMediaKeys) {
31450 return Promise.resolve();
31451 } // TODO should all audio PSSH values be initialized for DRM?
31452 //
31453 // All unique video rendition pssh values are initialized for DRM, but here only
31454 // the initial audio playlist license is initialized. In theory, an encrypted
31455 // event should be fired if the user switches to an alternative audio playlist
31456 // where a license is required, but this case hasn't yet been tested. In addition, there
31457 // may be many alternate audio playlists unlikely to be used (e.g., multiple different
31458 // languages).
31459
31460
31461 var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
31462 var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
31463 var initializationFinishedPromises = [];
31464 var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
31465 // only place where it should not be deduped is for ms-prefixed APIs, but the early
31466 // return for IE11 above, and the existence of modern EME APIs in addition to
31467 // ms-prefixed APIs on Edge should prevent this from being a concern.
31468 // initializeMediaKeys also won't use the webkit-prefixed APIs.
31469
31470 keySystemsOptionsArr.forEach(function (keySystemsOptions) {
31471 keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
31472 player.tech_.one('keysessioncreated', resolve);
31473 }));
31474 initializationFinishedPromises.push(new Promise(function (resolve, reject) {
31475 player.eme.initializeMediaKeys({
31476 keySystems: keySystemsOptions
31477 }, function (err) {
31478 if (err) {
31479 reject(err);
31480 return;
31481 }
31482
31483 resolve();
31484 });
31485 }));
31486 }); // The reasons Promise.race is chosen over Promise.any:
31487 //
31488 // * Promise.any is only available in Safari 14+.
31489 // * None of these promises are expected to reject. If they do reject, it might be
31490 // better here for the race to surface the rejection, rather than mask it by using
31491 // Promise.any.
31492
31493 return Promise.race([// If a session was previously created, these will all finish resolving without
31494 // creating a new session, otherwise it will take until the end of all license
31495 // requests, which is why the key session check is used (to make setup much faster).
31496 Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
31497 Promise.race(keySessionCreatedPromises)]);
31498 };
31499 /**
31500 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
31501 * there are keySystems on the source, sets up source options to prepare the source for
31502 * eme.
31503 *
31504 * @param {Object} player
31505 * The player instance
31506 * @param {Object[]} sourceKeySystems
31507 * The key systems options from the player source
31508 * @param {Object} media
31509 * The active media playlist
31510 * @param {Object} [audioMedia]
31511 * The active audio media playlist (optional)
31512 *
31513 * @return {boolean}
31514 * Whether or not options were configured and EME is available
31515 */
31516
31517 var setupEmeOptions = function setupEmeOptions(_ref2) {
31518 var player = _ref2.player,
31519 sourceKeySystems = _ref2.sourceKeySystems,
31520 media = _ref2.media,
31521 audioMedia = _ref2.audioMedia;
31522 var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
31523
31524 if (!sourceOptions) {
31525 return false;
31526 }
31527
31528 player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
31529 // do nothing.
31530
31531 if (sourceOptions && !player.eme) {
31532 videojs__default["default"].log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
31533 return false;
31534 }
31535
31536 return true;
31537 };
31538
31539 var getVhsLocalStorage = function getVhsLocalStorage() {
31540 if (!window.localStorage) {
31541 return null;
31542 }
31543
31544 var storedObject = window.localStorage.getItem(LOCAL_STORAGE_KEY);
31545
31546 if (!storedObject) {
31547 return null;
31548 }
31549
31550 try {
31551 return JSON.parse(storedObject);
31552 } catch (e) {
31553 // someone may have tampered with the value
31554 return null;
31555 }
31556 };
31557
31558 var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
31559 if (!window.localStorage) {
31560 return false;
31561 }
31562
31563 var objectToStore = getVhsLocalStorage();
31564 objectToStore = objectToStore ? videojs__default["default"].mergeOptions(objectToStore, options) : options;
31565
31566 try {
31567 window.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
31568 } catch (e) {
31569 // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
31570 // storage is set to 0).
31571 // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
31572 // No need to perform any operation.
31573 return false;
31574 }
31575
31576 return objectToStore;
31577 };
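  // Editor's note: an illustrative sketch of the object persisted under the
  // 'videojs-vhs' localStorage key (see the 'bandwidthupdate' handler later in
  // this file); the numbers are examples only.
  var exampleUpdateVhsLocalStorage = function () {
    return updateVhsLocalStorage({
      bandwidth: 4194304, // bits per second
      throughput: 8388608 // bits per second
    });
    // returns the merged object that was stored, or false when localStorage is unavailable
  };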
31578 /**
31579 * Parses VHS-supported media types from data URIs. See
31580 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
31581 * for information on data URIs.
31582 *
31583 * @param {string} dataUri
31584 * The data URI
31585 *
31586 * @return {string|Object}
31587 * The parsed object/string, or the original string if no supported media type
31588 * was found
31589 */
31590
31591
31592 var expandDataUri = function expandDataUri(dataUri) {
31593 if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
31594 return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
31595 } // no known case for this data URI, return the string as-is
31596
31597
31598 return dataUri;
31599 };
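  // Editor's note: an illustrative sketch; the inline manifest body is a
  // made-up minimal example of the media type handled above.
  var exampleExpandDataUri = function () {
    return expandDataUri('data:application/vnd.videojs.vhs+json,{"uri":"manifest.m3u8"}');
    // => { uri: 'manifest.m3u8' }; any other data URI is returned unchanged
  };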
31600 /**
31601 * Whether the browser has built-in HLS support.
31602 */
31603
31604
31605 Vhs.supportsNativeHls = function () {
31606 if (!document || !document.createElement) {
31607 return false;
31608 }
31609
31610 var video = document.createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
31611
31612 if (!videojs__default["default"].getTech('Html5').isSupported()) {
31613 return false;
31614 } // HLS manifests can go by many mime-types
31615
31616
31617 var canPlay = [// Apple sanctioned
31618 'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
31619 'audio/mpegurl', // Very common
31620 'audio/x-mpegurl', // Very common
31621 'application/x-mpegurl', // Included for completeness
31622 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
31623 return canPlay.some(function (canItPlay) {
31624 return /maybe|probably/i.test(video.canPlayType(canItPlay));
31625 });
31626 }();
31627
31628 Vhs.supportsNativeDash = function () {
31629 if (!document || !document.createElement || !videojs__default["default"].getTech('Html5').isSupported()) {
31630 return false;
31631 }
31632
31633 return /maybe|probably/i.test(document.createElement('video').canPlayType('application/dash+xml'));
31634 }();
31635
31636 Vhs.supportsTypeNatively = function (type) {
31637 if (type === 'hls') {
31638 return Vhs.supportsNativeHls;
31639 }
31640
31641 if (type === 'dash') {
31642 return Vhs.supportsNativeDash;
31643 }
31644
31645 return false;
31646 };
31647 /**
31648 * HLS is a source handler, not a tech. Make sure attempts to use it
31649 * as one do not cause exceptions.
31650 */
31651
31652
31653 Vhs.isSupported = function () {
31654 return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
31655 };
31656
31657 var Component = videojs__default["default"].getComponent('Component');
31658 /**
31659 * The Vhs Handler object, where we orchestrate all of the parts
31660 * of HLS to interact with video.js
31661 *
31662 * @class VhsHandler
31663 * @extends videojs.Component
31664 * @param {Object} source the source object
31665 * @param {Tech} tech the parent tech object
31666 * @param {Object} options optional and required options
31667 */
31668
31669 var VhsHandler = /*#__PURE__*/function (_Component) {
31670 inheritsLoose(VhsHandler, _Component);
31671
31672 function VhsHandler(source, tech, options) {
31673 var _this;
31674
31675 _this = _Component.call(this, tech, videojs__default["default"].mergeOptions(options.hls, options.vhs)) || this;
31676
31677 if (options.hls && Object.keys(options.hls).length) {
31678 videojs__default["default"].log.warn('Using hls options is deprecated. Please rename `hls` to `vhs` in your options object.');
31679 } // if a tech-level `initialBandwidth` option was passed,
31680 // use it over the VHS-level `bandwidth` option
31681
31682
31683 if (typeof options.initialBandwidth === 'number') {
31684 _this.options_.bandwidth = options.initialBandwidth;
31685 }
31686
31687 _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated, but set up a reference to HLS for
31688 // backwards-compatibility
31689
31690 if (tech.options_ && tech.options_.playerId) {
31691 var _player = videojs__default["default"](tech.options_.playerId);
31692
31693 if (!_player.hasOwnProperty('hls')) {
31694 Object.defineProperty(_player, 'hls', {
31695 get: function get() {
31696 videojs__default["default"].log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
31697 tech.trigger({
31698 type: 'usage',
31699 name: 'hls-player-access'
31700 });
31701 return assertThisInitialized(_this);
31702 },
31703 configurable: true
31704 });
31705 }
31706
31707 if (!_player.hasOwnProperty('vhs')) {
31708 Object.defineProperty(_player, 'vhs', {
31709 get: function get() {
31710 videojs__default["default"].log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
31711 tech.trigger({
31712 type: 'usage',
31713 name: 'vhs-player-access'
31714 });
31715 return assertThisInitialized(_this);
31716 },
31717 configurable: true
31718 });
31719 }
31720
31721 if (!_player.hasOwnProperty('dash')) {
31722 Object.defineProperty(_player, 'dash', {
31723 get: function get() {
31724 videojs__default["default"].log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
31725 return assertThisInitialized(_this);
31726 },
31727 configurable: true
31728 });
31729 }
31730
31731 _this.player_ = _player;
31732 }
31733
31734 _this.tech_ = tech;
31735 _this.source_ = source;
31736 _this.stats = {};
31737 _this.ignoreNextSeekingEvent_ = false;
31738
31739 _this.setOptions_();
31740
31741 if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
31742 tech.overrideNativeAudioTracks(true);
31743 tech.overrideNativeVideoTracks(true);
31744 } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
31745 // overriding native HLS only works if audio tracks have been emulated
31746 // error early if we're misconfigured
31747 throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
31748 } // listen for fullscreenchange events for this player so that we
31749 // can adjust our quality selection quickly
31750
31751
31752 _this.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
31753 var fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;
31754
31755 if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
31756 _this.masterPlaylistController_.fastQualityChange_();
31757 } else {
31758 // When leaving fullscreen, since the in page pixel dimensions should be smaller
31759 // than full screen, see if there should be a rendition switch down to preserve
31760 // bandwidth.
31761 _this.masterPlaylistController_.checkABR_();
31762 }
31763 });
31764
31765 _this.on(_this.tech_, 'seeking', function () {
31766 if (this.ignoreNextSeekingEvent_) {
31767 this.ignoreNextSeekingEvent_ = false;
31768 return;
31769 }
31770
31771 this.setCurrentTime(this.tech_.currentTime());
31772 });
31773
31774 _this.on(_this.tech_, 'error', function () {
31775 // verify that the error was real and that playback has progressed far
31776 // enough for the masterPlaylistController (mpc) to exist.
31777 if (this.tech_.error() && this.masterPlaylistController_) {
31778 this.masterPlaylistController_.pauseLoading();
31779 }
31780 });
31781
31782 _this.on(_this.tech_, 'play', _this.play);
31783
31784 return _this;
31785 }
31786
31787 var _proto = VhsHandler.prototype;
31788
31789 _proto.setOptions_ = function setOptions_() {
31790 var _this2 = this;
31791
31792 // defaults
31793 this.options_.withCredentials = this.options_.withCredentials || false;
31794 this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
31795 this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
31796 this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
31797 this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
31798 this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
31799 this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
31800 this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
31801 this.options_.customTagParsers = this.options_.customTagParsers || [];
31802 this.options_.customTagMappers = this.options_.customTagMappers || [];
31803 this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
31804
31805 if (typeof this.options_.blacklistDuration !== 'number') {
31806 this.options_.blacklistDuration = 5 * 60;
31807 }
31808
31809 if (typeof this.options_.bandwidth !== 'number') {
31810 if (this.options_.useBandwidthFromLocalStorage) {
31811 var storedObject = getVhsLocalStorage();
31812
31813 if (storedObject && storedObject.bandwidth) {
31814 this.options_.bandwidth = storedObject.bandwidth;
31815 this.tech_.trigger({
31816 type: 'usage',
31817 name: 'vhs-bandwidth-from-local-storage'
31818 });
31819 this.tech_.trigger({
31820 type: 'usage',
31821 name: 'hls-bandwidth-from-local-storage'
31822 });
31823 }
31824
31825 if (storedObject && storedObject.throughput) {
31826 this.options_.throughput = storedObject.throughput;
31827 this.tech_.trigger({
31828 type: 'usage',
31829 name: 'vhs-throughput-from-local-storage'
31830 });
31831 this.tech_.trigger({
31832 type: 'usage',
31833 name: 'hls-throughput-from-local-storage'
31834 });
31835 }
31836 }
31837 } // if bandwidth was not set by options or pulled from local storage, start playlist
31838 // selection at a reasonable bandwidth
31839
31840
31841 if (typeof this.options_.bandwidth !== 'number') {
31842 this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
31843 } // Only honor enableLowInitialPlaylist if the bandwidth is still the initial
31844 // default; an explicitly configured bandwidth takes precedence over it
31845
31846
31847 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
31848
31849 ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
31850 if (typeof _this2.source_[option] !== 'undefined') {
31851 _this2.options_[option] = _this2.source_[option];
31852 }
31853 });
31854 this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
31855 this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
31856 }
31857 /**
31858 * called when player.src gets called, handle a new source
31859 *
31860 * @param {Object} src the source object to handle
31861 */
31862 ;
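  // Editor's note: an illustrative sketch of supplying the per-source options
  // listed in setOptions_ above on the object passed to player.src(); the URL
  // is an assumption.
  var exampleSrcLevelOptions = function (player) {
    player.src({
      src: 'https://example.com/vod/index.m3u8',
      type: 'application/x-mpegURL',
      withCredentials: true,
      cacheEncryptionKeys: true
    });
  };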
31863
31864 _proto.src = function src(_src, type) {
31865 var _this3 = this;
31866
31867 // do nothing if the src is falsy
31868 if (!_src) {
31869 return;
31870 }
31871
31872 this.setOptions_(); // add master playlist controller options
31873
31874 this.options_.src = expandDataUri(this.source_.src);
31875 this.options_.tech = this.tech_;
31876 this.options_.externVhs = Vhs;
31877 this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
31878
31879 this.options_.seekTo = function (time) {
31880 _this3.tech_.setCurrentTime(time);
31881 };
31882
31883 if (this.options_.smoothQualityChange) {
31884 videojs__default["default"].log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
31885 }
31886
31887 this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
31888 var playbackWatcherOptions = videojs__default["default"].mergeOptions({
31889 liveRangeSafeTimeDelta: SAFE_TIME_DELTA
31890 }, this.options_, {
31891 seekable: function seekable() {
31892 return _this3.seekable();
31893 },
31894 media: function media() {
31895 return _this3.masterPlaylistController_.media();
31896 },
31897 masterPlaylistController: this.masterPlaylistController_
31898 });
31899 this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
31900 this.masterPlaylistController_.on('error', function () {
31901 var player = videojs__default["default"].players[_this3.tech_.options_.playerId];
31902 var error = _this3.masterPlaylistController_.error;
31903
31904 if (typeof error === 'object' && !error.code) {
31905 error.code = 3;
31906 } else if (typeof error === 'string') {
31907 error = {
31908 message: error,
31909 code: 3
31910 };
31911 }
31912
31913 player.error(error);
31914 });
31915 var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
31916 // compatibility with < v2
31917
31918 this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
31919 this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
31920
31921 this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
31922 this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
31923 // controller. Using a custom property for backwards compatibility
31924 // with < v2
31925
31926 Object.defineProperties(this, {
31927 selectPlaylist: {
31928 get: function get() {
31929 return this.masterPlaylistController_.selectPlaylist;
31930 },
31931 set: function set(selectPlaylist) {
31932 this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
31933 }
31934 },
31935 throughput: {
31936 get: function get() {
31937 return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
31938 },
31939 set: function set(throughput) {
31940 this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
31941 // for the cumulative average
31942
31943 this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
31944 }
31945 },
31946 bandwidth: {
31947 get: function get() {
31948 var playerBandwidthEst = this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
31949 var networkInformation = window.navigator.connection || window.navigator.mozConnection || window.navigator.webkitConnection;
31950 var tenMbpsAsBitsPerSecond = 10e6;
31951
31952 if (this.options_.useNetworkInformationApi && networkInformation) {
31953 // downlink returns Mbps
31954 // https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
31955 var networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
31956 // estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
31957 // high quality streams are not filtered out.
31958
31959 if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
31960 playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
31961 } else {
31962 playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
31963 }
31964 }
31965
31966 return playerBandwidthEst;
31967 },
31968 set: function set(bandwidth) {
31969 this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
31970 // `count` is set to zero so that the current value of `rate` isn't included
31971 // in the cumulative average
31972
31973 this.masterPlaylistController_.mainSegmentLoader_.throughput = {
31974 rate: 0,
31975 count: 0
31976 };
31977 }
31978 },
31979
31980 /**
31981 * `systemBandwidth` is a combination of the bit-rates of two serial processes. The first
31982 * is the network bitrate provided by `bandwidth` and the second is the bitrate of
31983 * the entire process after that - decryption, transmuxing, and appending - provided
31984 * by `throughput`.
31985 *
31986 * Since the two processes are serial, the overall system bandwidth is given by:
31987 * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
31988 */
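      /*
       * Worked example (editor's note): with bandwidth = 2e6 bits/s and
       * throughput = 8e6 bits/s, systemBandwidth = 1 / (1/2e6 + 1/8e6) =
       * 1.6e6 bits/s, which is always at most the slower of the two stages.
       */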
31989 systemBandwidth: {
31990 get: function get() {
31991 var invBandwidth = 1 / (this.bandwidth || 1);
31992 var invThroughput;
31993
31994 if (this.throughput > 0) {
31995 invThroughput = 1 / this.throughput;
31996 } else {
31997 invThroughput = 0;
31998 }
31999
32000 var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
32001 return systemBitrate;
32002 },
32003 set: function set() {
32004 videojs__default["default"].log.error('The "systemBandwidth" property is read-only');
32005 }
32006 }
32007 });
32008
32009 if (this.options_.bandwidth) {
32010 this.bandwidth = this.options_.bandwidth;
32011 }
32012
32013 if (this.options_.throughput) {
32014 this.throughput = this.options_.throughput;
32015 }
32016
32017 Object.defineProperties(this.stats, {
32018 bandwidth: {
32019 get: function get() {
32020 return _this3.bandwidth || 0;
32021 },
32022 enumerable: true
32023 },
32024 mediaRequests: {
32025 get: function get() {
32026 return _this3.masterPlaylistController_.mediaRequests_() || 0;
32027 },
32028 enumerable: true
32029 },
32030 mediaRequestsAborted: {
32031 get: function get() {
32032 return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
32033 },
32034 enumerable: true
32035 },
32036 mediaRequestsTimedout: {
32037 get: function get() {
32038 return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
32039 },
32040 enumerable: true
32041 },
32042 mediaRequestsErrored: {
32043 get: function get() {
32044 return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
32045 },
32046 enumerable: true
32047 },
32048 mediaTransferDuration: {
32049 get: function get() {
32050 return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
32051 },
32052 enumerable: true
32053 },
32054 mediaBytesTransferred: {
32055 get: function get() {
32056 return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
32057 },
32058 enumerable: true
32059 },
32060 mediaSecondsLoaded: {
32061 get: function get() {
32062 return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
32063 },
32064 enumerable: true
32065 },
32066 mediaAppends: {
32067 get: function get() {
32068 return _this3.masterPlaylistController_.mediaAppends_() || 0;
32069 },
32070 enumerable: true
32071 },
32072 mainAppendsToLoadedData: {
32073 get: function get() {
32074 return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
32075 },
32076 enumerable: true
32077 },
32078 audioAppendsToLoadedData: {
32079 get: function get() {
32080 return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
32081 },
32082 enumerable: true
32083 },
32084 appendsToLoadedData: {
32085 get: function get() {
32086 return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
32087 },
32088 enumerable: true
32089 },
32090 timeToLoadedData: {
32091 get: function get() {
32092 return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
32093 },
32094 enumerable: true
32095 },
32096 buffered: {
32097 get: function get() {
32098 return timeRangesToArray(_this3.tech_.buffered());
32099 },
32100 enumerable: true
32101 },
32102 currentTime: {
32103 get: function get() {
32104 return _this3.tech_.currentTime();
32105 },
32106 enumerable: true
32107 },
32108 currentSource: {
32109 get: function get() {
32110 return _this3.tech_.currentSource_;
32111 },
32112 enumerable: true
32113 },
32114 currentTech: {
32115 get: function get() {
32116 return _this3.tech_.name_;
32117 },
32118 enumerable: true
32119 },
32120 duration: {
32121 get: function get() {
32122 return _this3.tech_.duration();
32123 },
32124 enumerable: true
32125 },
32126 master: {
32127 get: function get() {
32128 return _this3.playlists.master;
32129 },
32130 enumerable: true
32131 },
32132 playerDimensions: {
32133 get: function get() {
32134 return _this3.tech_.currentDimensions();
32135 },
32136 enumerable: true
32137 },
32138 seekable: {
32139 get: function get() {
32140 return timeRangesToArray(_this3.tech_.seekable());
32141 },
32142 enumerable: true
32143 },
32144 timestamp: {
32145 get: function get() {
32146 return Date.now();
32147 },
32148 enumerable: true
32149 },
32150 videoPlaybackQuality: {
32151 get: function get() {
32152 return _this3.tech_.getVideoPlaybackQuality();
32153 },
32154 enumerable: true
32155 }
32156 });
32157 this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
32158 this.tech_.on('bandwidthupdate', function () {
32159 if (_this3.options_.useBandwidthFromLocalStorage) {
32160 updateVhsLocalStorage({
32161 bandwidth: _this3.bandwidth,
32162 throughput: Math.round(_this3.throughput)
32163 });
32164 }
32165 });
32166 this.masterPlaylistController_.on('selectedinitialmedia', function () {
32167 // Add the manual rendition mix-in to VhsHandler
32168 renditionSelectionMixin(_this3);
32169 });
32170 this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
32171 _this3.setupEme_();
32172 }); // the bandwidth of the primary segment loader is our best
32173 // estimate of overall bandwidth
32174
32175 this.on(this.masterPlaylistController_, 'progress', function () {
32176 this.tech_.trigger('progress');
32177 }); // In the live case, we need to ignore the very first `seeking` event since
32178 // that will be the result of the seek-to-live behavior
32179
32180 this.on(this.masterPlaylistController_, 'firstplay', function () {
32181 this.ignoreNextSeekingEvent_ = true;
32182 });
32183 this.setupQualityLevels_(); // do nothing if the tech has been disposed already
32184 // this can occur if someone sets the src in player.ready(), for instance
32185
32186 if (!this.tech_.el()) {
32187 return;
32188 }
32189
32190 this.mediaSourceUrl_ = window.URL.createObjectURL(this.masterPlaylistController_.mediaSource);
32191 this.tech_.src(this.mediaSourceUrl_);
32192 };
32193
32194 _proto.createKeySessions_ = function createKeySessions_() {
32195 var _this4 = this;
32196
32197 var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
32198 this.logger_('waiting for EME key session creation');
32199 waitForKeySessionCreation({
32200 player: this.player_,
32201 sourceKeySystems: this.source_.keySystems,
32202 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
32203 mainPlaylists: this.playlists.master.playlists
32204 }).then(function () {
32205 _this4.logger_('created EME key session');
32206
32207 _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
32208 }).catch(function (err) {
32209 _this4.logger_('error while creating EME key session', err);
32210
32211 _this4.player_.error({
32212 message: 'Failed to initialize media keys for EME',
32213 code: 3
32214 });
32215 });
32216 };
32217
32218 _proto.handleWaitingForKey_ = function handleWaitingForKey_() {
32219 // If waitingforkey is fired, it's possible that the data that's necessary to retrieve
32220 // the key is in the manifest. While this should've happened on initial source load, it
32221 // may happen again in live streams where the keys change, and the manifest info
32222 // reflects the update.
32223 //
32224 // Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's
32225 // already requested keys for, we don't have to worry about this generating extraneous
32226 // requests.
32227 this.logger_('waitingforkey fired, attempting to create any new key sessions');
32228 this.createKeySessions_();
32229 }
32230 /**
32231 * If necessary and EME is available, sets up EME options and waits for key session
32232 * creation.
32233 *
32234 * This function also updates the source updater so that it can be used, as for some
32235 * browsers, EME must be configured before content is appended (if appending unencrypted
32236 * content before encrypted content).
32237 */
32238 ;
32239
32240 _proto.setupEme_ = function setupEme_() {
32241 var _this5 = this;
32242
32243 var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
32244 var didSetupEmeOptions = setupEmeOptions({
32245 player: this.player_,
32246 sourceKeySystems: this.source_.keySystems,
32247 media: this.playlists.media(),
32248 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
32249 });
32250 this.player_.tech_.on('keystatuschange', function (e) {
32251 if (e.status !== 'output-restricted') {
32252 return;
32253 }
32254
32255 var masterPlaylist = _this5.masterPlaylistController_.master();
32256
32257 if (!masterPlaylist || !masterPlaylist.playlists) {
32258 return;
32259 }
32260
32261 var excludedHDPlaylists = []; // Assume all HD streams are unplayable and exclude them from ABR selection
32262
32263 masterPlaylist.playlists.forEach(function (playlist) {
32264 if (playlist && playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height >= 720) {
32265 if (!playlist.excludeUntil || playlist.excludeUntil < Infinity) {
32266 playlist.excludeUntil = Infinity;
32267 excludedHDPlaylists.push(playlist);
32268 }
32269 }
32270 });
32271
32272 if (excludedHDPlaylists.length) {
32273 var _videojs$log;
32274
32275 (_videojs$log = videojs__default["default"].log).warn.apply(_videojs$log, ['DRM keystatus changed to "output-restricted." Removing the following HD playlists ' + 'that will most likely fail to play and clearing the buffer. ' + 'This may be due to HDCP restrictions on the stream and the capabilities of the current device.'].concat(excludedHDPlaylists)); // Clear the buffer before switching playlists, since it may already contain unplayable segments
32276
32277
32278 _this5.masterPlaylistController_.fastQualityChange_();
32279 }
32280 });
32281 this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
32282 this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_); // In IE11 this is too early to initialize media keys, and IE11 does not support
32283 // promises.
32284
32285 if (videojs__default["default"].browser.IE_VERSION === 11 || !didSetupEmeOptions) {
32286 // If EME options were not set up, we've done all we could to initialize EME.
32287 this.masterPlaylistController_.sourceUpdater_.initializedEme();
32288 return;
32289 }
32290
32291 this.createKeySessions_();
32292 }
32293 /**
32294 * Initializes the quality levels and sets listeners to update them.
32295 *
32296 * @method setupQualityLevels_
32297 * @private
32298 */
32299 ;
32300
32301 _proto.setupQualityLevels_ = function setupQualityLevels_() {
32302 var _this6 = this;
32303
32304 var player = videojs__default["default"].players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
32305 // or qualityLevels_ listeners have already been set up, do nothing.
32306
32307 if (!player || !player.qualityLevels || this.qualityLevels_) {
32308 return;
32309 }
32310
32311 this.qualityLevels_ = player.qualityLevels();
32312 this.masterPlaylistController_.on('selectedinitialmedia', function () {
32313 handleVhsLoadedMetadata(_this6.qualityLevels_, _this6);
32314 });
32315 this.playlists.on('mediachange', function () {
32316 handleVhsMediaChange(_this6.qualityLevels_, _this6.playlists);
32317 });
32318 }
32319 /**
32320 * return the versions of VHS and the packages it depends on
32321 */
32322 ;
32323
32324 VhsHandler.version = function version$5() {
32325 return {
32326 '@videojs/http-streaming': version$4,
32327 'mux.js': version$3,
32328 'mpd-parser': version$2,
32329 'm3u8-parser': version$1,
32330 'aes-decrypter': version
32331 };
32332 }
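  // Editor's note: with the version constants defined earlier in this file, this
  // returns { '@videojs/http-streaming': '2.16.0', 'mux.js': '6.0.1',
  // 'mpd-parser': '0.22.1', 'm3u8-parser': '4.8.0', 'aes-decrypter': '3.1.3' }.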
32333 /**
32334 * return the version
32335 */
32336 ;
32337
32338 _proto.version = function version() {
32339 return this.constructor.version();
32340 };
32341
32342 _proto.canChangeType = function canChangeType() {
32343 return SourceUpdater.canChangeType();
32344 }
32345 /**
32346 * Begin playing the video.
32347 */
32348 ;
32349
32350 _proto.play = function play() {
32351 this.masterPlaylistController_.play();
32352 }
32353 /**
32354 * a wrapper around the function in MasterPlaylistController
32355 */
32356 ;
32357
32358 _proto.setCurrentTime = function setCurrentTime(currentTime) {
32359 this.masterPlaylistController_.setCurrentTime(currentTime);
32360 }
32361 /**
32362 * a wrapper around the function in MasterPlaylistController
32363 */
32364 ;
32365
32366 _proto.duration = function duration() {
32367 return this.masterPlaylistController_.duration();
32368 }
32369 /**
32370 * a wrapper around the function in MasterPlaylistController
32371 */
32372 ;
32373
32374 _proto.seekable = function seekable() {
32375 return this.masterPlaylistController_.seekable();
32376 }
32377 /**
32378 * Abort all outstanding work and cleanup.
32379 */
32380 ;
32381
32382 _proto.dispose = function dispose() {
32383 if (this.playbackWatcher_) {
32384 this.playbackWatcher_.dispose();
32385 }
32386
32387 if (this.masterPlaylistController_) {
32388 this.masterPlaylistController_.dispose();
32389 }
32390
32391 if (this.qualityLevels_) {
32392 this.qualityLevels_.dispose();
32393 }
32394
32395 if (this.player_) {
32396 delete this.player_.vhs;
32397 delete this.player_.dash;
32398 delete this.player_.hls;
32399 }
32400
32401 if (this.tech_ && this.tech_.vhs) {
32402 delete this.tech_.vhs;
32403 } // don't check this.tech_.hls as it will log a deprecation warning
32404
32405
32406 if (this.tech_) {
32407 delete this.tech_.hls;
32408 }
32409
32410 if (this.mediaSourceUrl_ && window.URL.revokeObjectURL) {
32411 window.URL.revokeObjectURL(this.mediaSourceUrl_);
32412 this.mediaSourceUrl_ = null;
32413 }
32414
32415 if (this.tech_) {
32416 this.tech_.off('waitingforkey', this.handleWaitingForKey_);
32417 }
32418
32419 _Component.prototype.dispose.call(this);
32420 };
32421
32422 _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
32423 return getProgramTime({
32424 playlist: this.masterPlaylistController_.media(),
32425 time: time,
32426 callback: callback
32427 });
32428 } // the player must be playing before calling this
32429 ;
32430
32431 _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
32432 if (pauseAfterSeek === void 0) {
32433 pauseAfterSeek = true;
32434 }
32435
32436 if (retryCount === void 0) {
32437 retryCount = 2;
32438 }
32439
32440 return seekToProgramTime({
32441 programTime: programTime,
32442 playlist: this.masterPlaylistController_.media(),
32443 retryCount: retryCount,
32444 pauseAfterSeek: pauseAfterSeek,
32445 seekTo: this.options_.seekTo,
32446 tech: this.options_.tech,
32447 callback: callback
32448 });
32449 };
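    // Editor's note: an illustrative sketch of calling this from application
    // code via the tech's vhs handler. The timestamp is an assumption, the
    // player must already be playing (see the note above), and the
    // (err, newTime) callback shape is assumed from the underlying util.
    var exampleSeekToProgramTime = function (player) {
      player.tech().vhs.seekToProgramTime('2021-01-01T00:00:30.000Z', function (err, newTime) {
        if (err) {
          videojs__default["default"].log.warn('seekToProgramTime failed: ' + err.message);
        }
      });
    };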
32450
32451 return VhsHandler;
32452 }(Component);
32453 /**
32454 * The Source Handler object, which informs video.js what additional
32455 * MIME types are supported and sets up playback. It is registered
32456 * automatically to the appropriate tech based on the capabilities of
32457 * the browser it is running in. It is not necessary to use or modify
32458 * this object in normal usage.
32459 */
32460
32461
32462 var VhsSourceHandler = {
32463 name: 'videojs-http-streaming',
32464 VERSION: version$4,
32465 canHandleSource: function canHandleSource(srcObj, options) {
32466 if (options === void 0) {
32467 options = {};
32468 }
32469
32470 var localOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
32471 return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
32472 },
32473 handleSource: function handleSource(source, tech, options) {
32474 if (options === void 0) {
32475 options = {};
32476 }
32477
32478 var localOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
32479 tech.vhs = new VhsHandler(source, tech, localOptions);
32480
32481 if (!videojs__default["default"].hasOwnProperty('hls')) {
32482 Object.defineProperty(tech, 'hls', {
32483 get: function get() {
32484 videojs__default["default"].log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
32485 return tech.vhs;
32486 },
32487 configurable: true
32488 });
32489 }
32490
32491 tech.vhs.xhr = xhrFactory();
32492 tech.vhs.src(source.src, source.type);
32493 return tech.vhs;
32494 },
32495 canPlayType: function canPlayType(type, options) {
32496 var simpleType = simpleTypeFromSourceType(type);
32497
32498 if (!simpleType) {
32499 return '';
32500 }
32501
32502 var overrideNative = VhsSourceHandler.getOverrideNative(options);
32503 var supportsTypeNatively = Vhs.supportsTypeNatively(simpleType);
32504 var canUseMsePlayback = !supportsTypeNatively || overrideNative;
32505 return canUseMsePlayback ? 'maybe' : '';
32506 },
32507 getOverrideNative: function getOverrideNative(options) {
32508 if (options === void 0) {
32509 options = {};
32510 }
32511
32512 var _options = options,
32513 _options$vhs = _options.vhs,
32514 vhs = _options$vhs === void 0 ? {} : _options$vhs,
32515 _options$hls = _options.hls,
32516 hls = _options$hls === void 0 ? {} : _options$hls;
32517 var defaultOverrideNative = !(videojs__default["default"].browser.IS_ANY_SAFARI || videojs__default["default"].browser.IS_IOS);
32518 var _vhs$overrideNative = vhs.overrideNative,
32519 overrideNative = _vhs$overrideNative === void 0 ? defaultOverrideNative : _vhs$overrideNative;
32520 var _hls$overrideNative = hls.overrideNative,
32521 legacyOverrideNative = _hls$overrideNative === void 0 ? false : _hls$overrideNative;
32522 return legacyOverrideNative || overrideNative;
32523 }
32524 };
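  // Editor's note: an illustrative sketch of forcing MSE-based playback via the
  // vhs tech options read by getOverrideNative above; the player id is an
  // assumption, and the html5 nesting reflects how video.js passes tech options.
  var exampleOverrideNative = function () {
    return videojs__default["default"]('example-player-id', {
      html5: {
        vhs: {
          overrideNative: true
        }
      }
    });
  };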
32525 /**
32526 * Check to see if the native MediaSource object exists and supports
32527 * an MP4 container with both H.264 video and AAC-LC audio.
32528 *
32529 * @return {boolean} if native media sources are supported
32530 */
32531
32532 var supportsNativeMediaSources = function supportsNativeMediaSources() {
32533 return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
32534 }; // register source handlers with the appropriate techs
32535
32536
32537 if (supportsNativeMediaSources()) {
32538 videojs__default["default"].getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
32539 }
32540
32541 videojs__default["default"].VhsHandler = VhsHandler;
32542 Object.defineProperty(videojs__default["default"], 'HlsHandler', {
32543 get: function get() {
32544 videojs__default["default"].log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
32545 return VhsHandler;
32546 },
32547 configurable: true
32548 });
32549 videojs__default["default"].VhsSourceHandler = VhsSourceHandler;
32550 Object.defineProperty(videojs__default["default"], 'HlsSourceHandler', {
32551 get: function get() {
32552 videojs__default["default"].log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
32553 return VhsSourceHandler;
32554 },
32555 configurable: true
32556 });
32557 videojs__default["default"].Vhs = Vhs;
32558 Object.defineProperty(videojs__default["default"], 'Hls', {
32559 get: function get() {
32560 videojs__default["default"].log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
32561 return Vhs;
32562 },
32563 configurable: true
32564 });
32565
32566 if (!videojs__default["default"].use) {
32567 videojs__default["default"].registerComponent('Hls', Vhs);
32568 videojs__default["default"].registerComponent('Vhs', Vhs);
32569 }
32570
32571 videojs__default["default"].options.vhs = videojs__default["default"].options.vhs || {};
32572 videojs__default["default"].options.hls = videojs__default["default"].options.hls || {};
32573
32574 if (!videojs__default["default"].getPlugin || !videojs__default["default"].getPlugin('reloadSourceOnError')) {
32575 var registerPlugin = videojs__default["default"].registerPlugin || videojs__default["default"].plugin;
32576 registerPlugin('reloadSourceOnError', reloadSourceOnError);
32577 }
32578
32579 exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
32580 exports.Vhs = Vhs;
32581 exports.VhsHandler = VhsHandler;
32582 exports.VhsSourceHandler = VhsSourceHandler;
32583 exports.emeKeySystems = emeKeySystems;
32584 exports.expandDataUri = expandDataUri;
32585 exports.getAllPsshKeySystemsOptions = getAllPsshKeySystemsOptions;
32586 exports.setupEmeOptions = setupEmeOptions;
32587 exports.simpleTypeFromSourceType = simpleTypeFromSourceType;
32588 exports.waitForKeySessionCreation = waitForKeySessionCreation;
32589
32590 Object.defineProperty(exports, '__esModule', { value: true });
32591
32592}));