import { r as registerInstance, h, H as Host, g as getElement, c as createEvent } from './index-8809c729.js';
import { a as createCommonjsModule, b as commonjsGlobal, g as getDefaultExportFromCjs, c as classnames } from './index-1d8e8acd.js';
import { t as throttle } from './index-cad8203e.js';

const formatTime = (time) => {
    if (!time)
        return '';
    const sec = Math.round(time % 60);
    const min = Math.round((time - sec) / 60);
    return `${min < 10 ? `0${min}` : min}:${sec < 10 ? `0${sec}` : sec}`;
};
const calcDist = (x, y) => {
    return Math.sqrt(Math.pow(x, 2) + Math.pow(y, 2));
};
const normalizeNumber = (number) => {
    return Math.max(-1, Math.min(number, 1));
};
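// Illustrative usage of the helpers above (a minimal sketch, not executed here):
//   formatTime(75)        // -> "01:15"  (zero-padded mm:ss)
//   formatTime(0)         // -> ""       (falsy input yields an empty string)
//   calcDist(3, 4)        // -> 5        (Euclidean distance from the origin)
//   normalizeNumber(2.5)  // -> 1        (clamped to the [-1, 1] range)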
let scene = 'default';
const screenFn = (function () {
    let val;
    const fnMap = [
        [
            'requestFullscreen',
            'exitFullscreen',
            'fullscreenElement',
            'fullscreenEnabled',
            'fullscreenchange',
            'fullscreenerror'
        ],
        // New WebKit
        [
            'webkitRequestFullscreen',
            'webkitExitFullscreen',
            'webkitFullscreenElement',
            'webkitFullscreenEnabled',
            'webkitfullscreenchange',
            'webkitfullscreenerror'
        ],
        // Old WebKit
        [
            'webkitRequestFullScreen',
            'webkitCancelFullScreen',
            'webkitCurrentFullScreenElement',
            'webkitCancelFullScreen',
            'webkitfullscreenchange',
            'webkitfullscreenerror'
        ],
        [
            'mozRequestFullScreen',
            'mozCancelFullScreen',
            'mozFullScreenElement',
            'mozFullScreenEnabled',
            'mozfullscreenchange',
            'mozfullscreenerror'
        ],
        [
            'msRequestFullscreen',
            'msExitFullscreen',
            'msFullscreenElement',
            'msFullscreenEnabled',
            'MSFullscreenChange',
            'MSFullscreenError'
        ]
    ];
    var defaultIOSMap = [
        'webkitEnterFullscreen',
        'webkitExitFullscreen',
        'webkitCurrentFullScreenElement',
        'webkitSupportsFullscreen',
        'fullscreenchange',
        'fullscreenerror'
    ];
    let i = 0;
    const l = fnMap.length;
    const ret = {};
    // This for loop essentially checks the current document object for the property/methods above.
    for (; i < l; i++) {
        val = fnMap[i];
        if (val && val[1] in document) {
            for (i = 0; i < val.length; i++) {
                ret[fnMap[0][i]] = val[i];
            }
            return ret;
        }
    }
    if (!ret[fnMap[0][0]]) {
        scene = 'iOS';
        // Reached when none of the fullscreen APIs above were detected.
        // On iOS, the 'webkitEnterFullscreen' property is not exposed `in document`, but video elements can still use it to enter fullscreen.
        // ref: https://developer.apple.com/documentation/webkitjs/htmlvideoelement/1633500-webkitenterfullscreen
        for (i = 0; i < defaultIOSMap.length; i++) {
            ret[fnMap[0][i]] = defaultIOSMap[i];
        }
    }
    // If it doesn't find any of them, this whole function returns {}
    // and the fn variable is set to this returned value.
    return ret;
})();
const isHls = url => /\.(m3u8)($|\?)/i.test(url);

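// A hedged sketch of how the detected fullscreen map and isHls can be consumed
// (illustrative only; which vendor-prefixed names are present depends on the browser,
// and onChange is a hypothetical handler):
//   const video = document.querySelector('video');
//   if (screenFn.requestFullscreen) {
//     video[screenFn.requestFullscreen]();                              // e.g. calls webkitRequestFullscreen
//     document.addEventListener(screenFn.fullscreenchange, onChange);   // vendor-specific event name
//   }
//   isHls('https://example.com/live/playlist.m3u8')   // -> true
//   isHls('https://example.com/video.mp4')            // -> false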
let VideoControl = class {
    constructor(hostRef) {
        registerInstance(this, hostRef);
        this.visible = false;
        this.isDraggingProgressBall = false;
        this.percentage = 0;
        this.progressDimensions = {
            left: 0,
            width: 0
        };
        this.calcPercentage = (pageX) => {
            let pos = pageX - this.progressDimensions.left;
            pos = Math.max(pos, 0);
            pos = Math.min(pos, this.progressDimensions.width);
            return pos / this.progressDimensions.width;
        };
        this.onDragProgressBallStart = () => {
            this.isDraggingProgressBall = true;
            this.hideControlsTimer && clearTimeout(this.hideControlsTimer);
        };
        this.onClickProgress = (e) => {
            e.stopPropagation();
            const percentage = this.calcPercentage(e.pageX);
            this.seekFunc(percentage * this.duration);
            this.toggleVisibility(true);
        };
    }
    onDocumentTouchMove(e) {
        if (!this.isDraggingProgressBall)
            return;
        const touchX = e.touches[0].pageX;
        this.percentage = this.calcPercentage(touchX);
        this.setProgressBall(this.percentage);
        this.setCurrentTime(this.percentage * this.duration);
    }
    onDocumentTouchEnd() {
        if (!this.isDraggingProgressBall)
            return;
        this.isDraggingProgressBall = false;
        this.seekFunc(this.percentage * this.duration);
        this.toggleVisibility(true);
    }
    async setProgressBall(percentage) {
        if (this.progressBallRef) {
            this.progressBallRef.style.left = `${percentage * 100}%`;
        }
    }
    async toggleVisibility(nextVisible) {
        const visible = nextVisible === undefined ? !this.visible : nextVisible;
        if (visible) {
            this.hideControlsTimer && clearTimeout(this.hideControlsTimer);
            if (this.isPlaying) {
                this.hideControlsTimer = setTimeout(() => {
                    this.toggleVisibility(false);
                }, 2000);
            }
            this.el.style.visibility = 'visible';
        }
        else {
            this.el.style.visibility = 'hidden';
        }
        this.visible = !!visible;
    }
    async getIsDraggingProgressBall() {
        return this.isDraggingProgressBall;
    }
    async setCurrentTime(time) {
        this.currentTimeRef.innerHTML = formatTime(time);
    }
    render() {
        const { controls, currentTime, duration, isPlaying, pauseFunc, playFunc, showPlayBtn, showProgress } = this;
        const formattedDuration = formatTime(duration);
        let playBtn;
        if (!showPlayBtn) {
            playBtn = null;
        }
        else if (isPlaying) {
            playBtn = h("div", { class: 'taro-video-control-button taro-video-control-button-pause', onClick: pauseFunc });
        }
        else {
            playBtn = h("div", { class: 'taro-video-control-button taro-video-control-button-play', onClick: playFunc });
        }
        return (h(Host, { class: 'taro-video-bar taro-video-bar-full' }, controls && (h("div", { class: 'taro-video-controls' }, playBtn, showProgress && (h("div", { class: 'taro-video-current-time', ref: dom => (this.currentTimeRef = dom) }, formatTime(currentTime))), showProgress && (h("div", { class: 'taro-video-progress-container', onClick: this.onClickProgress }, h("div", { class: 'taro-video-progress', ref: ref => {
                if (!ref)
                    return;
                const rect = ref.getBoundingClientRect();
                this.progressDimensions.left = rect.left;
                this.progressDimensions.width = rect.width;
            } }, h("div", { class: 'taro-video-progress-buffered', style: { width: '100%' } }), h("div", { class: 'taro-video-ball', ref: dom => (this.progressBallRef = dom), onTouchStart: this.onDragProgressBallStart, style: {
                left: `${formattedDuration ? (this.currentTime / duration) * 100 : 0}%`
            } }, h("div", { class: 'taro-video-inner' }))))), showProgress && h("div", { class: 'taro-video-duration' }, formattedDuration))), h("slot", null)));
    }
    get el() { return getElement(this); }
};

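// Rough walk-through of how the control above maps a pointer position to a seek time
// (assumed numbers for illustration; the real work is calcPercentage + seekFunc):
//   progressDimensions = { left: 40, width: 300 }   // measured via getBoundingClientRect()
//   pageX = 190  ->  pos = 190 - 40 = 150  ->  percentage = 150 / 300 = 0.5
//   seekFunc(0.5 * duration)                        // e.g. 0.5 * 120s = 60s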
196var hls = createCommonjsModule(function (module, exports) {
197typeof window !== "undefined" &&
198(function webpackUniversalModuleDefinition(root, factory) {
199 module.exports = factory();
200})(commonjsGlobal, function() {
201return /******/ (function(modules) { // webpackBootstrap
202/******/ // The module cache
203/******/ var installedModules = {};
204/******/
205/******/ // The require function
206/******/ function __webpack_require__(moduleId) {
207/******/
208/******/ // Check if module is in cache
209/******/ if(installedModules[moduleId]) {
210/******/ return installedModules[moduleId].exports;
211/******/ }
212/******/ // Create a new module (and put it into the cache)
213/******/ var module = installedModules[moduleId] = {
214/******/ i: moduleId,
215/******/ l: false,
216/******/ exports: {}
217/******/ };
218/******/
219/******/ // Execute the module function
220/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
221/******/
222/******/ // Flag the module as loaded
223/******/ module.l = true;
224/******/
225/******/ // Return the exports of the module
226/******/ return module.exports;
227/******/ }
228/******/
229/******/
230/******/ // expose the modules object (__webpack_modules__)
231/******/ __webpack_require__.m = modules;
232/******/
233/******/ // expose the module cache
234/******/ __webpack_require__.c = installedModules;
235/******/
236/******/ // define getter function for harmony exports
237/******/ __webpack_require__.d = function(exports, name, getter) {
238/******/ if(!__webpack_require__.o(exports, name)) {
239/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
240/******/ }
241/******/ };
242/******/
243/******/ // define __esModule on exports
244/******/ __webpack_require__.r = function(exports) {
245/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
246/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
247/******/ }
248/******/ Object.defineProperty(exports, '__esModule', { value: true });
249/******/ };
250/******/
251/******/ // create a fake namespace object
252/******/ // mode & 1: value is a module id, require it
253/******/ // mode & 2: merge all properties of value into the ns
254/******/ // mode & 4: return value when already ns object
255/******/ // mode & 8|1: behave like require
256/******/ __webpack_require__.t = function(value, mode) {
257/******/ if(mode & 1) value = __webpack_require__(value);
258/******/ if(mode & 8) return value;
259/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
260/******/ var ns = Object.create(null);
261/******/ __webpack_require__.r(ns);
262/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
263/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
264/******/ return ns;
265/******/ };
266/******/
267/******/ // getDefaultExport function for compatibility with non-harmony modules
268/******/ __webpack_require__.n = function(module) {
269/******/ var getter = module && module.__esModule ?
270/******/ function getDefault() { return module['default']; } :
271/******/ function getModuleExports() { return module; };
272/******/ __webpack_require__.d(getter, 'a', getter);
273/******/ return getter;
274/******/ };
275/******/
276/******/ // Object.prototype.hasOwnProperty.call
277/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
278/******/
279/******/ // __webpack_public_path__
280/******/ __webpack_require__.p = "/dist/";
281/******/
282/******/
283/******/ // Load entry module and return exports
284/******/ return __webpack_require__(__webpack_require__.s = "./src/hls.ts");
285/******/ })
286/************************************************************************/
287/******/ ({
288
289/***/ "./node_modules/eventemitter3/index.js":
290/*!*********************************************!*\
291 !*** ./node_modules/eventemitter3/index.js ***!
292 \*********************************************/
293/*! no static exports found */
294/***/ (function(module, exports, __webpack_require__) {
295
296
297var has = Object.prototype.hasOwnProperty
298 , prefix = '~';
299
300/**
301 * Constructor to create a storage for our `EE` objects.
302 * An `Events` instance is a plain object whose properties are event names.
303 *
304 * @constructor
305 * @private
306 */
307function Events() {}
308
309//
310// We try to not inherit from `Object.prototype`. In some engines creating an
311// instance in this way is faster than calling `Object.create(null)` directly.
312// If `Object.create(null)` is not supported we prefix the event names with a
313// character to make sure that the built-in object properties are not
314// overridden or used as an attack vector.
315//
316if (Object.create) {
317 Events.prototype = Object.create(null);
318
319 //
320 // This hack is needed because the `__proto__` property is still inherited in
321 // some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5.
322 //
323 if (!new Events().__proto__) prefix = false;
324}
325
326/**
327 * Representation of a single event listener.
328 *
329 * @param {Function} fn The listener function.
330 * @param {*} context The context to invoke the listener with.
331 * @param {Boolean} [once=false] Specify if the listener is a one-time listener.
332 * @constructor
333 * @private
334 */
335function EE(fn, context, once) {
336 this.fn = fn;
337 this.context = context;
338 this.once = once || false;
339}
340
341/**
342 * Add a listener for a given event.
343 *
344 * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
345 * @param {(String|Symbol)} event The event name.
346 * @param {Function} fn The listener function.
347 * @param {*} context The context to invoke the listener with.
348 * @param {Boolean} once Specify if the listener is a one-time listener.
349 * @returns {EventEmitter}
350 * @private
351 */
352function addListener(emitter, event, fn, context, once) {
353 if (typeof fn !== 'function') {
354 throw new TypeError('The listener must be a function');
355 }
356
357 var listener = new EE(fn, context || emitter, once)
358 , evt = prefix ? prefix + event : event;
359
360 if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++;
361 else if (!emitter._events[evt].fn) emitter._events[evt].push(listener);
362 else emitter._events[evt] = [emitter._events[evt], listener];
363
364 return emitter;
365}
366
367/**
368 * Clear event by name.
369 *
370 * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
371 * @param {(String|Symbol)} evt The Event name.
372 * @private
373 */
374function clearEvent(emitter, evt) {
375 if (--emitter._eventsCount === 0) emitter._events = new Events();
376 else delete emitter._events[evt];
377}
378
379/**
380 * Minimal `EventEmitter` interface that is molded against the Node.js
381 * `EventEmitter` interface.
382 *
383 * @constructor
384 * @public
385 */
386function EventEmitter() {
387 this._events = new Events();
388 this._eventsCount = 0;
389}
390
391/**
392 * Return an array listing the events for which the emitter has registered
393 * listeners.
394 *
395 * @returns {Array}
396 * @public
397 */
398EventEmitter.prototype.eventNames = function eventNames() {
399 var names = []
400 , events
401 , name;
402
403 if (this._eventsCount === 0) return names;
404
405 for (name in (events = this._events)) {
406 if (has.call(events, name)) names.push(prefix ? name.slice(1) : name);
407 }
408
409 if (Object.getOwnPropertySymbols) {
410 return names.concat(Object.getOwnPropertySymbols(events));
411 }
412
413 return names;
414};
415
416/**
417 * Return the listeners registered for a given event.
418 *
419 * @param {(String|Symbol)} event The event name.
420 * @returns {Array} The registered listeners.
421 * @public
422 */
423EventEmitter.prototype.listeners = function listeners(event) {
424 var evt = prefix ? prefix + event : event
425 , handlers = this._events[evt];
426
427 if (!handlers) return [];
428 if (handlers.fn) return [handlers.fn];
429
430 for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {
431 ee[i] = handlers[i].fn;
432 }
433
434 return ee;
435};
436
437/**
438 * Return the number of listeners listening to a given event.
439 *
440 * @param {(String|Symbol)} event The event name.
441 * @returns {Number} The number of listeners.
442 * @public
443 */
444EventEmitter.prototype.listenerCount = function listenerCount(event) {
445 var evt = prefix ? prefix + event : event
446 , listeners = this._events[evt];
447
448 if (!listeners) return 0;
449 if (listeners.fn) return 1;
450 return listeners.length;
451};
452
453/**
454 * Calls each of the listeners registered for a given event.
455 *
456 * @param {(String|Symbol)} event The event name.
457 * @returns {Boolean} `true` if the event had listeners, else `false`.
458 * @public
459 */
460EventEmitter.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {
461 var evt = prefix ? prefix + event : event;
462
463 if (!this._events[evt]) return false;
464
465 var listeners = this._events[evt]
466 , len = arguments.length
467 , args
468 , i;
469
470 if (listeners.fn) {
471 if (listeners.once) this.removeListener(event, listeners.fn, undefined, true);
472
473 switch (len) {
474 case 1: return listeners.fn.call(listeners.context), true;
475 case 2: return listeners.fn.call(listeners.context, a1), true;
476 case 3: return listeners.fn.call(listeners.context, a1, a2), true;
477 case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true;
478 case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;
479 case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;
480 }
481
482 for (i = 1, args = new Array(len -1); i < len; i++) {
483 args[i - 1] = arguments[i];
484 }
485
486 listeners.fn.apply(listeners.context, args);
487 } else {
488 var length = listeners.length
489 , j;
490
491 for (i = 0; i < length; i++) {
492 if (listeners[i].once) this.removeListener(event, listeners[i].fn, undefined, true);
493
494 switch (len) {
495 case 1: listeners[i].fn.call(listeners[i].context); break;
496 case 2: listeners[i].fn.call(listeners[i].context, a1); break;
497 case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break;
498 case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break;
499 default:
500 if (!args) for (j = 1, args = new Array(len -1); j < len; j++) {
501 args[j - 1] = arguments[j];
502 }
503
504 listeners[i].fn.apply(listeners[i].context, args);
505 }
506 }
507 }
508
509 return true;
510};
511
512/**
513 * Add a listener for a given event.
514 *
515 * @param {(String|Symbol)} event The event name.
516 * @param {Function} fn The listener function.
517 * @param {*} [context=this] The context to invoke the listener with.
518 * @returns {EventEmitter} `this`.
519 * @public
520 */
521EventEmitter.prototype.on = function on(event, fn, context) {
522 return addListener(this, event, fn, context, false);
523};
524
525/**
526 * Add a one-time listener for a given event.
527 *
528 * @param {(String|Symbol)} event The event name.
529 * @param {Function} fn The listener function.
530 * @param {*} [context=this] The context to invoke the listener with.
531 * @returns {EventEmitter} `this`.
532 * @public
533 */
534EventEmitter.prototype.once = function once(event, fn, context) {
535 return addListener(this, event, fn, context, true);
536};
537
538/**
539 * Remove the listeners of a given event.
540 *
541 * @param {(String|Symbol)} event The event name.
542 * @param {Function} fn Only remove the listeners that match this function.
543 * @param {*} context Only remove the listeners that have this context.
544 * @param {Boolean} once Only remove one-time listeners.
545 * @returns {EventEmitter} `this`.
546 * @public
547 */
548EventEmitter.prototype.removeListener = function removeListener(event, fn, context, once) {
549 var evt = prefix ? prefix + event : event;
550
551 if (!this._events[evt]) return this;
552 if (!fn) {
553 clearEvent(this, evt);
554 return this;
555 }
556
557 var listeners = this._events[evt];
558
559 if (listeners.fn) {
560 if (
561 listeners.fn === fn &&
562 (!once || listeners.once) &&
563 (!context || listeners.context === context)
564 ) {
565 clearEvent(this, evt);
566 }
567 } else {
568 for (var i = 0, events = [], length = listeners.length; i < length; i++) {
569 if (
570 listeners[i].fn !== fn ||
571 (once && !listeners[i].once) ||
572 (context && listeners[i].context !== context)
573 ) {
574 events.push(listeners[i]);
575 }
576 }
577
578 //
579 // Reset the array, or remove it completely if we have no more listeners.
580 //
581 if (events.length) this._events[evt] = events.length === 1 ? events[0] : events;
582 else clearEvent(this, evt);
583 }
584
585 return this;
586};
587
588/**
589 * Remove all listeners, or those of the specified event.
590 *
591 * @param {(String|Symbol)} [event] The event name.
592 * @returns {EventEmitter} `this`.
593 * @public
594 */
595EventEmitter.prototype.removeAllListeners = function removeAllListeners(event) {
596 var evt;
597
598 if (event) {
599 evt = prefix ? prefix + event : event;
600 if (this._events[evt]) clearEvent(this, evt);
601 } else {
602 this._events = new Events();
603 this._eventsCount = 0;
604 }
605
606 return this;
607};
608
609//
610// Alias method names because people roll like that.
611//
612EventEmitter.prototype.off = EventEmitter.prototype.removeListener;
613EventEmitter.prototype.addListener = EventEmitter.prototype.on;
614
615//
616// Expose the prefix.
617//
618EventEmitter.prefixed = prefix;
619
620//
621// Allow `EventEmitter` to be imported as module namespace.
622//
623EventEmitter.EventEmitter = EventEmitter;
624
625//
626// Expose the module.
627//
628{
629 module.exports = EventEmitter;
630}
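// A hedged usage sketch for the eventemitter3 module bundled above (illustrative,
// not executed here): listeners registered with on()/once() are dispatched by emit().
//   var ee = new EventEmitter();
//   ee.on('progress', function (pct) { console.log('loaded', pct); });
//   ee.once('done', function () { console.log('finished'); });
//   ee.emit('progress', 42);            // -> logs "loaded 42", returns true (had listeners)
//   ee.emit('done');                    // one-time listener fires, then is removed
//   ee.removeAllListeners('progress');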
631
632
633/***/ }),
634
635/***/ "./node_modules/url-toolkit/src/url-toolkit.js":
636/*!*****************************************************!*\
637 !*** ./node_modules/url-toolkit/src/url-toolkit.js ***!
638 \*****************************************************/
639/*! no static exports found */
640/***/ (function(module, exports, __webpack_require__) {
641
642// see https://tools.ietf.org/html/rfc1808
643
644(function (root) {
645 var URL_REGEX =
646 /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#[^]*)?$/;
647 var FIRST_SEGMENT_REGEX = /^([^\/?#]*)([^]*)$/;
648 var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
649 var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
650
651 var URLToolkit = {
652 // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
653 // E.g
654 // With opts.alwaysNormalize = false (default, spec compliant)
655 // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
656 // With opts.alwaysNormalize = true (not spec compliant)
657 // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
658 buildAbsoluteURL: function (baseURL, relativeURL, opts) {
659 opts = opts || {};
660 // remove any remaining space and CRLF
661 baseURL = baseURL.trim();
662 relativeURL = relativeURL.trim();
663 if (!relativeURL) {
664 // 2a) If the embedded URL is entirely empty, it inherits the
665 // entire base URL (i.e., is set equal to the base URL)
666 // and we are done.
667 if (!opts.alwaysNormalize) {
668 return baseURL;
669 }
670 var basePartsForNormalise = URLToolkit.parseURL(baseURL);
671 if (!basePartsForNormalise) {
672 throw new Error('Error trying to parse base URL.');
673 }
674 basePartsForNormalise.path = URLToolkit.normalizePath(
675 basePartsForNormalise.path
676 );
677 return URLToolkit.buildURLFromParts(basePartsForNormalise);
678 }
679 var relativeParts = URLToolkit.parseURL(relativeURL);
680 if (!relativeParts) {
681 throw new Error('Error trying to parse relative URL.');
682 }
683 if (relativeParts.scheme) {
684 // 2b) If the embedded URL starts with a scheme name, it is
685 // interpreted as an absolute URL and we are done.
686 if (!opts.alwaysNormalize) {
687 return relativeURL;
688 }
689 relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
690 return URLToolkit.buildURLFromParts(relativeParts);
691 }
692 var baseParts = URLToolkit.parseURL(baseURL);
693 if (!baseParts) {
694 throw new Error('Error trying to parse base URL.');
695 }
696 if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
697 // If netLoc is missing and the path doesn't start with '/', assume everything before the first '/' is the netLoc
698 // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
699 var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
700 baseParts.netLoc = pathParts[1];
701 baseParts.path = pathParts[2];
702 }
703 if (baseParts.netLoc && !baseParts.path) {
704 baseParts.path = '/';
705 }
706 var builtParts = {
707 // 2c) Otherwise, the embedded URL inherits the scheme of
708 // the base URL.
709 scheme: baseParts.scheme,
710 netLoc: relativeParts.netLoc,
711 path: null,
712 params: relativeParts.params,
713 query: relativeParts.query,
714 fragment: relativeParts.fragment,
715 };
716 if (!relativeParts.netLoc) {
717 // 3) If the embedded URL's <net_loc> is non-empty, we skip to
718 // Step 7. Otherwise, the embedded URL inherits the <net_loc>
719 // (if any) of the base URL.
720 builtParts.netLoc = baseParts.netLoc;
721 // 4) If the embedded URL path is preceded by a slash "/", the
722 // path is not relative and we skip to Step 7.
723 if (relativeParts.path[0] !== '/') {
724 if (!relativeParts.path) {
725 // 5) If the embedded URL path is empty (and not preceded by a
726 // slash), then the embedded URL inherits the base URL path
727 builtParts.path = baseParts.path;
728 // 5a) if the embedded URL's <params> is non-empty, we skip to
729 // step 7; otherwise, it inherits the <params> of the base
730 // URL (if any) and
731 if (!relativeParts.params) {
732 builtParts.params = baseParts.params;
733 // 5b) if the embedded URL's <query> is non-empty, we skip to
734 // step 7; otherwise, it inherits the <query> of the base
735 // URL (if any) and we skip to step 7.
736 if (!relativeParts.query) {
737 builtParts.query = baseParts.query;
738 }
739 }
740 } else {
741 // 6) The last segment of the base URL's path (anything
742 // following the rightmost slash "/", or the entire path if no
743 // slash is present) is removed and the embedded URL's path is
744 // appended in its place.
745 var baseURLPath = baseParts.path;
746 var newPath =
747 baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) +
748 relativeParts.path;
749 builtParts.path = URLToolkit.normalizePath(newPath);
750 }
751 }
752 }
753 if (builtParts.path === null) {
754 builtParts.path = opts.alwaysNormalize
755 ? URLToolkit.normalizePath(relativeParts.path)
756 : relativeParts.path;
757 }
758 return URLToolkit.buildURLFromParts(builtParts);
759 },
760 parseURL: function (url) {
761 var parts = URL_REGEX.exec(url);
762 if (!parts) {
763 return null;
764 }
765 return {
766 scheme: parts[1] || '',
767 netLoc: parts[2] || '',
768 path: parts[3] || '',
769 params: parts[4] || '',
770 query: parts[5] || '',
771 fragment: parts[6] || '',
772 };
773 },
774 normalizePath: function (path) {
775 // The following operations are
776 // then applied, in order, to the new path:
777 // 6a) All occurrences of "./", where "." is a complete path
778 // segment, are removed.
779 // 6b) If the path ends with "." as a complete path segment,
780 // that "." is removed.
781 path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, '');
782 // 6c) All occurrences of "<segment>/../", where <segment> is a
783 // complete path segment not equal to "..", are removed.
784 // Removal of these path segments is performed iteratively,
785 // removing the leftmost matching pattern on each iteration,
786 // until no matching pattern remains.
787 // 6d) If the path ends with "<segment>/..", where <segment> is a
788 // complete path segment not equal to "..", that
789 // "<segment>/.." is removed.
790 while (
791 path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length
792 ) {}
793 return path.split('').reverse().join('');
794 },
795 buildURLFromParts: function (parts) {
796 return (
797 parts.scheme +
798 parts.netLoc +
799 parts.path +
800 parts.params +
801 parts.query +
802 parts.fragment
803 );
804 },
805 };
806
807 module.exports = URLToolkit;
808})();
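// Usage sketch for the URLToolkit module above, restating the examples from its own
// comments (illustrative, not executed here):
//   URLToolkit.buildAbsoluteURL('http://a.com/b/cd', '/e/f/../g')
//     // -> 'http://a.com/e/f/../g'   (default, spec compliant)
//   URLToolkit.buildAbsoluteURL('http://a.com/b/cd', '/e/f/../g', { alwaysNormalize: true })
//     // -> 'http://a.com/e/g'        (path normalized even though it starts with '/')
//   URLToolkit.parseURL('http://a.com/b/cd?x=1#y')
//     // -> { scheme: 'http:', netLoc: '//a.com', path: '/b/cd', params: '', query: '?x=1', fragment: '#y' }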
809
810
811/***/ }),
812
813/***/ "./node_modules/webworkify-webpack/index.js":
814/*!**************************************************!*\
815 !*** ./node_modules/webworkify-webpack/index.js ***!
816 \**************************************************/
817/*! no static exports found */
818/***/ (function(module, exports, __webpack_require__) {
819
820function webpackBootstrapFunc (modules) {
821/******/ // The module cache
822/******/ var installedModules = {};
823
824/******/ // The require function
825/******/ function __webpack_require__(moduleId) {
826
827/******/ // Check if module is in cache
828/******/ if(installedModules[moduleId])
829/******/ return installedModules[moduleId].exports;
830
831/******/ // Create a new module (and put it into the cache)
832/******/ var module = installedModules[moduleId] = {
833/******/ i: moduleId,
834/******/ l: false,
835/******/ exports: {}
836/******/ };
837
838/******/ // Execute the module function
839/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
840
841/******/ // Flag the module as loaded
842/******/ module.l = true;
843
844/******/ // Return the exports of the module
845/******/ return module.exports;
846/******/ }
847
848/******/ // expose the modules object (__webpack_modules__)
849/******/ __webpack_require__.m = modules;
850
851/******/ // expose the module cache
852/******/ __webpack_require__.c = installedModules;
853
854/******/ // identity function for calling harmony imports with the correct context
855/******/ __webpack_require__.i = function(value) { return value; };
856
857/******/ // define getter function for harmony exports
858/******/ __webpack_require__.d = function(exports, name, getter) {
859/******/ if(!__webpack_require__.o(exports, name)) {
860/******/ Object.defineProperty(exports, name, {
861/******/ configurable: false,
862/******/ enumerable: true,
863/******/ get: getter
864/******/ });
865/******/ }
866/******/ };
867
868/******/ // define __esModule on exports
869/******/ __webpack_require__.r = function(exports) {
870/******/ Object.defineProperty(exports, '__esModule', { value: true });
871/******/ };
872
873/******/ // getDefaultExport function for compatibility with non-harmony modules
874/******/ __webpack_require__.n = function(module) {
875/******/ var getter = module && module.__esModule ?
876/******/ function getDefault() { return module['default']; } :
877/******/ function getModuleExports() { return module; };
878/******/ __webpack_require__.d(getter, 'a', getter);
879/******/ return getter;
880/******/ };
881
882/******/ // Object.prototype.hasOwnProperty.call
883/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
884
885/******/ // __webpack_public_path__
886/******/ __webpack_require__.p = "/";
887
888/******/ // on error function for async loading
889/******/ __webpack_require__.oe = function(err) { console.error(err); throw err; };
890
891 var f = __webpack_require__(__webpack_require__.s = ENTRY_MODULE);
892 return f.default || f // try to call default if defined to also support babel esmodule exports
893}
894
895var moduleNameReqExp = '[\\.|\\-|\\+|\\w|\/|@]+';
896var dependencyRegExp = '\\(\\s*(\/\\*.*?\\*\/)?\\s*.*?(' + moduleNameReqExp + ').*?\\)'; // additional chars when output.pathinfo is true
897
898// http://stackoverflow.com/a/2593661/130442
899function quoteRegExp (str) {
900 return (str + '').replace(/[.?*+^$[\]\\(){}|-]/g, '\\$&')
901}
902
903function isNumeric(n) {
904 return !isNaN(1 * n); // 1 * n converts integers, integers as string ("123"), 1e3 and "1e3" to integers and strings to NaN
905}
906
907function getModuleDependencies (sources, module, queueName) {
908 var retval = {};
909 retval[queueName] = [];
910
911 var fnString = module.toString();
912 var wrapperSignature = fnString.match(/^function\s?\w*\(\w+,\s*\w+,\s*(\w+)\)/);
913 if (!wrapperSignature) return retval
914 var webpackRequireName = wrapperSignature[1];
915
916 // main bundle deps
917 var re = new RegExp('(\\\\n|\\W)' + quoteRegExp(webpackRequireName) + dependencyRegExp, 'g');
918 var match;
919 while ((match = re.exec(fnString))) {
920 if (match[3] === 'dll-reference') continue
921 retval[queueName].push(match[3]);
922 }
923
924 // dll deps
925 re = new RegExp('\\(' + quoteRegExp(webpackRequireName) + '\\("(dll-reference\\s(' + moduleNameReqExp + '))"\\)\\)' + dependencyRegExp, 'g');
926 while ((match = re.exec(fnString))) {
927 if (!sources[match[2]]) {
928 retval[queueName].push(match[1]);
929 sources[match[2]] = __webpack_require__(match[1]).m;
930 }
931 retval[match[2]] = retval[match[2]] || [];
932 retval[match[2]].push(match[4]);
933 }
934
935 // convert 1e3 back to 1000 - this can be important after uglify-js converted 1000 to 1e3
936 var keys = Object.keys(retval);
937 for (var i = 0; i < keys.length; i++) {
938 for (var j = 0; j < retval[keys[i]].length; j++) {
939 if (isNumeric(retval[keys[i]][j])) {
940 retval[keys[i]][j] = 1 * retval[keys[i]][j];
941 }
942 }
943 }
944
945 return retval
946}
947
948function hasValuesInQueues (queues) {
949 var keys = Object.keys(queues);
950 return keys.reduce(function (hasValues, key) {
951 return hasValues || queues[key].length > 0
952 }, false)
953}
954
955function getRequiredModules (sources, moduleId) {
956 var modulesQueue = {
957 main: [moduleId]
958 };
959 var requiredModules = {
960 main: []
961 };
962 var seenModules = {
963 main: {}
964 };
965
966 while (hasValuesInQueues(modulesQueue)) {
967 var queues = Object.keys(modulesQueue);
968 for (var i = 0; i < queues.length; i++) {
969 var queueName = queues[i];
970 var queue = modulesQueue[queueName];
971 var moduleToCheck = queue.pop();
972 seenModules[queueName] = seenModules[queueName] || {};
973 if (seenModules[queueName][moduleToCheck] || !sources[queueName][moduleToCheck]) continue
974 seenModules[queueName][moduleToCheck] = true;
975 requiredModules[queueName] = requiredModules[queueName] || [];
976 requiredModules[queueName].push(moduleToCheck);
977 var newModules = getModuleDependencies(sources, sources[queueName][moduleToCheck], queueName);
978 var newModulesKeys = Object.keys(newModules);
979 for (var j = 0; j < newModulesKeys.length; j++) {
980 modulesQueue[newModulesKeys[j]] = modulesQueue[newModulesKeys[j]] || [];
981 modulesQueue[newModulesKeys[j]] = modulesQueue[newModulesKeys[j]].concat(newModules[newModulesKeys[j]]);
982 }
983 }
984 }
985
986 return requiredModules
987}
988
989module.exports = function (moduleId, options) {
990 options = options || {};
991 var sources = {
992 main: __webpack_require__.m
993 };
994
995 var requiredModules = options.all ? { main: Object.keys(sources.main) } : getRequiredModules(sources, moduleId);
996
997 var src = '';
998
999 Object.keys(requiredModules).filter(function (m) { return m !== 'main' }).forEach(function (module) {
1000 var entryModule = 0;
1001 while (requiredModules[module][entryModule]) {
1002 entryModule++;
1003 }
1004 requiredModules[module].push(entryModule);
1005 sources[module][entryModule] = '(function(module, exports, __webpack_require__) { module.exports = __webpack_require__; })';
1006 src = src + 'var ' + module + ' = (' + webpackBootstrapFunc.toString().replace('ENTRY_MODULE', JSON.stringify(entryModule)) + ')({' + requiredModules[module].map(function (id) { return '' + JSON.stringify(id) + ': ' + sources[module][id].toString() }).join(',') + '});\n';
1007 });
1008
1009 src = src + 'new ((' + webpackBootstrapFunc.toString().replace('ENTRY_MODULE', JSON.stringify(moduleId)) + ')({' + requiredModules.main.map(function (id) { return '' + JSON.stringify(id) + ': ' + sources.main[id].toString() }).join(',') + '}))(self);';
1010
1011 var blob = new window.Blob([src], { type: 'text/javascript' });
1012 if (options.bare) { return blob }
1013
1014 var URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
1015
1016 var workerUrl = URL.createObjectURL(blob);
1017 var worker = new window.Worker(workerUrl);
1018 worker.objectURL = workerUrl;
1019
1020 return worker
1021};
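// The exported function above stringifies the required webpack modules, wraps them in a
// bootstrap, and spawns a Worker from a Blob URL. A generic sketch of that blob-worker
// pattern (assumed example code, not part of this bundle):
//   var src = 'self.onmessage = function (e) { self.postMessage(e.data * 2); };';
//   var blob = new Blob([src], { type: 'text/javascript' });
//   var worker = new Worker(URL.createObjectURL(blob));
//   worker.onmessage = function (e) { console.log(e.data); };   // -> 4
//   worker.postMessage(2);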
1022
1023
1024/***/ }),
1025
1026/***/ "./src/config.ts":
1027/*!***********************!*\
1028 !*** ./src/config.ts ***!
1029 \***********************/
1030/*! exports provided: hlsDefaultConfig, mergeConfig, enableStreamingMode */
1031/***/ (function(module, __webpack_exports__, __webpack_require__) {
1032__webpack_require__.r(__webpack_exports__);
1033/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "hlsDefaultConfig", function() { return hlsDefaultConfig; });
1034/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "mergeConfig", function() { return mergeConfig; });
1035/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "enableStreamingMode", function() { return enableStreamingMode; });
1036/* harmony import */ var _controller_abr_controller__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./controller/abr-controller */ "./src/controller/abr-controller.ts");
1037/* harmony import */ var _controller_audio_stream_controller__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./controller/audio-stream-controller */ "./src/controller/audio-stream-controller.ts");
1038/* harmony import */ var _controller_audio_track_controller__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./controller/audio-track-controller */ "./src/controller/audio-track-controller.ts");
1039/* harmony import */ var _controller_subtitle_stream_controller__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./controller/subtitle-stream-controller */ "./src/controller/subtitle-stream-controller.ts");
1040/* harmony import */ var _controller_subtitle_track_controller__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./controller/subtitle-track-controller */ "./src/controller/subtitle-track-controller.ts");
1041/* harmony import */ var _controller_buffer_controller__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./controller/buffer-controller */ "./src/controller/buffer-controller.ts");
1042/* harmony import */ var _controller_timeline_controller__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./controller/timeline-controller */ "./src/controller/timeline-controller.ts");
1043/* harmony import */ var _controller_cap_level_controller__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./controller/cap-level-controller */ "./src/controller/cap-level-controller.ts");
1044/* harmony import */ var _controller_fps_controller__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./controller/fps-controller */ "./src/controller/fps-controller.ts");
1045/* harmony import */ var _controller_eme_controller__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./controller/eme-controller */ "./src/controller/eme-controller.ts");
1046/* harmony import */ var _controller_cmcd_controller__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./controller/cmcd-controller */ "./src/controller/cmcd-controller.ts");
1047/* harmony import */ var _utils_xhr_loader__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ./utils/xhr-loader */ "./src/utils/xhr-loader.ts");
1048/* harmony import */ var _utils_fetch_loader__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ./utils/fetch-loader */ "./src/utils/fetch-loader.ts");
1049/* harmony import */ var _utils_cues__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ./utils/cues */ "./src/utils/cues.ts");
1050/* harmony import */ var _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(/*! ./utils/mediakeys-helper */ "./src/utils/mediakeys-helper.ts");
1051/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_15__ = __webpack_require__(/*! ./utils/logger */ "./src/utils/logger.ts");
1052function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
1053
1054function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
1055
1056function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
1057
1058function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076// If possible, keep hlsDefaultConfig shallow
1077// It is cloned whenever a new Hls instance is created; by keeping the config
1078// shallow, the properties are cloned and we don't end up manipulating the defaults
1079var hlsDefaultConfig = _objectSpread(_objectSpread({
1080 autoStartLoad: true,
1081 // used by stream-controller
1082 startPosition: -1,
1083 // used by stream-controller
1084 defaultAudioCodec: undefined,
1085 // used by stream-controller
1086 debug: false,
1087 // used by logger
1088 capLevelOnFPSDrop: false,
1089 // used by fps-controller
1090 capLevelToPlayerSize: false,
1091 // used by cap-level-controller
1092 initialLiveManifestSize: 1,
1093 // used by stream-controller
1094 maxBufferLength: 30,
1095 // used by stream-controller
1096 backBufferLength: Infinity,
1097 // used by buffer-controller
1098 maxBufferSize: 60 * 1000 * 1000,
1099 // used by stream-controller
1100 maxBufferHole: 0.1,
1101 // used by stream-controller
1102 highBufferWatchdogPeriod: 2,
1103 // used by stream-controller
1104 nudgeOffset: 0.1,
1105 // used by stream-controller
1106 nudgeMaxRetry: 3,
1107 // used by stream-controller
1108 maxFragLookUpTolerance: 0.25,
1109 // used by stream-controller
1110 liveSyncDurationCount: 3,
1111 // used by latency-controller
1112 liveMaxLatencyDurationCount: Infinity,
1113 // used by latency-controller
1114 liveSyncDuration: undefined,
1115 // used by latency-controller
1116 liveMaxLatencyDuration: undefined,
1117 // used by latency-controller
1118 maxLiveSyncPlaybackRate: 1,
1119 // used by latency-controller
1120 liveDurationInfinity: false,
1121 // used by buffer-controller
1122 liveBackBufferLength: null,
1123 // used by buffer-controller
1124 maxMaxBufferLength: 600,
1125 // used by stream-controller
1126 enableWorker: true,
1127 // used by demuxer
1128 enableSoftwareAES: true,
1129 // used by decrypter
1130 manifestLoadingTimeOut: 10000,
1131 // used by playlist-loader
1132 manifestLoadingMaxRetry: 1,
1133 // used by playlist-loader
1134 manifestLoadingRetryDelay: 1000,
1135 // used by playlist-loader
1136 manifestLoadingMaxRetryTimeout: 64000,
1137 // used by playlist-loader
1138 startLevel: undefined,
1139 // used by level-controller
1140 levelLoadingTimeOut: 10000,
1141 // used by playlist-loader
1142 levelLoadingMaxRetry: 4,
1143 // used by playlist-loader
1144 levelLoadingRetryDelay: 1000,
1145 // used by playlist-loader
1146 levelLoadingMaxRetryTimeout: 64000,
1147 // used by playlist-loader
1148 fragLoadingTimeOut: 20000,
1149 // used by fragment-loader
1150 fragLoadingMaxRetry: 6,
1151 // used by fragment-loader
1152 fragLoadingRetryDelay: 1000,
1153 // used by fragment-loader
1154 fragLoadingMaxRetryTimeout: 64000,
1155 // used by fragment-loader
1156 startFragPrefetch: false,
1157 // used by stream-controller
1158 fpsDroppedMonitoringPeriod: 5000,
1159 // used by fps-controller
1160 fpsDroppedMonitoringThreshold: 0.2,
1161 // used by fps-controller
1162 appendErrorMaxRetry: 3,
1163 // used by buffer-controller
1164 loader: _utils_xhr_loader__WEBPACK_IMPORTED_MODULE_11__["default"],
1165 // loader: FetchLoader,
1166 fLoader: undefined,
1167 // used by fragment-loader
1168 pLoader: undefined,
1169 // used by playlist-loader
1170 xhrSetup: undefined,
1171 // used by xhr-loader
1172 licenseXhrSetup: undefined,
1173 // used by eme-controller
1174 licenseResponseCallback: undefined,
1175 // used by eme-controller
1176 abrController: _controller_abr_controller__WEBPACK_IMPORTED_MODULE_0__["default"],
1177 bufferController: _controller_buffer_controller__WEBPACK_IMPORTED_MODULE_5__["default"],
1178 capLevelController: _controller_cap_level_controller__WEBPACK_IMPORTED_MODULE_7__["default"],
1179 fpsController: _controller_fps_controller__WEBPACK_IMPORTED_MODULE_8__["default"],
1180 stretchShortVideoTrack: false,
1181 // used by mp4-remuxer
1182 maxAudioFramesDrift: 1,
1183 // used by mp4-remuxer
1184 forceKeyFrameOnDiscontinuity: true,
1185 // used by ts-demuxer
1186 abrEwmaFastLive: 3,
1187 // used by abr-controller
1188 abrEwmaSlowLive: 9,
1189 // used by abr-controller
1190 abrEwmaFastVoD: 3,
1191 // used by abr-controller
1192 abrEwmaSlowVoD: 9,
1193 // used by abr-controller
1194 abrEwmaDefaultEstimate: 5e5,
1195 // 500 kbps // used by abr-controller
1196 abrBandWidthFactor: 0.95,
1197 // used by abr-controller
1198 abrBandWidthUpFactor: 0.7,
1199 // used by abr-controller
1200 abrMaxWithRealBitrate: false,
1201 // used by abr-controller
1202 maxStarvationDelay: 4,
1203 // used by abr-controller
1204 maxLoadingDelay: 4,
1205 // used by abr-controller
1206 minAutoBitrate: 0,
1207 // used by hls
1208 emeEnabled: false,
1209 // used by eme-controller
1210 widevineLicenseUrl: undefined,
1211 // used by eme-controller
1212 drmSystemOptions: {},
1213 // used by eme-controller
1214 requestMediaKeySystemAccessFunc: _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_14__["requestMediaKeySystemAccess"],
1215 // used by eme-controller
1216 testBandwidth: true,
1217 progressive: false,
1218 lowLatencyMode: true,
1219 cmcd: undefined
1220}, timelineConfig()), {}, {
1221 subtitleStreamController: _controller_subtitle_stream_controller__WEBPACK_IMPORTED_MODULE_3__["SubtitleStreamController"] ,
1222 subtitleTrackController: _controller_subtitle_track_controller__WEBPACK_IMPORTED_MODULE_4__["default"] ,
1223 timelineController: _controller_timeline_controller__WEBPACK_IMPORTED_MODULE_6__["TimelineController"] ,
1224 audioStreamController: _controller_audio_stream_controller__WEBPACK_IMPORTED_MODULE_1__["default"] ,
1225 audioTrackController: _controller_audio_track_controller__WEBPACK_IMPORTED_MODULE_2__["default"] ,
1226 emeController: _controller_eme_controller__WEBPACK_IMPORTED_MODULE_9__["default"] ,
1227 cmcdController: _controller_cmcd_controller__WEBPACK_IMPORTED_MODULE_10__["default"]
1228});
1229
1230function timelineConfig() {
1231 return {
1232 cueHandler: _utils_cues__WEBPACK_IMPORTED_MODULE_13__["default"],
1233 // used by timeline-controller
1234 enableCEA708Captions: true,
1235 // used by timeline-controller
1236 enableWebVTT: true,
1237 // used by timeline-controller
1238 enableIMSC1: true,
1239 // used by timeline-controller
1240 captionsTextTrack1Label: 'English',
1241 // used by timeline-controller
1242 captionsTextTrack1LanguageCode: 'en',
1243 // used by timeline-controller
1244 captionsTextTrack2Label: 'Spanish',
1245 // used by timeline-controller
1246 captionsTextTrack2LanguageCode: 'es',
1247 // used by timeline-controller
1248 captionsTextTrack3Label: 'Unknown CC',
1249 // used by timeline-controller
1250 captionsTextTrack3LanguageCode: '',
1251 // used by timeline-controller
1252 captionsTextTrack4Label: 'Unknown CC',
1253 // used by timeline-controller
1254 captionsTextTrack4LanguageCode: '',
1255 // used by timeline-controller
1256 renderTextTracksNatively: true
1257 };
1258}
1259
1260function mergeConfig(defaultConfig, userConfig) {
1261 if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
1262 throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
1263 }
1264
1265 if (userConfig.liveMaxLatencyDurationCount !== undefined && (userConfig.liveSyncDurationCount === undefined || userConfig.liveMaxLatencyDurationCount <= userConfig.liveSyncDurationCount)) {
1266 throw new Error('Illegal hls.js config: "liveMaxLatencyDurationCount" must be greater than "liveSyncDurationCount"');
1267 }
1268
1269 if (userConfig.liveMaxLatencyDuration !== undefined && (userConfig.liveSyncDuration === undefined || userConfig.liveMaxLatencyDuration <= userConfig.liveSyncDuration)) {
1270 throw new Error('Illegal hls.js config: "liveMaxLatencyDuration" must be greater than "liveSyncDuration"');
1271 }
1272
1273 return _extends({}, defaultConfig, userConfig);
1274}
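// A hedged sketch of how these defaults are combined with user options: hls.js passes the
// constructor argument through mergeConfig, so a partial config overrides hlsDefaultConfig,
// and illegal mixes of live-sync settings throw (illustrative, not executed here):
//   var hls = new Hls({ maxBufferLength: 60, capLevelToPlayerSize: true });
//   // new Hls({ liveSyncDurationCount: 3, liveSyncDuration: 9 })  -> throws "Illegal hls.js config: ..."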
1275function enableStreamingMode(config) {
1276 var currentLoader = config.loader;
1277
1278 if (currentLoader !== _utils_fetch_loader__WEBPACK_IMPORTED_MODULE_12__["default"] && currentLoader !== _utils_xhr_loader__WEBPACK_IMPORTED_MODULE_11__["default"]) {
1279 // If a developer has configured their own loader, respect that choice
1280 _utils_logger__WEBPACK_IMPORTED_MODULE_15__["logger"].log('[config]: Custom loader detected, cannot enable progressive streaming');
1281 config.progressive = false;
1282 } else {
1283 var canStreamProgressively = Object(_utils_fetch_loader__WEBPACK_IMPORTED_MODULE_12__["fetchSupported"])();
1284
1285 if (canStreamProgressively) {
1286 config.loader = _utils_fetch_loader__WEBPACK_IMPORTED_MODULE_12__["default"];
1287 config.progressive = true;
1288 config.enableSoftwareAES = true;
1289 _utils_logger__WEBPACK_IMPORTED_MODULE_15__["logger"].log('[config]: Progressive streaming enabled, using FetchLoader');
1290 }
1291 }
1292}
1293
1294/***/ }),
1295
1296/***/ "./src/controller/abr-controller.ts":
1297/*!******************************************!*\
1298 !*** ./src/controller/abr-controller.ts ***!
1299 \******************************************/
1300/*! exports provided: default */
1301/***/ (function(module, __webpack_exports__, __webpack_require__) {
1302__webpack_require__.r(__webpack_exports__);
1303/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
1304/* harmony import */ var _utils_ewma_bandwidth_estimator__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/ewma-bandwidth-estimator */ "./src/utils/ewma-bandwidth-estimator.ts");
1305/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../events */ "./src/events.ts");
1306/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
1307/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
1308/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
1309/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
1310
1311
1312
1313function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
1314
1315function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
1316
1317
1318
1319
1320
1321
1322
1323
1324var AbrController = /*#__PURE__*/function () {
1325 function AbrController(hls) {
1326 this.hls = void 0;
1327 this.lastLoadedFragLevel = 0;
1328 this._nextAutoLevel = -1;
1329 this.timer = void 0;
1330 this.onCheck = this._abandonRulesCheck.bind(this);
1331 this.fragCurrent = null;
1332 this.partCurrent = null;
1333 this.bitrateTestDelay = 0;
1334 this.bwEstimator = void 0;
1335 this.hls = hls;
1336 var config = hls.config;
1337 this.bwEstimator = new _utils_ewma_bandwidth_estimator__WEBPACK_IMPORTED_MODULE_1__["default"](config.abrEwmaSlowVoD, config.abrEwmaFastVoD, config.abrEwmaDefaultEstimate);
1338 this.registerListeners();
1339 }
1340
1341 var _proto = AbrController.prototype;
1342
1343 _proto.registerListeners = function registerListeners() {
1344 var hls = this.hls;
1345 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_LOADING, this.onFragLoading, this);
1346 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_LOADED, this.onFragLoaded, this);
1347 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
1348 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
1349 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, this.onError, this);
1350 };
1351
1352 _proto.unregisterListeners = function unregisterListeners() {
1353 var hls = this.hls;
1354 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_LOADING, this.onFragLoading, this);
1355 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_LOADED, this.onFragLoaded, this);
1356 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
1357 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
1358 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, this.onError, this);
1359 };
1360
1361 _proto.destroy = function destroy() {
1362 this.unregisterListeners();
1363 this.clearTimer(); // @ts-ignore
1364
1365 this.hls = this.onCheck = null;
1366 this.fragCurrent = this.partCurrent = null;
1367 };
1368
1369 _proto.onFragLoading = function onFragLoading(event, data) {
1370 var frag = data.frag;
1371
1372 if (frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_5__["PlaylistLevelType"].MAIN) {
1373 if (!this.timer) {
1374 var _data$part;
1375
1376 this.fragCurrent = frag;
1377 this.partCurrent = (_data$part = data.part) != null ? _data$part : null;
1378 this.timer = self.setInterval(this.onCheck, 100);
1379 }
1380 }
1381 };
1382
1383 _proto.onLevelLoaded = function onLevelLoaded(event, data) {
1384 var config = this.hls.config;
1385
1386 if (data.details.live) {
1387 this.bwEstimator.update(config.abrEwmaSlowLive, config.abrEwmaFastLive);
1388 } else {
1389 this.bwEstimator.update(config.abrEwmaSlowVoD, config.abrEwmaFastVoD);
1390 }
1391 }
1392 /*
1393 This method monitors the download rate of the current fragment, and will downswitch if that fragment will not load
1394 quickly enough to prevent underbuffering
1395 */
1396 ;
1397
1398 _proto._abandonRulesCheck = function _abandonRulesCheck() {
1399 var frag = this.fragCurrent,
1400 part = this.partCurrent,
1401 hls = this.hls;
1402 var autoLevelEnabled = hls.autoLevelEnabled,
1403 config = hls.config,
1404 media = hls.media;
1405
1406 if (!frag || !media) {
1407 return;
1408 }
1409
1410 var stats = part ? part.stats : frag.stats;
1411 var duration = part ? part.duration : frag.duration; // If loading has been aborted and not in lowLatencyMode, stop timer and return
1412
1413 if (stats.aborted) {
1414 _utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn('frag loader destroy or aborted, disarm abandonRules');
1415 this.clearTimer(); // reset forced auto level value so that next level will be selected
1416
1417 this._nextAutoLevel = -1;
1418 return;
1419 } // This check only runs if we're in ABR mode and actually playing
1420
1421
1422 if (!autoLevelEnabled || media.paused || !media.playbackRate || !media.readyState) {
1423 return;
1424 }
1425
1426 var requestDelay = performance.now() - stats.loading.start;
1427 var playbackRate = Math.abs(media.playbackRate); // In order to work with a stable bandwidth, only begin monitoring bandwidth after half of the fragment has been loaded
1428
1429 if (requestDelay <= 500 * duration / playbackRate) {
1430 return;
1431 }
1432
1433 var levels = hls.levels,
1434 minAutoLevel = hls.minAutoLevel;
1435 var level = levels[frag.level];
1436 var expectedLen = stats.total || Math.max(stats.loaded, Math.round(duration * level.maxBitrate / 8));
1437 var loadRate = Math.max(1, stats.bwEstimate ? stats.bwEstimate / 8 : stats.loaded * 1000 / requestDelay); // fragLoadDelay is an estimate of the time (in seconds) it will take to buffer the entire fragment
1438
1439 var fragLoadedDelay = (expectedLen - stats.loaded) / loadRate;
1440 var pos = media.currentTime; // bufferStarvationDelay is an estimate of the amount time (in seconds) it will take to exhaust the buffer
1441
1442 var bufferStarvationDelay = (_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(media, pos, config.maxBufferHole).end - pos) / playbackRate; // Attempt an emergency downswitch only if less than 2 fragment lengths are buffered, and the time to finish loading
1443 // the current fragment is greater than the amount of buffer we have left
1444
1445 if (bufferStarvationDelay >= 2 * duration / playbackRate || fragLoadedDelay <= bufferStarvationDelay) {
1446 return;
1447 }
1448
1449 var fragLevelNextLoadedDelay = Number.POSITIVE_INFINITY;
1450 var nextLoadLevel; // Iterate through the lower levels and try to find the largest one that avoids rebuffering
1451
1452 for (nextLoadLevel = frag.level - 1; nextLoadLevel > minAutoLevel; nextLoadLevel--) {
1453 // compute time to load next fragment at lower level
1454 // 0.8 : consider only 80% of current bw to be conservative
1455 // 8 = bits per byte (bps/Bps)
1456 var levelNextBitrate = levels[nextLoadLevel].maxBitrate;
1457 fragLevelNextLoadedDelay = duration * levelNextBitrate / (8 * 0.8 * loadRate);
1458
1459 if (fragLevelNextLoadedDelay < bufferStarvationDelay) {
1460 break;
1461 }
1462 } // Only emergency switch down if it takes less time to load a new fragment at the lower level than to continue
1463 // loading the current one
1464
1465
1466 if (fragLevelNextLoadedDelay >= fragLoadedDelay) {
1467 return;
1468 }
1469
1470 var bwEstimate = this.bwEstimator.getEstimate();
1471 _utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn("Fragment " + frag.sn + (part ? ' part ' + part.index : '') + " of level " + frag.level + " is loading too slowly and will cause an underbuffer; aborting and switching to level " + nextLoadLevel + "\n Current BW estimate: " + (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(bwEstimate) ? (bwEstimate / 1024).toFixed(3) : 'Unknown') + " Kb/s\n Estimated load time for current fragment: " + fragLoadedDelay.toFixed(3) + " s\n Estimated load time for the next fragment: " + fragLevelNextLoadedDelay.toFixed(3) + " s\n Time to underbuffer: " + bufferStarvationDelay.toFixed(3) + " s");
1472 hls.nextLoadLevel = nextLoadLevel;
1473 this.bwEstimator.sample(requestDelay, stats.loaded);
1474 this.clearTimer();
1475
1476 if (frag.loader) {
1477 this.fragCurrent = this.partCurrent = null;
1478 frag.loader.abort();
1479 }
1480
1481 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_LOAD_EMERGENCY_ABORTED, {
1482 frag: frag,
1483 part: part,
1484 stats: stats
1485 });
1486 };
1487
1488 _proto.onFragLoaded = function onFragLoaded(event, _ref) {
1489 var frag = _ref.frag,
1490 part = _ref.part;
1491
1492 if (frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_5__["PlaylistLevelType"].MAIN && Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(frag.sn)) {
1493 var stats = part ? part.stats : frag.stats;
1494 var duration = part ? part.duration : frag.duration; // stop monitoring bw once frag loaded
1495
1496 this.clearTimer(); // store level id after successful fragment load
1497
1498 this.lastLoadedFragLevel = frag.level; // reset forced auto level value so that next level will be selected
1499
1500 this._nextAutoLevel = -1; // compute level average bitrate
1501
1502 if (this.hls.config.abrMaxWithRealBitrate) {
1503 var level = this.hls.levels[frag.level];
1504 var loadedBytes = (level.loaded ? level.loaded.bytes : 0) + stats.loaded;
1505 var loadedDuration = (level.loaded ? level.loaded.duration : 0) + duration;
1506 level.loaded = {
1507 bytes: loadedBytes,
1508 duration: loadedDuration
1509 };
1510 level.realBitrate = Math.round(8 * loadedBytes / loadedDuration);
1511 }
1512
1513 if (frag.bitrateTest) {
1514 var fragBufferedData = {
1515 stats: stats,
1516 frag: frag,
1517 part: part,
1518 id: frag.type
1519 };
1520 this.onFragBuffered(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_BUFFERED, fragBufferedData);
1521 frag.bitrateTest = false;
1522 }
1523 }
1524 };
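/*
 Sketch of the abrMaxWithRealBitrate bookkeeping above (illustrative only; hypothetical
 numbers): after three 4 s fragments totalling 2,400,000 bytes, level.loaded is
 { bytes: 2400000, duration: 12 } and level.realBitrate = Math.round(8 * 2400000 / 12)
 = 1,600,000 bps (~1.6 Mbps). Elsewhere in hls.js this measured value can raise the
 effective maxBitrate used during level selection when the declared playlist bitrate
 turns out to be lower than reality.
*/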
1525
1526 _proto.onFragBuffered = function onFragBuffered(event, data) {
1527 var frag = data.frag,
1528 part = data.part;
1529 var stats = part ? part.stats : frag.stats;
1530
1531 if (stats.aborted) {
1532 return;
1533 } // Only count non-alt-audio frags which were actually buffered in our BW calculations
1534
1535
1536 if (frag.type !== _types_loader__WEBPACK_IMPORTED_MODULE_5__["PlaylistLevelType"].MAIN || frag.sn === 'initSegment') {
1537 return;
1538 } // Use the difference between parsing and request instead of buffering and request to compute fragLoadingProcessing;
1539 // rationale is that buffer appending only happens once media is attached. This can happen when config.startFragPrefetch
1540 // is used. If we used buffering in that case, our BW estimate sample will be very large.
1541
1542
1543 var processingMs = stats.parsing.end - stats.loading.start;
1544 this.bwEstimator.sample(processingMs, stats.loaded);
1545 stats.bwEstimate = this.bwEstimator.getEstimate();
1546
1547 if (frag.bitrateTest) {
1548 this.bitrateTestDelay = processingMs / 1000;
1549 } else {
1550 this.bitrateTestDelay = 0;
1551 }
1552 };
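/*
 Illustrative sample (hypothetical numbers): a 1,000,000-byte fragment whose parsing ends
 1,250 ms after the request started produces bwEstimator.sample(1250, 1000000), i.e. roughly
 8 * 1,000,000 bits / 1.25 s = 6.4 Mbps fed into the EWMA bandwidth estimator. Using the
 parsing end rather than the buffered end keeps prefetched-but-not-yet-appended fragments
 (config.startFragPrefetch) from inflating the estimate, as the comment above explains.
*/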
1553
1554 _proto.onError = function onError(event, data) {
1555 // stop timer in case of frag loading error
1556 switch (data.details) {
1557 case _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorDetails"].FRAG_LOAD_ERROR:
1558 case _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorDetails"].FRAG_LOAD_TIMEOUT:
1559 this.clearTimer();
1560 break;
1561 }
1562 };
1563
1564 _proto.clearTimer = function clearTimer() {
1565 self.clearInterval(this.timer);
1566 this.timer = undefined;
1567 } // return next auto level
1568 ;
1569
1570 _proto.getNextABRAutoLevel = function getNextABRAutoLevel() {
1571 var fragCurrent = this.fragCurrent,
1572 partCurrent = this.partCurrent,
1573 hls = this.hls;
1574 var maxAutoLevel = hls.maxAutoLevel,
1575 config = hls.config,
1576 minAutoLevel = hls.minAutoLevel,
1577 media = hls.media;
1578 var currentFragDuration = partCurrent ? partCurrent.duration : fragCurrent ? fragCurrent.duration : 0;
1579 var pos = media ? media.currentTime : 0; // playbackRate is the absolute value of the playback rate; if media.playbackRate is 0, we use 1 to load as
1580 // if we're playing back at the normal rate.
1581
1582 var playbackRate = media && media.playbackRate !== 0 ? Math.abs(media.playbackRate) : 1.0;
1583 var avgbw = this.bwEstimator ? this.bwEstimator.getEstimate() : config.abrEwmaDefaultEstimate; // bufferStarvationDelay is the wall-clock time left until the playback buffer is exhausted.
1584
1585 var bufferStarvationDelay = (_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(media, pos, config.maxBufferHole).end - pos) / playbackRate; // First, look to see if we can find a level matching with our avg bandwidth AND that could also guarantee no rebuffering at all
1586
1587 var bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, config.abrBandWidthFactor, config.abrBandWidthUpFactor);
1588
1589 if (bestLevel >= 0) {
1590 return bestLevel;
1591 }
1592
1593 _utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].trace((bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty') + ", finding optimal quality level"); // not possible to get rid of rebuffering ... let's try to find level that will guarantee less than maxStarvationDelay of rebuffering
1594 // if no matching level found, logic will return 0
1595
1596 var maxStarvationDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxStarvationDelay) : config.maxStarvationDelay;
1597 var bwFactor = config.abrBandWidthFactor;
1598 var bwUpFactor = config.abrBandWidthUpFactor;
1599
1600 if (!bufferStarvationDelay) {
1601 // in case buffer is empty, let's check if previous fragment was loaded to perform a bitrate test
1602 var bitrateTestDelay = this.bitrateTestDelay;
1603
1604 if (bitrateTestDelay) {
1605 // if it is the case, then we need to adjust our max starvation delay using maxLoadingDelay config value
1606 // max video loading delay used in automatic start level selection :
1607 // in that mode the ABR controller will ensure that the video loading time (i.e. the time to fetch the first fragment at the lowest quality level +
1608 // the time to fetch the fragment at the appropriate quality level) is less than ```maxLoadingDelay```
1609 // cap maxLoadingDelay and ensure it is not bigger than the 'bitrate test' frag duration
1610 var maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
1611 maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
1612 _utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].trace("bitrate test took " + Math.round(1000 * bitrateTestDelay) + "ms, set first fragment max fetchDuration to " + Math.round(1000 * maxStarvationDelay) + " ms"); // don't use conservative factor on bitrate test
1613
1614 bwFactor = bwUpFactor = 1;
1615 }
1616 }
1617
1618 bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay + maxStarvationDelay, bwFactor, bwUpFactor);
1619 return Math.max(bestLevel, 0);
1620 };
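/*
 Illustrative fallback (hypothetical numbers, VOD stream): with avgbw = 2.5 Mbps, an empty
 buffer (bufferStarvationDelay = 0) and a 6 s current fragment, the first findBestLevel call
 has no fetch-time budget and returns -1. The retry then tolerates
 maxStarvationDelay = Math.min(6, config.maxStarvationDelay) seconds of rebuffering as the
 budget, so the highest level whose fragment can be fetched within that window at ~2.5 Mbps
 is returned instead of falling straight to level 0.
*/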
1621
1622 _proto.findBestLevel = function findBestLevel(currentBw, minAutoLevel, maxAutoLevel, maxFetchDuration, bwFactor, bwUpFactor) {
1623 var _level$details;
1624
1625 var fragCurrent = this.fragCurrent,
1626 partCurrent = this.partCurrent,
1627 currentLevel = this.lastLoadedFragLevel;
1628 var levels = this.hls.levels;
1629 var level = levels[currentLevel];
1630 var live = !!(level !== null && level !== void 0 && (_level$details = level.details) !== null && _level$details !== void 0 && _level$details.live);
1631 var currentCodecSet = level === null || level === void 0 ? void 0 : level.codecSet;
1632 var currentFragDuration = partCurrent ? partCurrent.duration : fragCurrent ? fragCurrent.duration : 0;
1633
1634 for (var i = maxAutoLevel; i >= minAutoLevel; i--) {
1635 var levelInfo = levels[i];
1636
1637 if (!levelInfo || currentCodecSet && levelInfo.codecSet !== currentCodecSet) {
1638 continue;
1639 }
1640
1641 var levelDetails = levelInfo.details;
1642 var avgDuration = (partCurrent ? levelDetails === null || levelDetails === void 0 ? void 0 : levelDetails.partTarget : levelDetails === null || levelDetails === void 0 ? void 0 : levelDetails.averagetargetduration) || currentFragDuration;
1643 var adjustedbw = void 0; // follow algorithm captured from stagefright :
1644 // https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
1645 // Pick the highest bandwidth stream below or equal to estimated bandwidth.
1646 // consider only 80% of the available bandwidth, but if we are switching up,
1647 // be even more conservative (70%) to avoid overestimating and immediately
1648 // switching back.
1649
1650 if (i <= currentLevel) {
1651 adjustedbw = bwFactor * currentBw;
1652 } else {
1653 adjustedbw = bwUpFactor * currentBw;
1654 }
1655
1656 var bitrate = levels[i].maxBitrate;
1657 var fetchDuration = bitrate * avgDuration / adjustedbw;
1658 _utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].trace("level/adjustedbw/bitrate/avgDuration/maxFetchDuration/fetchDuration: " + i + "/" + Math.round(adjustedbw) + "/" + bitrate + "/" + avgDuration + "/" + maxFetchDuration + "/" + fetchDuration); // if adjusted bw is greater than level bitrate AND
1659
1660 if (adjustedbw > bitrate && ( // fragment fetchDuration unknown OR live stream OR fragment fetchDuration less than max allowed fetch duration, then this level matches
1661 // we don't account for max Fetch Duration for live streams, this is to avoid switching down when near the edge of live sliding window ...
1662 // special case to support startLevel = -1 (bitrateTest) on live streams : in that case we should not exit loop so that findBestLevel will return -1
1663 !fetchDuration || live && !this.bitrateTestDelay || fetchDuration < maxFetchDuration)) {
1664 // as we are looping from highest to lowest, this will return the best achievable quality level
1665 return i;
1666 }
1667 } // not enough time budget even with quality level 0 ... rebuffering might happen
1668
1669
1670 return -1;
1671 };
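/*
 Illustrative pass through the loop above (hypothetical numbers): with a 5 Mbps estimate and,
 for illustration, bwFactor = 0.95 and bwUpFactor = 0.7, a level above the current one is only
 eligible if its maxBitrate is below 0.7 * 5 = 3.5 Mbps, while a level at or below the current
 one merely needs to stay under 0.95 * 5 = 4.75 Mbps. A 3 Mbps candidate with a 6 s average
 duration must additionally satisfy fetchDuration = 3e6 * 6 / 3.5e6 ~= 5.1 s < maxFetchDuration
 (unless the stream is live) before it is returned.
*/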
1672
1673 _createClass(AbrController, [{
1674 key: "nextAutoLevel",
1675 get: function get() {
1676 var forcedAutoLevel = this._nextAutoLevel;
1677 var bwEstimator = this.bwEstimator; // in case next auto level has been forced, and bw not available or not reliable, return forced value
1678
1679 if (forcedAutoLevel !== -1 && (!bwEstimator || !bwEstimator.canEstimate())) {
1680 return forcedAutoLevel;
1681 } // compute next level using ABR logic
1682
1683
1684 var nextABRAutoLevel = this.getNextABRAutoLevel(); // if forced auto level has been defined, use it to cap ABR computed quality level
1685
1686 if (forcedAutoLevel !== -1) {
1687 nextABRAutoLevel = Math.min(forcedAutoLevel, nextABRAutoLevel);
1688 }
1689
1690 return nextABRAutoLevel;
1691 },
1692 set: function set(nextLevel) {
1693 this._nextAutoLevel = nextLevel;
1694 }
1695 }]);
1696
1697 return AbrController;
1698}();
1699
1700/* harmony default export */ __webpack_exports__["default"] = (AbrController);
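/*
 Consumer-side sketch (illustrative only; never executed from this bundle). It assumes the
 public Hls instance re-exposes this controller's nextAutoLevel accessor, as upstream hls.js
 documents:

   // var hls = new Hls();
   // hls.loadSource('https://example.com/stream.m3u8'); // hypothetical URL
   // hls.attachMedia(videoElement);
   // hls.on(Hls.Events.FRAG_LOADED, function () {
   //   console.log('ABR suggests level', hls.nextAutoLevel);
   // });
   // hls.nextAutoLevel = 2; // caps the next automatic choice via the setter above
*/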
1701
1702/***/ }),
1703
1704/***/ "./src/controller/audio-stream-controller.ts":
1705/*!***************************************************!*\
1706 !*** ./src/controller/audio-stream-controller.ts ***!
1707 \***************************************************/
1708/*! exports provided: default */
1709/***/ (function(module, __webpack_exports__, __webpack_require__) {
1710__webpack_require__.r(__webpack_exports__);
1711/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
1712/* harmony import */ var _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./base-stream-controller */ "./src/controller/base-stream-controller.ts");
1713/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../events */ "./src/events.ts");
1714/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
1715/* harmony import */ var _fragment_tracker__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./fragment-tracker */ "./src/controller/fragment-tracker.ts");
1716/* harmony import */ var _types_level__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../types/level */ "./src/types/level.ts");
1717/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
1718/* harmony import */ var _loader_fragment__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../loader/fragment */ "./src/loader/fragment.ts");
1719/* harmony import */ var _demux_chunk_cache__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../demux/chunk-cache */ "./src/demux/chunk-cache.ts");
1720/* harmony import */ var _demux_transmuxer_interface__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ../demux/transmuxer-interface */ "./src/demux/transmuxer-interface.ts");
1721/* harmony import */ var _types_transmuxer__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ../types/transmuxer */ "./src/types/transmuxer.ts");
1722/* harmony import */ var _fragment_finders__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ./fragment-finders */ "./src/controller/fragment-finders.ts");
1723/* harmony import */ var _utils_discontinuities__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ../utils/discontinuities */ "./src/utils/discontinuities.ts");
1724/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
1725/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
1726
1727
1728function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
1729
1730function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
1731
1732function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
1733
1734
1735
1736
1737
1738
1739
1740
1741
1742
1743
1744
1745
1746
1747
1748var TICK_INTERVAL = 100; // how often to tick in ms
1749
1750var AudioStreamController = /*#__PURE__*/function (_BaseStreamController) {
1751 _inheritsLoose(AudioStreamController, _BaseStreamController);
1752
1753 function AudioStreamController(hls, fragmentTracker) {
1754 var _this;
1755
1756 _this = _BaseStreamController.call(this, hls, fragmentTracker, '[audio-stream-controller]') || this;
1757 _this.videoBuffer = null;
1758 _this.videoTrackCC = -1;
1759 _this.waitingVideoCC = -1;
1760 _this.audioSwitch = false;
1761 _this.trackId = -1;
1762 _this.waitingData = null;
1763 _this.mainDetails = null;
1764 _this.bufferFlushed = false;
1765
1766 _this._registerListeners();
1767
1768 return _this;
1769 }
1770
1771 var _proto = AudioStreamController.prototype;
1772
1773 _proto.onHandlerDestroying = function onHandlerDestroying() {
1774 this._unregisterListeners();
1775
1776 this.mainDetails = null;
1777 };
1778
1779 _proto._registerListeners = function _registerListeners() {
1780 var hls = this.hls;
1781 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
1782 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
1783 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
1784 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
1785 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
1786 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
1787 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
1788 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, this.onError, this);
1789 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_RESET, this.onBufferReset, this);
1790 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
1791 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_FLUSHED, this.onBufferFlushed, this);
1792 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].INIT_PTS_FOUND, this.onInitPtsFound, this);
1793 hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
1794 };
1795
1796 _proto._unregisterListeners = function _unregisterListeners() {
1797 var hls = this.hls;
1798 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
1799 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
1800 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
1801 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
1802 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
1803 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
1804 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
1805 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, this.onError, this);
1806 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_RESET, this.onBufferReset, this);
1807 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
1808 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_FLUSHED, this.onBufferFlushed, this);
1809 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].INIT_PTS_FOUND, this.onInitPtsFound, this);
1810 hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
1811 } // INIT_PTS_FOUND is triggered when the stream-controller parses a video track with a new initial PTS value
1812 ;
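/*
 In other words (illustrative): when the main stream-controller reports initPTS for, say,
 continuity counter 3, it is stored in this.initPTS[3]; any audio fragment of cc 3 parked in
 WAITING_INIT_PTS can then be transmuxed with a time offset consistent with the video track,
 keeping both renditions aligned on the shared timeline.
*/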
1813
1814 _proto.onInitPtsFound = function onInitPtsFound(event, _ref) {
1815 var frag = _ref.frag,
1816 id = _ref.id,
1817 initPTS = _ref.initPTS;
1818
1819 // Always update the new INIT PTS
1820 // Can change due to a level switch
1821 if (id === 'main') {
1822 var cc = frag.cc;
1823 this.initPTS[frag.cc] = initPTS;
1824 this.log("InitPTS for cc: " + cc + " found from main: " + initPTS);
1825 this.videoTrackCC = cc; // If we are waiting, tick immediately to unblock audio fragment transmuxing
1826
1827 if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_INIT_PTS) {
1828 this.tick();
1829 }
1830 }
1831 };
1832
1833 _proto.startLoad = function startLoad(startPosition) {
1834 if (!this.levels) {
1835 this.startPosition = startPosition;
1836 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].STOPPED;
1837 return;
1838 }
1839
1840 var lastCurrentTime = this.lastCurrentTime;
1841 this.stopLoad();
1842 this.setInterval(TICK_INTERVAL);
1843 this.fragLoadError = 0;
1844
1845 if (lastCurrentTime > 0 && startPosition === -1) {
1846 this.log("Override startPosition with lastCurrentTime @" + lastCurrentTime.toFixed(3));
1847 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
1848 } else {
1849 this.loadedmetadata = false;
1850 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_TRACK;
1851 }
1852
1853 this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition;
1854 this.tick();
1855 };
1856
1857 _proto.doTick = function doTick() {
1858 switch (this.state) {
1859 case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE:
1860 this.doTickIdle();
1861 break;
1862
1863 case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_TRACK:
1864 {
1865 var _levels$trackId;
1866
1867 var levels = this.levels,
1868 trackId = this.trackId;
1869 var details = levels === null || levels === void 0 ? void 0 : (_levels$trackId = levels[trackId]) === null || _levels$trackId === void 0 ? void 0 : _levels$trackId.details;
1870
1871 if (details) {
1872 if (this.waitForCdnTuneIn(details)) {
1873 break;
1874 }
1875
1876 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_INIT_PTS;
1877 }
1878
1879 break;
1880 }
1881
1882 case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].FRAG_LOADING_WAITING_RETRY:
1883 {
1884 var _this$media;
1885
1886 var now = performance.now();
1887 var retryDate = this.retryDate; // if the current time is greater than retryDate, or if media is seeking, switch to IDLE state to retry loading
1888
1889 if (!retryDate || now >= retryDate || (_this$media = this.media) !== null && _this$media !== void 0 && _this$media.seeking) {
1890 this.log('RetryDate reached, switch back to IDLE state');
1891 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
1892 }
1893
1894 break;
1895 }
1896
1897 case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_INIT_PTS:
1898 {
1899 // Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS
1900 var waitingData = this.waitingData;
1901
1902 if (waitingData) {
1903 var frag = waitingData.frag,
1904 part = waitingData.part,
1905 cache = waitingData.cache,
1906 complete = waitingData.complete;
1907
1908 if (this.initPTS[frag.cc] !== undefined) {
1909 this.waitingData = null;
1910 this.waitingVideoCC = -1;
1911 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].FRAG_LOADING;
1912 var payload = cache.flush();
1913 var data = {
1914 frag: frag,
1915 part: part,
1916 payload: payload,
1917 networkDetails: null
1918 };
1919
1920 this._handleFragmentLoadProgress(data);
1921
1922 if (complete) {
1923 _BaseStreamController.prototype._handleFragmentLoadComplete.call(this, data);
1924 }
1925 } else if (this.videoTrackCC !== this.waitingVideoCC) {
1926 // Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found
1927 _utils_logger__WEBPACK_IMPORTED_MODULE_14__["logger"].log("Waiting fragment cc (" + frag.cc + ") cancelled because video is at cc " + this.videoTrackCC);
1928 this.clearWaitingFragment();
1929 } else {
1930 // Drop waiting fragment if an earlier fragment is needed
1931 var pos = this.getLoadPosition();
1932 var bufferInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(this.mediaBuffer, pos, this.config.maxBufferHole);
1933 var waitingFragmentAtPosition = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_11__["fragmentWithinToleranceTest"])(bufferInfo.end, this.config.maxFragLookUpTolerance, frag);
1934
1935 if (waitingFragmentAtPosition < 0) {
1936 _utils_logger__WEBPACK_IMPORTED_MODULE_14__["logger"].log("Waiting fragment cc (" + frag.cc + ") @ " + frag.start + " cancelled because another fragment at " + bufferInfo.end + " is needed");
1937 this.clearWaitingFragment();
1938 }
1939 }
1940 } else {
1941 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
1942 }
1943 }
1944 }
1945
1946 this.onTickEnd();
1947 };
1948
1949 _proto.clearWaitingFragment = function clearWaitingFragment() {
1950 var waitingData = this.waitingData;
1951
1952 if (waitingData) {
1953 this.fragmentTracker.removeFragment(waitingData.frag);
1954 this.waitingData = null;
1955 this.waitingVideoCC = -1;
1956 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
1957 }
1958 };
1959
1960 _proto.onTickEnd = function onTickEnd() {
1961 var media = this.media;
1962
1963 if (!media || !media.readyState) {
1964 // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
1965 return;
1966 }
1967
1968 var mediaBuffer = this.mediaBuffer ? this.mediaBuffer : media;
1969 var buffered = mediaBuffer.buffered;
1970
1971 if (!this.loadedmetadata && buffered.length) {
1972 this.loadedmetadata = true;
1973 }
1974
1975 this.lastCurrentTime = media.currentTime;
1976 };
1977
1978 _proto.doTickIdle = function doTickIdle() {
1979 var _frag$decryptdata, _frag$decryptdata2;
1980
1981 var hls = this.hls,
1982 levels = this.levels,
1983 media = this.media,
1984 trackId = this.trackId;
1985 var config = hls.config;
1986
1987 if (!levels || !levels[trackId]) {
1988 return;
1989 } // if video not attached AND
1990 // start fragment already requested OR start frag prefetch not enabled
1991 // exit loop
1992 // => if media not attached but start frag prefetch is enabled and start frag not requested yet, we will not exit loop
1993
1994
1995 if (!media && (this.startFragRequested || !config.startFragPrefetch)) {
1996 return;
1997 }
1998
1999 var levelInfo = levels[trackId];
2000 var trackDetails = levelInfo.details;
2001
2002 if (!trackDetails || trackDetails.live && this.levelLastLoaded !== trackId || this.waitForCdnTuneIn(trackDetails)) {
2003 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_TRACK;
2004 return;
2005 }
2006
2007 if (this.bufferFlushed) {
2008 this.bufferFlushed = false;
2009 this.afterBufferFlushed(this.mediaBuffer ? this.mediaBuffer : this.media, _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO);
2010 }
2011
2012 var bufferInfo = this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO);
2013
2014 if (bufferInfo === null) {
2015 return;
2016 }
2017
2018 var bufferLen = bufferInfo.len;
2019 var maxBufLen = this.getMaxBufferLength();
2020 var audioSwitch = this.audioSwitch; // if buffer length is less than maxBufLen try to load a new fragment
2021
2022 if (bufferLen >= maxBufLen && !audioSwitch) {
2023 return;
2024 }
2025
2026 if (!audioSwitch && this._streamEnded(bufferInfo, trackDetails)) {
2027 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_EOS, {
2028 type: 'audio'
2029 });
2030 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ENDED;
2031 return;
2032 }
2033
2034 var fragments = trackDetails.fragments;
2035 var start = fragments[0].start;
2036 var targetBufferTime = bufferInfo.end;
2037
2038 if (audioSwitch) {
2039 var pos = this.getLoadPosition();
2040 targetBufferTime = pos; // if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
2041
2042 if (trackDetails.PTSKnown && pos < start) {
2043 // if everything is buffered from pos to start, or if the audio buffer is ahead of pos, seek to start
2044 if (bufferInfo.end > start || bufferInfo.nextStart) {
2045 this.log('Alt audio track ahead of main track, seek to start of alt audio track');
2046 media.currentTime = start + 0.05;
2047 }
2048 }
2049 }
2050
2051 var frag = this.getNextFragment(targetBufferTime, trackDetails);
2052
2053 if (!frag) {
2054 this.bufferFlushed = true;
2055 return;
2056 }
2057
2058 if (((_frag$decryptdata = frag.decryptdata) === null || _frag$decryptdata === void 0 ? void 0 : _frag$decryptdata.keyFormat) === 'identity' && !((_frag$decryptdata2 = frag.decryptdata) !== null && _frag$decryptdata2 !== void 0 && _frag$decryptdata2.key)) {
2059 this.loadKey(frag, trackDetails);
2060 } else {
2061 this.loadFragment(frag, trackDetails, targetBufferTime);
2062 }
2063 };
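/*
 Illustrative tick (hypothetical numbers): with getMaxBufferLength() = 30 s and only 12 s of
 forward audio buffered, doTickIdle asks getNextFragment(bufferInfo.end, trackDetails) for the
 fragment starting at the current buffer end; during an audio switch it targets the playback
 position instead, so the newly selected track becomes audible as soon as possible.
*/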
2064
2065 _proto.getMaxBufferLength = function getMaxBufferLength() {
2066 var maxConfigBuffer = _BaseStreamController.prototype.getMaxBufferLength.call(this);
2067
2068 var mainBufferInfo = this.getFwdBufferInfo(this.videoBuffer ? this.videoBuffer : this.media, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
2069
2070 if (mainBufferInfo === null) {
2071 return maxConfigBuffer;
2072 }
2073
2074 return Math.max(maxConfigBuffer, mainBufferInfo.len);
2075 };
2076
2077 _proto.onMediaDetaching = function onMediaDetaching() {
2078 this.videoBuffer = null;
2079
2080 _BaseStreamController.prototype.onMediaDetaching.call(this);
2081 };
2082
2083 _proto.onAudioTracksUpdated = function onAudioTracksUpdated(event, _ref2) {
2084 var audioTracks = _ref2.audioTracks;
2085 this.resetTransmuxer();
2086 this.levels = audioTracks.map(function (mediaPlaylist) {
2087 return new _types_level__WEBPACK_IMPORTED_MODULE_5__["Level"](mediaPlaylist);
2088 });
2089 };
2090
2091 _proto.onAudioTrackSwitching = function onAudioTrackSwitching(event, data) {
2092 // if any URL found on new audio track, it is an alternate audio track
2093 var altAudio = !!data.url;
2094 this.trackId = data.id;
2095 var fragCurrent = this.fragCurrent;
2096
2097 if (fragCurrent !== null && fragCurrent !== void 0 && fragCurrent.loader) {
2098 fragCurrent.loader.abort();
2099 }
2100
2101 this.fragCurrent = null;
2102 this.clearWaitingFragment(); // destroy useless transmuxer when switching audio to main
2103
2104 if (!altAudio) {
2105 this.resetTransmuxer();
2106 } else {
2107 // switching to audio track, start timer if not already started
2108 this.setInterval(TICK_INTERVAL);
2109 } // should we switch tracks ?
2110
2111
2112 if (altAudio) {
2113 this.audioSwitch = true; // the main audio track is handled by the stream-controller; only act here when switching to an alt audio track
2114
2115 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
2116 } else {
2117 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].STOPPED;
2118 }
2119
2120 this.tick();
2121 };
2122
2123 _proto.onManifestLoading = function onManifestLoading() {
2124 this.mainDetails = null;
2125 this.fragmentTracker.removeAllFragments();
2126 this.startPosition = this.lastCurrentTime = 0;
2127 this.bufferFlushed = false;
2128 };
2129
2130 _proto.onLevelLoaded = function onLevelLoaded(event, data) {
2131 this.mainDetails = data.details;
2132 };
2133
2134 _proto.onAudioTrackLoaded = function onAudioTrackLoaded(event, data) {
2135 var _track$details;
2136
2137 var levels = this.levels;
2138 var newDetails = data.details,
2139 trackId = data.id;
2140
2141 if (!levels) {
2142 this.warn("Audio tracks were reset while loading level " + trackId);
2143 return;
2144 }
2145
2146 this.log("Track " + trackId + " loaded [" + newDetails.startSN + "," + newDetails.endSN + "],duration:" + newDetails.totalduration);
2147 var track = levels[trackId];
2148 var sliding = 0;
2149
2150 if (newDetails.live || (_track$details = track.details) !== null && _track$details !== void 0 && _track$details.live) {
2151 var mainDetails = this.mainDetails;
2152
2153 if (!newDetails.fragments[0]) {
2154 newDetails.deltaUpdateFailed = true;
2155 }
2156
2157 if (newDetails.deltaUpdateFailed || !mainDetails) {
2158 return;
2159 }
2160
2161 if (!track.details && newDetails.hasProgramDateTime && mainDetails.hasProgramDateTime) {
2162 // Make sure our audio rendition is aligned with the "main" rendition, using
2163 // pdt as our reference times.
2164 Object(_utils_discontinuities__WEBPACK_IMPORTED_MODULE_12__["alignMediaPlaylistByPDT"])(newDetails, mainDetails);
2165 sliding = newDetails.fragments[0].start;
2166 } else {
2167 sliding = this.alignPlaylists(newDetails, track.details);
2168 }
2169 }
2170
2171 track.details = newDetails;
2172 this.levelLastLoaded = trackId; // compute start position if we are aligned with the main playlist
2173
2174 if (!this.startFragRequested && (this.mainDetails || !newDetails.live)) {
2175 this.setStartPosition(track.details, sliding);
2176 } // only switch back to IDLE state if we were waiting for track to start downloading a new fragment
2177
2178
2179 if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_TRACK && !this.waitForCdnTuneIn(newDetails)) {
2180 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
2181 } // trigger handler right now
2182
2183
2184 this.tick();
2185 };
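/*
 Illustrative alignment (hypothetical values): if the main playlist maps PROGRAM-DATE-TIME
 10:00:00 to media time 100 s and the newly loaded audio playlist's first fragment carries
 10:00:02, alignMediaPlaylistByPDT shifts the audio fragments so that the first one starts at
 roughly 102 s, and `sliding` picks up that shifted start for setStartPosition.
*/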
2186
2187 _proto._handleFragmentLoadProgress = function _handleFragmentLoadProgress(data) {
2188 var _frag$initSegment;
2189
2190 var frag = data.frag,
2191 part = data.part,
2192 payload = data.payload;
2193 var config = this.config,
2194 trackId = this.trackId,
2195 levels = this.levels;
2196
2197 if (!levels) {
2198 this.warn("Audio tracks were reset while fragment load was in progress. Fragment " + frag.sn + " of level " + frag.level + " will not be buffered");
2199 return;
2200 }
2201
2202 var track = levels[trackId];
2203 console.assert(track, 'Audio track is defined on fragment load progress');
2204 var details = track.details;
2205 console.assert(details, 'Audio track details are defined on fragment load progress');
2206 var audioCodec = config.defaultAudioCodec || track.audioCodec || 'mp4a.40.2';
2207 var transmuxer = this.transmuxer;
2208
2209 if (!transmuxer) {
2210 transmuxer = this.transmuxer = new _demux_transmuxer_interface__WEBPACK_IMPORTED_MODULE_9__["default"](this.hls, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO, this._handleTransmuxComplete.bind(this), this._handleTransmuxerFlush.bind(this));
2211 } // Check if we have video initPTS
2212 // If not we need to wait for it
2213
2214
2215 var initPTS = this.initPTS[frag.cc];
2216 var initSegmentData = (_frag$initSegment = frag.initSegment) === null || _frag$initSegment === void 0 ? void 0 : _frag$initSegment.data;
2217
2218 if (initPTS !== undefined) {
2219 // this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
2220 // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
2221 var accurateTimeOffset = false; // details.PTSKnown || !details.live;
2222
2223 var partIndex = part ? part.index : -1;
2224 var partial = partIndex !== -1;
2225 var chunkMeta = new _types_transmuxer__WEBPACK_IMPORTED_MODULE_10__["ChunkMetadata"](frag.level, frag.sn, frag.stats.chunkCount, payload.byteLength, partIndex, partial);
2226 transmuxer.push(payload, initSegmentData, audioCodec, '', frag, part, details.totalduration, accurateTimeOffset, chunkMeta, initPTS);
2227 } else {
2228 _utils_logger__WEBPACK_IMPORTED_MODULE_14__["logger"].log("Unknown video PTS for cc " + frag.cc + ", waiting for video PTS before demuxing audio frag " + frag.sn + " of [" + details.startSN + " ," + details.endSN + "],track " + trackId);
2229
2230 var _this$waitingData = this.waitingData = this.waitingData || {
2231 frag: frag,
2232 part: part,
2233 cache: new _demux_chunk_cache__WEBPACK_IMPORTED_MODULE_8__["default"](),
2234 complete: false
2235 },
2236 cache = _this$waitingData.cache;
2237
2238 cache.push(new Uint8Array(payload));
2239 this.waitingVideoCC = this.videoTrackCC;
2240 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_INIT_PTS;
2241 }
2242 };
2243
2244 _proto._handleFragmentLoadComplete = function _handleFragmentLoadComplete(fragLoadedData) {
2245 if (this.waitingData) {
2246 this.waitingData.complete = true;
2247 return;
2248 }
2249
2250 _BaseStreamController.prototype._handleFragmentLoadComplete.call(this, fragLoadedData);
2251 };
2252
2253 _proto.onBufferReset = function
2254 /* event: Events.BUFFER_RESET */
2255 onBufferReset() {
2256 // reset reference to sourcebuffers
2257 this.mediaBuffer = this.videoBuffer = null;
2258 this.loadedmetadata = false;
2259 };
2260
2261 _proto.onBufferCreated = function onBufferCreated(event, data) {
2262 var audioTrack = data.tracks.audio;
2263
2264 if (audioTrack) {
2265 this.mediaBuffer = audioTrack.buffer;
2266 }
2267
2268 if (data.tracks.video) {
2269 this.videoBuffer = data.tracks.video.buffer;
2270 }
2271 };
2272
2273 _proto.onFragBuffered = function onFragBuffered(event, data) {
2274 var frag = data.frag,
2275 part = data.part;
2276
2277 if (frag.type !== _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO) {
2278 return;
2279 }
2280
2281 if (this.fragContextChanged(frag)) {
2282 // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
2283 // Avoid setting state back to IDLE or concluding the audio switch; otherwise, the switched-to track will not buffer
2284 this.warn("Fragment " + frag.sn + (part ? ' p: ' + part.index : '') + " of level " + frag.level + " finished buffering, but was aborted. state: " + this.state + ", audioSwitch: " + this.audioSwitch);
2285 return;
2286 }
2287
2288 if (frag.sn !== 'initSegment') {
2289 this.fragPrevious = frag;
2290
2291 if (this.audioSwitch) {
2292 this.audioSwitch = false;
2293 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_SWITCHED, {
2294 id: this.trackId
2295 });
2296 }
2297 }
2298
2299 this.fragBufferedComplete(frag, part);
2300 };
2301
2302 _proto.onError = function onError(event, data) {
2303 switch (data.details) {
2304 case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].FRAG_LOAD_ERROR:
2305 case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].FRAG_LOAD_TIMEOUT:
2306 case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].KEY_LOAD_ERROR:
2307 case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].KEY_LOAD_TIMEOUT:
2308 // TODO: Skip fragments that do not belong to this.fragCurrent audio-group id
2309 this.onFragmentOrKeyLoadError(_types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO, data);
2310 break;
2311
2312 case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].AUDIO_TRACK_LOAD_ERROR:
2313 case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].AUDIO_TRACK_LOAD_TIMEOUT:
2314 // when in ERROR state, don't switch back to IDLE state in case a non-fatal error is received
2315 if (this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ERROR && this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].STOPPED) {
2316 // if fatal error, stop processing, otherwise move to IDLE to retry loading
2317 this.state = data.fatal ? _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ERROR : _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
2318 this.warn(data.details + " while loading frag, switching to " + this.state + " state");
2319 }
2320
2321 break;
2322
2323 case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].BUFFER_FULL_ERROR:
2324 // if in appending state
2325 if (data.parent === 'audio' && (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING || this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSED)) {
2326 var flushBuffer = true;
2327 var bufferedInfo = this.getFwdBufferInfo(this.mediaBuffer, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO); // 0.5 : tolerance needed as some browsers stall playback before reaching the buffered end
2328 // reduce max buf len if current position is buffered
2329
2330 if (bufferedInfo && bufferedInfo.len > 0.5) {
2331 flushBuffer = !this.reduceMaxBufferLength(bufferedInfo.len);
2332 }
2333
2334 if (flushBuffer) {
2335 // current position is not buffered, but browser is still complaining about buffer full error
2336 // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
2337 // in that case flush the whole audio buffer to recover
2338 this.warn('Buffer full error also media.currentTime is not buffered, flush audio buffer');
2339 this.fragCurrent = null;
2340
2341 _BaseStreamController.prototype.flushMainBuffer.call(this, 0, Number.POSITIVE_INFINITY, 'audio');
2342 }
2343
2344 this.resetLoadingState();
2345 }
2346
2347 break;
2348 }
2349 };
2350
2351 _proto.onBufferFlushed = function onBufferFlushed(event, _ref3) {
2352 var type = _ref3.type;
2353
2354 if (type === _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO) {
2355 this.bufferFlushed = true;
2356 }
2357 };
2358
2359 _proto._handleTransmuxComplete = function _handleTransmuxComplete(transmuxResult) {
2360 var _id3$samples;
2361
2362 var id = 'audio';
2363 var hls = this.hls;
2364 var remuxResult = transmuxResult.remuxResult,
2365 chunkMeta = transmuxResult.chunkMeta;
2366 var context = this.getCurrentContext(chunkMeta);
2367
2368 if (!context) {
2369 this.warn("The loading context changed while buffering fragment " + chunkMeta.sn + " of level " + chunkMeta.level + ". This chunk will not be buffered.");
2370 this.resetLiveStartWhenNotLoaded(chunkMeta.level);
2371 return;
2372 }
2373
2374 var frag = context.frag,
2375 part = context.part;
2376 var audio = remuxResult.audio,
2377 text = remuxResult.text,
2378 id3 = remuxResult.id3,
2379 initSegment = remuxResult.initSegment; // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
2380 // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
2381
2382 if (this.fragContextChanged(frag)) {
2383 return;
2384 }
2385
2386 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING;
2387
2388 if (this.audioSwitch && audio) {
2389 this.completeAudioSwitch();
2390 }
2391
2392 if (initSegment !== null && initSegment !== void 0 && initSegment.tracks) {
2393 this._bufferInitSegment(initSegment.tracks, frag, chunkMeta);
2394
2395 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_PARSING_INIT_SEGMENT, {
2396 frag: frag,
2397 id: id,
2398 tracks: initSegment.tracks
2399 }); // Only flush audio from old audio tracks when PTS is known on new audio track
2400 }
2401
2402 if (audio) {
2403 var startPTS = audio.startPTS,
2404 endPTS = audio.endPTS,
2405 startDTS = audio.startDTS,
2406 endDTS = audio.endDTS;
2407
2408 if (part) {
2409 part.elementaryStreams[_loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO] = {
2410 startPTS: startPTS,
2411 endPTS: endPTS,
2412 startDTS: startDTS,
2413 endDTS: endDTS
2414 };
2415 }
2416
2417 frag.setElementaryStreamInfo(_loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO, startPTS, endPTS, startDTS, endDTS);
2418 this.bufferFragmentData(audio, frag, part, chunkMeta);
2419 }
2420
2421 if (id3 !== null && id3 !== void 0 && (_id3$samples = id3.samples) !== null && _id3$samples !== void 0 && _id3$samples.length) {
2422 var emittedID3 = _extends({
2423 frag: frag,
2424 id: id
2425 }, id3);
2426
2427 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_PARSING_METADATA, emittedID3);
2428 }
2429
2430 if (text) {
2431 var emittedText = _extends({
2432 frag: frag,
2433 id: id
2434 }, text);
2435
2436 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_PARSING_USERDATA, emittedText);
2437 }
2438 };
2439
2440 _proto._bufferInitSegment = function _bufferInitSegment(tracks, frag, chunkMeta) {
2441 if (this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING) {
2442 return;
2443 } // delete any video track found on audio transmuxer
2444
2445
2446 if (tracks.video) {
2447 delete tracks.video;
2448 } // include levelCodec in audio and video tracks
2449
2450
2451 var track = tracks.audio;
2452
2453 if (!track) {
2454 return;
2455 }
2456
2457 track.levelCodec = track.codec;
2458 track.id = 'audio';
2459 this.log("Init audio buffer, container:" + track.container + ", codecs[parsed]=[" + track.codec + "]");
2460 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_CODECS, tracks);
2461 var initSegment = track.initSegment;
2462
2463 if (initSegment !== null && initSegment !== void 0 && initSegment.byteLength) {
2464 var segment = {
2465 type: 'audio',
2466 frag: frag,
2467 part: null,
2468 chunkMeta: chunkMeta,
2469 parent: frag.type,
2470 data: initSegment
2471 };
2472 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_APPENDING, segment);
2473 } // trigger handler right now
2474
2475
2476 this.tick();
2477 };
2478
2479 _proto.loadFragment = function loadFragment(frag, trackDetails, targetBufferTime) {
2480 // only load if fragment is not loaded or if in audio switch
2481 var fragState = this.fragmentTracker.getState(frag);
2482 this.fragCurrent = frag; // we force a frag load during an audio switch, as the fragment tracker might not have evicted previous frags in the case of a quick audio switch
2483
2484 if (this.audioSwitch || fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_4__["FragmentState"].NOT_LOADED || fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_4__["FragmentState"].PARTIAL) {
2485 if (frag.sn === 'initSegment') {
2486 this._loadInitSegment(frag);
2487 } else if (trackDetails.live && !Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this.initPTS[frag.cc])) {
2488 this.log("Waiting for video PTS in continuity counter " + frag.cc + " of live stream before loading audio fragment " + frag.sn + " of level " + this.trackId);
2489 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_INIT_PTS;
2490 } else {
2491 this.startFragRequested = true;
2492
2493 _BaseStreamController.prototype.loadFragment.call(this, frag, trackDetails, targetBufferTime);
2494 }
2495 }
2496 };
2497
2498 _proto.completeAudioSwitch = function completeAudioSwitch() {
2499 var hls = this.hls,
2500 media = this.media,
2501 trackId = this.trackId;
2502
2503 if (media) {
2504 this.log('Switching audio track : flushing all audio');
2505
2506 _BaseStreamController.prototype.flushMainBuffer.call(this, 0, Number.POSITIVE_INFINITY, 'audio');
2507 }
2508
2509 this.audioSwitch = false;
2510 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_SWITCHED, {
2511 id: trackId
2512 });
2513 };
2514
2515 return AudioStreamController;
2516}(_base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["default"]);
2517
2518/* harmony default export */ __webpack_exports__["default"] = (AudioStreamController);
2519
2520/***/ }),
2521
2522/***/ "./src/controller/audio-track-controller.ts":
2523/*!**************************************************!*\
2524 !*** ./src/controller/audio-track-controller.ts ***!
2525 \**************************************************/
2526/*! exports provided: default */
2527/***/ (function(module, __webpack_exports__, __webpack_require__) {
2528__webpack_require__.r(__webpack_exports__);
2529/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
2530/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
2531/* harmony import */ var _base_playlist_controller__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./base-playlist-controller */ "./src/controller/base-playlist-controller.ts");
2532/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
2533function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
2534
2535function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
2536
2537function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
2538
2539function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
2540
2541
2542
2543
2544
2545
2546var AudioTrackController = /*#__PURE__*/function (_BasePlaylistControll) {
2547 _inheritsLoose(AudioTrackController, _BasePlaylistControll);
2548
2549 function AudioTrackController(hls) {
2550 var _this;
2551
2552 _this = _BasePlaylistControll.call(this, hls, '[audio-track-controller]') || this;
2553 _this.tracks = [];
2554 _this.groupId = null;
2555 _this.tracksInGroup = [];
2556 _this.trackId = -1;
2557 _this.trackName = '';
2558 _this.selectDefaultTrack = true;
2559
2560 _this.registerListeners();
2561
2562 return _this;
2563 }
2564
2565 var _proto = AudioTrackController.prototype;
2566
2567 _proto.registerListeners = function registerListeners() {
2568 var hls = this.hls;
2569 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
2570 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
2571 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
2572 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_SWITCHING, this.onLevelSwitching, this);
2573 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
2574 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
2575 };
2576
2577 _proto.unregisterListeners = function unregisterListeners() {
2578 var hls = this.hls;
2579 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
2580 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
2581 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
2582 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_SWITCHING, this.onLevelSwitching, this);
2583 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
2584 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
2585 };
2586
2587 _proto.destroy = function destroy() {
2588 this.unregisterListeners();
2589 this.tracks.length = 0;
2590 this.tracksInGroup.length = 0;
2591
2592 _BasePlaylistControll.prototype.destroy.call(this);
2593 };
2594
2595 _proto.onManifestLoading = function onManifestLoading() {
2596 this.tracks = [];
2597 this.groupId = null;
2598 this.tracksInGroup = [];
2599 this.trackId = -1;
2600 this.trackName = '';
2601 this.selectDefaultTrack = true;
2602 };
2603
2604 _proto.onManifestParsed = function onManifestParsed(event, data) {
2605 this.tracks = data.audioTracks || [];
2606 };
2607
2608 _proto.onAudioTrackLoaded = function onAudioTrackLoaded(event, data) {
2609 var id = data.id,
2610 details = data.details;
2611 var currentTrack = this.tracksInGroup[id];
2612
2613 if (!currentTrack) {
2614 this.warn("Invalid audio track id " + id);
2615 return;
2616 }
2617
2618 var curDetails = currentTrack.details;
2619 currentTrack.details = data.details;
2620 this.log("audioTrack " + id + " loaded [" + details.startSN + "-" + details.endSN + "]");
2621
2622 if (id === this.trackId) {
2623 this.retryCount = 0;
2624 this.playlistLoaded(id, data, curDetails);
2625 }
2626 };
2627
2628 _proto.onLevelLoading = function onLevelLoading(event, data) {
2629 this.switchLevel(data.level);
2630 };
2631
2632 _proto.onLevelSwitching = function onLevelSwitching(event, data) {
2633 this.switchLevel(data.level);
2634 };
2635
2636 _proto.switchLevel = function switchLevel(levelIndex) {
2637 var levelInfo = this.hls.levels[levelIndex];
2638
2639 if (!(levelInfo !== null && levelInfo !== void 0 && levelInfo.audioGroupIds)) {
2640 return;
2641 }
2642
2643 var audioGroupId = levelInfo.audioGroupIds[levelInfo.urlId];
2644
2645 if (this.groupId !== audioGroupId) {
2646 this.groupId = audioGroupId;
2647 var audioTracks = this.tracks.filter(function (track) {
2648 return !audioGroupId || track.groupId === audioGroupId;
2649 }); // Disable selectDefaultTrack if there are no default tracks
2650
2651 if (this.selectDefaultTrack && !audioTracks.some(function (track) {
2652 return track.default;
2653 })) {
2654 this.selectDefaultTrack = false;
2655 }
2656
2657 this.tracksInGroup = audioTracks;
2658 var audioTracksUpdated = {
2659 audioTracks: audioTracks
2660 };
2661 this.log("Updating audio tracks, " + audioTracks.length + " track(s) found in \"" + audioGroupId + "\" group-id");
2662 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].AUDIO_TRACKS_UPDATED, audioTracksUpdated);
2663 this.selectInitialTrack();
2664 }
2665 };
2666
2667 _proto.onError = function onError(event, data) {
2668 _BasePlaylistControll.prototype.onError.call(this, event, data);
2669
2670 if (data.fatal || !data.context) {
2671 return;
2672 }
2673
2674 if (data.context.type === _types_loader__WEBPACK_IMPORTED_MODULE_3__["PlaylistContextType"].AUDIO_TRACK && data.context.id === this.trackId && data.context.groupId === this.groupId) {
2675 this.retryLoadingOrFail(data);
2676 }
2677 };
2678
2679 _proto.setAudioTrack = function setAudioTrack(newId) {
2680 var tracks = this.tracksInGroup; // check if the track index is valid
2681
2682 if (newId < 0 || newId >= tracks.length) {
2683 this.warn('Invalid id passed to audio-track controller');
2684 return;
2685 } // stop the live reloading timer, if any
2686
2687
2688 this.clearTimer();
2689 var lastTrack = tracks[this.trackId];
2690 this.log("Now switching to audio-track index " + newId);
2691 var track = tracks[newId];
2692 var id = track.id,
2693 _track$groupId = track.groupId,
2694 groupId = _track$groupId === void 0 ? '' : _track$groupId,
2695 name = track.name,
2696 type = track.type,
2697 url = track.url;
2698 this.trackId = newId;
2699 this.trackName = name;
2700 this.selectDefaultTrack = false;
2701 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].AUDIO_TRACK_SWITCHING, {
2702 id: id,
2703 groupId: groupId,
2704 name: name,
2705 type: type,
2706 url: url
2707 }); // Do not reload track unless live
2708
2709 if (track.details && !track.details.live) {
2710 return;
2711 }
2712
2713 var hlsUrlParameters = this.switchParams(track.url, lastTrack === null || lastTrack === void 0 ? void 0 : lastTrack.details);
2714 this.loadPlaylist(hlsUrlParameters);
2715 };
2716
2717 _proto.selectInitialTrack = function selectInitialTrack() {
2718 var audioTracks = this.tracksInGroup;
2719 console.assert(audioTracks.length, 'Initial audio track should be selected when tracks are known');
2720 var currentAudioTrackName = this.trackName;
2721 var trackId = this.findTrackId(currentAudioTrackName) || this.findTrackId();
2722
2723 if (trackId !== -1) {
2724 this.setAudioTrack(trackId);
2725 } else {
2726 this.warn("No track found for running audio group-ID: " + this.groupId);
2727 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
2728 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
2729 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].AUDIO_TRACK_LOAD_ERROR,
2730 fatal: true
2731 });
2732 }
2733 };
2734
2735 _proto.findTrackId = function findTrackId(name) {
2736 var audioTracks = this.tracksInGroup;
2737
2738 for (var i = 0; i < audioTracks.length; i++) {
2739 var track = audioTracks[i];
2740
2741 if (!this.selectDefaultTrack || track.default) {
2742 if (!name || name === track.name) {
2743 return track.id;
2744 }
2745 }
2746 }
2747
2748 return -1;
2749 };
2750
2751 _proto.loadPlaylist = function loadPlaylist(hlsUrlParameters) {
2752 var audioTrack = this.tracksInGroup[this.trackId];
2753
2754 if (this.shouldLoadTrack(audioTrack)) {
2755 var id = audioTrack.id;
2756 var groupId = audioTrack.groupId;
2757 var url = audioTrack.url;
2758
2759 if (hlsUrlParameters) {
2760 try {
2761 url = hlsUrlParameters.addDirectives(url);
2762 } catch (error) {
2763 this.warn("Could not construct new URL with HLS Delivery Directives: " + error);
2764 }
2765      } // track not retrieved yet, or it is a live playlist: we need to (re)load it
2766
2767
2768 this.log("loading audio-track playlist for id: " + id);
2769 this.clearTimer();
2770 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].AUDIO_TRACK_LOADING, {
2771 url: url,
2772 id: id,
2773 groupId: groupId,
2774 deliveryDirectives: hlsUrlParameters || null
2775 });
2776 }
2777 };
2778
2779 _createClass(AudioTrackController, [{
2780 key: "audioTracks",
2781 get: function get() {
2782 return this.tracksInGroup;
2783 }
2784 }, {
2785 key: "audioTrack",
2786 get: function get() {
2787 return this.trackId;
2788 },
2789 set: function set(newId) {
2790 // If audio track is selected from API then don't choose from the manifest default track
2791 this.selectDefaultTrack = false;
2792 this.setAudioTrack(newId);
2793 }
2794 }]);
2795
2796 return AudioTrackController;
2797}(_base_playlist_controller__WEBPACK_IMPORTED_MODULE_2__["default"]);
2798
2799/* harmony default export */ __webpack_exports__["default"] = (AudioTrackController);
2800
2801/***/ }),
2802
2803/***/ "./src/controller/base-playlist-controller.ts":
2804/*!****************************************************!*\
2805 !*** ./src/controller/base-playlist-controller.ts ***!
2806 \****************************************************/
2807/*! exports provided: default */
2808/***/ (function(module, __webpack_exports__, __webpack_require__) {
2809__webpack_require__.r(__webpack_exports__);
2810/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return BasePlaylistController; });
2811/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
2812/* harmony import */ var _types_level__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../types/level */ "./src/types/level.ts");
2813/* harmony import */ var _level_helper__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./level-helper */ "./src/controller/level-helper.ts");
2814/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
2815/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
2816
2817
2818
2819
2820
2821
2822
2823var BasePlaylistController = /*#__PURE__*/function () {
2824 function BasePlaylistController(hls, logPrefix) {
2825 this.hls = void 0;
2826 this.timer = -1;
2827 this.canLoad = false;
2828 this.retryCount = 0;
2829 this.log = void 0;
2830 this.warn = void 0;
2831 this.log = _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log.bind(_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"], logPrefix + ":");
2832 this.warn = _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn.bind(_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"], logPrefix + ":");
2833 this.hls = hls;
2834 }
2835
2836 var _proto = BasePlaylistController.prototype;
2837
2838 _proto.destroy = function destroy() {
2839 this.clearTimer(); // @ts-ignore
2840
2841 this.hls = this.log = this.warn = null;
2842 };
2843
2844 _proto.onError = function onError(event, data) {
2845 if (data.fatal && data.type === _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorTypes"].NETWORK_ERROR) {
2846 this.clearTimer();
2847 }
2848 };
2849
2850 _proto.clearTimer = function clearTimer() {
2851 clearTimeout(this.timer);
2852 this.timer = -1;
2853 };
2854
2855 _proto.startLoad = function startLoad() {
2856 this.canLoad = true;
2857 this.retryCount = 0;
2858 this.loadPlaylist();
2859 };
2860
2861 _proto.stopLoad = function stopLoad() {
2862 this.canLoad = false;
2863 this.clearTimer();
2864 };
2865
2866 _proto.switchParams = function switchParams(playlistUri, previous) {
2867 var renditionReports = previous === null || previous === void 0 ? void 0 : previous.renditionReports;
2868
2869 if (renditionReports) {
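      // EXT-X-RENDITION-REPORT entries from the previously loaded playlist advertise the
      // LAST-MSN / LAST-PART of other renditions, so a rendition switch can request the new
      // playlist with LL-HLS delivery directives that are already close to the live edge.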
2870 for (var i = 0; i < renditionReports.length; i++) {
2871 var attr = renditionReports[i];
2872 var uri = '' + attr.URI;
2873
2874 if (uri === playlistUri.substr(-uri.length)) {
2875 var msn = parseInt(attr['LAST-MSN']);
2876 var part = parseInt(attr['LAST-PART']);
2877
2878 if (previous && this.hls.config.lowLatencyMode) {
2879 var currentGoal = Math.min(previous.age - previous.partTarget, previous.targetduration);
2880
2881 if (part !== undefined && currentGoal > previous.partTarget) {
2882 part += 1;
2883 }
2884 }
2885
2886 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(msn)) {
2887 return new _types_level__WEBPACK_IMPORTED_MODULE_1__["HlsUrlParameters"](msn, Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(part) ? part : undefined, _types_level__WEBPACK_IMPORTED_MODULE_1__["HlsSkip"].No);
2888 }
2889 }
2890 }
2891 }
2892 };
2893
2894 _proto.loadPlaylist = function loadPlaylist(hlsUrlParameters) {};
2895
2896 _proto.shouldLoadTrack = function shouldLoadTrack(track) {
2897 return this.canLoad && track && !!track.url && (!track.details || track.details.live);
2898 };
2899
2900 _proto.playlistLoaded = function playlistLoaded(index, data, previousDetails) {
2901 var _this = this;
2902
2903 var details = data.details,
2904 stats = data.stats; // Set last updated date-time
2905
2906 var elapsed = stats.loading.end ? Math.max(0, self.performance.now() - stats.loading.end) : 0;
2907 details.advancedDateTime = Date.now() - elapsed; // if current playlist is a live playlist, arm a timer to reload it
2908
2909 if (details.live || previousDetails !== null && previousDetails !== void 0 && previousDetails.live) {
2910 details.reloaded(previousDetails);
2911
2912 if (previousDetails) {
2913 this.log("live playlist " + index + " " + (details.advanced ? 'REFRESHED ' + details.lastPartSn + '-' + details.lastPartIndex : 'MISSED'));
2914 } // Merge live playlists to adjust fragment starts and fill in delta playlist skipped segments
2915
2916
2917 if (previousDetails && details.fragments.length > 0) {
2918 Object(_level_helper__WEBPACK_IMPORTED_MODULE_2__["mergeDetails"])(previousDetails, details);
2919 }
2920
2921 if (!this.canLoad || !details.live) {
2922 return;
2923 }
2924
2925 var deliveryDirectives;
2926 var msn = undefined;
2927 var part = undefined;
2928
2929 if (details.canBlockReload && details.endSN && details.advanced) {
2930 // Load level with LL-HLS delivery directives
2931 var lowLatencyMode = this.hls.config.lowLatencyMode;
2932 var lastPartSn = details.lastPartSn;
2933 var endSn = details.endSN;
2934 var lastPartIndex = details.lastPartIndex;
2935 var hasParts = lastPartIndex !== -1;
2936 var lastPart = lastPartSn === endSn; // When low latency mode is disabled, we'll skip part requests once the last part index is found
2937
2938 var nextSnStartIndex = lowLatencyMode ? 0 : lastPartIndex;
2939
2940 if (hasParts) {
2941 msn = lastPart ? endSn + 1 : lastPartSn;
2942 part = lastPart ? nextSnStartIndex : lastPartIndex + 1;
2943 } else {
2944 msn = endSn + 1;
2945        } // Low-Latency CDN Tune-in: the "age" header and time since load indicate we're behind by more than one part
2946 // Update directives to obtain the Playlist that has the estimated additional duration of media
2947
2948
2949 var lastAdvanced = details.age;
2950 var cdnAge = lastAdvanced + details.ageHeader;
2951 var currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5);
2952
2953 if (currentGoal > 0) {
2954 if (previousDetails && currentGoal > previousDetails.tuneInGoal) {
2955 // If we attempted to get the next or latest playlist update, but currentGoal increased,
2956            // then we either can't catch up, or the "age" header cannot be trusted.
2957 this.warn("CDN Tune-in goal increased from: " + previousDetails.tuneInGoal + " to: " + currentGoal + " with playlist age: " + details.age);
2958 currentGoal = 0;
2959 } else {
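          // Convert the tune-in goal into whole segments plus parts. For example, with an
          // assumed targetduration of 4s, partTarget of 1s and currentGoal of 5.5s, this
          // advances by floor(5.5 / 4) = 1 segment and round(1.5 / 1) = 2 parts.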
2960 var segments = Math.floor(currentGoal / details.targetduration);
2961 msn += segments;
2962
2963 if (part !== undefined) {
2964 var parts = Math.round(currentGoal % details.targetduration / details.partTarget);
2965 part += parts;
2966 }
2967
2968 this.log("CDN Tune-in age: " + details.ageHeader + "s last advanced " + lastAdvanced.toFixed(2) + "s goal: " + currentGoal + " skip sn " + segments + " to part " + part);
2969 }
2970
2971 details.tuneInGoal = currentGoal;
2972 }
2973
2974 deliveryDirectives = this.getDeliveryDirectives(details, data.deliveryDirectives, msn, part);
2975
2976 if (lowLatencyMode || !lastPart) {
2977 this.loadPlaylist(deliveryDirectives);
2978 return;
2979 }
2980 } else {
2981 deliveryDirectives = this.getDeliveryDirectives(details, data.deliveryDirectives, msn, part);
2982 }
2983
2984 var reloadInterval = Object(_level_helper__WEBPACK_IMPORTED_MODULE_2__["computeReloadInterval"])(details, stats);
2985
2986 if (msn !== undefined && details.canBlockReload) {
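      // With blocking playlist reloads the server holds the request until the playlist has
      // advanced, so the next request can be issued roughly one part-target sooner.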
2987 reloadInterval -= details.partTarget || 1;
2988 }
2989
2990 this.log("reload live playlist " + index + " in " + Math.round(reloadInterval) + " ms");
2991 this.timer = self.setTimeout(function () {
2992 return _this.loadPlaylist(deliveryDirectives);
2993 }, reloadInterval);
2994 } else {
2995 this.clearTimer();
2996 }
2997 };
2998
2999 _proto.getDeliveryDirectives = function getDeliveryDirectives(details, previousDeliveryDirectives, msn, part) {
3000 var skip = Object(_types_level__WEBPACK_IMPORTED_MODULE_1__["getSkipValue"])(details, msn);
3001
3002 if (previousDeliveryDirectives !== null && previousDeliveryDirectives !== void 0 && previousDeliveryDirectives.skip && details.deltaUpdateFailed) {
3003 msn = previousDeliveryDirectives.msn;
3004 part = previousDeliveryDirectives.part;
3005 skip = _types_level__WEBPACK_IMPORTED_MODULE_1__["HlsSkip"].No;
3006 }
3007
3008 return new _types_level__WEBPACK_IMPORTED_MODULE_1__["HlsUrlParameters"](msn, part, skip);
3009 };
3010
3011 _proto.retryLoadingOrFail = function retryLoadingOrFail(errorEvent) {
3012 var _this2 = this;
3013
3014 var config = this.hls.config;
3015 var retry = this.retryCount < config.levelLoadingMaxRetry;
3016
3017 if (retry) {
3018 var _errorEvent$context;
3019
3020 this.retryCount++;
3021
3022 if (errorEvent.details.indexOf('LoadTimeOut') > -1 && (_errorEvent$context = errorEvent.context) !== null && _errorEvent$context !== void 0 && _errorEvent$context.deliveryDirectives) {
3023 // The LL-HLS request already timed out so retry immediately
3024 this.warn("retry playlist loading #" + this.retryCount + " after \"" + errorEvent.details + "\"");
3025 this.loadPlaylist();
3026 } else {
3027 // exponential backoff capped to max retry timeout
3028 var delay = Math.min(Math.pow(2, this.retryCount) * config.levelLoadingRetryDelay, config.levelLoadingMaxRetryTimeout); // Schedule level/track reload
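        // e.g. with an assumed levelLoadingRetryDelay of 1000 ms, retry #3 waits
        // min(2^3 * 1000, levelLoadingMaxRetryTimeout) = 8000 ms unless the cap is lower.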
3029
3030 this.timer = self.setTimeout(function () {
3031 return _this2.loadPlaylist();
3032 }, delay);
3033 this.warn("retry playlist loading #" + this.retryCount + " in " + delay + " ms after \"" + errorEvent.details + "\"");
3034 }
3035 } else {
3036      this.warn("cannot recover from error \"" + errorEvent.details + "\""); // stop the live-reload timer, if any
3037
3038 this.clearTimer(); // switch error to fatal
3039
3040 errorEvent.fatal = true;
3041 }
3042
3043 return retry;
3044 };
3045
3046 return BasePlaylistController;
3047}();
3048
3049
3050
3051/***/ }),
3052
3053/***/ "./src/controller/base-stream-controller.ts":
3054/*!**************************************************!*\
3055 !*** ./src/controller/base-stream-controller.ts ***!
3056 \**************************************************/
3057/*! exports provided: State, default */
3058/***/ (function(module, __webpack_exports__, __webpack_require__) {
3059__webpack_require__.r(__webpack_exports__);
3060/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "State", function() { return State; });
3061/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return BaseStreamController; });
3062/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
3063/* harmony import */ var _task_loop__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../task-loop */ "./src/task-loop.ts");
3064/* harmony import */ var _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./fragment-tracker */ "./src/controller/fragment-tracker.ts");
3065/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
3066/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
3067/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../events */ "./src/events.ts");
3068/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
3069/* harmony import */ var _types_transmuxer__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../types/transmuxer */ "./src/types/transmuxer.ts");
3070/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
3071/* harmony import */ var _utils_discontinuities__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ../utils/discontinuities */ "./src/utils/discontinuities.ts");
3072/* harmony import */ var _fragment_finders__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./fragment-finders */ "./src/controller/fragment-finders.ts");
3073/* harmony import */ var _level_helper__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ./level-helper */ "./src/controller/level-helper.ts");
3074/* harmony import */ var _loader_fragment_loader__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ../loader/fragment-loader */ "./src/loader/fragment-loader.ts");
3075/* harmony import */ var _crypt_decrypter__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ../crypt/decrypter */ "./src/crypt/decrypter.ts");
3076/* harmony import */ var _utils_time_ranges__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(/*! ../utils/time-ranges */ "./src/utils/time-ranges.ts");
3077/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_15__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
3078
3079
3080
3081
3082
3083
3084function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
3085
3086function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
3087
3088function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
3089
3090function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
3091
3092function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
3093
3094
3095
3096
3097
3098
3099
3100
3101
3102
3103
3104
3105
3106
3107
3108
3109var State = {
3110 STOPPED: 'STOPPED',
3111 IDLE: 'IDLE',
3112 KEY_LOADING: 'KEY_LOADING',
3113 FRAG_LOADING: 'FRAG_LOADING',
3114 FRAG_LOADING_WAITING_RETRY: 'FRAG_LOADING_WAITING_RETRY',
3115 WAITING_TRACK: 'WAITING_TRACK',
3116 PARSING: 'PARSING',
3117 PARSED: 'PARSED',
3118 BACKTRACKING: 'BACKTRACKING',
3119 ENDED: 'ENDED',
3120 ERROR: 'ERROR',
3121 WAITING_INIT_PTS: 'WAITING_INIT_PTS',
3122 WAITING_LEVEL: 'WAITING_LEVEL'
3123};
3124
3125var BaseStreamController = /*#__PURE__*/function (_TaskLoop) {
3126 _inheritsLoose(BaseStreamController, _TaskLoop);
3127
3128 function BaseStreamController(hls, fragmentTracker, logPrefix) {
3129 var _this;
3130
3131 _this = _TaskLoop.call(this) || this;
3132 _this.hls = void 0;
3133 _this.fragPrevious = null;
3134 _this.fragCurrent = null;
3135 _this.fragmentTracker = void 0;
3136 _this.transmuxer = null;
3137 _this._state = State.STOPPED;
3138 _this.media = void 0;
3139 _this.mediaBuffer = void 0;
3140 _this.config = void 0;
3141 _this.bitrateTest = false;
3142 _this.lastCurrentTime = 0;
3143 _this.nextLoadPosition = 0;
3144 _this.startPosition = 0;
3145 _this.loadedmetadata = false;
3146 _this.fragLoadError = 0;
3147 _this.retryDate = 0;
3148 _this.levels = null;
3149 _this.fragmentLoader = void 0;
3150 _this.levelLastLoaded = null;
3151 _this.startFragRequested = false;
3152 _this.decrypter = void 0;
3153 _this.initPTS = [];
3154 _this.onvseeking = null;
3155 _this.onvended = null;
3156 _this.logPrefix = '';
3157 _this.log = void 0;
3158 _this.warn = void 0;
3159 _this.logPrefix = logPrefix;
3160 _this.log = _utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"].log.bind(_utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"], logPrefix + ":");
3161 _this.warn = _utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"].warn.bind(_utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"], logPrefix + ":");
3162 _this.hls = hls;
3163 _this.fragmentLoader = new _loader_fragment_loader__WEBPACK_IMPORTED_MODULE_12__["default"](hls.config);
3164 _this.fragmentTracker = fragmentTracker;
3165 _this.config = hls.config;
3166 _this.decrypter = new _crypt_decrypter__WEBPACK_IMPORTED_MODULE_13__["default"](hls, hls.config);
3167 hls.on(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].KEY_LOADED, _this.onKeyLoaded, _assertThisInitialized(_this));
3168 return _this;
3169 }
3170
3171 var _proto = BaseStreamController.prototype;
3172
3173 _proto.doTick = function doTick() {
3174 this.onTickEnd();
3175 };
3176
3177 _proto.onTickEnd = function onTickEnd() {} // eslint-disable-next-line @typescript-eslint/no-unused-vars
3178 ;
3179
3180 _proto.startLoad = function startLoad(startPosition) {};
3181
3182 _proto.stopLoad = function stopLoad() {
3183 this.fragmentLoader.abort();
3184 var frag = this.fragCurrent;
3185
3186 if (frag) {
3187 this.fragmentTracker.removeFragment(frag);
3188 }
3189
3190 this.resetTransmuxer();
3191 this.fragCurrent = null;
3192 this.fragPrevious = null;
3193 this.clearInterval();
3194 this.clearNextTick();
3195 this.state = State.STOPPED;
3196 };
3197
3198 _proto._streamEnded = function _streamEnded(bufferInfo, levelDetails) {
3199 var fragCurrent = this.fragCurrent,
3200 fragmentTracker = this.fragmentTracker; // we just got done loading the final fragment and there is no other buffered range after ...
3201  // rationale is that if there are any buffered ranges after, it means there is an unbuffered portion in between
3202 // so we should not switch to ENDED in that case, to be able to buffer them
3203
3204 if (!levelDetails.live && fragCurrent && // NOTE: Because of the way parts are currently parsed/represented in the playlist, we can end up
3205 // in situations where the current fragment is actually greater than levelDetails.endSN. While
3206 // this feels like the "wrong place" to account for that, this is a narrower/safer change than
3207 // updating e.g. M3U8Parser::parseLevelPlaylist().
3208 fragCurrent.sn >= levelDetails.endSN && !bufferInfo.nextStart) {
3209 var partList = levelDetails.partList; // Since the last part isn't guaranteed to correspond to fragCurrent for ll-hls, check instead if the last part is buffered.
3210
3211 if (partList !== null && partList !== void 0 && partList.length) {
3212 var lastPart = partList[partList.length - 1]; // Checking the midpoint of the part for potential margin of error and related issues.
3213      // NOTE: Technically I believe parts could yield content that is < the computed duration (including potentially a duration of 0)
3214 // and still be spec-compliant, so there may still be edge cases here. Likewise, there could be issues in end of stream
3215 // part mismatches for independent audio and video playlists/segments.
3216
3217 var lastPartBuffered = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].isBuffered(this.media, lastPart.start + lastPart.duration / 2);
3218 return lastPartBuffered;
3219 }
3220
3221 var fragState = fragmentTracker.getState(fragCurrent);
3222 return fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__["FragmentState"].PARTIAL || fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__["FragmentState"].OK;
3223 }
3224
3225 return false;
3226 };
3227
3228 _proto.onMediaAttached = function onMediaAttached(event, data) {
3229 var media = this.media = this.mediaBuffer = data.media;
3230 this.onvseeking = this.onMediaSeeking.bind(this);
3231 this.onvended = this.onMediaEnded.bind(this);
3232 media.addEventListener('seeking', this.onvseeking);
3233 media.addEventListener('ended', this.onvended);
3234 var config = this.config;
3235
3236 if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
3237 this.startLoad(config.startPosition);
3238 }
3239 };
3240
3241 _proto.onMediaDetaching = function onMediaDetaching() {
3242 var media = this.media;
3243
3244 if (media !== null && media !== void 0 && media.ended) {
3245 this.log('MSE detaching and video ended, reset startPosition');
3246 this.startPosition = this.lastCurrentTime = 0;
3247 } // remove video listeners
3248
3249
3250 if (media) {
3251 media.removeEventListener('seeking', this.onvseeking);
3252 media.removeEventListener('ended', this.onvended);
3253 this.onvseeking = this.onvended = null;
3254 }
3255
3256 this.media = this.mediaBuffer = null;
3257 this.loadedmetadata = false;
3258 this.fragmentTracker.removeAllFragments();
3259 this.stopLoad();
3260 };
3261
3262 _proto.onMediaSeeking = function onMediaSeeking() {
3263 var config = this.config,
3264 fragCurrent = this.fragCurrent,
3265 media = this.media,
3266 mediaBuffer = this.mediaBuffer,
3267 state = this.state;
3268 var currentTime = media ? media.currentTime : 0;
3269 var bufferInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(mediaBuffer || media, currentTime, config.maxBufferHole);
3270 this.log("media seeking to " + (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(currentTime) ? currentTime.toFixed(3) : currentTime) + ", state: " + state);
3271
3272 if (state === State.ENDED) {
3273 this.resetLoadingState();
3274 } else if (fragCurrent && !bufferInfo.len) {
3275      // check if we are seeking to an unbuffered area AND if frag loading is in progress
3276 var tolerance = config.maxFragLookUpTolerance;
3277 var fragStartOffset = fragCurrent.start - tolerance;
3278 var fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
3279 var pastFragment = currentTime > fragEndOffset; // check if the seek position is past current fragment, and if so abort loading
3280
3281 if (currentTime < fragStartOffset || pastFragment) {
3282 if (pastFragment && fragCurrent.loader) {
3283 this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
3284 fragCurrent.loader.abort();
3285 }
3286
3287 this.resetLoadingState();
3288 }
3289 }
3290
3291 if (media) {
3292 this.lastCurrentTime = currentTime;
3293    } // if seeking occurs while no media is buffered, adjust startPosition and nextLoadPosition to the seek target
3294
3295
3296 if (!this.loadedmetadata && !bufferInfo.len) {
3297 this.nextLoadPosition = this.startPosition = currentTime;
3298 } // Async tick to speed up processing
3299
3300
3301 this.tickImmediate();
3302 };
3303
3304 _proto.onMediaEnded = function onMediaEnded() {
3305 // reset startPosition and lastCurrentTime to restart playback @ stream beginning
3306 this.startPosition = this.lastCurrentTime = 0;
3307 };
3308
3309 _proto.onKeyLoaded = function onKeyLoaded(event, data) {
3310 if (this.state !== State.KEY_LOADING || data.frag !== this.fragCurrent || !this.levels) {
3311 return;
3312 }
3313
3314 this.state = State.IDLE;
3315 var levelDetails = this.levels[data.frag.level].details;
3316
3317 if (levelDetails) {
3318 this.loadFragment(data.frag, levelDetails, data.frag.start);
3319 }
3320 };
3321
3322 _proto.onHandlerDestroying = function onHandlerDestroying() {
3323 this.stopLoad();
3324
3325 _TaskLoop.prototype.onHandlerDestroying.call(this);
3326 };
3327
3328 _proto.onHandlerDestroyed = function onHandlerDestroyed() {
3329 this.state = State.STOPPED;
3330 this.hls.off(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].KEY_LOADED, this.onKeyLoaded, this);
3331
3332 if (this.fragmentLoader) {
3333 this.fragmentLoader.destroy();
3334 }
3335
3336 if (this.decrypter) {
3337 this.decrypter.destroy();
3338 }
3339
3340 this.hls = this.log = this.warn = this.decrypter = this.fragmentLoader = this.fragmentTracker = null;
3341
3342 _TaskLoop.prototype.onHandlerDestroyed.call(this);
3343 };
3344
3345 _proto.loadKey = function loadKey(frag, details) {
3346 this.log("Loading key for " + frag.sn + " of [" + details.startSN + "-" + details.endSN + "], " + (this.logPrefix === '[stream-controller]' ? 'level' : 'track') + " " + frag.level);
3347 this.state = State.KEY_LOADING;
3348 this.fragCurrent = frag;
3349 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].KEY_LOADING, {
3350 frag: frag
3351 });
3352 };
3353
3354 _proto.loadFragment = function loadFragment(frag, levelDetails, targetBufferTime) {
3355 this._loadFragForPlayback(frag, levelDetails, targetBufferTime);
3356 };
3357
3358 _proto._loadFragForPlayback = function _loadFragForPlayback(frag, levelDetails, targetBufferTime) {
3359 var _this2 = this;
3360
3361 var progressCallback = function progressCallback(data) {
3362 if (_this2.fragContextChanged(frag)) {
3363 _this2.warn("Fragment " + frag.sn + (data.part ? ' p: ' + data.part.index : '') + " of level " + frag.level + " was dropped during download.");
3364
3365 _this2.fragmentTracker.removeFragment(frag);
3366
3367 return;
3368 }
3369
3370 frag.stats.chunkCount++;
3371
3372 _this2._handleFragmentLoadProgress(data);
3373 };
3374
3375 this._doFragLoad(frag, levelDetails, targetBufferTime, progressCallback).then(function (data) {
3376 if (!data) {
3377 // if we're here we probably needed to backtrack or are waiting for more parts
3378 return;
3379 }
3380
3381 _this2.fragLoadError = 0;
3382 var state = _this2.state;
3383
3384 if (_this2.fragContextChanged(frag)) {
3385 if (state === State.FRAG_LOADING || state === State.BACKTRACKING || !_this2.fragCurrent && state === State.PARSING) {
3386 _this2.fragmentTracker.removeFragment(frag);
3387
3388 _this2.state = State.IDLE;
3389 }
3390
3391 return;
3392 }
3393
3394 if ('payload' in data) {
3395 _this2.log("Loaded fragment " + frag.sn + " of level " + frag.level);
3396
3397 _this2.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_LOADED, data); // Tracker backtrack must be called after onFragLoaded to update the fragment entity state to BACKTRACKED
3398 // This happens after handleTransmuxComplete when the worker or progressive is disabled
3399
3400
3401 if (_this2.state === State.BACKTRACKING) {
3402 _this2.fragmentTracker.backtrack(frag, data);
3403
3404 _this2.resetFragmentLoading(frag);
3405
3406 return;
3407 }
3408 } // Pass through the whole payload; controllers not implementing progressive loading receive data from this callback
3409
3410
3411 _this2._handleFragmentLoadComplete(data);
3412 }).catch(function (reason) {
3413 _this2.warn(reason);
3414
3415 _this2.resetFragmentLoading(frag);
3416 });
3417 };
3418
3419 _proto.flushMainBuffer = function flushMainBuffer(startOffset, endOffset, type) {
3420 if (type === void 0) {
3421 type = null;
3422 }
3423
3424 if (!(startOffset - endOffset)) {
3425 return;
3426 } // When alternate audio is playing, the audio-stream-controller is responsible for the audio buffer. Otherwise,
3427 // passing a null type flushes both buffers
3428
3429
3430 var flushScope = {
3431 startOffset: startOffset,
3432 endOffset: endOffset,
3433 type: type
3434 }; // Reset load errors on flush
3435
3436 this.fragLoadError = 0;
3437 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].BUFFER_FLUSHING, flushScope);
3438 };
3439
3440 _proto._loadInitSegment = function _loadInitSegment(frag) {
3441 var _this3 = this;
3442
3443 this._doFragLoad(frag).then(function (data) {
3444 if (!data || _this3.fragContextChanged(frag) || !_this3.levels) {
3445 throw new Error('init load aborted');
3446 }
3447
3448 return data;
3449 }).then(function (data) {
3450 var hls = _this3.hls;
3451 var payload = data.payload;
3452 var decryptData = frag.decryptdata; // check to see if the payload needs to be decrypted
3453
3454 if (payload && payload.byteLength > 0 && decryptData && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
3455        var startTime = self.performance.now(); // decrypt the payload
3456
3457 return _this3.decrypter.webCryptoDecrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).then(function (decryptedData) {
3458 var endTime = self.performance.now();
3459 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_DECRYPTED, {
3460 frag: frag,
3461 payload: decryptedData,
3462 stats: {
3463 tstart: startTime,
3464 tdecrypt: endTime
3465 }
3466 });
3467 data.payload = decryptedData;
3468 return data;
3469 });
3470 }
3471
3472 return data;
3473 }).then(function (data) {
3474 var fragCurrent = _this3.fragCurrent,
3475 hls = _this3.hls,
3476 levels = _this3.levels;
3477
3478 if (!levels) {
3479 throw new Error('init load aborted, missing levels');
3480 }
3481
3482 var details = levels[frag.level].details;
3483 console.assert(details, 'Level details are defined when init segment is loaded');
3484 var stats = frag.stats;
3485 _this3.state = State.IDLE;
3486 _this3.fragLoadError = 0;
3487 frag.data = new Uint8Array(data.payload);
3488 stats.parsing.start = stats.buffering.start = self.performance.now();
3489 stats.parsing.end = stats.buffering.end = self.performance.now(); // Silence FRAG_BUFFERED event if fragCurrent is null
3490
3491 if (data.frag === fragCurrent) {
3492 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_BUFFERED, {
3493 stats: stats,
3494 frag: fragCurrent,
3495 part: null,
3496 id: frag.type
3497 });
3498 }
3499
3500 _this3.tick();
3501 }).catch(function (reason) {
3502 _this3.warn(reason);
3503
3504 _this3.resetFragmentLoading(frag);
3505 });
3506 };
3507
3508 _proto.fragContextChanged = function fragContextChanged(frag) {
3509 var fragCurrent = this.fragCurrent;
3510 return !frag || !fragCurrent || frag.level !== fragCurrent.level || frag.sn !== fragCurrent.sn || frag.urlId !== fragCurrent.urlId;
3511 };
3512
3513 _proto.fragBufferedComplete = function fragBufferedComplete(frag, part) {
3514 var media = this.mediaBuffer ? this.mediaBuffer : this.media;
3515 this.log("Buffered " + frag.type + " sn: " + frag.sn + (part ? ' part: ' + part.index : '') + " of " + (this.logPrefix === '[stream-controller]' ? 'level' : 'track') + " " + frag.level + " " + _utils_time_ranges__WEBPACK_IMPORTED_MODULE_14__["default"].toString(_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].getBuffered(media)));
3516 this.state = State.IDLE;
3517 this.tick();
3518 };
3519
3520 _proto._handleFragmentLoadComplete = function _handleFragmentLoadComplete(fragLoadedEndData) {
3521 var transmuxer = this.transmuxer;
3522
3523 if (!transmuxer) {
3524 return;
3525 }
3526
3527 var frag = fragLoadedEndData.frag,
3528 part = fragLoadedEndData.part,
3529 partsLoaded = fragLoadedEndData.partsLoaded; // If we did not load parts, or loaded all parts, we have complete (not partial) fragment data
3530
3531 var complete = !partsLoaded || partsLoaded.length === 0 || partsLoaded.some(function (fragLoaded) {
3532 return !fragLoaded;
3533 });
3534 var chunkMeta = new _types_transmuxer__WEBPACK_IMPORTED_MODULE_7__["ChunkMetadata"](frag.level, frag.sn, frag.stats.chunkCount + 1, 0, part ? part.index : -1, !complete);
3535 transmuxer.flush(chunkMeta);
3536 } // eslint-disable-next-line @typescript-eslint/no-unused-vars
3537 ;
3538
3539 _proto._handleFragmentLoadProgress = function _handleFragmentLoadProgress(frag) {};
3540
3541 _proto._doFragLoad = function _doFragLoad(frag, details, targetBufferTime, progressCallback) {
3542 var _this4 = this;
3543
3544 if (targetBufferTime === void 0) {
3545 targetBufferTime = null;
3546 }
3547
3548 if (!this.levels) {
3549 throw new Error('frag load aborted, missing levels');
3550 }
3551
3552 targetBufferTime = Math.max(frag.start, targetBufferTime || 0);
3553
3554 if (this.config.lowLatencyMode && details) {
3555 var partList = details.partList;
3556
3557 if (partList && progressCallback) {
3558 if (targetBufferTime > frag.end && details.fragmentHint) {
3559 frag = details.fragmentHint;
3560 }
3561
3562 var partIndex = this.getNextPart(partList, frag, targetBufferTime);
3563
3564 if (partIndex > -1) {
3565 var part = partList[partIndex];
3566 this.log("Loading part sn: " + frag.sn + " p: " + part.index + " cc: " + frag.cc + " of playlist [" + details.startSN + "-" + details.endSN + "] parts [0-" + partIndex + "-" + (partList.length - 1) + "] " + (this.logPrefix === '[stream-controller]' ? 'level' : 'track') + ": " + frag.level + ", target: " + parseFloat(targetBufferTime.toFixed(3)));
3567 this.nextLoadPosition = part.start + part.duration;
3568 this.state = State.FRAG_LOADING;
3569 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_LOADING, {
3570 frag: frag,
3571 part: partList[partIndex],
3572 targetBufferTime: targetBufferTime
3573 });
3574 return this.doFragPartsLoad(frag, partList, partIndex, progressCallback).catch(function (error) {
3575 return _this4.handleFragLoadError(error);
3576 });
3577 } else if (!frag.url || this.loadedEndOfParts(partList, targetBufferTime)) {
3578 // Fragment hint has no parts
3579 return Promise.resolve(null);
3580 }
3581 }
3582 }
3583
3584 this.log("Loading fragment " + frag.sn + " cc: " + frag.cc + " " + (details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : '') + (this.logPrefix === '[stream-controller]' ? 'level' : 'track') + ": " + frag.level + ", target: " + parseFloat(targetBufferTime.toFixed(3))); // Don't update nextLoadPosition for fragments which are not buffered
3585
3586 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(frag.sn) && !this.bitrateTest) {
3587 this.nextLoadPosition = frag.start + frag.duration;
3588 }
3589
3590 this.state = State.FRAG_LOADING;
3591 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_LOADING, {
3592 frag: frag,
3593 targetBufferTime: targetBufferTime
3594 });
3595 return this.fragmentLoader.load(frag, progressCallback).catch(function (error) {
3596 return _this4.handleFragLoadError(error);
3597 });
3598 };
3599
3600 _proto.doFragPartsLoad = function doFragPartsLoad(frag, partList, partIndex, progressCallback) {
3601 var _this5 = this;
3602
3603 return new Promise(function (resolve, reject) {
3604 var partsLoaded = [];
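      // Parts are loaded sequentially: after each part resolves, recurse to the next index
      // until the following part belongs to a different fragment, then resolve with all
      // parts loaded so far.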
3605
3606 var loadPartIndex = function loadPartIndex(index) {
3607 var part = partList[index];
3608
3609 _this5.fragmentLoader.loadPart(frag, part, progressCallback).then(function (partLoadedData) {
3610 partsLoaded[part.index] = partLoadedData;
3611 var loadedPart = partLoadedData.part;
3612
3613 _this5.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_LOADED, partLoadedData);
3614
3615 var nextPart = partList[index + 1];
3616
3617 if (nextPart && nextPart.fragment === frag) {
3618 loadPartIndex(index + 1);
3619 } else {
3620 return resolve({
3621 frag: frag,
3622 part: loadedPart,
3623 partsLoaded: partsLoaded
3624 });
3625 }
3626 }).catch(reject);
3627 };
3628
3629 loadPartIndex(partIndex);
3630 });
3631 };
3632
3633 _proto.handleFragLoadError = function handleFragLoadError(_ref) {
3634 var data = _ref.data;
3635
3636 if (data && data.details === _errors__WEBPACK_IMPORTED_MODULE_6__["ErrorDetails"].INTERNAL_ABORTED) {
3637 this.handleFragLoadAborted(data.frag, data.part);
3638 } else {
3639 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].ERROR, data);
3640 }
3641
3642 return null;
3643 };
3644
3645 _proto._handleTransmuxerFlush = function _handleTransmuxerFlush(chunkMeta) {
3646 var context = this.getCurrentContext(chunkMeta);
3647
3648 if (!context || this.state !== State.PARSING) {
3649 if (!this.fragCurrent) {
3650 this.state = State.IDLE;
3651 }
3652
3653 return;
3654 }
3655
3656 var frag = context.frag,
3657 part = context.part,
3658 level = context.level;
3659 var now = self.performance.now();
3660 frag.stats.parsing.end = now;
3661
3662 if (part) {
3663 part.stats.parsing.end = now;
3664 }
3665
3666 this.updateLevelTiming(frag, part, level, chunkMeta.partial);
3667 };
3668
3669 _proto.getCurrentContext = function getCurrentContext(chunkMeta) {
3670 var levels = this.levels;
3671 var levelIndex = chunkMeta.level,
3672 sn = chunkMeta.sn,
3673 partIndex = chunkMeta.part;
3674
3675 if (!levels || !levels[levelIndex]) {
3676 this.warn("Levels object was unset while buffering fragment " + sn + " of level " + levelIndex + ". The current chunk will not be buffered.");
3677 return null;
3678 }
3679
3680 var level = levels[levelIndex];
3681 var part = partIndex > -1 ? Object(_level_helper__WEBPACK_IMPORTED_MODULE_11__["getPartWith"])(level, sn, partIndex) : null;
3682 var frag = part ? part.fragment : Object(_level_helper__WEBPACK_IMPORTED_MODULE_11__["getFragmentWithSN"])(level, sn, this.fragCurrent);
3683
3684 if (!frag) {
3685 return null;
3686 }
3687
3688 return {
3689 frag: frag,
3690 part: part,
3691 level: level
3692 };
3693 };
3694
3695 _proto.bufferFragmentData = function bufferFragmentData(data, frag, part, chunkMeta) {
3696 if (!data || this.state !== State.PARSING) {
3697 return;
3698 }
3699
3700 var data1 = data.data1,
3701 data2 = data.data2;
3702 var buffer = data1;
3703
3704 if (data1 && data2) {
3705 // Combine the moof + mdat so that we buffer with a single append
3706 buffer = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_8__["appendUint8Array"])(data1, data2);
3707 }
3708
3709 if (!buffer || !buffer.length) {
3710 return;
3711 }
3712
3713 var segment = {
3714 type: data.type,
3715 frag: frag,
3716 part: part,
3717 chunkMeta: chunkMeta,
3718 parent: frag.type,
3719 data: buffer
3720 };
3721 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].BUFFER_APPENDING, segment);
3722
3723 if (data.dropped && data.independent && !part) {
3724 // Clear buffer so that we reload previous segments sequentially if required
3725 this.flushBufferGap(frag);
3726 }
3727 };
3728
3729 _proto.flushBufferGap = function flushBufferGap(frag) {
3730 var media = this.media;
3731
3732 if (!media) {
3733 return;
3734 } // If currentTime is not buffered, clear the back buffer so that we can backtrack as much as needed
3735
3736
3737 if (!_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].isBuffered(media, media.currentTime)) {
3738 this.flushMainBuffer(0, frag.start);
3739 return;
3740 } // Remove back-buffer without interrupting playback to allow back tracking
3741
3742
3743 var currentTime = media.currentTime;
3744 var bufferInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(media, currentTime, 0);
3745 var fragDuration = frag.duration;
3746 var segmentFraction = Math.min(this.config.maxFragLookUpTolerance * 2, fragDuration * 0.25);
3747 var start = Math.max(Math.min(frag.start - segmentFraction, bufferInfo.end - segmentFraction), currentTime + segmentFraction);
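    // Flush from just ahead of the playhead up to the new fragment's start, keeping a small
    // margin (a quarter of the fragment, capped at twice maxFragLookUpTolerance) so playback
    // is not disturbed while the buffer is cleared for backtracking.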
3748
3749 if (frag.start - start > segmentFraction) {
3750 this.flushMainBuffer(start, frag.start);
3751 }
3752 };
3753
3754 _proto.getFwdBufferInfo = function getFwdBufferInfo(bufferable, type) {
3755 var config = this.config;
3756 var pos = this.getLoadPosition();
3757
3758 if (!Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(pos)) {
3759 return null;
3760 }
3761
3762 var bufferInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(bufferable, pos, config.maxBufferHole); // Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
3763
3764 if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
3765 var bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
3766
3767 if (bufferedFragAtPos && bufferInfo.nextStart < bufferedFragAtPos.end) {
3768 return _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, config.maxBufferHole));
3769 }
3770 }
3771
3772 return bufferInfo;
3773 };
3774
3775 _proto.getMaxBufferLength = function getMaxBufferLength(levelBitrate) {
3776 var config = this.config;
3777 var maxBufLen;
3778
3779 if (levelBitrate) {
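      // maxBufferSize is a byte budget while levelBitrate is in bits per second, so
      // 8 * bytes / bitrate yields how many seconds of media that budget can hold.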
3780 maxBufLen = Math.max(8 * config.maxBufferSize / levelBitrate, config.maxBufferLength);
3781 } else {
3782 maxBufLen = config.maxBufferLength;
3783 }
3784
3785 return Math.min(maxBufLen, config.maxMaxBufferLength);
3786 };
3787
3788 _proto.reduceMaxBufferLength = function reduceMaxBufferLength(threshold) {
3789 var config = this.config;
3790 var minLength = threshold || config.maxBufferLength;
3791
3792 if (config.maxMaxBufferLength >= minLength) {
3793 // reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
3794 config.maxMaxBufferLength /= 2;
3795 this.warn("Reduce max buffer length to " + config.maxMaxBufferLength + "s");
3796 return true;
3797 }
3798
3799 return false;
3800 };
3801
3802 _proto.getNextFragment = function getNextFragment(pos, levelDetails) {
3803 var _frag, _frag2;
3804
3805 var fragments = levelDetails.fragments;
3806 var fragLen = fragments.length;
3807
3808 if (!fragLen) {
3809 return null;
3810 } // find fragment index, contiguous with end of buffer position
3811
3812
3813 var config = this.config;
3814 var start = fragments[0].start;
3815 var frag;
3816
3817 if (levelDetails.live) {
3818 var initialLiveManifestSize = config.initialLiveManifestSize;
3819
3820 if (fragLen < initialLiveManifestSize) {
3821 this.warn("Not enough fragments to start playback (have: " + fragLen + ", need: " + initialLiveManifestSize + ")");
3822 return null;
3823 } // The real fragment start times for a live stream are only known after the PTS range for that level is known.
3824 // In order to discover the range, we load the best matching fragment for that level and demux it.
3825 // Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that
3826 // we get the fragment matching that start time
3827
3828
3829 if (!levelDetails.PTSKnown && !this.startFragRequested && this.startPosition === -1) {
3830 frag = this.getInitialLiveFragment(levelDetails, fragments);
3831 this.startPosition = frag ? this.hls.liveSyncPosition || frag.start : pos;
3832 }
3833 } else if (pos <= start) {
3834 // VoD playlist: if loadPosition before start of playlist, load first fragment
3835 frag = fragments[0];
3836 } // If we haven't run into any special cases already, just load the fragment most closely matching the requested position
3837
3838
3839 if (!frag) {
3840 var end = config.lowLatencyMode ? levelDetails.partEnd : levelDetails.fragmentEnd;
3841 frag = this.getFragmentAtPosition(pos, end, levelDetails);
3842 } // If an initSegment is present, it must be buffered first
3843
3844
3845 if ((_frag = frag) !== null && _frag !== void 0 && _frag.initSegment && !((_frag2 = frag) !== null && _frag2 !== void 0 && _frag2.initSegment.data) && !this.bitrateTest) {
3846 frag = frag.initSegment;
3847 }
3848
3849 return frag;
3850 };
3851
3852 _proto.getNextPart = function getNextPart(partList, frag, targetBufferTime) {
3853 var nextPart = -1;
3854 var contiguous = false;
3855 var independentAttrOmitted = true;
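    // A part is a loading candidate when it belongs to `frag`, is not yet loaded, and is
    // either independent, contiguous with a loaded part, or no part in the list declares
    // INDEPENDENT; the scan stops once a candidate exists and a part starts past targetBufferTime.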
3856
3857 for (var i = 0, len = partList.length; i < len; i++) {
3858 var part = partList[i];
3859 independentAttrOmitted = independentAttrOmitted && !part.independent;
3860
3861 if (nextPart > -1 && targetBufferTime < part.start) {
3862 break;
3863 }
3864
3865 var loaded = part.loaded;
3866
3867 if (!loaded && (contiguous || part.independent || independentAttrOmitted) && part.fragment === frag) {
3868 nextPart = i;
3869 }
3870
3871 contiguous = loaded;
3872 }
3873
3874 return nextPart;
3875 };
3876
3877 _proto.loadedEndOfParts = function loadedEndOfParts(partList, targetBufferTime) {
3878 var lastPart = partList[partList.length - 1];
3879 return lastPart && targetBufferTime > lastPart.start && lastPart.loaded;
3880 }
3881 /*
3882   This method is used to find the best matching first fragment for a live playlist. This fragment is used to calculate the
3883 "sliding" of the playlist, which is its offset from the start of playback. After sliding we can compute the real
3884 start and end times for each fragment in the playlist (after which this method will not need to be called).
3885 */
3886 ;
3887
3888 _proto.getInitialLiveFragment = function getInitialLiveFragment(levelDetails, fragments) {
3889 var fragPrevious = this.fragPrevious;
3890 var frag = null;
3891
3892 if (fragPrevious) {
3893 if (levelDetails.hasProgramDateTime) {
3894 // Prefer using PDT, because it can be accurate enough to choose the correct fragment without knowing the level sliding
3895 this.log("Live playlist, switching playlist, load frag with same PDT: " + fragPrevious.programDateTime);
3896 frag = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_10__["findFragmentByPDT"])(fragments, fragPrevious.endProgramDateTime, this.config.maxFragLookUpTolerance);
3897 }
3898
3899 if (!frag) {
3900 // SN does not need to be accurate between renditions, but depending on the packaging it may be so.
3901 var targetSN = fragPrevious.sn + 1;
3902
3903 if (targetSN >= levelDetails.startSN && targetSN <= levelDetails.endSN) {
3904 var fragNext = fragments[targetSN - levelDetails.startSN]; // Ensure that we're staying within the continuity range, since PTS resets upon a new range
3905
3906 if (fragPrevious.cc === fragNext.cc) {
3907 frag = fragNext;
3908 this.log("Live playlist, switching playlist, load frag with next SN: " + frag.sn);
3909 }
3910 } // It's important to stay within the continuity range if available; otherwise the fragments in the playlist
3911 // will have the wrong start times
3912
3913
3914 if (!frag) {
3915 frag = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_10__["findFragWithCC"])(fragments, fragPrevious.cc);
3916
3917 if (frag) {
3918 this.log("Live playlist, switching playlist, load frag with same CC: " + frag.sn);
3919 }
3920 }
3921 }
3922 } else {
3923 // Find a new start fragment when fragPrevious is null
3924 var liveStart = this.hls.liveSyncPosition;
3925
3926 if (liveStart !== null) {
3927 frag = this.getFragmentAtPosition(liveStart, this.bitrateTest ? levelDetails.fragmentEnd : levelDetails.edge, levelDetails);
3928 }
3929 }
3930
3931 return frag;
3932 }
3933 /*
3934 This method finds the best matching fragment given the provided position.
3935 */
3936 ;
3937
3938 _proto.getFragmentAtPosition = function getFragmentAtPosition(bufferEnd, end, levelDetails) {
3939 var config = this.config,
3940 fragPrevious = this.fragPrevious;
3941 var fragments = levelDetails.fragments,
3942 endSN = levelDetails.endSN;
3943 var fragmentHint = levelDetails.fragmentHint;
3944 var tolerance = config.maxFragLookUpTolerance;
3945 var loadingParts = !!(config.lowLatencyMode && levelDetails.partList && fragmentHint);
3946
3947 if (loadingParts && fragmentHint && !this.bitrateTest) {
3948 // Include incomplete fragment with parts at end
3949 fragments = fragments.concat(fragmentHint);
3950 endSN = fragmentHint.sn;
3951 }
3952
3953 var frag;
3954
3955 if (bufferEnd < end) {
3956 var lookupTolerance = bufferEnd > end - tolerance ? 0 : tolerance; // Remove the tolerance if it would put the bufferEnd past the actual end of stream
3957 // Uses buffer and sequence number to calculate switch segment (required if using EXT-X-DISCONTINUITY-SEQUENCE)
3958
3959 frag = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_10__["findFragmentByPTS"])(fragPrevious, fragments, bufferEnd, lookupTolerance);
3960 } else {
3961 // reach end of playlist
3962 frag = fragments[fragments.length - 1];
3963 }
3964
3965 if (frag) {
3966 var curSNIdx = frag.sn - levelDetails.startSN;
3967 var sameLevel = fragPrevious && frag.level === fragPrevious.level;
3968 var nextFrag = fragments[curSNIdx + 1];
3969 var fragState = this.fragmentTracker.getState(frag);
3970
3971 if (fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__["FragmentState"].BACKTRACKED) {
3972 frag = null;
3973 var i = curSNIdx;
3974
3975 while (fragments[i] && this.fragmentTracker.getState(fragments[i]) === _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__["FragmentState"].BACKTRACKED) {
3976            // When fragPrevious is null, backtrack to the first fragment that is not BACKTRACKED for loading
3977 // When fragPrevious is set, we want the first BACKTRACKED fragment for parsing and buffering
3978 if (!fragPrevious) {
3979 frag = fragments[--i];
3980 } else {
3981 frag = fragments[i--];
3982 }
3983 }
3984
3985 if (!frag) {
3986 frag = nextFrag;
3987 }
3988 } else if (fragPrevious && frag.sn === fragPrevious.sn && !loadingParts) {
3989 // Force the next fragment to load if the previous one was already selected. This can occasionally happen with
3990 // non-uniform fragment durations
3991 if (sameLevel) {
3992 if (frag.sn < endSN && this.fragmentTracker.getState(nextFrag) !== _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__["FragmentState"].OK) {
3993 this.log("SN " + frag.sn + " just loaded, load next one: " + nextFrag.sn);
3994 frag = nextFrag;
3995 } else {
3996 frag = null;
3997 }
3998 }
3999 }
4000 }
4001
4002 return frag;
4003 };
4004
4005 _proto.synchronizeToLiveEdge = function synchronizeToLiveEdge(levelDetails) {
4006 var config = this.config,
4007 media = this.media;
4008
4009 if (!media) {
4010 return;
4011 }
4012
4013 var liveSyncPosition = this.hls.liveSyncPosition;
4014 var currentTime = media.currentTime;
4015 var start = levelDetails.fragments[0].start;
4016 var end = levelDetails.edge;
4017 var withinSlidingWindow = currentTime >= start - config.maxFragLookUpTolerance && currentTime <= end; // Continue if we can seek forward to sync position or if current time is outside of sliding window
4018
4019 if (liveSyncPosition !== null && media.duration > liveSyncPosition && (currentTime < liveSyncPosition || !withinSlidingWindow)) {
4020 // Continue if buffer is starving or if current time is behind max latency
4021 var maxLatency = config.liveMaxLatencyDuration !== undefined ? config.liveMaxLatencyDuration : config.liveMaxLatencyDurationCount * levelDetails.targetduration;
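      // e.g. with an assumed liveMaxLatencyDurationCount of 10 and a 4s target duration,
      // drifting more than 40s behind the live edge (or stalling outside the sliding window)
      // triggers the seek back to liveSyncPosition below.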
4022
4023 if (!withinSlidingWindow && media.readyState < 4 || currentTime < end - maxLatency) {
4024 if (!this.loadedmetadata) {
4025 this.nextLoadPosition = liveSyncPosition;
4026 } // Only seek if ready and there is not a significant forward buffer available for playback
4027
4028
4029 if (media.readyState) {
4030 this.warn("Playback: " + currentTime.toFixed(3) + " is located too far from the end of live sliding playlist: " + end + ", reset currentTime to : " + liveSyncPosition.toFixed(3));
4031 media.currentTime = liveSyncPosition;
4032 }
4033 }
4034 }
4035 };
4036
4037 _proto.alignPlaylists = function alignPlaylists(details, previousDetails) {
4038 var levels = this.levels,
4039 levelLastLoaded = this.levelLastLoaded,
4040 fragPrevious = this.fragPrevious;
4041 var lastLevel = levelLastLoaded !== null ? levels[levelLastLoaded] : null; // FIXME: If not for `shouldAlignOnDiscontinuities` requiring fragPrevious.cc,
4042 // this could all go in level-helper mergeDetails()
4043
4044 var length = details.fragments.length;
4045
4046 if (!length) {
4047 this.warn("No fragments in live playlist");
4048 return 0;
4049 }
4050
4051 var slidingStart = details.fragments[0].start;
4052 var firstLevelLoad = !previousDetails;
4053
4054 var aligned = details.alignedSliding && Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(slidingStart);
4055
4056 if (firstLevelLoad || !aligned && !slidingStart) {
4057 Object(_utils_discontinuities__WEBPACK_IMPORTED_MODULE_9__["alignStream"])(fragPrevious, lastLevel, details);
4058 var alignedSlidingStart = details.fragments[0].start;
4059 this.log("Live playlist sliding: " + alignedSlidingStart.toFixed(2) + " start-sn: " + (previousDetails ? previousDetails.startSN : 'na') + "->" + details.startSN + " prev-sn: " + (fragPrevious ? fragPrevious.sn : 'na') + " fragments: " + length);
4060 return alignedSlidingStart;
4061 }
4062
4063 return slidingStart;
4064 };
4065
4066 _proto.waitForCdnTuneIn = function waitForCdnTuneIn(details) {
4067 // Wait for Low-Latency CDN Tune-in to get an updated playlist
4068 var advancePartLimit = 3;
4069 return details.live && details.canBlockReload && details.tuneInGoal > Math.max(details.partHoldBack, details.partTarget * advancePartLimit);
4070 };
4071
4072 _proto.setStartPosition = function setStartPosition(details, sliding) {
4073 // compute start position if set to -1. use it straight away if value is defined
4074 var startPosition = this.startPosition;
4075
4076 if (startPosition < sliding) {
4077 startPosition = -1;
4078 }
4079
4080 if (startPosition === -1 || this.lastCurrentTime === -1) {
4081 // first, check if start time offset has been set in playlist, if yes, use this value
4082 var startTimeOffset = details.startTimeOffset;
4083
4084 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(startTimeOffset)) {
4085 startPosition = sliding + startTimeOffset;
4086
4087 if (startTimeOffset < 0) {
4088 startPosition += details.totalduration;
4089 }
4090
4091 startPosition = Math.min(Math.max(sliding, startPosition), sliding + details.totalduration);
4092 this.log("Start time offset " + startTimeOffset + " found in playlist, adjust startPosition to " + startPosition);
4093 this.startPosition = startPosition;
4094 } else if (details.live) {
4095 // Leave this.startPosition at -1, so that we can use `getInitialLiveFragment` logic when startPosition has
4096        // not been specified via the config or as an argument to startLoad (#3736).
4097 startPosition = this.hls.liveSyncPosition || sliding;
4098 } else {
4099 this.startPosition = startPosition = 0;
4100 }
4101
4102 this.lastCurrentTime = startPosition;
4103 }
4104
4105 this.nextLoadPosition = startPosition;
4106 };
4107
4108 _proto.getLoadPosition = function getLoadPosition() {
4109 var media = this.media; // if we have not yet loaded any fragment, start loading from start position
4110
4111 var pos = 0;
4112
4113 if (this.loadedmetadata && media) {
4114 pos = media.currentTime;
4115 } else if (this.nextLoadPosition) {
4116 pos = this.nextLoadPosition;
4117 }
4118
4119 return pos;
4120 };
4121
4122 _proto.handleFragLoadAborted = function handleFragLoadAborted(frag, part) {
4123 if (this.transmuxer && frag.sn !== 'initSegment' && frag.stats.aborted) {
4124 this.warn("Fragment " + frag.sn + (part ? ' part' + part.index : '') + " of level " + frag.level + " was aborted");
4125 this.resetFragmentLoading(frag);
4126 }
4127 };
4128
4129 _proto.resetFragmentLoading = function resetFragmentLoading(frag) {
4130 if (!this.fragCurrent || !this.fragContextChanged(frag)) {
4131 this.state = State.IDLE;
4132 }
4133 };
4134
4135 _proto.onFragmentOrKeyLoadError = function onFragmentOrKeyLoadError(filterType, data) {
4136 if (data.fatal) {
4137 return;
4138 }
4139
4140 var frag = data.frag; // Handle frag error related to caller's filterType
4141
4142 if (!frag || frag.type !== filterType) {
4143 return;
4144 }
4145
4146 var fragCurrent = this.fragCurrent;
4147 console.assert(fragCurrent && frag.sn === fragCurrent.sn && frag.level === fragCurrent.level && frag.urlId === fragCurrent.urlId, 'Frag load error must match current frag to retry');
4148 var config = this.config; // keep retrying until the retry limit is reached
4149
4150 if (this.fragLoadError + 1 <= config.fragLoadingMaxRetry) {
4151 if (this.resetLiveStartWhenNotLoaded(frag.level)) {
4152 return;
4153 } // exponential backoff capped to config.fragLoadingMaxRetryTimeout
4154
4155
4156 var delay = Math.min(Math.pow(2, this.fragLoadError) * config.fragLoadingRetryDelay, config.fragLoadingMaxRetryTimeout);
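 // Worked example (config values assumed, not read from this bundle): with fragLoadingRetryDelay = 1000ms
 // and fragLoadingMaxRetryTimeout = 64000ms, successive retries wait 1000, 2000, 4000, 8000, ... ms,
 // capped at 64000ms.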
4157 this.warn("Fragment " + frag.sn + " of " + filterType + " " + frag.level + " failed to load, retrying in " + delay + "ms");
4158 this.retryDate = self.performance.now() + delay;
4159 this.fragLoadError++;
4160 this.state = State.FRAG_LOADING_WAITING_RETRY;
4161 } else if (data.levelRetry) {
4162 if (filterType === _types_loader__WEBPACK_IMPORTED_MODULE_15__["PlaylistLevelType"].AUDIO) {
4163 // Reset current fragment since the audio track is essential and may not have a fail-over track
4164 this.fragCurrent = null;
4165 } // Fragment errors that result in a level switch or redundant fail-over
4166 // should reset the stream controller state to idle
4167
4168
4169 this.fragLoadError = 0;
4170 this.state = State.IDLE;
4171 } else {
4172 _utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"].error(data.details + " reached max retry, redispatching as fatal ..."); // switch error to fatal
4173
4174 data.fatal = true;
4175 this.hls.stopLoad();
4176 this.state = State.ERROR;
4177 }
4178 };
4179
4180 _proto.afterBufferFlushed = function afterBufferFlushed(media, bufferType, playlistType) {
4181 if (!media) {
4182 return;
4183 } // After a successful buffer flush, filter flushed fragments from bufferedFrags using mediaBuffered instead of media
4184 // (so that we check against video.buffered ranges in the case of an alt audio track)
4185
4186
4187 var bufferedTimeRanges = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].getBuffered(media);
4188 this.fragmentTracker.detectEvictedFragments(bufferType, bufferedTimeRanges, playlistType);
4189
4190 if (this.state === State.ENDED) {
4191 this.resetLoadingState();
4192 }
4193 };
4194
4195 _proto.resetLoadingState = function resetLoadingState() {
4196 this.fragCurrent = null;
4197 this.fragPrevious = null;
4198 this.state = State.IDLE;
4199 };
4200
4201 _proto.resetLiveStartWhenNotLoaded = function resetLiveStartWhenNotLoaded(level) {
4202 // if loadedmetadata is not set, it means that we performed an emergency switch-down on the first frag
4203 // in that case, reset startFragRequested flag
4204 if (!this.loadedmetadata) {
4205 this.startFragRequested = false;
4206 var details = this.levels ? this.levels[level].details : null;
4207
4208 if (details !== null && details !== void 0 && details.live) {
4209 // We can't afford to retry after a delay in a live scenario. Update the start position and return to IDLE.
4210 this.startPosition = -1;
4211 this.setStartPosition(details, 0);
4212 this.resetLoadingState();
4213 return true;
4214 }
4215
4216 this.nextLoadPosition = this.startPosition;
4217 }
4218
4219 return false;
4220 };
4221
4222 _proto.updateLevelTiming = function updateLevelTiming(frag, part, level, partial) {
4223 var _this6 = this;
4224
4225 var details = level.details;
4226 console.assert(!!details, 'level.details must be defined');
4227 var parsed = Object.keys(frag.elementaryStreams).reduce(function (result, type) {
4228 var info = frag.elementaryStreams[type];
4229
4230 if (info) {
4231 var parsedDuration = info.endPTS - info.startPTS;
4232
4233 if (parsedDuration <= 0) {
4234 // Destroy the transmuxer after its next time offset failed to advance because duration was <= 0.
4235 // The new transmuxer will be configured with a time offset matching the next fragment start,
4236 // preventing the timeline from shifting.
4237 _this6.warn("Could not parse fragment " + frag.sn + " " + type + " duration reliably (" + parsedDuration + ") resetting transmuxer to fallback to playlist timing");
4238
4239 _this6.resetTransmuxer();
4240
4241 return result || false;
4242 }
4243
4244 var drift = partial ? 0 : Object(_level_helper__WEBPACK_IMPORTED_MODULE_11__["updateFragPTSDTS"])(details, frag, info.startPTS, info.endPTS, info.startDTS, info.endDTS);
4245
4246 _this6.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].LEVEL_PTS_UPDATED, {
4247 details: details,
4248 level: level,
4249 drift: drift,
4250 type: type,
4251 frag: frag,
4252 start: info.startPTS,
4253 end: info.endPTS
4254 });
4255
4256 return true;
4257 }
4258
4259 return result;
4260 }, false);
4261
4262 if (parsed) {
4263 this.state = State.PARSED;
4264 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_PARSED, {
4265 frag: frag,
4266 part: part
4267 });
4268 } else {
4269 this.resetLoadingState();
4270 }
4271 };
4272
4273 _proto.resetTransmuxer = function resetTransmuxer() {
4274 if (this.transmuxer) {
4275 this.transmuxer.destroy();
4276 this.transmuxer = null;
4277 }
4278 };
4279
4280 _createClass(BaseStreamController, [{
4281 key: "state",
4282 get: function get() {
4283 return this._state;
4284 },
4285 set: function set(nextState) {
4286 var previousState = this._state;
4287
4288 if (previousState !== nextState) {
4289 this._state = nextState;
4290 this.log(previousState + "->" + nextState);
4291 }
4292 }
4293 }]);
4294
4295 return BaseStreamController;
4296}(_task_loop__WEBPACK_IMPORTED_MODULE_1__["default"]);
4297
4298
4299
4300/***/ }),
4301
4302/***/ "./src/controller/buffer-controller.ts":
4303/*!*********************************************!*\
4304 !*** ./src/controller/buffer-controller.ts ***!
4305 \*********************************************/
4306/*! exports provided: default */
4307/***/ (function(module, __webpack_exports__, __webpack_require__) {
4308__webpack_require__.r(__webpack_exports__);
4309/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return BufferController; });
4310/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
4311/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
4312/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
4313/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
4314/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
4315/* harmony import */ var _utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/mediasource-helper */ "./src/utils/mediasource-helper.ts");
4316/* harmony import */ var _loader_fragment__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../loader/fragment */ "./src/loader/fragment.ts");
4317/* harmony import */ var _buffer_operation_queue__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./buffer-operation-queue */ "./src/controller/buffer-operation-queue.ts");
4318
4319
4320
4321
4322
4323
4324
4325
4326
4327
4328
4329
4330var MediaSource = Object(_utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_5__["getMediaSource"])();
4331 var VIDEO_CODEC_PROFILE_REPLACE = /([ha]vc.)(?:\.[^.,]+)+/;
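// Illustrative sketch (uninvoked helper, example codec strings only): the regex above strips codec
// profile/level details so that only the base codec is compared when deciding whether
// SourceBuffer.changeType is needed, e.g. 'avc1.64001f' -> 'avc1', 'hvc1.1.6.L93.B0' -> 'hvc1'.
function exampleBaseCodec(codecString) {
  return codecString.replace(/([ha]vc.)(?:\.[^.,]+)+/, '$1');
}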
4332
4333var BufferController = /*#__PURE__*/function () {
4334 // The level details used to determine duration, target-duration and live
4335 // cache the self generated object url to detect hijack of video tag
4336 // A queue of buffer operations which require the SourceBuffer to not be updating upon execution
4337 // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
4338 // The number of BUFFER_CODEC events received before any sourceBuffers are created
4339 // The total number of BUFFER_CODEC events received
4340 // A reference to the attached media element
4341 // A reference to the active media source
4342 // counters
4343 function BufferController(_hls) {
4344 var _this = this;
4345
4346 this.details = null;
4347 this._objectUrl = null;
4348 this.operationQueue = void 0;
4349 this.listeners = void 0;
4350 this.hls = void 0;
4351 this.bufferCodecEventsExpected = 0;
4352 this._bufferCodecEventsTotal = 0;
4353 this.media = null;
4354 this.mediaSource = null;
4355 this.appendError = 0;
4356 this.tracks = {};
4357 this.pendingTracks = {};
4358 this.sourceBuffer = void 0;
4359
4360 this._onMediaSourceOpen = function () {
4361 var hls = _this.hls,
4362 media = _this.media,
4363 mediaSource = _this.mediaSource;
4364 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: Media source opened');
4365
4366 if (media) {
4367 _this.updateMediaElementDuration();
4368
4369 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHED, {
4370 media: media
4371 });
4372 }
4373
4374 if (mediaSource) {
4375 // once received, don't listen anymore to sourceopen event
4376 mediaSource.removeEventListener('sourceopen', _this._onMediaSourceOpen);
4377 }
4378
4379 _this.checkPendingTracks();
4380 };
4381
4382 this._onMediaSourceClose = function () {
4383 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: Media source closed');
4384 };
4385
4386 this._onMediaSourceEnded = function () {
4387 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: Media source ended');
4388 };
4389
4390 this.hls = _hls;
4391
4392 this._initSourceBuffer();
4393
4394 this.registerListeners();
4395 }
4396
4397 var _proto = BufferController.prototype;
4398
4399 _proto.hasSourceTypes = function hasSourceTypes() {
4400 return this.getSourceBufferTypes().length > 0 || Object.keys(this.pendingTracks).length > 0;
4401 };
4402
4403 _proto.destroy = function destroy() {
4404 this.unregisterListeners();
4405 this.details = null;
4406 };
4407
4408 _proto.registerListeners = function registerListeners() {
4409 var hls = this.hls;
4410 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
4411 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
4412 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
4413 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_RESET, this.onBufferReset, this);
4414 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_APPENDING, this.onBufferAppending, this);
4415 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_CODECS, this.onBufferCodecs, this);
4416 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_EOS, this.onBufferEos, this);
4417 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
4418 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_UPDATED, this.onLevelUpdated, this);
4419 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_PARSED, this.onFragParsed, this);
4420 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_CHANGED, this.onFragChanged, this);
4421 };
4422
4423 _proto.unregisterListeners = function unregisterListeners() {
4424 var hls = this.hls;
4425 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
4426 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
4427 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
4428 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_RESET, this.onBufferReset, this);
4429 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_APPENDING, this.onBufferAppending, this);
4430 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_CODECS, this.onBufferCodecs, this);
4431 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_EOS, this.onBufferEos, this);
4432 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
4433 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_UPDATED, this.onLevelUpdated, this);
4434 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_PARSED, this.onFragParsed, this);
4435 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_CHANGED, this.onFragChanged, this);
4436 };
4437
4438 _proto._initSourceBuffer = function _initSourceBuffer() {
4439 this.sourceBuffer = {};
4440 this.operationQueue = new _buffer_operation_queue__WEBPACK_IMPORTED_MODULE_7__["default"](this.sourceBuffer);
4441 this.listeners = {
4442 audio: [],
4443 video: [],
4444 audiovideo: []
4445 };
4446 };
4447
4448 _proto.onManifestParsed = function onManifestParsed(event, data) {
4449 // in the case of alt audio, 2 BUFFER_CODECS events will be triggered, one per stream controller
4450 // sourcebuffers will be created all at once when the expected number of tracks is reached
4451 // if alt audio is not used, only one BUFFER_CODECS event will be fired from the main stream controller
4452 // it will contain the expected number of source buffers, so there is no need to compute it
4453 var codecEvents = 2;
4454
4455 if (data.audio && !data.video || !data.altAudio) {
4456 codecEvents = 1;
4457 }
4458
4459 this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = codecEvents;
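 // For illustration: a manifest with video plus an alternate audio rendition expects 2 BUFFER_CODECS
 // events (one per stream controller), while a muxed audio+video stream expects a single event.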
4460 this.details = null;
4461 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log(this.bufferCodecEventsExpected + " bufferCodec event(s) expected");
4462 };
4463
4464 _proto.onMediaAttaching = function onMediaAttaching(event, data) {
4465 var media = this.media = data.media;
4466
4467 if (media && MediaSource) {
4468 var ms = this.mediaSource = new MediaSource(); // MediaSource listeners are arrow functions with a lexical scope, and do not need to be bound
4469
4470 ms.addEventListener('sourceopen', this._onMediaSourceOpen);
4471 ms.addEventListener('sourceended', this._onMediaSourceEnded);
4472 ms.addEventListener('sourceclose', this._onMediaSourceClose); // link video and media Source
4473
4474 media.src = self.URL.createObjectURL(ms); // cache the locally generated object url
4475
4476 this._objectUrl = media.src;
4477 }
4478 };
4479
4480 _proto.onMediaDetaching = function onMediaDetaching() {
4481 var media = this.media,
4482 mediaSource = this.mediaSource,
4483 _objectUrl = this._objectUrl;
4484
4485 if (mediaSource) {
4486 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: media source detaching');
4487
4488 if (mediaSource.readyState === 'open') {
4489 try {
4490 // endOfStream could trigger exception if any sourcebuffer is in updating state
4491 // we don't really care about checking sourcebuffer state here,
4492 // as we are anyway detaching the MediaSource
4493 // let's just avoid this exception to propagate
4494 mediaSource.endOfStream();
4495 } catch (err) {
4496 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: onMediaDetaching: " + err.message + " while calling endOfStream");
4497 }
4498 } // Clean up the SourceBuffers by invoking onBufferReset
4499
4500
4501 this.onBufferReset();
4502 mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
4503 mediaSource.removeEventListener('sourceended', this._onMediaSourceEnded);
4504 mediaSource.removeEventListener('sourceclose', this._onMediaSourceClose); // Detach properly the MediaSource from the HTMLMediaElement as
4505 // suggested in https://github.com/w3c/media-source/issues/53.
4506
4507 if (media) {
4508 if (_objectUrl) {
4509 self.URL.revokeObjectURL(_objectUrl);
4510 } // clean up video tag src only if it's our own url. some external libraries might
4511 // hijack the video tag and change its 'src' without destroying the Hls instance first
4512
4513
4514 if (media.src === _objectUrl) {
4515 media.removeAttribute('src');
4516 media.load();
4517 } else {
4518 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('[buffer-controller]: media.src was changed by a third party - skip cleanup');
4519 }
4520 }
4521
4522 this.mediaSource = null;
4523 this.media = null;
4524 this._objectUrl = null;
4525 this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
4526 this.pendingTracks = {};
4527 this.tracks = {};
4528 }
4529
4530 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHED, undefined);
4531 };
4532
4533 _proto.onBufferReset = function onBufferReset() {
4534 var _this2 = this;
4535
4536 this.getSourceBufferTypes().forEach(function (type) {
4537 var sb = _this2.sourceBuffer[type];
4538
4539 try {
4540 if (sb) {
4541 _this2.removeBufferListeners(type);
4542
4543 if (_this2.mediaSource) {
4544 _this2.mediaSource.removeSourceBuffer(sb);
4545 } // Synchronously remove the SB from the map before the next call in order to prevent an async function from
4546 // accessing it
4547
4548
4549 _this2.sourceBuffer[type] = undefined;
4550 }
4551 } catch (err) {
4552 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: Failed to reset the " + type + " buffer", err);
4553 }
4554 });
4555
4556 this._initSourceBuffer();
4557 };
4558
4559 _proto.onBufferCodecs = function onBufferCodecs(event, data) {
4560 var _this3 = this;
4561
4562 var sourceBufferCount = this.getSourceBufferTypes().length;
4563 Object.keys(data).forEach(function (trackName) {
4564 if (sourceBufferCount) {
4565 // check if SourceBuffer codec needs to change
4566 var track = _this3.tracks[trackName];
4567
4568 if (track && typeof track.buffer.changeType === 'function') {
4569 var _data$trackName = data[trackName],
4570 codec = _data$trackName.codec,
4571 levelCodec = _data$trackName.levelCodec,
4572 container = _data$trackName.container;
4573 var currentCodec = (track.levelCodec || track.codec).replace(VIDEO_CODEC_PROFILE_REPLACE, '$1');
4574 var nextCodec = (levelCodec || codec).replace(VIDEO_CODEC_PROFILE_REPLACE, '$1');
4575
4576 if (currentCodec !== nextCodec) {
4577 var mimeType = container + ";codecs=" + (levelCodec || codec);
4578
4579 _this3.appendChangeType(trackName, mimeType);
4580 }
4581 }
4582 } else {
4583 // if source buffer(s) are not created yet, append buffer tracks to this.pendingTracks
4584 _this3.pendingTracks[trackName] = data[trackName];
4585 }
4586 }); // if sourcebuffers are already created, do nothing ...
4587
4588 if (sourceBufferCount) {
4589 return;
4590 }
4591
4592 this.bufferCodecEventsExpected = Math.max(this.bufferCodecEventsExpected - 1, 0);
4593
4594 if (this.mediaSource && this.mediaSource.readyState === 'open') {
4595 this.checkPendingTracks();
4596 }
4597 };
4598
4599 _proto.appendChangeType = function appendChangeType(type, mimeType) {
4600 var _this4 = this;
4601
4602 var operationQueue = this.operationQueue;
4603 var operation = {
4604 execute: function execute() {
4605 var sb = _this4.sourceBuffer[type];
4606
4607 if (sb) {
4608 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: changing " + type + " sourceBuffer type to " + mimeType);
4609 sb.changeType(mimeType);
4610 }
4611
4612 operationQueue.shiftAndExecuteNext(type);
4613 },
4614 onStart: function onStart() {},
4615 onComplete: function onComplete() {},
4616 onError: function onError(e) {
4617 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: Failed to change " + type + " SourceBuffer type", e);
4618 }
4619 };
4620 operationQueue.append(operation, type);
4621 };
4622
4623 _proto.onBufferAppending = function onBufferAppending(event, eventData) {
4624 var _this5 = this;
4625
4626 var hls = this.hls,
4627 operationQueue = this.operationQueue,
4628 tracks = this.tracks;
4629 var data = eventData.data,
4630 type = eventData.type,
4631 frag = eventData.frag,
4632 part = eventData.part,
4633 chunkMeta = eventData.chunkMeta;
4634 var chunkStats = chunkMeta.buffering[type];
4635 var bufferAppendingStart = self.performance.now();
4636 chunkStats.start = bufferAppendingStart;
4637 var fragBuffering = frag.stats.buffering;
4638 var partBuffering = part ? part.stats.buffering : null;
4639
4640 if (fragBuffering.start === 0) {
4641 fragBuffering.start = bufferAppendingStart;
4642 }
4643
4644 if (partBuffering && partBuffering.start === 0) {
4645 partBuffering.start = bufferAppendingStart;
4646 } // TODO: Only update timestampOffset when the audio/mpeg fragment or part is not contiguous with the previously appended one.
4647 // We adjust `SourceBuffer.timestampOffset` (the desired point in the timeline where the next frames should be appended)
4648 // in Chrome when we detect an MPEG audio container and the time delta between the level PTS and `SourceBuffer.timestampOffset`
4649 // is greater than 100ms (this is enough to handle seeks for VOD or level changes for LIVE videos).
4650 // More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
4651
4652
4653 var audioTrack = tracks.audio;
4654 var checkTimestampOffset = type === 'audio' && chunkMeta.id === 1 && (audioTrack === null || audioTrack === void 0 ? void 0 : audioTrack.container) === 'audio/mpeg';
4655 var operation = {
4656 execute: function execute() {
4657 chunkStats.executeStart = self.performance.now();
4658
4659 if (checkTimestampOffset) {
4660 var sb = _this5.sourceBuffer[type];
4661
4662 if (sb) {
4663 var delta = frag.start - sb.timestampOffset;
4664
4665 if (Math.abs(delta) >= 0.1) {
4666 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: Updating audio SourceBuffer timestampOffset to " + frag.start + " (delta: " + delta + ") sn: " + frag.sn);
4667 sb.timestampOffset = frag.start;
4668 }
4669 }
4670 }
4671
4672 _this5.appendExecutor(data, type);
4673 },
4674 onStart: function onStart() {// logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`);
4675 },
4676 onComplete: function onComplete() {
4677 // logger.debug(`[buffer-controller]: ${type} SourceBuffer updateend`);
4678 var end = self.performance.now();
4679 chunkStats.executeEnd = chunkStats.end = end;
4680
4681 if (fragBuffering.first === 0) {
4682 fragBuffering.first = end;
4683 }
4684
4685 if (partBuffering && partBuffering.first === 0) {
4686 partBuffering.first = end;
4687 }
4688
4689 var sourceBuffer = _this5.sourceBuffer;
4690 var timeRanges = {};
4691
4692 for (var _type in sourceBuffer) {
4693 timeRanges[_type] = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].getBuffered(sourceBuffer[_type]);
4694 }
4695
4696 _this5.appendError = 0;
4697
4698 _this5.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_APPENDED, {
4699 type: type,
4700 frag: frag,
4701 part: part,
4702 chunkMeta: chunkMeta,
4703 parent: frag.type,
4704 timeRanges: timeRanges
4705 });
4706 },
4707 onError: function onError(err) {
4708 // in case any error occurred while appending, put the segment back in the segments table
4709 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("[buffer-controller]: Error encountered while trying to append to the " + type + " SourceBuffer", err);
4710 var event = {
4711 type: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorTypes"].MEDIA_ERROR,
4712 parent: frag.type,
4713 details: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_APPEND_ERROR,
4714 err: err,
4715 fatal: false
4716 };
4717
4718 if (err.code === DOMException.QUOTA_EXCEEDED_ERR) {
4719 // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
4720 // let's stop appending any segments, and report BUFFER_FULL_ERROR error
4721 event.details = _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_FULL_ERROR;
4722 } else {
4723 _this5.appendError++;
4724 event.details = _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_APPEND_ERROR;
4725 /* with UHD content, we could get a loop of quota exceeded errors until the
4726 browser is able to evict some data from the sourcebuffer. Retrying can help recover.
4727 */
4728
4729 if (_this5.appendError > hls.config.appendErrorMaxRetry) {
4730 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("[buffer-controller]: Failed " + hls.config.appendErrorMaxRetry + " times to append segment in sourceBuffer");
4731 event.fatal = true;
4732 }
4733 }
4734
4735 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, event);
4736 }
4737 };
4738 operationQueue.append(operation, type);
4739 };
4740
4741 _proto.onBufferFlushing = function onBufferFlushing(event, data) {
4742 var _this6 = this;
4743
4744 var operationQueue = this.operationQueue;
4745
4746 var flushOperation = function flushOperation(type) {
4747 return {
4748 execute: _this6.removeExecutor.bind(_this6, type, data.startOffset, data.endOffset),
4749 onStart: function onStart() {// logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
4750 },
4751 onComplete: function onComplete() {
4752 // logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
4753 _this6.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHED, {
4754 type: type
4755 });
4756 },
4757 onError: function onError(e) {
4758 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: Failed to remove from " + type + " SourceBuffer", e);
4759 }
4760 };
4761 };
4762
4763 if (data.type) {
4764 operationQueue.append(flushOperation(data.type), data.type);
4765 } else {
4766 this.getSourceBufferTypes().forEach(function (type) {
4767 operationQueue.append(flushOperation(type), type);
4768 });
4769 }
4770 };
4771
4772 _proto.onFragParsed = function onFragParsed(event, data) {
4773 var _this7 = this;
4774
4775 var frag = data.frag,
4776 part = data.part;
4777 var buffersAppendedTo = [];
4778 var elementaryStreams = part ? part.elementaryStreams : frag.elementaryStreams;
4779
4780 if (elementaryStreams[_loader_fragment__WEBPACK_IMPORTED_MODULE_6__["ElementaryStreamTypes"].AUDIOVIDEO]) {
4781 buffersAppendedTo.push('audiovideo');
4782 } else {
4783 if (elementaryStreams[_loader_fragment__WEBPACK_IMPORTED_MODULE_6__["ElementaryStreamTypes"].AUDIO]) {
4784 buffersAppendedTo.push('audio');
4785 }
4786
4787 if (elementaryStreams[_loader_fragment__WEBPACK_IMPORTED_MODULE_6__["ElementaryStreamTypes"].VIDEO]) {
4788 buffersAppendedTo.push('video');
4789 }
4790 }
4791
4792 var onUnblocked = function onUnblocked() {
4793 var now = self.performance.now();
4794 frag.stats.buffering.end = now;
4795
4796 if (part) {
4797 part.stats.buffering.end = now;
4798 }
4799
4800 var stats = part ? part.stats : frag.stats;
4801
4802 _this7.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_BUFFERED, {
4803 frag: frag,
4804 part: part,
4805 stats: stats,
4806 id: frag.type
4807 });
4808 };
4809
4810 if (buffersAppendedTo.length === 0) {
4811 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("Fragments must have at least one ElementaryStreamType set. type: " + frag.type + " level: " + frag.level + " sn: " + frag.sn);
4812 }
4813
4814 this.blockBuffers(onUnblocked, buffersAppendedTo);
4815 };
4816
4817 _proto.onFragChanged = function onFragChanged(event, data) {
4818 this.flushBackBuffer();
4819 } // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
4820 // an undefined data.type will mark all buffers as EOS.
4821 ;
4822
4823 _proto.onBufferEos = function onBufferEos(event, data) {
4824 var _this8 = this;
4825
4826 var ended = this.getSourceBufferTypes().reduce(function (acc, type) {
4827 var sb = _this8.sourceBuffer[type];
4828
4829 if (!data.type || data.type === type) {
4830 if (sb && !sb.ended) {
4831 sb.ended = true;
4832 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: " + type + " sourceBuffer now EOS");
4833 }
4834 }
4835
4836 return acc && !!(!sb || sb.ended);
4837 }, true);
4838
4839 if (ended) {
4840 this.blockBuffers(function () {
4841 var mediaSource = _this8.mediaSource;
4842
4843 if (!mediaSource || mediaSource.readyState !== 'open') {
4844 return;
4845 } // Allow this to throw and be caught by the enqueueing function
4846
4847
4848 mediaSource.endOfStream();
4849 });
4850 }
4851 };
4852
4853 _proto.onLevelUpdated = function onLevelUpdated(event, _ref) {
4854 var details = _ref.details;
4855
4856 if (!details.fragments.length) {
4857 return;
4858 }
4859
4860 this.details = details;
4861
4862 if (this.getSourceBufferTypes().length) {
4863 this.blockBuffers(this.updateMediaElementDuration.bind(this));
4864 } else {
4865 this.updateMediaElementDuration();
4866 }
4867 };
4868
4869 _proto.flushBackBuffer = function flushBackBuffer() {
4870 var hls = this.hls,
4871 details = this.details,
4872 media = this.media,
4873 sourceBuffer = this.sourceBuffer;
4874
4875 if (!media || details === null) {
4876 return;
4877 }
4878
4879 var sourceBufferTypes = this.getSourceBufferTypes();
4880
4881 if (!sourceBufferTypes.length) {
4882 return;
4883 } // Support for deprecated liveBackBufferLength
4884
4885
4886 var backBufferLength = details.live && hls.config.liveBackBufferLength !== null ? hls.config.liveBackBufferLength : hls.config.backBufferLength;
4887
4888 if (!Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(backBufferLength) || backBufferLength < 0) {
4889 return;
4890 }
4891
4892 var currentTime = media.currentTime;
4893 var targetDuration = details.levelTargetDuration;
4894 var maxBackBufferLength = Math.max(backBufferLength, targetDuration);
4895 var targetBackBufferPosition = Math.floor(currentTime / targetDuration) * targetDuration - maxBackBufferLength;
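 // Worked example (values assumed): with currentTime = 125s, targetDuration = 10s and
 // backBufferLength = 90s, maxBackBufferLength = max(90, 10) = 90 and
 // targetBackBufferPosition = floor(125 / 10) * 10 - 90 = 30s, so media buffered before t = 30s
 // becomes eligible for flushing below.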
4896 sourceBufferTypes.forEach(function (type) {
4897 var sb = sourceBuffer[type];
4898
4899 if (sb) {
4900 var buffered = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].getBuffered(sb); // when target buffer start exceeds actual buffer start
4901
4902 if (buffered.length > 0 && targetBackBufferPosition > buffered.start(0)) {
4903 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BACK_BUFFER_REACHED, {
4904 bufferEnd: targetBackBufferPosition
4905 }); // Support for deprecated event:
4906
4907 if (details.live) {
4908 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LIVE_BACK_BUFFER_REACHED, {
4909 bufferEnd: targetBackBufferPosition
4910 });
4911 }
4912
4913 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHING, {
4914 startOffset: 0,
4915 endOffset: targetBackBufferPosition,
4916 type: type
4917 });
4918 }
4919 }
4920 });
4921 }
4922 /**
4923 * Update Media Source duration to the current level duration, or override it to Infinity if the configuration parameter
4924 * `liveDurationInfinity` is set to `true`.
4925 * More details: https://github.com/video-dev/hls.js/issues/355
4926 */
4927 ;
4928
4929 _proto.updateMediaElementDuration = function updateMediaElementDuration() {
4930 if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
4931 return;
4932 }
4933
4934 var details = this.details,
4935 hls = this.hls,
4936 media = this.media,
4937 mediaSource = this.mediaSource;
4938 var levelDuration = details.fragments[0].start + details.totalduration;
4939 var mediaDuration = media.duration;
4940 var msDuration = Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(mediaSource.duration) ? mediaSource.duration : 0;
4941
4942 if (details.live && hls.config.liveDurationInfinity) {
4943 // Override duration to Infinity
4944 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: Media Source duration is set to Infinity');
4945 mediaSource.duration = Infinity;
4946 this.updateSeekableRange(details);
4947 } else if (levelDuration > msDuration && levelDuration > mediaDuration || !Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(mediaDuration)) {
4948 // levelDuration was the last value we set.
4949 // not using mediaSource.duration as the browser may tweak this value
4950 // only update the Media Source duration if its value increases; this is to avoid
4951 // flushing the already buffered portion when switching between quality levels
4952 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: Updating Media Source duration to " + levelDuration.toFixed(3));
4953 mediaSource.duration = levelDuration;
4954 }
4955 };
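 // Usage sketch (assumes the public Hls constructor exported elsewhere in this bundle): a player created
 // with new Hls({ liveDurationInfinity: true }) makes the branch above pin mediaSource.duration to
 // Infinity for live playlists instead of the computed level duration.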
4956
4957 _proto.updateSeekableRange = function updateSeekableRange(levelDetails) {
4958 var mediaSource = this.mediaSource;
4959 var fragments = levelDetails.fragments;
4960 var len = fragments.length;
4961
4962 if (len && levelDetails.live && mediaSource !== null && mediaSource !== void 0 && mediaSource.setLiveSeekableRange) {
4963 var start = Math.max(0, fragments[0].start);
4964 var end = Math.max(start, start + levelDetails.totalduration);
4965 mediaSource.setLiveSeekableRange(start, end);
4966 }
4967 };
4968
4969 _proto.checkPendingTracks = function checkPendingTracks() {
4970 var bufferCodecEventsExpected = this.bufferCodecEventsExpected,
4971 operationQueue = this.operationQueue,
4972 pendingTracks = this.pendingTracks; // Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once.
4973 // This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after
4974 // data has been appended to existing ones.
4975 // 2 tracks is the max (one for audio, one for video). If we've reached this max, go ahead and create the buffers.
4976
4977 var pendingTracksCount = Object.keys(pendingTracks).length;
4978
4979 if (pendingTracksCount && !bufferCodecEventsExpected || pendingTracksCount === 2) {
4980 // ok, let's create them now !
4981 this.createSourceBuffers(pendingTracks);
4982 this.pendingTracks = {}; // append any pending segments now !
4983
4984 var buffers = this.getSourceBufferTypes();
4985
4986 if (buffers.length === 0) {
4987 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
4988 type: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorTypes"].MEDIA_ERROR,
4989 details: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_INCOMPATIBLE_CODECS_ERROR,
4990 fatal: true,
4991 reason: 'could not create source buffer for media codec(s)'
4992 });
4993 return;
4994 }
4995
4996 buffers.forEach(function (type) {
4997 operationQueue.executeNext(type);
4998 });
4999 }
5000 };
5001
5002 _proto.createSourceBuffers = function createSourceBuffers(tracks) {
5003 var sourceBuffer = this.sourceBuffer,
5004 mediaSource = this.mediaSource;
5005
5006 if (!mediaSource) {
5007 throw Error('createSourceBuffers called when mediaSource was null');
5008 }
5009
5010 var tracksCreated = 0;
5011
5012 for (var trackName in tracks) {
5013 if (!sourceBuffer[trackName]) {
5014 var track = tracks[trackName];
5015
5016 if (!track) {
5017 throw Error("source buffer exists for track " + trackName + ", however track does not");
5018 } // use levelCodec as first priority
5019
5020
5021 var codec = track.levelCodec || track.codec;
5022 var mimeType = track.container + ";codecs=" + codec;
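 // For illustration (example values only): a typical video track yields 'video/mp4;codecs=avc1.64001f'
 // and an audio track 'audio/mp4;codecs=mp4a.40.2'.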
5023 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: creating sourceBuffer(" + mimeType + ")");
5024
5025 try {
5026 var sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
5027 var sbName = trackName;
5028 this.addBufferListener(sbName, 'updatestart', this._onSBUpdateStart);
5029 this.addBufferListener(sbName, 'updateend', this._onSBUpdateEnd);
5030 this.addBufferListener(sbName, 'error', this._onSBUpdateError);
5031 this.tracks[trackName] = {
5032 buffer: sb,
5033 codec: codec,
5034 container: track.container,
5035 levelCodec: track.levelCodec,
5036 id: track.id
5037 };
5038 tracksCreated++;
5039 } catch (err) {
5040 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("[buffer-controller]: error while trying to add sourceBuffer: " + err.message);
5041 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
5042 type: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorTypes"].MEDIA_ERROR,
5043 details: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_ADD_CODEC_ERROR,
5044 fatal: false,
5045 error: err,
5046 mimeType: mimeType
5047 });
5048 }
5049 }
5050 }
5051
5052 if (tracksCreated) {
5053 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_CREATED, {
5054 tracks: this.tracks
5055 });
5056 }
5057 } // Keep as arrow functions so that we can reference these functions directly as event listeners
5058 ;
5059
5060 _proto._onSBUpdateStart = function _onSBUpdateStart(type) {
5061 var operationQueue = this.operationQueue;
5062 var operation = operationQueue.current(type);
5063 operation.onStart();
5064 };
5065
5066 _proto._onSBUpdateEnd = function _onSBUpdateEnd(type) {
5067 var operationQueue = this.operationQueue;
5068 var operation = operationQueue.current(type);
5069 operation.onComplete();
5070 operationQueue.shiftAndExecuteNext(type);
5071 };
5072
5073 _proto._onSBUpdateError = function _onSBUpdateError(type, event) {
5074 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("[buffer-controller]: " + type + " SourceBuffer error", event); // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
5075 // SourceBuffer errors are not necessarily fatal; if so, the HTMLMediaElement will fire an error event
5076
5077 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
5078 type: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorTypes"].MEDIA_ERROR,
5079 details: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_APPENDING_ERROR,
5080 fatal: false
5081 }); // updateend is always fired after error, so we'll allow that to shift the current operation off of the queue
5082
5083 var operation = this.operationQueue.current(type);
5084
5085 if (operation) {
5086 operation.onError(event);
5087 }
5088 } // This method must result in an updateend event; if remove is not called, _onSBUpdateEnd must be called manually
5089 ;
5090
5091 _proto.removeExecutor = function removeExecutor(type, startOffset, endOffset) {
5092 var media = this.media,
5093 mediaSource = this.mediaSource,
5094 operationQueue = this.operationQueue,
5095 sourceBuffer = this.sourceBuffer;
5096 var sb = sourceBuffer[type];
5097
5098 if (!media || !mediaSource || !sb) {
5099 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: Attempting to remove from the " + type + " SourceBuffer, but it does not exist");
5100 operationQueue.shiftAndExecuteNext(type);
5101 return;
5102 }
5103
5104 var mediaDuration = Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(media.duration) ? media.duration : Infinity;
5105 var msDuration = Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(mediaSource.duration) ? mediaSource.duration : Infinity;
5106 var removeStart = Math.max(0, startOffset);
5107 var removeEnd = Math.min(endOffset, mediaDuration, msDuration);
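 // Worked example (values assumed): flushing the back buffer with startOffset = 0 and endOffset = 30 on
 // a 120s VOD removes the range [0, 30) below; endOffset values beyond the media/MediaSource duration
 // are clamped by the Math.min above.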
5108
5109 if (removeEnd > removeStart) {
5110 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: Removing [" + removeStart + "," + removeEnd + "] from the " + type + " SourceBuffer");
5111 console.assert(!sb.updating, type + " sourceBuffer must not be updating");
5112 sb.remove(removeStart, removeEnd);
5113 } else {
5114 // Cycle the queue
5115 operationQueue.shiftAndExecuteNext(type);
5116 }
5117 } // This method must result in an updateend event; if append is not called, _onSBUpdateEnd must be called manually
5118 ;
5119
5120 _proto.appendExecutor = function appendExecutor(data, type) {
5121 var operationQueue = this.operationQueue,
5122 sourceBuffer = this.sourceBuffer;
5123 var sb = sourceBuffer[type];
5124
5125 if (!sb) {
5126 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: Attempting to append to the " + type + " SourceBuffer, but it does not exist");
5127 operationQueue.shiftAndExecuteNext(type);
5128 return;
5129 }
5130
5131 sb.ended = false;
5132 console.assert(!sb.updating, type + " sourceBuffer must not be updating");
5133 sb.appendBuffer(data);
5134 } // Enqueues an operation to each SourceBuffer queue which, upon execution, resolves a promise. When all promises
5135 // resolve, the onUnblocked function is executed. Functions calling this method do not need to unblock the queue
5136 // upon completion, since we already do it here
5137 ;
5138
5139 _proto.blockBuffers = function blockBuffers(onUnblocked, buffers) {
5140 var _this9 = this;
5141
5142 if (buffers === void 0) {
5143 buffers = this.getSourceBufferTypes();
5144 }
5145
5146 if (!buffers.length) {
5147 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: Blocking operation requested, but no SourceBuffers exist');
5148 return;
5149 }
5150
5151 var operationQueue = this.operationQueue; // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
5152
5153 var blockingOperations = buffers.map(function (type) {
5154 return operationQueue.appendBlocker(type);
5155 });
5156 Promise.all(blockingOperations).then(function () {
5157 // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
5158 onUnblocked();
5159 buffers.forEach(function (type) {
5160 var sb = _this9.sourceBuffer[type]; // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
5161 // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
5162 // While this is a workaround, it's probably useful to have around
5163
5164 if (!sb || !sb.updating) {
5165 operationQueue.shiftAndExecuteNext(type);
5166 }
5167 });
5168 });
5169 };
5170
5171 _proto.getSourceBufferTypes = function getSourceBufferTypes() {
5172 return Object.keys(this.sourceBuffer);
5173 };
5174
5175 _proto.addBufferListener = function addBufferListener(type, event, fn) {
5176 var buffer = this.sourceBuffer[type];
5177
5178 if (!buffer) {
5179 return;
5180 }
5181
5182 var listener = fn.bind(this, type);
5183 this.listeners[type].push({
5184 event: event,
5185 listener: listener
5186 });
5187 buffer.addEventListener(event, listener);
5188 };
5189
5190 _proto.removeBufferListeners = function removeBufferListeners(type) {
5191 var buffer = this.sourceBuffer[type];
5192
5193 if (!buffer) {
5194 return;
5195 }
5196
5197 this.listeners[type].forEach(function (l) {
5198 buffer.removeEventListener(l.event, l.listener);
5199 });
5200 };
5201
5202 return BufferController;
5203}();
5204
5205
5206
5207/***/ }),
5208
5209/***/ "./src/controller/buffer-operation-queue.ts":
5210/*!**************************************************!*\
5211 !*** ./src/controller/buffer-operation-queue.ts ***!
5212 \**************************************************/
5213/*! exports provided: default */
5214/***/ (function(module, __webpack_exports__, __webpack_require__) {
5215__webpack_require__.r(__webpack_exports__);
5216/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return BufferOperationQueue; });
5217/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
5218
5219
5220var BufferOperationQueue = /*#__PURE__*/function () {
5221 function BufferOperationQueue(sourceBufferReference) {
5222 this.buffers = void 0;
5223 this.queues = {
5224 video: [],
5225 audio: [],
5226 audiovideo: []
5227 };
5228 this.buffers = sourceBufferReference;
5229 }
5230
5231 var _proto = BufferOperationQueue.prototype;
5232
5233 _proto.append = function append(operation, type) {
5234 var queue = this.queues[type];
5235 queue.push(operation);
5236
5237 if (queue.length === 1 && this.buffers[type]) {
5238 this.executeNext(type);
5239 }
5240 };
5241
5242 _proto.insertAbort = function insertAbort(operation, type) {
5243 var queue = this.queues[type];
5244 queue.unshift(operation);
5245 this.executeNext(type);
5246 };
5247
5248 _proto.appendBlocker = function appendBlocker(type) {
5249 var execute;
5250 var promise = new Promise(function (resolve) {
5251 execute = resolve;
5252 });
5253 var operation = {
5254 execute: execute,
5255 onStart: function onStart() {},
5256 onComplete: function onComplete() {},
5257 onError: function onError() {}
5258 };
5259 this.append(operation, type);
5260 return promise;
5261 };
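 // Usage sketch: BufferController.blockBuffers (earlier in this bundle) collects one such blocker promise
 // per SourceBuffer type with Promise.all; each promise resolves only when its blocker reaches the front
 // of that type's queue, and the caller then calls shiftAndExecuteNext to unblock the queue.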
5262
5263 _proto.executeNext = function executeNext(type) {
5264 var buffers = this.buffers,
5265 queues = this.queues;
5266 var sb = buffers[type];
5267 var queue = queues[type];
5268
5269 if (queue.length) {
5270 var operation = queue[0];
5271
5272 try {
5273 // Operations are expected to result in an 'updateend' event being fired. If not, the queue will lock. Operations
5274 // which do not end with this event must call _onSBUpdateEnd manually
5275 operation.execute();
5276 } catch (e) {
5277 _utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].warn('[buffer-operation-queue]: Unhandled exception executing the current operation');
5278 operation.onError(e); // Only shift the current operation off if the SourceBuffer is not updating; otherwise the updateend handler will do this for us
5279
5280 if (!sb || !sb.updating) {
5281 queue.shift();
5282 this.executeNext(type);
5283 }
5284 }
5285 }
5286 };
5287
5288 _proto.shiftAndExecuteNext = function shiftAndExecuteNext(type) {
5289 this.queues[type].shift();
5290 this.executeNext(type);
5291 };
5292
5293 _proto.current = function current(type) {
5294 return this.queues[type][0];
5295 };
5296
5297 return BufferOperationQueue;
5298}();
5299
5300
5301
5302/***/ }),
5303
5304/***/ "./src/controller/cap-level-controller.ts":
5305/*!************************************************!*\
5306 !*** ./src/controller/cap-level-controller.ts ***!
5307 \************************************************/
5308/*! exports provided: default */
5309/***/ (function(module, __webpack_exports__, __webpack_require__) {
5310__webpack_require__.r(__webpack_exports__);
5311/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
5312function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
5313
5314function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
5315
5316/*
5317 * controller that caps the stream level to the media (player) size dimensions
5318 */
5319
5320
5321var CapLevelController = /*#__PURE__*/function () {
5322 function CapLevelController(hls) {
5323 this.autoLevelCapping = void 0;
5324 this.firstLevel = void 0;
5325 this.media = void 0;
5326 this.restrictedLevels = void 0;
5327 this.timer = void 0;
5328 this.hls = void 0;
5329 this.streamController = void 0;
5330 this.clientRect = void 0;
5331 this.hls = hls;
5332 this.autoLevelCapping = Number.POSITIVE_INFINITY;
5333 this.firstLevel = -1;
5334 this.media = null;
5335 this.restrictedLevels = [];
5336 this.timer = undefined;
5337 this.clientRect = null;
5338 this.registerListeners();
5339 }
5340
5341 var _proto = CapLevelController.prototype;
5342
5343 _proto.setStreamController = function setStreamController(streamController) {
5344 this.streamController = streamController;
5345 };
5346
5347 _proto.destroy = function destroy() {
5348 this.unregisterListener();
5349
5350 if (this.hls.config.capLevelToPlayerSize) {
5351 this.stopCapping();
5352 }
5353
5354 this.media = null;
5355 this.clientRect = null; // @ts-ignore
5356
5357 this.hls = this.streamController = null;
5358 };
5359
5360 _proto.registerListeners = function registerListeners() {
5361 var hls = this.hls;
5362 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this);
5363 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
5364 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
5365 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_CODECS, this.onBufferCodecs, this);
5366 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
5367 };
5368
5369 _proto.unregisterListener = function unregisterListener() {
5370 var hls = this.hls;
5371 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this);
5372 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
5373 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
5374 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_CODECS, this.onBufferCodecs, this);
5375 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
5376 };
5377
5378 _proto.onFpsDropLevelCapping = function onFpsDropLevelCapping(event, data) {
5379 // Don't add a restricted level more than once
5380 if (CapLevelController.isLevelAllowed(data.droppedLevel, this.restrictedLevels)) {
5381 this.restrictedLevels.push(data.droppedLevel);
5382 }
5383 };
5384
5385 _proto.onMediaAttaching = function onMediaAttaching(event, data) {
5386 this.media = data.media instanceof HTMLVideoElement ? data.media : null;
5387 };
5388
5389 _proto.onManifestParsed = function onManifestParsed(event, data) {
5390 var hls = this.hls;
5391 this.restrictedLevels = [];
5392 this.firstLevel = data.firstLevel;
5393
5394 if (hls.config.capLevelToPlayerSize && data.video) {
5395 // Start capping immediately if the manifest has signaled video codecs
5396 this.startCapping();
5397 }
5398 } // Only activate capping when playing a video stream; otherwise, multi-bitrate audio-only streams will be restricted
5399 // to the first level
5400 ;
5401
5402 _proto.onBufferCodecs = function onBufferCodecs(event, data) {
5403 var hls = this.hls;
5404
5405 if (hls.config.capLevelToPlayerSize && data.video) {
5406 // If the manifest did not signal a video codec, capping has been deferred until we're certain video is present
5407 this.startCapping();
5408 }
5409 };
5410
5411 _proto.onMediaDetaching = function onMediaDetaching() {
5412 this.stopCapping();
5413 };
5414
5415 _proto.detectPlayerSize = function detectPlayerSize() {
5416 if (this.media && this.mediaHeight > 0 && this.mediaWidth > 0) {
5417 var levels = this.hls.levels;
5418
5419 if (levels.length) {
5420 var hls = this.hls;
5421 hls.autoLevelCapping = this.getMaxLevel(levels.length - 1);
5422
5423 if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
5424 // if auto level capping has a higher value than the previous one, flush the buffer using nextLevelSwitch
5425 // this usually happens when the user goes into fullscreen mode.
5426 this.streamController.nextLevelSwitch();
5427 }
5428
5429 this.autoLevelCapping = hls.autoLevelCapping;
5430 }
5431 }
5432 }
5433 /*
5434 * the returned level should be the one with dimensions equal to or greater than the media (player) dimensions (so the video will be downscaled)
5435 */
5436 ;
5437
5438 _proto.getMaxLevel = function getMaxLevel(capLevelIndex) {
5439 var _this = this;
5440
5441 var levels = this.hls.levels;
5442
5443 if (!levels.length) {
5444 return -1;
5445 }
5446
5447 var validLevels = levels.filter(function (level, index) {
5448 return CapLevelController.isLevelAllowed(index, _this.restrictedLevels) && index <= capLevelIndex;
5449 });
5450 this.clientRect = null;
5451 return CapLevelController.getMaxLevelByMediaSize(validLevels, this.mediaWidth, this.mediaHeight);
5452 };
5453
5454 _proto.startCapping = function startCapping() {
5455 if (this.timer) {
5456 // Don't reset capping if started twice; this can happen if the manifest signals a video codec
5457 return;
5458 }
5459
5460 this.autoLevelCapping = Number.POSITIVE_INFINITY;
5461 this.hls.firstLevel = this.getMaxLevel(this.firstLevel);
5462 self.clearInterval(this.timer);
5463 this.timer = self.setInterval(this.detectPlayerSize.bind(this), 1000);
5464 this.detectPlayerSize();
5465 };
5466
5467 _proto.stopCapping = function stopCapping() {
5468 this.restrictedLevels = [];
5469 this.firstLevel = -1;
5470 this.autoLevelCapping = Number.POSITIVE_INFINITY;
5471
5472 if (this.timer) {
5473 self.clearInterval(this.timer);
5474 this.timer = undefined;
5475 }
5476 };
5477
5478 _proto.getDimensions = function getDimensions() {
5479 if (this.clientRect) {
5480 return this.clientRect;
5481 }
5482
5483 var media = this.media;
5484 var boundsRect = {
5485 width: 0,
5486 height: 0
5487 };
5488
5489 if (media) {
5490 var clientRect = media.getBoundingClientRect();
5491 boundsRect.width = clientRect.width;
5492 boundsRect.height = clientRect.height;
5493
5494 if (!boundsRect.width && !boundsRect.height) {
5495 // When the media element has no width or height (equivalent to not being in the DOM),
5496 // then use its width and height attributes (media.width, media.height)
5497 boundsRect.width = clientRect.right - clientRect.left || media.width || 0;
5498 boundsRect.height = clientRect.bottom - clientRect.top || media.height || 0;
5499 }
5500 }
5501
5502 this.clientRect = boundsRect;
5503 return boundsRect;
5504 };
5505
5506 CapLevelController.isLevelAllowed = function isLevelAllowed(level, restrictedLevels) {
5507 if (restrictedLevels === void 0) {
5508 restrictedLevels = [];
5509 }
5510
5511 return restrictedLevels.indexOf(level) === -1;
5512 };
5513
5514 CapLevelController.getMaxLevelByMediaSize = function getMaxLevelByMediaSize(levels, width, height) {
5515 if (!levels || !levels.length) {
5516 return -1;
5517 } // Levels can have the same dimensions but differing bandwidths - since levels are ordered, we can look to the next
5518 // to determine whether we've chosen the greatest bandwidth for the media's dimensions
5519
5520
5521 var atGreatestBandwidth = function atGreatestBandwidth(curLevel, nextLevel) {
5522 if (!nextLevel) {
5523 return true;
5524 }
5525
5526 return curLevel.width !== nextLevel.width || curLevel.height !== nextLevel.height;
5527 }; // If we run through the loop without breaking, the media's dimensions are greater than every level, so default to
5528 // the max level
5529
5530
5531 var maxLevelIndex = levels.length - 1;
5532
5533 for (var i = 0; i < levels.length; i += 1) {
5534 var level = levels[i];
5535
5536 if ((level.width >= width || level.height >= height) && atGreatestBandwidth(level, levels[i + 1])) {
5537 maxLevelIndex = i;
5538 break;
5539 }
5540 }
5541
5542 return maxLevelIndex;
5543 };
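 // Worked example (levels assumed): for levels sized 640x360, 1280x720 and 1920x1080 and a 1280x720
 // player, the 1280x720 level is the first whose width or height covers the player, so index 1 is
 // returned; a 2560x1440 player falls through the loop and returns the last index.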
5544
5545 _createClass(CapLevelController, [{
5546 key: "mediaWidth",
5547 get: function get() {
5548 return this.getDimensions().width * CapLevelController.contentScaleFactor;
5549 }
5550 }, {
5551 key: "mediaHeight",
5552 get: function get() {
5553 return this.getDimensions().height * CapLevelController.contentScaleFactor;
5554 }
5555 }], [{
5556 key: "contentScaleFactor",
5557 get: function get() {
5558 var pixelRatio = 1;
5559
5560 try {
5561 pixelRatio = self.devicePixelRatio;
5562 } catch (e) {
5563 /* no-op */
5564 }
5565
5566 return pixelRatio;
5567 }
5568 }]);
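 // Worked example (values assumed): on a display with devicePixelRatio = 2, a 640x360 CSS-pixel player
 // reports mediaWidth = 1280 and mediaHeight = 720 via the getters above, so level capping compares
 // against device pixels rather than CSS pixels.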
5569
5570 return CapLevelController;
5571}();
5572
5573/* harmony default export */ __webpack_exports__["default"] = (CapLevelController);
5574
5575/***/ }),
5576
5577/***/ "./src/controller/cmcd-controller.ts":
5578/*!*******************************************!*\
5579 !*** ./src/controller/cmcd-controller.ts ***!
5580 \*******************************************/
5581/*! exports provided: default */
5582/***/ (function(module, __webpack_exports__, __webpack_require__) {
5583__webpack_require__.r(__webpack_exports__);
5584/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return CMCDController; });
5585/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
5586/* harmony import */ var _types_cmcd__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../types/cmcd */ "./src/types/cmcd.ts");
5587/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
5588/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
5589function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
5590
5591function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
5592
5593function _createForOfIteratorHelperLoose(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (it) return (it = it.call(o)).next.bind(it); if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; return function () { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
5594
5595function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
5596
5597function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
5598
5599function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
5600
5601
5602
5603
5604
5605/**
5606 * Controller to deal with Common Media Client Data (CMCD)
5607 * @see https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf
5608 */
5609
5610var CMCDController = /*#__PURE__*/function () {
5611 // eslint-disable-line no-restricted-globals
5612 // eslint-disable-line no-restricted-globals
5613 function CMCDController(hls) {
5614 var _this = this;
5615
5616 this.hls = void 0;
5617 this.config = void 0;
5618 this.media = void 0;
5619 this.sid = void 0;
5620 this.cid = void 0;
5621 this.useHeaders = false;
5622 this.initialized = false;
5623 this.starved = false;
5624 this.buffering = true;
5625 this.audioBuffer = void 0;
5626 this.videoBuffer = void 0;
5627
5628 this.onWaiting = function () {
5629 if (_this.initialized) {
5630 _this.starved = true;
5631 }
5632
5633 _this.buffering = true;
5634 };
5635
5636 this.onPlaying = function () {
5637 if (!_this.initialized) {
5638 _this.initialized = true;
5639 }
5640
5641 _this.buffering = false;
5642 };
5643
5644 this.applyPlaylistData = function (context) {
5645 try {
5646 _this.apply(context, {
5647 ot: _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].MANIFEST,
5648 su: !_this.initialized
5649 });
5650 } catch (error) {
5651 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('Could not generate manifest CMCD data.', error);
5652 }
5653 };
5654
5655 this.applyFragmentData = function (context) {
5656 try {
5657 var fragment = context.frag;
5658 var level = _this.hls.levels[fragment.level];
5659
5660 var ot = _this.getObjectType(fragment);
5661
5662 var data = {
5663 d: fragment.duration * 1000,
5664 ot: ot
5665 };
5666
5667 if (ot === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].VIDEO || ot === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].AUDIO || ot == _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].MUXED) {
5668 data.br = level.bitrate / 1000;
5669 data.tb = _this.getTopBandwidth(ot) / 1000;
5670 data.bl = _this.getBufferLength(ot);
5671 }
5672
5673 _this.apply(context, data);
5674 } catch (error) {
5675 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('Could not generate segment CMCD data.', error);
5676 }
5677 };
5678
5679 this.hls = hls;
5680 var config = this.config = hls.config;
5681 var cmcd = config.cmcd;
5682
5683 if (cmcd != null) {
5684 config.pLoader = this.createPlaylistLoader();
5685 config.fLoader = this.createFragmentLoader();
5686 this.sid = cmcd.sessionId || CMCDController.uuid();
5687 this.cid = cmcd.contentId;
5688 this.useHeaders = cmcd.useHeaders === true;
5689 this.registerListeners();
5690 }
5691 }
5692
5693 var _proto = CMCDController.prototype;
5694
5695 _proto.registerListeners = function registerListeners() {
5696 var hls = this.hls;
5697 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
5698 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHED, this.onMediaDetached, this);
5699 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
5700 };
5701
5702 _proto.unregisterListeners = function unregisterListeners() {
5703 var hls = this.hls;
5704 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
5705 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHED, this.onMediaDetached, this);
5706 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
5707 this.onMediaDetached();
5708 };
5709
5710 _proto.destroy = function destroy() {
5711 this.unregisterListeners(); // @ts-ignore
5712
5713 this.hls = this.config = this.audioBuffer = this.videoBuffer = null;
5714 };
5715
5716 _proto.onMediaAttached = function onMediaAttached(event, data) {
5717 this.media = data.media;
5718 this.media.addEventListener('waiting', this.onWaiting);
5719 this.media.addEventListener('playing', this.onPlaying);
5720 };
5721
5722 _proto.onMediaDetached = function onMediaDetached() {
5723 if (!this.media) {
5724 return;
5725 }
5726
5727 this.media.removeEventListener('waiting', this.onWaiting);
5728 this.media.removeEventListener('playing', this.onPlaying); // @ts-ignore
5729
5730 this.media = null;
5731 };
5732
5733 _proto.onBufferCreated = function onBufferCreated(event, data) {
5734 var _data$tracks$audio, _data$tracks$video;
5735
5736 this.audioBuffer = (_data$tracks$audio = data.tracks.audio) === null || _data$tracks$audio === void 0 ? void 0 : _data$tracks$audio.buffer;
5737 this.videoBuffer = (_data$tracks$video = data.tracks.video) === null || _data$tracks$video === void 0 ? void 0 : _data$tracks$video.buffer;
5738 };
5739
5740 /**
5741 * Create baseline CMCD data
5742 */
5743 _proto.createData = function createData() {
5744 var _this$media;
5745
5746 return {
5747 v: _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDVersion"],
5748 sf: _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDStreamingFormat"].HLS,
5749 sid: this.sid,
5750 cid: this.cid,
5751 pr: (_this$media = this.media) === null || _this$media === void 0 ? void 0 : _this$media.playbackRate,
5752 mtp: this.hls.bandwidthEstimate / 1000
5753 };
5754 }
5755 /**
5756 * Apply CMCD data to a request.
5757 */
5758 ;
5759
5760 _proto.apply = function apply(context, data) {
5761 if (data === void 0) {
5762 data = {};
5763 }
5764
5765 // apply baseline data
5766 _extends(data, this.createData());
5767
5768 var isVideo = data.ot === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].INIT || data.ot === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].VIDEO || data.ot === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].MUXED;
5769
5770 if (this.starved && isVideo) {
5771 data.bs = true;
5772 data.su = true;
5773 this.starved = false;
5774 }
5775
5776 if (data.su == null) {
5777 data.su = this.buffering;
5778 } // TODO: Implement rtp, nrr, nor, dl
5779
5780
5781 if (this.useHeaders) {
5782 var headers = CMCDController.toHeaders(data);
5783
5784 if (!Object.keys(headers).length) {
5785 return;
5786 }
5787
5788 if (!context.headers) {
5789 context.headers = {};
5790 }
5791
5792 _extends(context.headers, headers);
5793 } else {
5794 var query = CMCDController.toQuery(data);
5795
5796 if (!query) {
5797 return;
5798 }
5799
5800 context.url = CMCDController.appendQueryToUri(context.url, query);
5801 }
5802 }
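// Illustrative note (not part of the bundle): apply() merges the per-request fields with
// the baseline from createData() and flags rebuffering. The context and URL below are
// hypothetical; the exact CMCD payload depends on the current playback state.
//
//   var context = { url: 'https://cdn.example.com/seg1.m4s' };
//   // After a 'waiting' event on an initialized media element, the next video/init/muxed
//   // request carries bs (buffer starved) and su (startup). With useHeaders === false the
//   // URL is rewritten roughly as:
//   // 'https://cdn.example.com/seg1.m4s?CMCD=bl%3D21300%2Cbr%3D3200%2Cbs%2C...'
//   // With useHeaders === true the same payload is split across CMCD-* request headers.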
5803 /**
5804 * Apply CMCD data to a manifest request.
5805 */
5806 ;
5807
5808 /**
5809 * The CMCD object type.
5810 */
5811 _proto.getObjectType = function getObjectType(fragment) {
5812 var type = fragment.type;
5813
5814 if (type === 'subtitle') {
5815 return _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].TIMED_TEXT;
5816 }
5817
5818 if (fragment.sn === 'initSegment') {
5819 return _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].INIT;
5820 }
5821
5822 if (type === 'audio') {
5823 return _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].AUDIO;
5824 }
5825
5826 if (type === 'main') {
5827 if (!this.hls.audioTracks.length) {
5828 return _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].MUXED;
5829 }
5830
5831 return _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].VIDEO;
5832 }
5833
5834 return undefined;
5835 }
5836 /**
5837 * Get the highest bitrate.
5838 */
5839 ;
5840
5841 _proto.getTopBandwidth = function getTopBandwidth(type) {
5842 var bitrate = 0;
5843 var levels;
5844 var hls = this.hls;
5845
5846 if (type === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].AUDIO) {
5847 levels = hls.audioTracks;
5848 } else {
5849 var max = hls.maxAutoLevel;
5850 var len = max > -1 ? max + 1 : hls.levels.length;
5851 levels = hls.levels.slice(0, len);
5852 }
5853
5854 for (var _iterator = _createForOfIteratorHelperLoose(levels), _step; !(_step = _iterator()).done;) {
5855 var level = _step.value;
5856
5857 if (level.bitrate > bitrate) {
5858 bitrate = level.bitrate;
5859 }
5860 }
5861
5862 return bitrate > 0 ? bitrate : NaN;
5863 }
5864 /**
5865 * Get the buffer length for a media type in milliseconds
5866 */
5867 ;
5868
5869 _proto.getBufferLength = function getBufferLength(type) {
5870 var media = this.hls.media;
5871 var buffer = type === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].AUDIO ? this.audioBuffer : this.videoBuffer;
5872
5873 if (!buffer || !media) {
5874 return NaN;
5875 }
5876
5877 var info = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_2__["BufferHelper"].bufferInfo(buffer, media.currentTime, this.config.maxBufferHole);
5878 return info.len * 1000;
5879 }
5880 /**
5881 * Create a playlist loader
5882 */
5883 ;
5884
5885 _proto.createPlaylistLoader = function createPlaylistLoader() {
5886 var pLoader = this.config.pLoader;
5887 var apply = this.applyPlaylistData;
5888 var Ctor = pLoader || this.config.loader;
5889 return /*#__PURE__*/function () {
5890 function CmcdPlaylistLoader(config) {
5891 this.loader = void 0;
5892 this.loader = new Ctor(config);
5893 }
5894
5895 var _proto2 = CmcdPlaylistLoader.prototype;
5896
5897 _proto2.destroy = function destroy() {
5898 this.loader.destroy();
5899 };
5900
5901 _proto2.abort = function abort() {
5902 this.loader.abort();
5903 };
5904
5905 _proto2.load = function load(context, config, callbacks) {
5906 apply(context);
5907 this.loader.load(context, config, callbacks);
5908 };
5909
5910 _createClass(CmcdPlaylistLoader, [{
5911 key: "stats",
5912 get: function get() {
5913 return this.loader.stats;
5914 }
5915 }, {
5916 key: "context",
5917 get: function get() {
5918 return this.loader.context;
5919 }
5920 }]);
5921
5922 return CmcdPlaylistLoader;
5923 }();
5924 }
5925 /**
5926   * Create a fragment loader
5927 */
5928 ;
5929
5930 _proto.createFragmentLoader = function createFragmentLoader() {
5931 var fLoader = this.config.fLoader;
5932 var apply = this.applyFragmentData;
5933 var Ctor = fLoader || this.config.loader;
5934 return /*#__PURE__*/function () {
5935 function CmcdFragmentLoader(config) {
5936 this.loader = void 0;
5937 this.loader = new Ctor(config);
5938 }
5939
5940 var _proto3 = CmcdFragmentLoader.prototype;
5941
5942 _proto3.destroy = function destroy() {
5943 this.loader.destroy();
5944 };
5945
5946 _proto3.abort = function abort() {
5947 this.loader.abort();
5948 };
5949
5950 _proto3.load = function load(context, config, callbacks) {
5951 apply(context);
5952 this.loader.load(context, config, callbacks);
5953 };
5954
5955 _createClass(CmcdFragmentLoader, [{
5956 key: "stats",
5957 get: function get() {
5958 return this.loader.stats;
5959 }
5960 }, {
5961 key: "context",
5962 get: function get() {
5963 return this.loader.context;
5964 }
5965 }]);
5966
5967 return CmcdFragmentLoader;
5968 }();
5969 }
5970 /**
5971   * Generate a random v4 UUID
5972 *
5973 * @returns {string}
5974 */
5975 ;
5976
5977 CMCDController.uuid = function uuid() {
5978 var url = URL.createObjectURL(new Blob());
5979 var uuid = url.toString();
5980 URL.revokeObjectURL(url);
5981 return uuid.substr(uuid.lastIndexOf('/') + 1);
5982 }
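// Illustrative sketch (not part of the bundle): uuid() above relies on blob URLs ending in
// a v4 UUID, e.g. 'blob:https://example.com/3df55f31-0000-4000-8000-0000c0ffee00'.
// A standalone equivalent using the same Web APIs:
//
//   function blobUuid() {
//     var url = URL.createObjectURL(new Blob());
//     URL.revokeObjectURL(url);
//     return url.slice(url.lastIndexOf('/') + 1);
//   }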
5983 /**
5984 * Serialize a CMCD data object according to the rules defined in the
5985 * section 3.2 of
5986 * [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf).
5987 */
5988 ;
5989
5990 CMCDController.serialize = function serialize(data) {
5991 var results = [];
5992
5993 var isValid = function isValid(value) {
5994 return !Number.isNaN(value) && value != null && value !== '' && value !== false;
5995 };
5996
5997 var toRounded = function toRounded(value) {
5998 return Math.round(value);
5999 };
6000
6001 var toHundred = function toHundred(value) {
6002 return toRounded(value / 100) * 100;
6003 };
6004
6005 var toUrlSafe = function toUrlSafe(value) {
6006 return encodeURIComponent(value);
6007 };
6008
6009 var formatters = {
6010 br: toRounded,
6011 d: toRounded,
6012 bl: toHundred,
6013 dl: toHundred,
6014 mtp: toHundred,
6015 nor: toUrlSafe,
6016 rtp: toHundred,
6017 tb: toRounded
6018 };
6019 var keys = Object.keys(data || {}).sort();
6020
6021 for (var _iterator2 = _createForOfIteratorHelperLoose(keys), _step2; !(_step2 = _iterator2()).done;) {
6022 var key = _step2.value;
6023 var value = data[key]; // ignore invalid values
6024
6025 if (!isValid(value)) {
6026 continue;
6027 } // Version should only be reported if not equal to 1.
6028
6029
6030 if (key === 'v' && value === 1) {
6031 continue;
6032 } // Playback rate should only be sent if not equal to 1.
6033
6034
6035 if (key == 'pr' && value === 1) {
6036 continue;
6037 } // Certain values require special formatting
6038
6039
6040 var formatter = formatters[key];
6041
6042 if (formatter) {
6043 value = formatter(value);
6044 } // Serialize the key/value pair
6045
6046
6047 var type = typeof value;
6048 var result = void 0;
6049
6050 if (key === 'ot' || key === 'sf' || key === 'st') {
6051 result = key + "=" + value;
6052 } else if (type === 'boolean') {
6053 result = key;
6054 } else if (type === 'number') {
6055 result = key + "=" + value;
6056 } else {
6057 result = key + "=" + JSON.stringify(value);
6058 }
6059
6060 results.push(result);
6061 }
6062
6063 return results.join(',');
6064 }
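// Illustrative sketch (not part of the bundle): serialize() output for a made-up payload,
// following the rules above (sorted keys, rounded or hundred-rounded numbers, bare keys for
// true booleans, quoted strings, unquoted token values for ot/sf/st, v dropped when 1):
//
//   CMCDController.serialize({
//     br: 3199.96, bs: true, d: 4004.32, mtp: 25432, ot: 'v', sid: 'abc', v: 1
//   });
//   // => 'br=3200,bs,d=4004,mtp=25400,ot=v,sid="abc"'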
6065 /**
6066 * Convert a CMCD data object to request headers according to the rules
6067 * defined in the section 2.1 and 3.2 of
6068 * [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf).
6069 */
6070 ;
6071
6072 CMCDController.toHeaders = function toHeaders(data) {
6073 var keys = Object.keys(data);
6074 var headers = {};
6075 var headerNames = ['Object', 'Request', 'Session', 'Status'];
6076 var headerGroups = [{}, {}, {}, {}];
6077 var headerMap = {
6078 br: 0,
6079 d: 0,
6080 ot: 0,
6081 tb: 0,
6082 bl: 1,
6083 dl: 1,
6084 mtp: 1,
6085 nor: 1,
6086 nrr: 1,
6087 su: 1,
6088 cid: 2,
6089 pr: 2,
6090 sf: 2,
6091 sid: 2,
6092 st: 2,
6093 v: 2,
6094 bs: 3,
6095 rtp: 3
6096 };
6097
6098 for (var _i = 0, _keys = keys; _i < _keys.length; _i++) {
6099 var key = _keys[_i];
6100 // Unmapped fields are mapped to the Request header
6101 var index = headerMap[key] != null ? headerMap[key] : 1;
6102 headerGroups[index][key] = data[key];
6103 }
6104
6105 for (var i = 0; i < headerGroups.length; i++) {
6106 var value = CMCDController.serialize(headerGroups[i]);
6107
6108 if (value) {
6109 headers["CMCD-" + headerNames[i]] = value;
6110 }
6111 }
6112
6113 return headers;
6114 }
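// Illustrative sketch (not part of the bundle): toHeaders() splits a made-up payload across
// the four CMCD-* headers according to headerMap above:
//
//   CMCDController.toHeaders({ br: 3200, ot: 'v', bl: 21300, su: true, sid: 'abc', bs: true });
//   // => {
//   //      'CMCD-Object':  'br=3200,ot=v',
//   //      'CMCD-Request': 'bl=21300,su',
//   //      'CMCD-Session': 'sid="abc"',
//   //      'CMCD-Status':  'bs'
//   //    }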
6115 /**
6116 * Convert a CMCD data object to query args according to the rules
6117 * defined in the section 2.2 and 3.2 of
6118 * [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf).
6119 */
6120 ;
6121
6122 CMCDController.toQuery = function toQuery(data) {
6123 return "CMCD=" + encodeURIComponent(CMCDController.serialize(data));
6124 }
6125 /**
6126 * Append query args to a uri.
6127 */
6128 ;
6129
6130 CMCDController.appendQueryToUri = function appendQueryToUri(uri, query) {
6131 if (!query) {
6132 return uri;
6133 }
6134
6135 var separator = uri.includes('?') ? '&' : '?';
6136 return "" + uri + separator + query;
6137 };
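// Illustrative sketch (not part of the bundle): the query-arg variant URL-encodes the
// serialized payload into a single CMCD parameter and appends it with '?' or '&':
//
//   var query = CMCDController.toQuery({ br: 3200, ot: 'v' });
//   // => 'CMCD=br%3D3200%2Cot%3Dv'
//   CMCDController.appendQueryToUri('https://cdn.example.com/seg1.m4s', query);
//   // => 'https://cdn.example.com/seg1.m4s?CMCD=br%3D3200%2Cot%3Dv'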
6138
6139 return CMCDController;
6140}();
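// Illustrative usage sketch (not part of the bundle): per the constructor above, the CMCD
// controller only activates when a cmcd object is present in the Hls config. The values
// below are made up; sessionId is optional and a UUID is generated when it is omitted.
//
//   var hls = new Hls({
//     cmcd: {
//       sessionId: 'f3a9c2d4-0000-4000-8000-000000000000',
//       contentId: 'my-content-id',
//       useHeaders: false // false => CMCD query arg, true => CMCD-* request headers
//     }
//   });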
6141
6142
6143
6144/***/ }),
6145
6146/***/ "./src/controller/eme-controller.ts":
6147/*!******************************************!*\
6148 !*** ./src/controller/eme-controller.ts ***!
6149 \******************************************/
6150/*! exports provided: default */
6151/***/ (function(module, __webpack_exports__, __webpack_require__) {
6152__webpack_require__.r(__webpack_exports__);
6153/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
6154/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
6155/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
6156/* harmony import */ var _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/mediakeys-helper */ "./src/utils/mediakeys-helper.ts");
6157function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
6158
6159function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
6160
6161/**
6162 * @author Stephan Hesse <disparat@gmail.com> | <tchakabam@gmail.com>
6163 *
6164 * DRM support for Hls.js
6165 */
6166
6167
6168
6169
6170var MAX_LICENSE_REQUEST_FAILURES = 3;
6171/**
6172 * @see https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemConfiguration
6173 * @param {Array<string>} audioCodecs List of required audio codecs to support
6174 * @param {Array<string>} videoCodecs List of required video codecs to support
6175 * @param {object} drmSystemOptions Optional parameters/requirements for the key-system
6176 * @returns {Array<MediaKeySystemConfiguration>} An array of supported configurations
6177 */
6178
6179var createWidevineMediaKeySystemConfigurations = function createWidevineMediaKeySystemConfigurations(audioCodecs, videoCodecs, drmSystemOptions) {
6180 /* jshint ignore:line */
6181 var baseConfig = {
6182 // initDataTypes: ['keyids', 'mp4'],
6183 // label: "",
6184 // persistentState: "not-allowed", // or "required" ?
6185 // distinctiveIdentifier: "not-allowed", // or "required" ?
6186 // sessionTypes: ['temporary'],
6187 audioCapabilities: [],
6188 // { contentType: 'audio/mp4; codecs="mp4a.40.2"' }
6189 videoCapabilities: [] // { contentType: 'video/mp4; codecs="avc1.42E01E"' }
6190
6191 };
6192 audioCodecs.forEach(function (codec) {
6193 baseConfig.audioCapabilities.push({
6194 contentType: "audio/mp4; codecs=\"" + codec + "\"",
6195 robustness: drmSystemOptions.audioRobustness || ''
6196 });
6197 });
6198 videoCodecs.forEach(function (codec) {
6199 baseConfig.videoCapabilities.push({
6200 contentType: "video/mp4; codecs=\"" + codec + "\"",
6201 robustness: drmSystemOptions.videoRobustness || ''
6202 });
6203 });
6204 return [baseConfig];
6205};
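// Illustrative sketch (not part of the bundle): for made-up codec lists, the function above
// yields a single MediaKeySystemConfiguration of the form:
//
//   createWidevineMediaKeySystemConfigurations(['mp4a.40.2'], ['avc1.42E01E'], {});
//   // => [{
//   //      audioCapabilities: [{ contentType: 'audio/mp4; codecs="mp4a.40.2"', robustness: '' }],
//   //      videoCapabilities: [{ contentType: 'video/mp4; codecs="avc1.42E01E"', robustness: '' }]
//   //    }]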
6206/**
6207 * The idea here is to handle key-system (and their respective platforms) specific configuration differences
6208 * in order to work with the local requestMediaKeySystemAccess method.
6209 *
6210 * We can also rule-out platform-related key-system support at this point by throwing an error.
6211 *
6212 * @param {string} keySystem Identifier for the key-system, see `KeySystems` enum
6213 * @param {Array<string>} audioCodecs List of required audio codecs to support
6214 * @param {Array<string>} videoCodecs List of required video codecs to support
6215 * @throws will throw an error if an unknown key system is passed
6216 * @returns {Array<MediaKeySystemConfiguration>} A non-empty Array of MediaKeySystemConfiguration objects
6217 */
6218
6219
6220var getSupportedMediaKeySystemConfigurations = function getSupportedMediaKeySystemConfigurations(keySystem, audioCodecs, videoCodecs, drmSystemOptions) {
6221 switch (keySystem) {
6222 case _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_3__["KeySystems"].WIDEVINE:
6223 return createWidevineMediaKeySystemConfigurations(audioCodecs, videoCodecs, drmSystemOptions);
6224
6225 default:
6226 throw new Error("Unknown key-system: " + keySystem);
6227 }
6228};
6229
6230/**
6231 * Controller to deal with encrypted media extensions (EME)
6232 * @see https://developer.mozilla.org/en-US/docs/Web/API/Encrypted_Media_Extensions_API
6233 *
6234 * @class
6235 * @constructor
6236 */
6237var EMEController = /*#__PURE__*/function () {
6238 /**
6239 * @constructs
6240 * @param {Hls} hls Our Hls.js instance
6241 */
6242 function EMEController(hls) {
6243 this.hls = void 0;
6244 this._widevineLicenseUrl = void 0;
6245 this._licenseXhrSetup = void 0;
6246 this._licenseResponseCallback = void 0;
6247 this._emeEnabled = void 0;
6248 this._requestMediaKeySystemAccess = void 0;
6249 this._drmSystemOptions = void 0;
6250 this._config = void 0;
6251 this._mediaKeysList = [];
6252 this._media = null;
6253 this._hasSetMediaKeys = false;
6254 this._requestLicenseFailureCount = 0;
6255 this.mediaKeysPromise = null;
6256 this._onMediaEncrypted = this.onMediaEncrypted.bind(this);
6257 this.hls = hls;
6258 this._config = hls.config;
6259 this._widevineLicenseUrl = this._config.widevineLicenseUrl;
6260 this._licenseXhrSetup = this._config.licenseXhrSetup;
6261 this._licenseResponseCallback = this._config.licenseResponseCallback;
6262 this._emeEnabled = this._config.emeEnabled;
6263 this._requestMediaKeySystemAccess = this._config.requestMediaKeySystemAccessFunc;
6264 this._drmSystemOptions = this._config.drmSystemOptions;
6265
6266 this._registerListeners();
6267 }
6268
6269 var _proto = EMEController.prototype;
6270
6271 _proto.destroy = function destroy() {
6272 this._unregisterListeners(); // @ts-ignore
6273
6274
6275 this.hls = this._onMediaEncrypted = null;
6276 this._requestMediaKeySystemAccess = null;
6277 };
6278
6279 _proto._registerListeners = function _registerListeners() {
6280 this.hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
6281 this.hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHED, this.onMediaDetached, this);
6282 this.hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
6283 };
6284
6285 _proto._unregisterListeners = function _unregisterListeners() {
6286 this.hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
6287 this.hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHED, this.onMediaDetached, this);
6288 this.hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
6289 }
6290 /**
6291 * @param {string} keySystem Identifier for the key-system, see `KeySystems` enum
6292 * @returns {string} License server URL for key-system (if any configured, otherwise causes error)
6293   * @throws if an unsupported key-system is passed
6294 */
6295 ;
6296
6297 _proto.getLicenseServerUrl = function getLicenseServerUrl(keySystem) {
6298 switch (keySystem) {
6299 case _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_3__["KeySystems"].WIDEVINE:
6300 if (!this._widevineLicenseUrl) {
6301 break;
6302 }
6303
6304 return this._widevineLicenseUrl;
6305 }
6306
6307 throw new Error("no license server URL configured for key-system \"" + keySystem + "\"");
6308 }
6309 /**
6310 * Requests access object and adds it to our list upon success
6311 * @private
6312 * @param {string} keySystem System ID (see `KeySystems`)
6313 * @param {Array<string>} audioCodecs List of required audio codecs to support
6314 * @param {Array<string>} videoCodecs List of required video codecs to support
6315   * @throws When an unsupported KeySystem is passed
6316 */
6317 ;
6318
6319 _proto._attemptKeySystemAccess = function _attemptKeySystemAccess(keySystem, audioCodecs, videoCodecs) {
6320 var _this = this;
6321
6322 // This can throw, but is caught in event handler callpath
6323 var mediaKeySystemConfigs = getSupportedMediaKeySystemConfigurations(keySystem, audioCodecs, videoCodecs, this._drmSystemOptions);
6324 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('Requesting encrypted media key-system access'); // expecting interface like window.navigator.requestMediaKeySystemAccess
6325
6326 var keySystemAccessPromise = this.requestMediaKeySystemAccess(keySystem, mediaKeySystemConfigs);
6327 this.mediaKeysPromise = keySystemAccessPromise.then(function (mediaKeySystemAccess) {
6328 return _this._onMediaKeySystemAccessObtained(keySystem, mediaKeySystemAccess);
6329 });
6330 keySystemAccessPromise.catch(function (err) {
6331 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("Failed to obtain key-system \"" + keySystem + "\" access:", err);
6332 });
6333 };
6334
6335 /**
6336 * Handles obtaining access to a key-system
6337 * @private
6338 * @param {string} keySystem
6339 * @param {MediaKeySystemAccess} mediaKeySystemAccess https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess
6340 */
6341 _proto._onMediaKeySystemAccessObtained = function _onMediaKeySystemAccessObtained(keySystem, mediaKeySystemAccess) {
6342 var _this2 = this;
6343
6344 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Access for key-system \"" + keySystem + "\" obtained");
6345 var mediaKeysListItem = {
6346 mediaKeysSessionInitialized: false,
6347 mediaKeySystemAccess: mediaKeySystemAccess,
6348 mediaKeySystemDomain: keySystem
6349 };
6350
6351 this._mediaKeysList.push(mediaKeysListItem);
6352
6353 var mediaKeysPromise = Promise.resolve().then(function () {
6354 return mediaKeySystemAccess.createMediaKeys();
6355 }).then(function (mediaKeys) {
6356 mediaKeysListItem.mediaKeys = mediaKeys;
6357 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Media-keys created for key-system \"" + keySystem + "\"");
6358
6359 _this2._onMediaKeysCreated();
6360
6361 return mediaKeys;
6362 });
6363 mediaKeysPromise.catch(function (err) {
6364 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Failed to create media-keys:', err);
6365 });
6366 return mediaKeysPromise;
6367 }
6368 /**
6369 * Handles key-creation (represents access to CDM). We are going to create key-sessions upon this
6370 * for all existing keys where no session exists yet.
6371 *
6372 * @private
6373 */
6374 ;
6375
6376 _proto._onMediaKeysCreated = function _onMediaKeysCreated() {
6377 var _this3 = this;
6378
6379 // check for all key-list items if a session exists, otherwise, create one
6380 this._mediaKeysList.forEach(function (mediaKeysListItem) {
6381 if (!mediaKeysListItem.mediaKeysSession) {
6382 // mediaKeys is definitely initialized here
6383 mediaKeysListItem.mediaKeysSession = mediaKeysListItem.mediaKeys.createSession();
6384
6385 _this3._onNewMediaKeySession(mediaKeysListItem.mediaKeysSession);
6386 }
6387 });
6388 }
6389 /**
6390 * @private
6391 * @param {*} keySession
6392 */
6393 ;
6394
6395 _proto._onNewMediaKeySession = function _onNewMediaKeySession(keySession) {
6396 var _this4 = this;
6397
6398 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("New key-system session " + keySession.sessionId);
6399 keySession.addEventListener('message', function (event) {
6400 _this4._onKeySessionMessage(keySession, event.message);
6401 }, false);
6402 }
6403 /**
6404 * @private
6405 * @param {MediaKeySession} keySession
6406 * @param {ArrayBuffer} message
6407 */
6408 ;
6409
6410 _proto._onKeySessionMessage = function _onKeySessionMessage(keySession, message) {
6411 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('Got EME message event, creating license request');
6412
6413 this._requestLicense(message, function (data) {
6414 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Received license data (length: " + (data ? data.byteLength : data) + "), updating key-session");
6415 keySession.update(data);
6416 });
6417 }
6418 /**
6419 * @private
6420 * @param e {MediaEncryptedEvent}
6421 */
6422 ;
6423
6424 _proto.onMediaEncrypted = function onMediaEncrypted(e) {
6425 var _this5 = this;
6426
6427 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Media is encrypted using \"" + e.initDataType + "\" init data type");
6428
6429 if (!this.mediaKeysPromise) {
6430 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Fatal: Media is encrypted but no CDM access or no keys have been requested');
6431 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
6432 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
6433 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_KEYS,
6434 fatal: true
6435 });
6436 return;
6437 }
6438
6439 var finallySetKeyAndStartSession = function finallySetKeyAndStartSession(mediaKeys) {
6440 if (!_this5._media) {
6441 return;
6442 }
6443
6444 _this5._attemptSetMediaKeys(mediaKeys);
6445
6446 _this5._generateRequestWithPreferredKeySession(e.initDataType, e.initData);
6447 }; // Could use `Promise.finally` but some Promise polyfills are missing it
6448
6449
6450 this.mediaKeysPromise.then(finallySetKeyAndStartSession).catch(finallySetKeyAndStartSession);
6451 }
6452 /**
6453 * @private
6454 */
6455 ;
6456
6457 _proto._attemptSetMediaKeys = function _attemptSetMediaKeys(mediaKeys) {
6458 if (!this._media) {
6459 throw new Error('Attempted to set mediaKeys without first attaching a media element');
6460 }
6461
6462 if (!this._hasSetMediaKeys) {
6463      // FIXME: see if we really can/want/need to deal with several potential key-sessions?
6464 var keysListItem = this._mediaKeysList[0];
6465
6466 if (!keysListItem || !keysListItem.mediaKeys) {
6467 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Fatal: Media is encrypted but no CDM access or no keys have been obtained yet');
6468 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
6469 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
6470 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_KEYS,
6471 fatal: true
6472 });
6473 return;
6474 }
6475
6476 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('Setting keys for encrypted media');
6477
6478 this._media.setMediaKeys(keysListItem.mediaKeys);
6479
6480 this._hasSetMediaKeys = true;
6481 }
6482 }
6483 /**
6484 * @private
6485 */
6486 ;
6487
6488 _proto._generateRequestWithPreferredKeySession = function _generateRequestWithPreferredKeySession(initDataType, initData) {
6489 var _this6 = this;
6490
6491    // FIXME: see if we really can/want/need to deal with several potential key-sessions?
6492 var keysListItem = this._mediaKeysList[0];
6493
6494 if (!keysListItem) {
6495      _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Fatal: Media is encrypted but no key-system access has been obtained yet');
6496 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
6497 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
6498 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_ACCESS,
6499 fatal: true
6500 });
6501 return;
6502 }
6503
6504 if (keysListItem.mediaKeysSessionInitialized) {
6505 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('Key-Session already initialized but requested again');
6506 return;
6507 }
6508
6509 var keySession = keysListItem.mediaKeysSession;
6510
6511 if (!keySession) {
6512      _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Fatal: Media is encrypted but no key-session exists');
6513 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
6514 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
6515 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_SESSION,
6516 fatal: true
6517 });
6518 return;
6519 } // initData is null if the media is not CORS-same-origin
6520
6521
6522 if (!initData) {
6523 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('Fatal: initData required for generating a key session is null');
6524 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
6525 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
6526 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_INIT_DATA,
6527 fatal: true
6528 });
6529 return;
6530 }
6531
6532 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Generating key-session request for \"" + initDataType + "\" init data type");
6533 keysListItem.mediaKeysSessionInitialized = true;
6534 keySession.generateRequest(initDataType, initData).then(function () {
6535 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].debug('Key-session generation succeeded');
6536 }).catch(function (err) {
6537 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Error generating key-session request:', err);
6538
6539 _this6.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
6540 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
6541 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_SESSION,
6542 fatal: false
6543 });
6544 });
6545 }
6546 /**
6547 * @private
6548 * @param {string} url License server URL
6549 * @param {ArrayBuffer} keyMessage Message data issued by key-system
6550 * @param {function} callback Called when XHR has succeeded
6551 * @returns {XMLHttpRequest} Unsent (but opened state) XHR object
6552 * @throws if XMLHttpRequest construction failed
6553 */
6554 ;
6555
6556 _proto._createLicenseXhr = function _createLicenseXhr(url, keyMessage, callback) {
6557 var xhr = new XMLHttpRequest();
6558 xhr.responseType = 'arraybuffer';
6559 xhr.onreadystatechange = this._onLicenseRequestReadyStageChange.bind(this, xhr, url, keyMessage, callback);
6560 var licenseXhrSetup = this._licenseXhrSetup;
6561
6562 if (licenseXhrSetup) {
6563 try {
6564 licenseXhrSetup.call(this.hls, xhr, url);
6565 licenseXhrSetup = undefined;
6566 } catch (e) {
6567 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error(e);
6568 }
6569 }
6570
6571 try {
6572 // if licenseXhrSetup did not yet call open, let's do it now
6573 if (!xhr.readyState) {
6574 xhr.open('POST', url, true);
6575 }
6576
6577 if (licenseXhrSetup) {
6578 licenseXhrSetup.call(this.hls, xhr, url);
6579 }
6580 } catch (e) {
6581 // IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS
6582 throw new Error("issue setting up KeySystem license XHR " + e);
6583 }
6584
6585 return xhr;
6586 }
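// Illustrative sketch (not part of the bundle): a licenseXhrSetup hook as consumed by the
// code above. The hook may call xhr.open() itself; otherwise the controller opens a POST to
// the configured license URL. Config field names are taken from this file; the URL and
// header values are made up.
//
//   var hls = new Hls({
//     emeEnabled: true,
//     widevineLicenseUrl: 'https://license.example.com/widevine',
//     licenseXhrSetup: function (xhr, url) {
//       xhr.open('POST', url, true);
//       xhr.setRequestHeader('X-Auth-Token', 'example-token');
//     }
//   });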
6587 /**
6588 * @private
6589 * @param {XMLHttpRequest} xhr
6590 * @param {string} url License server URL
6591 * @param {ArrayBuffer} keyMessage Message data issued by key-system
6592 * @param {function} callback Called when XHR has succeeded
6593 */
6594 ;
6595
6596 _proto._onLicenseRequestReadyStageChange = function _onLicenseRequestReadyStageChange(xhr, url, keyMessage, callback) {
6597 switch (xhr.readyState) {
6598 case 4:
6599 if (xhr.status === 200) {
6600 this._requestLicenseFailureCount = 0;
6601 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('License request succeeded');
6602 var _data = xhr.response;
6603 var licenseResponseCallback = this._licenseResponseCallback;
6604
6605 if (licenseResponseCallback) {
6606 try {
6607 _data = licenseResponseCallback.call(this.hls, xhr, url);
6608 } catch (e) {
6609 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error(e);
6610 }
6611 }
6612
6613 callback(_data);
6614 } else {
6615 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("License Request XHR failed (" + url + "). Status: " + xhr.status + " (" + xhr.statusText + ")");
6616 this._requestLicenseFailureCount++;
6617
6618 if (this._requestLicenseFailureCount > MAX_LICENSE_REQUEST_FAILURES) {
6619 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
6620 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
6621 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_LICENSE_REQUEST_FAILED,
6622 fatal: true
6623 });
6624 return;
6625 }
6626
6627 var attemptsLeft = MAX_LICENSE_REQUEST_FAILURES - this._requestLicenseFailureCount + 1;
6628 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("Retrying license request, " + attemptsLeft + " attempts left");
6629
6630 this._requestLicense(keyMessage, callback);
6631 }
6632
6633 break;
6634 }
6635 }
6636 /**
6637 * @private
6638 * @param {MediaKeysListItem} keysListItem
6639 * @param {ArrayBuffer} keyMessage
6640 * @returns {ArrayBuffer} Challenge data posted to license server
6641 * @throws if KeySystem is unsupported
6642 */
6643 ;
6644
6645 _proto._generateLicenseRequestChallenge = function _generateLicenseRequestChallenge(keysListItem, keyMessage) {
6646 switch (keysListItem.mediaKeySystemDomain) {
6647 // case KeySystems.PLAYREADY:
6648 // from https://github.com/MicrosoftEdge/Demos/blob/master/eme/scripts/demo.js
6649
6650 /*
6651 if (this.licenseType !== this.LICENSE_TYPE_WIDEVINE) {
6652 // For PlayReady CDMs, we need to dig the Challenge out of the XML.
6653 var keyMessageXml = new DOMParser().parseFromString(String.fromCharCode.apply(null, new Uint16Array(keyMessage)), 'application/xml');
6654 if (keyMessageXml.getElementsByTagName('Challenge')[0]) {
6655 challenge = atob(keyMessageXml.getElementsByTagName('Challenge')[0].childNodes[0].nodeValue);
6656 } else {
6657 throw 'Cannot find <Challenge> in key message';
6658 }
6659 var headerNames = keyMessageXml.getElementsByTagName('name');
6660 var headerValues = keyMessageXml.getElementsByTagName('value');
6661 if (headerNames.length !== headerValues.length) {
6662 throw 'Mismatched header <name>/<value> pair in key message';
6663 }
6664 for (var i = 0; i < headerNames.length; i++) {
6665 xhr.setRequestHeader(headerNames[i].childNodes[0].nodeValue, headerValues[i].childNodes[0].nodeValue);
6666 }
6667 }
6668 break;
6669 */
6670 case _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_3__["KeySystems"].WIDEVINE:
6671 // For Widevine CDMs, the challenge is the keyMessage.
6672 return keyMessage;
6673 }
6674
6675 throw new Error("unsupported key-system: " + keysListItem.mediaKeySystemDomain);
6676 }
6677 /**
6678 * @private
6679 * @param keyMessage
6680 * @param callback
6681 */
6682 ;
6683
6684 _proto._requestLicense = function _requestLicense(keyMessage, callback) {
6685 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('Requesting content license for key-system');
6686 var keysListItem = this._mediaKeysList[0];
6687
6688 if (!keysListItem) {
6689 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Fatal error: Media is encrypted but no key-system access has been obtained yet');
6690 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
6691 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
6692 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_ACCESS,
6693 fatal: true
6694 });
6695 return;
6696 }
6697
6698 try {
6699 var _url = this.getLicenseServerUrl(keysListItem.mediaKeySystemDomain);
6700
6701 var _xhr = this._createLicenseXhr(_url, keyMessage, callback);
6702
6703 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Sending license request to URL: " + _url);
6704
6705 var challenge = this._generateLicenseRequestChallenge(keysListItem, keyMessage);
6706
6707 _xhr.send(challenge);
6708 } catch (e) {
6709 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("Failure requesting DRM license: " + e);
6710 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
6711 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
6712 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_LICENSE_REQUEST_FAILED,
6713 fatal: true
6714 });
6715 }
6716 };
6717
6718 _proto.onMediaAttached = function onMediaAttached(event, data) {
6719 if (!this._emeEnabled) {
6720 return;
6721 }
6722
6723 var media = data.media; // keep reference of media
6724
6725 this._media = media;
6726 media.addEventListener('encrypted', this._onMediaEncrypted);
6727 };
6728
6729 _proto.onMediaDetached = function onMediaDetached() {
6730 var media = this._media;
6731 var mediaKeysList = this._mediaKeysList;
6732
6733 if (!media) {
6734 return;
6735 }
6736
6737 media.removeEventListener('encrypted', this._onMediaEncrypted);
6738 this._media = null;
6739 this._mediaKeysList = []; // Close all sessions and remove media keys from the video element.
6740
6741 Promise.all(mediaKeysList.map(function (mediaKeysListItem) {
6742 if (mediaKeysListItem.mediaKeysSession) {
6743 return mediaKeysListItem.mediaKeysSession.close().catch(function () {// Ignore errors when closing the sessions. Closing a session that
6744 // generated no key requests will throw an error.
6745 });
6746 }
6747 })).then(function () {
6748 return media.setMediaKeys(null);
6749 }).catch(function () {// Ignore any failures while removing media keys from the video element.
6750 });
6751 };
6752
6753 _proto.onManifestParsed = function onManifestParsed(event, data) {
6754 if (!this._emeEnabled) {
6755 return;
6756 }
6757
6758 var audioCodecs = data.levels.map(function (level) {
6759 return level.audioCodec;
6760 }).filter(function (audioCodec) {
6761 return !!audioCodec;
6762 });
6763 var videoCodecs = data.levels.map(function (level) {
6764 return level.videoCodec;
6765 }).filter(function (videoCodec) {
6766 return !!videoCodec;
6767 });
6768
6769 this._attemptKeySystemAccess(_utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_3__["KeySystems"].WIDEVINE, audioCodecs, videoCodecs);
6770 };
6771
6772 _createClass(EMEController, [{
6773 key: "requestMediaKeySystemAccess",
6774 get: function get() {
6775 if (!this._requestMediaKeySystemAccess) {
6776 throw new Error('No requestMediaKeySystemAccess function configured');
6777 }
6778
6779 return this._requestMediaKeySystemAccess;
6780 }
6781 }]);
6782
6783 return EMEController;
6784}();
6785
6786/* harmony default export */ __webpack_exports__["default"] = (EMEController);
6787
6788/***/ }),
6789
6790/***/ "./src/controller/fps-controller.ts":
6791/*!******************************************!*\
6792 !*** ./src/controller/fps-controller.ts ***!
6793 \******************************************/
6794/*! exports provided: default */
6795/***/ (function(module, __webpack_exports__, __webpack_require__) {
6796__webpack_require__.r(__webpack_exports__);
6797/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
6798/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
6799
6800
6801
6802var FPSController = /*#__PURE__*/function () {
6803 // stream controller must be provided as a dependency!
6804 function FPSController(hls) {
6805 this.hls = void 0;
6806 this.isVideoPlaybackQualityAvailable = false;
6807 this.timer = void 0;
6808 this.media = null;
6809 this.lastTime = void 0;
6810 this.lastDroppedFrames = 0;
6811 this.lastDecodedFrames = 0;
6812 this.streamController = void 0;
6813 this.hls = hls;
6814 this.registerListeners();
6815 }
6816
6817 var _proto = FPSController.prototype;
6818
6819 _proto.setStreamController = function setStreamController(streamController) {
6820 this.streamController = streamController;
6821 };
6822
6823 _proto.registerListeners = function registerListeners() {
6824 this.hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
6825 };
6826
6827 _proto.unregisterListeners = function unregisterListeners() {
6828 this.hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHING, this.onMediaAttaching);
6829 };
6830
6831 _proto.destroy = function destroy() {
6832 if (this.timer) {
6833 clearInterval(this.timer);
6834 }
6835
6836 this.unregisterListeners();
6837 this.isVideoPlaybackQualityAvailable = false;
6838 this.media = null;
6839 };
6840
6841 _proto.onMediaAttaching = function onMediaAttaching(event, data) {
6842 var config = this.hls.config;
6843
6844 if (config.capLevelOnFPSDrop) {
6845 var media = data.media instanceof self.HTMLVideoElement ? data.media : null;
6846 this.media = media;
6847
6848 if (media && typeof media.getVideoPlaybackQuality === 'function') {
6849 this.isVideoPlaybackQualityAvailable = true;
6850 }
6851
6852 self.clearInterval(this.timer);
6853 this.timer = self.setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
6854 }
6855 };
6856
6857 _proto.checkFPS = function checkFPS(video, decodedFrames, droppedFrames) {
6858 var currentTime = performance.now();
6859
6860 if (decodedFrames) {
6861 if (this.lastTime) {
6862 var currentPeriod = currentTime - this.lastTime;
6863 var currentDropped = droppedFrames - this.lastDroppedFrames;
6864 var currentDecoded = decodedFrames - this.lastDecodedFrames;
6865 var droppedFPS = 1000 * currentDropped / currentPeriod;
6866 var hls = this.hls;
6867 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FPS_DROP, {
6868 currentDropped: currentDropped,
6869 currentDecoded: currentDecoded,
6870 totalDroppedFrames: droppedFrames
6871 });
6872
6873 if (droppedFPS > 0) {
6874 // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
6875 if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
6876 var currentLevel = hls.currentLevel;
6877 _utils_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
6878
6879 if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
6880 currentLevel = currentLevel - 1;
6881 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FPS_DROP_LEVEL_CAPPING, {
6882 level: currentLevel,
6883 droppedLevel: hls.currentLevel
6884 });
6885 hls.autoLevelCapping = currentLevel;
6886 this.streamController.nextLevelSwitch();
6887 }
6888 }
6889 }
6890 }
6891
6892 this.lastTime = currentTime;
6893 this.lastDroppedFrames = droppedFrames;
6894 this.lastDecodedFrames = decodedFrames;
6895 }
6896 };
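// Illustrative worked example (not part of the bundle), using made-up numbers: with a
// monitoring period of 5000 ms and fpsDroppedMonitoringThreshold = 0.2, a check that sees
// currentDecoded = 150 and currentDropped = 40 frames gives
// droppedFPS = 1000 * 40 / 5000 = 8. Since 40 > 0.2 * 150 (= 30), the controller caps
// autoLevelCapping one level below the current level (when currentLevel > 0 and not already
// capped lower) and asks the stream controller to switch.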
6897
6898 _proto.checkFPSInterval = function checkFPSInterval() {
6899 var video = this.media;
6900
6901 if (video) {
6902 if (this.isVideoPlaybackQualityAvailable) {
6903 var videoPlaybackQuality = video.getVideoPlaybackQuality();
6904 this.checkFPS(video, videoPlaybackQuality.totalVideoFrames, videoPlaybackQuality.droppedVideoFrames);
6905 } else {
6906 // HTMLVideoElement doesn't include the webkit types
6907 this.checkFPS(video, video.webkitDecodedFrameCount, video.webkitDroppedFrameCount);
6908 }
6909 }
6910 };
6911
6912 return FPSController;
6913}();
6914
6915/* harmony default export */ __webpack_exports__["default"] = (FPSController);
6916
6917/***/ }),
6918
6919/***/ "./src/controller/fragment-finders.ts":
6920/*!********************************************!*\
6921 !*** ./src/controller/fragment-finders.ts ***!
6922 \********************************************/
6923/*! exports provided: findFragmentByPDT, findFragmentByPTS, fragmentWithinToleranceTest, pdtWithinToleranceTest, findFragWithCC */
6924/***/ (function(module, __webpack_exports__, __webpack_require__) {
6925__webpack_require__.r(__webpack_exports__);
6926/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findFragmentByPDT", function() { return findFragmentByPDT; });
6927/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findFragmentByPTS", function() { return findFragmentByPTS; });
6928/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "fragmentWithinToleranceTest", function() { return fragmentWithinToleranceTest; });
6929/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "pdtWithinToleranceTest", function() { return pdtWithinToleranceTest; });
6930/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findFragWithCC", function() { return findFragWithCC; });
6931/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
6932/* harmony import */ var _utils_binary_search__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/binary-search */ "./src/utils/binary-search.ts");
6933
6934
6935
6936/**
6937 * Returns first fragment whose endPdt value exceeds the given PDT.
6938 * @param {Array<Fragment>} fragments - The array of candidate fragments
6939 * @param {number|null} [PDTValue = null] - The PDT value which must be exceeded
6940 * @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start/end can be within in order to be considered contiguous
6941 * @returns {*|null} fragment - The best matching fragment
6942 */
6943function findFragmentByPDT(fragments, PDTValue, maxFragLookUpTolerance) {
6944 if (PDTValue === null || !Array.isArray(fragments) || !fragments.length || !Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(PDTValue)) {
6945 return null;
6946 } // if less than start
6947
6948
6949 var startPDT = fragments[0].programDateTime;
6950
6951 if (PDTValue < (startPDT || 0)) {
6952 return null;
6953 }
6954
6955 var endPDT = fragments[fragments.length - 1].endProgramDateTime;
6956
6957 if (PDTValue >= (endPDT || 0)) {
6958 return null;
6959 }
6960
6961 maxFragLookUpTolerance = maxFragLookUpTolerance || 0;
6962
6963 for (var seg = 0; seg < fragments.length; ++seg) {
6964 var frag = fragments[seg];
6965
6966 if (pdtWithinToleranceTest(PDTValue, maxFragLookUpTolerance, frag)) {
6967 return frag;
6968 }
6969 }
6970
6971 return null;
6972}
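// Illustrative sketch (not part of the bundle): fragments annotated with made-up
// programDateTime ranges (Unix ms). The lookup rejects PDT values outside the playlist's
// PDT window and otherwise returns the first fragment that passes pdtWithinToleranceTest:
//
//   var frags = [
//     { programDateTime: 1000, endProgramDateTime: 5000, duration: 4 },
//     { programDateTime: 5000, endProgramDateTime: 9000, duration: 4 }
//   ];
//   findFragmentByPDT(frags, 6000, 0.25); // => the second fragment
//   findFragmentByPDT(frags, 9500, 0.25); // => null (beyond the last endProgramDateTime)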
6973/**
6974 * Finds a fragment based on the SN of the previous fragment; or based on the needs of the current buffer.
6975 * This method compensates for small buffer gaps by applying a tolerance to the start of any candidate fragment, thus
6976 * breaking any traps which would cause the same fragment to be continuously selected within a small range.
6977 * @param {*} fragPrevious - The last frag successfully appended
6978 * @param {Array} fragments - The array of candidate fragments
6979 * @param {number} [bufferEnd = 0] - The end of the contiguous buffered range the playhead is currently within
6980 * @param {number} maxFragLookUpTolerance - The amount of time that a fragment's start/end can be within in order to be considered contiguous
6981 * @returns {*} foundFrag - The best matching fragment
6982 */
6983
6984function findFragmentByPTS(fragPrevious, fragments, bufferEnd, maxFragLookUpTolerance) {
6985 if (bufferEnd === void 0) {
6986 bufferEnd = 0;
6987 }
6988
6989 if (maxFragLookUpTolerance === void 0) {
6990 maxFragLookUpTolerance = 0;
6991 }
6992
6993 var fragNext = null;
6994
6995 if (fragPrevious) {
6996 fragNext = fragments[fragPrevious.sn - fragments[0].sn + 1] || null;
6997 } else if (bufferEnd === 0 && fragments[0].start === 0) {
6998 fragNext = fragments[0];
6999 } // Prefer the next fragment if it's within tolerance
7000
7001
7002 if (fragNext && fragmentWithinToleranceTest(bufferEnd, maxFragLookUpTolerance, fragNext) === 0) {
7003 return fragNext;
7004 } // We might be seeking past the tolerance so find the best match
7005
7006
7007 var foundFragment = _utils_binary_search__WEBPACK_IMPORTED_MODULE_1__["default"].search(fragments, fragmentWithinToleranceTest.bind(null, bufferEnd, maxFragLookUpTolerance));
7008
7009 if (foundFragment) {
7010 return foundFragment;
7011 } // If no match was found return the next fragment after fragPrevious, or null
7012
7013
7014 return fragNext;
7015}
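// Illustrative sketch (not part of the bundle): given made-up contiguous fragments, the
// lookup prefers the fragment after fragPrevious when the buffer end falls within its
// tolerance-adjusted range, and falls back to a binary search otherwise:
//
//   var frags = [
//     { sn: 10, start: 0, duration: 10 },
//     { sn: 11, start: 10, duration: 10 },
//     { sn: 12, start: 20, duration: 10 }
//   ];
//   findFragmentByPTS(frags[0], frags, 9.991, 0.25); // => frags[1] (sn 11)
//   findFragmentByPTS(null, frags, 25, 0.25);        // => frags[2] (sn 12) via binary search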
7016/**
7017 * The test function used by findFragmentByPTS's BinarySearch to look for the best match to the current buffer conditions.
7018 * @param {*} candidate - The fragment to test
7019 * @param {number} [bufferEnd = 0] - The end of the current buffered range the playhead is currently within
7020 * @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start can be within in order to be considered contiguous
7021 * @returns {number} - 0 if it matches, 1 if too low, -1 if too high
7022 */
7023
7024function fragmentWithinToleranceTest(bufferEnd, maxFragLookUpTolerance, candidate) {
7025 if (bufferEnd === void 0) {
7026 bufferEnd = 0;
7027 }
7028
7029 if (maxFragLookUpTolerance === void 0) {
7030 maxFragLookUpTolerance = 0;
7031 }
7032
7033 // offset should be within fragment boundary - config.maxFragLookUpTolerance
7034 // this is to cope with situations like
7035 // bufferEnd = 9.991
7036  // frag[0] : [0,10]
7037 // frag[1] : [10,20]
7038 // bufferEnd is within frag[0] range ... although what we are expecting is to return frag[1] here
7039 // frag start frag start+duration
7040 // |-----------------------------|
7041 // <---> <--->
7042 // ...--------><-----------------------------><---------....
7043 // previous frag matching fragment next frag
7044 // return -1 return 0 return 1
7045 // logger.log(`level/sn/start/end/bufEnd:${level}/${candidate.sn}/${candidate.start}/${(candidate.start+candidate.duration)}/${bufferEnd}`);
7046 // Set the lookup tolerance to be small enough to detect the current segment - ensures we don't skip over very small segments
7047 var candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0));
7048
7049 if (candidate.start + candidate.duration - candidateLookupTolerance <= bufferEnd) {
7050 return 1;
7051 } else if (candidate.start - candidateLookupTolerance > bufferEnd && candidate.start) {
7052 // if maxFragLookUpTolerance will have negative value then don't return -1 for first element
7053 return -1;
7054 }
7055
7056 return 0;
7057}
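// Illustrative sketch (not part of the bundle): the bufferEnd = 9.991 situation from the
// comment above, with maxFragLookUpTolerance = 0.25:
//
//   fragmentWithinToleranceTest(9.991, 0.25, { start: 0, duration: 10 });  // => 1 (frag[0] is "too low")
//   fragmentWithinToleranceTest(9.991, 0.25, { start: 10, duration: 10 }); // => 0 (frag[1] matches)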
7058/**
7059 * The test function used by the findFragmentByPdt's BinarySearch to look for the best match to the current buffer conditions.
7060 * This function tests the candidate's program date time values, as represented in Unix time
7061 * @param {*} candidate - The fragment to test
7062 * @param {number} [pdtBufferEnd = 0] - The Unix time representing the end of the current buffered range
7063 * @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start can be within in order to be considered contiguous
7064 * @returns {boolean} True if contiguous, false otherwise
7065 */
7066
7067function pdtWithinToleranceTest(pdtBufferEnd, maxFragLookUpTolerance, candidate) {
7068 var candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0)) * 1000; // endProgramDateTime can be null, default to zero
7069
7070 var endProgramDateTime = candidate.endProgramDateTime || 0;
7071 return endProgramDateTime - candidateLookupTolerance > pdtBufferEnd;
7072}
7073function findFragWithCC(fragments, cc) {
7074 return _utils_binary_search__WEBPACK_IMPORTED_MODULE_1__["default"].search(fragments, function (candidate) {
7075 if (candidate.cc < cc) {
7076 return 1;
7077 } else if (candidate.cc > cc) {
7078 return -1;
7079 } else {
7080 return 0;
7081 }
7082 });
7083}
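// --- Editorial example (not part of the hls.js bundle) ---
// A minimal sketch of findFragWithCC: fragments are ordered by discontinuity counter (cc),
// and the binary search returns an entry with the requested cc, or null if none exists.
// Hypothetical minimal fragment objects, for illustration only.
function exampleFindFragWithCC() {
  var frags = [{ cc: 0 }, { cc: 0 }, { cc: 1 }, { cc: 2 }];
  return findFragWithCC(frags, 1); // => a { cc: 1 } entry (null if no fragment has cc 1)
}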
7084
7085/***/ }),
7086
7087/***/ "./src/controller/fragment-tracker.ts":
7088/*!********************************************!*\
7089 !*** ./src/controller/fragment-tracker.ts ***!
7090 \********************************************/
7091/*! exports provided: FragmentState, FragmentTracker */
7092/***/ (function(module, __webpack_exports__, __webpack_require__) {
7093__webpack_require__.r(__webpack_exports__);
7094/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "FragmentState", function() { return FragmentState; });
7095/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "FragmentTracker", function() { return FragmentTracker; });
7096/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
7097/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
7098
7099
7100var FragmentState;
7101
7102(function (FragmentState) {
7103 FragmentState["NOT_LOADED"] = "NOT_LOADED";
7104 FragmentState["BACKTRACKED"] = "BACKTRACKED";
7105 FragmentState["APPENDING"] = "APPENDING";
7106 FragmentState["PARTIAL"] = "PARTIAL";
7107 FragmentState["OK"] = "OK";
7108})(FragmentState || (FragmentState = {}));
7109
7110var FragmentTracker = /*#__PURE__*/function () {
7111 function FragmentTracker(hls) {
7112 this.activeFragment = null;
7113 this.activeParts = null;
7114 this.fragments = Object.create(null);
7115 this.timeRanges = Object.create(null);
7116 this.bufferPadding = 0.2;
7117 this.hls = void 0;
7118 this.hls = hls;
7119
7120 this._registerListeners();
7121 }
7122
7123 var _proto = FragmentTracker.prototype;
7124
7125 _proto._registerListeners = function _registerListeners() {
7126 var hls = this.hls;
7127 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_APPENDED, this.onBufferAppended, this);
7128 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
7129 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_LOADED, this.onFragLoaded, this);
7130 };
7131
7132 _proto._unregisterListeners = function _unregisterListeners() {
7133 var hls = this.hls;
7134 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_APPENDED, this.onBufferAppended, this);
7135 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
7136 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_LOADED, this.onFragLoaded, this);
7137 };
7138
7139 _proto.destroy = function destroy() {
7140 this._unregisterListeners(); // @ts-ignore
7141
7142
7143 this.fragments = this.timeRanges = null;
7144 }
7145 /**
7146 * Return a Fragment with an appended range that matches the position and levelType.
7147 * If no matching Fragment is found, return null
7148 */
7149 ;
7150
7151 _proto.getAppendedFrag = function getAppendedFrag(position, levelType) {
7152 if (levelType === _types_loader__WEBPACK_IMPORTED_MODULE_1__["PlaylistLevelType"].MAIN) {
7153 var activeFragment = this.activeFragment,
7154 activeParts = this.activeParts;
7155
7156 if (!activeFragment) {
7157 return null;
7158 }
7159
7160 if (activeParts) {
7161 for (var i = activeParts.length; i--;) {
7162 var activePart = activeParts[i];
7163 var appendedPTS = activePart ? activePart.end : activeFragment.appendedPTS;
7164
7165 if (activePart.start <= position && appendedPTS !== undefined && position <= appendedPTS) {
7166 // 9 is a magic number. remove older parts from the lookup after a match, but keep a few so short seeks back still resolve.
7167 if (i > 9) {
7168 this.activeParts = activeParts.slice(i - 9);
7169 }
7170
7171 return activePart;
7172 }
7173 }
7174 } else if (activeFragment.start <= position && activeFragment.appendedPTS !== undefined && position <= activeFragment.appendedPTS) {
7175 return activeFragment;
7176 }
7177 }
7178
7179 return this.getBufferedFrag(position, levelType);
7180 }
7181 /**
7182 * Return a buffered Fragment that matches the position and levelType.
7183 * A buffered Fragment is one whose loading, parsing and appending is done (completed or "partial" meaning aborted).
7184 * If no matching Fragment is found, return null
7185 */
7186 ;
7187
7188 _proto.getBufferedFrag = function getBufferedFrag(position, levelType) {
7189 var fragments = this.fragments;
7190 var keys = Object.keys(fragments);
7191
7192 for (var i = keys.length; i--;) {
7193 var fragmentEntity = fragments[keys[i]];
7194
7195 if ((fragmentEntity === null || fragmentEntity === void 0 ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
7196 var frag = fragmentEntity.body;
7197
7198 if (frag.start <= position && position <= frag.end) {
7199 return frag;
7200 }
7201 }
7202 }
7203
7204 return null;
7205 }
7206 /**
7207 * Partial fragments affected by coded frame eviction will be removed
7208 * The browser will unload parts of the buffer to free up memory for new buffer data
7209 * Fragments will need to be reloaded when the buffer is freed up; removing partial fragments allows them to reload (since there might be parts that are still playable)
7210 */
7211 ;
7212
7213 _proto.detectEvictedFragments = function detectEvictedFragments(elementaryStream, timeRange, playlistType) {
7214 var _this = this;
7215
7216 // Check if any flagged fragments have been unloaded
7217 Object.keys(this.fragments).forEach(function (key) {
7218 var fragmentEntity = _this.fragments[key];
7219
7220 if (!fragmentEntity) {
7221 return;
7222 }
7223
7224 if (!fragmentEntity.buffered) {
7225 if (fragmentEntity.body.type === playlistType) {
7226 _this.removeFragment(fragmentEntity.body);
7227 }
7228
7229 return;
7230 }
7231
7232 var esData = fragmentEntity.range[elementaryStream];
7233
7234 if (!esData) {
7235 return;
7236 }
7237
7238 esData.time.some(function (time) {
7239 var isNotBuffered = !_this.isTimeBuffered(time.startPTS, time.endPTS, timeRange);
7240
7241 if (isNotBuffered) {
7242 // Unregister partial fragment as it needs to load again to be reused
7243 _this.removeFragment(fragmentEntity.body);
7244 }
7245
7246 return isNotBuffered;
7247 });
7248 });
7249 }
7250 /**
7251 * Checks if the fragment passed in is loaded in the buffer properly
7252 * Partially loaded fragments will be registered as a partial fragment
7253 */
7254 ;
7255
7256 _proto.detectPartialFragments = function detectPartialFragments(data) {
7257 var _this2 = this;
7258
7259 var timeRanges = this.timeRanges;
7260 var frag = data.frag,
7261 part = data.part;
7262
7263 if (!timeRanges || frag.sn === 'initSegment') {
7264 return;
7265 }
7266
7267 var fragKey = getFragmentKey(frag);
7268 var fragmentEntity = this.fragments[fragKey];
7269
7270 if (!fragmentEntity) {
7271 return;
7272 }
7273
7274 Object.keys(timeRanges).forEach(function (elementaryStream) {
7275 var streamInfo = frag.elementaryStreams[elementaryStream];
7276
7277 if (!streamInfo) {
7278 return;
7279 }
7280
7281 var timeRange = timeRanges[elementaryStream];
7282 var partial = part !== null || streamInfo.partial === true;
7283 fragmentEntity.range[elementaryStream] = _this2.getBufferedTimes(frag, part, partial, timeRange);
7284 });
7285 fragmentEntity.backtrack = fragmentEntity.loaded = null;
7286
7287 if (Object.keys(fragmentEntity.range).length) {
7288 fragmentEntity.buffered = true;
7289 } else {
7290 // remove fragment if nothing was appended
7291 this.removeFragment(fragmentEntity.body);
7292 }
7293 };
7294
7295 _proto.fragBuffered = function fragBuffered(frag) {
7296 var fragKey = getFragmentKey(frag);
7297 var fragmentEntity = this.fragments[fragKey];
7298
7299 if (fragmentEntity) {
7300 fragmentEntity.backtrack = fragmentEntity.loaded = null;
7301 fragmentEntity.buffered = true;
7302 }
7303 };
7304
7305 _proto.getBufferedTimes = function getBufferedTimes(fragment, part, partial, timeRange) {
7306 var buffered = {
7307 time: [],
7308 partial: partial
7309 };
7310 var startPTS = part ? part.start : fragment.start;
7311 var endPTS = part ? part.end : fragment.end;
7312 var minEndPTS = fragment.minEndPTS || endPTS;
7313 var maxStartPTS = fragment.maxStartPTS || startPTS;
7314
7315 for (var i = 0; i < timeRange.length; i++) {
7316 var startTime = timeRange.start(i) - this.bufferPadding;
7317 var endTime = timeRange.end(i) + this.bufferPadding;
7318
7319 if (maxStartPTS >= startTime && minEndPTS <= endTime) {
7320 // Fragment is entirely contained in buffer
7321 // No need to check the other timeRange times since it's completely playable
7322 buffered.time.push({
7323 startPTS: Math.max(startPTS, timeRange.start(i)),
7324 endPTS: Math.min(endPTS, timeRange.end(i))
7325 });
7326 break;
7327 } else if (startPTS < endTime && endPTS > startTime) {
7328 buffered.partial = true; // Check for intersection with buffer
7329 // Get playable sections of the fragment
7330
7331 buffered.time.push({
7332 startPTS: Math.max(startPTS, timeRange.start(i)),
7333 endPTS: Math.min(endPTS, timeRange.end(i))
7334 });
7335 } else if (endPTS <= startTime) {
7336 // No need to check the rest of the timeRange as it is in order
7337 break;
7338 }
7339 }
7340
7341 return buffered;
7342 }
7343 /**
7344 * Gets the partial fragment for a certain time
7345 */
7346 ;
7347
7348 _proto.getPartialFragment = function getPartialFragment(time) {
7349 var bestFragment = null;
7350 var timePadding;
7351 var startTime;
7352 var endTime;
7353 var bestOverlap = 0;
7354 var bufferPadding = this.bufferPadding,
7355 fragments = this.fragments;
7356 Object.keys(fragments).forEach(function (key) {
7357 var fragmentEntity = fragments[key];
7358
7359 if (!fragmentEntity) {
7360 return;
7361 }
7362
7363 if (isPartial(fragmentEntity)) {
7364 startTime = fragmentEntity.body.start - bufferPadding;
7365 endTime = fragmentEntity.body.end + bufferPadding;
7366
7367 if (time >= startTime && time <= endTime) {
7368 // Use the fragment that has the most padding from start and end time
7369 timePadding = Math.min(time - startTime, endTime - time);
7370
7371 if (bestOverlap <= timePadding) {
7372 bestFragment = fragmentEntity.body;
7373 bestOverlap = timePadding;
7374 }
7375 }
7376 }
7377 });
7378 return bestFragment;
7379 };
7380
7381 _proto.getState = function getState(fragment) {
7382 var fragKey = getFragmentKey(fragment);
7383 var fragmentEntity = this.fragments[fragKey];
7384
7385 if (fragmentEntity) {
7386 if (!fragmentEntity.buffered) {
7387 if (fragmentEntity.backtrack) {
7388 return FragmentState.BACKTRACKED;
7389 }
7390
7391 return FragmentState.APPENDING;
7392 } else if (isPartial(fragmentEntity)) {
7393 return FragmentState.PARTIAL;
7394 } else {
7395 return FragmentState.OK;
7396 }
7397 }
7398
7399 return FragmentState.NOT_LOADED;
7400 };
7401
7402 _proto.backtrack = function backtrack(frag, data) {
7403 var fragKey = getFragmentKey(frag);
7404 var fragmentEntity = this.fragments[fragKey];
7405
7406 if (!fragmentEntity || fragmentEntity.backtrack) {
7407 return null;
7408 }
7409
7410 var backtrack = fragmentEntity.backtrack = data ? data : fragmentEntity.loaded;
7411 fragmentEntity.loaded = null;
7412 return backtrack;
7413 };
7414
7415 _proto.getBacktrackData = function getBacktrackData(fragment) {
7416 var fragKey = getFragmentKey(fragment);
7417 var fragmentEntity = this.fragments[fragKey];
7418
7419 if (fragmentEntity) {
7420 var _backtrack$payload;
7421
7422 var backtrack = fragmentEntity.backtrack; // If data was already sent to the Worker it is detached and no longer available
7423
7424 if (backtrack !== null && backtrack !== void 0 && (_backtrack$payload = backtrack.payload) !== null && _backtrack$payload !== void 0 && _backtrack$payload.byteLength) {
7425 return backtrack;
7426 } else {
7427 this.removeFragment(fragment);
7428 }
7429 }
7430
7431 return null;
7432 };
7433
7434 _proto.isTimeBuffered = function isTimeBuffered(startPTS, endPTS, timeRange) {
7435 var startTime;
7436 var endTime;
7437
7438 for (var i = 0; i < timeRange.length; i++) {
7439 startTime = timeRange.start(i) - this.bufferPadding;
7440 endTime = timeRange.end(i) + this.bufferPadding;
7441
7442 if (startPTS >= startTime && endPTS <= endTime) {
7443 return true;
7444 }
7445
7446 if (endPTS <= startTime) {
7447 // No need to check the rest of the timeRange as it is in order
7448 return false;
7449 }
7450 }
7451
7452 return false;
7453 };
7454
7455 _proto.onFragLoaded = function onFragLoaded(event, data) {
7456 var frag = data.frag,
7457 part = data.part; // don't track initsegment (for which sn is not a number)
7458 // don't track frags used for bitrateTest, they're irrelevant.
7459 // don't track parts for memory efficiency
7460
7461 if (frag.sn === 'initSegment' || frag.bitrateTest || part) {
7462 return;
7463 }
7464
7465 var fragKey = getFragmentKey(frag);
7466 this.fragments[fragKey] = {
7467 body: frag,
7468 loaded: data,
7469 backtrack: null,
7470 buffered: false,
7471 range: Object.create(null)
7472 };
7473 };
7474
7475 _proto.onBufferAppended = function onBufferAppended(event, data) {
7476 var _this3 = this;
7477
7478 var frag = data.frag,
7479 part = data.part,
7480 timeRanges = data.timeRanges;
7481
7482 if (frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_1__["PlaylistLevelType"].MAIN) {
7483 this.activeFragment = frag;
7484
7485 if (part) {
7486 var activeParts = this.activeParts;
7487
7488 if (!activeParts) {
7489 this.activeParts = activeParts = [];
7490 }
7491
7492 activeParts.push(part);
7493 } else {
7494 this.activeParts = null;
7495 }
7496 } // Store the latest timeRanges loaded in the buffer
7497
7498
7499 this.timeRanges = timeRanges;
7500 Object.keys(timeRanges).forEach(function (elementaryStream) {
7501 var timeRange = timeRanges[elementaryStream];
7502
7503 _this3.detectEvictedFragments(elementaryStream, timeRange);
7504
7505 if (!part) {
7506 for (var i = 0; i < timeRange.length; i++) {
7507 frag.appendedPTS = Math.max(timeRange.end(i), frag.appendedPTS || 0);
7508 }
7509 }
7510 });
7511 };
7512
7513 _proto.onFragBuffered = function onFragBuffered(event, data) {
7514 this.detectPartialFragments(data);
7515 };
7516
7517 _proto.hasFragment = function hasFragment(fragment) {
7518 var fragKey = getFragmentKey(fragment);
7519 return !!this.fragments[fragKey];
7520 };
7521
7522 _proto.removeFragmentsInRange = function removeFragmentsInRange(start, end, playlistType) {
7523 var _this4 = this;
7524
7525 Object.keys(this.fragments).forEach(function (key) {
7526 var fragmentEntity = _this4.fragments[key];
7527
7528 if (!fragmentEntity) {
7529 return;
7530 }
7531
7532 if (fragmentEntity.buffered) {
7533 var frag = fragmentEntity.body;
7534
7535 if (frag.type === playlistType && frag.start < end && frag.end > start) {
7536 _this4.removeFragment(frag);
7537 }
7538 }
7539 });
7540 };
7541
7542 _proto.removeFragment = function removeFragment(fragment) {
7543 var fragKey = getFragmentKey(fragment);
7544 fragment.stats.loaded = 0;
7545 fragment.clearElementaryStreamInfo();
7546 delete this.fragments[fragKey];
7547 };
7548
7549 _proto.removeAllFragments = function removeAllFragments() {
7550 this.fragments = Object.create(null);
7551 this.activeFragment = null;
7552 this.activeParts = null;
7553 };
7554
7555 return FragmentTracker;
7556}();
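// --- Editorial example (not part of the hls.js bundle) ---
// A minimal sketch of how a caller can query the tracker for a fragment's lifecycle state.
// `hls` and `frag` are assumed to exist (in the real player the stream controller owns the
// tracker instance); the returned strings are purely illustrative.
function exampleFragmentTrackerState(hls, frag) {
  var tracker = new FragmentTracker(hls); // subscribes to BUFFER_APPENDED / FRAG_BUFFERED / FRAG_LOADED
  switch (tracker.getState(frag)) {
    case FragmentState.NOT_LOADED:  return 'not loaded yet';
    case FragmentState.APPENDING:   return 'loaded, waiting to be buffered';
    case FragmentState.BACKTRACKED: return 'loaded, but parsing was backtracked';
    case FragmentState.PARTIAL:     return 'only partially buffered';
    case FragmentState.OK:          return 'fully buffered';
  }
}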
7557
7558function isPartial(fragmentEntity) {
7559 var _fragmentEntity$range, _fragmentEntity$range2;
7560
7561 return fragmentEntity.buffered && (((_fragmentEntity$range = fragmentEntity.range.video) === null || _fragmentEntity$range === void 0 ? void 0 : _fragmentEntity$range.partial) || ((_fragmentEntity$range2 = fragmentEntity.range.audio) === null || _fragmentEntity$range2 === void 0 ? void 0 : _fragmentEntity$range2.partial));
7562}
7563
7564function getFragmentKey(fragment) {
7565 return fragment.type + "_" + fragment.level + "_" + fragment.urlId + "_" + fragment.sn;
7566}
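// --- Editorial example (not part of the hls.js bundle) ---
// The tracker keys fragments by type, level, urlId and sn. A hypothetical main-playlist
// fragment at level 2, urlId 0, sn 31 therefore maps to the key "main_2_0_31".
function exampleFragmentKey() {
  return getFragmentKey({ type: 'main', level: 2, urlId: 0, sn: 31 }); // => "main_2_0_31"
}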
7567
7568/***/ }),
7569
7570/***/ "./src/controller/gap-controller.ts":
7571/*!******************************************!*\
7572 !*** ./src/controller/gap-controller.ts ***!
7573 \******************************************/
7574/*! exports provided: STALL_MINIMUM_DURATION_MS, MAX_START_GAP_JUMP, SKIP_BUFFER_HOLE_STEP_SECONDS, SKIP_BUFFER_RANGE_START, default */
7575/***/ (function(module, __webpack_exports__, __webpack_require__) {
7576__webpack_require__.r(__webpack_exports__);
7577/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "STALL_MINIMUM_DURATION_MS", function() { return STALL_MINIMUM_DURATION_MS; });
7578/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "MAX_START_GAP_JUMP", function() { return MAX_START_GAP_JUMP; });
7579/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "SKIP_BUFFER_HOLE_STEP_SECONDS", function() { return SKIP_BUFFER_HOLE_STEP_SECONDS; });
7580/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "SKIP_BUFFER_RANGE_START", function() { return SKIP_BUFFER_RANGE_START; });
7581/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return GapController; });
7582/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
7583/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
7584/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../events */ "./src/events.ts");
7585/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
7586
7587
7588
7589
7590var STALL_MINIMUM_DURATION_MS = 250;
7591var MAX_START_GAP_JUMP = 2.0;
7592var SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
7593var SKIP_BUFFER_RANGE_START = 0.05;
7594
7595var GapController = /*#__PURE__*/function () {
7596 function GapController(config, media, fragmentTracker, hls) {
7597 this.config = void 0;
7598 this.media = void 0;
7599 this.fragmentTracker = void 0;
7600 this.hls = void 0;
7601 this.nudgeRetry = 0;
7602 this.stallReported = false;
7603 this.stalled = null;
7604 this.moved = false;
7605 this.seeking = false;
7606 this.config = config;
7607 this.media = media;
7608 this.fragmentTracker = fragmentTracker;
7609 this.hls = hls;
7610 }
7611
7612 var _proto = GapController.prototype;
7613
7614 _proto.destroy = function destroy() {
7615 // @ts-ignore
7616 this.hls = this.fragmentTracker = this.media = null;
7617 }
7618 /**
7619 * Checks if the playhead is stuck within a gap, and if so, attempts to free it.
7620 * A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range).
7621 *
7622 * @param {number} lastCurrentTime Previously read playhead position
7623 */
7624 ;
7625
7626 _proto.poll = function poll(lastCurrentTime) {
7627 var config = this.config,
7628 media = this.media,
7629 stalled = this.stalled;
7630 var currentTime = media.currentTime,
7631 seeking = media.seeking;
7632 var seeked = this.seeking && !seeking;
7633 var beginSeek = !this.seeking && seeking;
7634 this.seeking = seeking; // The playhead is moving, no-op
7635
7636 if (currentTime !== lastCurrentTime) {
7637 this.moved = true;
7638
7639 if (stalled !== null) {
7640 // The playhead is now moving, but was previously stalled
7641 if (this.stallReported) {
7642 var _stalledDuration = self.performance.now() - stalled;
7643
7644 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn("playback not stuck anymore @" + currentTime + ", after " + Math.round(_stalledDuration) + "ms");
7645 this.stallReported = false;
7646 }
7647
7648 this.stalled = null;
7649 this.nudgeRetry = 0;
7650 }
7651
7652 return;
7653 } // Clear stalled state when beginning or finishing seeking so that we don't report stalls coming out of a seek
7654
7655
7656 if (beginSeek || seeked) {
7657 this.stalled = null;
7658 } // The playhead should not be moving
7659
7660
7661 if (media.paused || media.ended || media.playbackRate === 0 || !_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_0__["BufferHelper"].getBuffered(media).length) {
7662 return;
7663 }
7664
7665 var bufferInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_0__["BufferHelper"].bufferInfo(media, currentTime, 0);
7666 var isBuffered = bufferInfo.len > 0;
7667 var nextStart = bufferInfo.nextStart || 0; // There is no playable buffer (seeked, waiting for buffer)
7668
7669 if (!isBuffered && !nextStart) {
7670 return;
7671 }
7672
7673 if (seeking) {
7674 // Waiting for seeking in a buffered range to complete
7675 var hasEnoughBuffer = bufferInfo.len > MAX_START_GAP_JUMP; // Next buffered range is too far ahead to jump to while still seeking
7676
7677 var noBufferGap = !nextStart || nextStart - currentTime > MAX_START_GAP_JUMP && !this.fragmentTracker.getPartialFragment(currentTime);
7678
7679 if (hasEnoughBuffer || noBufferGap) {
7680 return;
7681 } // Reset moved state when seeking to a point in or before a gap
7682
7683
7684 this.moved = false;
7685 } // Skip start gaps if we haven't played, but the last poll detected the start of a stall
7686 // The additional poll gives the browser a chance to jump the gap for us
7687
7688
7689 if (!this.moved && this.stalled !== null) {
7690 var _level$details;
7691
7692 // Jump start gaps within jump threshold
7693 var startJump = Math.max(nextStart, bufferInfo.start || 0) - currentTime; // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
7694 // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
7695 // that begins over 1 target duration after the video start position.
7696
7697 var level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null;
7698 var isLive = level === null || level === void 0 ? void 0 : (_level$details = level.details) === null || _level$details === void 0 ? void 0 : _level$details.live;
7699 var maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
7700
7701 if (startJump > 0 && startJump <= maxStartGapJump) {
7702 this._trySkipBufferHole(null);
7703
7704 return;
7705 }
7706 } // Start tracking stall time
7707
7708
7709 var tnow = self.performance.now();
7710
7711 if (stalled === null) {
7712 this.stalled = tnow;
7713 return;
7714 }
7715
7716 var stalledDuration = tnow - stalled;
7717
7718 if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
7719 // Report stalling after trying to fix
7720 this._reportStall(bufferInfo.len);
7721 }
7722
7723 var bufferedWithHoles = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_0__["BufferHelper"].bufferInfo(media, currentTime, config.maxBufferHole);
7724
7725 this._tryFixBufferStall(bufferedWithHoles, stalledDuration);
7726 }
7727 /**
7728 * Detects and attempts to fix known buffer stalling issues.
7729 * @param bufferInfo - The properties of the current buffer.
7730 * @param stalledDurationMs - The amount of time Hls.js has been stalling for.
7731 * @private
7732 */
7733 ;
7734
7735 _proto._tryFixBufferStall = function _tryFixBufferStall(bufferInfo, stalledDurationMs) {
7736 var config = this.config,
7737 fragmentTracker = this.fragmentTracker,
7738 media = this.media;
7739 var currentTime = media.currentTime;
7740 var partial = fragmentTracker.getPartialFragment(currentTime);
7741
7742 if (partial) {
7743 // Try to skip over the buffer hole caused by a partial fragment
7744 // This method isn't limited by the size of the gap between buffered ranges
7745 var targetTime = this._trySkipBufferHole(partial); // we return here in this case, meaning
7746 // the branch below only executes when we don't handle a partial fragment
7747
7748
7749 if (targetTime) {
7750 return;
7751 }
7752 } // if we haven't had to skip over a buffer hole of a partial fragment
7753 // we may just have to "nudge" the playhead, as the browser decoding/rendering engine
7754 // needs to cross some sort of threshold covering all source-buffers content
7755 // to start playing properly.
7756
7757
7758 if (bufferInfo.len > config.maxBufferHole && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
7759 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('Trying to nudge playhead over buffer-hole'); // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
7760 // We only try to jump the hole if it's under the configured size
7761 // Reset stalled so as to rearm the watchdog timer
7762
7763 this.stalled = null;
7764
7765 this._tryNudgeBuffer();
7766 }
7767 }
7768 /**
7769 * Triggers a BUFFER_STALLED_ERROR event, but only once per stall period.
7770 * @param bufferLen - The playhead distance from the end of the current buffer segment.
7771 * @private
7772 */
7773 ;
7774
7775 _proto._reportStall = function _reportStall(bufferLen) {
7776 var hls = this.hls,
7777 media = this.media,
7778 stallReported = this.stallReported;
7779
7780 if (!stallReported) {
7781 // Report stalled error once
7782 this.stallReported = true;
7783 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn("Playback stalling at @" + media.currentTime + " due to low buffer (buffer=" + bufferLen + ")");
7784 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, {
7785 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
7786 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].BUFFER_STALLED_ERROR,
7787 fatal: false,
7788 buffer: bufferLen
7789 });
7790 }
7791 }
7792 /**
7793 * Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments
7794 * @param partial - The partial fragment found at the current time (where playback is stalling).
7795 * @private
7796 */
7797 ;
7798
7799 _proto._trySkipBufferHole = function _trySkipBufferHole(partial) {
7800 var config = this.config,
7801 hls = this.hls,
7802 media = this.media;
7803 var currentTime = media.currentTime;
7804 var lastEndTime = 0; // Check if currentTime is between unbuffered regions of partial fragments
7805
7806 var buffered = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_0__["BufferHelper"].getBuffered(media);
7807
7808 for (var i = 0; i < buffered.length; i++) {
7809 var startTime = buffered.start(i);
7810
7811 if (currentTime + config.maxBufferHole >= lastEndTime && currentTime < startTime) {
7812 var targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, media.currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
7813 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn("skipping hole, adjusting currentTime from " + currentTime + " to " + targetTime);
7814 this.moved = true;
7815 this.stalled = null;
7816 media.currentTime = targetTime;
7817
7818 if (partial) {
7819 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, {
7820 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
7821 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].BUFFER_SEEK_OVER_HOLE,
7822 fatal: false,
7823 reason: "fragment loaded with buffer holes, seeking from " + currentTime + " to " + targetTime,
7824 frag: partial
7825 });
7826 }
7827
7828 return targetTime;
7829 }
7830
7831 lastEndTime = buffered.end(i);
7832 }
7833
7834 return 0;
7835 }
7836 /**
7837 * Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount.
7838 * @private
7839 */
7840 ;
7841
7842 _proto._tryNudgeBuffer = function _tryNudgeBuffer() {
7843 var config = this.config,
7844 hls = this.hls,
7845 media = this.media;
7846 var currentTime = media.currentTime;
7847 var nudgeRetry = (this.nudgeRetry || 0) + 1;
7848 this.nudgeRetry = nudgeRetry;
7849
7850 if (nudgeRetry < config.nudgeMaxRetry) {
7851 var targetTime = currentTime + nudgeRetry * config.nudgeOffset; // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
7852
7853 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn("Nudging 'currentTime' from " + currentTime + " to " + targetTime);
7854 media.currentTime = targetTime;
7855 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, {
7856 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
7857 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].BUFFER_NUDGE_ON_STALL,
7858 fatal: false
7859 });
7860 } else {
7861 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].error("Playhead still not moving while enough data buffered @" + currentTime + " after " + config.nudgeMaxRetry + " nudges");
7862 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, {
7863 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
7864 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].BUFFER_STALLED_ERROR,
7865 fatal: true
7866 });
7867 }
7868 };
7869
7870 return GapController;
7871}();
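// --- Editorial example (not part of the hls.js bundle) ---
// A minimal sketch of how GapController.poll() is driven: the stream controller calls it
// on a periodic tick, passing the previously observed currentTime so the controller can
// tell whether the playhead advanced. `hls`, `media` and `fragmentTracker` are assumed to
// exist and be wired up already; the 100 ms interval is illustrative.
function exampleGapControllerPolling(hls, media, fragmentTracker) {
  var gapController = new GapController(hls.config, media, fragmentTracker, hls);
  var lastCurrentTime = 0;
  return setInterval(function () {
    gapController.poll(lastCurrentTime);
    lastCurrentTime = media.currentTime;
  }, 100);
}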
7872
7873
7874
7875/***/ }),
7876
7877/***/ "./src/controller/id3-track-controller.ts":
7878/*!************************************************!*\
7879 !*** ./src/controller/id3-track-controller.ts ***!
7880 \************************************************/
7881/*! exports provided: default */
7882/***/ (function(module, __webpack_exports__, __webpack_require__) {
7883__webpack_require__.r(__webpack_exports__);
7884/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
7885/* harmony import */ var _utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/texttrack-utils */ "./src/utils/texttrack-utils.ts");
7886/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
7887
7888
7889
7890var MIN_CUE_DURATION = 0.25;
7891
7892var ID3TrackController = /*#__PURE__*/function () {
7893 function ID3TrackController(hls) {
7894 this.hls = void 0;
7895 this.id3Track = null;
7896 this.media = null;
7897 this.hls = hls;
7898
7899 this._registerListeners();
7900 }
7901
7902 var _proto = ID3TrackController.prototype;
7903
7904 _proto.destroy = function destroy() {
7905 this._unregisterListeners();
7906 };
7907
7908 _proto._registerListeners = function _registerListeners() {
7909 var hls = this.hls;
7910 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
7911 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
7912 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
7913 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
7914 };
7915
7916 _proto._unregisterListeners = function _unregisterListeners() {
7917 var hls = this.hls;
7918 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
7919 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
7920 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
7921 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
7922 } // Add ID3 metadata text track.
7923 ;
7924
7925 _proto.onMediaAttached = function onMediaAttached(event, data) {
7926 this.media = data.media;
7927 };
7928
7929 _proto.onMediaDetaching = function onMediaDetaching() {
7930 if (!this.id3Track) {
7931 return;
7932 }
7933
7934 Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__["clearCurrentCues"])(this.id3Track);
7935 this.id3Track = null;
7936 this.media = null;
7937 };
7938
7939 _proto.getID3Track = function getID3Track(textTracks) {
7940 if (!this.media) {
7941 return;
7942 }
7943
7944 for (var i = 0; i < textTracks.length; i++) {
7945 var textTrack = textTracks[i];
7946
7947 if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
7948 // send 'addtrack' when reusing the textTrack for metadata,
7949 // same as what we do for captions
7950 Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__["sendAddTrackEvent"])(textTrack, this.media);
7951 return textTrack;
7952 }
7953 }
7954
7955 return this.media.addTextTrack('metadata', 'id3');
7956 };
7957
7958 _proto.onFragParsingMetadata = function onFragParsingMetadata(event, data) {
7959 if (!this.media) {
7960 return;
7961 }
7962
7963 var fragment = data.frag;
7964 var samples = data.samples; // create track dynamically
7965
7966 if (!this.id3Track) {
7967 this.id3Track = this.getID3Track(this.media.textTracks);
7968 this.id3Track.mode = 'hidden';
7969 } // Attempt to recreate Safari functionality by creating
7970 // WebKitDataCue objects when available and store the decoded
7971 // ID3 data in the value property of the cue
7972
7973
7974 var Cue = self.WebKitDataCue || self.VTTCue || self.TextTrackCue;
7975
7976 for (var i = 0; i < samples.length; i++) {
7977 var frames = _demux_id3__WEBPACK_IMPORTED_MODULE_2__["getID3Frames"](samples[i].data);
7978
7979 if (frames) {
7980 var startTime = samples[i].pts;
7981 var endTime = i < samples.length - 1 ? samples[i + 1].pts : fragment.end;
7982 var timeDiff = endTime - startTime;
7983
7984 if (timeDiff <= 0) {
7985 endTime = startTime + MIN_CUE_DURATION;
7986 }
7987
7988 for (var j = 0; j < frames.length; j++) {
7989 var frame = frames[j]; // Safari doesn't put the timestamp frame in the TextTrack
7990
7991 if (!_demux_id3__WEBPACK_IMPORTED_MODULE_2__["isTimeStampFrame"](frame)) {
7992 var cue = new Cue(startTime, endTime, '');
7993 cue.value = frame;
7994 this.id3Track.addCue(cue);
7995 }
7996 }
7997 }
7998 }
7999 };
8000
8001 _proto.onBufferFlushing = function onBufferFlushing(event, _ref) {
8002 var startOffset = _ref.startOffset,
8003 endOffset = _ref.endOffset,
8004 type = _ref.type;
8005
8006 if (!type || type === 'audio') {
8007 // id3 cues come from parsed audio only; remove cues when the audio buffer is cleared
8008 var id3Track = this.id3Track;
8009
8010 if (id3Track) {
8011 Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__["removeCuesInRange"])(id3Track, startOffset, endOffset);
8012 }
8013 }
8014 };
8015
8016 return ID3TrackController;
8017}();
8018
8019/* harmony default export */ __webpack_exports__["default"] = (ID3TrackController);
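// --- Editorial example (not part of the hls.js bundle) ---
// A minimal sketch of consuming the cues this controller adds: it creates (or reuses) a
// hidden TextTrack with kind "metadata" and label "id3", and stores each decoded ID3 frame
// on the cue's `value` property. `video` is assumed to be the attached media element.
function exampleReadId3Cues(video) {
  var tracks = video.textTracks;
  for (var i = 0; i < tracks.length; i++) {
    var track = tracks[i];
    if (track.kind === 'metadata' && track.label === 'id3') {
      track.addEventListener('cuechange', function () {
        var cues = this.activeCues || [];
        for (var j = 0; j < cues.length; j++) {
          console.log('active id3 frame', cues[j].value); // decoded frame object (shape varies by frame type)
        }
      });
    }
  }
}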
8020
8021/***/ }),
8022
8023/***/ "./src/controller/latency-controller.ts":
8024/*!**********************************************!*\
8025 !*** ./src/controller/latency-controller.ts ***!
8026 \**********************************************/
8027/*! exports provided: default */
8028/***/ (function(module, __webpack_exports__, __webpack_require__) {
8029__webpack_require__.r(__webpack_exports__);
8030/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return LatencyController; });
8031/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
8032/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
8033/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
8034function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
8035
8036function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
8037
8038
8039
8040
8041
8042var LatencyController = /*#__PURE__*/function () {
8043 function LatencyController(hls) {
8044 var _this = this;
8045
8046 this.hls = void 0;
8047 this.config = void 0;
8048 this.media = null;
8049 this.levelDetails = null;
8050 this.currentTime = 0;
8051 this.stallCount = 0;
8052 this._latency = null;
8053
8054 this.timeupdateHandler = function () {
8055 return _this.timeupdate();
8056 };
8057
8058 this.hls = hls;
8059 this.config = hls.config;
8060 this.registerListeners();
8061 }
8062
8063 var _proto = LatencyController.prototype;
8064
8065 _proto.destroy = function destroy() {
8066 this.unregisterListeners();
8067 this.onMediaDetaching();
8068 this.levelDetails = null; // @ts-ignore
8069
8070 this.hls = this.timeupdateHandler = null;
8071 };
8072
8073 _proto.registerListeners = function registerListeners() {
8074 this.hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
8075 this.hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
8076 this.hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
8077 this.hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_UPDATED, this.onLevelUpdated, this);
8078 this.hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, this.onError, this);
8079 };
8080
8081 _proto.unregisterListeners = function unregisterListeners() {
8082 this.hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHED, this.onMediaAttached);
8083 this.hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching);
8084 this.hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading);
8085 this.hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_UPDATED, this.onLevelUpdated);
8086 this.hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, this.onError);
8087 };
8088
8089 _proto.onMediaAttached = function onMediaAttached(event, data) {
8090 this.media = data.media;
8091 this.media.addEventListener('timeupdate', this.timeupdateHandler);
8092 };
8093
8094 _proto.onMediaDetaching = function onMediaDetaching() {
8095 if (this.media) {
8096 this.media.removeEventListener('timeupdate', this.timeupdateHandler);
8097 this.media = null;
8098 }
8099 };
8100
8101 _proto.onManifestLoading = function onManifestLoading() {
8102 this.levelDetails = null;
8103 this._latency = null;
8104 this.stallCount = 0;
8105 };
8106
8107 _proto.onLevelUpdated = function onLevelUpdated(event, _ref) {
8108 var details = _ref.details;
8109 this.levelDetails = details;
8110
8111 if (details.advanced) {
8112 this.timeupdate();
8113 }
8114
8115 if (!details.live && this.media) {
8116 this.media.removeEventListener('timeupdate', this.timeupdateHandler);
8117 }
8118 };
8119
8120 _proto.onError = function onError(event, data) {
8121 if (data.details !== _errors__WEBPACK_IMPORTED_MODULE_0__["ErrorDetails"].BUFFER_STALLED_ERROR) {
8122 return;
8123 }
8124
8125 this.stallCount++;
8126 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('[playback-rate-controller]: Stall detected, adjusting target latency');
8127 };
8128
8129 _proto.timeupdate = function timeupdate() {
8130 var media = this.media,
8131 levelDetails = this.levelDetails;
8132
8133 if (!media || !levelDetails) {
8134 return;
8135 }
8136
8137 this.currentTime = media.currentTime;
8138 var latency = this.computeLatency();
8139
8140 if (latency === null) {
8141 return;
8142 }
8143
8144 this._latency = latency; // Adapt playbackRate to meet target latency in low-latency mode
8145
8146 var _this$config = this.config,
8147 lowLatencyMode = _this$config.lowLatencyMode,
8148 maxLiveSyncPlaybackRate = _this$config.maxLiveSyncPlaybackRate;
8149
8150 if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1) {
8151 return;
8152 }
8153
8154 var targetLatency = this.targetLatency;
8155
8156 if (targetLatency === null) {
8157 return;
8158 }
8159
8160 var distanceFromTarget = latency - targetLatency; // Only adjust playbackRate when within one target duration of targetLatency
8161 // and more than one second from under-buffering.
8162 // Playback further than one target duration from target can be considered DVR playback.
8163
8164 var liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
8165 var inLiveRange = distanceFromTarget < liveMinLatencyDuration;
8166
8167 if (levelDetails.live && inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
8168 var max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
8169 var rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
8170 media.playbackRate = Math.min(max, Math.max(1, rate));
8171 } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
8172 media.playbackRate = 1;
8173 }
8174 };
8175
8176 _proto.estimateLiveEdge = function estimateLiveEdge() {
8177 var levelDetails = this.levelDetails;
8178
8179 if (levelDetails === null) {
8180 return null;
8181 }
8182
8183 return levelDetails.edge + levelDetails.age;
8184 };
8185
8186 _proto.computeLatency = function computeLatency() {
8187 var liveEdge = this.estimateLiveEdge();
8188
8189 if (liveEdge === null) {
8190 return null;
8191 }
8192
8193 return liveEdge - this.currentTime;
8194 };
8195
8196 _createClass(LatencyController, [{
8197 key: "latency",
8198 get: function get() {
8199 return this._latency || 0;
8200 }
8201 }, {
8202 key: "maxLatency",
8203 get: function get() {
8204 var config = this.config,
8205 levelDetails = this.levelDetails;
8206
8207 if (config.liveMaxLatencyDuration !== undefined) {
8208 return config.liveMaxLatencyDuration;
8209 }
8210
8211 return levelDetails ? config.liveMaxLatencyDurationCount * levelDetails.targetduration : 0;
8212 }
8213 }, {
8214 key: "targetLatency",
8215 get: function get() {
8216 var levelDetails = this.levelDetails;
8217
8218 if (levelDetails === null) {
8219 return null;
8220 }
8221
8222 var holdBack = levelDetails.holdBack,
8223 partHoldBack = levelDetails.partHoldBack,
8224 targetduration = levelDetails.targetduration;
8225 var _this$config2 = this.config,
8226 liveSyncDuration = _this$config2.liveSyncDuration,
8227 liveSyncDurationCount = _this$config2.liveSyncDurationCount,
8228 lowLatencyMode = _this$config2.lowLatencyMode;
8229 var userConfig = this.hls.userConfig;
8230 var targetLatency = lowLatencyMode ? partHoldBack || holdBack : holdBack;
8231
8232 if (userConfig.liveSyncDuration || userConfig.liveSyncDurationCount || targetLatency === 0) {
8233 targetLatency = liveSyncDuration !== undefined ? liveSyncDuration : liveSyncDurationCount * targetduration;
8234 }
8235
8236 var maxLiveSyncOnStallIncrease = targetduration;
8237 var liveSyncOnStallIncrease = 1.0;
8238 return targetLatency + Math.min(this.stallCount * liveSyncOnStallIncrease, maxLiveSyncOnStallIncrease);
8239 }
8240 }, {
8241 key: "liveSyncPosition",
8242 get: function get() {
8243 var liveEdge = this.estimateLiveEdge();
8244 var targetLatency = this.targetLatency;
8245 var levelDetails = this.levelDetails;
8246
8247 if (liveEdge === null || targetLatency === null || levelDetails === null) {
8248 return null;
8249 }
8250
8251 var edge = levelDetails.edge;
8252 var syncPosition = liveEdge - targetLatency - this.edgeStalled;
8253 var min = edge - levelDetails.totalduration;
8254 var max = edge - (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration);
8255 return Math.min(Math.max(min, syncPosition), max);
8256 }
8257 }, {
8258 key: "drift",
8259 get: function get() {
8260 var levelDetails = this.levelDetails;
8261
8262 if (levelDetails === null) {
8263 return 1;
8264 }
8265
8266 return levelDetails.drift;
8267 }
8268 }, {
8269 key: "edgeStalled",
8270 get: function get() {
8271 var levelDetails = this.levelDetails;
8272
8273 if (levelDetails === null) {
8274 return 0;
8275 }
8276
8277 var maxLevelUpdateAge = (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration) * 3;
8278 return Math.max(levelDetails.age - maxLevelUpdateAge, 0);
8279 }
8280 }, {
8281 key: "forwardBufferLength",
8282 get: function get() {
8283 var media = this.media,
8284 levelDetails = this.levelDetails;
8285
8286 if (!media || !levelDetails) {
8287 return 0;
8288 }
8289
8290 var bufferedRanges = media.buffered.length;
8291 return bufferedRanges ? media.buffered.end(bufferedRanges - 1) : levelDetails.edge - this.currentTime;
8292 }
8293 }]);
8294
8295 return LatencyController;
8296}();
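// --- Editorial example (not part of the hls.js bundle) ---
// A standalone sketch of the low-latency catch-up rate computed in timeupdate() above:
// rate = round(2 / (1 + e^(-0.75 * distanceFromTarget - edgeStalled)) * 20) / 20, clamped
// to [1, min(2, maxLiveSyncPlaybackRate)]. With edgeStalled = 0 and the playhead 2 s behind
// target, the raw rate is 1.65; with maxLiveSyncPlaybackRate = 1.5 the result is capped at 1.5.
function exampleLiveCatchUpRate(distanceFromTarget, edgeStalled, maxLiveSyncPlaybackRate) {
  var max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
  var rate = Math.round((2 / (1 + Math.exp(-0.75 * distanceFromTarget - edgeStalled))) * 20) / 20;
  return Math.min(max, Math.max(1, rate));
}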
8297
8298
8299
8300/***/ }),
8301
8302/***/ "./src/controller/level-controller.ts":
8303/*!********************************************!*\
8304 !*** ./src/controller/level-controller.ts ***!
8305 \********************************************/
8306/*! exports provided: default */
8307/***/ (function(module, __webpack_exports__, __webpack_require__) {
8308__webpack_require__.r(__webpack_exports__);
8309/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return LevelController; });
8310/* harmony import */ var _types_level__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../types/level */ "./src/types/level.ts");
8311/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
8312/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
8313/* harmony import */ var _utils_codecs__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/codecs */ "./src/utils/codecs.ts");
8314/* harmony import */ var _level_helper__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./level-helper */ "./src/controller/level-helper.ts");
8315/* harmony import */ var _base_playlist_controller__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./base-playlist-controller */ "./src/controller/base-playlist-controller.ts");
8316/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
8317function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
8318
8319function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
8320
8321function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
8322
8323function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
8324
8325function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
8326
8327/*
8328 * Level Controller
8329 */
8330
8331
8332
8333
8334
8335
8336
8337var chromeOrFirefox = /chrome|firefox/.test(navigator.userAgent.toLowerCase());
8338
8339var LevelController = /*#__PURE__*/function (_BasePlaylistControll) {
8340 _inheritsLoose(LevelController, _BasePlaylistControll);
8341
8342 function LevelController(hls) {
8343 var _this;
8344
8345 _this = _BasePlaylistControll.call(this, hls, '[level-controller]') || this;
8346 _this._levels = [];
8347 _this._firstLevel = -1;
8348 _this._startLevel = void 0;
8349 _this.currentLevelIndex = -1;
8350 _this.manualLevelIndex = -1;
8351 _this.onParsedComplete = void 0;
8352
8353 _this._registerListeners();
8354
8355 return _this;
8356 }
8357
8358 var _proto = LevelController.prototype;
8359
8360 _proto._registerListeners = function _registerListeners() {
8361 var hls = this.hls;
8362 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, this.onManifestLoaded, this);
8363 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
8364 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
8365 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, this.onFragLoaded, this);
8366 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, this.onError, this);
8367 };
8368
8369 _proto._unregisterListeners = function _unregisterListeners() {
8370 var hls = this.hls;
8371 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, this.onManifestLoaded, this);
8372 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
8373 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
8374 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, this.onFragLoaded, this);
8375 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, this.onError, this);
8376 };
8377
8378 _proto.destroy = function destroy() {
8379 this._unregisterListeners();
8380
8381 this.manualLevelIndex = -1;
8382 this._levels.length = 0;
8383
8384 _BasePlaylistControll.prototype.destroy.call(this);
8385 };
8386
8387 _proto.startLoad = function startLoad() {
8388 var levels = this._levels; // clean up live level details to force reload them, and reset load errors
8389
8390 levels.forEach(function (level) {
8391 level.loadError = 0;
8392 });
8393
8394 _BasePlaylistControll.prototype.startLoad.call(this);
8395 };
8396
8397 _proto.onManifestLoaded = function onManifestLoaded(event, data) {
8398 var levels = [];
8399 var audioTracks = [];
8400 var subtitleTracks = [];
8401 var bitrateStart;
8402 var levelSet = {};
8403 var levelFromSet;
8404 var resolutionFound = false;
8405 var videoCodecFound = false;
8406 var audioCodecFound = false; // regroup redundant levels together
8407
8408 data.levels.forEach(function (levelParsed) {
8409 var attributes = levelParsed.attrs;
8410 resolutionFound = resolutionFound || !!(levelParsed.width && levelParsed.height);
8411 videoCodecFound = videoCodecFound || !!levelParsed.videoCodec;
8412 audioCodecFound = audioCodecFound || !!levelParsed.audioCodec; // erase audio codec info if browser does not support mp4a.40.34.
8413 // demuxer will autodetect codec and fallback to mpeg/audio
8414
8415 if (chromeOrFirefox && levelParsed.audioCodec && levelParsed.audioCodec.indexOf('mp4a.40.34') !== -1) {
8416 levelParsed.audioCodec = undefined;
8417 }
8418
8419 var levelKey = levelParsed.bitrate + "-" + levelParsed.attrs.RESOLUTION + "-" + levelParsed.attrs.CODECS;
8420 levelFromSet = levelSet[levelKey];
8421
8422 if (!levelFromSet) {
8423 levelFromSet = new _types_level__WEBPACK_IMPORTED_MODULE_0__["Level"](levelParsed);
8424 levelSet[levelKey] = levelFromSet;
8425 levels.push(levelFromSet);
8426 } else {
8427 levelFromSet.url.push(levelParsed.url);
8428 }
8429
8430 if (attributes) {
8431 if (attributes.AUDIO) {
8432 Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["addGroupId"])(levelFromSet, 'audio', attributes.AUDIO);
8433 }
8434
8435 if (attributes.SUBTITLES) {
8436 Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["addGroupId"])(levelFromSet, 'text', attributes.SUBTITLES);
8437 }
8438 }
8439 }); // remove audio-only level if we also have levels with video codecs or RESOLUTION signalled
8440
8441 if ((resolutionFound || videoCodecFound) && audioCodecFound) {
8442 levels = levels.filter(function (_ref) {
8443 var videoCodec = _ref.videoCodec,
8444 width = _ref.width,
8445 height = _ref.height;
8446 return !!videoCodec || !!(width && height);
8447 });
8448 } // only keep levels with supported audio/video codecs
8449
8450
8451 levels = levels.filter(function (_ref2) {
8452 var audioCodec = _ref2.audioCodec,
8453 videoCodec = _ref2.videoCodec;
8454 return (!audioCodec || Object(_utils_codecs__WEBPACK_IMPORTED_MODULE_3__["isCodecSupportedInMp4"])(audioCodec, 'audio')) && (!videoCodec || Object(_utils_codecs__WEBPACK_IMPORTED_MODULE_3__["isCodecSupportedInMp4"])(videoCodec, 'video'));
8455 });
8456
8457 if (data.audioTracks) {
8458 audioTracks = data.audioTracks.filter(function (track) {
8459 return !track.audioCodec || Object(_utils_codecs__WEBPACK_IMPORTED_MODULE_3__["isCodecSupportedInMp4"])(track.audioCodec, 'audio');
8460 }); // Assign ids after filtering as array indices by group-id
8461
8462 Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["assignTrackIdsByGroup"])(audioTracks);
8463 }
8464
8465 if (data.subtitles) {
8466 subtitleTracks = data.subtitles;
8467 Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["assignTrackIdsByGroup"])(subtitleTracks);
8468 }
8469
8470 if (levels.length > 0) {
8471 // start bitrate is the first bitrate of the manifest
8472 bitrateStart = levels[0].bitrate; // sort level on bitrate
8473
8474 levels.sort(function (a, b) {
8475 return a.bitrate - b.bitrate;
8476 });
8477 this._levels = levels; // find index of first level in sorted levels
8478
8479 for (var i = 0; i < levels.length; i++) {
8480 if (levels[i].bitrate === bitrateStart) {
8481 this._firstLevel = i;
8482 this.log("manifest loaded, " + levels.length + " level(s) found, first bitrate: " + bitrateStart);
8483 break;
8484 }
8485 } // Audio is only alternate if the manifest includes a URI along with the audio group tag,
8486 // and this is not an audio-only stream where levels contain audio-only
8487
8488
8489 var audioOnly = audioCodecFound && !videoCodecFound;
8490 var edata = {
8491 levels: levels,
8492 audioTracks: audioTracks,
8493 subtitleTracks: subtitleTracks,
8494 firstLevel: this._firstLevel,
8495 stats: data.stats,
8496 audio: audioCodecFound,
8497 video: videoCodecFound,
8498 altAudio: !audioOnly && audioTracks.some(function (t) {
8499 return !!t.url;
8500 })
8501 };
8502 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_PARSED, edata); // Initiate loading after all controllers have received MANIFEST_PARSED
8503
8504 if (this.hls.config.autoStartLoad || this.hls.forceStartLoad) {
8505 this.hls.startLoad(this.hls.config.startPosition);
8506 }
8507 } else {
8508 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
8509 type: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorTypes"].MEDIA_ERROR,
8510 details: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].MANIFEST_INCOMPATIBLE_CODECS_ERROR,
8511 fatal: true,
8512 url: data.url,
8513 reason: 'no level with compatible codecs found in manifest'
8514 });
8515 }
8516 };
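// --- Editorial example (not part of the hls.js bundle) ---
// A minimal sketch of the redundant-level grouping key used in onManifestLoaded above:
// variants sharing bitrate, RESOLUTION and CODECS are folded into one Level whose `url`
// array lists every redundant URI. The manifest entry below is hypothetical.
function exampleLevelKey(levelParsed) {
  return levelParsed.bitrate + "-" + levelParsed.attrs.RESOLUTION + "-" + levelParsed.attrs.CODECS;
}
// exampleLevelKey({ bitrate: 2000000, attrs: { RESOLUTION: '1280x720', CODECS: 'avc1.64001f,mp4a.40.2' } })
//   === "2000000-1280x720-avc1.64001f,mp4a.40.2"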
8517
8518 _proto.onError = function onError(event, data) {
8519 _BasePlaylistControll.prototype.onError.call(this, event, data);
8520
8521 if (data.fatal) {
8522 return;
8523 } // Switch to redundant level when track fails to load
8524
8525
8526 var context = data.context;
8527 var level = this._levels[this.currentLevelIndex];
8528
8529 if (context && (context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK && level.audioGroupIds && context.groupId === level.audioGroupIds[level.urlId] || context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK && level.textGroupIds && context.groupId === level.textGroupIds[level.urlId])) {
8530 this.redundantFailover(this.currentLevelIndex);
8531 return;
8532 }
8533
8534 var levelError = false;
8535 var levelSwitch = true;
8536 var levelIndex; // try to recover not fatal errors
8537
8538 switch (data.details) {
8539 case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].FRAG_LOAD_ERROR:
8540 case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].FRAG_LOAD_TIMEOUT:
8541 case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].KEY_LOAD_ERROR:
8542 case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].KEY_LOAD_TIMEOUT:
8543 if (data.frag) {
8544 var _level = this._levels[data.frag.level]; // Set levelIndex when we're out of fragment retries
8545
8546 if (_level) {
8547 _level.fragmentError++;
8548
8549 if (_level.fragmentError > this.hls.config.fragLoadingMaxRetry) {
8550 levelIndex = data.frag.level;
8551 }
8552 } else {
8553 levelIndex = data.frag.level;
8554 }
8555 }
8556
8557 break;
8558
8559 case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_LOAD_ERROR:
8560 case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_LOAD_TIMEOUT:
8561 // Do not perform level switch if an error occurred using delivery directives
8562 // Attempt to reload level without directives first
8563 if (context) {
8564 if (context.deliveryDirectives) {
8565 levelSwitch = false;
8566 }
8567
8568 levelIndex = context.level;
8569 }
8570
8571 levelError = true;
8572 break;
8573
8574 case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].REMUX_ALLOC_ERROR:
8575 levelIndex = data.level;
8576 levelError = true;
8577 break;
8578 }
8579
8580 if (levelIndex !== undefined) {
8581 this.recoverLevel(data, levelIndex, levelError, levelSwitch);
8582 }
8583 }
8584 /**
8585 * Switch to a redundant stream if any available.
8586 * If redundant stream is not available, emergency switch down if ABR mode is enabled.
8587 */
8588 ;
8589
8590 _proto.recoverLevel = function recoverLevel(errorEvent, levelIndex, levelError, levelSwitch) {
8591 var errorDetails = errorEvent.details;
8592 var level = this._levels[levelIndex];
8593 level.loadError++;
8594
8595 if (levelError) {
8596 var retrying = this.retryLoadingOrFail(errorEvent);
8597
8598 if (retrying) {
8599 // boolean used to inform stream controller not to switch back to IDLE on non-fatal error
8600 errorEvent.levelRetry = true;
8601 } else {
8602 this.currentLevelIndex = -1;
8603 return;
8604 }
8605 }
8606
8607 if (levelSwitch) {
8608 var redundantLevels = level.url.length; // Try redundant fail-over until level.loadError reaches redundantLevels
8609
8610 if (redundantLevels > 1 && level.loadError < redundantLevels) {
8611 errorEvent.levelRetry = true;
8612 this.redundantFailover(levelIndex);
8613 } else if (this.manualLevelIndex === -1) {
8614 // Search for available level in auto level selection mode, cycling from highest to lowest bitrate
8615 var nextLevel = levelIndex === 0 ? this._levels.length - 1 : levelIndex - 1;
8616
8617 if (this.currentLevelIndex !== nextLevel && this._levels[nextLevel].loadError === 0) {
8618 this.warn(errorDetails + ": switch to " + nextLevel);
8619 errorEvent.levelRetry = true;
8620 this.hls.nextAutoLevel = nextLevel;
8621 }
8622 }
8623 }
8624 };
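  // --- Illustrative usage sketch (not part of the hls.js bundle) ---
  // recoverLevel() handles non-fatal level/fragment errors internally (retry,
  // redundant fail-over, emergency down-switch). Fatal errors still surface to the
  // application; a hedged sketch of the commonly documented recovery pattern:
  function exampleErrorRecovery(hls) {
    hls.on(Hls.Events.ERROR, function (event, data) {
      if (!data.fatal) {
        return; // non-fatal errors are retried by the controllers above
      }
      if (data.type === Hls.ErrorTypes.NETWORK_ERROR) {
        hls.startLoad(); // try to restart loading after a fatal network error
      } else if (data.type === Hls.ErrorTypes.MEDIA_ERROR) {
        hls.recoverMediaError(); // try to recover a fatal media error
      } else {
        hls.destroy(); // cannot recover
      }
    });
  }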
8625
8626 _proto.redundantFailover = function redundantFailover(levelIndex) {
8627 var level = this._levels[levelIndex];
8628 var redundantLevels = level.url.length;
8629
8630 if (redundantLevels > 1) {
8631 // Update the url id of all levels so that we stay on the same set of variants when level switching
8632 var newUrlId = (level.urlId + 1) % redundantLevels;
8633 this.warn("Switching to redundant URL-id " + newUrlId);
8634
8635 this._levels.forEach(function (level) {
8636 level.urlId = newUrlId;
8637 });
8638
8639 this.level = levelIndex;
8640 }
8641 } // reset errors on the successful load of a fragment
8642 ;
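  // --- Illustrative sketch (not part of the hls.js bundle) ---
  // redundantFailover() only helps when a variant is declared more than once in the
  // multivariant playlist, so that level.url holds several URLs. A hedged sketch of
  // what that looks like (playlist shown as a comment; URLs are placeholders):
  //   #EXT-X-STREAM-INF:BANDWIDTH=2000000,RESOLUTION=1280x720
  //   https://cdn-a.example.com/720p.m3u8
  //   #EXT-X-STREAM-INF:BANDWIDTH=2000000,RESOLUTION=1280x720
  //   https://cdn-b.example.com/720p.m3u8
  function exampleCountRedundantUrls(hls) {
    // level.url is expected to be an array of redundant playlist URLs per level
    return (hls.levels || []).map(function (level) {
      return level.url.length;
    });
  }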
8643
8644 _proto.onFragLoaded = function onFragLoaded(event, _ref3) {
8645 var frag = _ref3.frag;
8646
8647 if (frag !== undefined && frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN) {
8648 var level = this._levels[frag.level];
8649
8650 if (level !== undefined) {
8651 level.fragmentError = 0;
8652 level.loadError = 0;
8653 }
8654 }
8655 };
8656
8657 _proto.onLevelLoaded = function onLevelLoaded(event, data) {
8658 var _data$deliveryDirecti2;
8659
8660 var level = data.level,
8661 details = data.details;
8662 var curLevel = this._levels[level];
8663
8664 if (!curLevel) {
8665 var _data$deliveryDirecti;
8666
8667 this.warn("Invalid level index " + level);
8668
8669 if ((_data$deliveryDirecti = data.deliveryDirectives) !== null && _data$deliveryDirecti !== void 0 && _data$deliveryDirecti.skip) {
8670 details.deltaUpdateFailed = true;
8671 }
8672
8673 return;
8674 } // only process level loaded events matching with expected level
8675
8676
8677 if (level === this.currentLevelIndex) {
8678 // reset level load error counter on successful level loaded only if there is no issues with fragments
8679 if (curLevel.fragmentError === 0) {
8680 curLevel.loadError = 0;
8681 this.retryCount = 0;
8682 }
8683
8684 this.playlistLoaded(level, data, curLevel.details);
8685 } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) !== null && _data$deliveryDirecti2 !== void 0 && _data$deliveryDirecti2.skip) {
8686 // received a delta playlist update that cannot be merged
8687 details.deltaUpdateFailed = true;
8688 }
8689 };
8690
8691 _proto.onAudioTrackSwitched = function onAudioTrackSwitched(event, data) {
8692 var currentLevel = this.hls.levels[this.currentLevelIndex];
8693
8694 if (!currentLevel) {
8695 return;
8696 }
8697
8698 if (currentLevel.audioGroupIds) {
8699 var urlId = -1;
8700 var audioGroupId = this.hls.audioTracks[data.id].groupId;
8701
8702 for (var i = 0; i < currentLevel.audioGroupIds.length; i++) {
8703 if (currentLevel.audioGroupIds[i] === audioGroupId) {
8704 urlId = i;
8705 break;
8706 }
8707 }
8708
8709 if (urlId !== currentLevel.urlId) {
8710 currentLevel.urlId = urlId;
8711 this.startLoad();
8712 }
8713 }
8714 };
8715
8716 _proto.loadPlaylist = function loadPlaylist(hlsUrlParameters) {
8717 var level = this.currentLevelIndex;
8718 var currentLevel = this._levels[level];
8719
8720 if (this.canLoad && currentLevel && currentLevel.url.length > 0) {
8721 var id = currentLevel.urlId;
8722 var url = currentLevel.url[id];
8723
8724 if (hlsUrlParameters) {
8725 try {
8726 url = hlsUrlParameters.addDirectives(url);
8727 } catch (error) {
8728 this.warn("Could not construct new URL with HLS Delivery Directives: " + error);
8729 }
8730 }
8731
8732 this.log("Attempt loading level index " + level + (hlsUrlParameters ? ' at sn ' + hlsUrlParameters.msn + ' part ' + hlsUrlParameters.part : '') + " with URL-id " + id + " " + url); // console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
8733 // console.log('New video quality level audio group id:', levelObject.attrs.AUDIO, level);
8734
8735 this.clearTimer();
8736 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADING, {
8737 url: url,
8738 level: level,
8739 id: id,
8740 deliveryDirectives: hlsUrlParameters || null
8741 });
8742 }
8743 };
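  // --- Illustrative usage sketch (not part of the hls.js bundle) ---
  // hlsUrlParameters carries the LL-HLS delivery directives (_HLS_msn/_HLS_part)
  // that addDirectives() appends to the playlist URL above. Applications normally
  // only opt in via configuration; a minimal, hedged sketch:
  function exampleLowLatencySetup(video, src) {
    var hls = new Hls({
      lowLatencyMode: true // enables blocking playlist reloads with delivery directives
    });
    hls.loadSource(src);
    hls.attachMedia(video);
    return hls;
  }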
8744
8745 _proto.removeLevel = function removeLevel(levelIndex, urlId) {
8746 var filterLevelAndGroupByIdIndex = function filterLevelAndGroupByIdIndex(url, id) {
8747 return id !== urlId;
8748 };
8749
8750 var levels = this._levels.filter(function (level, index) {
8751 if (index !== levelIndex) {
8752 return true;
8753 }
8754
8755 if (level.url.length > 1 && urlId !== undefined) {
8756 level.url = level.url.filter(filterLevelAndGroupByIdIndex);
8757
8758 if (level.audioGroupIds) {
8759 level.audioGroupIds = level.audioGroupIds.filter(filterLevelAndGroupByIdIndex);
8760 }
8761
8762 if (level.textGroupIds) {
8763 level.textGroupIds = level.textGroupIds.filter(filterLevelAndGroupByIdIndex);
8764 }
8765
8766 level.urlId = 0;
8767 return true;
8768 }
8769
8770 return false;
8771 }).map(function (level, index) {
8772 var details = level.details;
8773
8774 if (details !== null && details !== void 0 && details.fragments) {
8775 details.fragments.forEach(function (fragment) {
8776 fragment.level = index;
8777 });
8778 }
8779
8780 return level;
8781 });
8782
8783 this._levels = levels;
8784 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVELS_UPDATED, {
8785 levels: levels
8786 });
8787 };
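  // --- Illustrative usage sketch (not part of the hls.js bundle) ---
  // removeLevel() drops a whole level, or a single redundant URL of that level, and
  // re-emits LEVELS_UPDATED with re-indexed fragments. A hedged sketch assuming the
  // Hls facade forwards this method; level 2 and URL-id 0 are placeholder values:
  function exampleDropBrokenLevel(hls) {
    hls.on(Hls.Events.LEVELS_UPDATED, function (event, data) {
      console.log('levels now:', data.levels.length);
    });
    hls.removeLevel(2, 0); // remove URL-id 0 of level 2 (or the level itself if it has a single URL)
  }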
8788
8789 _createClass(LevelController, [{
8790 key: "levels",
8791 get: function get() {
8792 if (this._levels.length === 0) {
8793 return null;
8794 }
8795
8796 return this._levels;
8797 }
8798 }, {
8799 key: "level",
8800 get: function get() {
8801 return this.currentLevelIndex;
8802 },
8803 set: function set(newLevel) {
8804 var _levels$newLevel;
8805
8806 var levels = this._levels;
8807
8808 if (levels.length === 0) {
8809 return;
8810 }
8811
8812 if (this.currentLevelIndex === newLevel && (_levels$newLevel = levels[newLevel]) !== null && _levels$newLevel !== void 0 && _levels$newLevel.details) {
8813 return;
8814 } // check if level idx is valid
8815
8816
8817 if (newLevel < 0 || newLevel >= levels.length) {
8818 // invalid level id given, trigger error
8819 var fatal = newLevel < 0;
8820 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
8821 type: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorTypes"].OTHER_ERROR,
8822 details: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_SWITCH_ERROR,
8823 level: newLevel,
8824 fatal: fatal,
8825 reason: 'invalid level idx'
8826 });
8827
8828 if (fatal) {
8829 return;
8830 }
8831
8832 newLevel = Math.min(newLevel, levels.length - 1);
8833 } // stopping live reloading timer if any
8834
8835
8836 this.clearTimer();
8837 var lastLevelIndex = this.currentLevelIndex;
8838 var lastLevel = levels[lastLevelIndex];
8839 var level = levels[newLevel];
8840 this.log("switching to level " + newLevel + " from " + lastLevelIndex);
8841 this.currentLevelIndex = newLevel;
8842
8843 var levelSwitchingData = _extends({}, level, {
8844 level: newLevel,
8845 maxBitrate: level.maxBitrate,
8846 uri: level.uri,
8847 urlId: level.urlId
8848 }); // @ts-ignore
8849
8850
8851 delete levelSwitchingData._urlId;
8852 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_SWITCHING, levelSwitchingData); // check if we need to load playlist for this level
8853
8854 var levelDetails = level.details;
8855
8856 if (!levelDetails || levelDetails.live) {
8857 // level not retrieved yet, or live playlist: we need to (re)load it
8858 var hlsUrlParameters = this.switchParams(level.uri, lastLevel === null || lastLevel === void 0 ? void 0 : lastLevel.details);
8859 this.loadPlaylist(hlsUrlParameters);
8860 }
8861 }
8862 }, {
8863 key: "manualLevel",
8864 get: function get() {
8865 return this.manualLevelIndex;
8866 },
8867 set: function set(newLevel) {
8868 this.manualLevelIndex = newLevel;
8869
8870 if (this._startLevel === undefined) {
8871 this._startLevel = newLevel;
8872 }
8873
8874 if (newLevel !== -1) {
8875 this.level = newLevel;
8876 }
8877 }
8878 }, {
8879 key: "firstLevel",
8880 get: function get() {
8881 return this._firstLevel;
8882 },
8883 set: function set(newLevel) {
8884 this._firstLevel = newLevel;
8885 }
8886 }, {
8887 key: "startLevel",
8888 get: function get() {
8889 // hls.startLevel takes precedence over config.startLevel
8890 // if none of these values are defined, fallback on this._firstLevel (first quality level appearing in variant manifest)
8891 if (this._startLevel === undefined) {
8892 var configStartLevel = this.hls.config.startLevel;
8893
8894 if (configStartLevel !== undefined) {
8895 return configStartLevel;
8896 } else {
8897 return this._firstLevel;
8898 }
8899 } else {
8900 return this._startLevel;
8901 }
8902 },
8903 set: function set(newLevel) {
8904 this._startLevel = newLevel;
8905 }
8906 }, {
8907 key: "nextLoadLevel",
8908 get: function get() {
8909 if (this.manualLevelIndex !== -1) {
8910 return this.manualLevelIndex;
8911 } else {
8912 return this.hls.nextAutoLevel;
8913 }
8914 },
8915 set: function set(nextLevel) {
8916 this.level = nextLevel;
8917
8918 if (this.manualLevelIndex === -1) {
8919 this.hls.nextAutoLevel = nextLevel;
8920 }
8921 }
8922 }]);
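  // --- Illustrative usage sketch (not part of the hls.js bundle) ---
  // The accessors above back the public quality-switching API. A hedged sketch of
  // the usual knobs (property names are the documented hls.js ones):
  function exampleQualitySwitching(hls) {
    console.log('auto level?', hls.autoLevelEnabled); // true while manualLevel === -1
    hls.startLevel = 2;   // level used for the first fragments (overrides config.startLevel)
    hls.currentLevel = 2; // immediate switch (flushes the buffer)
    hls.nextLevel = 2;    // switch for the next fragment (flushes the forward buffer)
    hls.loadLevel = 2;    // switch for subsequent loads only (no flush)
    hls.loadLevel = -1;   // -1 on any of these re-enables automatic level selection
  }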
8923
8924 return LevelController;
8925}(_base_playlist_controller__WEBPACK_IMPORTED_MODULE_5__["default"]);
8926
8927
8928
8929/***/ }),
8930
8931/***/ "./src/controller/level-helper.ts":
8932/*!****************************************!*\
8933 !*** ./src/controller/level-helper.ts ***!
8934 \****************************************/
8935/*! exports provided: addGroupId, assignTrackIdsByGroup, updatePTS, updateFragPTSDTS, mergeDetails, mapPartIntersection, mapFragmentIntersection, adjustSliding, addSliding, computeReloadInterval, getFragmentWithSN, getPartWith */
8936/***/ (function(module, __webpack_exports__, __webpack_require__) {
8937__webpack_require__.r(__webpack_exports__);
8938/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "addGroupId", function() { return addGroupId; });
8939/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "assignTrackIdsByGroup", function() { return assignTrackIdsByGroup; });
8940/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "updatePTS", function() { return updatePTS; });
8941/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "updateFragPTSDTS", function() { return updateFragPTSDTS; });
8942/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "mergeDetails", function() { return mergeDetails; });
8943/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "mapPartIntersection", function() { return mapPartIntersection; });
8944/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "mapFragmentIntersection", function() { return mapFragmentIntersection; });
8945/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "adjustSliding", function() { return adjustSliding; });
8946/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "addSliding", function() { return addSliding; });
8947/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "computeReloadInterval", function() { return computeReloadInterval; });
8948/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getFragmentWithSN", function() { return getFragmentWithSN; });
8949/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getPartWith", function() { return getPartWith; });
8950/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
8951/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
8952
8953
8954
8955
8956
8957
8958/**
8959 * @module LevelHelper
8960 * Providing methods dealing with playlist sliding and drift
8961 * */
8962
8963function addGroupId(level, type, id) {
8964 switch (type) {
8965 case 'audio':
8966 if (!level.audioGroupIds) {
8967 level.audioGroupIds = [];
8968 }
8969
8970 level.audioGroupIds.push(id);
8971 break;
8972
8973 case 'text':
8974 if (!level.textGroupIds) {
8975 level.textGroupIds = [];
8976 }
8977
8978 level.textGroupIds.push(id);
8979 break;
8980 }
8981}
8982function assignTrackIdsByGroup(tracks) {
8983 var groups = {};
8984 tracks.forEach(function (track) {
8985 var groupId = track.groupId || '';
8986 track.id = groups[groupId] = groups[groupId] || 0;
8987 groups[groupId]++;
8988 });
8989}
8990function updatePTS(fragments, fromIdx, toIdx) {
8991 var fragFrom = fragments[fromIdx];
8992 var fragTo = fragments[toIdx];
8993 updateFromToPTS(fragFrom, fragTo);
8994}
8995
8996function updateFromToPTS(fragFrom, fragTo) {
8997 var fragToPTS = fragTo.startPTS; // if we know startPTS[toIdx]
8998
8999 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(fragToPTS)) {
9000 // update fragment duration.
9001 // it helps to fix drifts between playlist reported duration and fragment real duration
9002 var duration = 0;
9003 var frag;
9004
9005 if (fragTo.sn > fragFrom.sn) {
9006 duration = fragToPTS - fragFrom.start;
9007 frag = fragFrom;
9008 } else {
9009 duration = fragFrom.start - fragToPTS;
9010 frag = fragTo;
9011 } // TODO? Drift can go either way, or the playlist could be completely accurate
9012 // console.assert(duration > 0,
9013 // `duration of ${duration} computed for frag ${frag.sn}, level ${frag.level}, there should be some duration drift between playlist and fragment!`);
9014
9015
9016 if (frag.duration !== duration) {
9017 frag.duration = duration;
9018 } // we don't know startPTS[toIdx]
9019
9020 } else if (fragTo.sn > fragFrom.sn) {
9021 var contiguous = fragFrom.cc === fragTo.cc; // TODO: With part-loading end/durations we need to confirm the whole fragment is loaded before using (or setting) minEndPTS
9022
9023 if (contiguous && fragFrom.minEndPTS) {
9024 fragTo.start = fragFrom.start + (fragFrom.minEndPTS - fragFrom.start);
9025 } else {
9026 fragTo.start = fragFrom.start + fragFrom.duration;
9027 }
9028 } else {
9029 fragTo.start = Math.max(fragFrom.start - fragTo.duration, 0);
9030 }
9031}
9032
9033function updateFragPTSDTS(details, frag, startPTS, endPTS, startDTS, endDTS) {
9034 var parsedMediaDuration = endPTS - startPTS;
9035
9036 if (parsedMediaDuration <= 0) {
9037 _utils_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].warn('Fragment should have a positive duration', frag);
9038 endPTS = startPTS + frag.duration;
9039 endDTS = startDTS + frag.duration;
9040 }
9041
9042 var maxStartPTS = startPTS;
9043 var minEndPTS = endPTS;
9044 var fragStartPts = frag.startPTS;
9045 var fragEndPts = frag.endPTS;
9046
9047 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(fragStartPts)) {
9048 // delta PTS between audio and video
9049 var deltaPTS = Math.abs(fragStartPts - startPTS);
9050
9051 if (!Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(frag.deltaPTS)) {
9052 frag.deltaPTS = deltaPTS;
9053 } else {
9054 frag.deltaPTS = Math.max(deltaPTS, frag.deltaPTS);
9055 }
9056
9057 maxStartPTS = Math.max(startPTS, fragStartPts);
9058 startPTS = Math.min(startPTS, fragStartPts);
9059 startDTS = Math.min(startDTS, frag.startDTS);
9060 minEndPTS = Math.min(endPTS, fragEndPts);
9061 endPTS = Math.max(endPTS, fragEndPts);
9062 endDTS = Math.max(endDTS, frag.endDTS);
9063 }
9064
9065 frag.duration = endPTS - startPTS;
9066 var drift = startPTS - frag.start;
9067 frag.appendedPTS = endPTS;
9068 frag.start = frag.startPTS = startPTS;
9069 frag.maxStartPTS = maxStartPTS;
9070 frag.startDTS = startDTS;
9071 frag.endPTS = endPTS;
9072 frag.minEndPTS = minEndPTS;
9073 frag.endDTS = endDTS;
9074 var sn = frag.sn; // 'initSegment'
9075 // exit if sn out of range
9076
9077 if (!details || sn < details.startSN || sn > details.endSN) {
9078 return 0;
9079 }
9080
9081 var i;
9082 var fragIdx = sn - details.startSN;
9083 var fragments = details.fragments; // update frag reference in fragments array
9084 // rationale is that fragments array might not contain this frag object.
9085 // this will happen if playlist has been refreshed between frag loading and call to updateFragPTSDTS()
9086 // if we don't update frag, we won't be able to propagate PTS info on the playlist
9087 // resulting in invalid sliding computation
9088
9089 fragments[fragIdx] = frag; // adjust fragment PTS/duration from seqnum-1 to frag 0
9090
9091 for (i = fragIdx; i > 0; i--) {
9092 updateFromToPTS(fragments[i], fragments[i - 1]);
9093 } // adjust fragment PTS/duration from seqnum to last frag
9094
9095
9096 for (i = fragIdx; i < fragments.length - 1; i++) {
9097 updateFromToPTS(fragments[i], fragments[i + 1]);
9098 }
9099
9100 if (details.fragmentHint) {
9101 updateFromToPTS(fragments[fragments.length - 1], details.fragmentHint);
9102 }
9103
9104 details.PTSKnown = details.alignedSliding = true;
9105 return drift;
9106}
9107function mergeDetails(oldDetails, newDetails) {
9108 // Track the last initSegment processed. Initialize it to the last one on the timeline.
9109 var currentInitSegment = null;
9110 var oldFragments = oldDetails.fragments;
9111
9112 for (var i = oldFragments.length - 1; i >= 0; i--) {
9113 var oldInit = oldFragments[i].initSegment;
9114
9115 if (oldInit) {
9116 currentInitSegment = oldInit;
9117 break;
9118 }
9119 }
9120
9121 if (oldDetails.fragmentHint) {
9122 // prevent PTS and duration from being adjusted on the next hint
9123 delete oldDetails.fragmentHint.endPTS;
9124 } // check if old/new playlists have fragments in common
9125 // loop through overlapping SN and update startPTS , cc, and duration if any found
9126
9127
9128 var ccOffset = 0;
9129 var PTSFrag;
9130 mapFragmentIntersection(oldDetails, newDetails, function (oldFrag, newFrag) {
9131 if (oldFrag.relurl) {
9132 // Do not compare CC if the old fragment has no url. This is a level.fragmentHint used by LL-HLS parts.
9133 // It may be off by 1 if it was created before any parts or discontinuity tags were appended to the end
9134 // of the playlist.
9135 ccOffset = oldFrag.cc - newFrag.cc;
9136 }
9137
9138 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(oldFrag.startPTS) && Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(oldFrag.endPTS)) {
9139 newFrag.start = newFrag.startPTS = oldFrag.startPTS;
9140 newFrag.startDTS = oldFrag.startDTS;
9141 newFrag.appendedPTS = oldFrag.appendedPTS;
9142 newFrag.maxStartPTS = oldFrag.maxStartPTS;
9143 newFrag.endPTS = oldFrag.endPTS;
9144 newFrag.endDTS = oldFrag.endDTS;
9145 newFrag.minEndPTS = oldFrag.minEndPTS;
9146 newFrag.duration = oldFrag.endPTS - oldFrag.startPTS;
9147
9148 if (newFrag.duration) {
9149 PTSFrag = newFrag;
9150 } // PTS is known when any segment has startPTS and endPTS
9151
9152
9153 newDetails.PTSKnown = newDetails.alignedSliding = true;
9154 }
9155
9156 newFrag.elementaryStreams = oldFrag.elementaryStreams;
9157 newFrag.loader = oldFrag.loader;
9158 newFrag.stats = oldFrag.stats;
9159 newFrag.urlId = oldFrag.urlId;
9160
9161 if (oldFrag.initSegment) {
9162 newFrag.initSegment = oldFrag.initSegment;
9163 currentInitSegment = oldFrag.initSegment;
9164 }
9165 });
9166
9167 if (currentInitSegment) {
9168 var fragmentsToCheck = newDetails.fragmentHint ? newDetails.fragments.concat(newDetails.fragmentHint) : newDetails.fragments;
9169 fragmentsToCheck.forEach(function (frag) {
9170 var _currentInitSegment;
9171
9172 if (!frag.initSegment || frag.initSegment.relurl === ((_currentInitSegment = currentInitSegment) === null || _currentInitSegment === void 0 ? void 0 : _currentInitSegment.relurl)) {
9173 frag.initSegment = currentInitSegment;
9174 }
9175 });
9176 }
9177
9178 if (newDetails.skippedSegments) {
9179 newDetails.deltaUpdateFailed = newDetails.fragments.some(function (frag) {
9180 return !frag;
9181 });
9182
9183 if (newDetails.deltaUpdateFailed) {
9184 _utils_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].warn('[level-helper] Previous playlist missing segments skipped in delta playlist');
9185
9186 for (var _i = newDetails.skippedSegments; _i--;) {
9187 newDetails.fragments.shift();
9188 }
9189
9190 newDetails.startSN = newDetails.fragments[0].sn;
9191 newDetails.startCC = newDetails.fragments[0].cc;
9192 }
9193 }
9194
9195 var newFragments = newDetails.fragments;
9196
9197 if (ccOffset) {
9198 _utils_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].warn('discontinuity sliding from playlist, take drift into account');
9199
9200 for (var _i2 = 0; _i2 < newFragments.length; _i2++) {
9201 newFragments[_i2].cc += ccOffset;
9202 }
9203 }
9204
9205 if (newDetails.skippedSegments) {
9206 newDetails.startCC = newDetails.fragments[0].cc;
9207 } // Merge parts
9208
9209
9210 mapPartIntersection(oldDetails.partList, newDetails.partList, function (oldPart, newPart) {
9211 newPart.elementaryStreams = oldPart.elementaryStreams;
9212 newPart.stats = oldPart.stats;
9213 }); // if at least one fragment contains PTS info, recompute PTS information for all fragments
9214
9215 if (PTSFrag) {
9216 updateFragPTSDTS(newDetails, PTSFrag, PTSFrag.startPTS, PTSFrag.endPTS, PTSFrag.startDTS, PTSFrag.endDTS);
9217 } else {
9218 // ensure that delta is within oldFragments range
9219 // also adjust sliding in case delta is 0 (we could have old=[50-60] and new=old=[50-61])
9220 // in that case we also need to adjust start offset of all fragments
9221 adjustSliding(oldDetails, newDetails);
9222 }
9223
9224 if (newFragments.length) {
9225 newDetails.totalduration = newDetails.edge - newFragments[0].start;
9226 }
9227
9228 newDetails.driftStartTime = oldDetails.driftStartTime;
9229 newDetails.driftStart = oldDetails.driftStart;
9230 var advancedDateTime = newDetails.advancedDateTime;
9231
9232 if (newDetails.advanced && advancedDateTime) {
9233 var edge = newDetails.edge;
9234
9235 if (!newDetails.driftStart) {
9236 newDetails.driftStartTime = advancedDateTime;
9237 newDetails.driftStart = edge;
9238 }
9239
9240 newDetails.driftEndTime = advancedDateTime;
9241 newDetails.driftEnd = edge;
9242 } else {
9243 newDetails.driftEndTime = oldDetails.driftEndTime;
9244 newDetails.driftEnd = oldDetails.driftEnd;
9245 newDetails.advancedDateTime = oldDetails.advancedDateTime;
9246 }
9247}
9248function mapPartIntersection(oldParts, newParts, intersectionFn) {
9249 if (oldParts && newParts) {
9250 var delta = 0;
9251
9252 for (var i = 0, len = oldParts.length; i <= len; i++) {
9253 var _oldPart = oldParts[i];
9254 var _newPart = newParts[i + delta];
9255
9256 if (_oldPart && _newPart && _oldPart.index === _newPart.index && _oldPart.fragment.sn === _newPart.fragment.sn) {
9257 intersectionFn(_oldPart, _newPart);
9258 } else {
9259 delta--;
9260 }
9261 }
9262 }
9263}
9264function mapFragmentIntersection(oldDetails, newDetails, intersectionFn) {
9265 var skippedSegments = newDetails.skippedSegments;
9266 var start = Math.max(oldDetails.startSN, newDetails.startSN) - newDetails.startSN;
9267 var end = (oldDetails.fragmentHint ? 1 : 0) + (skippedSegments ? newDetails.endSN : Math.min(oldDetails.endSN, newDetails.endSN)) - newDetails.startSN;
9268 var delta = newDetails.startSN - oldDetails.startSN;
9269 var newFrags = newDetails.fragmentHint ? newDetails.fragments.concat(newDetails.fragmentHint) : newDetails.fragments;
9270 var oldFrags = oldDetails.fragmentHint ? oldDetails.fragments.concat(oldDetails.fragmentHint) : oldDetails.fragments;
9271
9272 for (var i = start; i <= end; i++) {
9273 var _oldFrag = oldFrags[delta + i];
9274 var _newFrag = newFrags[i];
9275
9276 if (skippedSegments && !_newFrag && i < skippedSegments) {
9277 // Fill in skipped segments in delta playlist
9278 _newFrag = newDetails.fragments[i] = _oldFrag;
9279 }
9280
9281 if (_oldFrag && _newFrag) {
9282 intersectionFn(_oldFrag, _newFrag);
9283 }
9284 }
9285}
9286function adjustSliding(oldDetails, newDetails) {
9287 var delta = newDetails.startSN + newDetails.skippedSegments - oldDetails.startSN;
9288 var oldFragments = oldDetails.fragments;
9289
9290 if (delta < 0 || delta >= oldFragments.length) {
9291 return;
9292 }
9293
9294 addSliding(newDetails, oldFragments[delta].start);
9295}
9296function addSliding(details, start) {
9297 if (start) {
9298 var fragments = details.fragments;
9299
9300 for (var i = details.skippedSegments; i < fragments.length; i++) {
9301 fragments[i].start += start;
9302 }
9303
9304 if (details.fragmentHint) {
9305 details.fragmentHint.start += start;
9306 }
9307 }
9308}
9309function computeReloadInterval(newDetails, stats) {
9310 var reloadInterval = 1000 * newDetails.levelTargetDuration;
9311 var reloadIntervalAfterMiss = reloadInterval / 2;
9312 var timeSinceLastModified = newDetails.age;
9313 var useLastModified = timeSinceLastModified > 0 && timeSinceLastModified < reloadInterval * 3;
9314 var roundTrip = stats.loading.end - stats.loading.start;
9315 var estimatedTimeUntilUpdate;
9316 var availabilityDelay = newDetails.availabilityDelay; // let estimate = 'average';
9317
9318 if (newDetails.updated === false) {
9319 if (useLastModified) {
9320 // estimate = 'miss round trip';
9321 // We should have had a hit so try again in the time it takes to get a response,
9322 // but no less than 1/3 second.
9323 var minRetry = 333 * newDetails.misses;
9324 estimatedTimeUntilUpdate = Math.max(Math.min(reloadIntervalAfterMiss, roundTrip * 2), minRetry);
9325 newDetails.availabilityDelay = (newDetails.availabilityDelay || 0) + estimatedTimeUntilUpdate;
9326 } else {
9327 // estimate = 'miss half average';
9328 // follow HLS Spec, If the client reloads a Playlist file and finds that it has not
9329 // changed then it MUST wait for a period of one-half the target
9330 // duration before retrying.
9331 estimatedTimeUntilUpdate = reloadIntervalAfterMiss;
9332 }
9333 } else if (useLastModified) {
9334 // estimate = 'next modified date';
9335 // Get the closest we've been to timeSinceLastModified on update
9336 availabilityDelay = Math.min(availabilityDelay || reloadInterval / 2, timeSinceLastModified);
9337 newDetails.availabilityDelay = availabilityDelay;
9338 estimatedTimeUntilUpdate = availabilityDelay + reloadInterval - timeSinceLastModified;
9339 } else {
9340 estimatedTimeUntilUpdate = reloadInterval - roundTrip;
9341 } // console.log(`[computeReloadInterval] live reload ${newDetails.updated ? 'REFRESHED' : 'MISSED'}`,
9342 // '\n method', estimate,
9343 // '\n estimated time until update =>', estimatedTimeUntilUpdate,
9344 // '\n average target duration', reloadInterval,
9345 // '\n time since modified', timeSinceLastModified,
9346 // '\n time round trip', roundTrip,
9347 // '\n availability delay', availabilityDelay);
9348
9349
9350 return Math.round(estimatedTimeUntilUpdate);
9351}
9352function getFragmentWithSN(level, sn, fragCurrent) {
9353 if (!level || !level.details) {
9354 return null;
9355 }
9356
9357 var levelDetails = level.details;
9358 var fragment = levelDetails.fragments[sn - levelDetails.startSN];
9359
9360 if (fragment) {
9361 return fragment;
9362 }
9363
9364 fragment = levelDetails.fragmentHint;
9365
9366 if (fragment && fragment.sn === sn) {
9367 return fragment;
9368 }
9369
9370 if (sn < levelDetails.startSN && fragCurrent && fragCurrent.sn === sn) {
9371 return fragCurrent;
9372 }
9373
9374 return null;
9375}
9376function getPartWith(level, sn, partIndex) {
9377 if (!level || !level.details) {
9378 return null;
9379 }
9380
9381 var partList = level.details.partList;
9382
9383 if (partList) {
9384 for (var i = partList.length; i--;) {
9385 var part = partList[i];
9386
9387 if (part.index === partIndex && part.fragment.sn === sn) {
9388 return part;
9389 }
9390 }
9391 }
9392
9393 return null;
9394}
9395
9396/***/ }),
9397
9398/***/ "./src/controller/stream-controller.ts":
9399/*!*********************************************!*\
9400 !*** ./src/controller/stream-controller.ts ***!
9401 \*********************************************/
9402/*! exports provided: default */
9403/***/ (function(module, __webpack_exports__, __webpack_require__) {
9404__webpack_require__.r(__webpack_exports__);
9405/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return StreamController; });
9406/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
9407/* harmony import */ var _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./base-stream-controller */ "./src/controller/base-stream-controller.ts");
9408/* harmony import */ var _is_supported__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../is-supported */ "./src/is-supported.ts");
9409/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../events */ "./src/events.ts");
9410/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
9411/* harmony import */ var _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./fragment-tracker */ "./src/controller/fragment-tracker.ts");
9412/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
9413/* harmony import */ var _loader_fragment__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../loader/fragment */ "./src/loader/fragment.ts");
9414/* harmony import */ var _demux_transmuxer_interface__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../demux/transmuxer-interface */ "./src/demux/transmuxer-interface.ts");
9415/* harmony import */ var _types_transmuxer__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ../types/transmuxer */ "./src/types/transmuxer.ts");
9416/* harmony import */ var _gap_controller__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./gap-controller */ "./src/controller/gap-controller.ts");
9417/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
9418/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
9419
9420
9421
9422function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
9423
9424function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
9425
9426function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
9427
9428function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
9429
9430
9431
9432
9433
9434
9435
9436
9437
9438
9439
9440
9441
9442var TICK_INTERVAL = 100; // how often to tick in ms
9443
9444var StreamController = /*#__PURE__*/function (_BaseStreamController) {
9445 _inheritsLoose(StreamController, _BaseStreamController);
9446
9447 function StreamController(hls, fragmentTracker) {
9448 var _this;
9449
9450 _this = _BaseStreamController.call(this, hls, fragmentTracker, '[stream-controller]') || this;
9451 _this.audioCodecSwap = false;
9452 _this.gapController = null;
9453 _this.level = -1;
9454 _this._forceStartLoad = false;
9455 _this.altAudio = false;
9456 _this.audioOnly = false;
9457 _this.fragPlaying = null;
9458 _this.onvplaying = null;
9459 _this.onvseeked = null;
9460 _this.fragLastKbps = 0;
9461 _this.stalled = false;
9462 _this.couldBacktrack = false;
9463 _this.audioCodecSwitch = false;
9464 _this.videoBuffer = null;
9465
9466 _this._registerListeners();
9467
9468 return _this;
9469 }
9470
9471 var _proto = StreamController.prototype;
9472
9473 _proto._registerListeners = function _registerListeners() {
9474 var hls = this.hls;
9475 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
9476 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
9477 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
9478 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
9479 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
9480 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
9481 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
9482 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, this.onError, this);
9483 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
9484 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
9485 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
9486 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_FLUSHED, this.onBufferFlushed, this);
9487 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVELS_UPDATED, this.onLevelsUpdated, this);
9488 hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
9489 };
9490
9491 _proto._unregisterListeners = function _unregisterListeners() {
9492 var hls = this.hls;
9493 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
9494 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
9495 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
9496 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
9497 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
9498 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
9499 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, this.onError, this);
9500 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
9501 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
9502 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
9503 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_FLUSHED, this.onBufferFlushed, this);
9504 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVELS_UPDATED, this.onLevelsUpdated, this);
9505 hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
9506 };
9507
9508 _proto.onHandlerDestroying = function onHandlerDestroying() {
9509 this._unregisterListeners();
9510
9511 this.onMediaDetaching();
9512 };
9513
9514 _proto.startLoad = function startLoad(startPosition) {
9515 if (this.levels) {
9516 var lastCurrentTime = this.lastCurrentTime,
9517 hls = this.hls;
9518 this.stopLoad();
9519 this.setInterval(TICK_INTERVAL);
9520 this.level = -1;
9521 this.fragLoadError = 0;
9522
9523 if (!this.startFragRequested) {
9524 // determine load level
9525 var startLevel = hls.startLevel;
9526
9527 if (startLevel === -1) {
9528 if (hls.config.testBandwidth) {
9529 // -1 : guess start level by running a bitrate test: load the first fragment of the lowest quality level
9530 startLevel = 0;
9531 this.bitrateTest = true;
9532 } else {
9533 startLevel = hls.nextAutoLevel;
9534 }
9535 } // set new level to playlist loader : this will trigger start level load
9536 // hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
9537
9538
9539 this.level = hls.nextLoadLevel = startLevel;
9540 this.loadedmetadata = false;
9541 } // if startPosition undefined but lastCurrentTime set, set startPosition to last currentTime
9542
9543
9544 if (lastCurrentTime > 0 && startPosition === -1) {
9545 this.log("Override startPosition with lastCurrentTime @" + lastCurrentTime.toFixed(3));
9546 startPosition = lastCurrentTime;
9547 }
9548
9549 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
9550 this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition;
9551 this.tick();
9552 } else {
9553 this._forceStartLoad = true;
9554 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].STOPPED;
9555 }
9556 };
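  // --- Illustrative usage sketch (not part of the hls.js bundle) ---
  // startLoad() above picks the start level (bitrate test vs. nextAutoLevel) and the
  // start position (explicit value vs. lastCurrentTime). A hedged sketch of the
  // related configuration; the values are placeholders:
  function exampleStartConfig(video, src) {
    var hls = new Hls({
      startPosition: 30,   // begin playback at t=30s instead of the default -1 (start of VoD / live edge)
      testBandwidth: true, // allow the bitrate-test path that sets startLevel = 0 above
      startLevel: -1       // -1: automatic first-level selection (bitrate test or nextAutoLevel, as above)
    });
    hls.loadSource(src);
    hls.attachMedia(video);
    return hls;
  }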
9557
9558 _proto.stopLoad = function stopLoad() {
9559 this._forceStartLoad = false;
9560
9561 _BaseStreamController.prototype.stopLoad.call(this);
9562 };
9563
9564 _proto.doTick = function doTick() {
9565 switch (this.state) {
9566 case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE:
9567 this.doTickIdle();
9568 break;
9569
9570 case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL:
9571 {
9572 var _levels$level;
9573
9574 var levels = this.levels,
9575 level = this.level;
9576 var details = levels === null || levels === void 0 ? void 0 : (_levels$level = levels[level]) === null || _levels$level === void 0 ? void 0 : _levels$level.details;
9577
9578 if (details && (!details.live || this.levelLastLoaded === this.level)) {
9579 if (this.waitForCdnTuneIn(details)) {
9580 break;
9581 }
9582
9583 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
9584 break;
9585 }
9586
9587 break;
9588 }
9589
9590 case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].FRAG_LOADING_WAITING_RETRY:
9591 {
9592 var _this$media;
9593
9594 var now = self.performance.now();
9595 var retryDate = this.retryDate; // if current time is greater than retryDate, or if media is seeking, switch back to IDLE state to retry loading
9596
9597 if (!retryDate || now >= retryDate || (_this$media = this.media) !== null && _this$media !== void 0 && _this$media.seeking) {
9598 this.log('retryDate reached, switch back to IDLE state');
9599 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
9600 }
9601 }
9602 break;
9603 } // check buffer
9604 // check/update current fragment
9605
9606
9607 this.onTickEnd();
9608 };
9609
9610 _proto.onTickEnd = function onTickEnd() {
9611 _BaseStreamController.prototype.onTickEnd.call(this);
9612
9613 this.checkBuffer();
9614 this.checkFragmentChanged();
9615 };
9616
9617 _proto.doTickIdle = function doTickIdle() {
9618 var _frag$decryptdata, _frag$decryptdata2;
9619
9620 var hls = this.hls,
9621 levelLastLoaded = this.levelLastLoaded,
9622 levels = this.levels,
9623 media = this.media;
9624 var config = hls.config,
9625 level = hls.nextLoadLevel; // if start level not parsed yet OR
9626 // if video not attached AND start fragment already requested OR start frag prefetch not enabled
9627 // exit loop, as we either need more info (level not parsed) or we need media to be attached to load new fragment
9628
9629 if (levelLastLoaded === null || !media && (this.startFragRequested || !config.startFragPrefetch)) {
9630 return;
9631 } // If the "main" level is audio-only but we are loading an alternate track in the same group, do not load anything
9632
9633
9634 if (this.altAudio && this.audioOnly) {
9635 return;
9636 }
9637
9638 if (!levels || !levels[level]) {
9639 return;
9640 }
9641
9642 var levelInfo = levels[level]; // if buffer length is less than maxBufLen try to load a new fragment
9643 // set next load level : this will trigger a playlist load if needed
9644
9645 this.level = hls.nextLoadLevel = level;
9646 var levelDetails = levelInfo.details; // if level info not retrieved yet, switch state and wait for level retrieval
9647 // if live playlist, ensure that new playlist has been refreshed to avoid loading/try to load
9648 // a useless and outdated fragment (that might even introduce load error if it is already out of the live playlist)
9649
9650 if (!levelDetails || this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL || levelDetails.live && this.levelLastLoaded !== level) {
9651 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL;
9652 return;
9653 }
9654
9655 var bufferInfo = this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : media, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
9656
9657 if (bufferInfo === null) {
9658 return;
9659 }
9660
9661 var bufferLen = bufferInfo.len; // compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
9662
9663 var maxBufLen = this.getMaxBufferLength(levelInfo.maxBitrate); // Stay idle if we are still with buffer margins
9664
9665 if (bufferLen >= maxBufLen) {
9666 return;
9667 }
9668
9669 if (this._streamEnded(bufferInfo, levelDetails)) {
9670 var data = {};
9671
9672 if (this.altAudio) {
9673 data.type = 'video';
9674 }
9675
9676 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_EOS, data);
9677 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ENDED;
9678 return;
9679 }
9680
9681 var targetBufferTime = bufferInfo.end;
9682 var frag = this.getNextFragment(targetBufferTime, levelDetails); // Avoid backtracking after seeking or switching by loading an earlier segment in streams that could backtrack
9683
9684 if (this.couldBacktrack && !this.fragPrevious && frag && frag.sn !== 'initSegment') {
9685 var fragIdx = frag.sn - levelDetails.startSN;
9686
9687 if (fragIdx > 1) {
9688 frag = levelDetails.fragments[fragIdx - 1];
9689 this.fragmentTracker.removeFragment(frag);
9690 }
9691 } // Avoid loop loading by using nextLoadPosition set for backtracking
9692
9693
9694 if (frag && this.fragmentTracker.getState(frag) === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].OK && this.nextLoadPosition > targetBufferTime) {
9695 // Cleanup the fragment tracker before trying to find the next unbuffered fragment
9696 var type = this.audioOnly && !this.altAudio ? _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO : _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].VIDEO;
9697 this.afterBufferFlushed(media, type, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
9698 frag = this.getNextFragment(this.nextLoadPosition, levelDetails);
9699 }
9700
9701 if (!frag) {
9702 return;
9703 }
9704
9705 if (frag.initSegment && !frag.initSegment.data && !this.bitrateTest) {
9706 frag = frag.initSegment;
9707 } // We want to load the key if we're dealing with an identity key, because we will decrypt
9708 // this content using the key we fetch. Other keys will be handled by the DRM CDM via EME.
9709
9710
9711 if (((_frag$decryptdata = frag.decryptdata) === null || _frag$decryptdata === void 0 ? void 0 : _frag$decryptdata.keyFormat) === 'identity' && !((_frag$decryptdata2 = frag.decryptdata) !== null && _frag$decryptdata2 !== void 0 && _frag$decryptdata2.key)) {
9712 this.loadKey(frag, levelDetails);
9713 } else {
9714 this.loadFragment(frag, levelDetails, targetBufferTime);
9715 }
9716 };
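  // --- Illustrative usage sketch (not part of the hls.js bundle) ---
  // doTickIdle() stops fetching once the forward buffer reaches getMaxBufferLength().
  // That ceiling is driven by the buffer-related config; a hedged sketch using the
  // documented defaults as placeholder values:
  function exampleBufferTuning() {
    return new Hls({
      maxBufferLength: 30,            // target forward buffer, in seconds
      maxMaxBufferLength: 600,        // hard cap when ABR allows buffering further ahead
      maxBufferSize: 60 * 1000 * 1000 // ~60 MB ceiling referenced in the comment above
    });
  }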
9717
9718 _proto.loadFragment = function loadFragment(frag, levelDetails, targetBufferTime) {
9719 var _this$media2;
9720
9721 // Check if fragment is not loaded
9722 var fragState = this.fragmentTracker.getState(frag);
9723 this.fragCurrent = frag; // Use data from loaded backtracked fragment if available
9724
9725 if (fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].BACKTRACKED) {
9726 var data = this.fragmentTracker.getBacktrackData(frag);
9727
9728 if (data) {
9729 this._handleFragmentLoadProgress(data);
9730
9731 this._handleFragmentLoadComplete(data);
9732
9733 return;
9734 } else {
9735 fragState = _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].NOT_LOADED;
9736 }
9737 }
9738
9739 if (fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].NOT_LOADED || fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].PARTIAL) {
9740 if (frag.sn === 'initSegment') {
9741 this._loadInitSegment(frag);
9742 } else if (this.bitrateTest) {
9743 frag.bitrateTest = true;
9744 this.log("Fragment " + frag.sn + " of level " + frag.level + " is being downloaded to test bitrate and will not be buffered");
9745
9746 this._loadBitrateTestFrag(frag);
9747 } else {
9748 this.startFragRequested = true;
9749
9750 _BaseStreamController.prototype.loadFragment.call(this, frag, levelDetails, targetBufferTime);
9751 }
9752 } else if (fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].APPENDING) {
9753 // Lower the buffer size and try again
9754 if (this.reduceMaxBufferLength(frag.duration)) {
9755 this.fragmentTracker.removeFragment(frag);
9756 }
9757 } else if (((_this$media2 = this.media) === null || _this$media2 === void 0 ? void 0 : _this$media2.buffered.length) === 0) {
9758 // Stop gap for bad tracker / buffer flush behavior
9759 this.fragmentTracker.removeAllFragments();
9760 }
9761 };
9762
9763 _proto.getAppendedFrag = function getAppendedFrag(position) {
9764 var fragOrPart = this.fragmentTracker.getAppendedFrag(position, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
9765
9766 if (fragOrPart && 'fragment' in fragOrPart) {
9767 return fragOrPart.fragment;
9768 }
9769
9770 return fragOrPart;
9771 };
9772
9773 _proto.getBufferedFrag = function getBufferedFrag(position) {
9774 return this.fragmentTracker.getBufferedFrag(position, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
9775 };
9776
9777 _proto.followingBufferedFrag = function followingBufferedFrag(frag) {
9778 if (frag) {
9779 // try to get range of next fragment (500ms after this range)
9780 return this.getBufferedFrag(frag.end + 0.5);
9781 }
9782
9783 return null;
9784 }
9785 /*
9786 on immediate level switch :
9787 - pause playback if playing
9788 - cancel any pending load request
9789 - and trigger a buffer flush
9790 */
9791 ;
9792
9793 _proto.immediateLevelSwitch = function immediateLevelSwitch() {
9794 this.abortCurrentFrag();
9795 this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
9796 }
9797 /**
9798 * try to switch ASAP without breaking video playback:
9799 * in order to ensure smooth but quick level switching,
9800 * we need to find the next flushable buffer range
9801 * we should take into account new segment fetch time
9802 */
9803 ;
9804
9805 _proto.nextLevelSwitch = function nextLevelSwitch() {
9806 var levels = this.levels,
9807 media = this.media; // ensure that media is defined and that metadata are available (to retrieve currentTime)
9808
9809 if (media !== null && media !== void 0 && media.readyState) {
9810 var fetchdelay;
9811 var fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
9812
9813 if (fragPlayingCurrent && fragPlayingCurrent.start > 1) {
9814 // flush buffer preceding current fragment (flush until current fragment start offset)
9815 // minus 1s to avoid video freezing, which could happen if we flush the keyframe of the current video ...
9816 this.flushMainBuffer(0, fragPlayingCurrent.start - 1);
9817 }
9818
9819 if (!media.paused && levels) {
9820 // add a safety delay of 1s
9821 var nextLevelId = this.hls.nextLoadLevel;
9822 var nextLevel = levels[nextLevelId];
9823 var fragLastKbps = this.fragLastKbps;
9824
9825 if (fragLastKbps && this.fragCurrent) {
9826 fetchdelay = this.fragCurrent.duration * nextLevel.maxBitrate / (1000 * fragLastKbps) + 1;
9827 } else {
9828 fetchdelay = 0;
9829 }
9830 } else {
9831 fetchdelay = 0;
9832 } // this.log('fetchdelay:'+fetchdelay);
9833 // find buffer range that will be reached once new fragment will be fetched
9834
9835
9836 var bufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
9837
9838 if (bufferedFrag) {
9839 // we can flush buffer range following this one without stalling playback
9840 var nextBufferedFrag = this.followingBufferedFrag(bufferedFrag);
9841
9842 if (nextBufferedFrag) {
9843 // if we are here, we can also cancel any loading/demuxing in progress, as they are useless
9844 this.abortCurrentFrag(); // start flush position is in next buffered frag. Leave some padding for non-independent segments and smoother playback.
9845
9846 var maxStart = nextBufferedFrag.maxStartPTS ? nextBufferedFrag.maxStartPTS : nextBufferedFrag.start;
9847 var fragDuration = nextBufferedFrag.duration;
9848 var startPts = Math.max(bufferedFrag.end, maxStart + Math.min(Math.max(fragDuration - this.config.maxFragLookUpTolerance, fragDuration * 0.5), fragDuration * 0.75));
9849 this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY);
9850 }
9851 }
9852 }
9853 };
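  // --- Illustrative arithmetic (not part of the hls.js bundle) ---
  // The fetchdelay above estimates how long fetching the next-level fragment will take:
  //   fetchdelay = fragDuration * nextLevel.maxBitrate / (1000 * fragLastKbps) + 1
  // e.g. a 6 s fragment, a 3 Mbps next level and 8000 kbps measured throughput:
  //   6 * 3000000 / (1000 * 8000) + 1 = 2.25 + 1 = 3.25 s
  // so only buffer ranges starting after currentTime + 3.25 s are considered flushable.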
9854
9855 _proto.abortCurrentFrag = function abortCurrentFrag() {
9856 var fragCurrent = this.fragCurrent;
9857 this.fragCurrent = null;
9858
9859 if (fragCurrent !== null && fragCurrent !== void 0 && fragCurrent.loader) {
9860 fragCurrent.loader.abort();
9861 }
9862
9863 if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].KEY_LOADING) {
9864 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
9865 }
9866
9867 this.nextLoadPosition = this.getLoadPosition();
9868 };
9869
9870 _proto.flushMainBuffer = function flushMainBuffer(startOffset, endOffset) {
9871 _BaseStreamController.prototype.flushMainBuffer.call(this, startOffset, endOffset, this.altAudio ? 'video' : null);
9872 };
9873
9874 _proto.onMediaAttached = function onMediaAttached(event, data) {
9875 _BaseStreamController.prototype.onMediaAttached.call(this, event, data);
9876
9877 var media = data.media;
9878 this.onvplaying = this.onMediaPlaying.bind(this);
9879 this.onvseeked = this.onMediaSeeked.bind(this);
9880 media.addEventListener('playing', this.onvplaying);
9881 media.addEventListener('seeked', this.onvseeked);
9882 this.gapController = new _gap_controller__WEBPACK_IMPORTED_MODULE_10__["default"](this.config, media, this.fragmentTracker, this.hls);
9883 };
9884
9885 _proto.onMediaDetaching = function onMediaDetaching() {
9886 var media = this.media;
9887
9888 if (media) {
9889 media.removeEventListener('playing', this.onvplaying);
9890 media.removeEventListener('seeked', this.onvseeked);
9891 this.onvplaying = this.onvseeked = null;
9892 this.videoBuffer = null;
9893 }
9894
9895 this.fragPlaying = null;
9896
9897 if (this.gapController) {
9898 this.gapController.destroy();
9899 this.gapController = null;
9900 }
9901
9902 _BaseStreamController.prototype.onMediaDetaching.call(this);
9903 };
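  // --- Illustrative usage sketch (not part of the hls.js bundle) ---
  // onMediaAttached/onMediaDetaching above are driven by the public attach/detach API.
  // A hedged sketch of the usual lifecycle; `video` and `src` are placeholders:
  function exampleMediaLifecycle(video, src) {
    var hls = new Hls();
    hls.attachMedia(video); // triggers MEDIA_ATTACHED -> listeners + gapController set up above
    hls.loadSource(src);
    // ... later, when tearing the player down:
    hls.detachMedia();      // triggers MEDIA_DETACHING -> listeners removed above
    hls.destroy();
  }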
9904
9905 _proto.onMediaPlaying = function onMediaPlaying() {
9906 // tick to speed up FRAG_CHANGED triggering
9907 this.tick();
9908 };
9909
9910 _proto.onMediaSeeked = function onMediaSeeked() {
9911 var media = this.media;
9912 var currentTime = media ? media.currentTime : null;
9913
9914 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(currentTime)) {
9915 this.log("Media seeked to " + currentTime.toFixed(3));
9916 } // tick to speed up FRAG_CHANGED triggering
9917
9918
9919 this.tick();
9920 };
9921
9922 _proto.onManifestLoading = function onManifestLoading() {
9923 // reset buffer on manifest loading
9924 this.log('Trigger BUFFER_RESET');
9925 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_RESET, undefined);
9926 this.fragmentTracker.removeAllFragments();
9927 this.couldBacktrack = this.stalled = false;
9928 this.startPosition = this.lastCurrentTime = 0;
9929 this.fragPlaying = null;
9930 };
9931
9932 _proto.onManifestParsed = function onManifestParsed(event, data) {
9933 var aac = false;
9934 var heaac = false;
9935 var codec;
9936 data.levels.forEach(function (level) {
9937 // detect if we have different kinds of audio codecs used amongst the playlists
9938 codec = level.audioCodec;
9939
9940 if (codec) {
9941 if (codec.indexOf('mp4a.40.2') !== -1) {
9942 aac = true;
9943 }
9944
9945 if (codec.indexOf('mp4a.40.5') !== -1) {
9946 heaac = true;
9947 }
9948 }
9949 });
9950 this.audioCodecSwitch = aac && heaac && !Object(_is_supported__WEBPACK_IMPORTED_MODULE_2__["changeTypeSupported"])();
9951
9952 if (this.audioCodecSwitch) {
9953 this.log('Both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC');
9954 }
9955
9956 this.levels = data.levels;
9957 this.startFragRequested = false;
9958 };
9959
9960 _proto.onLevelLoading = function onLevelLoading(event, data) {
9961 var levels = this.levels;
9962
9963 if (!levels || this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE) {
9964 return;
9965 }
9966
9967 var level = levels[data.level];
9968
9969 if (!level.details || level.details.live && this.levelLastLoaded !== data.level || this.waitForCdnTuneIn(level.details)) {
9970 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL;
9971 }
9972 };
9973
9974 _proto.onLevelLoaded = function onLevelLoaded(event, data) {
9975 var _curLevel$details;
9976
9977 var levels = this.levels;
9978 var newLevelId = data.level;
9979 var newDetails = data.details;
9980 var duration = newDetails.totalduration;
9981
9982 if (!levels) {
9983 this.warn("Levels were reset while loading level " + newLevelId);
9984 return;
9985 }
9986
9987 this.log("Level " + newLevelId + " loaded [" + newDetails.startSN + "," + newDetails.endSN + "], cc [" + newDetails.startCC + ", " + newDetails.endCC + "] duration:" + duration);
9988 var fragCurrent = this.fragCurrent;
9989
9990 if (fragCurrent && (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].FRAG_LOADING || this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].FRAG_LOADING_WAITING_RETRY)) {
9991 if (fragCurrent.level !== data.level && fragCurrent.loader) {
9992 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
9993 fragCurrent.loader.abort();
9994 }
9995 }
9996
9997 var curLevel = levels[newLevelId];
9998 var sliding = 0;
9999
10000 if (newDetails.live || (_curLevel$details = curLevel.details) !== null && _curLevel$details !== void 0 && _curLevel$details.live) {
10001 if (!newDetails.fragments[0]) {
10002 newDetails.deltaUpdateFailed = true;
10003 }
10004
10005 if (newDetails.deltaUpdateFailed) {
10006 return;
10007 }
10008
10009 sliding = this.alignPlaylists(newDetails, curLevel.details);
10010 } // override level info
10011
10012
10013 curLevel.details = newDetails;
10014 this.levelLastLoaded = newLevelId;
10015 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVEL_UPDATED, {
10016 details: newDetails,
10017 level: newLevelId
10018 }); // only switch back to IDLE state if we were waiting for level to start downloading a new fragment
10019
10020 if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL) {
10021 if (this.waitForCdnTuneIn(newDetails)) {
10022 // Wait for Low-Latency CDN Tune-in
10023 return;
10024 }
10025
10026 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
10027 }
10028
10029 if (!this.startFragRequested) {
10030 this.setStartPosition(newDetails, sliding);
10031 } else if (newDetails.live) {
10032 this.synchronizeToLiveEdge(newDetails);
10033 } // trigger handler right now
10034
10035
10036 this.tick();
10037 };
10038
10039 _proto._handleFragmentLoadProgress = function _handleFragmentLoadProgress(data) {
10040 var _frag$initSegment;
10041
10042 var frag = data.frag,
10043 part = data.part,
10044 payload = data.payload;
10045 var levels = this.levels;
10046
10047 if (!levels) {
10048 this.warn("Levels were reset while fragment load was in progress. Fragment " + frag.sn + " of level " + frag.level + " will not be buffered");
10049 return;
10050 }
10051
10052 var currentLevel = levels[frag.level];
10053 var details = currentLevel.details;
10054
10055 if (!details) {
10056 this.warn("Dropping fragment " + frag.sn + " of level " + frag.level + " after level details were reset");
10057 return;
10058 }
10059
10060    var videoCodec = currentLevel.videoCodec; // timeOffset is accurate if the level PTS is known, or if the playlist is not sliding (not live)
10061
10062 var accurateTimeOffset = details.PTSKnown || !details.live;
10063 var initSegmentData = (_frag$initSegment = frag.initSegment) === null || _frag$initSegment === void 0 ? void 0 : _frag$initSegment.data;
10064
10065 var audioCodec = this._getAudioCodec(currentLevel); // transmux the MPEG-TS data to ISO-BMFF segments
10066 // this.log(`Transmuxing ${frag.sn} of [${details.startSN} ,${details.endSN}],level ${frag.level}, cc ${frag.cc}`);
10067
10068
10069 var transmuxer = this.transmuxer = this.transmuxer || new _demux_transmuxer_interface__WEBPACK_IMPORTED_MODULE_8__["default"](this.hls, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN, this._handleTransmuxComplete.bind(this), this._handleTransmuxerFlush.bind(this));
10070 var partIndex = part ? part.index : -1;
10071 var partial = partIndex !== -1;
10072 var chunkMeta = new _types_transmuxer__WEBPACK_IMPORTED_MODULE_9__["ChunkMetadata"](frag.level, frag.sn, frag.stats.chunkCount, payload.byteLength, partIndex, partial);
10073 var initPTS = this.initPTS[frag.cc];
10074 transmuxer.push(payload, initSegmentData, audioCodec, videoCodec, frag, part, details.totalduration, accurateTimeOffset, chunkMeta, initPTS);
10075 };
10076
10077 _proto.onAudioTrackSwitching = function onAudioTrackSwitching(event, data) {
10078    // if any URL is found on the new audio track, it is an alternate audio track
10079 var fromAltAudio = this.altAudio;
10080 var altAudio = !!data.url;
10081 var trackId = data.id; // if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered
10082 // don't do anything if we switch to alt audio: audio stream controller is handling it.
10083 // we will just have to change buffer scheduling on audioTrackSwitched
10084
10085 if (!altAudio) {
10086 if (this.mediaBuffer !== this.media) {
10087 this.log('Switching on main audio, use media.buffered to schedule main fragment loading');
10088 this.mediaBuffer = this.media;
10089 var fragCurrent = this.fragCurrent; // we need to refill audio buffer from main: cancel any frag loading to speed up audio switch
10090
10091 if (fragCurrent !== null && fragCurrent !== void 0 && fragCurrent.loader) {
10092 this.log('Switching to main audio track, cancel main fragment load');
10093 fragCurrent.loader.abort();
10094 } // destroy transmuxer to force init segment generation (following audio switch)
10095
10096
10097 this.resetTransmuxer(); // switch to IDLE state to load new fragment
10098
10099 this.resetLoadingState();
10100 } else if (this.audioOnly) {
10101 // Reset audio transmuxer so when switching back to main audio we're not still appending where we left off
10102 this.resetTransmuxer();
10103 }
10104
10105 var hls = this.hls; // If switching from alt to main audio, flush all audio and trigger track switched
10106
10107 if (fromAltAudio) {
10108 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_FLUSHING, {
10109 startOffset: 0,
10110 endOffset: Number.POSITIVE_INFINITY,
10111 type: 'audio'
10112 });
10113 }
10114
10115 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].AUDIO_TRACK_SWITCHED, {
10116 id: trackId
10117 });
10118 }
10119 };
10120
10121 _proto.onAudioTrackSwitched = function onAudioTrackSwitched(event, data) {
10122 var trackId = data.id;
10123 var altAudio = !!this.hls.audioTracks[trackId].url;
10124
10125 if (altAudio) {
10126 var videoBuffer = this.videoBuffer; // if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered
10127
10128 if (videoBuffer && this.mediaBuffer !== videoBuffer) {
10129 this.log('Switching on alternate audio, use video.buffered to schedule main fragment loading');
10130 this.mediaBuffer = videoBuffer;
10131 }
10132 }
10133
10134 this.altAudio = altAudio;
10135 this.tick();
10136 };
10137
10138 _proto.onBufferCreated = function onBufferCreated(event, data) {
10139 var tracks = data.tracks;
10140 var mediaTrack;
10141 var name;
10142 var alternate = false;
10143
10144 for (var type in tracks) {
10145 var track = tracks[type];
10146
10147 if (track.id === 'main') {
10148 name = type;
10149 mediaTrack = track; // keep video source buffer reference
10150
10151 if (type === 'video') {
10152 var videoTrack = tracks[type];
10153
10154 if (videoTrack) {
10155 this.videoBuffer = videoTrack.buffer;
10156 }
10157 }
10158 } else {
10159 alternate = true;
10160 }
10161 }
10162
10163 if (alternate && mediaTrack) {
10164 this.log("Alternate track found, use " + name + ".buffered to schedule main fragment loading");
10165 this.mediaBuffer = mediaTrack.buffer;
10166 } else {
10167 this.mediaBuffer = this.media;
10168 }
10169 };
10170
10171 _proto.onFragBuffered = function onFragBuffered(event, data) {
10172 var frag = data.frag,
10173 part = data.part;
10174
10175 if (frag && frag.type !== _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN) {
10176 return;
10177 }
10178
10179 if (this.fragContextChanged(frag)) {
10180 // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
10181 // Avoid setting state back to IDLE, since that will interfere with a level switch
10182 this.warn("Fragment " + frag.sn + (part ? ' p: ' + part.index : '') + " of level " + frag.level + " finished buffering, but was aborted. state: " + this.state);
10183
10184 if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSED) {
10185 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
10186 }
10187
10188 return;
10189 }
10190
10191 var stats = part ? part.stats : frag.stats;
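    // stats.total is in bytes and the stats timestamps are in milliseconds, so 8 * bytes / ms yields kbit/s
    // (e.g. 1,000,000 bytes loaded over 2,000 ms -> 8 * 1000000 / 2000 = 4000 kbit/s).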
10192 this.fragLastKbps = Math.round(8 * stats.total / (stats.buffering.end - stats.loading.first));
10193
10194 if (frag.sn !== 'initSegment') {
10195 this.fragPrevious = frag;
10196 }
10197
10198 this.fragBufferedComplete(frag, part);
10199 };
10200
10201 _proto.onError = function onError(event, data) {
10202 switch (data.details) {
10203 case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].FRAG_LOAD_ERROR:
10204 case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].FRAG_LOAD_TIMEOUT:
10205 case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].KEY_LOAD_ERROR:
10206 case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].KEY_LOAD_TIMEOUT:
10207 this.onFragmentOrKeyLoadError(_types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN, data);
10208 break;
10209
10210 case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].LEVEL_LOAD_ERROR:
10211 case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].LEVEL_LOAD_TIMEOUT:
10212 if (this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ERROR) {
10213 if (data.fatal) {
10214 // if fatal error, stop processing
10215 this.warn("" + data.details);
10216 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ERROR;
10217 } else {
10218            // in case of a non-fatal error while loading the level, if the level controller is not retrying to load it, switch back to IDLE
10219 if (!data.levelRetry && this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL) {
10220 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
10221 }
10222 }
10223 }
10224
10225 break;
10226
10227 case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].BUFFER_FULL_ERROR:
10228 // if in appending state
10229 if (data.parent === 'main' && (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING || this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSED)) {
10230 var flushBuffer = true;
10231          var bufferedInfo = this.getFwdBufferInfo(this.media, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN); // 0.5 : tolerance needed as some browsers stall playback before reaching buffered end
10232 // reduce max buf len if current position is buffered
10233
10234 if (bufferedInfo && bufferedInfo.len > 0.5) {
10235 flushBuffer = !this.reduceMaxBufferLength(bufferedInfo.len);
10236 }
10237
10238 if (flushBuffer) {
10239 // current position is not buffered, but browser is still complaining about buffer full error
10240 // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
10241 // in that case flush the whole buffer to recover
10242 this.warn('buffer full error also media.currentTime is not buffered, flush main'); // flush main buffer
10243
10244 this.immediateLevelSwitch();
10245 }
10246
10247 this.resetLoadingState();
10248 }
10249
10250 break;
10251 }
10252 } // Checks the health of the buffer and attempts to resolve playback stalls.
10253 ;
10254
10255 _proto.checkBuffer = function checkBuffer() {
10256 var media = this.media,
10257 gapController = this.gapController;
10258
10259 if (!media || !gapController || !media.readyState) {
10260 // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
10261 return;
10262 } // Check combined buffer
10263
10264
10265 var buffered = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].getBuffered(media);
10266
10267 if (!this.loadedmetadata && buffered.length) {
10268 this.loadedmetadata = true;
10269 this.seekToStartPos();
10270 } else {
10271 // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
10272 gapController.poll(this.lastCurrentTime);
10273 }
10274
10275 this.lastCurrentTime = media.currentTime;
10276 };
10277
10278 _proto.onFragLoadEmergencyAborted = function onFragLoadEmergencyAborted() {
10279    this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE; // if loadedmetadata is not set, it means that we are doing an emergency switch-down on the first frag
10280 // in that case, reset startFragRequested flag
10281
10282 if (!this.loadedmetadata) {
10283 this.startFragRequested = false;
10284 this.nextLoadPosition = this.startPosition;
10285 }
10286
10287 this.tickImmediate();
10288 };
10289
10290 _proto.onBufferFlushed = function onBufferFlushed(event, _ref) {
10291 var type = _ref.type;
10292
10293 if (type !== _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO || this.audioOnly && !this.altAudio) {
10294 var media = (type === _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].VIDEO ? this.videoBuffer : this.mediaBuffer) || this.media;
10295 this.afterBufferFlushed(media, type, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
10296 }
10297 };
10298
10299 _proto.onLevelsUpdated = function onLevelsUpdated(event, data) {
10300 this.levels = data.levels;
10301 };
10302
10303 _proto.swapAudioCodec = function swapAudioCodec() {
10304 this.audioCodecSwap = !this.audioCodecSwap;
10305 }
10306 /**
10307 * Seeks to the set startPosition if not equal to the mediaElement's current time.
10308 * @private
10309 */
10310 ;
10311
10312 _proto.seekToStartPos = function seekToStartPos() {
10313 var media = this.media;
10314 var currentTime = media.currentTime;
10315 var startPosition = this.startPosition; // only adjust currentTime if different from startPosition or if startPosition not buffered
10316 // at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
10317
10318 if (startPosition >= 0 && currentTime < startPosition) {
10319 if (media.seeking) {
10320 _utils_logger__WEBPACK_IMPORTED_MODULE_12__["logger"].log("could not seek to " + startPosition + ", already seeking at " + currentTime);
10321 return;
10322 }
10323
10324 var buffered = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].getBuffered(media);
10325 var bufferStart = buffered.length ? buffered.start(0) : 0;
10326 var delta = bufferStart - startPosition;
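      // A small positive delta means the buffer starts just after the requested start position;
      // nudge startPosition forward when the gap is within the configured hole/look-up tolerance.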
10327
10328 if (delta > 0 && (delta < this.config.maxBufferHole || delta < this.config.maxFragLookUpTolerance)) {
10329 _utils_logger__WEBPACK_IMPORTED_MODULE_12__["logger"].log("adjusting start position by " + delta + " to match buffer start");
10330 startPosition += delta;
10331 this.startPosition = startPosition;
10332 }
10333
10334 this.log("seek to target start position " + startPosition + " from current time " + currentTime);
10335 media.currentTime = startPosition;
10336 }
10337 };
10338
10339 _proto._getAudioCodec = function _getAudioCodec(currentLevel) {
10340 var audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec;
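    // 'mp4a.40.2' signals AAC-LC and 'mp4a.40.5' signals HE-AAC; when audioCodecSwap is set, the requested codec is flipped between the two.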
10341
10342 if (this.audioCodecSwap && audioCodec) {
10343 this.log('Swapping audio codec');
10344
10345 if (audioCodec.indexOf('mp4a.40.5') !== -1) {
10346 audioCodec = 'mp4a.40.2';
10347 } else {
10348 audioCodec = 'mp4a.40.5';
10349 }
10350 }
10351
10352 return audioCodec;
10353 };
10354
10355 _proto._loadBitrateTestFrag = function _loadBitrateTestFrag(frag) {
10356 var _this2 = this;
10357
10358 this._doFragLoad(frag).then(function (data) {
10359 var hls = _this2.hls;
10360
10361 if (!data || hls.nextLoadLevel || _this2.fragContextChanged(frag)) {
10362 return;
10363 }
10364
10365 _this2.fragLoadError = 0;
10366 _this2.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
10367 _this2.startFragRequested = false;
10368 _this2.bitrateTest = false;
10369      var stats = frag.stats; // Bitrate test fragments are neither parsed nor buffered
10370
10371 stats.parsing.start = stats.parsing.end = stats.buffering.start = stats.buffering.end = self.performance.now();
10372 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_LOADED, data);
10373 });
10374 };
10375
10376 _proto._handleTransmuxComplete = function _handleTransmuxComplete(transmuxResult) {
10377 var _id3$samples;
10378
10379 var id = 'main';
10380 var hls = this.hls;
10381 var remuxResult = transmuxResult.remuxResult,
10382 chunkMeta = transmuxResult.chunkMeta;
10383 var context = this.getCurrentContext(chunkMeta);
10384
10385 if (!context) {
10386 this.warn("The loading context changed while buffering fragment " + chunkMeta.sn + " of level " + chunkMeta.level + ". This chunk will not be buffered.");
10387 this.resetLiveStartWhenNotLoaded(chunkMeta.level);
10388 return;
10389 }
10390
10391 var frag = context.frag,
10392 part = context.part,
10393 level = context.level;
10394 var video = remuxResult.video,
10395 text = remuxResult.text,
10396 id3 = remuxResult.id3,
10397 initSegment = remuxResult.initSegment; // The audio-stream-controller handles audio buffering if Hls.js is playing an alternate audio track
10398
10399 var audio = this.altAudio ? undefined : remuxResult.audio; // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
10400 // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
10401
10402 if (this.fragContextChanged(frag)) {
10403 return;
10404 }
10405
10406 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING;
10407
10408 if (initSegment) {
10409 if (initSegment.tracks) {
10410 this._bufferInitSegment(level, initSegment.tracks, frag, chunkMeta);
10411
10412 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_PARSING_INIT_SEGMENT, {
10413 frag: frag,
10414 id: id,
10415 tracks: initSegment.tracks
10416 });
10417      } // It would be nice if Number.isFinite acted as a type guard, but it doesn't. See: https://github.com/Microsoft/TypeScript/issues/10038
10418
10419
10420 var initPTS = initSegment.initPTS;
10421 var timescale = initSegment.timescale;
10422
10423 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(initPTS)) {
10424 this.initPTS[frag.cc] = initPTS;
10425 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].INIT_PTS_FOUND, {
10426 frag: frag,
10427 id: id,
10428 initPTS: initPTS,
10429 timescale: timescale
10430 });
10431 }
10432 } // Avoid buffering if backtracking this fragment
10433
10434
10435 if (video && remuxResult.independent !== false) {
10436 if (level.details) {
10437 var startPTS = video.startPTS,
10438 endPTS = video.endPTS,
10439 startDTS = video.startDTS,
10440 endDTS = video.endDTS;
10441
10442 if (part) {
10443 part.elementaryStreams[video.type] = {
10444 startPTS: startPTS,
10445 endPTS: endPTS,
10446 startDTS: startDTS,
10447 endDTS: endDTS
10448 };
10449 } else {
10450 if (video.firstKeyFrame && video.independent) {
10451 this.couldBacktrack = true;
10452 }
10453
10454 if (video.dropped && video.independent) {
10455 // Backtrack if dropped frames create a gap after currentTime
10456 var pos = this.getLoadPosition() + this.config.maxBufferHole;
10457
10458 if (pos < startPTS) {
10459 this.backtrack(frag);
10460 return;
10461 } // Set video stream start to fragment start so that truncated samples do not distort the timeline, and mark it partial
10462
10463
10464 frag.setElementaryStreamInfo(video.type, frag.start, endPTS, frag.start, endDTS, true);
10465 }
10466 }
10467
10468 frag.setElementaryStreamInfo(video.type, startPTS, endPTS, startDTS, endDTS);
10469 this.bufferFragmentData(video, frag, part, chunkMeta);
10470 }
10471 } else if (remuxResult.independent === false) {
10472 this.backtrack(frag);
10473 return;
10474 }
10475
10476 if (audio) {
10477 var _startPTS = audio.startPTS,
10478 _endPTS = audio.endPTS,
10479 _startDTS = audio.startDTS,
10480 _endDTS = audio.endDTS;
10481
10482 if (part) {
10483 part.elementaryStreams[_loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO] = {
10484 startPTS: _startPTS,
10485 endPTS: _endPTS,
10486 startDTS: _startDTS,
10487 endDTS: _endDTS
10488 };
10489 }
10490
10491 frag.setElementaryStreamInfo(_loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO, _startPTS, _endPTS, _startDTS, _endDTS);
10492 this.bufferFragmentData(audio, frag, part, chunkMeta);
10493 }
10494
10495 if (id3 !== null && id3 !== void 0 && (_id3$samples = id3.samples) !== null && _id3$samples !== void 0 && _id3$samples.length) {
10496 var emittedID3 = {
10497 frag: frag,
10498 id: id,
10499 samples: id3.samples
10500 };
10501 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_PARSING_METADATA, emittedID3);
10502 }
10503
10504 if (text) {
10505 var emittedText = {
10506 frag: frag,
10507 id: id,
10508 samples: text.samples
10509 };
10510 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_PARSING_USERDATA, emittedText);
10511 }
10512 };
10513
10514 _proto._bufferInitSegment = function _bufferInitSegment(currentLevel, tracks, frag, chunkMeta) {
10515 var _this3 = this;
10516
10517 if (this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING) {
10518 return;
10519 }
10520
10521 this.audioOnly = !!tracks.audio && !tracks.video; // if audio track is expected to come from audio stream controller, discard any coming from main
10522
10523 if (this.altAudio && !this.audioOnly) {
10524 delete tracks.audio;
10525 } // include levelCodec in audio and video tracks
10526
10527
10528 var audio = tracks.audio,
10529 video = tracks.video,
10530 audiovideo = tracks.audiovideo;
10531
10532 if (audio) {
10533 var audioCodec = currentLevel.audioCodec;
10534 var ua = navigator.userAgent.toLowerCase();
10535
10536 if (this.audioCodecSwitch) {
10537 if (audioCodec) {
10538 if (audioCodec.indexOf('mp4a.40.5') !== -1) {
10539 audioCodec = 'mp4a.40.2';
10540 } else {
10541 audioCodec = 'mp4a.40.5';
10542 }
10543 } // In the case that AAC and HE-AAC audio codecs are signalled in manifest,
10544      // force HE-AAC, as it seems that most browsers prefer it.
10545 // don't force HE-AAC if mono stream, or in Firefox
10546
10547
10548 if (audio.metadata.channelCount !== 1 && ua.indexOf('firefox') === -1) {
10549 audioCodec = 'mp4a.40.5';
10550 }
10551      } // HE-AAC is broken on Android; always signal the audio codec as AAC even if the variant manifest states otherwise
10552
10553
10554 if (ua.indexOf('android') !== -1 && audio.container !== 'audio/mpeg') {
10555 // Exclude mpeg audio
10556 audioCodec = 'mp4a.40.2';
10557 this.log("Android: force audio codec to " + audioCodec);
10558 }
10559
10560 if (currentLevel.audioCodec && currentLevel.audioCodec !== audioCodec) {
10561 this.log("Swapping manifest audio codec \"" + currentLevel.audioCodec + "\" for \"" + audioCodec + "\"");
10562 }
10563
10564 audio.levelCodec = audioCodec;
10565 audio.id = 'main';
10566 this.log("Init audio buffer, container:" + audio.container + ", codecs[selected/level/parsed]=[" + (audioCodec || '') + "/" + (currentLevel.audioCodec || '') + "/" + audio.codec + "]");
10567 }
10568
10569 if (video) {
10570 video.levelCodec = currentLevel.videoCodec;
10571 video.id = 'main';
10572 this.log("Init video buffer, container:" + video.container + ", codecs[level/parsed]=[" + (currentLevel.videoCodec || '') + "/" + video.codec + "]");
10573 }
10574
10575 if (audiovideo) {
10576 this.log("Init audiovideo buffer, container:" + audiovideo.container + ", codecs[level/parsed]=[" + (currentLevel.attrs.CODECS || '') + "/" + audiovideo.codec + "]");
10577 }
10578
10579 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_CODECS, tracks); // loop through tracks that are going to be provided to bufferController
10580
10581 Object.keys(tracks).forEach(function (trackName) {
10582 var track = tracks[trackName];
10583 var initSegment = track.initSegment;
10584
10585 if (initSegment !== null && initSegment !== void 0 && initSegment.byteLength) {
10586 _this3.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_APPENDING, {
10587 type: trackName,
10588 data: initSegment,
10589 frag: frag,
10590 part: null,
10591 chunkMeta: chunkMeta,
10592 parent: frag.type
10593 });
10594 }
10595 }); // trigger handler right now
10596
10597 this.tick();
10598 };
10599
10600 _proto.backtrack = function backtrack(frag) {
10601 this.couldBacktrack = true; // Causes findFragments to backtrack through fragments to find the keyframe
10602
10603 this.resetTransmuxer();
10604 this.flushBufferGap(frag);
10605 var data = this.fragmentTracker.backtrack(frag);
10606 this.fragPrevious = null;
10607 this.nextLoadPosition = frag.start;
10608
10609 if (data) {
10610 this.resetFragmentLoading(frag);
10611 } else {
10612 // Change state to BACKTRACKING so that fragmentEntity.backtrack data can be added after _doFragLoad
10613 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].BACKTRACKING;
10614 }
10615 };
10616
10617 _proto.checkFragmentChanged = function checkFragmentChanged() {
10618 var video = this.media;
10619 var fragPlayingCurrent = null;
10620
10621 if (video && video.readyState > 1 && video.seeking === false) {
10622 var currentTime = video.currentTime;
10623      /* if the video element is in a seeked state, currentTime can only increase
10624        (assuming that the playback rate is positive ...)
10625        However, currentTime sometimes jumps back to zero after a
10626        media decode error, so check that the position is buffered
10627        to avoid seeking back to a wrong position after such an error
10628      */
10629
10630 if (_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].isBuffered(video, currentTime)) {
10631 fragPlayingCurrent = this.getAppendedFrag(currentTime);
10632 } else if (_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].isBuffered(video, currentTime + 0.1)) {
10633 /* ensure that FRAG_CHANGED event is triggered at startup,
10634 when first video frame is displayed and playback is paused.
10635 add a tolerance of 100ms, in case current position is not buffered,
10636 check if current pos+100ms is buffered and use that buffer range
10637 for FRAG_CHANGED event reporting */
10638 fragPlayingCurrent = this.getAppendedFrag(currentTime + 0.1);
10639 }
10640
10641 if (fragPlayingCurrent) {
10642 var fragPlaying = this.fragPlaying;
10643 var fragCurrentLevel = fragPlayingCurrent.level;
10644
10645 if (!fragPlaying || fragPlayingCurrent.sn !== fragPlaying.sn || fragPlaying.level !== fragCurrentLevel || fragPlayingCurrent.urlId !== fragPlaying.urlId) {
10646 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_CHANGED, {
10647 frag: fragPlayingCurrent
10648 });
10649
10650 if (!fragPlaying || fragPlaying.level !== fragCurrentLevel) {
10651 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVEL_SWITCHED, {
10652 level: fragCurrentLevel
10653 });
10654 }
10655
10656 this.fragPlaying = fragPlayingCurrent;
10657 }
10658 }
10659 }
10660 };
10661
10662 _createClass(StreamController, [{
10663 key: "nextLevel",
10664 get: function get() {
10665 var frag = this.nextBufferedFrag;
10666
10667 if (frag) {
10668 return frag.level;
10669 } else {
10670 return -1;
10671 }
10672 }
10673 }, {
10674 key: "currentLevel",
10675 get: function get() {
10676 var media = this.media;
10677
10678 if (media) {
10679 var fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
10680
10681 if (fragPlayingCurrent) {
10682 return fragPlayingCurrent.level;
10683 }
10684 }
10685
10686 return -1;
10687 }
10688 }, {
10689 key: "nextBufferedFrag",
10690 get: function get() {
10691 var media = this.media;
10692
10693 if (media) {
10694 // first get end range of current fragment
10695 var fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
10696 return this.followingBufferedFrag(fragPlayingCurrent);
10697 } else {
10698 return null;
10699 }
10700 }
10701 }, {
10702 key: "forceStartLoad",
10703 get: function get() {
10704 return this._forceStartLoad;
10705 }
10706 }]);
10707
10708 return StreamController;
10709}(_base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["default"]);
10710
10711
10712
10713/***/ }),
10714
10715/***/ "./src/controller/subtitle-stream-controller.ts":
10716/*!******************************************************!*\
10717 !*** ./src/controller/subtitle-stream-controller.ts ***!
10718 \******************************************************/
10719/*! exports provided: SubtitleStreamController */
10720/***/ (function(module, __webpack_exports__, __webpack_require__) {
10721__webpack_require__.r(__webpack_exports__);
10722/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "SubtitleStreamController", function() { return SubtitleStreamController; });
10723/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
10724/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
10725/* harmony import */ var _fragment_finders__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./fragment-finders */ "./src/controller/fragment-finders.ts");
10726/* harmony import */ var _utils_discontinuities__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/discontinuities */ "./src/utils/discontinuities.ts");
10727/* harmony import */ var _level_helper__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./level-helper */ "./src/controller/level-helper.ts");
10728/* harmony import */ var _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./fragment-tracker */ "./src/controller/fragment-tracker.ts");
10729/* harmony import */ var _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./base-stream-controller */ "./src/controller/base-stream-controller.ts");
10730/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
10731/* harmony import */ var _types_level__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../types/level */ "./src/types/level.ts");
10732function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
10733
10734function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
10735
10736function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
10737
10738function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
10739
10740
10741
10742
10743
10744
10745
10746
10747
10748
10749var TICK_INTERVAL = 500; // how often to tick in ms
10750
10751var SubtitleStreamController = /*#__PURE__*/function (_BaseStreamController) {
10752 _inheritsLoose(SubtitleStreamController, _BaseStreamController);
10753
10754 function SubtitleStreamController(hls, fragmentTracker) {
10755 var _this;
10756
10757 _this = _BaseStreamController.call(this, hls, fragmentTracker, '[subtitle-stream-controller]') || this;
10758 _this.levels = [];
10759 _this.currentTrackId = -1;
10760 _this.tracksBuffered = [];
10761 _this.mainDetails = null;
10762
10763 _this._registerListeners();
10764
10765 return _this;
10766 }
10767
10768 var _proto = SubtitleStreamController.prototype;
10769
10770 _proto.onHandlerDestroying = function onHandlerDestroying() {
10771 this._unregisterListeners();
10772
10773 this.mainDetails = null;
10774 };
10775
10776 _proto._registerListeners = function _registerListeners() {
10777 var hls = this.hls;
10778 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
10779 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
10780 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
10781 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
10782 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
10783 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
10784 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this);
10785 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
10786 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_FRAG_PROCESSED, this.onSubtitleFragProcessed, this);
10787 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
10788 };
10789
10790 _proto._unregisterListeners = function _unregisterListeners() {
10791 var hls = this.hls;
10792 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
10793 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
10794 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
10795 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
10796 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
10797 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
10798 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this);
10799 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
10800 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_FRAG_PROCESSED, this.onSubtitleFragProcessed, this);
10801 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
10802 };
10803
10804 _proto.startLoad = function startLoad() {
10805 this.stopLoad();
10806 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE;
10807 this.setInterval(TICK_INTERVAL);
10808 this.tick();
10809 };
10810
10811 _proto.onManifestLoading = function onManifestLoading() {
10812 this.mainDetails = null;
10813 this.fragmentTracker.removeAllFragments();
10814 };
10815
10816 _proto.onLevelLoaded = function onLevelLoaded(event, data) {
10817 this.mainDetails = data.details;
10818 };
10819
10820 _proto.onSubtitleFragProcessed = function onSubtitleFragProcessed(event, data) {
10821 var frag = data.frag,
10822 success = data.success;
10823 this.fragPrevious = frag;
10824 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE;
10825
10826 if (!success) {
10827 return;
10828 }
10829
10830 var buffered = this.tracksBuffered[this.currentTrackId];
10831
10832 if (!buffered) {
10833 return;
10834 } // Create/update a buffered array matching the interface used by BufferHelper.bufferedInfo
10835 // so we can re-use the logic used to detect how much has been buffered
10836
10837
10838 var timeRange;
10839 var fragStart = frag.start;
10840
10841 for (var i = 0; i < buffered.length; i++) {
10842 if (fragStart >= buffered[i].start && fragStart <= buffered[i].end) {
10843 timeRange = buffered[i];
10844 break;
10845 }
10846 }
10847
10848 var fragEnd = frag.start + frag.duration;
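    // Extend the overlapping buffered range to the fragment end, or start a new range if none overlaps.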
10849
10850 if (timeRange) {
10851 timeRange.end = fragEnd;
10852 } else {
10853 timeRange = {
10854 start: fragStart,
10855 end: fragEnd
10856 };
10857 buffered.push(timeRange);
10858 }
10859
10860 this.fragmentTracker.fragBuffered(frag);
10861 };
10862
10863 _proto.onBufferFlushing = function onBufferFlushing(event, data) {
10864 var startOffset = data.startOffset,
10865 endOffset = data.endOffset;
10866
10867 if (startOffset === 0 && endOffset !== Number.POSITIVE_INFINITY) {
10868 var currentTrackId = this.currentTrackId,
10869 levels = this.levels;
10870
10871 if (!levels.length || !levels[currentTrackId] || !levels[currentTrackId].details) {
10872 return;
10873 }
10874
10875 var trackDetails = levels[currentTrackId].details;
10876 var targetDuration = trackDetails.targetduration;
10877 var endOffsetSubtitles = endOffset - targetDuration;
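      // Flush subtitles only up to one target duration before the main flush end, so cues from slightly misaligned playlists are not lost.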
10878
10879 if (endOffsetSubtitles <= 0) {
10880 return;
10881 }
10882
10883 data.endOffsetSubtitles = Math.max(0, endOffsetSubtitles);
10884 this.tracksBuffered.forEach(function (buffered) {
10885 for (var i = 0; i < buffered.length;) {
10886 if (buffered[i].end <= endOffsetSubtitles) {
10887 buffered.shift();
10888 continue;
10889 } else if (buffered[i].start < endOffsetSubtitles) {
10890 buffered[i].start = endOffsetSubtitles;
10891 } else {
10892 break;
10893 }
10894
10895 i++;
10896 }
10897 });
10898 this.fragmentTracker.removeFragmentsInRange(startOffset, endOffsetSubtitles, _types_loader__WEBPACK_IMPORTED_MODULE_7__["PlaylistLevelType"].SUBTITLE);
10899 }
10900 } // If something goes wrong, proceed to next frag, if we were processing one.
10901 ;
10902
10903 _proto.onError = function onError(event, data) {
10904 var _this$fragCurrent;
10905
10906 var frag = data.frag; // don't handle error not related to subtitle fragment
10907
10908 if (!frag || frag.type !== _types_loader__WEBPACK_IMPORTED_MODULE_7__["PlaylistLevelType"].SUBTITLE) {
10909 return;
10910 }
10911
10912 if ((_this$fragCurrent = this.fragCurrent) !== null && _this$fragCurrent !== void 0 && _this$fragCurrent.loader) {
10913 this.fragCurrent.loader.abort();
10914 }
10915
10916 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE;
10917 } // Got all new subtitle levels.
10918 ;
10919
10920 _proto.onSubtitleTracksUpdated = function onSubtitleTracksUpdated(event, _ref) {
10921 var _this2 = this;
10922
10923 var subtitleTracks = _ref.subtitleTracks;
10924 this.tracksBuffered = [];
10925 this.levels = subtitleTracks.map(function (mediaPlaylist) {
10926 return new _types_level__WEBPACK_IMPORTED_MODULE_8__["Level"](mediaPlaylist);
10927 });
10928 this.fragmentTracker.removeAllFragments();
10929 this.fragPrevious = null;
10930 this.levels.forEach(function (level) {
10931 _this2.tracksBuffered[level.id] = [];
10932 });
10933 this.mediaBuffer = null;
10934 };
10935
10936 _proto.onSubtitleTrackSwitch = function onSubtitleTrackSwitch(event, data) {
10937 this.currentTrackId = data.id;
10938
10939 if (!this.levels.length || this.currentTrackId === -1) {
10940 this.clearInterval();
10941 return;
10942 } // Check if track has the necessary details to load fragments
10943
10944
10945 var currentTrack = this.levels[this.currentTrackId];
10946
10947 if (currentTrack !== null && currentTrack !== void 0 && currentTrack.details) {
10948 this.mediaBuffer = this.mediaBufferTimeRanges;
10949 } else {
10950 this.mediaBuffer = null;
10951 }
10952
10953 if (currentTrack) {
10954 this.setInterval(TICK_INTERVAL);
10955 }
10956 } // Got a new set of subtitle fragments.
10957 ;
10958
10959 _proto.onSubtitleTrackLoaded = function onSubtitleTrackLoaded(event, data) {
10960 var _track$details;
10961
10962 var newDetails = data.details,
10963 trackId = data.id;
10964 var currentTrackId = this.currentTrackId,
10965 levels = this.levels;
10966
10967 if (!levels.length) {
10968 return;
10969 }
10970
10971 var track = levels[currentTrackId];
10972
10973 if (trackId >= levels.length || trackId !== currentTrackId || !track) {
10974 return;
10975 }
10976
10977 this.mediaBuffer = this.mediaBufferTimeRanges;
10978
10979 if (newDetails.live || (_track$details = track.details) !== null && _track$details !== void 0 && _track$details.live) {
10980 var mainDetails = this.mainDetails;
10981
10982 if (newDetails.deltaUpdateFailed || !mainDetails) {
10983 return;
10984 }
10985
10986 var mainSlidingStartFragment = mainDetails.fragments[0];
10987
10988 if (!track.details) {
10989 if (newDetails.hasProgramDateTime && mainDetails.hasProgramDateTime) {
10990 Object(_utils_discontinuities__WEBPACK_IMPORTED_MODULE_3__["alignMediaPlaylistByPDT"])(newDetails, mainDetails);
10991 } else if (mainSlidingStartFragment) {
10992 // line up live playlist with main so that fragments in range are loaded
10993 Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["addSliding"])(newDetails, mainSlidingStartFragment.start);
10994 }
10995 } else {
10996 var sliding = this.alignPlaylists(newDetails, track.details);
10997
10998 if (sliding === 0 && mainSlidingStartFragment) {
10999 // realign with main when there is no overlap with last refresh
11000 Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["addSliding"])(newDetails, mainSlidingStartFragment.start);
11001 }
11002 }
11003 }
11004
11005 track.details = newDetails;
11006 this.levelLastLoaded = trackId; // trigger handler right now
11007
11008 this.tick(); // If playlist is misaligned because of bad PDT or drift, delete details to resync with main on reload
11009
11010 if (newDetails.live && !this.fragCurrent && this.media && this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE) {
11011 var foundFrag = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_2__["findFragmentByPTS"])(null, newDetails.fragments, this.media.currentTime, 0);
11012
11013 if (!foundFrag) {
11014 this.warn('Subtitle playlist not aligned with playback');
11015 track.details = undefined;
11016 }
11017 }
11018 };
11019
11020 _proto._handleFragmentLoadComplete = function _handleFragmentLoadComplete(fragLoadedData) {
11021 var frag = fragLoadedData.frag,
11022 payload = fragLoadedData.payload;
11023 var decryptData = frag.decryptdata;
11024 var hls = this.hls;
11025
11026 if (this.fragContextChanged(frag)) {
11027 return;
11028 } // check to see if the payload needs to be decrypted
11029
11030
11031 if (payload && payload.byteLength > 0 && decryptData && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
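      // AES-128 encrypted subtitle payloads are decrypted via WebCrypto before FRAG_DECRYPTED is emitted with the decrypted data.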
11032 var startTime = performance.now(); // decrypt the subtitles
11033
11034 this.decrypter.webCryptoDecrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).then(function (decryptedData) {
11035 var endTime = performance.now();
11036 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_DECRYPTED, {
11037 frag: frag,
11038 payload: decryptedData,
11039 stats: {
11040 tstart: startTime,
11041 tdecrypt: endTime
11042 }
11043 });
11044 });
11045 }
11046 };
11047
11048 _proto.doTick = function doTick() {
11049 if (!this.media) {
11050 this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE;
11051 return;
11052 }
11053
11054 if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE) {
11055 var _foundFrag;
11056
11057 var currentTrackId = this.currentTrackId,
11058 levels = this.levels;
11059
11060 if (!levels.length || !levels[currentTrackId] || !levels[currentTrackId].details) {
11061 return;
11062 } // Expand range of subs loaded by one target-duration in either direction to make up for misaligned playlists
11063
11064
11065 var trackDetails = levels[currentTrackId].details;
11066 var targetDuration = trackDetails.targetduration;
11067 var config = this.config,
11068 media = this.media;
11069 var bufferedInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_1__["BufferHelper"].bufferedInfo(this.mediaBufferTimeRanges, media.currentTime - targetDuration, config.maxBufferHole);
11070 var targetBufferTime = bufferedInfo.end,
11071 bufferLen = bufferedInfo.len;
11072 var maxBufLen = this.getMaxBufferLength() + targetDuration;
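      // Stop loading once enough subtitle data is buffered ahead (max buffer length plus one target duration of slack).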
11073
11074 if (bufferLen > maxBufLen) {
11075 return;
11076 }
11077
11078 console.assert(trackDetails, 'Subtitle track details are defined on idle subtitle stream controller tick');
11079 var fragments = trackDetails.fragments;
11080 var fragLen = fragments.length;
11081 var end = trackDetails.edge;
11082 var foundFrag;
11083 var fragPrevious = this.fragPrevious;
11084
11085 if (targetBufferTime < end) {
11086 var maxFragLookUpTolerance = config.maxFragLookUpTolerance;
11087 foundFrag = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_2__["findFragmentByPTS"])(fragPrevious, fragments, targetBufferTime, maxFragLookUpTolerance);
11088
11089 if (!foundFrag && fragPrevious && fragPrevious.start < fragments[0].start) {
11090 foundFrag = fragments[0];
11091 }
11092 } else {
11093 foundFrag = fragments[fragLen - 1];
11094 }
11095
11096 if ((_foundFrag = foundFrag) !== null && _foundFrag !== void 0 && _foundFrag.encrypted) {
11097 this.loadKey(foundFrag, trackDetails);
11098 } else if (foundFrag && this.fragmentTracker.getState(foundFrag) === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].NOT_LOADED) {
11099 // only load if fragment is not loaded
11100 this.loadFragment(foundFrag, trackDetails, targetBufferTime);
11101 }
11102 }
11103 };
11104
11105 _proto.loadFragment = function loadFragment(frag, levelDetails, targetBufferTime) {
11106 this.fragCurrent = frag;
11107
11108 _BaseStreamController.prototype.loadFragment.call(this, frag, levelDetails, targetBufferTime);
11109 };
11110
11111 _createClass(SubtitleStreamController, [{
11112 key: "mediaBufferTimeRanges",
11113 get: function get() {
11114 return this.tracksBuffered[this.currentTrackId] || [];
11115 }
11116 }]);
11117
11118 return SubtitleStreamController;
11119}(_base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["default"]);
11120
11121/***/ }),
11122
11123/***/ "./src/controller/subtitle-track-controller.ts":
11124/*!*****************************************************!*\
11125 !*** ./src/controller/subtitle-track-controller.ts ***!
11126 \*****************************************************/
11127/*! exports provided: default */
11128/***/ (function(module, __webpack_exports__, __webpack_require__) {
11129__webpack_require__.r(__webpack_exports__);
11130/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
11131/* harmony import */ var _utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/texttrack-utils */ "./src/utils/texttrack-utils.ts");
11132/* harmony import */ var _base_playlist_controller__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./base-playlist-controller */ "./src/controller/base-playlist-controller.ts");
11133/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
11134function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
11135
11136function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
11137
11138function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
11139
11140function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
11141
11142
11143
11144
11145
11146
11147var SubtitleTrackController = /*#__PURE__*/function (_BasePlaylistControll) {
11148 _inheritsLoose(SubtitleTrackController, _BasePlaylistControll);
11149
11150 // Enable/disable subtitle display rendering
11151 function SubtitleTrackController(hls) {
11152 var _this;
11153
11154 _this = _BasePlaylistControll.call(this, hls, '[subtitle-track-controller]') || this;
11155 _this.media = null;
11156 _this.tracks = [];
11157 _this.groupId = null;
11158 _this.tracksInGroup = [];
11159 _this.trackId = -1;
11160 _this.selectDefaultTrack = true;
11161 _this.queuedDefaultTrack = -1;
11162
11163 _this.trackChangeListener = function () {
11164 return _this.onTextTracksChanged();
11165 };
11166
11167 _this.asyncPollTrackChange = function () {
11168 return _this.pollTrackChange(0);
11169 };
11170
11171 _this.useTextTrackPolling = false;
11172 _this.subtitlePollingInterval = -1;
11173 _this.subtitleDisplay = true;
11174
11175 _this.registerListeners();
11176
11177 return _this;
11178 }
11179
11180 var _proto = SubtitleTrackController.prototype;
11181
11182 _proto.destroy = function destroy() {
11183 this.unregisterListeners();
11184 this.tracks.length = 0;
11185 this.tracksInGroup.length = 0;
11186 this.trackChangeListener = this.asyncPollTrackChange = null;
11187
11188 _BasePlaylistControll.prototype.destroy.call(this);
11189 };
11190
11191 _proto.registerListeners = function registerListeners() {
11192 var hls = this.hls;
11193 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
11194 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
11195 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
11196 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
11197 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
11198 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_SWITCHING, this.onLevelSwitching, this);
11199 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
11200 hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
11201 };
11202
11203 _proto.unregisterListeners = function unregisterListeners() {
11204 var hls = this.hls;
11205 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
11206 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
11207 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
11208 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
11209 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
11210 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_SWITCHING, this.onLevelSwitching, this);
11211 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
11212 hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
11213 } // Listen for subtitle track change, then extract the current track ID.
11214 ;
11215
11216 _proto.onMediaAttached = function onMediaAttached(event, data) {
11217 this.media = data.media;
11218
11219 if (!this.media) {
11220 return;
11221 }
11222
11223 if (this.queuedDefaultTrack > -1) {
11224 this.subtitleTrack = this.queuedDefaultTrack;
11225 this.queuedDefaultTrack = -1;
11226 }
11227
11228 this.useTextTrackPolling = !(this.media.textTracks && 'onchange' in this.media.textTracks);
11229
11230 if (this.useTextTrackPolling) {
11231 this.pollTrackChange(500);
11232 } else {
11233 this.media.textTracks.addEventListener('change', this.asyncPollTrackChange);
11234 }
11235 };
11236
11237 _proto.pollTrackChange = function pollTrackChange(timeout) {
11238 self.clearInterval(this.subtitlePollingInterval);
11239 this.subtitlePollingInterval = self.setInterval(this.trackChangeListener, timeout);
11240 };
11241
11242 _proto.onMediaDetaching = function onMediaDetaching() {
11243 if (!this.media) {
11244 return;
11245 }
11246
11247 self.clearInterval(this.subtitlePollingInterval);
11248
11249 if (!this.useTextTrackPolling) {
11250 this.media.textTracks.removeEventListener('change', this.asyncPollTrackChange);
11251 }
11252
11253 if (this.trackId > -1) {
11254 this.queuedDefaultTrack = this.trackId;
11255 }
11256
11257 var textTracks = filterSubtitleTracks(this.media.textTracks); // Clear loaded cues on media detachment from tracks
11258
11259 textTracks.forEach(function (track) {
11260 Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__["clearCurrentCues"])(track);
11261 }); // Disable all subtitle tracks before detachment so when reattached only tracks in that content are enabled.
11262
11263 this.subtitleTrack = -1;
11264 this.media = null;
11265 };
11266
11267 _proto.onManifestLoading = function onManifestLoading() {
11268 this.tracks = [];
11269 this.groupId = null;
11270 this.tracksInGroup = [];
11271 this.trackId = -1;
11272 this.selectDefaultTrack = true;
11273 } // Fired whenever a new manifest is loaded.
11274 ;
11275
11276 _proto.onManifestParsed = function onManifestParsed(event, data) {
11277 this.tracks = data.subtitleTracks;
11278 };
11279
11280 _proto.onSubtitleTrackLoaded = function onSubtitleTrackLoaded(event, data) {
11281 var id = data.id,
11282 details = data.details;
11283 var trackId = this.trackId;
11284 var currentTrack = this.tracksInGroup[trackId];
11285
11286 if (!currentTrack) {
11287 this.warn("Invalid subtitle track id " + id);
11288 return;
11289 }
11290
11291 var curDetails = currentTrack.details;
11292 currentTrack.details = data.details;
11293 this.log("subtitle track " + id + " loaded [" + details.startSN + "-" + details.endSN + "]");
11294
11295 if (id === this.trackId) {
11296 this.retryCount = 0;
11297 this.playlistLoaded(id, data, curDetails);
11298 }
11299 };
11300
11301 _proto.onLevelLoading = function onLevelLoading(event, data) {
11302 this.switchLevel(data.level);
11303 };
11304
11305 _proto.onLevelSwitching = function onLevelSwitching(event, data) {
11306 this.switchLevel(data.level);
11307 };
11308
11309 _proto.switchLevel = function switchLevel(levelIndex) {
11310 var levelInfo = this.hls.levels[levelIndex];
11311
11312 if (!(levelInfo !== null && levelInfo !== void 0 && levelInfo.textGroupIds)) {
11313 return;
11314 }
11315
11316 var textGroupId = levelInfo.textGroupIds[levelInfo.urlId];
11317
11318 if (this.groupId !== textGroupId) {
11319 var lastTrack = this.tracksInGroup ? this.tracksInGroup[this.trackId] : undefined;
11320 var subtitleTracks = this.tracks.filter(function (track) {
11321 return !textGroupId || track.groupId === textGroupId;
11322 });
11323 this.tracksInGroup = subtitleTracks;
11324 var initialTrackId = this.findTrackId(lastTrack === null || lastTrack === void 0 ? void 0 : lastTrack.name) || this.findTrackId();
11325 this.groupId = textGroupId;
11326 var subtitleTracksUpdated = {
11327 subtitleTracks: subtitleTracks
11328 };
11329 this.log("Updating subtitle tracks, " + subtitleTracks.length + " track(s) found in \"" + textGroupId + "\" group-id");
11330 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACKS_UPDATED, subtitleTracksUpdated);
11331
11332 if (initialTrackId !== -1) {
11333 this.setSubtitleTrack(initialTrackId, lastTrack);
11334 }
11335 }
11336 };
11337
11338 _proto.findTrackId = function findTrackId(name) {
11339 var textTracks = this.tracksInGroup;
11340
11341 for (var i = 0; i < textTracks.length; i++) {
11342 var track = textTracks[i];
11343
11344 if (!this.selectDefaultTrack || track.default) {
11345 if (!name || name === track.name) {
11346 return track.id;
11347 }
11348 }
11349 }
11350
11351 return -1;
11352 };
11353
11354 _proto.onError = function onError(event, data) {
11355 _BasePlaylistControll.prototype.onError.call(this, event, data);
11356
11357 if (data.fatal || !data.context) {
11358 return;
11359 }
11360
11361 if (data.context.type === _types_loader__WEBPACK_IMPORTED_MODULE_3__["PlaylistContextType"].SUBTITLE_TRACK && data.context.id === this.trackId && data.context.groupId === this.groupId) {
11362 this.retryLoadingOrFail(data);
11363 }
11364 }
11365  /** get the alternate subtitle track list from the playlist **/
11366 ;
11367
11368 _proto.loadPlaylist = function loadPlaylist(hlsUrlParameters) {
11369 var currentTrack = this.tracksInGroup[this.trackId];
11370
11371 if (this.shouldLoadTrack(currentTrack)) {
11372 var id = currentTrack.id;
11373 var groupId = currentTrack.groupId;
11374 var url = currentTrack.url;
11375
11376 if (hlsUrlParameters) {
11377 try {
11378 url = hlsUrlParameters.addDirectives(url);
11379 } catch (error) {
11380 this.warn("Could not construct new URL with HLS Delivery Directives: " + error);
11381 }
11382 }
11383
11384 this.log("Loading subtitle playlist for id " + id);
11385 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_LOADING, {
11386 url: url,
11387 id: id,
11388 groupId: groupId,
11389 deliveryDirectives: hlsUrlParameters || null
11390 });
11391 }
11392 }
11393 /**
11394 * Disables the old subtitleTrack and sets current mode on the next subtitleTrack.
11395 * This operates on the DOM textTracks.
11396 * A value of -1 will disable all subtitle tracks.
11397 */
11398 ;
11399
11400 _proto.toggleTrackModes = function toggleTrackModes(newId) {
11401 var _this2 = this;
11402
11403 var media = this.media,
11404 subtitleDisplay = this.subtitleDisplay,
11405 trackId = this.trackId;
11406
11407 if (!media) {
11408 return;
11409 }
11410
11411 var textTracks = filterSubtitleTracks(media.textTracks);
11412 var groupTracks = textTracks.filter(function (track) {
11413 return track.groupId === _this2.groupId;
11414 });
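    // Only TextTracks belonging to the current subtitle group are toggled; newId and trackId index into this filtered list.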
11415
11416 if (newId === -1) {
11417 [].slice.call(textTracks).forEach(function (track) {
11418 track.mode = 'disabled';
11419 });
11420 } else {
11421 var oldTrack = groupTracks[trackId];
11422
11423 if (oldTrack) {
11424 oldTrack.mode = 'disabled';
11425 }
11426 }
11427
11428 var nextTrack = groupTracks[newId];
11429
11430 if (nextTrack) {
11431 nextTrack.mode = subtitleDisplay ? 'showing' : 'hidden';
11432 }
11433 }
11434 /**
11435 * This method is responsible for validating the subtitle index and periodically reloading if live.
11436 * Dispatches the SUBTITLE_TRACK_SWITCH event, which instructs the subtitle-stream-controller to load the selected track.
11437 */
11438 ;
11439
11440 _proto.setSubtitleTrack = function setSubtitleTrack(newId, lastTrack) {
11441 var _tracks$newId;
11442
11443 var tracks = this.tracksInGroup; // setting this.subtitleTrack will trigger internal logic
11444 // if media has not been attached yet, it will fail
11445 // we keep a reference to the default track id
11446 // and we'll set subtitleTrack when onMediaAttached is triggered
11447
11448 if (!this.media) {
11449 this.queuedDefaultTrack = newId;
11450 return;
11451 }
11452
11453 if (this.trackId !== newId) {
11454 this.toggleTrackModes(newId);
11455 } // exit if track id is already set or invalid
11456
11457
11458 if (this.trackId === newId && (newId === -1 || (_tracks$newId = tracks[newId]) !== null && _tracks$newId !== void 0 && _tracks$newId.details) || newId < -1 || newId >= tracks.length) {
11459 return;
11460 } // stopping live reloading timer if any
11461
11462
11463 this.clearTimer();
11464 var track = tracks[newId];
11465 this.log("Switching to subtitle track " + newId);
11466 this.trackId = newId;
11467
11468 if (track) {
11469 var id = track.id,
11470 _track$groupId = track.groupId,
11471 groupId = _track$groupId === void 0 ? '' : _track$groupId,
11472 name = track.name,
11473 type = track.type,
11474 url = track.url;
11475 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_SWITCH, {
11476 id: id,
11477 groupId: groupId,
11478 name: name,
11479 type: type,
11480 url: url
11481 });
11482 var hlsUrlParameters = this.switchParams(track.url, lastTrack === null || lastTrack === void 0 ? void 0 : lastTrack.details);
11483 this.loadPlaylist(hlsUrlParameters);
11484 } else {
11485 // switch to -1
11486 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_SWITCH, {
11487 id: newId
11488 });
11489 }
11490 };
11491
11492 _proto.onTextTracksChanged = function onTextTracksChanged() {
11493 if (!this.useTextTrackPolling) {
11494 self.clearInterval(this.subtitlePollingInterval);
11495 } // Media is undefined when switching streams via loadSource()
11496
11497
11498 if (!this.media || !this.hls.config.renderTextTracksNatively) {
11499 return;
11500 }
11501
11502 var trackId = -1;
11503 var tracks = filterSubtitleTracks(this.media.textTracks);
11504
11505 for (var id = 0; id < tracks.length; id++) {
11506 if (tracks[id].mode === 'hidden') {
11507 // Do not break here, in case a following track has mode 'showing'.
11508 trackId = id;
11509 } else if (tracks[id].mode === 'showing') {
11510 trackId = id;
11511 break;
11512 }
11513 } // Setting current subtitleTrack will invoke code.
11514
11515
11516 if (this.subtitleTrack !== trackId) {
11517 this.subtitleTrack = trackId;
11518 }
11519 };
11520
11521 _createClass(SubtitleTrackController, [{
11522 key: "subtitleTracks",
11523 get: function get() {
11524 return this.tracksInGroup;
11525 }
11526 /** get/set index of the selected subtitle track (based on index in subtitle track lists) **/
11527
11528 }, {
11529 key: "subtitleTrack",
11530 get: function get() {
11531 return this.trackId;
11532 },
11533 set: function set(newId) {
11534 this.selectDefaultTrack = false;
11535 var lastTrack = this.tracksInGroup ? this.tracksInGroup[this.trackId] : undefined;
11536 this.setSubtitleTrack(newId, lastTrack);
11537 }
11538 }]);
11539
11540 return SubtitleTrackController;
11541}(_base_playlist_controller__WEBPACK_IMPORTED_MODULE_2__["default"]);
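// Usage sketch (illustrative, not part of the module): the subtitleTrack getter/setter defined
// above backs the public hls.js API, so application code can switch renditions like this,
// assuming an `hls` instance whose manifest exposes subtitle tracks:
//
//   hls.on(Hls.Events.SUBTITLE_TRACKS_UPDATED, function (event, data) {
//     console.log(data.subtitleTracks.length + ' subtitle track(s) available');
//     hls.subtitleTrack = 0; // select the first track; -1 disables subtitles
//   });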
11542
11543function filterSubtitleTracks(textTrackList) {
11544 var tracks = [];
11545
11546 for (var i = 0; i < textTrackList.length; i++) {
11547 var track = textTrackList[i]; // Edge adds a track without a label; we don't want to use it
11548
11549 if (track.kind === 'subtitles' && track.label) {
11550 tracks.push(textTrackList[i]);
11551 }
11552 }
11553
11554 return tracks;
11555}
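// Illustration: filterSubtitleTracks keeps only labelled tracks of kind 'subtitles'. Given a
// TextTrackList containing {kind: 'subtitles', label: 'English'}, {kind: 'captions', label: 'CC1'}
// and {kind: 'subtitles', label: ''}, only the first entry survives; caption tracks and the
// unlabelled track Edge adds are dropped.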
11556
11557/* harmony default export */ __webpack_exports__["default"] = (SubtitleTrackController);
11558
11559/***/ }),
11560
11561/***/ "./src/controller/timeline-controller.ts":
11562/*!***********************************************!*\
11563 !*** ./src/controller/timeline-controller.ts ***!
11564 \***********************************************/
11565/*! exports provided: TimelineController */
11566/***/ (function(module, __webpack_exports__, __webpack_require__) {
11567__webpack_require__.r(__webpack_exports__);
11568/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "TimelineController", function() { return TimelineController; });
11569/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
11570/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
11571/* harmony import */ var _utils_cea_608_parser__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/cea-608-parser */ "./src/utils/cea-608-parser.ts");
11572/* harmony import */ var _utils_output_filter__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/output-filter */ "./src/utils/output-filter.ts");
11573/* harmony import */ var _utils_webvtt_parser__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/webvtt-parser */ "./src/utils/webvtt-parser.ts");
11574/* harmony import */ var _utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/texttrack-utils */ "./src/utils/texttrack-utils.ts");
11575/* harmony import */ var _utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../utils/imsc1-ttml-parser */ "./src/utils/imsc1-ttml-parser.ts");
11576/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
11577/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
11578
11579
11580
11581
11582
11583
11584
11585
11586
11587
11588var TimelineController = /*#__PURE__*/function () {
11589 function TimelineController(hls) {
11590 this.hls = void 0;
11591 this.media = null;
11592 this.config = void 0;
11593 this.enabled = true;
11594 this.Cues = void 0;
11595 this.textTracks = [];
11596 this.tracks = [];
11597 this.initPTS = [];
11598 this.timescale = [];
11599 this.unparsedVttFrags = [];
11600 this.captionsTracks = {};
11601 this.nonNativeCaptionsTracks = {};
11602 this.cea608Parser1 = void 0;
11603 this.cea608Parser2 = void 0;
11604 this.lastSn = -1;
11605 this.lastPartIndex = -1;
11606 this.prevCC = -1;
11607 this.vttCCs = newVTTCCs();
11608 this.captionsProperties = void 0;
11609 this.hls = hls;
11610 this.config = hls.config;
11611 this.Cues = hls.config.cueHandler;
11612 this.captionsProperties = {
11613 textTrack1: {
11614 label: this.config.captionsTextTrack1Label,
11615 languageCode: this.config.captionsTextTrack1LanguageCode
11616 },
11617 textTrack2: {
11618 label: this.config.captionsTextTrack2Label,
11619 languageCode: this.config.captionsTextTrack2LanguageCode
11620 },
11621 textTrack3: {
11622 label: this.config.captionsTextTrack3Label,
11623 languageCode: this.config.captionsTextTrack3LanguageCode
11624 },
11625 textTrack4: {
11626 label: this.config.captionsTextTrack4Label,
11627 languageCode: this.config.captionsTextTrack4LanguageCode
11628 }
11629 };
11630
11631 if (this.config.enableCEA708Captions) {
11632 var channel1 = new _utils_output_filter__WEBPACK_IMPORTED_MODULE_3__["default"](this, 'textTrack1');
11633 var channel2 = new _utils_output_filter__WEBPACK_IMPORTED_MODULE_3__["default"](this, 'textTrack2');
11634 var channel3 = new _utils_output_filter__WEBPACK_IMPORTED_MODULE_3__["default"](this, 'textTrack3');
11635 var channel4 = new _utils_output_filter__WEBPACK_IMPORTED_MODULE_3__["default"](this, 'textTrack4');
11636 this.cea608Parser1 = new _utils_cea_608_parser__WEBPACK_IMPORTED_MODULE_2__["default"](1, channel1, channel2);
11637 this.cea608Parser2 = new _utils_cea_608_parser__WEBPACK_IMPORTED_MODULE_2__["default"](3, channel3, channel4);
11638 }
11639
11640 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
11641 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
11642 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
11643 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, this.onManifestLoaded, this);
11644 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
11645 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADING, this.onFragLoading, this);
11646 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, this.onFragLoaded, this);
11647 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_PARSING_USERDATA, this.onFragParsingUserdata, this);
11648 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_DECRYPTED, this.onFragDecrypted, this);
11649 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].INIT_PTS_FOUND, this.onInitPtsFound, this);
11650 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACKS_CLEARED, this.onSubtitleTracksCleared, this);
11651 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
11652 }
11653
11654 var _proto = TimelineController.prototype;
11655
11656 _proto.destroy = function destroy() {
11657 var hls = this.hls;
11658 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
11659 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
11660 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
11661 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, this.onManifestLoaded, this);
11662 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
11663 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADING, this.onFragLoading, this);
11664 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, this.onFragLoaded, this);
11665 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_PARSING_USERDATA, this.onFragParsingUserdata, this);
11666 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_DECRYPTED, this.onFragDecrypted, this);
11667 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].INIT_PTS_FOUND, this.onInitPtsFound, this);
11668 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACKS_CLEARED, this.onSubtitleTracksCleared, this);
11669 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this); // @ts-ignore
11670
11671 this.hls = this.config = this.cea608Parser1 = this.cea608Parser2 = null;
11672 };
11673
11674 _proto.addCues = function addCues(trackName, startTime, endTime, screen, cueRanges) {
11675 // skip cues which overlap more than 50% with previously parsed time ranges
11676 var merged = false;
11677
11678 for (var i = cueRanges.length; i--;) {
11679 var cueRange = cueRanges[i];
11680 var overlap = intersection(cueRange[0], cueRange[1], startTime, endTime);
11681
11682 if (overlap >= 0) {
11683 cueRange[0] = Math.min(cueRange[0], startTime);
11684 cueRange[1] = Math.max(cueRange[1], endTime);
11685 merged = true;
11686
11687 if (overlap / (endTime - startTime) > 0.5) {
11688 return;
11689 }
11690 }
11691 }
11692
11693 if (!merged) {
11694 cueRanges.push([startTime, endTime]);
11695 }
11696
11697 if (this.config.renderTextTracksNatively) {
11698 var track = this.captionsTracks[trackName];
11699 this.Cues.newCue(track, startTime, endTime, screen);
11700 } else {
11701 var cues = this.Cues.newCue(null, startTime, endTime, screen);
11702 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].CUES_PARSED, {
11703 type: 'captions',
11704 cues: cues,
11705 track: trackName
11706 });
11707 }
11708 } // Triggered when an initial PTS is found; used for synchronisation of WebVTT.
11709 ;
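// Worked example for the overlap handling above (illustrative numbers): with cueRanges = [[0, 10]],
// a cue spanning [8, 12] overlaps by 2s, the range is widened to [0, 12], and since
// 2 / (12 - 8) = 0.5 is not > 0.5 the cue is still appended. A second cue spanning [2, 6] then
// overlaps by 4s; 4 / (6 - 2) = 1 > 0.5, so it is treated as a duplicate and skipped.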
11710
11711 _proto.onInitPtsFound = function onInitPtsFound(event, _ref) {
11712 var _this = this;
11713
11714 var frag = _ref.frag,
11715 id = _ref.id,
11716 initPTS = _ref.initPTS,
11717 timescale = _ref.timescale;
11718 var unparsedVttFrags = this.unparsedVttFrags;
11719
11720 if (id === 'main') {
11721 this.initPTS[frag.cc] = initPTS;
11722 this.timescale[frag.cc] = timescale;
11723 } // Due to asynchronous processing, initial PTS may arrive later than the first VTT fragments are loaded.
11724 // Parse any unparsed fragments upon receiving the initial PTS.
11725
11726
11727 if (unparsedVttFrags.length) {
11728 this.unparsedVttFrags = [];
11729 unparsedVttFrags.forEach(function (frag) {
11730 _this.onFragLoaded(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, frag);
11731 });
11732 }
11733 };
11734
11735 _proto.getExistingTrack = function getExistingTrack(trackName) {
11736 var media = this.media;
11737
11738 if (media) {
11739 for (var i = 0; i < media.textTracks.length; i++) {
11740 var textTrack = media.textTracks[i];
11741
11742 if (textTrack[trackName]) {
11743 return textTrack;
11744 }
11745 }
11746 }
11747
11748 return null;
11749 };
11750
11751 _proto.createCaptionsTrack = function createCaptionsTrack(trackName) {
11752 if (this.config.renderTextTracksNatively) {
11753 this.createNativeTrack(trackName);
11754 } else {
11755 this.createNonNativeTrack(trackName);
11756 }
11757 };
11758
11759 _proto.createNativeTrack = function createNativeTrack(trackName) {
11760 if (this.captionsTracks[trackName]) {
11761 return;
11762 }
11763
11764 var captionsProperties = this.captionsProperties,
11765 captionsTracks = this.captionsTracks,
11766 media = this.media;
11767 var _captionsProperties$t = captionsProperties[trackName],
11768 label = _captionsProperties$t.label,
11769 languageCode = _captionsProperties$t.languageCode; // Enable reuse of existing text track.
11770
11771 var existingTrack = this.getExistingTrack(trackName);
11772
11773 if (!existingTrack) {
11774 var textTrack = this.createTextTrack('captions', label, languageCode);
11775
11776 if (textTrack) {
11777 // Set a special property on the track so we know it's managed by Hls.js
11778 textTrack[trackName] = true;
11779 captionsTracks[trackName] = textTrack;
11780 }
11781 } else {
11782 captionsTracks[trackName] = existingTrack;
11783 Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["clearCurrentCues"])(captionsTracks[trackName]);
11784 Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["sendAddTrackEvent"])(captionsTracks[trackName], media);
11785 }
11786 };
11787
11788 _proto.createNonNativeTrack = function createNonNativeTrack(trackName) {
11789 if (this.nonNativeCaptionsTracks[trackName]) {
11790 return;
11791 } // Create a list of a single track for the provider to consume
11792
11793
11794 var trackProperties = this.captionsProperties[trackName];
11795
11796 if (!trackProperties) {
11797 return;
11798 }
11799
11800 var label = trackProperties.label;
11801 var track = {
11802 _id: trackName,
11803 label: label,
11804 kind: 'captions',
11805 default: trackProperties.media ? !!trackProperties.media.default : false,
11806 closedCaptions: trackProperties.media
11807 };
11808 this.nonNativeCaptionsTracks[trackName] = track;
11809 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].NON_NATIVE_TEXT_TRACKS_FOUND, {
11810 tracks: [track]
11811 });
11812 };
11813
11814 _proto.createTextTrack = function createTextTrack(kind, label, lang) {
11815 var media = this.media;
11816
11817 if (!media) {
11818 return;
11819 }
11820
11821 return media.addTextTrack(kind, label, lang);
11822 };
11823
11824 _proto.onMediaAttaching = function onMediaAttaching(event, data) {
11825 this.media = data.media;
11826
11827 this._cleanTracks();
11828 };
11829
11830 _proto.onMediaDetaching = function onMediaDetaching() {
11831 var captionsTracks = this.captionsTracks;
11832 Object.keys(captionsTracks).forEach(function (trackName) {
11833 Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["clearCurrentCues"])(captionsTracks[trackName]);
11834 delete captionsTracks[trackName];
11835 });
11836 this.nonNativeCaptionsTracks = {};
11837 };
11838
11839 _proto.onManifestLoading = function onManifestLoading() {
11840 this.lastSn = -1; // Detect discontinuity in fragment parsing
11841
11842 this.lastPartIndex = -1;
11843 this.prevCC = -1;
11844 this.vttCCs = newVTTCCs(); // Detect discontinuity in subtitle manifests
11845
11846 this._cleanTracks();
11847
11848 this.tracks = [];
11849 this.captionsTracks = {};
11850 this.nonNativeCaptionsTracks = {};
11851 this.textTracks = [];
11852 this.unparsedVttFrags = this.unparsedVttFrags || [];
11853 this.initPTS = [];
11854 this.timescale = [];
11855
11856 if (this.cea608Parser1 && this.cea608Parser2) {
11857 this.cea608Parser1.reset();
11858 this.cea608Parser2.reset();
11859 }
11860 };
11861
11862 _proto._cleanTracks = function _cleanTracks() {
11863 // clear outdated subtitles
11864 var media = this.media;
11865
11866 if (!media) {
11867 return;
11868 }
11869
11870 var textTracks = media.textTracks;
11871
11872 if (textTracks) {
11873 for (var i = 0; i < textTracks.length; i++) {
11874 Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["clearCurrentCues"])(textTracks[i]);
11875 }
11876 }
11877 };
11878
11879 _proto.onSubtitleTracksUpdated = function onSubtitleTracksUpdated(event, data) {
11880 var _this2 = this;
11881
11882 this.textTracks = [];
11883 var tracks = data.subtitleTracks || [];
11884 var hasIMSC1 = tracks.some(function (track) {
11885 return track.textCodec === _utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__["IMSC1_CODEC"];
11886 });
11887
11888 if (this.config.enableWebVTT || hasIMSC1 && this.config.enableIMSC1) {
11889 var sameTracks = this.tracks && tracks && this.tracks.length === tracks.length;
11890 this.tracks = tracks || [];
11891
11892 if (this.config.renderTextTracksNatively) {
11893 var inUseTracks = this.media ? this.media.textTracks : [];
11894 this.tracks.forEach(function (track, index) {
11895 var textTrack;
11896
11897 if (index < inUseTracks.length) {
11898 var inUseTrack = null;
11899
11900 for (var i = 0; i < inUseTracks.length; i++) {
11901 if (canReuseVttTextTrack(inUseTracks[i], track)) {
11902 inUseTrack = inUseTracks[i];
11903 break;
11904 }
11905 } // Reuse tracks with the same label, but do not reuse 608/708 tracks
11906
11907
11908 if (inUseTrack) {
11909 textTrack = inUseTrack;
11910 }
11911 }
11912
11913 if (textTrack) {
11914 Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["clearCurrentCues"])(textTrack);
11915 } else {
11916 textTrack = _this2.createTextTrack('subtitles', track.name, track.lang);
11917
11918 if (textTrack) {
11919 textTrack.mode = 'disabled';
11920 }
11921 }
11922
11923 if (textTrack) {
11924 textTrack.groupId = track.groupId;
11925
11926 _this2.textTracks.push(textTrack);
11927 }
11928 });
11929 } else if (!sameTracks && this.tracks && this.tracks.length) {
11930 // Create a list of tracks for the provider to consume
11931 var tracksList = this.tracks.map(function (track) {
11932 return {
11933 label: track.name,
11934 kind: track.type.toLowerCase(),
11935 default: track.default,
11936 subtitleTrack: track
11937 };
11938 });
11939 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].NON_NATIVE_TEXT_TRACKS_FOUND, {
11940 tracks: tracksList
11941 });
11942 }
11943 }
11944 };
11945
11946 _proto.onManifestLoaded = function onManifestLoaded(event, data) {
11947 var _this3 = this;
11948
11949 if (this.config.enableCEA708Captions && data.captions) {
11950 data.captions.forEach(function (captionsTrack) {
11951 var instreamIdMatch = /(?:CC|SERVICE)([1-4])/.exec(captionsTrack.instreamId);
11952
11953 if (!instreamIdMatch) {
11954 return;
11955 }
11956
11957 var trackName = "textTrack" + instreamIdMatch[1];
11958 var trackProperties = _this3.captionsProperties[trackName];
11959
11960 if (!trackProperties) {
11961 return;
11962 }
11963
11964 trackProperties.label = captionsTrack.name;
11965
11966 if (captionsTrack.lang) {
11967 // optional attribute
11968 trackProperties.languageCode = captionsTrack.lang;
11969 }
11970
11971 trackProperties.media = captionsTrack;
11972 });
11973 }
11974 };
11975
11976 _proto.onFragLoading = function onFragLoading(event, data) {
11977 var cea608Parser1 = this.cea608Parser1,
11978 cea608Parser2 = this.cea608Parser2,
11979 lastSn = this.lastSn,
11980 lastPartIndex = this.lastPartIndex;
11981
11982 if (!this.enabled || !(cea608Parser1 && cea608Parser2)) {
11983 return;
11984 } // if this frag isn't contiguous, clear the parser so cues with bad start/end times aren't added to the textTrack
11985
11986
11987 if (data.frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_7__["PlaylistLevelType"].MAIN) {
11988 var _data$part$index, _data$part;
11989
11990 var sn = data.frag.sn;
11991 var partIndex = (_data$part$index = data === null || data === void 0 ? void 0 : (_data$part = data.part) === null || _data$part === void 0 ? void 0 : _data$part.index) != null ? _data$part$index : -1;
11992
11993 if (!(sn === lastSn + 1 || sn === lastSn && partIndex === lastPartIndex + 1)) {
11994 cea608Parser1.reset();
11995 cea608Parser2.reset();
11996 }
11997
11998 this.lastSn = sn;
11999 this.lastPartIndex = partIndex;
12000 }
12001 };
12002
12003 _proto.onFragLoaded = function onFragLoaded(event, data) {
12004 var frag = data.frag,
12005 payload = data.payload;
12006 var initPTS = this.initPTS,
12007 unparsedVttFrags = this.unparsedVttFrags;
12008
12009 if (frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_7__["PlaylistLevelType"].SUBTITLE) {
12010 // If fragment is subtitle type, parse as WebVTT.
12011 if (payload.byteLength) {
12012 // We need an initial synchronisation PTS; store fragments until one has arrived.
12013 if (!Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(initPTS[frag.cc])) {
12014 unparsedVttFrags.push(data);
12015
12016 if (initPTS.length) {
12017 // finish unsuccessfully, otherwise the subtitle-stream-controller could be blocked from loading new frags.
12018 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
12019 success: false,
12020 frag: frag,
12021 error: new Error('Missing initial subtitle PTS')
12022 });
12023 }
12024
12025 return;
12026 }
12027
12028 var decryptData = frag.decryptdata; // fragment after decryption has a stats object
12029
12030 var decrypted = ('stats' in data); // If the subtitles are not encrypted, parse VTTs now. Otherwise, we need to wait.
12031
12032 if (decryptData == null || decryptData.key == null || decryptData.method !== 'AES-128' || decrypted) {
12033 var trackPlaylistMedia = this.tracks[frag.level];
12034 var vttCCs = this.vttCCs;
12035
12036 if (!vttCCs[frag.cc]) {
12037 vttCCs[frag.cc] = {
12038 start: frag.start,
12039 prevCC: this.prevCC,
12040 new: true
12041 };
12042 this.prevCC = frag.cc;
12043 }
12044
12045 if (trackPlaylistMedia && trackPlaylistMedia.textCodec === _utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__["IMSC1_CODEC"]) {
12046 this._parseIMSC1(frag, payload);
12047 } else {
12048 this._parseVTTs(frag, payload, vttCCs);
12049 }
12050 }
12051 } else {
12052 // In case there is no payload, finish unsuccessfully.
12053 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
12054 success: false,
12055 frag: frag,
12056 error: new Error('Empty subtitle payload')
12057 });
12058 }
12059 }
12060 };
12061
12062 _proto._parseIMSC1 = function _parseIMSC1(frag, payload) {
12063 var _this4 = this;
12064
12065 var hls = this.hls;
12066 Object(_utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__["parseIMSC1"])(payload, this.initPTS[frag.cc], this.timescale[frag.cc], function (cues) {
12067 _this4._appendCues(cues, frag.level);
12068
12069 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
12070 success: true,
12071 frag: frag
12072 });
12073 }, function (error) {
12074 _utils_logger__WEBPACK_IMPORTED_MODULE_8__["logger"].log("Failed to parse IMSC1: " + error);
12075 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
12076 success: false,
12077 frag: frag,
12078 error: error
12079 });
12080 });
12081 };
12082
12083 _proto._parseVTTs = function _parseVTTs(frag, payload, vttCCs) {
12084 var _this5 = this;
12085
12086 var hls = this.hls; // Parse the WebVTT file contents.
12087
12088 Object(_utils_webvtt_parser__WEBPACK_IMPORTED_MODULE_4__["parseWebVTT"])(payload, this.initPTS[frag.cc], this.timescale[frag.cc], vttCCs, frag.cc, frag.start, function (cues) {
12089 _this5._appendCues(cues, frag.level);
12090
12091 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
12092 success: true,
12093 frag: frag
12094 });
12095 }, function (error) {
12096 _this5._fallbackToIMSC1(frag, payload); // Something went wrong while parsing. Trigger event with success false.
12097
12098
12099 _utils_logger__WEBPACK_IMPORTED_MODULE_8__["logger"].log("Failed to parse VTT cue: " + error);
12100 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
12101 success: false,
12102 frag: frag,
12103 error: error
12104 });
12105 });
12106 };
12107
12108 _proto._fallbackToIMSC1 = function _fallbackToIMSC1(frag, payload) {
12109 var _this6 = this;
12110
12111 // If textCodec is unknown, try parsing as IMSC1. Set textCodec based on the result
12112 var trackPlaylistMedia = this.tracks[frag.level];
12113
12114 if (!trackPlaylistMedia.textCodec) {
12115 Object(_utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__["parseIMSC1"])(payload, this.initPTS[frag.cc], this.timescale[frag.cc], function () {
12116 trackPlaylistMedia.textCodec = _utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__["IMSC1_CODEC"];
12117
12118 _this6._parseIMSC1(frag, payload);
12119 }, function () {
12120 trackPlaylistMedia.textCodec = 'wvtt';
12121 });
12122 }
12123 };
12124
12125 _proto._appendCues = function _appendCues(cues, fragLevel) {
12126 var hls = this.hls;
12127
12128 if (this.config.renderTextTracksNatively) {
12129 var textTrack = this.textTracks[fragLevel]; // WebVTTParser.parse is asynchronous; if the selected text track's mode is switched to "disabled"
12130 // before parsing finishes, its `cues` list will be null,
12131 // and calling cues.getCueById at that point would throw an exception.
12132 // Because we return early when the mode is "disabled", `cues` can safely be treated as non-null below.
12133
12134 if (textTrack.mode === 'disabled') {
12135 return;
12136 }
12137
12138 cues.forEach(function (cue) {
12139 return Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["addCueToTrack"])(textTrack, cue);
12140 });
12141 } else {
12142 var currentTrack = this.tracks[fragLevel];
12143 var track = currentTrack.default ? 'default' : 'subtitles' + fragLevel;
12144 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].CUES_PARSED, {
12145 type: 'subtitles',
12146 cues: cues,
12147 track: track
12148 });
12149 }
12150 };
12151
12152 _proto.onFragDecrypted = function onFragDecrypted(event, data) {
12153 var frag = data.frag;
12154
12155 if (frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_7__["PlaylistLevelType"].SUBTITLE) {
12156 if (!Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this.initPTS[frag.cc])) {
12157 this.unparsedVttFrags.push(data);
12158 return;
12159 }
12160
12161 this.onFragLoaded(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, data);
12162 }
12163 };
12164
12165 _proto.onSubtitleTracksCleared = function onSubtitleTracksCleared() {
12166 this.tracks = [];
12167 this.captionsTracks = {};
12168 };
12169
12170 _proto.onFragParsingUserdata = function onFragParsingUserdata(event, data) {
12171 var cea608Parser1 = this.cea608Parser1,
12172 cea608Parser2 = this.cea608Parser2;
12173
12174 if (!this.enabled || !(cea608Parser1 && cea608Parser2)) {
12175 return;
12176 } // If the event contains captions (found in the bytes property), push all bytes into the parser immediately
12177 // It will create the proper timestamps based on the PTS value
12178
12179
12180 for (var i = 0; i < data.samples.length; i++) {
12181 var ccBytes = data.samples[i].bytes;
12182
12183 if (ccBytes) {
12184 var ccdatas = this.extractCea608Data(ccBytes);
12185 cea608Parser1.addData(data.samples[i].pts, ccdatas[0]);
12186 cea608Parser2.addData(data.samples[i].pts, ccdatas[1]);
12187 }
12188 }
12189 };
12190
12191 _proto.onBufferFlushing = function onBufferFlushing(event, _ref2) {
12192 var startOffset = _ref2.startOffset,
12193 endOffset = _ref2.endOffset,
12194 endOffsetSubtitles = _ref2.endOffsetSubtitles,
12195 type = _ref2.type;
12196 var media = this.media;
12197
12198 if (!media || media.currentTime < endOffset) {
12199 return;
12200 } // Clear 608 caption cues from the captions TextTracks when the video back buffer is flushed
12201 // Forward cues are never removed because we can lose streamed 608 content from recent fragments
12202
12203
12204 if (!type || type === 'video') {
12205 var captionsTracks = this.captionsTracks;
12206 Object.keys(captionsTracks).forEach(function (trackName) {
12207 return Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["removeCuesInRange"])(captionsTracks[trackName], startOffset, endOffset);
12208 });
12209 }
12210
12211 if (this.config.renderTextTracksNatively) {
12212 // Clear VTT/IMSC1 subtitle cues from the subtitle TextTracks when the back buffer is flushed
12213 if (startOffset === 0 && endOffsetSubtitles !== undefined) {
12214 var textTracks = this.textTracks;
12215 Object.keys(textTracks).forEach(function (trackName) {
12216 return Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["removeCuesInRange"])(textTracks[trackName], startOffset, endOffsetSubtitles);
12217 });
12218 }
12219 }
12220 };
12221
12222 _proto.extractCea608Data = function extractCea608Data(byteArray) {
12223 var count = byteArray[0] & 31;
12224 var position = 2;
12225 var actualCCBytes = [[], []];
12226
12227 for (var j = 0; j < count; j++) {
12228 var tmpByte = byteArray[position++];
12229 var ccbyte1 = 0x7f & byteArray[position++];
12230 var ccbyte2 = 0x7f & byteArray[position++];
12231 var ccValid = (4 & tmpByte) !== 0;
12232 var ccType = 3 & tmpByte;
12233
12234 if (ccbyte1 === 0 && ccbyte2 === 0) {
12235 continue;
12236 }
12237
12238 if (ccValid) {
12239 if (ccType === 0 || ccType === 1) {
12240 actualCCBytes[ccType].push(ccbyte1);
12241 actualCCBytes[ccType].push(ccbyte2);
12242 }
12243 }
12244 }
12245
12246 return actualCCBytes;
12247 };
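// Illustration of the cc_data layout consumed above (hypothetical bytes): the low 5 bits of byte 0
// give the triplet count, byte 1 is skipped, and each triplet is [flags, ccbyte1, ccbyte2] where
// flags bit 2 marks the pair valid and the low two bits select the 608 field (0 feeds
// cea608Parser1, 1 feeds cea608Parser2).
//
//   e.g. extractCea608Data(new Uint8Array([0x02, 0xff, 0xfc, 0x94, 0x2c, 0xfd, 0x80, 0x80]))
//   // -> [[0x14, 0x2c], []]   (one valid field-1 pair; the second triplet carries only padding)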
12248
12249 return TimelineController;
12250}();
12251
12252function canReuseVttTextTrack(inUseTrack, manifestTrack) {
12253 return inUseTrack && inUseTrack.label === manifestTrack.name && !(inUseTrack.textTrack1 || inUseTrack.textTrack2);
12254}
12255
12256function intersection(x1, x2, y1, y2) {
12257 return Math.min(x2, y2) - Math.max(x1, y1);
12258}
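// Worked example: intersection returns the length of the shared interval, negative when disjoint.
//   intersection(0, 5, 4, 10) === 1    // ranges share [4, 5]
//   intersection(0, 5, 7, 10) === -2   // ranges are 2s apart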
12259
12260function newVTTCCs() {
12261 return {
12262 ccOffset: 0,
12263 presentationOffset: 0,
12264 0: {
12265 start: 0,
12266 prevCC: -1,
12267 new: false
12268 }
12269 };
12270}
12271
12272/***/ }),
12273
12274/***/ "./src/crypt/aes-crypto.ts":
12275/*!*********************************!*\
12276 !*** ./src/crypt/aes-crypto.ts ***!
12277 \*********************************/
12278/*! exports provided: default */
12279/***/ (function(module, __webpack_exports__, __webpack_require__) {
12280__webpack_require__.r(__webpack_exports__);
12281/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return AESCrypto; });
12282var AESCrypto = /*#__PURE__*/function () {
12283 function AESCrypto(subtle, iv) {
12284 this.subtle = void 0;
12285 this.aesIV = void 0;
12286 this.subtle = subtle;
12287 this.aesIV = iv;
12288 }
12289
12290 var _proto = AESCrypto.prototype;
12291
12292 _proto.decrypt = function decrypt(data, key) {
12293 return this.subtle.decrypt({
12294 name: 'AES-CBC',
12295 iv: this.aesIV
12296 }, key, data);
12297 };
12298
12299 return AESCrypto;
12300}();
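// Usage sketch (illustrative): AESCrypto wraps a single WebCrypto AES-CBC decrypt call.
// `importedKey` is assumed to be a CryptoKey such as the one FastAESKey.expandKey() (further
// below) resolves with; `ivUint8` is the 16-byte initialization vector for the segment.
//
//   var aes = new AESCrypto(self.crypto.subtle, ivUint8);
//   aes.decrypt(encryptedArrayBuffer, importedKey).then(function (plain) {
//     // `plain` is an ArrayBuffer with the decrypted bytes
//   });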
12301
12302
12303
12304/***/ }),
12305
12306/***/ "./src/crypt/aes-decryptor.ts":
12307/*!************************************!*\
12308 !*** ./src/crypt/aes-decryptor.ts ***!
12309 \************************************/
12310/*! exports provided: removePadding, default */
12311/***/ (function(module, __webpack_exports__, __webpack_require__) {
12312__webpack_require__.r(__webpack_exports__);
12313/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "removePadding", function() { return removePadding; });
12314/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return AESDecryptor; });
12315/* harmony import */ var _utils_typed_array__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/typed-array */ "./src/utils/typed-array.ts");
12316 // PKCS7
12317
12318function removePadding(array) {
12319 var outputBytes = array.byteLength;
12320 var paddingBytes = outputBytes && new DataView(array.buffer).getUint8(outputBytes - 1);
12321
12322 if (paddingBytes) {
12323 return Object(_utils_typed_array__WEBPACK_IMPORTED_MODULE_0__["sliceUint8"])(array, 0, outputBytes - paddingBytes);
12324 }
12325
12326 return array;
12327}
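// Worked example (shortened block for illustration): with PKCS7 the last byte states how many
// padding bytes were appended, so they can simply be sliced off.
//   removePadding(new Uint8Array([0xde, 0xad, 0xbe, 0xef, 0x04, 0x04, 0x04, 0x04]))
//   // -> Uint8Array [0xde, 0xad, 0xbe, 0xef]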
12328
12329var AESDecryptor = /*#__PURE__*/function () {
12330 function AESDecryptor() {
12331 this.rcon = [0x0, 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36];
12332 this.subMix = [new Uint32Array(256), new Uint32Array(256), new Uint32Array(256), new Uint32Array(256)];
12333 this.invSubMix = [new Uint32Array(256), new Uint32Array(256), new Uint32Array(256), new Uint32Array(256)];
12334 this.sBox = new Uint32Array(256);
12335 this.invSBox = new Uint32Array(256);
12336 this.key = new Uint32Array(0);
12337 this.ksRows = 0;
12338 this.keySize = 0;
12339 this.keySchedule = void 0;
12340 this.invKeySchedule = void 0;
12341 this.initTable();
12342 } // Using view.getUint32() also swaps the byte order.
12343
12344
12345 var _proto = AESDecryptor.prototype;
12346
12347 _proto.uint8ArrayToUint32Array_ = function uint8ArrayToUint32Array_(arrayBuffer) {
12348 var view = new DataView(arrayBuffer);
12349 var newArray = new Uint32Array(4);
12350
12351 for (var i = 0; i < 4; i++) {
12352 newArray[i] = view.getUint32(i * 4);
12353 }
12354
12355 return newArray;
12356 };
12357
12358 _proto.initTable = function initTable() {
12359 var sBox = this.sBox;
12360 var invSBox = this.invSBox;
12361 var subMix = this.subMix;
12362 var subMix0 = subMix[0];
12363 var subMix1 = subMix[1];
12364 var subMix2 = subMix[2];
12365 var subMix3 = subMix[3];
12366 var invSubMix = this.invSubMix;
12367 var invSubMix0 = invSubMix[0];
12368 var invSubMix1 = invSubMix[1];
12369 var invSubMix2 = invSubMix[2];
12370 var invSubMix3 = invSubMix[3];
12371 var d = new Uint32Array(256);
12372 var x = 0;
12373 var xi = 0;
12374 var i = 0;
12375
12376 for (i = 0; i < 256; i++) {
12377 if (i < 128) {
12378 d[i] = i << 1;
12379 } else {
12380 d[i] = i << 1 ^ 0x11b;
12381 }
12382 }
12383
12384 for (i = 0; i < 256; i++) {
12385 var sx = xi ^ xi << 1 ^ xi << 2 ^ xi << 3 ^ xi << 4;
12386 sx = sx >>> 8 ^ sx & 0xff ^ 0x63;
12387 sBox[x] = sx;
12388 invSBox[sx] = x; // Compute multiplication
12389
12390 var x2 = d[x];
12391 var x4 = d[x2];
12392 var x8 = d[x4]; // Compute sub/invSub bytes, mix columns tables
12393
12394 var t = d[sx] * 0x101 ^ sx * 0x1010100;
12395 subMix0[x] = t << 24 | t >>> 8;
12396 subMix1[x] = t << 16 | t >>> 16;
12397 subMix2[x] = t << 8 | t >>> 24;
12398 subMix3[x] = t; // Compute inv sub bytes, inv mix columns tables
12399
12400 t = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
12401 invSubMix0[sx] = t << 24 | t >>> 8;
12402 invSubMix1[sx] = t << 16 | t >>> 16;
12403 invSubMix2[sx] = t << 8 | t >>> 24;
12404 invSubMix3[sx] = t; // Compute next counter
12405
12406 if (!x) {
12407 x = xi = 1;
12408 } else {
12409 x = x2 ^ d[d[d[x8 ^ x2]]];
12410 xi ^= d[d[xi]];
12411 }
12412 }
12413 };
12414
12415 _proto.expandKey = function expandKey(keyBuffer) {
12416 // convert keyBuffer to Uint32Array
12417 var key = this.uint8ArrayToUint32Array_(keyBuffer);
12418 var sameKey = true;
12419 var offset = 0;
12420
12421 while (offset < key.length && sameKey) {
12422 sameKey = key[offset] === this.key[offset];
12423 offset++;
12424 }
12425
12426 if (sameKey) {
12427 return;
12428 }
12429
12430 this.key = key;
12431 var keySize = this.keySize = key.length;
12432
12433 if (keySize !== 4 && keySize !== 6 && keySize !== 8) {
12434 throw new Error('Invalid aes key size=' + keySize);
12435 }
12436
12437 var ksRows = this.ksRows = (keySize + 6 + 1) * 4;
12438 var ksRow;
12439 var invKsRow;
12440 var keySchedule = this.keySchedule = new Uint32Array(ksRows);
12441 var invKeySchedule = this.invKeySchedule = new Uint32Array(ksRows);
12442 var sbox = this.sBox;
12443 var rcon = this.rcon;
12444 var invSubMix = this.invSubMix;
12445 var invSubMix0 = invSubMix[0];
12446 var invSubMix1 = invSubMix[1];
12447 var invSubMix2 = invSubMix[2];
12448 var invSubMix3 = invSubMix[3];
12449 var prev;
12450 var t;
12451
12452 for (ksRow = 0; ksRow < ksRows; ksRow++) {
12453 if (ksRow < keySize) {
12454 prev = keySchedule[ksRow] = key[ksRow];
12455 continue;
12456 }
12457
12458 t = prev;
12459
12460 if (ksRow % keySize === 0) {
12461 // Rot word
12462 t = t << 8 | t >>> 24; // Sub word
12463
12464 t = sbox[t >>> 24] << 24 | sbox[t >>> 16 & 0xff] << 16 | sbox[t >>> 8 & 0xff] << 8 | sbox[t & 0xff]; // Mix Rcon
12465
12466 t ^= rcon[ksRow / keySize | 0] << 24;
12467 } else if (keySize > 6 && ksRow % keySize === 4) {
12468 // Sub word
12469 t = sbox[t >>> 24] << 24 | sbox[t >>> 16 & 0xff] << 16 | sbox[t >>> 8 & 0xff] << 8 | sbox[t & 0xff];
12470 }
12471
12472 keySchedule[ksRow] = prev = (keySchedule[ksRow - keySize] ^ t) >>> 0;
12473 }
12474
12475 for (invKsRow = 0; invKsRow < ksRows; invKsRow++) {
12476 ksRow = ksRows - invKsRow;
12477
12478 if (invKsRow & 3) {
12479 t = keySchedule[ksRow];
12480 } else {
12481 t = keySchedule[ksRow - 4];
12482 }
12483
12484 if (invKsRow < 4 || ksRow <= 4) {
12485 invKeySchedule[invKsRow] = t;
12486 } else {
12487 invKeySchedule[invKsRow] = invSubMix0[sbox[t >>> 24]] ^ invSubMix1[sbox[t >>> 16 & 0xff]] ^ invSubMix2[sbox[t >>> 8 & 0xff]] ^ invSubMix3[sbox[t & 0xff]];
12488 }
12489
12490 invKeySchedule[invKsRow] = invKeySchedule[invKsRow] >>> 0;
12491 }
12492 } // Adding this as a method greatly improves performance.
12493 ;
12494
12495 _proto.networkToHostOrderSwap = function networkToHostOrderSwap(word) {
12496 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
12497 };
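// Worked example: networkToHostOrderSwap reverses the byte order of a 32-bit word,
// e.g. 0x11223344 becomes 0x44332211.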
12498
12499 _proto.decrypt = function decrypt(inputArrayBuffer, offset, aesIV) {
12500 var nRounds = this.keySize + 6;
12501 var invKeySchedule = this.invKeySchedule;
12502 var invSBOX = this.invSBox;
12503 var invSubMix = this.invSubMix;
12504 var invSubMix0 = invSubMix[0];
12505 var invSubMix1 = invSubMix[1];
12506 var invSubMix2 = invSubMix[2];
12507 var invSubMix3 = invSubMix[3];
12508 var initVector = this.uint8ArrayToUint32Array_(aesIV);
12509 var initVector0 = initVector[0];
12510 var initVector1 = initVector[1];
12511 var initVector2 = initVector[2];
12512 var initVector3 = initVector[3];
12513 var inputInt32 = new Int32Array(inputArrayBuffer);
12514 var outputInt32 = new Int32Array(inputInt32.length);
12515 var t0, t1, t2, t3;
12516 var s0, s1, s2, s3;
12517 var inputWords0, inputWords1, inputWords2, inputWords3;
12518 var ksRow, i;
12519 var swapWord = this.networkToHostOrderSwap;
12520
12521 while (offset < inputInt32.length) {
12522 inputWords0 = swapWord(inputInt32[offset]);
12523 inputWords1 = swapWord(inputInt32[offset + 1]);
12524 inputWords2 = swapWord(inputInt32[offset + 2]);
12525 inputWords3 = swapWord(inputInt32[offset + 3]);
12526 s0 = inputWords0 ^ invKeySchedule[0];
12527 s1 = inputWords3 ^ invKeySchedule[1];
12528 s2 = inputWords2 ^ invKeySchedule[2];
12529 s3 = inputWords1 ^ invKeySchedule[3];
12530 ksRow = 4; // Iterate through the rounds of decryption
12531
12532 for (i = 1; i < nRounds; i++) {
12533 t0 = invSubMix0[s0 >>> 24] ^ invSubMix1[s1 >> 16 & 0xff] ^ invSubMix2[s2 >> 8 & 0xff] ^ invSubMix3[s3 & 0xff] ^ invKeySchedule[ksRow];
12534 t1 = invSubMix0[s1 >>> 24] ^ invSubMix1[s2 >> 16 & 0xff] ^ invSubMix2[s3 >> 8 & 0xff] ^ invSubMix3[s0 & 0xff] ^ invKeySchedule[ksRow + 1];
12535 t2 = invSubMix0[s2 >>> 24] ^ invSubMix1[s3 >> 16 & 0xff] ^ invSubMix2[s0 >> 8 & 0xff] ^ invSubMix3[s1 & 0xff] ^ invKeySchedule[ksRow + 2];
12536 t3 = invSubMix0[s3 >>> 24] ^ invSubMix1[s0 >> 16 & 0xff] ^ invSubMix2[s1 >> 8 & 0xff] ^ invSubMix3[s2 & 0xff] ^ invKeySchedule[ksRow + 3]; // Update state
12537
12538 s0 = t0;
12539 s1 = t1;
12540 s2 = t2;
12541 s3 = t3;
12542 ksRow = ksRow + 4;
12543 } // Shift rows, sub bytes, add round key
12544
12545
12546 t0 = invSBOX[s0 >>> 24] << 24 ^ invSBOX[s1 >> 16 & 0xff] << 16 ^ invSBOX[s2 >> 8 & 0xff] << 8 ^ invSBOX[s3 & 0xff] ^ invKeySchedule[ksRow];
12547 t1 = invSBOX[s1 >>> 24] << 24 ^ invSBOX[s2 >> 16 & 0xff] << 16 ^ invSBOX[s3 >> 8 & 0xff] << 8 ^ invSBOX[s0 & 0xff] ^ invKeySchedule[ksRow + 1];
12548 t2 = invSBOX[s2 >>> 24] << 24 ^ invSBOX[s3 >> 16 & 0xff] << 16 ^ invSBOX[s0 >> 8 & 0xff] << 8 ^ invSBOX[s1 & 0xff] ^ invKeySchedule[ksRow + 2];
12549 t3 = invSBOX[s3 >>> 24] << 24 ^ invSBOX[s0 >> 16 & 0xff] << 16 ^ invSBOX[s1 >> 8 & 0xff] << 8 ^ invSBOX[s2 & 0xff] ^ invKeySchedule[ksRow + 3]; // Write
12550
12551 outputInt32[offset] = swapWord(t0 ^ initVector0);
12552 outputInt32[offset + 1] = swapWord(t3 ^ initVector1);
12553 outputInt32[offset + 2] = swapWord(t2 ^ initVector2);
12554 outputInt32[offset + 3] = swapWord(t1 ^ initVector3); // reset initVector to last 4 unsigned int
12555
12556 initVector0 = inputWords0;
12557 initVector1 = inputWords1;
12558 initVector2 = inputWords2;
12559 initVector3 = inputWords3;
12560 offset = offset + 4;
12561 }
12562
12563 return outputInt32.buffer;
12564 };
12565
12566 return AESDecryptor;
12567}();
12568
12569
12570
12571/***/ }),
12572
12573/***/ "./src/crypt/decrypter.ts":
12574/*!********************************!*\
12575 !*** ./src/crypt/decrypter.ts ***!
12576 \********************************/
12577/*! exports provided: default */
12578/***/ (function(module, __webpack_exports__, __webpack_require__) {
12579__webpack_require__.r(__webpack_exports__);
12580/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return Decrypter; });
12581/* harmony import */ var _aes_crypto__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./aes-crypto */ "./src/crypt/aes-crypto.ts");
12582/* harmony import */ var _fast_aes_key__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./fast-aes-key */ "./src/crypt/fast-aes-key.ts");
12583/* harmony import */ var _aes_decryptor__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./aes-decryptor */ "./src/crypt/aes-decryptor.ts");
12584/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
12585/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
12586/* harmony import */ var _utils_typed_array__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/typed-array */ "./src/utils/typed-array.ts");
12587
12588
12589
12590
12591
12592
12593var CHUNK_SIZE = 16; // 16 bytes, 128 bits
12594
12595var Decrypter = /*#__PURE__*/function () {
12596 function Decrypter(observer, config, _temp) {
12597 var _ref = _temp === void 0 ? {} : _temp,
12598 _ref$removePKCS7Paddi = _ref.removePKCS7Padding,
12599 removePKCS7Padding = _ref$removePKCS7Paddi === void 0 ? true : _ref$removePKCS7Paddi;
12600
12601 this.logEnabled = true;
12602 this.observer = void 0;
12603 this.config = void 0;
12604 this.removePKCS7Padding = void 0;
12605 this.subtle = null;
12606 this.softwareDecrypter = null;
12607 this.key = null;
12608 this.fastAesKey = null;
12609 this.remainderData = null;
12610 this.currentIV = null;
12611 this.currentResult = null;
12612 this.observer = observer;
12613 this.config = config;
12614 this.removePKCS7Padding = removePKCS7Padding; // built-in decryptor expects PKCS7 padding
12615
12616 if (removePKCS7Padding) {
12617 try {
12618 var browserCrypto = self.crypto;
12619
12620 if (browserCrypto) {
12621 this.subtle = browserCrypto.subtle || browserCrypto.webkitSubtle;
12622 }
12623 } catch (e) {
12624 /* no-op */
12625 }
12626 }
12627
12628 if (this.subtle === null) {
12629 this.config.enableSoftwareAES = true;
12630 }
12631 }
12632
12633 var _proto = Decrypter.prototype;
12634
12635 _proto.destroy = function destroy() {
12636 // @ts-ignore
12637 this.observer = null;
12638 };
12639
12640 _proto.isSync = function isSync() {
12641 return this.config.enableSoftwareAES;
12642 };
12643
12644 _proto.flush = function flush() {
12645 var currentResult = this.currentResult;
12646
12647 if (!currentResult) {
12648 this.reset();
12649 return;
12650 }
12651
12652 var data = new Uint8Array(currentResult);
12653 this.reset();
12654
12655 if (this.removePKCS7Padding) {
12656 return Object(_aes_decryptor__WEBPACK_IMPORTED_MODULE_2__["removePadding"])(data);
12657 }
12658
12659 return data;
12660 };
12661
12662 _proto.reset = function reset() {
12663 this.currentResult = null;
12664 this.currentIV = null;
12665 this.remainderData = null;
12666
12667 if (this.softwareDecrypter) {
12668 this.softwareDecrypter = null;
12669 }
12670 };
12671
12672 _proto.decrypt = function decrypt(data, key, iv, callback) {
12673 if (this.config.enableSoftwareAES) {
12674 this.softwareDecrypt(new Uint8Array(data), key, iv);
12675 var decryptResult = this.flush();
12676
12677 if (decryptResult) {
12678 callback(decryptResult.buffer);
12679 }
12680 } else {
12681 this.webCryptoDecrypt(new Uint8Array(data), key, iv).then(callback);
12682 }
12683 };
12684
12685 _proto.softwareDecrypt = function softwareDecrypt(data, key, iv) {
12686 var currentIV = this.currentIV,
12687 currentResult = this.currentResult,
12688 remainderData = this.remainderData;
12689 this.logOnce('JS AES decrypt'); // The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
12690 // This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
12691 // the end on flush(), but by that time we have already received all bytes for the segment.
12692 // Progressive decryption does not work with WebCrypto
12693
12694 if (remainderData) {
12695 data = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_4__["appendUint8Array"])(remainderData, data);
12696 this.remainderData = null;
12697 } // Byte length must be a multiple of 16 (AES-128 = 128 bit blocks = 16 bytes)
12698
12699
12700 var currentChunk = this.getValidChunk(data);
12701
12702 if (!currentChunk.length) {
12703 return null;
12704 }
12705
12706 if (currentIV) {
12707 iv = currentIV;
12708 }
12709
12710 var softwareDecrypter = this.softwareDecrypter;
12711
12712 if (!softwareDecrypter) {
12713 softwareDecrypter = this.softwareDecrypter = new _aes_decryptor__WEBPACK_IMPORTED_MODULE_2__["default"]();
12714 }
12715
12716 softwareDecrypter.expandKey(key);
12717 var result = currentResult;
12718 this.currentResult = softwareDecrypter.decrypt(currentChunk.buffer, 0, iv);
12719 this.currentIV = Object(_utils_typed_array__WEBPACK_IMPORTED_MODULE_5__["sliceUint8"])(currentChunk, -16).buffer;
12720
12721 if (!result) {
12722 return null;
12723 }
12724
12725 return result;
12726 };
12727
12728 _proto.webCryptoDecrypt = function webCryptoDecrypt(data, key, iv) {
12729 var _this = this;
12730
12731 var subtle = this.subtle;
12732
12733 if (this.key !== key || !this.fastAesKey) {
12734 this.key = key;
12735 this.fastAesKey = new _fast_aes_key__WEBPACK_IMPORTED_MODULE_1__["default"](subtle, key);
12736 }
12737
12738 return this.fastAesKey.expandKey().then(function (aesKey) {
12739 // decrypt using web crypto
12740 if (!subtle) {
12741 return Promise.reject(new Error('web crypto not initialized'));
12742 }
12743
12744 var crypto = new _aes_crypto__WEBPACK_IMPORTED_MODULE_0__["default"](subtle, iv);
12745 return crypto.decrypt(data.buffer, aesKey);
12746 }).catch(function (err) {
12747 return _this.onWebCryptoError(err, data, key, iv);
12748 });
12749 };
12750
12751 _proto.onWebCryptoError = function onWebCryptoError(err, data, key, iv) {
12752 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('[decrypter.ts]: WebCrypto Error, disable WebCrypto API:', err);
12753 this.config.enableSoftwareAES = true;
12754 this.logEnabled = true;
12755 return this.softwareDecrypt(data, key, iv);
12756 };
12757
12758 _proto.getValidChunk = function getValidChunk(data) {
12759 var currentChunk = data;
12760 var splitPoint = data.length - data.length % CHUNK_SIZE;
12761
12762 if (splitPoint !== data.length) {
12763 currentChunk = Object(_utils_typed_array__WEBPACK_IMPORTED_MODULE_5__["sliceUint8"])(data, 0, splitPoint);
12764 this.remainderData = Object(_utils_typed_array__WEBPACK_IMPORTED_MODULE_5__["sliceUint8"])(data, splitPoint);
12765 }
12766
12767 return currentChunk;
12768 };
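// Worked example: AES-128 works on 16-byte blocks, so getValidChunk trims the input to a multiple
// of 16 and parks the rest for the next call. For a 37-byte Uint8Array it returns the first
// 32 bytes and stores the trailing 5 bytes in this.remainderData.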
12769
12770 _proto.logOnce = function logOnce(msg) {
12771 if (!this.logEnabled) {
12772 return;
12773 }
12774
12775 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log("[decrypter.ts]: " + msg);
12776 this.logEnabled = false;
12777 };
12778
12779 return Decrypter;
12780}();
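// Usage sketch (illustrative; `observer`, `hlsConfig` and the key/iv buffers are placeholder
// inputs): the decrypter uses WebCrypto when it is available and falls back to the software AES
// path above otherwise, or when config.enableSoftwareAES is set.
//
//   var decrypter = new Decrypter(observer, hlsConfig);
//   decrypter.decrypt(encryptedArrayBuffer, keyUint8, ivUint8, function (decrypted) {
//     // `decrypted` is an ArrayBuffer containing the plain segment bytes
//   });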
12781
12782
12783
12784/***/ }),
12785
12786/***/ "./src/crypt/fast-aes-key.ts":
12787/*!***********************************!*\
12788 !*** ./src/crypt/fast-aes-key.ts ***!
12789 \***********************************/
12790/*! exports provided: default */
12791/***/ (function(module, __webpack_exports__, __webpack_require__) {
12792__webpack_require__.r(__webpack_exports__);
12793/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return FastAESKey; });
12794var FastAESKey = /*#__PURE__*/function () {
12795 function FastAESKey(subtle, key) {
12796 this.subtle = void 0;
12797 this.key = void 0;
12798 this.subtle = subtle;
12799 this.key = key;
12800 }
12801
12802 var _proto = FastAESKey.prototype;
12803
12804 _proto.expandKey = function expandKey() {
12805 return this.subtle.importKey('raw', this.key, {
12806 name: 'AES-CBC'
12807 }, false, ['encrypt', 'decrypt']);
12808 };
12809
12810 return FastAESKey;
12811}();
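// Usage sketch (illustrative): FastAESKey turns a raw 16-byte AES-128 key into a CryptoKey
// suitable for the AES-CBC decrypt performed by AESCrypto above.
//
//   new FastAESKey(self.crypto.subtle, rawKeyUint8).expandKey().then(function (cryptoKey) {
//     // hand `cryptoKey` to AESCrypto#decrypt
//   });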
12812
12813
12814
12815/***/ }),
12816
12817/***/ "./src/demux/aacdemuxer.ts":
12818/*!*********************************!*\
12819 !*** ./src/demux/aacdemuxer.ts ***!
12820 \*********************************/
12821/*! exports provided: default */
12822/***/ (function(module, __webpack_exports__, __webpack_require__) {
12823__webpack_require__.r(__webpack_exports__);
12824/* harmony import */ var _base_audio_demuxer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./base-audio-demuxer */ "./src/demux/base-audio-demuxer.ts");
12825/* harmony import */ var _adts__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./adts */ "./src/demux/adts.ts");
12826/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
12827/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
12828function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
12829
12830function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
12831
12832/**
12833 * AAC demuxer
12834 */
12835
12836
12837
12838
12839
12840var AACDemuxer = /*#__PURE__*/function (_BaseAudioDemuxer) {
12841 _inheritsLoose(AACDemuxer, _BaseAudioDemuxer);
12842
12843 function AACDemuxer(observer, config) {
12844 var _this;
12845
12846 _this = _BaseAudioDemuxer.call(this) || this;
12847 _this.observer = void 0;
12848 _this.config = void 0;
12849 _this.observer = observer;
12850 _this.config = config;
12851 return _this;
12852 }
12853
12854 var _proto = AACDemuxer.prototype;
12855
12856 _proto.resetInitSegment = function resetInitSegment(audioCodec, videoCodec, duration) {
12857 _BaseAudioDemuxer.prototype.resetInitSegment.call(this, audioCodec, videoCodec, duration);
12858
12859 this._audioTrack = {
12860 container: 'audio/adts',
12861 type: 'audio',
12862 id: 2,
12863 pid: -1,
12864 sequenceNumber: 0,
12865 isAAC: true,
12866 samples: [],
12867 manifestCodec: audioCodec,
12868 duration: duration,
12869 inputTimeScale: 90000,
12870 dropped: 0
12871 };
12872 } // Source for probe info - https://wiki.multimedia.cx/index.php?title=ADTS
12873 ;
12874
12875 AACDemuxer.probe = function probe(data) {
12876 if (!data) {
12877 return false;
12878 } // Check for the ADTS sync word
12879 // Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
12880 // Layer bits (position 14 and 15) in header should always be 0 for ADTS
12881 // More info https://wiki.multimedia.cx/index.php?title=ADTS
12882
12883
12884 var id3Data = _demux_id3__WEBPACK_IMPORTED_MODULE_3__["getID3Data"](data, 0) || [];
12885 var offset = id3Data.length;
12886
12887 for (var length = data.length; offset < length; offset++) {
12888 if (_adts__WEBPACK_IMPORTED_MODULE_1__["probe"](data, offset)) {
12889 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('ADTS sync word found !');
12890 return true;
12891 }
12892 }
12893
12894 return false;
12895 };
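// Illustration: a raw AAC stream typically starts (after any ID3 data) with sync bytes such as
// 0xff 0xf1, matching the | 1111 1111 | 1111 X00X | pattern above (sync word set, layer bits 00),
// so probe() reports true for it, provided the frame data that follows is consistent with the header.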
12896
12897 _proto.canParse = function canParse(data, offset) {
12898 return _adts__WEBPACK_IMPORTED_MODULE_1__["canParse"](data, offset);
12899 };
12900
12901 _proto.appendFrame = function appendFrame(track, data, offset) {
12902 _adts__WEBPACK_IMPORTED_MODULE_1__["initTrackConfig"](track, this.observer, data, offset, track.manifestCodec);
12903 var frame = _adts__WEBPACK_IMPORTED_MODULE_1__["appendFrame"](track, data, offset, this.initPTS, this.frameIndex);
12904
12905 if (frame && frame.missing === 0) {
12906 return frame;
12907 }
12908 };
12909
12910 return AACDemuxer;
12911}(_base_audio_demuxer__WEBPACK_IMPORTED_MODULE_0__["default"]);
12912
12913AACDemuxer.minProbeByteLength = 9;
12914/* harmony default export */ __webpack_exports__["default"] = (AACDemuxer);
12915
12916/***/ }),
12917
12918/***/ "./src/demux/adts.ts":
12919/*!***************************!*\
12920 !*** ./src/demux/adts.ts ***!
12921 \***************************/
12922/*! exports provided: getAudioConfig, isHeaderPattern, getHeaderLength, getFullFrameLength, canGetFrameLength, isHeader, canParse, probe, initTrackConfig, getFrameDuration, parseFrameHeader, appendFrame */
12923/***/ (function(module, __webpack_exports__, __webpack_require__) {
12924__webpack_require__.r(__webpack_exports__);
12925/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getAudioConfig", function() { return getAudioConfig; });
12926/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isHeaderPattern", function() { return isHeaderPattern; });
12927/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getHeaderLength", function() { return getHeaderLength; });
12928/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getFullFrameLength", function() { return getFullFrameLength; });
12929/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "canGetFrameLength", function() { return canGetFrameLength; });
12930/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isHeader", function() { return isHeader; });
12931/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "canParse", function() { return canParse; });
12932/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "probe", function() { return probe; });
12933/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "initTrackConfig", function() { return initTrackConfig; });
12934/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getFrameDuration", function() { return getFrameDuration; });
12935/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseFrameHeader", function() { return parseFrameHeader; });
12936/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "appendFrame", function() { return appendFrame; });
12937/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
12938/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
12939/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../events */ "./src/events.ts");
12940/**
12941 * ADTS parser helper
12942 * @link https://wiki.multimedia.cx/index.php?title=ADTS
12943 */
12944
12945
12946
12947function getAudioConfig(observer, data, offset, audioCodec) {
12948 var adtsObjectType;
12949 var adtsExtensionSamplingIndex;
12950 var adtsChanelConfig;
12951 var config;
12952 var userAgent = navigator.userAgent.toLowerCase();
12953 var manifestCodec = audioCodec;
12954 var adtsSampleingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350]; // byte 2
12955
12956 adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
12957 var adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
12958
12959 if (adtsSamplingIndex > adtsSampleingRates.length - 1) {
12960 observer.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, {
12961 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
12962 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_PARSING_ERROR,
12963 fatal: true,
12964 reason: "invalid ADTS sampling index:" + adtsSamplingIndex
12965 });
12966 return;
12967 }
12968
12969 adtsChanelConfig = (data[offset + 2] & 0x01) << 2; // byte 3
12970
12971 adtsChanelConfig |= (data[offset + 3] & 0xc0) >>> 6;
12972 _utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].log("manifest codec:" + audioCodec + ", ADTS type:" + adtsObjectType + ", samplingIndex:" + adtsSamplingIndex); // firefox: freq less than 24kHz = AAC SBR (HE-AAC)
12973
12974 if (/firefox/i.test(userAgent)) {
12975 if (adtsSamplingIndex >= 6) {
12976 adtsObjectType = 5;
12977 config = new Array(4); // HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
12978 // there is a factor 2 between frame sample rate and output sample rate
12979       // multiply frequency by 2 (see table below, equivalent to subtract 3)
12980
12981 adtsExtensionSamplingIndex = adtsSamplingIndex - 3;
12982 } else {
12983 adtsObjectType = 2;
12984 config = new Array(2);
12985 adtsExtensionSamplingIndex = adtsSamplingIndex;
12986 } // Android : always use AAC
12987
12988 } else if (userAgent.indexOf('android') !== -1) {
12989 adtsObjectType = 2;
12990 config = new Array(2);
12991 adtsExtensionSamplingIndex = adtsSamplingIndex;
12992 } else {
12993 /* for other browsers (Chrome/Vivaldi/Opera ...)
12994 always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...)
12995 */
12996 adtsObjectType = 5;
12997 config = new Array(4); // if (manifest codec is HE-AAC or HE-AACv2) OR (manifest codec not specified AND frequency less than 24kHz)
12998
12999 if (audioCodec && (audioCodec.indexOf('mp4a.40.29') !== -1 || audioCodec.indexOf('mp4a.40.5') !== -1) || !audioCodec && adtsSamplingIndex >= 6) {
13000 // HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
13001 // there is a factor 2 between frame sample rate and output sample rate
13002        // multiply frequency by 2 (see table below, equivalent to subtract 3)
13003 adtsExtensionSamplingIndex = adtsSamplingIndex - 3;
13004 } else {
13005 // if (manifest codec is AAC) AND (frequency less than 24kHz AND nb channel is 1) OR (manifest codec not specified and mono audio)
13006 // Chrome fails to play back with low frequency AAC LC mono when initialized with HE-AAC. This is not a problem with stereo.
13007 if (audioCodec && audioCodec.indexOf('mp4a.40.2') !== -1 && (adtsSamplingIndex >= 6 && adtsChanelConfig === 1 || /vivaldi/i.test(userAgent)) || !audioCodec && adtsChanelConfig === 1) {
13008 adtsObjectType = 2;
13009 config = new Array(2);
13010 }
13011
13012 adtsExtensionSamplingIndex = adtsSamplingIndex;
13013 }
13014 }
13015 /* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
13016 ISO 14496-3 (AAC).pdf - Table 1.13 — Syntax of AudioSpecificConfig()
13017 Audio Profile / Audio Object Type
13018 0: Null
13019 1: AAC Main
13020 2: AAC LC (Low Complexity)
13021 3: AAC SSR (Scalable Sample Rate)
13022 4: AAC LTP (Long Term Prediction)
13023 5: SBR (Spectral Band Replication)
13024 6: AAC Scalable
13025 sampling freq
13026 0: 96000 Hz
13027 1: 88200 Hz
13028 2: 64000 Hz
13029 3: 48000 Hz
13030 4: 44100 Hz
13031 5: 32000 Hz
13032 6: 24000 Hz
13033 7: 22050 Hz
13034 8: 16000 Hz
13035 9: 12000 Hz
13036 10: 11025 Hz
13037 11: 8000 Hz
13038 12: 7350 Hz
13039 13: Reserved
13040 14: Reserved
13041    15: frequency is written explicitly
13042 Channel Configurations
13043 These are the channel configurations:
13044    0: Defined in AOT Specific Config
13045 1: 1 channel: front-center
13046 2: 2 channels: front-left, front-right
13047 */
13048 // audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
13049
13050
13051 config[0] = adtsObjectType << 3; // samplingFrequencyIndex
13052
13053 config[0] |= (adtsSamplingIndex & 0x0e) >> 1;
13054 config[1] |= (adtsSamplingIndex & 0x01) << 7; // channelConfiguration
13055
13056 config[1] |= adtsChanelConfig << 3;
13057
13058 if (adtsObjectType === 5) {
13059    // adtsExtensionSamplingIndex
13060 config[1] |= (adtsExtensionSamplingIndex & 0x0e) >> 1;
13061    config[2] = (adtsExtensionSamplingIndex & 0x01) << 7; // adtsObjectType (forced to 2; Chrome requires the object type to be less than 5)
13062 // https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc
13063
13064 config[2] |= 2 << 2;
13065 config[3] = 0;
13066 }
13067
13068 return {
13069 config: config,
13070 samplerate: adtsSampleingRates[adtsSamplingIndex],
13071 channelCount: adtsChanelConfig,
13072 codec: 'mp4a.40.' + adtsObjectType,
13073 manifestCodec: manifestCodec
13074 };
13075}
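// Worked example (illustration only, input values assumed): packing the two
// AudioSpecificConfig bytes the function above produces for AAC-LC (object type 2),
// 44.1 kHz (sampling index 4) and stereo (channel config 2):
//
//   config[0] = 2 << 3;           // 0x10  audioObjectType in the top 5 bits
//   config[0] |= (4 & 0x0e) >> 1; // 0x12  upper 3 bits of samplingFrequencyIndex
//   config[1] = (4 & 0x01) << 7;  // 0x00  lowest bit of samplingFrequencyIndex
//   config[1] |= 2 << 3;          // 0x10  channelConfiguration
//
// which yields the familiar [0x12, 0x10] config for 'mp4a.40.2' at 44.1 kHz stereo.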
13076function isHeaderPattern(data, offset) {
13077 return data[offset] === 0xff && (data[offset + 1] & 0xf6) === 0xf0;
13078}
13079function getHeaderLength(data, offset) {
13080 return data[offset + 1] & 0x01 ? 7 : 9;
13081}
13082function getFullFrameLength(data, offset) {
13083 return (data[offset + 3] & 0x03) << 11 | data[offset + 4] << 3 | (data[offset + 5] & 0xe0) >>> 5;
13084}
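// Worked example (illustration only): the 13-bit aac_frame_length field spans the low two
// bits of byte 3, all of byte 4 and the top three bits of byte 5. For the assumed header
// bytes [0xff, 0xf1, 0x50, 0x80, 0x02, 0x1f, 0xfc]:
//
//   (0x80 & 0x03) << 11     // 0
//     | 0x02 << 3           // 16
//     | (0x1f & 0xe0) >>> 5 // 0
//   // = 16, i.e. the whole ADTS frame, header included, is 16 bytes long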
13085function canGetFrameLength(data, offset) {
13086 return offset + 5 < data.length;
13087}
13088function isHeader(data, offset) {
13089 // Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
13090 // Layer bits (position 14 and 15) in header should be always 0 for ADTS
13091 // More info https://wiki.multimedia.cx/index.php?title=ADTS
13092 return offset + 1 < data.length && isHeaderPattern(data, offset);
13093}
13094function canParse(data, offset) {
13095 return canGetFrameLength(data, offset) && isHeaderPattern(data, offset) && getFullFrameLength(data, offset) <= data.length - offset;
13096}
13097function probe(data, offset) {
13098  // same as isHeader, but we also check that another ADTS frame follows the last one
13099  // or that the end of the data has been reached
13100 if (isHeader(data, offset)) {
13101 // ADTS header Length
13102 var headerLength = getHeaderLength(data, offset);
13103
13104 if (offset + headerLength >= data.length) {
13105 return false;
13106 } // ADTS frame Length
13107
13108
13109 var frameLength = getFullFrameLength(data, offset);
13110
13111 if (frameLength <= headerLength) {
13112 return false;
13113 }
13114
13115 var newOffset = offset + frameLength;
13116 return newOffset === data.length || isHeader(data, newOffset);
13117 }
13118
13119 return false;
13120}
13121function initTrackConfig(track, observer, data, offset, audioCodec) {
13122 if (!track.samplerate) {
13123 var config = getAudioConfig(observer, data, offset, audioCodec);
13124
13125 if (!config) {
13126 return;
13127 }
13128
13129 track.config = config.config;
13130 track.samplerate = config.samplerate;
13131 track.channelCount = config.channelCount;
13132 track.codec = config.codec;
13133 track.manifestCodec = config.manifestCodec;
13134 _utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].log("parsed codec:" + track.codec + ", rate:" + config.samplerate + ", channels:" + config.channelCount);
13135 }
13136}
13137function getFrameDuration(samplerate) {
13138 return 1024 * 90000 / samplerate;
13139}
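// Worked example (illustration only): every AAC frame carries 1024 PCM samples and the
// duration is expressed on the 90 kHz MPEG-TS clock:
//
//   getFrameDuration(48000); // 1024 * 90000 / 48000 = 1920 ticks  (~21.3 ms)
//   getFrameDuration(44100); // 1024 * 90000 / 44100 ≈ 2089.8 ticks (~23.2 ms)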
13140function parseFrameHeader(data, offset, pts, frameIndex, frameDuration) {
13141 // The protection skip bit tells us if we have 2 bytes of CRC data at the end of the ADTS header
13142 var headerLength = getHeaderLength(data, offset); // retrieve frame size
13143
13144 var frameLength = getFullFrameLength(data, offset);
13145 frameLength -= headerLength;
13146
13147 if (frameLength > 0) {
13148 var stamp = pts + frameIndex * frameDuration; // logger.log(`AAC frame, offset/length/total/pts:${offset+headerLength}/${frameLength}/${data.byteLength}/${(stamp/90).toFixed(0)}`);
13149
13150 return {
13151 headerLength: headerLength,
13152 frameLength: frameLength,
13153 stamp: stamp
13154 };
13155 }
13156}
13157function appendFrame(track, data, offset, pts, frameIndex) {
13158 var frameDuration = getFrameDuration(track.samplerate);
13159 var header = parseFrameHeader(data, offset, pts, frameIndex, frameDuration);
13160
13161 if (header) {
13162 var frameLength = header.frameLength,
13163 headerLength = header.headerLength,
13164 stamp = header.stamp;
13165 var length = headerLength + frameLength;
13166 var missing = Math.max(0, offset + length - data.length); // logger.log(`AAC frame ${frameIndex}, pts:${stamp} length@offset/total: ${frameLength}@${offset+headerLength}/${data.byteLength} missing: ${missing}`);
13167
13168 var unit;
13169
13170 if (missing) {
13171 unit = new Uint8Array(length - headerLength);
13172 unit.set(data.subarray(offset + headerLength, data.length), 0);
13173 } else {
13174 unit = data.subarray(offset + headerLength, offset + length);
13175 }
13176
13177 var sample = {
13178 unit: unit,
13179 pts: stamp
13180 };
13181
13182 if (!missing) {
13183 track.samples.push(sample);
13184 }
13185
13186 return {
13187 sample: sample,
13188 length: length,
13189 missing: missing
13190 };
13191 }
13192}
13193
13194/***/ }),
13195
13196/***/ "./src/demux/base-audio-demuxer.ts":
13197/*!*****************************************!*\
13198 !*** ./src/demux/base-audio-demuxer.ts ***!
13199 \*****************************************/
13200/*! exports provided: initPTSFn, default */
13201/***/ (function(module, __webpack_exports__, __webpack_require__) {
13202__webpack_require__.r(__webpack_exports__);
13203/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "initPTSFn", function() { return initPTSFn; });
13204/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
13205/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
13206/* harmony import */ var _dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./dummy-demuxed-track */ "./src/demux/dummy-demuxed-track.ts");
13207/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
13208/* harmony import */ var _utils_typed_array__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/typed-array */ "./src/utils/typed-array.ts");
13209
13210
13211
13212
13213
13214
13215var BaseAudioDemuxer = /*#__PURE__*/function () {
13216 function BaseAudioDemuxer() {
13217 this._audioTrack = void 0;
13218 this._id3Track = void 0;
13219 this.frameIndex = 0;
13220 this.cachedData = null;
13221 this.initPTS = null;
13222 }
13223
13224 var _proto = BaseAudioDemuxer.prototype;
13225
13226 _proto.resetInitSegment = function resetInitSegment(audioCodec, videoCodec, duration) {
13227 this._id3Track = {
13228 type: 'id3',
13229 id: 3,
13230 pid: -1,
13231 inputTimeScale: 90000,
13232 sequenceNumber: 0,
13233 samples: [],
13234 dropped: 0
13235 };
13236 };
13237
13238 _proto.resetTimeStamp = function resetTimeStamp() {};
13239
13240 _proto.resetContiguity = function resetContiguity() {};
13241
13242 _proto.canParse = function canParse(data, offset) {
13243 return false;
13244 };
13245
13246 _proto.appendFrame = function appendFrame(track, data, offset) {} // feed incoming data to the front of the parsing pipeline
13247 ;
13248
13249 _proto.demux = function demux(data, timeOffset) {
13250 if (this.cachedData) {
13251 data = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_3__["appendUint8Array"])(this.cachedData, data);
13252 this.cachedData = null;
13253 }
13254
13255 var id3Data = _demux_id3__WEBPACK_IMPORTED_MODULE_1__["getID3Data"](data, 0);
13256 var offset = id3Data ? id3Data.length : 0;
13257 var lastDataIndex;
13258 var pts;
13259 var track = this._audioTrack;
13260 var id3Track = this._id3Track;
13261 var timestamp = id3Data ? _demux_id3__WEBPACK_IMPORTED_MODULE_1__["getTimeStamp"](id3Data) : undefined;
13262 var length = data.length;
13263
13264 if (this.frameIndex === 0 || this.initPTS === null) {
13265 this.initPTS = initPTSFn(timestamp, timeOffset);
13266 } // more expressive than alternative: id3Data?.length
13267
13268
13269 if (id3Data && id3Data.length > 0) {
13270 id3Track.samples.push({
13271 pts: this.initPTS,
13272 dts: this.initPTS,
13273 data: id3Data
13274 });
13275 }
13276
13277 pts = this.initPTS;
13278
13279 while (offset < length) {
13280 if (this.canParse(data, offset)) {
13281 var frame = this.appendFrame(track, data, offset);
13282
13283 if (frame) {
13284 this.frameIndex++;
13285 pts = frame.sample.pts;
13286 offset += frame.length;
13287 lastDataIndex = offset;
13288 } else {
13289 offset = length;
13290 }
13291 } else if (_demux_id3__WEBPACK_IMPORTED_MODULE_1__["canParse"](data, offset)) {
13292        // after an ID3.canParse, a call to ID3.getID3Data *should* always return some data
13293 id3Data = _demux_id3__WEBPACK_IMPORTED_MODULE_1__["getID3Data"](data, offset);
13294 id3Track.samples.push({
13295 pts: pts,
13296 dts: pts,
13297 data: id3Data
13298 });
13299 offset += id3Data.length;
13300 lastDataIndex = offset;
13301 } else {
13302 offset++;
13303 }
13304
13305 if (offset === length && lastDataIndex !== length) {
13306 var partialData = Object(_utils_typed_array__WEBPACK_IMPORTED_MODULE_4__["sliceUint8"])(data, lastDataIndex);
13307
13308 if (this.cachedData) {
13309 this.cachedData = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_3__["appendUint8Array"])(this.cachedData, partialData);
13310 } else {
13311 this.cachedData = partialData;
13312 }
13313 }
13314 }
13315
13316 return {
13317 audioTrack: track,
13318 avcTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])(),
13319 id3Track: id3Track,
13320 textTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])()
13321 };
13322 };
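// Minimal sketch (not part of the bundle) of the partial-frame caching done by demux()
// above: bytes that cannot be parsed yet are kept in cachedData and prepended to the next
// chunk. concatBytes() below is an assumption standing in for the appendUint8Array helper.
//
//   function concatBytes(a, b) {
//     var out = new Uint8Array(a.length + b.length);
//     out.set(a, 0);
//     out.set(b, a.length);
//     return out;
//   }
//   var cached = new Uint8Array([0xff, 0xf1]);                // truncated ADTS header from chunk N
//   var nextChunk = new Uint8Array([0x50, 0x80, 0x02, 0x1f]); // start of chunk N + 1
//   var joined = concatBytes(cached, nextChunk);               // parsing resumes on the joined buffer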
13323
13324 _proto.demuxSampleAes = function demuxSampleAes(data, keyData, timeOffset) {
13325 return Promise.reject(new Error("[" + this + "] This demuxer does not support Sample-AES decryption"));
13326 };
13327
13328 _proto.flush = function flush(timeOffset) {
13329 // Parse cache in case of remaining frames.
13330 var cachedData = this.cachedData;
13331
13332 if (cachedData) {
13333 this.cachedData = null;
13334 this.demux(cachedData, 0);
13335 }
13336
13337 this.frameIndex = 0;
13338 return {
13339 audioTrack: this._audioTrack,
13340 avcTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])(),
13341 id3Track: this._id3Track,
13342 textTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])()
13343 };
13344 };
13345
13346 _proto.destroy = function destroy() {};
13347
13348 return BaseAudioDemuxer;
13349}();
13350/**
13351 * Initialize PTS
13352 * <p>
13353 * use timestamp unless it is undefined, NaN or Infinity
13354 * </p>
13355 */
13356
13357
13358var initPTSFn = function initPTSFn(timestamp, timeOffset) {
13359 return Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(timestamp) ? timestamp * 90 : timeOffset * 90000;
13360};
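// Worked example (illustration only): the ID3 timestamp (already converted to milliseconds
// by the id3 module) and the fragment timeOffset (seconds) are both mapped onto the
// 90 kHz clock:
//
//   initPTSFn(1000, 10);      // 1000 * 90  = 90000   (a finite ID3 timestamp wins)
//   initPTSFn(undefined, 10); // 10 * 90000 = 900000  (fall back to timeOffset)
//   initPTSFn(NaN, 2.5);      // 2.5 * 90000 = 225000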
13361/* harmony default export */ __webpack_exports__["default"] = (BaseAudioDemuxer);
13362
13363/***/ }),
13364
13365/***/ "./src/demux/chunk-cache.ts":
13366/*!**********************************!*\
13367 !*** ./src/demux/chunk-cache.ts ***!
13368 \**********************************/
13369/*! exports provided: default */
13370/***/ (function(module, __webpack_exports__, __webpack_require__) {
13371__webpack_require__.r(__webpack_exports__);
13372/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return ChunkCache; });
13373var ChunkCache = /*#__PURE__*/function () {
13374 function ChunkCache() {
13375 this.chunks = [];
13376 this.dataLength = 0;
13377 }
13378
13379 var _proto = ChunkCache.prototype;
13380
13381 _proto.push = function push(chunk) {
13382 this.chunks.push(chunk);
13383 this.dataLength += chunk.length;
13384 };
13385
13386 _proto.flush = function flush() {
13387 var chunks = this.chunks,
13388 dataLength = this.dataLength;
13389 var result;
13390
13391 if (!chunks.length) {
13392 return new Uint8Array(0);
13393 } else if (chunks.length === 1) {
13394 result = chunks[0];
13395 } else {
13396 result = concatUint8Arrays(chunks, dataLength);
13397 }
13398
13399 this.reset();
13400 return result;
13401 };
13402
13403 _proto.reset = function reset() {
13404 this.chunks.length = 0;
13405 this.dataLength = 0;
13406 };
13407
13408 return ChunkCache;
13409}();
13410
13411
13412
13413function concatUint8Arrays(chunks, dataLength) {
13414 var result = new Uint8Array(dataLength);
13415 var offset = 0;
13416
13417 for (var i = 0; i < chunks.length; i++) {
13418 var chunk = chunks[i];
13419 result.set(chunk, offset);
13420 offset += chunk.length;
13421 }
13422
13423 return result;
13424}
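// Usage sketch (not part of the bundle): ChunkCache accumulates pushed chunks and flush()
// hands back one contiguous Uint8Array, resetting the cache.
//
//   var cache = new ChunkCache();
//   cache.push(new Uint8Array([1, 2, 3]));
//   cache.push(new Uint8Array([4, 5]));
//   cache.flush(); // Uint8Array [1, 2, 3, 4, 5]
//   cache.flush(); // Uint8Array [] (an empty cache returns a zero-length array)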
13425
13426/***/ }),
13427
13428/***/ "./src/demux/dummy-demuxed-track.ts":
13429/*!******************************************!*\
13430 !*** ./src/demux/dummy-demuxed-track.ts ***!
13431 \******************************************/
13432/*! exports provided: dummyTrack */
13433/***/ (function(module, __webpack_exports__, __webpack_require__) {
13434__webpack_require__.r(__webpack_exports__);
13435/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "dummyTrack", function() { return dummyTrack; });
13436function dummyTrack() {
13437 return {
13438 type: '',
13439 id: -1,
13440 pid: -1,
13441 inputTimeScale: 90000,
13442 sequenceNumber: -1,
13443 samples: [],
13444 dropped: 0
13445 };
13446}
13447
13448/***/ }),
13449
13450/***/ "./src/demux/exp-golomb.ts":
13451/*!*********************************!*\
13452 !*** ./src/demux/exp-golomb.ts ***!
13453 \*********************************/
13454/*! exports provided: default */
13455/***/ (function(module, __webpack_exports__, __webpack_require__) {
13456__webpack_require__.r(__webpack_exports__);
13457/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
13458/**
13459 * Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by H.264.
13460 */
13461
13462
13463var ExpGolomb = /*#__PURE__*/function () {
13464 function ExpGolomb(data) {
13465 this.data = void 0;
13466 this.bytesAvailable = void 0;
13467 this.word = void 0;
13468 this.bitsAvailable = void 0;
13469 this.data = data; // the number of bytes left to examine in this.data
13470
13471 this.bytesAvailable = data.byteLength; // the current word being examined
13472
13473 this.word = 0; // :uint
13474 // the number of bits left to examine in the current word
13475
13476 this.bitsAvailable = 0; // :uint
13477 } // ():void
13478
13479
13480 var _proto = ExpGolomb.prototype;
13481
13482 _proto.loadWord = function loadWord() {
13483 var data = this.data;
13484 var bytesAvailable = this.bytesAvailable;
13485 var position = data.byteLength - bytesAvailable;
13486 var workingBytes = new Uint8Array(4);
13487 var availableBytes = Math.min(4, bytesAvailable);
13488
13489 if (availableBytes === 0) {
13490 throw new Error('no bytes available');
13491 }
13492
13493 workingBytes.set(data.subarray(position, position + availableBytes));
13494 this.word = new DataView(workingBytes.buffer).getUint32(0); // track the amount of this.data that has been processed
13495
13496 this.bitsAvailable = availableBytes * 8;
13497 this.bytesAvailable -= availableBytes;
13498 } // (count:int):void
13499 ;
13500
13501 _proto.skipBits = function skipBits(count) {
13502 var skipBytes; // :int
13503
13504 if (this.bitsAvailable > count) {
13505 this.word <<= count;
13506 this.bitsAvailable -= count;
13507 } else {
13508 count -= this.bitsAvailable;
13509 skipBytes = count >> 3;
13510 count -= skipBytes >> 3;
13511 this.bytesAvailable -= skipBytes;
13512 this.loadWord();
13513 this.word <<= count;
13514 this.bitsAvailable -= count;
13515 }
13516 } // (size:int):uint
13517 ;
13518
13519 _proto.readBits = function readBits(size) {
13520 var bits = Math.min(this.bitsAvailable, size); // :uint
13521
13522 var valu = this.word >>> 32 - bits; // :uint
13523
13524 if (size > 32) {
13525 _utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].error('Cannot read more than 32 bits at a time');
13526 }
13527
13528 this.bitsAvailable -= bits;
13529
13530 if (this.bitsAvailable > 0) {
13531 this.word <<= bits;
13532 } else if (this.bytesAvailable > 0) {
13533 this.loadWord();
13534 }
13535
13536 bits = size - bits;
13537
13538 if (bits > 0 && this.bitsAvailable) {
13539 return valu << bits | this.readBits(bits);
13540 } else {
13541 return valu;
13542 }
13543 } // ():uint
13544 ;
13545
13546 _proto.skipLZ = function skipLZ() {
13547 var leadingZeroCount; // :uint
13548
13549 for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
13550 if ((this.word & 0x80000000 >>> leadingZeroCount) !== 0) {
13551 // the first bit of working word is 1
13552 this.word <<= leadingZeroCount;
13553 this.bitsAvailable -= leadingZeroCount;
13554 return leadingZeroCount;
13555 }
13556 } // we exhausted word and still have not found a 1
13557
13558
13559 this.loadWord();
13560 return leadingZeroCount + this.skipLZ();
13561 } // ():void
13562 ;
13563
13564 _proto.skipUEG = function skipUEG() {
13565 this.skipBits(1 + this.skipLZ());
13566 } // ():void
13567 ;
13568
13569 _proto.skipEG = function skipEG() {
13570 this.skipBits(1 + this.skipLZ());
13571 } // ():uint
13572 ;
13573
13574 _proto.readUEG = function readUEG() {
13575 var clz = this.skipLZ(); // :uint
13576
13577 return this.readBits(clz + 1) - 1;
13578 } // ():int
13579 ;
13580
13581 _proto.readEG = function readEG() {
13582 var valu = this.readUEG(); // :int
13583
13584 if (0x01 & valu) {
13585 // the number is odd if the low order bit is set
13586 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
13587 } else {
13588 return -1 * (valu >>> 1); // divide by two then make it negative
13589 }
13590 } // Some convenience functions
13591 // :Boolean
13592 ;
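// Worked example (illustration only): decoding the Exp-Golomb codeword 0b00110 with the
// reader above. skipLZ() skips the two leading zero bits, then 3 bits (0b110 = 6) are
// read and 1 is subtracted:
//
//   var eg = new ExpGolomb(new Uint8Array([0x30, 0x00, 0x00, 0x00])); // 0011 0000 ...
//   eg.readUEG(); // 5  (ue(v): 2^2 - 1 + 0b10)
//   // readEG() then maps unsigned to signed: 5 -> 3, 4 -> -2, 3 -> 2, 2 -> -1, 1 -> 1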
13593
13594 _proto.readBoolean = function readBoolean() {
13595 return this.readBits(1) === 1;
13596 } // ():int
13597 ;
13598
13599 _proto.readUByte = function readUByte() {
13600 return this.readBits(8);
13601 } // ():int
13602 ;
13603
13604 _proto.readUShort = function readUShort() {
13605 return this.readBits(16);
13606 } // ():int
13607 ;
13608
13609 _proto.readUInt = function readUInt() {
13610 return this.readBits(32);
13611 }
13612 /**
13613 * Advance the ExpGolomb decoder past a scaling list. The scaling
13614 * list is optionally transmitted as part of a sequence parameter
13615 * set and is not relevant to transmuxing.
13616 * @param count the number of entries in this scaling list
13617 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
13618 */
13619 ;
13620
13621 _proto.skipScalingList = function skipScalingList(count) {
13622 var lastScale = 8;
13623 var nextScale = 8;
13624 var deltaScale;
13625
13626 for (var j = 0; j < count; j++) {
13627 if (nextScale !== 0) {
13628 deltaScale = this.readEG();
13629 nextScale = (lastScale + deltaScale + 256) % 256;
13630 }
13631
13632 lastScale = nextScale === 0 ? lastScale : nextScale;
13633 }
13634 }
13635 /**
13636 * Read a sequence parameter set and return some interesting video
13637 * properties. A sequence parameter set is the H264 metadata that
13638 * describes the properties of upcoming video frames.
13639 * @param data {Uint8Array} the bytes of a sequence parameter set
13640 * @return {object} an object with configuration parsed from the
13641 * sequence parameter set, including the dimensions of the
13642 * associated video frames.
13643 */
13644 ;
13645
13646 _proto.readSPS = function readSPS() {
13647 var frameCropLeftOffset = 0;
13648 var frameCropRightOffset = 0;
13649 var frameCropTopOffset = 0;
13650 var frameCropBottomOffset = 0;
13651 var numRefFramesInPicOrderCntCycle;
13652 var scalingListCount;
13653 var i;
13654 var readUByte = this.readUByte.bind(this);
13655 var readBits = this.readBits.bind(this);
13656 var readUEG = this.readUEG.bind(this);
13657 var readBoolean = this.readBoolean.bind(this);
13658 var skipBits = this.skipBits.bind(this);
13659 var skipEG = this.skipEG.bind(this);
13660 var skipUEG = this.skipUEG.bind(this);
13661 var skipScalingList = this.skipScalingList.bind(this);
13662 readUByte();
13663 var profileIdc = readUByte(); // profile_idc
13664
13665 readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
13666
13667 skipBits(3); // reserved_zero_3bits u(3),
13668
13669 readUByte(); // level_idc u(8)
13670
13671 skipUEG(); // seq_parameter_set_id
13672 // some profiles have more optional data we don't need
13673
13674 if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
13675 var chromaFormatIdc = readUEG();
13676
13677 if (chromaFormatIdc === 3) {
13678 skipBits(1);
13679 } // separate_colour_plane_flag
13680
13681
13682 skipUEG(); // bit_depth_luma_minus8
13683
13684 skipUEG(); // bit_depth_chroma_minus8
13685
13686 skipBits(1); // qpprime_y_zero_transform_bypass_flag
13687
13688 if (readBoolean()) {
13689 // seq_scaling_matrix_present_flag
13690 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
13691
13692 for (i = 0; i < scalingListCount; i++) {
13693 if (readBoolean()) {
13694 // seq_scaling_list_present_flag[ i ]
13695 if (i < 6) {
13696 skipScalingList(16);
13697 } else {
13698 skipScalingList(64);
13699 }
13700 }
13701 }
13702 }
13703 }
13704
13705 skipUEG(); // log2_max_frame_num_minus4
13706
13707 var picOrderCntType = readUEG();
13708
13709 if (picOrderCntType === 0) {
13710 readUEG(); // log2_max_pic_order_cnt_lsb_minus4
13711 } else if (picOrderCntType === 1) {
13712 skipBits(1); // delta_pic_order_always_zero_flag
13713
13714 skipEG(); // offset_for_non_ref_pic
13715
13716 skipEG(); // offset_for_top_to_bottom_field
13717
13718 numRefFramesInPicOrderCntCycle = readUEG();
13719
13720 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
13721 skipEG();
13722 } // offset_for_ref_frame[ i ]
13723
13724 }
13725
13726 skipUEG(); // max_num_ref_frames
13727
13728 skipBits(1); // gaps_in_frame_num_value_allowed_flag
13729
13730 var picWidthInMbsMinus1 = readUEG();
13731 var picHeightInMapUnitsMinus1 = readUEG();
13732 var frameMbsOnlyFlag = readBits(1);
13733
13734 if (frameMbsOnlyFlag === 0) {
13735 skipBits(1);
13736 } // mb_adaptive_frame_field_flag
13737
13738
13739 skipBits(1); // direct_8x8_inference_flag
13740
13741 if (readBoolean()) {
13742 // frame_cropping_flag
13743 frameCropLeftOffset = readUEG();
13744 frameCropRightOffset = readUEG();
13745 frameCropTopOffset = readUEG();
13746 frameCropBottomOffset = readUEG();
13747 }
13748
13749 var pixelRatio = [1, 1];
13750
13751 if (readBoolean()) {
13752 // vui_parameters_present_flag
13753 if (readBoolean()) {
13754 // aspect_ratio_info_present_flag
13755 var aspectRatioIdc = readUByte();
13756
13757 switch (aspectRatioIdc) {
13758 case 1:
13759 pixelRatio = [1, 1];
13760 break;
13761
13762 case 2:
13763 pixelRatio = [12, 11];
13764 break;
13765
13766 case 3:
13767 pixelRatio = [10, 11];
13768 break;
13769
13770 case 4:
13771 pixelRatio = [16, 11];
13772 break;
13773
13774 case 5:
13775 pixelRatio = [40, 33];
13776 break;
13777
13778 case 6:
13779 pixelRatio = [24, 11];
13780 break;
13781
13782 case 7:
13783 pixelRatio = [20, 11];
13784 break;
13785
13786 case 8:
13787 pixelRatio = [32, 11];
13788 break;
13789
13790 case 9:
13791 pixelRatio = [80, 33];
13792 break;
13793
13794 case 10:
13795 pixelRatio = [18, 11];
13796 break;
13797
13798 case 11:
13799 pixelRatio = [15, 11];
13800 break;
13801
13802 case 12:
13803 pixelRatio = [64, 33];
13804 break;
13805
13806 case 13:
13807 pixelRatio = [160, 99];
13808 break;
13809
13810 case 14:
13811 pixelRatio = [4, 3];
13812 break;
13813
13814 case 15:
13815 pixelRatio = [3, 2];
13816 break;
13817
13818 case 16:
13819 pixelRatio = [2, 1];
13820 break;
13821
13822 case 255:
13823 {
13824 pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
13825 break;
13826 }
13827 }
13828 }
13829 }
13830
13831 return {
13832 width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2),
13833 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
13834 pixelRatio: pixelRatio
13835 };
13836 };
13837
13838 _proto.readSliceType = function readSliceType() {
13839 // skip NALu type
13840 this.readUByte(); // discard first_mb_in_slice
13841
13842 this.readUEG(); // return slice_type
13843
13844 return this.readUEG();
13845 };
13846
13847 return ExpGolomb;
13848}();
13849
13850/* harmony default export */ __webpack_exports__["default"] = (ExpGolomb);
13851
13852/***/ }),
13853
13854/***/ "./src/demux/id3.ts":
13855/*!**************************!*\
13856 !*** ./src/demux/id3.ts ***!
13857 \**************************/
13858/*! exports provided: isHeader, isFooter, getID3Data, canParse, getTimeStamp, isTimeStampFrame, getID3Frames, decodeFrame, utf8ArrayToStr, testables */
13859/***/ (function(module, __webpack_exports__, __webpack_require__) {
13860__webpack_require__.r(__webpack_exports__);
13861/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isHeader", function() { return isHeader; });
13862/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isFooter", function() { return isFooter; });
13863/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getID3Data", function() { return getID3Data; });
13864/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "canParse", function() { return canParse; });
13865/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getTimeStamp", function() { return getTimeStamp; });
13866/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isTimeStampFrame", function() { return isTimeStampFrame; });
13867/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getID3Frames", function() { return getID3Frames; });
13868/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "decodeFrame", function() { return decodeFrame; });
13869/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "utf8ArrayToStr", function() { return utf8ArrayToStr; });
13870/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "testables", function() { return testables; });
13871// breaking up those two types in order to clarify what is happening in the decoding path.
13872
13873/**
13874 * Returns true if an ID3 header can be found at offset in data
13875 * @param {Uint8Array} data - The data to search in
13876 * @param {number} offset - The offset at which to start searching
13877 * @return {boolean} - True if an ID3 header is found
13878 */
13879var isHeader = function isHeader(data, offset) {
13880 /*
13881 * http://id3.org/id3v2.3.0
13882 * [0] = 'I'
13883 * [1] = 'D'
13884 * [2] = '3'
13885 * [3,4] = {Version}
13886 * [5] = {Flags}
13887 * [6-9] = {ID3 Size}
13888 *
13889 * An ID3v2 tag can be detected with the following pattern:
13890 * $49 44 33 yy yy xx zz zz zz zz
13891 * Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
13892 */
13893 if (offset + 10 <= data.length) {
13894 // look for 'ID3' identifier
13895 if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
13896 // check version is within range
13897 if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
13898 // check size is within range
13899 if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
13900 return true;
13901 }
13902 }
13903 }
13904 }
13905
13906 return false;
13907};
13908/**
13909 * Returns true if an ID3 footer can be found at offset in data
13910 * @param {Uint8Array} data - The data to search in
13911 * @param {number} offset - The offset at which to start searching
13912 * @return {boolean} - True if an ID3 footer is found
13913 */
13914
13915var isFooter = function isFooter(data, offset) {
13916 /*
13917 * The footer is a copy of the header, but with a different identifier
13918 */
13919 if (offset + 10 <= data.length) {
13920 // look for '3DI' identifier
13921 if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
13922 // check version is within range
13923 if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
13924 // check size is within range
13925 if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
13926 return true;
13927 }
13928 }
13929 }
13930 }
13931
13932 return false;
13933};
13934/**
13935 * Returns any adjacent ID3 tags found in data starting at offset, as one block of data
13936 * @param {Uint8Array} data - The data to search in
13937 * @param {number} offset - The offset at which to start searching
13938 * @return {Uint8Array | undefined} - The block of data containing any ID3 tags found
13939 * or *undefined* if no header is found at the starting offset
13940 */
13941
13942var getID3Data = function getID3Data(data, offset) {
13943 var front = offset;
13944 var length = 0;
13945
13946 while (isHeader(data, offset)) {
13947 // ID3 header is 10 bytes
13948 length += 10;
13949 var size = readSize(data, offset + 6);
13950 length += size;
13951
13952 if (isFooter(data, offset + 10)) {
13953 // ID3 footer is 10 bytes
13954 length += 10;
13955 }
13956
13957 offset += length;
13958 }
13959
13960 if (length > 0) {
13961 return data.subarray(front, front + length);
13962 }
13963
13964 return undefined;
13965};
13966
13967var readSize = function readSize(data, offset) {
13968 var size = 0;
13969 size = (data[offset] & 0x7f) << 21;
13970 size |= (data[offset + 1] & 0x7f) << 14;
13971 size |= (data[offset + 2] & 0x7f) << 7;
13972 size |= data[offset + 3] & 0x7f;
13973 return size;
13974};
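// Worked example (illustration only): ID3 sizes are "synchsafe" integers -- four bytes
// with the high bit of each byte forced to 0, so only 7 bits per byte carry data:
//
//   readSize(new Uint8Array([0x00, 0x00, 0x02, 0x01]), 0); // (2 << 7) | 1 = 257
//   readSize(new Uint8Array([0x00, 0x00, 0x7f, 0x7f]), 0); // 16383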
13975
13976var canParse = function canParse(data, offset) {
13977 return isHeader(data, offset) && readSize(data, offset + 6) + 10 <= data.length - offset;
13978};
13979/**
13980 * Searches for the Elementary Stream timestamp found in the ID3 data chunk
13981 * @param {Uint8Array} data - Block of data containing one or more ID3 tags
13982 * @return {number | undefined} - The timestamp
13983 */
13984
13985var getTimeStamp = function getTimeStamp(data) {
13986 var frames = getID3Frames(data);
13987
13988 for (var i = 0; i < frames.length; i++) {
13989 var frame = frames[i];
13990
13991 if (isTimeStampFrame(frame)) {
13992 return readTimeStamp(frame);
13993 }
13994 }
13995
13996 return undefined;
13997};
13998/**
13999 * Returns true if the ID3 frame is an Elementary Stream timestamp frame
14000 * @param {ID3 frame} frame
14001 */
14002
14003var isTimeStampFrame = function isTimeStampFrame(frame) {
14004 return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
14005};
14006
14007var getFrameData = function getFrameData(data) {
14008 /*
14009 Frame ID $xx xx xx xx (four characters)
14010 Size $xx xx xx xx
14011 Flags $xx xx
14012 */
14013 var type = String.fromCharCode(data[0], data[1], data[2], data[3]);
14014 var size = readSize(data, 4); // skip frame id, size, and flags
14015
14016 var offset = 10;
14017 return {
14018 type: type,
14019 size: size,
14020 data: data.subarray(offset, offset + size)
14021 };
14022};
14023/**
14024 * Returns an array of ID3 frames found in all the ID3 tags in the id3Data
14025 * @param {Uint8Array} id3Data - The ID3 data containing one or more ID3 tags
14026 * @return {ID3.Frame[]} - Array of ID3 frame objects
14027 */
14028
14029
14030var getID3Frames = function getID3Frames(id3Data) {
14031 var offset = 0;
14032 var frames = [];
14033
14034 while (isHeader(id3Data, offset)) {
14035 var size = readSize(id3Data, offset + 6); // skip past ID3 header
14036
14037 offset += 10;
14038 var end = offset + size; // loop through frames in the ID3 tag
14039
14040 while (offset + 8 < end) {
14041 var frameData = getFrameData(id3Data.subarray(offset));
14042 var frame = decodeFrame(frameData);
14043
14044 if (frame) {
14045 frames.push(frame);
14046 } // skip frame header and frame data
14047
14048
14049 offset += frameData.size + 10;
14050 }
14051
14052 if (isFooter(id3Data, offset)) {
14053 offset += 10;
14054 }
14055 }
14056
14057 return frames;
14058};
14059var decodeFrame = function decodeFrame(frame) {
14060 if (frame.type === 'PRIV') {
14061 return decodePrivFrame(frame);
14062 } else if (frame.type[0] === 'W') {
14063 return decodeURLFrame(frame);
14064 }
14065
14066 return decodeTextFrame(frame);
14067};
14068
14069var decodePrivFrame = function decodePrivFrame(frame) {
14070 /*
14071 Format: <text string>\0<binary data>
14072 */
14073 if (frame.size < 2) {
14074 return undefined;
14075 }
14076
14077 var owner = utf8ArrayToStr(frame.data, true);
14078 var privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
14079 return {
14080 key: frame.type,
14081 info: owner,
14082 data: privateData.buffer
14083 };
14084};
14085
14086var decodeTextFrame = function decodeTextFrame(frame) {
14087 if (frame.size < 2) {
14088 return undefined;
14089 }
14090
14091 if (frame.type === 'TXXX') {
14092 /*
14093 Format:
14094 [0] = {Text Encoding}
14095 [1-?] = {Description}\0{Value}
14096 */
14097 var index = 1;
14098 var description = utf8ArrayToStr(frame.data.subarray(index), true);
14099 index += description.length + 1;
14100 var value = utf8ArrayToStr(frame.data.subarray(index));
14101 return {
14102 key: frame.type,
14103 info: description,
14104 data: value
14105 };
14106 }
14107 /*
14108 Format:
14109 [0] = {Text Encoding}
14110 [1-?] = {Value}
14111 */
14112
14113
14114 var text = utf8ArrayToStr(frame.data.subarray(1));
14115 return {
14116 key: frame.type,
14117 data: text
14118 };
14119};
14120
14121var decodeURLFrame = function decodeURLFrame(frame) {
14122 if (frame.type === 'WXXX') {
14123 /*
14124 Format:
14125 [0] = {Text Encoding}
14126 [1-?] = {Description}\0{URL}
14127 */
14128 if (frame.size < 2) {
14129 return undefined;
14130 }
14131
14132 var index = 1;
14133 var description = utf8ArrayToStr(frame.data.subarray(index), true);
14134 index += description.length + 1;
14135 var value = utf8ArrayToStr(frame.data.subarray(index));
14136 return {
14137 key: frame.type,
14138 info: description,
14139 data: value
14140 };
14141 }
14142 /*
14143 Format:
14144 [0-?] = {URL}
14145 */
14146
14147
14148 var url = utf8ArrayToStr(frame.data);
14149 return {
14150 key: frame.type,
14151 data: url
14152 };
14153};
14154
14155var readTimeStamp = function readTimeStamp(timeStampFrame) {
14156 if (timeStampFrame.data.byteLength === 8) {
14157 var data = new Uint8Array(timeStampFrame.data); // timestamp is 33 bit expressed as a big-endian eight-octet number,
14158 // with the upper 31 bits set to zero.
14159
14160 var pts33Bit = data[3] & 0x1;
14161 var timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
14162 timestamp /= 45;
14163
14164 if (pts33Bit) {
14165 timestamp += 47721858.84;
14166 } // 2^32 / 90
14167
14168
14169 return Math.round(timestamp);
14170 }
14171
14172 return undefined;
14173}; // http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
14174// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
14175
14176/* utf.js - UTF-8 <=> UTF-16 conversion
14177 *
14178 * Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
14179 * Version: 1.0
14180 * LastModified: Dec 25 1999
14181 * This library is free. You can redistribute it and/or modify it.
14182 */
14183
14184
14185var utf8ArrayToStr = function utf8ArrayToStr(array, exitOnNull) {
14186 if (exitOnNull === void 0) {
14187 exitOnNull = false;
14188 }
14189
14190 var decoder = getTextDecoder();
14191
14192 if (decoder) {
14193 var decoded = decoder.decode(array);
14194
14195 if (exitOnNull) {
14196 // grab up to the first null
14197 var idx = decoded.indexOf('\0');
14198 return idx !== -1 ? decoded.substring(0, idx) : decoded;
14199 } // remove any null characters
14200
14201
14202 return decoded.replace(/\0/g, '');
14203 }
14204
14205 var len = array.length;
14206 var c;
14207 var char2;
14208 var char3;
14209 var out = '';
14210 var i = 0;
14211
14212 while (i < len) {
14213 c = array[i++];
14214
14215 if (c === 0x00 && exitOnNull) {
14216 return out;
14217 } else if (c === 0x00 || c === 0x03) {
14218 // If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
14219 continue;
14220 }
14221
14222 switch (c >> 4) {
14223 case 0:
14224 case 1:
14225 case 2:
14226 case 3:
14227 case 4:
14228 case 5:
14229 case 6:
14230 case 7:
14231 // 0xxxxxxx
14232 out += String.fromCharCode(c);
14233 break;
14234
14235 case 12:
14236 case 13:
14237 // 110x xxxx 10xx xxxx
14238 char2 = array[i++];
14239 out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
14240 break;
14241
14242 case 14:
14243 // 1110 xxxx 10xx xxxx 10xx xxxx
14244 char2 = array[i++];
14245 char3 = array[i++];
14246 out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
14247 break;
14248 }
14249 }
14250
14251 return out;
14252};
14253var testables = {
14254 decodeTextFrame: decodeTextFrame
14255};
14256var decoder;
14257
14258function getTextDecoder() {
14259 if (!decoder && typeof self.TextDecoder !== 'undefined') {
14260 decoder = new self.TextDecoder('utf-8');
14261 }
14262
14263 return decoder;
14264}
14265
14266/***/ }),
14267
14268/***/ "./src/demux/mp3demuxer.ts":
14269/*!*********************************!*\
14270 !*** ./src/demux/mp3demuxer.ts ***!
14271 \*********************************/
14272/*! exports provided: default */
14273/***/ (function(module, __webpack_exports__, __webpack_require__) {
14274__webpack_require__.r(__webpack_exports__);
14275/* harmony import */ var _base_audio_demuxer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./base-audio-demuxer */ "./src/demux/base-audio-demuxer.ts");
14276/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
14277/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
14278/* harmony import */ var _mpegaudio__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./mpegaudio */ "./src/demux/mpegaudio.ts");
14279function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
14280
14281function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
14282
14283/**
14284 * MP3 demuxer
14285 */
14286
14287
14288
14289
14290
14291var MP3Demuxer = /*#__PURE__*/function (_BaseAudioDemuxer) {
14292 _inheritsLoose(MP3Demuxer, _BaseAudioDemuxer);
14293
14294 function MP3Demuxer() {
14295 return _BaseAudioDemuxer.apply(this, arguments) || this;
14296 }
14297
14298 var _proto = MP3Demuxer.prototype;
14299
14300 _proto.resetInitSegment = function resetInitSegment(audioCodec, videoCodec, duration) {
14301 _BaseAudioDemuxer.prototype.resetInitSegment.call(this, audioCodec, videoCodec, duration);
14302
14303 this._audioTrack = {
14304 container: 'audio/mpeg',
14305 type: 'audio',
14306 id: 2,
14307 pid: -1,
14308 sequenceNumber: 0,
14309 isAAC: false,
14310 samples: [],
14311 manifestCodec: audioCodec,
14312 duration: duration,
14313 inputTimeScale: 90000,
14314 dropped: 0
14315 };
14316 };
14317
14318 MP3Demuxer.probe = function probe(data) {
14319 if (!data) {
14320 return false;
14321 } // check if data contains ID3 timestamp and MPEG sync word
14322 // Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
14323 // Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
14324 // More info http://www.mp3-tech.org/programmer/frame_header.html
14325
14326
14327 var id3Data = _demux_id3__WEBPACK_IMPORTED_MODULE_1__["getID3Data"](data, 0) || [];
14328 var offset = id3Data.length;
14329
14330 for (var length = data.length; offset < length; offset++) {
14331 if (_mpegaudio__WEBPACK_IMPORTED_MODULE_3__["probe"](data, offset)) {
14332 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('MPEG Audio sync word found !');
14333 return true;
14334 }
14335 }
14336
14337 return false;
14338 };
14339
14340 _proto.canParse = function canParse(data, offset) {
14341 return _mpegaudio__WEBPACK_IMPORTED_MODULE_3__["canParse"](data, offset);
14342 };
14343
14344 _proto.appendFrame = function appendFrame(track, data, offset) {
14345 if (this.initPTS === null) {
14346 return;
14347 }
14348
14349 return _mpegaudio__WEBPACK_IMPORTED_MODULE_3__["appendFrame"](track, data, offset, this.initPTS, this.frameIndex);
14350 };
14351
14352 return MP3Demuxer;
14353}(_base_audio_demuxer__WEBPACK_IMPORTED_MODULE_0__["default"]);
14354
14355MP3Demuxer.minProbeByteLength = 4;
14356/* harmony default export */ __webpack_exports__["default"] = (MP3Demuxer);
14357
14358/***/ }),
14359
14360/***/ "./src/demux/mp4demuxer.ts":
14361/*!*********************************!*\
14362 !*** ./src/demux/mp4demuxer.ts ***!
14363 \*********************************/
14364/*! exports provided: default */
14365/***/ (function(module, __webpack_exports__, __webpack_require__) {
14366__webpack_require__.r(__webpack_exports__);
14367/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
14368/* harmony import */ var _dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./dummy-demuxed-track */ "./src/demux/dummy-demuxed-track.ts");
14369/**
14370 * MP4 demuxer
14371 */
14372
14373
14374
14375var MP4Demuxer = /*#__PURE__*/function () {
14376 function MP4Demuxer(observer, config) {
14377 this.remainderData = null;
14378 this.config = void 0;
14379 this.config = config;
14380 }
14381
14382 var _proto = MP4Demuxer.prototype;
14383
14384 _proto.resetTimeStamp = function resetTimeStamp() {};
14385
14386 _proto.resetInitSegment = function resetInitSegment() {};
14387
14388 _proto.resetContiguity = function resetContiguity() {};
14389
14390 MP4Demuxer.probe = function probe(data) {
14391 // ensure we find a moof box in the first 16 kB
14392 return Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_0__["findBox"])({
14393 data: data,
14394 start: 0,
14395 end: Math.min(data.length, 16384)
14396 }, ['moof']).length > 0;
14397 };
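// Illustrative sketch (assumption, not part of the bundle): probe() only asks findBox()
// whether a 'moof' box starts within the first 16 kB. A minimal 8-byte box -- a 32-bit
// big-endian size of 8 followed by the ASCII type 'moof' -- should be enough:
//
//   MP4Demuxer.probe(new Uint8Array([0x00, 0x00, 0x00, 0x08, 0x6d, 0x6f, 0x6f, 0x66])); // true
//   MP4Demuxer.probe(new Uint8Array([0x00, 0x00, 0x00, 0x08, 0x66, 0x74, 0x79, 0x70])); // false ('ftyp')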
14398
14399 _proto.demux = function demux(data) {
14400 // Load all data into the avc track. The CMAF remuxer will look for the data in the samples object; the rest of the fields do not matter
14401 var avcSamples = data;
14402 var avcTrack = Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_1__["dummyTrack"])();
14403
14404 if (this.config.progressive) {
14405 // Split the bytestream into two ranges: one encompassing all data up until the start of the last moof, and everything else.
14406 // This is done to guarantee that we're sending valid data to MSE - when demuxing progressively, we have no guarantee
14407      // that the fetch loader gives us complete moof+mdat pairs. If we push jagged data to MSE, it will throw an exception.
14408 if (this.remainderData) {
14409 avcSamples = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_0__["appendUint8Array"])(this.remainderData, data);
14410 }
14411
14412 var segmentedData = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_0__["segmentValidRange"])(avcSamples);
14413 this.remainderData = segmentedData.remainder;
14414 avcTrack.samples = segmentedData.valid || new Uint8Array();
14415 } else {
14416 avcTrack.samples = avcSamples;
14417 }
14418
14419 return {
14420 audioTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_1__["dummyTrack"])(),
14421 avcTrack: avcTrack,
14422 id3Track: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_1__["dummyTrack"])(),
14423 textTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_1__["dummyTrack"])()
14424 };
14425 };
14426
14427 _proto.flush = function flush() {
14428 var avcTrack = Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_1__["dummyTrack"])();
14429 avcTrack.samples = this.remainderData || new Uint8Array();
14430 this.remainderData = null;
14431 return {
14432 audioTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_1__["dummyTrack"])(),
14433 avcTrack: avcTrack,
14434 id3Track: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_1__["dummyTrack"])(),
14435 textTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_1__["dummyTrack"])()
14436 };
14437 };
14438
14439 _proto.demuxSampleAes = function demuxSampleAes(data, keyData, timeOffset) {
14440 return Promise.reject(new Error('The MP4 demuxer does not support SAMPLE-AES decryption'));
14441 };
14442
14443 _proto.destroy = function destroy() {};
14444
14445 return MP4Demuxer;
14446}();
14447
14448MP4Demuxer.minProbeByteLength = 1024;
14449/* harmony default export */ __webpack_exports__["default"] = (MP4Demuxer);
14450
14451/***/ }),
14452
14453/***/ "./src/demux/mpegaudio.ts":
14454/*!********************************!*\
14455 !*** ./src/demux/mpegaudio.ts ***!
14456 \********************************/
14457/*! exports provided: appendFrame, parseHeader, isHeaderPattern, isHeader, canParse, probe */
14458/***/ (function(module, __webpack_exports__, __webpack_require__) {
14459__webpack_require__.r(__webpack_exports__);
14460/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "appendFrame", function() { return appendFrame; });
14461/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseHeader", function() { return parseHeader; });
14462/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isHeaderPattern", function() { return isHeaderPattern; });
14463/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isHeader", function() { return isHeader; });
14464/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "canParse", function() { return canParse; });
14465/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "probe", function() { return probe; });
14466/**
14467 * MPEG parser helper
14468 */
14469var chromeVersion = null;
14470var BitratesMap = [32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160];
14471var SamplingRateMap = [44100, 48000, 32000, 22050, 24000, 16000, 11025, 12000, 8000];
14472var SamplesCoefficients = [// MPEG 2.5
14473[0, // Reserved
1447472, // Layer3
14475144, // Layer2
1447612 // Layer1
14477], // Reserved
14478[0, // Reserved
144790, // Layer3
144800, // Layer2
144810 // Layer1
14482], // MPEG 2
14483[0, // Reserved
1448472, // Layer3
14485144, // Layer2
1448612 // Layer1
14487], // MPEG 1
14488[0, // Reserved
14489144, // Layer3
14490144, // Layer2
1449112 // Layer1
14492]];
14493var BytesInSlot = [0, // Reserved
144941, // Layer3
144951, // Layer2
144964 // Layer1
14497];
14498function appendFrame(track, data, offset, pts, frameIndex) {
14499 // Using http://www.datavoyage.com/mpgscript/mpeghdr.htm as a reference
14500 if (offset + 24 > data.length) {
14501 return;
14502 }
14503
14504 var header = parseHeader(data, offset);
14505
14506 if (header && offset + header.frameLength <= data.length) {
14507 var frameDuration = header.samplesPerFrame * 90000 / header.sampleRate;
14508 var stamp = pts + frameIndex * frameDuration;
14509 var sample = {
14510 unit: data.subarray(offset, offset + header.frameLength),
14511 pts: stamp,
14512 dts: stamp
14513 };
14514 track.config = [];
14515 track.channelCount = header.channelCount;
14516 track.samplerate = header.sampleRate;
14517 track.samples.push(sample);
14518 return {
14519 sample: sample,
14520 length: header.frameLength,
14521 missing: 0
14522 };
14523 }
14524}
14525function parseHeader(data, offset) {
14526 var mpegVersion = data[offset + 1] >> 3 & 3;
14527 var mpegLayer = data[offset + 1] >> 1 & 3;
14528 var bitRateIndex = data[offset + 2] >> 4 & 15;
14529 var sampleRateIndex = data[offset + 2] >> 2 & 3;
14530
14531 if (mpegVersion !== 1 && bitRateIndex !== 0 && bitRateIndex !== 15 && sampleRateIndex !== 3) {
14532 var paddingBit = data[offset + 2] >> 1 & 1;
14533 var channelMode = data[offset + 3] >> 6;
14534 var columnInBitrates = mpegVersion === 3 ? 3 - mpegLayer : mpegLayer === 3 ? 3 : 4;
14535 var bitRate = BitratesMap[columnInBitrates * 14 + bitRateIndex - 1] * 1000;
14536 var columnInSampleRates = mpegVersion === 3 ? 0 : mpegVersion === 2 ? 1 : 2;
14537 var sampleRate = SamplingRateMap[columnInSampleRates * 3 + sampleRateIndex];
14538 var channelCount = channelMode === 3 ? 1 : 2; // If bits of channel mode are `11` then it is a single channel (Mono)
14539
14540 var sampleCoefficient = SamplesCoefficients[mpegVersion][mpegLayer];
14541 var bytesInSlot = BytesInSlot[mpegLayer];
14542 var samplesPerFrame = sampleCoefficient * 8 * bytesInSlot;
14543 var frameLength = Math.floor(sampleCoefficient * bitRate / sampleRate + paddingBit) * bytesInSlot;
14544
14545 if (chromeVersion === null) {
14546 var userAgent = navigator.userAgent || '';
14547 var result = userAgent.match(/Chrome\/(\d+)/i);
14548 chromeVersion = result ? parseInt(result[1]) : 0;
14549 }
14550
14551 var needChromeFix = !!chromeVersion && chromeVersion <= 87;
14552
14553 if (needChromeFix && mpegLayer === 2 && bitRate >= 224000 && channelMode === 0) {
14554 // Work around bug in Chromium by setting channelMode to dual-channel (01) instead of stereo (00)
14555 data[offset + 3] = data[offset + 3] | 0x80;
14556 }
14557
14558 return {
14559 sampleRate: sampleRate,
14560 channelCount: channelCount,
14561 frameLength: frameLength,
14562 samplesPerFrame: samplesPerFrame
14563 };
14564 }
14565}
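// Worked example (illustration only): a typical MPEG-1 Layer III header 0xff 0xfb 0x90 0x00
// (128 kbit/s, 44.1 kHz, no padding, stereo) run through the field layout above:
//
//   parseHeader(new Uint8Array([0xff, 0xfb, 0x90, 0x00]), 0);
//   // -> { sampleRate: 44100, channelCount: 2, frameLength: 417, samplesPerFrame: 1152 }
//   // frameLength = Math.floor(144 * 128000 / 44100 + 0) * 1 = 417 bytes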
14566function isHeaderPattern(data, offset) {
14567 return data[offset] === 0xff && (data[offset + 1] & 0xe0) === 0xe0 && (data[offset + 1] & 0x06) !== 0x00;
14568}
14569function isHeader(data, offset) {
14570 // Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
14571 // Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
14572 // More info http://www.mp3-tech.org/programmer/frame_header.html
14573 return offset + 1 < data.length && isHeaderPattern(data, offset);
14574}
14575function canParse(data, offset) {
14576 var headerSize = 4;
14577 return isHeaderPattern(data, offset) && headerSize <= data.length - offset;
14578}
14579function probe(data, offset) {
14580  // same as isHeader, but we also check that another MPEG frame follows the last one
14581  // or that the end of the data has been reached
14582 if (offset + 1 < data.length && isHeaderPattern(data, offset)) {
14583 // MPEG header Length
14584 var headerLength = 4; // MPEG frame Length
14585
14586 var header = parseHeader(data, offset);
14587 var frameLength = headerLength;
14588
14589 if (header !== null && header !== void 0 && header.frameLength) {
14590 frameLength = header.frameLength;
14591 }
14592
14593 var newOffset = offset + frameLength;
14594 return newOffset === data.length || isHeader(data, newOffset);
14595 }
14596
14597 return false;
14598}
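// A minimal usage sketch (assuming `data` is a Uint8Array of raw MPEG audio):
//   if (probe(data, 0)) { /* treat the payload as MPEG audio */ }
// probe() is stricter than isHeader(): it parses the candidate header and only accepts the
// offset when another valid header (or the end of the data) sits exactly frameLength bytes
// later, which filters out false 0xFF sync words inside ID3 tags or arbitrary binary data.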
14599
14600/***/ }),
14601
14602/***/ "./src/demux/sample-aes.ts":
14603/*!*********************************!*\
14604 !*** ./src/demux/sample-aes.ts ***!
14605 \*********************************/
14606/*! exports provided: default */
14607/***/ (function(module, __webpack_exports__, __webpack_require__) {
14608__webpack_require__.r(__webpack_exports__);
14609/* harmony import */ var _crypt_decrypter__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../crypt/decrypter */ "./src/crypt/decrypter.ts");
14610/* harmony import */ var _tsdemuxer__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./tsdemuxer */ "./src/demux/tsdemuxer.ts");
14611/**
14612 * SAMPLE-AES decrypter
14613 */
14614
14615
14616
14617var SampleAesDecrypter = /*#__PURE__*/function () {
14618 function SampleAesDecrypter(observer, config, keyData) {
14619 this.keyData = void 0;
14620 this.decrypter = void 0;
14621 this.keyData = keyData;
14622 this.decrypter = new _crypt_decrypter__WEBPACK_IMPORTED_MODULE_0__["default"](observer, config, {
14623 removePKCS7Padding: false
14624 });
14625 }
14626
14627 var _proto = SampleAesDecrypter.prototype;
14628
14629 _proto.decryptBuffer = function decryptBuffer(encryptedData, callback) {
14630 this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer, callback);
14631 } // AAC - encrypt all full 16-byte blocks starting from offset 16
14632 ;
14633
14634 _proto.decryptAacSample = function decryptAacSample(samples, sampleIndex, callback, sync) {
14635 var curUnit = samples[sampleIndex].unit;
14636 var encryptedData = curUnit.subarray(16, curUnit.length - curUnit.length % 16);
14637 var encryptedBuffer = encryptedData.buffer.slice(encryptedData.byteOffset, encryptedData.byteOffset + encryptedData.length);
14638 var localthis = this;
14639 this.decryptBuffer(encryptedBuffer, function (decryptedBuffer) {
14640 var decryptedData = new Uint8Array(decryptedBuffer);
14641 curUnit.set(decryptedData, 16);
14642
14643 if (!sync) {
14644 localthis.decryptAacSamples(samples, sampleIndex + 1, callback);
14645 }
14646 });
14647 };
14648
14649 _proto.decryptAacSamples = function decryptAacSamples(samples, sampleIndex, callback) {
14650 for (;; sampleIndex++) {
14651 if (sampleIndex >= samples.length) {
14652 callback();
14653 return;
14654 }
14655
14656 if (samples[sampleIndex].unit.length < 32) {
14657 continue;
14658 }
14659
14660 var sync = this.decrypter.isSync();
14661 this.decryptAacSample(samples, sampleIndex, callback, sync);
14662
14663 if (!sync) {
14664 return;
14665 }
14666 }
14667 } // AVC - encrypt one 16-byte block out of ten, starting from offset 32
14668 ;
14669
14670 _proto.getAvcEncryptedData = function getAvcEncryptedData(decodedData) {
14671 var encryptedDataLen = Math.floor((decodedData.length - 48) / 160) * 16 + 16;
14672 var encryptedData = new Int8Array(encryptedDataLen);
14673 var outputPos = 0;
14674
14675 for (var inputPos = 32; inputPos < decodedData.length - 16; inputPos += 160, outputPos += 16) {
14676 encryptedData.set(decodedData.subarray(inputPos, inputPos + 16), outputPos);
14677 }
14678
14679 return encryptedData;
14680 };
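  // Worked example of the gather step above (hypothetical 500-byte NAL unit): SAMPLE-AES for
  // AVC encrypts one 16-byte block out of every ten, starting 32 bytes into the unit, so the
  // loop copies the blocks at offsets 32, 192 and 352 into a contiguous buffer of
  //   Math.floor((500 - 48) / 160) * 16 + 16 = 48 bytes,
  // which decryptBuffer() can process in one pass before getAvcDecryptedUnit() scatters the
  // decrypted blocks back to their original offsets.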
14681
14682 _proto.getAvcDecryptedUnit = function getAvcDecryptedUnit(decodedData, decryptedData) {
14683 var uint8DecryptedData = new Uint8Array(decryptedData);
14684 var inputPos = 0;
14685
14686 for (var outputPos = 32; outputPos < decodedData.length - 16; outputPos += 160, inputPos += 16) {
14687 decodedData.set(uint8DecryptedData.subarray(inputPos, inputPos + 16), outputPos);
14688 }
14689
14690 return decodedData;
14691 };
14692
14693 _proto.decryptAvcSample = function decryptAvcSample(samples, sampleIndex, unitIndex, callback, curUnit, sync) {
14694 var decodedData = Object(_tsdemuxer__WEBPACK_IMPORTED_MODULE_1__["discardEPB"])(curUnit.data);
14695 var encryptedData = this.getAvcEncryptedData(decodedData);
14696 var localthis = this;
14697 this.decryptBuffer(encryptedData.buffer, function (decryptedBuffer) {
14698 curUnit.data = localthis.getAvcDecryptedUnit(decodedData, decryptedBuffer);
14699
14700 if (!sync) {
14701 localthis.decryptAvcSamples(samples, sampleIndex, unitIndex + 1, callback);
14702 }
14703 });
14704 };
14705
14706 _proto.decryptAvcSamples = function decryptAvcSamples(samples, sampleIndex, unitIndex, callback) {
14707 if (samples instanceof Uint8Array) {
14708 throw new Error('Cannot decrypt samples of type Uint8Array');
14709 }
14710
14711 for (;; sampleIndex++, unitIndex = 0) {
14712 if (sampleIndex >= samples.length) {
14713 callback();
14714 return;
14715 }
14716
14717 var curUnits = samples[sampleIndex].units;
14718
14719 for (;; unitIndex++) {
14720 if (unitIndex >= curUnits.length) {
14721 break;
14722 }
14723
14724 var curUnit = curUnits[unitIndex];
14725
14726 if (curUnit.data.length <= 48 || curUnit.type !== 1 && curUnit.type !== 5) {
14727 continue;
14728 }
14729
14730 var sync = this.decrypter.isSync();
14731 this.decryptAvcSample(samples, sampleIndex, unitIndex, callback, curUnit, sync);
14732
14733 if (!sync) {
14734 return;
14735 }
14736 }
14737 }
14738 };
14739
14740 return SampleAesDecrypter;
14741}();
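// Note on the control flow above: decryptAacSamples()/decryptAvcSamples() advance through the
// samples synchronously while decrypter.isSync() is true (software AES), and otherwise stop and
// resume from the async decryptBuffer() callback (WebCrypto). Units shorter than the clear
// lead-in (< 32 bytes for AAC, <= 48 bytes for AVC NAL units of type 1/5) are skipped, and the
// supplied callback fires once every sample has been handled.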
14742
14743/* harmony default export */ __webpack_exports__["default"] = (SampleAesDecrypter);
14744
14745/***/ }),
14746
14747/***/ "./src/demux/transmuxer-interface.ts":
14748/*!*******************************************!*\
14749 !*** ./src/demux/transmuxer-interface.ts ***!
14750 \*******************************************/
14751/*! exports provided: default */
14752/***/ (function(module, __webpack_exports__, __webpack_require__) {
14753__webpack_require__.r(__webpack_exports__);
14754/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return TransmuxerInterface; });
14755/* harmony import */ var webworkify_webpack__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! webworkify-webpack */ "./node_modules/webworkify-webpack/index.js");
14756/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
14757/* harmony import */ var _demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../demux/transmuxer */ "./src/demux/transmuxer.ts");
14758/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
14759/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
14760/* harmony import */ var _utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/mediasource-helper */ "./src/utils/mediasource-helper.ts");
14761/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! eventemitter3 */ "./node_modules/eventemitter3/index.js");
14762
14763
14764
14765
14766
14767
14768
14769var MediaSource = Object(_utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_5__["getMediaSource"])() || {
14770 isTypeSupported: function isTypeSupported() {
14771 return false;
14772 }
14773};
14774
14775var TransmuxerInterface = /*#__PURE__*/function () {
14776 function TransmuxerInterface(hls, id, onTransmuxComplete, onFlush) {
14777 var _this = this;
14778
14779 this.hls = void 0;
14780 this.id = void 0;
14781 this.observer = void 0;
14782 this.frag = null;
14783 this.part = null;
14784 this.worker = void 0;
14785 this.onwmsg = void 0;
14786 this.transmuxer = null;
14787 this.onTransmuxComplete = void 0;
14788 this.onFlush = void 0;
14789 this.hls = hls;
14790 this.id = id;
14791 this.onTransmuxComplete = onTransmuxComplete;
14792 this.onFlush = onFlush;
14793 var config = hls.config;
14794
14795 var forwardMessage = function forwardMessage(ev, data) {
14796 data = data || {};
14797 data.frag = _this.frag;
14798 data.id = _this.id;
14799 hls.trigger(ev, data);
14800 }; // forward events to main thread
14801
14802
14803 this.observer = new eventemitter3__WEBPACK_IMPORTED_MODULE_6__["EventEmitter"]();
14804 this.observer.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_DECRYPTED, forwardMessage);
14805 this.observer.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, forwardMessage);
14806 var typeSupported = {
14807 mp4: MediaSource.isTypeSupported('video/mp4'),
14808 mpeg: MediaSource.isTypeSupported('audio/mpeg'),
14809 mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"')
14810 }; // navigator.vendor is not always available in Web Worker
14811 // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
14812
14813 var vendor = navigator.vendor;
14814
14815 if (config.enableWorker && typeof Worker !== 'undefined') {
14816 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log('demuxing in webworker');
14817 var worker;
14818
14819 try {
14820 worker = this.worker = webworkify_webpack__WEBPACK_IMPORTED_MODULE_0__(/*require.resolve*/(/*! ../demux/transmuxer-worker.ts */ "./src/demux/transmuxer-worker.ts"));
14821 this.onwmsg = this.onWorkerMessage.bind(this);
14822 worker.addEventListener('message', this.onwmsg);
14823
14824 worker.onerror = function (event) {
14825 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
14826 type: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorTypes"].OTHER_ERROR,
14827 details: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorDetails"].INTERNAL_EXCEPTION,
14828 fatal: true,
14829 event: 'demuxerWorker',
14830 error: new Error(event.message + " (" + event.filename + ":" + event.lineno + ")")
14831 });
14832 };
14833
14834 worker.postMessage({
14835 cmd: 'init',
14836 typeSupported: typeSupported,
14837 vendor: vendor,
14838 id: id,
14839 config: JSON.stringify(config)
14840 });
14841 } catch (err) {
14842 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('Error in worker:', err);
14843 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].error('Error while initializing DemuxerWorker, fallback to inline');
14844
14845 if (worker) {
14846 // revoke the Object URL that was used to create transmuxer worker, so as not to leak it
14847 self.URL.revokeObjectURL(worker.objectURL);
14848 }
14849
14850 this.transmuxer = new _demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["default"](this.observer, typeSupported, config, vendor, id);
14851 this.worker = null;
14852 }
14853 } else {
14854 this.transmuxer = new _demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["default"](this.observer, typeSupported, config, vendor, id);
14855 }
14856 }
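  // Summary of the constructor above: when config.enableWorker is set and Worker is available,
  // transmuxing runs in a web worker created via webworkify-webpack and initialised with
  //   { cmd: 'init', typeSupported, vendor, id, config: JSON.stringify(config) }
  // If worker setup throws, the worker's object URL is revoked and the code falls back to an
  // inline Transmuxer on the main thread; observer events (FRAG_DECRYPTED, ERROR) are forwarded
  // to the hls instance in either mode.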
14857
14858 var _proto = TransmuxerInterface.prototype;
14859
14860 _proto.destroy = function destroy() {
14861 var w = this.worker;
14862
14863 if (w) {
14864 w.removeEventListener('message', this.onwmsg);
14865 w.terminate();
14866 this.worker = null;
14867 } else {
14868 var transmuxer = this.transmuxer;
14869
14870 if (transmuxer) {
14871 transmuxer.destroy();
14872 this.transmuxer = null;
14873 }
14874 }
14875
14876 var observer = this.observer;
14877
14878 if (observer) {
14879 observer.removeAllListeners();
14880 } // @ts-ignore
14881
14882
14883 this.observer = null;
14884 };
14885
14886 _proto.push = function push(data, initSegmentData, audioCodec, videoCodec, frag, part, duration, accurateTimeOffset, chunkMeta, defaultInitPTS) {
14887 var _frag$initSegment,
14888 _lastFrag$initSegment,
14889 _this2 = this;
14890
14891 chunkMeta.transmuxing.start = self.performance.now();
14892 var transmuxer = this.transmuxer,
14893 worker = this.worker;
14894 var timeOffset = part ? part.start : frag.start;
14895 var decryptdata = frag.decryptdata;
14896 var lastFrag = this.frag;
14897 var discontinuity = !(lastFrag && frag.cc === lastFrag.cc);
14898 var trackSwitch = !(lastFrag && chunkMeta.level === lastFrag.level);
14899 var snDiff = lastFrag ? chunkMeta.sn - lastFrag.sn : -1;
14900 var partDiff = this.part ? chunkMeta.part - this.part.index : 1;
14901 var contiguous = !trackSwitch && (snDiff === 1 || snDiff === 0 && partDiff === 1);
14902 var now = self.performance.now();
14903
14904 if (trackSwitch || snDiff || frag.stats.parsing.start === 0) {
14905 frag.stats.parsing.start = now;
14906 }
14907
14908 if (part && (partDiff || !contiguous)) {
14909 part.stats.parsing.start = now;
14910 }
14911
14912 var initSegmentChange = !(lastFrag && ((_frag$initSegment = frag.initSegment) === null || _frag$initSegment === void 0 ? void 0 : _frag$initSegment.url) === ((_lastFrag$initSegment = lastFrag.initSegment) === null || _lastFrag$initSegment === void 0 ? void 0 : _lastFrag$initSegment.url));
14913 var state = new _demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["TransmuxState"](discontinuity, contiguous, accurateTimeOffset, trackSwitch, timeOffset, initSegmentChange);
14914
14915 if (!contiguous || discontinuity || initSegmentChange) {
14916 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log("[transmuxer-interface, " + frag.type + "]: Starting new transmux session for sn: " + chunkMeta.sn + " p: " + chunkMeta.part + " level: " + chunkMeta.level + " id: " + chunkMeta.id + "\n discontinuity: " + discontinuity + "\n trackSwitch: " + trackSwitch + "\n contiguous: " + contiguous + "\n accurateTimeOffset: " + accurateTimeOffset + "\n timeOffset: " + timeOffset + "\n initSegmentChange: " + initSegmentChange);
14917 var config = new _demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["TransmuxConfig"](audioCodec, videoCodec, initSegmentData, duration, defaultInitPTS);
14918 this.configureTransmuxer(config);
14919 }
14920
14921 this.frag = frag;
14922 this.part = part; // Frags with sn of 'initSegment' are not transmuxed
14923
14924 if (worker) {
14925 // post fragment payload as transferable objects for ArrayBuffer (no copy)
14926 worker.postMessage({
14927 cmd: 'demux',
14928 data: data,
14929 decryptdata: decryptdata,
14930 chunkMeta: chunkMeta,
14931 state: state
14932 }, data instanceof ArrayBuffer ? [data] : []);
14933 } else if (transmuxer) {
14934 var _transmuxResult = transmuxer.push(data, decryptdata, chunkMeta, state);
14935
14936 if (Object(_demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["isPromise"])(_transmuxResult)) {
14937 _transmuxResult.then(function (data) {
14938 _this2.handleTransmuxComplete(data);
14939 });
14940 } else {
14941 this.handleTransmuxComplete(_transmuxResult);
14942 }
14943 }
14944 };
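  // Worked example of the contiguity flags above (hypothetical values): with lastFrag.sn = 5 and
  // chunkMeta.sn = 6 on the same level, snDiff is 1 and trackSwitch is false, so contiguous is
  // true and the existing transmux session is reused. A discontinuity (cc change), a
  // non-contiguous sn, a track/level switch, or a different initSegment URL triggers
  // configureTransmuxer() with a fresh TransmuxConfig before the payload is posted to the worker
  // or pushed inline.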
14945
14946 _proto.flush = function flush(chunkMeta) {
14947 var _this3 = this;
14948
14949 chunkMeta.transmuxing.start = self.performance.now();
14950 var transmuxer = this.transmuxer,
14951 worker = this.worker;
14952
14953 if (worker) {
14954 worker.postMessage({
14955 cmd: 'flush',
14956 chunkMeta: chunkMeta
14957 });
14958 } else if (transmuxer) {
14959 var _transmuxResult2 = transmuxer.flush(chunkMeta);
14960
14961 if (Object(_demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["isPromise"])(_transmuxResult2)) {
14962 _transmuxResult2.then(function (data) {
14963 _this3.handleFlushResult(data, chunkMeta);
14964 });
14965 } else {
14966 this.handleFlushResult(_transmuxResult2, chunkMeta);
14967 }
14968 }
14969 };
14970
14971 _proto.handleFlushResult = function handleFlushResult(results, chunkMeta) {
14972 var _this4 = this;
14973
14974 results.forEach(function (result) {
14975 _this4.handleTransmuxComplete(result);
14976 });
14977 this.onFlush(chunkMeta);
14978 };
14979
14980 _proto.onWorkerMessage = function onWorkerMessage(ev) {
14981 var data = ev.data;
14982 var hls = this.hls;
14983
14984 switch (data.event) {
14985 case 'init':
14986 {
14987 // revoke the Object URL that was used to create transmuxer worker, so as not to leak it
14988 self.URL.revokeObjectURL(this.worker.objectURL);
14989 break;
14990 }
14991
14992 case 'transmuxComplete':
14993 {
14994 this.handleTransmuxComplete(data.data);
14995 break;
14996 }
14997
14998 case 'flush':
14999 {
15000 this.onFlush(data.data);
15001 break;
15002 }
15003
15004 /* falls through */
15005
15006 default:
15007 {
15008 data.data = data.data || {};
15009 data.data.frag = this.frag;
15010 data.data.id = this.id;
15011 hls.trigger(data.event, data.data);
15012 break;
15013 }
15014 }
15015 };
15016
15017 _proto.configureTransmuxer = function configureTransmuxer(config) {
15018 var worker = this.worker,
15019 transmuxer = this.transmuxer;
15020
15021 if (worker) {
15022 worker.postMessage({
15023 cmd: 'configure',
15024 config: config
15025 });
15026 } else if (transmuxer) {
15027 transmuxer.configure(config);
15028 }
15029 };
15030
15031 _proto.handleTransmuxComplete = function handleTransmuxComplete(result) {
15032 result.chunkMeta.transmuxing.end = self.performance.now();
15033 this.onTransmuxComplete(result);
15034 };
15035
15036 return TransmuxerInterface;
15037}();
15038
15039
15040
15041/***/ }),
15042
15043/***/ "./src/demux/transmuxer-worker.ts":
15044/*!****************************************!*\
15045 !*** ./src/demux/transmuxer-worker.ts ***!
15046 \****************************************/
15047/*! exports provided: default */
15048/***/ (function(module, __webpack_exports__, __webpack_require__) {
15049__webpack_require__.r(__webpack_exports__);
15050/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return TransmuxerWorker; });
15051/* harmony import */ var _demux_transmuxer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../demux/transmuxer */ "./src/demux/transmuxer.ts");
15052/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
15053/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
15054/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! eventemitter3 */ "./node_modules/eventemitter3/index.js");
15055
15056
15057
15058
15059function TransmuxerWorker(self) {
15060 var observer = new eventemitter3__WEBPACK_IMPORTED_MODULE_3__["EventEmitter"]();
15061
15062 var forwardMessage = function forwardMessage(ev, data) {
15063 self.postMessage({
15064 event: ev,
15065 data: data
15066 });
15067 }; // forward events to main thread
15068
15069
15070 observer.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_DECRYPTED, forwardMessage);
15071 observer.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, forwardMessage);
15072 self.addEventListener('message', function (ev) {
15073 var data = ev.data;
15074
15075 switch (data.cmd) {
15076 case 'init':
15077 {
15078 var config = JSON.parse(data.config);
15079 self.transmuxer = new _demux_transmuxer__WEBPACK_IMPORTED_MODULE_0__["default"](observer, data.typeSupported, config, data.vendor, data.id);
15080 Object(_utils_logger__WEBPACK_IMPORTED_MODULE_2__["enableLogs"])(config.debug);
15081 forwardMessage('init', null);
15082 break;
15083 }
15084
15085 case 'configure':
15086 {
15087 self.transmuxer.configure(data.config);
15088 break;
15089 }
15090
15091 case 'demux':
15092 {
15093 var transmuxResult = self.transmuxer.push(data.data, data.decryptdata, data.chunkMeta, data.state);
15094
15095 if (Object(_demux_transmuxer__WEBPACK_IMPORTED_MODULE_0__["isPromise"])(transmuxResult)) {
15096 transmuxResult.then(function (data) {
15097 emitTransmuxComplete(self, data);
15098 });
15099 } else {
15100 emitTransmuxComplete(self, transmuxResult);
15101 }
15102
15103 break;
15104 }
15105
15106 case 'flush':
15107 {
15108 var id = data.chunkMeta;
15109
15110 var _transmuxResult = self.transmuxer.flush(id);
15111
15112 if (Object(_demux_transmuxer__WEBPACK_IMPORTED_MODULE_0__["isPromise"])(_transmuxResult)) {
15113 _transmuxResult.then(function (results) {
15114 handleFlushResult(self, results, id);
15115 });
15116 } else {
15117 handleFlushResult(self, _transmuxResult, id);
15118 }
15119
15120 break;
15121 }
15122 }
15123 });
15124}
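// The worker side of the protocol set up by TransmuxerInterface: 'init' creates the Transmuxer
// and enables logging, 'configure' forwards a TransmuxConfig, 'demux' pushes a chunk and replies
// with 'transmuxComplete' messages (via emitTransmuxComplete), and 'flush' drains any cached
// data and replies with a final 'flush' message carrying the chunkMeta.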
15125
15126function emitTransmuxComplete(self, transmuxResult) {
15127 if (isEmptyResult(transmuxResult.remuxResult)) {
15128 return;
15129 }
15130
15131 var transferable = [];
15132 var _transmuxResult$remux = transmuxResult.remuxResult,
15133 audio = _transmuxResult$remux.audio,
15134 video = _transmuxResult$remux.video;
15135
15136 if (audio) {
15137 addToTransferable(transferable, audio);
15138 }
15139
15140 if (video) {
15141 addToTransferable(transferable, video);
15142 }
15143
15144 self.postMessage({
15145 event: 'transmuxComplete',
15146 data: transmuxResult
15147 }, transferable);
15148} // Converts data to a transferable object (https://developers.google.com/web/updates/2011/12/Transferable-Objects-Lightning-Fast)
15149// in order to minimize message passing overhead
15150
15151
15152function addToTransferable(transferable, track) {
15153 if (track.data1) {
15154 transferable.push(track.data1.buffer);
15155 }
15156
15157 if (track.data2) {
15158 transferable.push(track.data2.buffer);
15159 }
15160}
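// Passing track.data1/track.data2 buffers in the transfer list of postMessage() moves them to
// the receiving context instead of structured-cloning them, so large remuxed segments cross the
// worker boundary without a copy (the buffers become detached on the sending side).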
15161
15162function handleFlushResult(self, results, chunkMeta) {
15163 results.forEach(function (result) {
15164 emitTransmuxComplete(self, result);
15165 });
15166 self.postMessage({
15167 event: 'flush',
15168 data: chunkMeta
15169 });
15170}
15171
15172function isEmptyResult(remuxResult) {
15173 return !remuxResult.audio && !remuxResult.video && !remuxResult.text && !remuxResult.id3 && !remuxResult.initSegment;
15174}
15175
15176/***/ }),
15177
15178/***/ "./src/demux/transmuxer.ts":
15179/*!*********************************!*\
15180 !*** ./src/demux/transmuxer.ts ***!
15181 \*********************************/
15182/*! exports provided: default, isPromise, TransmuxConfig, TransmuxState */
15183/***/ (function(module, __webpack_exports__, __webpack_require__) {
15184__webpack_require__.r(__webpack_exports__);
15185/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return Transmuxer; });
15186/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isPromise", function() { return isPromise; });
15187/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "TransmuxConfig", function() { return TransmuxConfig; });
15188/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "TransmuxState", function() { return TransmuxState; });
15189/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
15190/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
15191/* harmony import */ var _crypt_decrypter__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../crypt/decrypter */ "./src/crypt/decrypter.ts");
15192/* harmony import */ var _demux_aacdemuxer__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../demux/aacdemuxer */ "./src/demux/aacdemuxer.ts");
15193/* harmony import */ var _demux_mp4demuxer__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../demux/mp4demuxer */ "./src/demux/mp4demuxer.ts");
15194/* harmony import */ var _demux_tsdemuxer__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../demux/tsdemuxer */ "./src/demux/tsdemuxer.ts");
15195/* harmony import */ var _demux_mp3demuxer__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../demux/mp3demuxer */ "./src/demux/mp3demuxer.ts");
15196/* harmony import */ var _remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../remux/mp4-remuxer */ "./src/remux/mp4-remuxer.ts");
15197/* harmony import */ var _remux_passthrough_remuxer__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../remux/passthrough-remuxer */ "./src/remux/passthrough-remuxer.ts");
15198/* harmony import */ var _chunk_cache__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./chunk-cache */ "./src/demux/chunk-cache.ts");
15199/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
15200/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
15201
15202
15203
15204
15205
15206
15207
15208
15209
15210
15211
15212
15213var now; // performance.now() is not available in Web Workers, at least in Safari Desktop
15214
15215try {
15216 now = self.performance.now.bind(self.performance);
15217} catch (err) {
15218 _utils_logger__WEBPACK_IMPORTED_MODULE_11__["logger"].debug('Unable to use Performance API on this environment');
15219 now = self.Date.now;
15220}
15221
15222var muxConfig = [{
15223 demux: _demux_tsdemuxer__WEBPACK_IMPORTED_MODULE_5__["default"],
15224 remux: _remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_7__["default"]
15225}, {
15226 demux: _demux_mp4demuxer__WEBPACK_IMPORTED_MODULE_4__["default"],
15227 remux: _remux_passthrough_remuxer__WEBPACK_IMPORTED_MODULE_8__["default"]
15228}, {
15229 demux: _demux_aacdemuxer__WEBPACK_IMPORTED_MODULE_3__["default"],
15230 remux: _remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_7__["default"]
15231}, {
15232 demux: _demux_mp3demuxer__WEBPACK_IMPORTED_MODULE_6__["default"],
15233 remux: _remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_7__["default"]
15234}];
15235var minProbeByteLength = 1024;
15236muxConfig.forEach(function (_ref) {
15237 var demux = _ref.demux;
15238 minProbeByteLength = Math.max(minProbeByteLength, demux.minProbeByteLength);
15239});
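// Demuxers are probed in the order listed above (MPEG-TS, fMP4 passthrough, AAC, MP3), and
// minProbeByteLength is the largest of their minimum probe sizes (never less than 1024 bytes).
// flush() uses it to decide whether a failed probe means the content is genuinely unsupported
// rather than simply too short to identify.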
15240
15241var Transmuxer = /*#__PURE__*/function () {
15242 function Transmuxer(observer, typeSupported, config, vendor, id) {
15243 this.observer = void 0;
15244 this.typeSupported = void 0;
15245 this.config = void 0;
15246 this.vendor = void 0;
15247 this.id = void 0;
15248 this.demuxer = void 0;
15249 this.remuxer = void 0;
15250 this.decrypter = void 0;
15251 this.probe = void 0;
15252 this.decryptionPromise = null;
15253 this.transmuxConfig = void 0;
15254 this.currentTransmuxState = void 0;
15255 this.cache = new _chunk_cache__WEBPACK_IMPORTED_MODULE_9__["default"]();
15256 this.observer = observer;
15257 this.typeSupported = typeSupported;
15258 this.config = config;
15259 this.vendor = vendor;
15260 this.id = id;
15261 }
15262
15263 var _proto = Transmuxer.prototype;
15264
15265 _proto.configure = function configure(transmuxConfig) {
15266 this.transmuxConfig = transmuxConfig;
15267
15268 if (this.decrypter) {
15269 this.decrypter.reset();
15270 }
15271 };
15272
15273 _proto.push = function push(data, decryptdata, chunkMeta, state) {
15274 var _this = this;
15275
15276 var stats = chunkMeta.transmuxing;
15277 stats.executeStart = now();
15278 var uintData = new Uint8Array(data);
15279 var cache = this.cache,
15280 config = this.config,
15281 currentTransmuxState = this.currentTransmuxState,
15282 transmuxConfig = this.transmuxConfig;
15283
15284 if (state) {
15285 this.currentTransmuxState = state;
15286 }
15287
15288 var keyData = getEncryptionType(uintData, decryptdata);
15289
15290 if (keyData && keyData.method === 'AES-128') {
15291 var decrypter = this.getDecrypter(); // Software decryption is synchronous; webCrypto is not
15292
15293 if (config.enableSoftwareAES) {
15294 // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
15295 // data is handled in the flush() call
15296 var decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
15297
15298 if (!decryptedData) {
15299 stats.executeEnd = now();
15300 return emptyResult(chunkMeta);
15301 }
15302
15303 uintData = new Uint8Array(decryptedData);
15304 } else {
15305 this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(function (decryptedData) {
15306 // Calling push here is important; if flush() is called while this is still resolving, this ensures that
15307 // the decrypted data has been transmuxed
15308 var result = _this.push(decryptedData, null, chunkMeta);
15309
15310 _this.decryptionPromise = null;
15311 return result;
15312 });
15313 return this.decryptionPromise;
15314 }
15315 }
15316
15317 var _ref2 = state || currentTransmuxState,
15318 contiguous = _ref2.contiguous,
15319 discontinuity = _ref2.discontinuity,
15320 trackSwitch = _ref2.trackSwitch,
15321 accurateTimeOffset = _ref2.accurateTimeOffset,
15322 timeOffset = _ref2.timeOffset,
15323 initSegmentChange = _ref2.initSegmentChange;
15324
15325 var audioCodec = transmuxConfig.audioCodec,
15326 videoCodec = transmuxConfig.videoCodec,
15327 defaultInitPts = transmuxConfig.defaultInitPts,
15328 duration = transmuxConfig.duration,
15329 initSegmentData = transmuxConfig.initSegmentData; // Reset muxers before probing to ensure that their state is clean, even if flushing occurs before a successful probe
15330
15331 if (discontinuity || trackSwitch || initSegmentChange) {
15332 this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration);
15333 }
15334
15335 if (discontinuity || initSegmentChange) {
15336 this.resetInitialTimestamp(defaultInitPts);
15337 }
15338
15339 if (!contiguous) {
15340 this.resetContiguity();
15341 }
15342
15343 if (this.needsProbing(uintData, discontinuity, trackSwitch)) {
15344 if (cache.dataLength) {
15345 var cachedData = cache.flush();
15346 uintData = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_10__["appendUint8Array"])(cachedData, uintData);
15347 }
15348
15349 this.configureTransmuxer(uintData, transmuxConfig);
15350 }
15351
15352 var result = this.transmux(uintData, keyData, timeOffset, accurateTimeOffset, chunkMeta);
15353 var currentState = this.currentTransmuxState;
15354 currentState.contiguous = true;
15355 currentState.discontinuity = false;
15356 currentState.trackSwitch = false;
15357 stats.executeEnd = now();
15358 return result;
15359 } // Due to data caching, flush calls can produce more than one TransmuxerResult (hence the Array type)
15360 ;
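  // Note on the AES-128 branch above: software decryption runs synchronously and progressively,
  // so push() may return an empty result until enough data (or a flush) arrives, whereas the
  // WebCrypto path stores a decryptionPromise that re-enters push() with the decrypted payload;
  // flush() waits on that promise before flushing so no decrypted data is dropped.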
15361
15362 _proto.flush = function flush(chunkMeta) {
15363 var _this2 = this;
15364
15365 var stats = chunkMeta.transmuxing;
15366 stats.executeStart = now();
15367 var decrypter = this.decrypter,
15368 cache = this.cache,
15369 currentTransmuxState = this.currentTransmuxState,
15370 decryptionPromise = this.decryptionPromise;
15371
15372 if (decryptionPromise) {
15373 // Upon resolution, the decryption promise calls push() and returns its TransmuxerResult up the stack. Therefore
15374 // only flushing is required for async decryption
15375 return decryptionPromise.then(function () {
15376 return _this2.flush(chunkMeta);
15377 });
15378 }
15379
15380 var transmuxResults = [];
15381 var timeOffset = currentTransmuxState.timeOffset;
15382
15383 if (decrypter) {
15384 // The decrypter may have data cached, which needs to be demuxed. In this case we'll have two TransmuxResults
15385 // This happens in the case that we receive only 1 push call for a segment (either for non-progressive downloads,
15386 // or for progressive downloads with small segments)
15387 var decryptedData = decrypter.flush();
15388
15389 if (decryptedData) {
15390 // Push always returns a TransmuxerResult if decryptdata is null
15391 transmuxResults.push(this.push(decryptedData, null, chunkMeta));
15392 }
15393 }
15394
15395 var bytesSeen = cache.dataLength;
15396 cache.reset();
15397 var demuxer = this.demuxer,
15398 remuxer = this.remuxer;
15399
15400 if (!demuxer || !remuxer) {
15401 // If probing failed, and each demuxer saw enough bytes to be able to probe, then Hls.js has been given content it's not able to handle
15402 if (bytesSeen >= minProbeByteLength) {
15403 this.observer.emit(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, _events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
15404 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
15405 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_PARSING_ERROR,
15406 fatal: true,
15407 reason: 'no demux matching with content found'
15408 });
15409 }
15410
15411 stats.executeEnd = now();
15412 return [emptyResult(chunkMeta)];
15413 }
15414
15415 var demuxResultOrPromise = demuxer.flush(timeOffset);
15416
15417 if (isPromise(demuxResultOrPromise)) {
15418 // Decrypt final SAMPLE-AES samples
15419 return demuxResultOrPromise.then(function (demuxResult) {
15420 _this2.flushRemux(transmuxResults, demuxResult, chunkMeta);
15421
15422 return transmuxResults;
15423 });
15424 }
15425
15426 this.flushRemux(transmuxResults, demuxResultOrPromise, chunkMeta);
15427 return transmuxResults;
15428 };
15429
15430 _proto.flushRemux = function flushRemux(transmuxResults, demuxResult, chunkMeta) {
15431 var audioTrack = demuxResult.audioTrack,
15432 avcTrack = demuxResult.avcTrack,
15433 id3Track = demuxResult.id3Track,
15434 textTrack = demuxResult.textTrack;
15435 var _this$currentTransmux = this.currentTransmuxState,
15436 accurateTimeOffset = _this$currentTransmux.accurateTimeOffset,
15437 timeOffset = _this$currentTransmux.timeOffset;
15438 _utils_logger__WEBPACK_IMPORTED_MODULE_11__["logger"].log("[transmuxer.ts]: Flushed fragment " + chunkMeta.sn + (chunkMeta.part > -1 ? ' p: ' + chunkMeta.part : '') + " of level " + chunkMeta.level);
15439 var remuxResult = this.remuxer.remux(audioTrack, avcTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, true, this.id);
15440 transmuxResults.push({
15441 remuxResult: remuxResult,
15442 chunkMeta: chunkMeta
15443 });
15444 chunkMeta.transmuxing.executeEnd = now();
15445 };
15446
15447 _proto.resetInitialTimestamp = function resetInitialTimestamp(defaultInitPts) {
15448 var demuxer = this.demuxer,
15449 remuxer = this.remuxer;
15450
15451 if (!demuxer || !remuxer) {
15452 return;
15453 }
15454
15455 demuxer.resetTimeStamp(defaultInitPts);
15456 remuxer.resetTimeStamp(defaultInitPts);
15457 };
15458
15459 _proto.resetContiguity = function resetContiguity() {
15460 var demuxer = this.demuxer,
15461 remuxer = this.remuxer;
15462
15463 if (!demuxer || !remuxer) {
15464 return;
15465 }
15466
15467 demuxer.resetContiguity();
15468 remuxer.resetNextTimestamp();
15469 };
15470
15471 _proto.resetInitSegment = function resetInitSegment(initSegmentData, audioCodec, videoCodec, duration) {
15472 var demuxer = this.demuxer,
15473 remuxer = this.remuxer;
15474
15475 if (!demuxer || !remuxer) {
15476 return;
15477 }
15478
15479 demuxer.resetInitSegment(audioCodec, videoCodec, duration);
15480 remuxer.resetInitSegment(initSegmentData, audioCodec, videoCodec);
15481 };
15482
15483 _proto.destroy = function destroy() {
15484 if (this.demuxer) {
15485 this.demuxer.destroy();
15486 this.demuxer = undefined;
15487 }
15488
15489 if (this.remuxer) {
15490 this.remuxer.destroy();
15491 this.remuxer = undefined;
15492 }
15493 };
15494
15495 _proto.transmux = function transmux(data, keyData, timeOffset, accurateTimeOffset, chunkMeta) {
15496 var result;
15497
15498 if (keyData && keyData.method === 'SAMPLE-AES') {
15499 result = this.transmuxSampleAes(data, keyData, timeOffset, accurateTimeOffset, chunkMeta);
15500 } else {
15501 result = this.transmuxUnencrypted(data, timeOffset, accurateTimeOffset, chunkMeta);
15502 }
15503
15504 return result;
15505 };
15506
15507 _proto.transmuxUnencrypted = function transmuxUnencrypted(data, timeOffset, accurateTimeOffset, chunkMeta) {
15508 var _demux = this.demuxer.demux(data, timeOffset, false, !this.config.progressive),
15509 audioTrack = _demux.audioTrack,
15510 avcTrack = _demux.avcTrack,
15511 id3Track = _demux.id3Track,
15512 textTrack = _demux.textTrack;
15513
15514 var remuxResult = this.remuxer.remux(audioTrack, avcTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, false, this.id);
15515 return {
15516 remuxResult: remuxResult,
15517 chunkMeta: chunkMeta
15518 };
15519 };
15520
15521 _proto.transmuxSampleAes = function transmuxSampleAes(data, decryptData, timeOffset, accurateTimeOffset, chunkMeta) {
15522 var _this3 = this;
15523
15524 return this.demuxer.demuxSampleAes(data, decryptData, timeOffset).then(function (demuxResult) {
15525 var remuxResult = _this3.remuxer.remux(demuxResult.audioTrack, demuxResult.avcTrack, demuxResult.id3Track, demuxResult.textTrack, timeOffset, accurateTimeOffset, false, _this3.id);
15526
15527 return {
15528 remuxResult: remuxResult,
15529 chunkMeta: chunkMeta
15530 };
15531 });
15532 };
15533
15534 _proto.configureTransmuxer = function configureTransmuxer(data, transmuxConfig) {
15535 var config = this.config,
15536 observer = this.observer,
15537 typeSupported = this.typeSupported,
15538 vendor = this.vendor;
15539 var audioCodec = transmuxConfig.audioCodec,
15540 defaultInitPts = transmuxConfig.defaultInitPts,
15541 duration = transmuxConfig.duration,
15542 initSegmentData = transmuxConfig.initSegmentData,
15543 videoCodec = transmuxConfig.videoCodec; // probe for content type
15544
15545 var mux;
15546
15547 for (var i = 0, len = muxConfig.length; i < len; i++) {
15548 if (muxConfig[i].demux.probe(data)) {
15549 mux = muxConfig[i];
15550 break;
15551 }
15552 }
15553
15554 if (!mux) {
15555 // If probing with the previous configs fails, use mp4 passthrough
15556 _utils_logger__WEBPACK_IMPORTED_MODULE_11__["logger"].warn('Failed to find demuxer by probing frag, treating as mp4 passthrough');
15557 mux = {
15558 demux: _demux_mp4demuxer__WEBPACK_IMPORTED_MODULE_4__["default"],
15559 remux: _remux_passthrough_remuxer__WEBPACK_IMPORTED_MODULE_8__["default"]
15560 };
15561 } // so let's check that current remuxer and demuxer are still valid
15562
15563
15564 var demuxer = this.demuxer;
15565 var remuxer = this.remuxer;
15566 var Remuxer = mux.remux;
15567 var Demuxer = mux.demux;
15568
15569 if (!remuxer || !(remuxer instanceof Remuxer)) {
15570 this.remuxer = new Remuxer(observer, config, typeSupported, vendor);
15571 }
15572
15573 if (!demuxer || !(demuxer instanceof Demuxer)) {
15574 this.demuxer = new Demuxer(observer, config, typeSupported);
15575 this.probe = Demuxer.probe;
15576 } // Ensure that muxers are always initialized with an initSegment
15577
15578
15579 this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration);
15580 this.resetInitialTimestamp(defaultInitPts);
15581 };
15582
15583 _proto.needsProbing = function needsProbing(data, discontinuity, trackSwitch) {
15584 // in case of continuity change, or track switch
15585 // we might switch from content type (AAC container to TS container, or TS to fmp4 for example)
15586 return !this.demuxer || !this.remuxer || discontinuity || trackSwitch;
15587 };
15588
15589 _proto.getDecrypter = function getDecrypter() {
15590 var decrypter = this.decrypter;
15591
15592 if (!decrypter) {
15593 decrypter = this.decrypter = new _crypt_decrypter__WEBPACK_IMPORTED_MODULE_2__["default"](this.observer, this.config);
15594 }
15595
15596 return decrypter;
15597 };
15598
15599 return Transmuxer;
15600}();
15601
15602
15603
15604function getEncryptionType(data, decryptData) {
15605 var encryptionType = null;
15606
15607 if (data.byteLength > 0 && decryptData != null && decryptData.key != null && decryptData.iv !== null && decryptData.method != null) {
15608 encryptionType = decryptData;
15609 }
15610
15611 return encryptionType;
15612}
15613
15614var emptyResult = function emptyResult(chunkMeta) {
15615 return {
15616 remuxResult: {},
15617 chunkMeta: chunkMeta
15618 };
15619};
15620
15621function isPromise(p) {
15622 return 'then' in p && p.then instanceof Function;
15623}
15624var TransmuxConfig = function TransmuxConfig(audioCodec, videoCodec, initSegmentData, duration, defaultInitPts) {
15625 this.audioCodec = void 0;
15626 this.videoCodec = void 0;
15627 this.initSegmentData = void 0;
15628 this.duration = void 0;
15629 this.defaultInitPts = void 0;
15630 this.audioCodec = audioCodec;
15631 this.videoCodec = videoCodec;
15632 this.initSegmentData = initSegmentData;
15633 this.duration = duration;
15634 this.defaultInitPts = defaultInitPts;
15635};
15636var TransmuxState = function TransmuxState(discontinuity, contiguous, accurateTimeOffset, trackSwitch, timeOffset, initSegmentChange) {
15637 this.discontinuity = void 0;
15638 this.contiguous = void 0;
15639 this.accurateTimeOffset = void 0;
15640 this.trackSwitch = void 0;
15641 this.timeOffset = void 0;
15642 this.initSegmentChange = void 0;
15643 this.discontinuity = discontinuity;
15644 this.contiguous = contiguous;
15645 this.accurateTimeOffset = accurateTimeOffset;
15646 this.trackSwitch = trackSwitch;
15647 this.timeOffset = timeOffset;
15648 this.initSegmentChange = initSegmentChange;
15649};
15650
15651/***/ }),
15652
15653/***/ "./src/demux/tsdemuxer.ts":
15654/*!********************************!*\
15655 !*** ./src/demux/tsdemuxer.ts ***!
15656 \********************************/
15657/*! exports provided: discardEPB, default */
15658/***/ (function(module, __webpack_exports__, __webpack_require__) {
15659__webpack_require__.r(__webpack_exports__);
15660/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "discardEPB", function() { return discardEPB; });
15661/* harmony import */ var _adts__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./adts */ "./src/demux/adts.ts");
15662/* harmony import */ var _mpegaudio__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./mpegaudio */ "./src/demux/mpegaudio.ts");
15663/* harmony import */ var _exp_golomb__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./exp-golomb */ "./src/demux/exp-golomb.ts");
15664/* harmony import */ var _id3__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./id3 */ "./src/demux/id3.ts");
15665/* harmony import */ var _sample_aes__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./sample-aes */ "./src/demux/sample-aes.ts");
15666/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../events */ "./src/events.ts");
15667/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
15668/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
15669/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
15670/**
15671 * highly optimized TS demuxer:
15672 * parse PAT, PMT
15673 * extract PES packet from audio and video PIDs
15674 * extract AVC/H264 NAL units and AAC/ADTS samples from PES packet
15675 * trigger the remuxer upon parsing completion
15676 * it also tries to work around, as best it can, audio codec switches (HE-AAC to AAC and vice versa) without having to restart the MediaSource.
15677 * it also controls the remuxing process:
15678 * upon discontinuity or level switch detection, it also notifies the remuxer so that it can reset its state.
15679 */
15680
15681
15682
15683
15684
15685
15686
15687
15688
15689// We are using fixed track IDs for driving the MP4 remuxer
15690// instead of following the TS PIDs.
15691// There is no reason not to do this and some browsers/SourceBuffer-demuxers
15692// may not like if there are TrackID "switches"
15693// See https://github.com/video-dev/hls.js/issues/1331
15694// Here we are mapping our internal track types to constant MP4 track IDs
15695// With MSE currently one can only have one track of each, and we are muxing
15696// whatever video/audio rendition in them.
15697var RemuxerTrackIdConfig = {
15698 video: 1,
15699 audio: 2,
15700 id3: 3,
15701 text: 4
15702};
15703
15704var TSDemuxer = /*#__PURE__*/function () {
15705 function TSDemuxer(observer, config, typeSupported) {
15706 this.observer = void 0;
15707 this.config = void 0;
15708 this.typeSupported = void 0;
15709 this.sampleAes = null;
15710 this.pmtParsed = false;
15711 this.audioCodec = void 0;
15712 this.videoCodec = void 0;
15713 this._duration = 0;
15714 this.aacLastPTS = null;
15715 this._initPTS = null;
15716 this._initDTS = null;
15717 this._pmtId = -1;
15718 this._avcTrack = void 0;
15719 this._audioTrack = void 0;
15720 this._id3Track = void 0;
15721 this._txtTrack = void 0;
15722 this.aacOverFlow = null;
15723 this.avcSample = null;
15724 this.remainderData = null;
15725 this.observer = observer;
15726 this.config = config;
15727 this.typeSupported = typeSupported;
15728 }
15729
15730 TSDemuxer.probe = function probe(data) {
15731 var syncOffset = TSDemuxer.syncOffset(data);
15732
15733 if (syncOffset < 0) {
15734 return false;
15735 } else {
15736 if (syncOffset) {
15737 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].warn("MPEG2-TS detected but first sync word found @ offset " + syncOffset + ", junk ahead ?");
15738 }
15739
15740 return true;
15741 }
15742 };
15743
15744 TSDemuxer.syncOffset = function syncOffset(data) {
15745 // scan 1000 first bytes
15746 var scanwindow = Math.min(1000, data.length - 3 * 188);
15747 var i = 0;
15748
15749 while (i < scanwindow) {
15750 // a TS fragment should contain at least 3 TS packets, a PAT, a PMT, and one PID, each starting with 0x47
15751 if (data[i] === 0x47 && data[i + 188] === 0x47 && data[i + 2 * 188] === 0x47) {
15752 return i;
15753 } else {
15754 i++;
15755 }
15756 }
15757
15758 return -1;
15759 }
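  // Illustrative example (hypothetical payload): with 3 bytes of junk ahead of the first TS
  // packet, data[3] === 0x47, data[191] === 0x47 and data[379] === 0x47, so syncOffset() returns 3.
  // If fewer than 3 * 188 bytes are available the scan window is empty and -1 is returned, which
  // makes probe() reject the data.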
15760 /**
15761 * Creates a track model internal to demuxer used to drive remuxing input
15762 *
15763 * @param type 'audio' | 'video' | 'id3' | 'text'
15764 * @param duration
15765 * @return TSDemuxer's internal track model
15766 */
15767 ;
15768
15769 TSDemuxer.createTrack = function createTrack(type, duration) {
15770 return {
15771 container: type === 'video' || type === 'audio' ? 'video/mp2t' : undefined,
15772 type: type,
15773 id: RemuxerTrackIdConfig[type],
15774 pid: -1,
15775 inputTimeScale: 90000,
15776 sequenceNumber: 0,
15777 samples: [],
15778 dropped: 0,
15779 duration: type === 'audio' ? duration : undefined
15780 };
15781 }
15782 /**
15783 * Initializes a new init segment on the demuxer/remuxer interface. Needed for discontinuities/track-switches (or at stream start)
15784 * Resets all internal track instances of the demuxer.
15785 */
15786 ;
15787
15788 var _proto = TSDemuxer.prototype;
15789
15790 _proto.resetInitSegment = function resetInitSegment(audioCodec, videoCodec, duration) {
15791 this.pmtParsed = false;
15792 this._pmtId = -1;
15793 this._avcTrack = TSDemuxer.createTrack('video', duration);
15794 this._audioTrack = TSDemuxer.createTrack('audio', duration);
15795 this._id3Track = TSDemuxer.createTrack('id3', duration);
15796 this._txtTrack = TSDemuxer.createTrack('text', duration);
15797 this._audioTrack.isAAC = true; // flush any partial content
15798
15799 this.aacOverFlow = null;
15800 this.aacLastPTS = null;
15801 this.avcSample = null;
15802 this.audioCodec = audioCodec;
15803 this.videoCodec = videoCodec;
15804 this._duration = duration;
15805 };
15806
15807 _proto.resetTimeStamp = function resetTimeStamp() {};
15808
15809 _proto.resetContiguity = function resetContiguity() {
15810 var _audioTrack = this._audioTrack,
15811 _avcTrack = this._avcTrack,
15812 _id3Track = this._id3Track;
15813
15814 if (_audioTrack) {
15815 _audioTrack.pesData = null;
15816 }
15817
15818 if (_avcTrack) {
15819 _avcTrack.pesData = null;
15820 }
15821
15822 if (_id3Track) {
15823 _id3Track.pesData = null;
15824 }
15825
15826 this.aacOverFlow = null;
15827 this.aacLastPTS = null;
15828 };
15829
15830 _proto.demux = function demux(data, timeOffset, isSampleAes, flush) {
15831 if (isSampleAes === void 0) {
15832 isSampleAes = false;
15833 }
15834
15835 if (flush === void 0) {
15836 flush = false;
15837 }
15838
15839 if (!isSampleAes) {
15840 this.sampleAes = null;
15841 }
15842
15843 var pes;
15844 var avcTrack = this._avcTrack;
15845 var audioTrack = this._audioTrack;
15846 var id3Track = this._id3Track;
15847 var avcId = avcTrack.pid;
15848 var avcData = avcTrack.pesData;
15849 var audioId = audioTrack.pid;
15850 var id3Id = id3Track.pid;
15851 var audioData = audioTrack.pesData;
15852 var id3Data = id3Track.pesData;
15853 var unknownPIDs = false;
15854 var pmtParsed = this.pmtParsed;
15855 var pmtId = this._pmtId;
15856 var len = data.length;
15857
15858 if (this.remainderData) {
15859 data = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_6__["appendUint8Array"])(this.remainderData, data);
15860 len = data.length;
15861 this.remainderData = null;
15862 }
15863
15864 if (len < 188 && !flush) {
15865 this.remainderData = data;
15866 return {
15867 audioTrack: audioTrack,
15868 avcTrack: avcTrack,
15869 id3Track: id3Track,
15870 textTrack: this._txtTrack
15871 };
15872 }
15873
15874 var syncOffset = Math.max(0, TSDemuxer.syncOffset(data));
15875 len -= (len + syncOffset) % 188;
15876
15877 if (len < data.byteLength && !flush) {
15878 this.remainderData = new Uint8Array(data.buffer, len, data.buffer.byteLength - len);
15879 } // loop through TS packets
15880
15881
15882 var tsPacketErrors = 0;
15883
15884 for (var start = syncOffset; start < len; start += 188) {
15885 if (data[start] === 0x47) {
15886 var stt = !!(data[start + 1] & 0x40); // pid is a 13-bit field starting at the last bit of TS[1]
15887
15888 var pid = ((data[start + 1] & 0x1f) << 8) + data[start + 2];
15889 var atf = (data[start + 3] & 0x30) >> 4; // if an adaptation field is present, its length is specified by the fifth byte of the TS packet header.
15890
15891 var offset = void 0;
15892
15893 if (atf > 1) {
15894 offset = start + 5 + data[start + 4]; // continue if there is only an adaptation field
15895
15896 if (offset === start + 188) {
15897 continue;
15898 }
15899 } else {
15900 offset = start + 4;
15901 }
15902
15903 switch (pid) {
15904 case avcId:
15905 if (stt) {
15906 if (avcData && (pes = parsePES(avcData))) {
15907 this.parseAVCPES(pes, false);
15908 }
15909
15910 avcData = {
15911 data: [],
15912 size: 0
15913 };
15914 }
15915
15916 if (avcData) {
15917 avcData.data.push(data.subarray(offset, start + 188));
15918 avcData.size += start + 188 - offset;
15919 }
15920
15921 break;
15922
15923 case audioId:
15924 if (stt) {
15925 if (audioData && (pes = parsePES(audioData))) {
15926 if (audioTrack.isAAC) {
15927 this.parseAACPES(pes);
15928 } else {
15929 this.parseMPEGPES(pes);
15930 }
15931 }
15932
15933 audioData = {
15934 data: [],
15935 size: 0
15936 };
15937 }
15938
15939 if (audioData) {
15940 audioData.data.push(data.subarray(offset, start + 188));
15941 audioData.size += start + 188 - offset;
15942 }
15943
15944 break;
15945
15946 case id3Id:
15947 if (stt) {
15948 if (id3Data && (pes = parsePES(id3Data))) {
15949 this.parseID3PES(pes);
15950 }
15951
15952 id3Data = {
15953 data: [],
15954 size: 0
15955 };
15956 }
15957
15958 if (id3Data) {
15959 id3Data.data.push(data.subarray(offset, start + 188));
15960 id3Data.size += start + 188 - offset;
15961 }
15962
15963 break;
15964
15965 case 0:
15966 if (stt) {
15967 offset += data[offset] + 1;
15968 }
15969
15970 pmtId = this._pmtId = parsePAT(data, offset);
15971 break;
15972
15973 case pmtId:
15974 {
15975 if (stt) {
15976 offset += data[offset] + 1;
15977 }
15978
15979 var parsedPIDs = parsePMT(data, offset, this.typeSupported.mpeg === true || this.typeSupported.mp3 === true, isSampleAes); // only update track id if track PID found while parsing PMT
15980 // this is to avoid resetting the PID to -1 in case
15981 // track PID transiently disappears from the stream
15982 // this could happen in case of transient missing audio samples for example
15983 // NOTE this is only the PID of the track as found in TS,
15984 // but we are not using this for MP4 track IDs.
15985
15986 avcId = parsedPIDs.avc;
15987
15988 if (avcId > 0) {
15989 avcTrack.pid = avcId;
15990 }
15991
15992 audioId = parsedPIDs.audio;
15993
15994 if (audioId > 0) {
15995 audioTrack.pid = audioId;
15996 audioTrack.isAAC = parsedPIDs.isAAC;
15997 }
15998
15999 id3Id = parsedPIDs.id3;
16000
16001 if (id3Id > 0) {
16002 id3Track.pid = id3Id;
16003 }
16004
16005 if (unknownPIDs && !pmtParsed) {
16006 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].log('reparse from beginning');
16007 unknownPIDs = false; // rewind start to syncOffset - 188; the += 188 in the for loop restarts the scan from syncOffset
16008
16009 start = syncOffset - 188;
16010 }
16011
16012 pmtParsed = this.pmtParsed = true;
16013 break;
16014 }
16015
16016 case 17:
16017 case 0x1fff:
16018 break;
16019
16020 default:
16021 unknownPIDs = true;
16022 break;
16023 }
16024 } else {
16025 tsPacketErrors++;
16026 }
16027 }
16028
16029 if (tsPacketErrors > 0) {
16030 this.observer.emit(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].ERROR, _events__WEBPACK_IMPORTED_MODULE_5__["Events"].ERROR, {
16031 type: _errors__WEBPACK_IMPORTED_MODULE_8__["ErrorTypes"].MEDIA_ERROR,
16032 details: _errors__WEBPACK_IMPORTED_MODULE_8__["ErrorDetails"].FRAG_PARSING_ERROR,
16033 fatal: false,
16034 reason: "Found " + tsPacketErrors + " TS packet/s that do not start with 0x47"
16035 });
16036 }
16037
16038 avcTrack.pesData = avcData;
16039 audioTrack.pesData = audioData;
16040 id3Track.pesData = id3Data;
16041 var demuxResult = {
16042 audioTrack: audioTrack,
16043 avcTrack: avcTrack,
16044 id3Track: id3Track,
16045 textTrack: this._txtTrack
16046 };
16047
16048 if (flush) {
16049 this.extractRemainingSamples(demuxResult);
16050 }
16051
16052 return demuxResult;
16053 };
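  // Summary of demux() above: the payload is walked 188 bytes at a time from syncOffset. PID 0
  // carries the PAT (which yields the PMT PID), the PMT maps the AVC/audio/ID3 PES PIDs, and PES
  // payloads are accumulated per track and parsed into samples whenever a payload-unit-start
  // indicator (stt) begins a new packet. Anything shorter than a full packet is kept in
  // remainderData for the next call unless `flush` is set.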
16054
16055 _proto.flush = function flush() {
16056 var remainderData = this.remainderData;
16057 this.remainderData = null;
16058 var result;
16059
16060 if (remainderData) {
16061 result = this.demux(remainderData, -1, false, true);
16062 } else {
16063 result = {
16064 audioTrack: this._audioTrack,
16065 avcTrack: this._avcTrack,
16066 textTrack: this._txtTrack,
16067 id3Track: this._id3Track
16068 };
16069 }
16070
16071 this.extractRemainingSamples(result);
16072
16073 if (this.sampleAes) {
16074 return this.decrypt(result, this.sampleAes);
16075 }
16076
16077 return result;
16078 };
16079
16080 _proto.extractRemainingSamples = function extractRemainingSamples(demuxResult) {
16081 var audioTrack = demuxResult.audioTrack,
16082 avcTrack = demuxResult.avcTrack,
16083 id3Track = demuxResult.id3Track;
16084 var avcData = avcTrack.pesData;
16085 var audioData = audioTrack.pesData;
16086 var id3Data = id3Track.pesData; // try to parse last PES packets
16087
16088 var pes;
16089
16090 if (avcData && (pes = parsePES(avcData))) {
16091 this.parseAVCPES(pes, true);
16092 avcTrack.pesData = null;
16093 } else {
16094 // either avcData null or PES truncated, keep it for next frag parsing
16095 avcTrack.pesData = avcData;
16096 }
16097
16098 if (audioData && (pes = parsePES(audioData))) {
16099 if (audioTrack.isAAC) {
16100 this.parseAACPES(pes);
16101 } else {
16102 this.parseMPEGPES(pes);
16103 }
16104
16105 audioTrack.pesData = null;
16106 } else {
16107 if (audioData !== null && audioData !== void 0 && audioData.size) {
16108 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].log('last AAC PES packet truncated, might overlap between fragments');
16109 } // either audioData null or PES truncated, keep it for next frag parsing
16110
16111
16112 audioTrack.pesData = audioData;
16113 }
16114
16115 if (id3Data && (pes = parsePES(id3Data))) {
16116 this.parseID3PES(pes);
16117 id3Track.pesData = null;
16118 } else {
16119 // either id3Data null or PES truncated, keep it for next frag parsing
16120 id3Track.pesData = id3Data;
16121 }
16122 };
16123
16124 _proto.demuxSampleAes = function demuxSampleAes(data, keyData, timeOffset) {
16125 var demuxResult = this.demux(data, timeOffset, true, !this.config.progressive);
16126 var sampleAes = this.sampleAes = new _sample_aes__WEBPACK_IMPORTED_MODULE_4__["default"](this.observer, this.config, keyData);
16127 return this.decrypt(demuxResult, sampleAes);
16128 };
16129
16130 _proto.decrypt = function decrypt(demuxResult, sampleAes) {
16131 return new Promise(function (resolve) {
16132 var audioTrack = demuxResult.audioTrack,
16133 avcTrack = demuxResult.avcTrack;
16134
16135 if (audioTrack.samples && audioTrack.isAAC) {
16136 sampleAes.decryptAacSamples(audioTrack.samples, 0, function () {
16137 if (avcTrack.samples) {
16138 sampleAes.decryptAvcSamples(avcTrack.samples, 0, 0, function () {
16139 resolve(demuxResult);
16140 });
16141 } else {
16142 resolve(demuxResult);
16143 }
16144 });
16145 } else if (avcTrack.samples) {
16146 sampleAes.decryptAvcSamples(avcTrack.samples, 0, 0, function () {
16147 resolve(demuxResult);
16148 });
16149 }
16150 });
16151 };
16152
16153 _proto.destroy = function destroy() {
16154 this._initPTS = this._initDTS = null;
16155 this._duration = 0;
16156 };
16157
16158 _proto.parseAVCPES = function parseAVCPES(pes, last) {
16159 var _this = this;
16160
16161 var track = this._avcTrack;
16162 var units = this.parseAVCNALu(pes.data);
16163 var avcSample = this.avcSample;
16164 var push;
16165 var spsfound = false; // free pes.data to save up some memory
16166
16167 pes.data = null; // if new NAL units found and last sample still there, let's push ...
16168 // this helps parsing streams with missing AUD (only do this if AUD never found)
16169
16170 if (avcSample && units.length && !track.audFound) {
16171 pushAccessUnit(avcSample, track);
16172 avcSample = this.avcSample = createAVCSample(false, pes.pts, pes.dts, '');
16173 }
16174
16175 units.forEach(function (unit) {
16176 switch (unit.type) {
16177 // NDR
16178 case 1:
16179 {
16180 push = true;
16181
16182 if (!avcSample) {
16183 avcSample = _this.avcSample = createAVCSample(true, pes.pts, pes.dts, '');
16184 }
16185
16186 avcSample.frame = true;
16187 var data = unit.data; // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
16188
16189 if (spsfound && data.length > 4) {
16190 // retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
16191 var sliceType = new _exp_golomb__WEBPACK_IMPORTED_MODULE_2__["default"](data).readSliceType(); // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
16192 // SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
16193 // An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
16194 // I slice: A slice that is not an SI slice that is decoded using intra prediction only.
16195 // if (sliceType === 2 || sliceType === 7) {
16196
16197 if (sliceType === 2 || sliceType === 4 || sliceType === 7 || sliceType === 9) {
16198 avcSample.key = true;
16199 }
16200 }
16201
16202 break; // IDR
16203 }
16204
16205 case 5:
16206 push = true; // handle PES not starting with AUD
16207
16208 if (!avcSample) {
16209 avcSample = _this.avcSample = createAVCSample(true, pes.pts, pes.dts, '');
16210 }
16211
16212 avcSample.key = true;
16213 avcSample.frame = true;
16214 break;
16215 // SEI
16216
16217 case 6:
16218 {
16219 push = true;
16220
16221 var expGolombDecoder = new _exp_golomb__WEBPACK_IMPORTED_MODULE_2__["default"](discardEPB(unit.data)); // skip frameType
16222
16223 expGolombDecoder.readUByte();
16224 var payloadType = 0;
16225 var payloadSize = 0;
16226 var endOfCaptions = false;
16227 var b = 0;
16228
16229 while (!endOfCaptions && expGolombDecoder.bytesAvailable > 1) {
16230 payloadType = 0;
16231
16232 do {
16233 b = expGolombDecoder.readUByte();
16234 payloadType += b;
16235 } while (b === 0xff); // Parse payload size.
16236
16237
16238 payloadSize = 0;
16239
16240 do {
16241 b = expGolombDecoder.readUByte();
16242 payloadSize += b;
16243 } while (b === 0xff); // TODO: there can be more than one payload in an SEI packet...
16244 // TODO: need to read type and size in a while loop to get them all
16245
16246
16247 if (payloadType === 4 && expGolombDecoder.bytesAvailable !== 0) {
16248 endOfCaptions = true;
16249 var countryCode = expGolombDecoder.readUByte();
16250
16251 if (countryCode === 181) {
16252 var providerCode = expGolombDecoder.readUShort();
16253
16254 if (providerCode === 49) {
16255 var userStructure = expGolombDecoder.readUInt();
16256
16257 if (userStructure === 0x47413934) {
16258 var userDataType = expGolombDecoder.readUByte(); // Raw CEA-608 bytes wrapped in CEA-708 packet
16259
16260 if (userDataType === 3) {
16261 var firstByte = expGolombDecoder.readUByte();
16262 var secondByte = expGolombDecoder.readUByte();
16263 var totalCCs = 31 & firstByte;
16264 var byteArray = [firstByte, secondByte];
16265
16266 for (var i = 0; i < totalCCs; i++) {
16267 // 3 bytes per CC
16268 byteArray.push(expGolombDecoder.readUByte());
16269 byteArray.push(expGolombDecoder.readUByte());
16270 byteArray.push(expGolombDecoder.readUByte());
16271 }
16272
16273 insertSampleInOrder(_this._txtTrack.samples, {
16274 type: 3,
16275 pts: pes.pts,
16276 bytes: byteArray
16277 });
16278 }
16279 }
16280 }
16281 }
16282 } else if (payloadType === 5 && expGolombDecoder.bytesAvailable !== 0) {
16283 endOfCaptions = true;
16284
16285 if (payloadSize > 16) {
16286 var uuidStrArray = [];
16287
16288 for (var _i = 0; _i < 16; _i++) {
16289 uuidStrArray.push(expGolombDecoder.readUByte().toString(16));
16290
16291 if (_i === 3 || _i === 5 || _i === 7 || _i === 9) {
16292 uuidStrArray.push('-');
16293 }
16294 }
16295
16296 var length = payloadSize - 16;
16297 var userDataPayloadBytes = new Uint8Array(length);
16298
16299 for (var _i2 = 0; _i2 < length; _i2++) {
16300 userDataPayloadBytes[_i2] = expGolombDecoder.readUByte();
16301 }
16302
16303 insertSampleInOrder(_this._txtTrack.samples, {
16304 pts: pes.pts,
16305 payloadType: payloadType,
16306 uuid: uuidStrArray.join(''),
16307 userData: Object(_id3__WEBPACK_IMPORTED_MODULE_3__["utf8ArrayToStr"])(userDataPayloadBytes),
16308 userDataBytes: userDataPayloadBytes
16309 });
16310 }
16311 } else if (payloadSize < expGolombDecoder.bytesAvailable) {
16312 for (var _i3 = 0; _i3 < payloadSize; _i3++) {
16313 expGolombDecoder.readUByte();
16314 }
16315 }
16316 }
16317
16318 break; // SPS
16319 }
16320
16321 case 7:
16322 push = true;
16323 spsfound = true;
16324
16325 if (!track.sps) {
16326 var _expGolombDecoder = new _exp_golomb__WEBPACK_IMPORTED_MODULE_2__["default"](unit.data);
16327
16328 var config = _expGolombDecoder.readSPS();
16329
16330 track.width = config.width;
16331 track.height = config.height;
16332 track.pixelRatio = config.pixelRatio; // TODO: `track.sps` is defined as a `number[]`, but we're setting it to a `Uint8Array[]`.
16333
16334 track.sps = [unit.data];
16335 track.duration = _this._duration;
16336 var codecarray = unit.data.subarray(1, 4);
16337 var codecstring = 'avc1.';
16338
16339 for (var _i4 = 0; _i4 < 3; _i4++) {
16340 var h = codecarray[_i4].toString(16);
16341
16342 if (h.length < 2) {
16343 h = '0' + h;
16344 }
16345
16346 codecstring += h;
16347 }
16348
16349 track.codec = codecstring;
16350 }
16351
16352 break;
16353 // PPS
16354
16355 case 8:
16356 push = true;
16357
16358 if (!track.pps) {
16359 // TODO: `track.pps` is defined as a `number[]`, but we're setting it to a `Uint8Array[]`.
16360 track.pps = [unit.data];
16361 }
16362
16363 break;
16364 // AUD
16365
16366 case 9:
16367 push = false;
16368 track.audFound = true;
16369
16370 if (avcSample) {
16371 pushAccessUnit(avcSample, track);
16372 }
16373
16374 avcSample = _this.avcSample = createAVCSample(false, pes.pts, pes.dts, '');
16375 break;
16376 // Filler Data
16377
16378 case 12:
16379 push = false;
16380 break;
16381
16382 default:
16383 push = false;
16384
16385 if (avcSample) {
16386 avcSample.debug += 'unknown NAL ' + unit.type + ' ';
16387 }
16388
16389 break;
16390 }
16391
16392 if (avcSample && push) {
16393 var _units = avcSample.units;
16394
16395 _units.push(unit);
16396 }
16397 }); // if last PES packet, push samples
16398
16399 if (last && avcSample) {
16400 pushAccessUnit(avcSample, track);
16401 this.avcSample = null;
16402 }
16403 };
16404
16405 _proto.getLastNalUnit = function getLastNalUnit() {
16406 var _avcSample;
16407
16408 var avcSample = this.avcSample;
16409 var lastUnit; // try to fallback to previous sample if current one is empty
16410
16411 if (!avcSample || avcSample.units.length === 0) {
16412 var samples = this._avcTrack.samples;
16413 avcSample = samples[samples.length - 1];
16414 }
16415
16416 if ((_avcSample = avcSample) !== null && _avcSample !== void 0 && _avcSample.units) {
16417 var units = avcSample.units;
16418 lastUnit = units[units.length - 1];
16419 }
16420
16421 return lastUnit;
16422 };
16423
16424 _proto.parseAVCNALu = function parseAVCNALu(array) {
16425 var len = array.byteLength;
16426 var track = this._avcTrack;
16427 var state = track.naluState || 0;
16428 var lastState = state;
16429 var units = [];
16430 var i = 0;
16431 var value;
16432 var overflow;
16433 var unitType;
16434 var lastUnitStart = -1;
16435 var lastUnitType = 0; // logger.log('PES:' + Hex.hexDump(array));
16436
16437 if (state === -1) {
16438 // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
16439 lastUnitStart = 0; // NALu type is value read from offset 0
16440
16441 lastUnitType = array[0] & 0x1f;
16442 state = 0;
16443 i = 1;
16444 }
16445
16446 while (i < len) {
16447 value = array[i++]; // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
16448
16449 if (!state) {
16450 state = value ? 0 : 1;
16451 continue;
16452 }
16453
16454 if (state === 1) {
16455 state = value ? 0 : 2;
16456 continue;
16457 } // here we have state either equal to 2 or 3
16458
16459
16460 if (!value) {
16461 state = 3;
16462 } else if (value === 1) {
16463 if (lastUnitStart >= 0) {
16464 var unit = {
16465 data: array.subarray(lastUnitStart, i - state - 1),
16466 type: lastUnitType
16467 }; // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
16468
16469 units.push(unit);
16470 } else {
16471 // lastUnitStart is undefined => this is the first start code found in this PES packet
16472 // first check if start code delimiter is overlapping between 2 PES packets,
16473 // ie it started in last packet (lastState not zero)
16474 // and ended at the beginning of this PES packet (i <= 4 - lastState)
16475 var lastUnit = this.getLastNalUnit();
16476
16477 if (lastUnit) {
16478 if (lastState && i <= 4 - lastState) {
16479 // start delimiter overlapping between PES packets
16480 // strip start delimiter bytes from the end of last NAL unit
16481 // check if lastUnit had a state different from zero
16482 if (lastUnit.state) {
16483 // strip last bytes
16484 lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
16485 }
16486 } // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
16487
16488
16489 overflow = i - state - 1;
16490
16491 if (overflow > 0) {
16492 // logger.log('first NALU found with overflow:' + overflow);
16493 var tmp = new Uint8Array(lastUnit.data.byteLength + overflow);
16494 tmp.set(lastUnit.data, 0);
16495 tmp.set(array.subarray(0, overflow), lastUnit.data.byteLength);
16496 lastUnit.data = tmp;
16497 lastUnit.state = 0;
16498 }
16499 }
16500 } // check if we can read unit type
16501
16502
16503 if (i < len) {
16504 unitType = array[i] & 0x1f; // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
16505
16506 lastUnitStart = i;
16507 lastUnitType = unitType;
16508 state = 0;
16509 } else {
16510 // not enough bytes to read unit type. let's read it on next PES parsing
16511 state = -1;
16512 }
16513 } else {
16514 state = 0;
16515 }
16516 }
16517
16518 if (lastUnitStart >= 0 && state >= 0) {
16519 var _unit = {
16520 data: array.subarray(lastUnitStart, len),
16521 type: lastUnitType,
16522 state: state
16523 };
16524 units.push(_unit); // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
16525 } // no NALu found
16526
16527
16528 if (units.length === 0) {
16529 // append pes.data to previous NAL unit
16530 var _lastUnit = this.getLastNalUnit();
16531
16532 if (_lastUnit) {
16533 var _tmp = new Uint8Array(_lastUnit.data.byteLength + array.byteLength);
16534
16535 _tmp.set(_lastUnit.data, 0);
16536
16537 _tmp.set(array, _lastUnit.data.byteLength);
16538
16539 _lastUnit.data = _tmp;
16540 }
16541 }
16542
16543 track.naluState = state;
16544 return units;
16545 };
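// Illustrative walk-through (hypothetical bytes, not from a real stream) of the start-code
// state machine above. For a payload beginning with a 3-byte Annex B start code followed by
// an SPS NAL unit:
//
//   bytes   0x00   0x00   0x01   0x67 ...
//   state   0 -> 1 1 -> 2 value === 1 while in state 2, so a start code is complete;
//                         the next byte 0x67 gives the NAL type (0x67 & 0x1f === 7, SPS),
//                         lastUnitStart is recorded and state resets to 0.
//
// A start code that straddles two PES packets leaves state at 2 or 3 (or -1 when the type
// byte itself is missing); it is saved in track.naluState and resumed on the next call.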
16546
16547 _proto.parseAACPES = function parseAACPES(pes) {
16548 var startOffset = 0;
16549 var track = this._audioTrack;
16550 var aacOverFlow = this.aacOverFlow;
16551 var data = pes.data;
16552
16553 if (aacOverFlow) {
16554 this.aacOverFlow = null;
16555 var sampleLength = aacOverFlow.sample.unit.byteLength;
16556 var frameMissingBytes = Math.min(aacOverFlow.missing, sampleLength);
16557 var frameOverflowBytes = sampleLength - frameMissingBytes;
16558 aacOverFlow.sample.unit.set(data.subarray(0, frameMissingBytes), frameOverflowBytes);
16559 track.samples.push(aacOverFlow.sample); // logger.log(`AAC: append overflowing ${frameOverflowBytes} bytes to beginning of new PES`);
16560
16561 startOffset = aacOverFlow.missing;
16562 } // look for ADTS header (0xFFFx)
16563
16564
16565 var offset;
16566 var len;
16567
16568 for (offset = startOffset, len = data.length; offset < len - 1; offset++) {
16569 if (_adts__WEBPACK_IMPORTED_MODULE_0__["isHeader"](data, offset)) {
16570 break;
16571 }
16572 } // if ADTS header does not start straight from the beginning of the PES payload, raise an error
16573
16574
16575 if (offset !== startOffset) {
16576 var reason;
16577 var fatal;
16578
16579 if (offset < len - 1) {
16580 reason = "AAC PES did not start with ADTS header, offset:" + offset;
16581 fatal = false;
16582 } else {
16583 reason = 'no ADTS header found in AAC PES';
16584 fatal = true;
16585 }
16586
16587 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].warn("parsing error:" + reason);
16588 this.observer.emit(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].ERROR, _events__WEBPACK_IMPORTED_MODULE_5__["Events"].ERROR, {
16589 type: _errors__WEBPACK_IMPORTED_MODULE_8__["ErrorTypes"].MEDIA_ERROR,
16590 details: _errors__WEBPACK_IMPORTED_MODULE_8__["ErrorDetails"].FRAG_PARSING_ERROR,
16591 fatal: fatal,
16592 reason: reason
16593 });
16594
16595 if (fatal) {
16596 return;
16597 }
16598 }
16599
16600 _adts__WEBPACK_IMPORTED_MODULE_0__["initTrackConfig"](track, this.observer, data, offset, this.audioCodec);
16601 var pts;
16602
16603 if (pes.pts !== undefined) {
16604 pts = pes.pts;
16605 } else if (aacOverFlow) {
16606 // if last AAC frame is overflowing, we should ensure timestamps are contiguous:
16607 // first sample PTS should be equal to last sample PTS + frameDuration
16608 var frameDuration = _adts__WEBPACK_IMPORTED_MODULE_0__["getFrameDuration"](track.samplerate);
16609 pts = aacOverFlow.sample.pts + frameDuration;
16610 } else {
16611 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].warn('[tsdemuxer]: AAC PES unknown PTS');
16612 return;
16613 } // scan for aac samples
16614
16615
16616 var frameIndex = 0;
16617
16618 while (offset < len) {
16619 if (_adts__WEBPACK_IMPORTED_MODULE_0__["isHeader"](data, offset)) {
16620 if (offset + 5 < len) {
16621 var frame = _adts__WEBPACK_IMPORTED_MODULE_0__["appendFrame"](track, data, offset, pts, frameIndex);
16622
16623 if (frame) {
16624 if (frame.missing) {
16625 this.aacOverFlow = frame;
16626 } else {
16627 offset += frame.length;
16628 frameIndex++;
16629 continue;
16630 }
16631 }
16632 } // We are at an ADTS header, but do not have enough data for a frame
16633 // Remaining data will be added to aacOverFlow
16634
16635
16636 break;
16637 } else {
16638 // nothing found, keep looking
16639 offset++;
16640 }
16641 }
16642 };
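// Illustrative sketch (assumption: getFrameDuration in the ADTS helper returns
// 1024 * 90000 / samplerate, i.e. one AAC frame of 1024 samples expressed in 90 kHz ticks):
//
//   samplerate 48000 -> frameDuration = 1024 * 90000 / 48000 = 1920 ticks  (~21.3 ms)
//   samplerate 44100 -> frameDuration = 1024 * 90000 / 44100 ≈ 2089.8 ticks (~23.2 ms)
//
// So when a PES arrives without a PTS and the previous AAC frame overflowed, the first
// sample of this PES is stamped aacOverFlow.sample.pts + frameDuration, keeping the audio
// timeline contiguous across PES packets.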
16643
16644 _proto.parseMPEGPES = function parseMPEGPES(pes) {
16645 var data = pes.data;
16646 var length = data.length;
16647 var frameIndex = 0;
16648 var offset = 0;
16649 var pts = pes.pts;
16650
16651 if (pts === undefined) {
16652 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].warn('[tsdemuxer]: MPEG PES unknown PTS');
16653 return;
16654 }
16655
16656 while (offset < length) {
16657 if (_mpegaudio__WEBPACK_IMPORTED_MODULE_1__["isHeader"](data, offset)) {
16658 var frame = _mpegaudio__WEBPACK_IMPORTED_MODULE_1__["appendFrame"](this._audioTrack, data, offset, pts, frameIndex);
16659
16660 if (frame) {
16661 offset += frame.length;
16662 frameIndex++;
16663 } else {
16664 // logger.log('Unable to parse Mpeg audio frame');
16665 break;
16666 }
16667 } else {
16668 // nothing found, keep looking
16669 offset++;
16670 }
16671 }
16672 };
16673
16674 _proto.parseID3PES = function parseID3PES(pes) {
16675 if (pes.pts === undefined) {
16676 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].warn('[tsdemuxer]: ID3 PES unknown PTS');
16677 return;
16678 }
16679
16680 this._id3Track.samples.push(pes);
16681 };
16682
16683 return TSDemuxer;
16684}();
16685
16686TSDemuxer.minProbeByteLength = 188;
16687
16688function createAVCSample(key, pts, dts, debug) {
16689 return {
16690 key: key,
16691 frame: false,
16692 pts: pts,
16693 dts: dts,
16694 units: [],
16695 debug: debug,
16696 length: 0
16697 };
16698}
16699
16700function parsePAT(data, offset) {
16701 // skip the PSI header and parse the first PMT entry
16702 return (data[offset + 10] & 0x1f) << 8 | data[offset + 11]; // logger.log('PMT PID:' + this._pmtId);
16703}
16704
16705function parsePMT(data, offset, mpegSupported, isSampleAes) {
16706 var result = {
16707 audio: -1,
16708 avc: -1,
16709 id3: -1,
16710 isAAC: true
16711 };
16712 var sectionLength = (data[offset + 1] & 0x0f) << 8 | data[offset + 2];
16713 var tableEnd = offset + 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
16714 // long the program info descriptors are
16715
16716 var programInfoLength = (data[offset + 10] & 0x0f) << 8 | data[offset + 11]; // advance the offset to the first entry in the mapping table
16717
16718 offset += 12 + programInfoLength;
16719
16720 while (offset < tableEnd) {
16721 var pid = (data[offset + 1] & 0x1f) << 8 | data[offset + 2];
16722
16723 switch (data[offset]) {
16724 case 0xcf:
16725 // SAMPLE-AES AAC
16726 if (!isSampleAes) {
16727 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].log('ADTS AAC with AES-128-CBC frame encryption found in unencrypted stream');
16728 break;
16729 }
16730
16731 /* falls through */
16732
16733 case 0x0f:
16734 // ISO/IEC 13818-7 ADTS AAC (MPEG-2 lower bit-rate audio)
16735 // logger.log('AAC PID:' + pid);
16736 if (result.audio === -1) {
16737 result.audio = pid;
16738 }
16739
16740 break;
16741 // Packetized metadata (ID3)
16742
16743 case 0x15:
16744 // logger.log('ID3 PID:' + pid);
16745 if (result.id3 === -1) {
16746 result.id3 = pid;
16747 }
16748
16749 break;
16750
16751 case 0xdb:
16752 // SAMPLE-AES AVC
16753 if (!isSampleAes) {
16754 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].log('H.264 with AES-128-CBC slice encryption found in unencrypted stream');
16755 break;
16756 }
16757
16758 /* falls through */
16759
16760 case 0x1b:
16761 // ITU-T Rec. H.264 and ISO/IEC 14496-10 (lower bit-rate video)
16762 // logger.log('AVC PID:' + pid);
16763 if (result.avc === -1) {
16764 result.avc = pid;
16765 }
16766
16767 break;
16768 // ISO/IEC 11172-3 (MPEG-1 audio)
16769 // or ISO/IEC 13818-3 (MPEG-2 halved sample rate audio)
16770
16771 case 0x03:
16772 case 0x04:
16773 // logger.log('MPEG PID:' + pid);
16774 if (!mpegSupported) {
16775 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].log('MPEG audio found, not supported in this browser');
16776 } else if (result.audio === -1) {
16777 result.audio = pid;
16778 result.isAAC = false;
16779 }
16780
16781 break;
16782
16783 case 0x24:
16784 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].warn('Unsupported HEVC stream type found');
16785 break;
16786 } // move to the next table entry
16787 // skip past the elementary stream descriptors, if present
16788
16789
16790 offset += ((data[offset + 3] & 0x0f) << 8 | data[offset + 4]) + 5;
16791 }
16792
16793 return result;
16794}
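// Illustrative sketch (hypothetical bytes, not from a real stream): decoding one PMT entry
// with the bit masks used above. Suppose the entry at `offset` reads:
//
//   data[offset]     = 0x0f   // stream_type: ISO/IEC 13818-7 ADTS AAC
//   data[offset + 1] = 0xe1   // reserved bits + top 5 bits of the elementary PID
//   data[offset + 2] = 0x00   // low 8 bits of the elementary PID
//
//   pid = (0xe1 & 0x1f) << 8 | 0x00 = 0x0100 = 256   -> result.audio = 256
//
// The ES_info_length in data[offset + 3..4] is then added (plus 5) to skip any descriptors
// and advance to the next table entry.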
16795
16796function parsePES(stream) {
16797 var i = 0;
16798 var frag;
16799 var pesLen;
16800 var pesHdrLen;
16801 var pesPts;
16802 var pesDts;
16803 if (!stream || stream.size === 0) {
16804 // safety check: return before dereferencing stream.data on an empty/missing stream
16805 return null;
16806 }
16807 var data = stream.data; // we might need up to 19 bytes to read PES header
16808 // if first chunk of data is less than 19 bytes, let's merge it with following ones until we get 19 bytes
16809 // usually only one merge is needed (and this is rare ...)
16810
16811
16812 while (data[0].length < 19 && data.length > 1) {
16813 var newData = new Uint8Array(data[0].length + data[1].length);
16814 newData.set(data[0]);
16815 newData.set(data[1], data[0].length);
16816 data[0] = newData;
16817 data.splice(1, 1);
16818 } // retrieve PTS/DTS from first fragment
16819
16820
16821 frag = data[0];
16822 var pesPrefix = (frag[0] << 16) + (frag[1] << 8) + frag[2];
16823
16824 if (pesPrefix === 1) {
16825 pesLen = (frag[4] << 8) + frag[5]; // if PES parsed length is not zero and greater than total received length, stop parsing. PES might be truncated
16826 // minus 6 : PES header size
16827
16828 if (pesLen && pesLen > stream.size - 6) {
16829 return null;
16830 }
16831
16832 var pesFlags = frag[7];
16833
16834 if (pesFlags & 0xc0) {
16835 /* PES header described here : http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
16836 as PTS / DTS is 33 bit we cannot use bitwise operator in JS,
16837 as Bitwise operators treat their operands as a sequence of 32 bits */
16838 pesPts = (frag[9] & 0x0e) * 536870912 + // 1 << 29
16839 (frag[10] & 0xff) * 4194304 + // 1 << 22
16840 (frag[11] & 0xfe) * 16384 + // 1 << 14
16841 (frag[12] & 0xff) * 128 + // 1 << 7
16842 (frag[13] & 0xfe) / 2;
16843
16844 if (pesFlags & 0x40) {
16845 pesDts = (frag[14] & 0x0e) * 536870912 + // 1 << 29
16846 (frag[15] & 0xff) * 4194304 + // 1 << 22
16847 (frag[16] & 0xfe) * 16384 + // 1 << 14
16848 (frag[17] & 0xff) * 128 + // 1 << 7
16849 (frag[18] & 0xfe) / 2;
16850
16851 if (pesPts - pesDts > 60 * 90000) {
16852 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].warn(Math.round((pesPts - pesDts) / 90000) + "s delta between PTS and DTS, align them");
16853 pesPts = pesDts;
16854 }
16855 } else {
16856 pesDts = pesPts;
16857 }
16858 }
16859
16860 pesHdrLen = frag[8]; // 9 bytes : 6 bytes for PES header + 3 bytes for PES extension
16861
16862 var payloadStartOffset = pesHdrLen + 9;
16863
16864 if (stream.size <= payloadStartOffset) {
16865 return null;
16866 }
16867
16868 stream.size -= payloadStartOffset; // reassemble PES packet
16869
16870 var pesData = new Uint8Array(stream.size);
16871
16872 for (var j = 0, dataLen = data.length; j < dataLen; j++) {
16873 frag = data[j];
16874 var len = frag.byteLength;
16875
16876 if (payloadStartOffset) {
16877 if (payloadStartOffset > len) {
16878 // trim full frag if PES header bigger than frag
16879 payloadStartOffset -= len;
16880 continue;
16881 } else {
16882 // trim partial frag if PES header smaller than frag
16883 frag = frag.subarray(payloadStartOffset);
16884 len -= payloadStartOffset;
16885 payloadStartOffset = 0;
16886 }
16887 }
16888
16889 pesData.set(frag, i);
16890 i += len;
16891 }
16892
16893 if (pesLen) {
16894 // payload size : remove PES header + PES extension
16895 pesLen -= pesHdrLen + 3;
16896 }
16897
16898 return {
16899 data: pesData,
16900 pts: pesPts,
16901 dts: pesDts,
16902 len: pesLen
16903 };
16904 }
16905
16906 return null;
16907}
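// Illustrative sketch (hypothetical header bytes): reconstructing the 33-bit PTS with the
// multiply/divide arithmetic above (bitwise operators would truncate to 32 bits in JS).
// For a PTS of 90000 (exactly 1 second at the 90 kHz MPEG-TS clock) the five PTS bytes are:
//
//   frag[9]  = 0x21  -> (0x21 & 0x0e) * 536870912 = 0
//   frag[10] = 0x00  ->  0x00 * 4194304           = 0
//   frag[11] = 0x05  -> (0x05 & 0xfe) * 16384     = 65536
//   frag[12] = 0xbf  ->  0xbf * 128               = 24448
//   frag[13] = 0x21  -> (0x21 & 0xfe) / 2         = 16
//                                           total = 90000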
16908
16909function pushAccessUnit(avcSample, avcTrack) {
16910 if (avcSample.units.length && avcSample.frame) {
16911 // if sample does not have PTS/DTS, patch with last sample PTS/DTS
16912 if (avcSample.pts === undefined) {
16913 var samples = avcTrack.samples;
16914 var nbSamples = samples.length;
16915
16916 if (nbSamples) {
16917 var lastSample = samples[nbSamples - 1];
16918 avcSample.pts = lastSample.pts;
16919 avcSample.dts = lastSample.dts;
16920 } else {
16921 // dropping samples, no timestamp found
16922 avcTrack.dropped++;
16923 return;
16924 }
16925 }
16926
16927 avcTrack.samples.push(avcSample);
16928 }
16929
16930 if (avcSample.debug.length) {
16931 _utils_logger__WEBPACK_IMPORTED_MODULE_7__["logger"].log(avcSample.pts + '/' + avcSample.dts + ':' + avcSample.debug);
16932 }
16933}
16934
16935function insertSampleInOrder(arr, data) {
16936 var len = arr.length;
16937
16938 if (len > 0) {
16939 if (data.pts >= arr[len - 1].pts) {
16940 arr.push(data);
16941 } else {
16942 for (var pos = len - 1; pos >= 0; pos--) {
16943 if (data.pts < arr[pos].pts) {
16944 arr.splice(pos, 0, data);
16945 break;
16946 }
16947 }
16948 }
16949 } else {
16950 arr.push(data);
16951 }
16952}
16953/**
16954 * remove Emulation Prevention bytes from a RBSP
16955 */
16956
16957
16958function discardEPB(data) {
16959 var length = data.byteLength;
16960 var EPBPositions = [];
16961 var i = 1; // Find all `Emulation Prevention Bytes`
16962
16963 while (i < length - 2) {
16964 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
16965 EPBPositions.push(i + 2);
16966 i += 2;
16967 } else {
16968 i++;
16969 }
16970 } // If no Emulation Prevention Bytes were found just return the original
16971 // array
16972
16973
16974 if (EPBPositions.length === 0) {
16975 return data;
16976 } // Create a new array to hold the NAL unit data
16977
16978
16979 var newLength = length - EPBPositions.length;
16980 var newData = new Uint8Array(newLength);
16981 var sourceIndex = 0;
16982
16983 for (i = 0; i < newLength; sourceIndex++, i++) {
16984 if (sourceIndex === EPBPositions[0]) {
16985 // Skip this byte
16986 sourceIndex++; // Remove this position index
16987
16988 EPBPositions.shift();
16989 }
16990
16991 newData[i] = data[sourceIndex];
16992 }
16993
16994 return newData;
16995}
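// Illustrative sketch (hypothetical NAL bytes): the emulation prevention byte 0x03 that an
// encoder inserts after two zero bytes is dropped, restoring the raw RBSP:
//
//   discardEPB(Uint8Array.of(0x67, 0x00, 0x00, 0x03, 0x01))
//   // -> Uint8Array [0x67, 0x00, 0x00, 0x01]
//
// If no 0x00 0x00 0x03 sequence is found, the input array is returned unchanged.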
16996/* harmony default export */ __webpack_exports__["default"] = (TSDemuxer);
16997
16998/***/ }),
16999
17000/***/ "./src/errors.ts":
17001/*!***********************!*\
17002 !*** ./src/errors.ts ***!
17003 \***********************/
17004/*! exports provided: ErrorTypes, ErrorDetails */
17005/***/ (function(module, __webpack_exports__, __webpack_require__) {
17006__webpack_require__.r(__webpack_exports__);
17007/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ErrorTypes", function() { return ErrorTypes; });
17008/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ErrorDetails", function() { return ErrorDetails; });
17009var ErrorTypes;
17010/**
17011 * @enum {ErrorDetails}
17012 * @typedef {string} ErrorDetail
17013 */
17014
17015(function (ErrorTypes) {
17016 ErrorTypes["NETWORK_ERROR"] = "networkError";
17017 ErrorTypes["MEDIA_ERROR"] = "mediaError";
17018 ErrorTypes["KEY_SYSTEM_ERROR"] = "keySystemError";
17019 ErrorTypes["MUX_ERROR"] = "muxError";
17020 ErrorTypes["OTHER_ERROR"] = "otherError";
17021})(ErrorTypes || (ErrorTypes = {}));
17022
17023var ErrorDetails;
17024
17025(function (ErrorDetails) {
17026 ErrorDetails["KEY_SYSTEM_NO_KEYS"] = "keySystemNoKeys";
17027 ErrorDetails["KEY_SYSTEM_NO_ACCESS"] = "keySystemNoAccess";
17028 ErrorDetails["KEY_SYSTEM_NO_SESSION"] = "keySystemNoSession";
17029 ErrorDetails["KEY_SYSTEM_LICENSE_REQUEST_FAILED"] = "keySystemLicenseRequestFailed";
17030 ErrorDetails["KEY_SYSTEM_NO_INIT_DATA"] = "keySystemNoInitData";
17031 ErrorDetails["MANIFEST_LOAD_ERROR"] = "manifestLoadError";
17032 ErrorDetails["MANIFEST_LOAD_TIMEOUT"] = "manifestLoadTimeOut";
17033 ErrorDetails["MANIFEST_PARSING_ERROR"] = "manifestParsingError";
17034 ErrorDetails["MANIFEST_INCOMPATIBLE_CODECS_ERROR"] = "manifestIncompatibleCodecsError";
17035 ErrorDetails["LEVEL_EMPTY_ERROR"] = "levelEmptyError";
17036 ErrorDetails["LEVEL_LOAD_ERROR"] = "levelLoadError";
17037 ErrorDetails["LEVEL_LOAD_TIMEOUT"] = "levelLoadTimeOut";
17038 ErrorDetails["LEVEL_SWITCH_ERROR"] = "levelSwitchError";
17039 ErrorDetails["AUDIO_TRACK_LOAD_ERROR"] = "audioTrackLoadError";
17040 ErrorDetails["AUDIO_TRACK_LOAD_TIMEOUT"] = "audioTrackLoadTimeOut";
17041 ErrorDetails["SUBTITLE_LOAD_ERROR"] = "subtitleTrackLoadError";
17042 ErrorDetails["SUBTITLE_TRACK_LOAD_TIMEOUT"] = "subtitleTrackLoadTimeOut";
17043 ErrorDetails["FRAG_LOAD_ERROR"] = "fragLoadError";
17044 ErrorDetails["FRAG_LOAD_TIMEOUT"] = "fragLoadTimeOut";
17045 ErrorDetails["FRAG_DECRYPT_ERROR"] = "fragDecryptError";
17046 ErrorDetails["FRAG_PARSING_ERROR"] = "fragParsingError";
17047 ErrorDetails["REMUX_ALLOC_ERROR"] = "remuxAllocError";
17048 ErrorDetails["KEY_LOAD_ERROR"] = "keyLoadError";
17049 ErrorDetails["KEY_LOAD_TIMEOUT"] = "keyLoadTimeOut";
17050 ErrorDetails["BUFFER_ADD_CODEC_ERROR"] = "bufferAddCodecError";
17051 ErrorDetails["BUFFER_INCOMPATIBLE_CODECS_ERROR"] = "bufferIncompatibleCodecsError";
17052 ErrorDetails["BUFFER_APPEND_ERROR"] = "bufferAppendError";
17053 ErrorDetails["BUFFER_APPENDING_ERROR"] = "bufferAppendingError";
17054 ErrorDetails["BUFFER_STALLED_ERROR"] = "bufferStalledError";
17055 ErrorDetails["BUFFER_FULL_ERROR"] = "bufferFullError";
17056 ErrorDetails["BUFFER_SEEK_OVER_HOLE"] = "bufferSeekOverHole";
17057 ErrorDetails["BUFFER_NUDGE_ON_STALL"] = "bufferNudgeOnStall";
17058 ErrorDetails["INTERNAL_EXCEPTION"] = "internalException";
17059 ErrorDetails["INTERNAL_ABORTED"] = "aborted";
17060 ErrorDetails["UNKNOWN"] = "unknown";
17061})(ErrorDetails || (ErrorDetails = {}));
17062
17063/***/ }),
17064
17065/***/ "./src/events.ts":
17066/*!***********************!*\
17067 !*** ./src/events.ts ***!
17068 \***********************/
17069/*! exports provided: Events */
17070/***/ (function(module, __webpack_exports__, __webpack_require__) {
17071__webpack_require__.r(__webpack_exports__);
17072/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "Events", function() { return Events; });
17073/**
17074 * @readonly
17075 * @enum {string}
17076 */
17077var Events;
17078
17079(function (Events) {
17080 Events["MEDIA_ATTACHING"] = "hlsMediaAttaching";
17081 Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
17082 Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
17083 Events["MEDIA_DETACHED"] = "hlsMediaDetached";
17084 Events["BUFFER_RESET"] = "hlsBufferReset";
17085 Events["BUFFER_CODECS"] = "hlsBufferCodecs";
17086 Events["BUFFER_CREATED"] = "hlsBufferCreated";
17087 Events["BUFFER_APPENDING"] = "hlsBufferAppending";
17088 Events["BUFFER_APPENDED"] = "hlsBufferAppended";
17089 Events["BUFFER_EOS"] = "hlsBufferEos";
17090 Events["BUFFER_FLUSHING"] = "hlsBufferFlushing";
17091 Events["BUFFER_FLUSHED"] = "hlsBufferFlushed";
17092 Events["MANIFEST_LOADING"] = "hlsManifestLoading";
17093 Events["MANIFEST_LOADED"] = "hlsManifestLoaded";
17094 Events["MANIFEST_PARSED"] = "hlsManifestParsed";
17095 Events["LEVEL_SWITCHING"] = "hlsLevelSwitching";
17096 Events["LEVEL_SWITCHED"] = "hlsLevelSwitched";
17097 Events["LEVEL_LOADING"] = "hlsLevelLoading";
17098 Events["LEVEL_LOADED"] = "hlsLevelLoaded";
17099 Events["LEVEL_UPDATED"] = "hlsLevelUpdated";
17100 Events["LEVEL_PTS_UPDATED"] = "hlsLevelPtsUpdated";
17101 Events["LEVELS_UPDATED"] = "hlsLevelsUpdated";
17102 Events["AUDIO_TRACKS_UPDATED"] = "hlsAudioTracksUpdated";
17103 Events["AUDIO_TRACK_SWITCHING"] = "hlsAudioTrackSwitching";
17104 Events["AUDIO_TRACK_SWITCHED"] = "hlsAudioTrackSwitched";
17105 Events["AUDIO_TRACK_LOADING"] = "hlsAudioTrackLoading";
17106 Events["AUDIO_TRACK_LOADED"] = "hlsAudioTrackLoaded";
17107 Events["SUBTITLE_TRACKS_UPDATED"] = "hlsSubtitleTracksUpdated";
17108 Events["SUBTITLE_TRACKS_CLEARED"] = "hlsSubtitleTracksCleared";
17109 Events["SUBTITLE_TRACK_SWITCH"] = "hlsSubtitleTrackSwitch";
17110 Events["SUBTITLE_TRACK_LOADING"] = "hlsSubtitleTrackLoading";
17111 Events["SUBTITLE_TRACK_LOADED"] = "hlsSubtitleTrackLoaded";
17112 Events["SUBTITLE_FRAG_PROCESSED"] = "hlsSubtitleFragProcessed";
17113 Events["CUES_PARSED"] = "hlsCuesParsed";
17114 Events["NON_NATIVE_TEXT_TRACKS_FOUND"] = "hlsNonNativeTextTracksFound";
17115 Events["INIT_PTS_FOUND"] = "hlsInitPtsFound";
17116 Events["FRAG_LOADING"] = "hlsFragLoading";
17117 Events["FRAG_LOAD_EMERGENCY_ABORTED"] = "hlsFragLoadEmergencyAborted";
17118 Events["FRAG_LOADED"] = "hlsFragLoaded";
17119 Events["FRAG_DECRYPTED"] = "hlsFragDecrypted";
17120 Events["FRAG_PARSING_INIT_SEGMENT"] = "hlsFragParsingInitSegment";
17121 Events["FRAG_PARSING_USERDATA"] = "hlsFragParsingUserdata";
17122 Events["FRAG_PARSING_METADATA"] = "hlsFragParsingMetadata";
17123 Events["FRAG_PARSED"] = "hlsFragParsed";
17124 Events["FRAG_BUFFERED"] = "hlsFragBuffered";
17125 Events["FRAG_CHANGED"] = "hlsFragChanged";
17126 Events["FPS_DROP"] = "hlsFpsDrop";
17127 Events["FPS_DROP_LEVEL_CAPPING"] = "hlsFpsDropLevelCapping";
17128 Events["ERROR"] = "hlsError";
17129 Events["DESTROYING"] = "hlsDestroying";
17130 Events["KEY_LOADING"] = "hlsKeyLoading";
17131 Events["KEY_LOADED"] = "hlsKeyLoaded";
17132 Events["LIVE_BACK_BUFFER_REACHED"] = "hlsLiveBackBufferReached";
17133 Events["BACK_BUFFER_REACHED"] = "hlsBackBufferReached";
17134})(Events || (Events = {}));
17135
17136/***/ }),
17137
17138/***/ "./src/hls.ts":
17139/*!********************!*\
17140 !*** ./src/hls.ts ***!
17141 \********************/
17142/*! exports provided: default */
17143/***/ (function(module, __webpack_exports__, __webpack_require__) {
17144__webpack_require__.r(__webpack_exports__);
17145/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return Hls; });
17146/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! url-toolkit */ "./node_modules/url-toolkit/src/url-toolkit.js");
17147/* harmony import */ var _loader_playlist_loader__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./loader/playlist-loader */ "./src/loader/playlist-loader.ts");
17148/* harmony import */ var _loader_key_loader__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./loader/key-loader */ "./src/loader/key-loader.ts");
17149/* harmony import */ var _controller_id3_track_controller__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./controller/id3-track-controller */ "./src/controller/id3-track-controller.ts");
17150/* harmony import */ var _controller_latency_controller__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./controller/latency-controller */ "./src/controller/latency-controller.ts");
17151/* harmony import */ var _controller_level_controller__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./controller/level-controller */ "./src/controller/level-controller.ts");
17152/* harmony import */ var _controller_fragment_tracker__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./controller/fragment-tracker */ "./src/controller/fragment-tracker.ts");
17153/* harmony import */ var _controller_stream_controller__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./controller/stream-controller */ "./src/controller/stream-controller.ts");
17154/* harmony import */ var _is_supported__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./is-supported */ "./src/is-supported.ts");
17155/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./utils/logger */ "./src/utils/logger.ts");
17156/* harmony import */ var _config__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./config */ "./src/config.ts");
17157/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! eventemitter3 */ "./node_modules/eventemitter3/index.js");
17158/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ./events */ "./src/events.ts");
17159/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ./errors */ "./src/errors.ts");
17160function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
17161
17162function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
17163
17164
17165
17166
17167
17168
17169
17170
17171
17172
17173
17174
17175
17176
17177
17178
17179/**
17180 * @module Hls
17181 * @class
17182 * @constructor
17183 */
17184var Hls = /*#__PURE__*/function () {
17185 Hls.isSupported = function isSupported() {
17186 return Object(_is_supported__WEBPACK_IMPORTED_MODULE_8__["isSupported"])();
17187 };
17188
17189 /**
17190 * Creates an instance of an HLS client that can attach to exactly one `HTMLMediaElement`.
17191 *
17192 * @constructs Hls
17193 * @param {HlsConfig} config
17194 */
17195 function Hls(userConfig) {
17196 if (userConfig === void 0) {
17197 userConfig = {};
17198 }
17199
17200 this.config = void 0;
17201 this.userConfig = void 0;
17202 this.coreComponents = void 0;
17203 this.networkControllers = void 0;
17204 this._emitter = new eventemitter3__WEBPACK_IMPORTED_MODULE_11__["EventEmitter"]();
17205 this._autoLevelCapping = void 0;
17206 this.abrController = void 0;
17207 this.bufferController = void 0;
17208 this.capLevelController = void 0;
17209 this.latencyController = void 0;
17210 this.levelController = void 0;
17211 this.streamController = void 0;
17212 this.audioTrackController = void 0;
17213 this.subtitleTrackController = void 0;
17214 this.emeController = void 0;
17215 this.cmcdController = void 0;
17216 this._media = null;
17217 this.url = null;
17218 var config = this.config = Object(_config__WEBPACK_IMPORTED_MODULE_10__["mergeConfig"])(Hls.DefaultConfig, userConfig);
17219 this.userConfig = userConfig;
17220 Object(_utils_logger__WEBPACK_IMPORTED_MODULE_9__["enableLogs"])(config.debug);
17221 this._autoLevelCapping = -1;
17222
17223 if (config.progressive) {
17224 Object(_config__WEBPACK_IMPORTED_MODULE_10__["enableStreamingMode"])(config);
17225 } // core controllers and network loaders
17226
17227
17228 var ConfigAbrController = config.abrController,
17229 ConfigBufferController = config.bufferController,
17230 ConfigCapLevelController = config.capLevelController,
17231 ConfigFpsController = config.fpsController;
17232 var abrController = this.abrController = new ConfigAbrController(this);
17233 var bufferController = this.bufferController = new ConfigBufferController(this);
17234 var capLevelController = this.capLevelController = new ConfigCapLevelController(this);
17235 var fpsController = new ConfigFpsController(this);
17236 var playListLoader = new _loader_playlist_loader__WEBPACK_IMPORTED_MODULE_1__["default"](this);
17237 var keyLoader = new _loader_key_loader__WEBPACK_IMPORTED_MODULE_2__["default"](this);
17238 var id3TrackController = new _controller_id3_track_controller__WEBPACK_IMPORTED_MODULE_3__["default"](this); // network controllers
17239
17240 var levelController = this.levelController = new _controller_level_controller__WEBPACK_IMPORTED_MODULE_5__["default"](this); // FragmentTracker must be defined before StreamController because the order of event handling is important
17241
17242 var fragmentTracker = new _controller_fragment_tracker__WEBPACK_IMPORTED_MODULE_6__["FragmentTracker"](this);
17243 var streamController = this.streamController = new _controller_stream_controller__WEBPACK_IMPORTED_MODULE_7__["default"](this, fragmentTracker); // Cap level controller uses streamController to flush the buffer
17244
17245 capLevelController.setStreamController(streamController); // fpsController uses streamController to switch when frames are being dropped
17246
17247 fpsController.setStreamController(streamController);
17248 var networkControllers = [levelController, streamController];
17249 this.networkControllers = networkControllers;
17250 var coreComponents = [playListLoader, keyLoader, abrController, bufferController, capLevelController, fpsController, id3TrackController, fragmentTracker];
17251 this.audioTrackController = this.createController(config.audioTrackController, null, networkControllers);
17252 this.createController(config.audioStreamController, fragmentTracker, networkControllers); // subtitleTrackController must be defined before because the order of event handling is important
17253
17254 this.subtitleTrackController = this.createController(config.subtitleTrackController, null, networkControllers);
17255 this.createController(config.subtitleStreamController, fragmentTracker, networkControllers);
17256 this.createController(config.timelineController, null, coreComponents);
17257 this.emeController = this.createController(config.emeController, null, coreComponents);
17258 this.cmcdController = this.createController(config.cmcdController, null, coreComponents);
17259 this.latencyController = this.createController(_controller_latency_controller__WEBPACK_IMPORTED_MODULE_4__["default"], null, coreComponents);
17260 this.coreComponents = coreComponents;
17261 }
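// Illustrative sketch (not part of the bundle): a typical way to construct and wire up an
// instance of this class, assuming a <video> element and an HLS manifest URL are available.
// `Events` refers to the enum exported by ./src/events.ts in this bundle.
//
//   if (Hls.isSupported()) {
//     var hls = new Hls({ debug: false });
//     hls.attachMedia(videoElement);
//     hls.on(Events.MEDIA_ATTACHED, function () {
//       hls.loadSource('https://example.com/stream.m3u8');
//     });
//     hls.on(Events.MANIFEST_PARSED, function (event, data) {
//       videoElement.play();
//     });
//   }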
17262
17263 var _proto = Hls.prototype;
17264
17265 _proto.createController = function createController(ControllerClass, fragmentTracker, components) {
17266 if (ControllerClass) {
17267 var controllerInstance = fragmentTracker ? new ControllerClass(this, fragmentTracker) : new ControllerClass(this);
17268
17269 if (components) {
17270 components.push(controllerInstance);
17271 }
17272
17273 return controllerInstance;
17274 }
17275
17276 return null;
17277 } // Delegate the EventEmitter through the public API of Hls.js
17278 ;
17279
17280 _proto.on = function on(event, listener, context) {
17281 if (context === void 0) {
17282 context = this;
17283 }
17284
17285 this._emitter.on(event, listener, context);
17286 };
17287
17288 _proto.once = function once(event, listener, context) {
17289 if (context === void 0) {
17290 context = this;
17291 }
17292
17293 this._emitter.once(event, listener, context);
17294 };
17295
17296 _proto.removeAllListeners = function removeAllListeners(event) {
17297 this._emitter.removeAllListeners(event);
17298 };
17299
17300 _proto.off = function off(event, listener, context, once) {
17301 if (context === void 0) {
17302 context = this;
17303 }
17304
17305 this._emitter.off(event, listener, context, once);
17306 };
17307
17308 _proto.listeners = function listeners(event) {
17309 return this._emitter.listeners(event);
17310 };
17311
17312 _proto.emit = function emit(event, name, eventObject) {
17313 return this._emitter.emit(event, name, eventObject);
17314 };
17315
17316 _proto.trigger = function trigger(event, eventObject) {
17317 if (this.config.debug) {
17318 return this.emit(event, event, eventObject);
17319 } else {
17320 try {
17321 return this.emit(event, event, eventObject);
17322 } catch (e) {
17323 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].error('An internal error happened while handling event ' + event + '. Error message: "' + e.message + '". Here is a stacktrace:', e);
17324 this.trigger(_events__WEBPACK_IMPORTED_MODULE_12__["Events"].ERROR, {
17325 type: _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorTypes"].OTHER_ERROR,
17326 details: _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].INTERNAL_EXCEPTION,
17327 fatal: false,
17328 event: event,
17329 error: e
17330 });
17331 }
17332 }
17333
17334 return false;
17335 };
17336
17337 _proto.listenerCount = function listenerCount(event) {
17338 return this._emitter.listenerCount(event);
17339 }
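// Illustrative sketch of the delegated EventEmitter API above (onLevelSwitched and
// onFirstError are hypothetical callbacks, not part of this bundle):
//
//   function onLevelSwitched(event, data) { console.log('switched to level', data.level); }
//   hls.on(Events.LEVEL_SWITCHED, onLevelSwitched);
//   hls.once(Events.ERROR, onFirstError);             // invoked at most once
//   hls.off(Events.LEVEL_SWITCHED, onLevelSwitched);  // listener (and context) must match to remove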
17340 /**
17341 * Dispose of the instance
17342 */
17343 ;
17344
17345 _proto.destroy = function destroy() {
17346 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('destroy');
17347 this.trigger(_events__WEBPACK_IMPORTED_MODULE_12__["Events"].DESTROYING, undefined);
17348 this.detachMedia();
17349 this.removeAllListeners();
17350 this._autoLevelCapping = -1;
17351 this.url = null;
17352 this.networkControllers.forEach(function (component) {
17353 return component.destroy();
17354 });
17355 this.networkControllers.length = 0;
17356 this.coreComponents.forEach(function (component) {
17357 return component.destroy();
17358 });
17359 this.coreComponents.length = 0;
17360 }
17361 /**
17362 * Attaches Hls.js to a media element
17363 * @param {HTMLMediaElement} media
17364 */
17365 ;
17366
17367 _proto.attachMedia = function attachMedia(media) {
17368 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('attachMedia');
17369 this._media = media;
17370 this.trigger(_events__WEBPACK_IMPORTED_MODULE_12__["Events"].MEDIA_ATTACHING, {
17371 media: media
17372 });
17373 }
17374 /**
17375 * Detach Hls.js from the media
17376 */
17377 ;
17378
17379 _proto.detachMedia = function detachMedia() {
17380 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('detachMedia');
17381 this.trigger(_events__WEBPACK_IMPORTED_MODULE_12__["Events"].MEDIA_DETACHING, undefined);
17382 this._media = null;
17383 }
17384 /**
17385 * Set the source URL. Can be relative or absolute.
17386 * @param {string} url
17387 */
17388 ;
17389
17390 _proto.loadSource = function loadSource(url) {
17391 this.stopLoad();
17392 var media = this.media;
17393 var loadedSource = this.url;
17394 var loadingSource = this.url = url_toolkit__WEBPACK_IMPORTED_MODULE_0__["buildAbsoluteURL"](self.location.href, url, {
17395 alwaysNormalize: true
17396 });
17397 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("loadSource:" + loadingSource);
17398
17399 if (media && loadedSource && loadedSource !== loadingSource && this.bufferController.hasSourceTypes()) {
17400 this.detachMedia();
17401 this.attachMedia(media);
17402 } // when attaching to a source URL, trigger a playlist load
17403
17404
17405 this.trigger(_events__WEBPACK_IMPORTED_MODULE_12__["Events"].MANIFEST_LOADING, {
17406 url: url
17407 });
17408 }
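// Illustrative sketch: calling loadSource() again with a different URL on a playing instance
// switches streams; per the branch above, the media element is detached and re-attached when
// the buffer already holds source types, and a fresh MANIFEST_LOADING round is triggered.
//
//   hls.loadSource('https://example.com/another-stream.m3u8');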
17409 /**
17410 * Start loading data from the stream source.
17411 * Depending on the config, the client starts loading automatically when a source is set.
17412 *
17413 * @param {number} startPosition Set the start position to stream from
17414 * @default -1 None (from earliest point)
17415 */
17416 ;
17417
17418 _proto.startLoad = function startLoad(startPosition) {
17419 if (startPosition === void 0) {
17420 startPosition = -1;
17421 }
17422
17423 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("startLoad(" + startPosition + ")");
17424 this.networkControllers.forEach(function (controller) {
17425 controller.startLoad(startPosition);
17426 });
17427 }
17428 /**
17429 * Stop loading of any stream data.
17430 */
17431 ;
17432
17433 _proto.stopLoad = function stopLoad() {
17434 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('stopLoad');
17435 this.networkControllers.forEach(function (controller) {
17436 controller.stopLoad();
17437 });
17438 }
17439 /**
17440 * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
17441 */
17442 ;
17443
17444 _proto.swapAudioCodec = function swapAudioCodec() {
17445 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('swapAudioCodec');
17446 this.streamController.swapAudioCodec();
17447 }
17448 /**
17449 * When the media element fails, this convenience method detaches and then
17450 * re-attaches it in one call.
17451 *
17452 * Automatic recovery of media-errors by this process is configurable.
17453 */
17454 ;
17455
17456 _proto.recoverMediaError = function recoverMediaError() {
17457 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('recoverMediaError');
17458 var media = this._media;
17459 this.detachMedia();
17460
17461 if (media) {
17462 this.attachMedia(media);
17463 }
17464 };
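// Illustrative sketch of the recovery pattern this method is meant for, using the ERROR
// event and the error enums defined in this bundle (a common pattern, not the only one):
//
//   hls.on(Events.ERROR, function (event, data) {
//     if (!data.fatal) return;
//     if (data.type === ErrorTypes.NETWORK_ERROR) {
//       hls.startLoad();            // retry network-related failures
//     } else if (data.type === ErrorTypes.MEDIA_ERROR) {
//       hls.recoverMediaError();    // detach + re-attach the media element
//     } else {
//       hls.destroy();              // unrecoverable
//     }
//   });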
17465
17466 _proto.removeLevel = function removeLevel(levelIndex, urlId) {
17467 if (urlId === void 0) {
17468 urlId = 0;
17469 }
17470
17471 this.levelController.removeLevel(levelIndex, urlId);
17472 }
17473 /**
17474 * @type {Level[]}
17475 */
17476 ;
17477
17478 _createClass(Hls, [{
17479 key: "levels",
17480 get: function get() {
17481 var levels = this.levelController.levels;
17482 return levels ? levels : [];
17483 }
17484 /**
17485 * Index of quality level currently played
17486 * @type {number}
17487 */
17488
17489 }, {
17490 key: "currentLevel",
17491 get: function get() {
17492 return this.streamController.currentLevel;
17493 }
17494 /**
17495 * Set quality level index immediately.
17496 * This will flush the current buffer to replace the quality asap.
17497 * That means playback will be interrupted at least briefly while it re-buffers and re-syncs.
17498 * @type {number} -1 for automatic level selection
17499 */
17500 ,
17501 set: function set(newLevel) {
17502 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set currentLevel:" + newLevel);
17503 this.loadLevel = newLevel;
17504 this.abrController.clearTimer();
17505 this.streamController.immediateLevelSwitch();
17506 }
17507 /**
17508 * Index of next quality level loaded as scheduled by stream controller.
17509 * @type {number}
17510 */
17511
17512 }, {
17513 key: "nextLevel",
17514 get: function get() {
17515 return this.streamController.nextLevel;
17516 }
17517 /**
17518 * Set quality level index for next loaded data.
17519 * This will switch the video quality asap, without interrupting playback.
17520 * May abort current loading of data, and flush parts of buffer (outside currently played fragment region).
17521 * @type {number} -1 for automatic level selection
17522 */
17523 ,
17524 set: function set(newLevel) {
17525 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set nextLevel:" + newLevel);
17526 this.levelController.manualLevel = newLevel;
17527 this.streamController.nextLevelSwitch();
17528 }
17529 /**
17530 * Return the quality level of the currently loaded segment (or of the last loaded one, if none is loading currently)
17531 * @type {number}
17532 */
17533
17534 }, {
17535 key: "loadLevel",
17536 get: function get() {
17537 return this.levelController.level;
17538 }
17539 /**
17540 * Set quality level index for next loaded data in a conservative way.
17541 * This will switch the quality without flushing, but interrupt current loading.
17542 * Thus the quality switch only takes effect once the already buffered data has played out.
17543 * @type {number} newLevel -1 for automatic level selection
17544 */
17545 ,
17546 set: function set(newLevel) {
17547 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set loadLevel:" + newLevel);
17548 this.levelController.manualLevel = newLevel;
17549 }
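// Illustrative sketch: the three quality setters above differ only in how aggressively the
// switch is applied (each accepts -1 to return to automatic level selection):
//
//   hls.currentLevel = 2;  // flush buffer + immediate switch, playback may stall briefly
//   hls.nextLevel = 2;     // switch asap, flushes only buffer outside the playing fragment
//   hls.loadLevel = 2;     // switch without flushing; takes effect after the existing buffer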
17550 /**
17551 * get next quality level loaded
17552 * @type {number}
17553 */
17554
17555 }, {
17556 key: "nextLoadLevel",
17557 get: function get() {
17558 return this.levelController.nextLoadLevel;
17559 }
17560 /**
17561 * Set quality level of next loaded segment in a fully "non-destructive" way.
17562 * Same as `loadLevel` but will wait for next switch (until current loading is done).
17563 * @type {number} level
17564 */
17565 ,
17566 set: function set(level) {
17567 this.levelController.nextLoadLevel = level;
17568 }
17569 /**
17570 * Return "first level": like a default level, if not set,
17571 * falls back to index of first level referenced in manifest
17572 * @type {number}
17573 */
17574
17575 }, {
17576 key: "firstLevel",
17577 get: function get() {
17578 return Math.max(this.levelController.firstLevel, this.minAutoLevel);
17579 }
17580 /**
17581 * Sets "first-level", see getter.
17582 * @type {number}
17583 */
17584 ,
17585 set: function set(newLevel) {
17586 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set firstLevel:" + newLevel);
17587 this.levelController.firstLevel = newLevel;
17588 }
17589 /**
17590 * Return start level (level of first fragment that will be played back)
17591 * if not overridden by the user, the first level appearing in the manifest will be used as start level
17592 * if -1 : automatic start level selection, playback will start from level matching download bandwidth
17593 * (determined from download of first segment)
17594 * @type {number}
17595 */
17596
17597 }, {
17598 key: "startLevel",
17599 get: function get() {
17600 return this.levelController.startLevel;
17601 }
17602 /**
17603 * set start level (level of first fragment that will be played back)
17604 * if not overridden by the user, the first level appearing in the manifest will be used as start level
17605 * if -1 : automatic start level selection, playback will start from level matching download bandwidth
17606 * (determined from download of first segment)
17607 * @type {number} newLevel
17608 */
17609 ,
17610 set: function set(newLevel) {
17611 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set startLevel:" + newLevel); // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
17612
17613 if (newLevel !== -1) {
17614 newLevel = Math.max(newLevel, this.minAutoLevel);
17615 }
17616
17617 this.levelController.startLevel = newLevel;
17618 }
17619 /**
17620 * Get the current setting for capLevelToPlayerSize
17621 *
17622 * @type {boolean}
17623 */
17624
17625 }, {
17626 key: "capLevelToPlayerSize",
17627 get: function get() {
17628 return this.config.capLevelToPlayerSize;
17629 }
17630 /**
17631 * Dynamically set capLevelToPlayerSize (handled by `CapLevelController`)
17632 *
17633 * @type {boolean}
17634 */
17635 ,
17636 set: function set(shouldStartCapping) {
17637 var newCapLevelToPlayerSize = !!shouldStartCapping;
17638
17639 if (newCapLevelToPlayerSize !== this.config.capLevelToPlayerSize) {
17640 if (newCapLevelToPlayerSize) {
17641 this.capLevelController.startCapping(); // If capping occurs, nextLevelSwitch will happen based on size.
17642 } else {
17643 this.capLevelController.stopCapping();
17644 this.autoLevelCapping = -1;
17645 this.streamController.nextLevelSwitch(); // Now we're uncapped, get the next level asap.
17646 }
17647
17648 this.config.capLevelToPlayerSize = newCapLevelToPlayerSize;
17649 }
17650 }
17651 /**
17652 * Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
17653 * @type {number}
17654 */
17655
17656 }, {
17657 key: "autoLevelCapping",
17658 get: function get() {
17659 return this._autoLevelCapping;
17660 }
17661 /**
17662 * get bandwidth estimate
17663 * @type {number}
17664 */
17665 ,
17666 set:
17667 /**
17668 * Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
17669 * @type {number}
17670 */
17671 function set(newLevel) {
17672 if (this._autoLevelCapping !== newLevel) {
17673 _utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set autoLevelCapping:" + newLevel);
17674 this._autoLevelCapping = newLevel;
17675 }
17676 }
17677 /**
17678 * True when automatic level selection enabled
17679 * @type {boolean}
17680 */
17681
17682 }, {
17683 key: "bandwidthEstimate",
17684 get: function get() {
17685 var bwEstimator = this.abrController.bwEstimator;
17686
17687 if (!bwEstimator) {
17688 return NaN;
17689 }
17690
17691 return bwEstimator.getEstimate();
17692 }
17693 }, {
17694 key: "autoLevelEnabled",
17695 get: function get() {
17696 return this.levelController.manualLevel === -1;
17697 }
17698 /**
17699 * Level set manually (if any)
17700 * @type {number}
17701 */
17702
17703 }, {
17704 key: "manualLevel",
17705 get: function get() {
17706 return this.levelController.manualLevel;
17707 }
17708 /**
17709 * min level selectable in auto mode according to config.minAutoBitrate
17710 * @type {number}
17711 */
17712
17713 }, {
17714 key: "minAutoLevel",
17715 get: function get() {
17716 var levels = this.levels,
17717 minAutoBitrate = this.config.minAutoBitrate;
17718 if (!levels) return 0;
17719 var len = levels.length;
17720
17721 for (var i = 0; i < len; i++) {
17722 if (levels[i].maxBitrate > minAutoBitrate) {
17723 return i;
17724 }
17725 }
17726
17727 return 0;
17728 }
17729 /**
17730 * max level selectable in auto mode according to autoLevelCapping
17731 * @type {number}
17732 */
17733
17734 }, {
17735 key: "maxAutoLevel",
17736 get: function get() {
17737 var levels = this.levels,
17738 autoLevelCapping = this.autoLevelCapping;
17739 var maxAutoLevel;
17740
17741 if (autoLevelCapping === -1 && levels && levels.length) {
17742 maxAutoLevel = levels.length - 1;
17743 } else {
17744 maxAutoLevel = autoLevelCapping;
17745 }
17746
17747 return maxAutoLevel;
17748 }
17749 /**
17750 * next automatically selected quality level
17751 * @type {number}
17752 */
17753
17754 }, {
17755 key: "nextAutoLevel",
17756 get: function get() {
17757 // ensure next auto level is between min and max auto level
17758 return Math.min(Math.max(this.abrController.nextAutoLevel, this.minAutoLevel), this.maxAutoLevel);
17759 }
17760 /**
17761 * this setter is used to force the next auto level.
17762 * this is useful to force a switch down in auto mode:
17763 * in case of a load error on level N, hls.js can set nextAutoLevel to N-1, for example.
17764 * the forced value is valid for one fragment. upon successful frag loading at the forced level,
17765 * this value will be reset to -1 by the ABR controller.
17766 * @type {number}
17767 */
17768 ,
17769 set: function set(nextLevel) {
17770 this.abrController.nextAutoLevel = Math.max(this.minAutoLevel, nextLevel);
17771 }
17772 /**
17773 * @type {AudioTrack[]}
17774 */
17775
17776 }, {
17777 key: "audioTracks",
17778 get: function get() {
17779 var audioTrackController = this.audioTrackController;
17780 return audioTrackController ? audioTrackController.audioTracks : [];
17781 }
17782 /**
17783 * index of the selected audio track (index in audio track lists)
17784 * @type {number}
17785 */
17786
17787 }, {
17788 key: "audioTrack",
17789 get: function get() {
17790 var audioTrackController = this.audioTrackController;
17791 return audioTrackController ? audioTrackController.audioTrack : -1;
17792 }
17793 /**
17794 * selects an audio track, based on its index in audio track lists
17795 * @type {number}
17796 */
17797 ,
17798 set: function set(audioTrackId) {
17799 var audioTrackController = this.audioTrackController;
17800
17801 if (audioTrackController) {
17802 audioTrackController.audioTrack = audioTrackId;
17803 }
17804 }
17805 /**
17806 * get alternate subtitle tracks list from playlist
17807 * @type {MediaPlaylist[]}
17808 */
17809
17810 }, {
17811 key: "subtitleTracks",
17812 get: function get() {
17813 var subtitleTrackController = this.subtitleTrackController;
17814 return subtitleTrackController ? subtitleTrackController.subtitleTracks : [];
17815 }
17816 /**
17817 * index of the selected subtitle track (index in subtitle track lists)
17818 * @type {number}
17819 */
17820
17821 }, {
17822 key: "subtitleTrack",
17823 get: function get() {
17824 var subtitleTrackController = this.subtitleTrackController;
17825 return subtitleTrackController ? subtitleTrackController.subtitleTrack : -1;
17826 },
17827 set:
17828 /**
17829 * selects a subtitle track, based on its index in subtitle track lists
17830 * @type {number}
17831 */
17832 function set(subtitleTrackId) {
17833 var subtitleTrackController = this.subtitleTrackController;
17834
17835 if (subtitleTrackController) {
17836 subtitleTrackController.subtitleTrack = subtitleTrackId;
17837 }
17838 }
17839 /**
17840 * @type {boolean}
17841 */
17842
17843 }, {
17844 key: "media",
17845 get: function get() {
17846 return this._media;
17847 }
17848 }, {
17849 key: "subtitleDisplay",
17850 get: function get() {
17851 var subtitleTrackController = this.subtitleTrackController;
17852 return subtitleTrackController ? subtitleTrackController.subtitleDisplay : false;
17853 }
17854 /**
17855 * Enable/disable subtitle display rendering
17856 * @type {boolean}
17857 */
17858 ,
17859 set: function set(value) {
17860 var subtitleTrackController = this.subtitleTrackController;
17861
17862 if (subtitleTrackController) {
17863 subtitleTrackController.subtitleDisplay = value;
17864 }
17865 }
17866 /**
17867 * get mode for Low-Latency HLS loading
17868 * @type {boolean}
17869 */
17870
17871 }, {
17872 key: "lowLatencyMode",
17873 get: function get() {
17874 return this.config.lowLatencyMode;
17875 }
17876 /**
17877 * Enable/disable Low-Latency HLS part playlist and segment loading, and start live streams at playlist PART-HOLD-BACK rather than HOLD-BACK.
17878 * @type {boolean}
17879 */
17880 ,
17881 set: function set(mode) {
17882 this.config.lowLatencyMode = mode;
17883 }
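 /**
 * Configuration sketch (hedged example; assumes the page constructs its own
 * `Hls` instance): lowLatencyMode is normally passed at construction time and
 * can be toggled later through the setter above.
 * @example
 * var hls = new Hls({ lowLatencyMode: true }); // opt in to LL-HLS part loading
 * // later, fall back to regular segment loading:
 * hls.lowLatencyMode = false;
 */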
17884 /**
17885 * position (in seconds) of live sync point (ie edge of live position minus safety delay defined by ```hls.config.liveSyncDuration```)
17886 * @type {number}
17887 */
17888
17889 }, {
17890 key: "liveSyncPosition",
17891 get: function get() {
17892 return this.latencyController.liveSyncPosition;
17893 }
17894 /**
17895 * estimated position (in seconds) of the live edge (i.e. the edge of the live playlist plus the time the playlist has advanced since it was loaded)
17896 * returns 0 before first playlist is loaded
17897 * @type {number}
17898 */
17899
17900 }, {
17901 key: "latency",
17902 get: function get() {
17903 return this.latencyController.latency;
17904 }
17905 /**
17906 * maximum distance from the edge before the player seeks forward to ```hls.liveSyncPosition```
17907 * configured using ```liveMaxLatencyDurationCount``` (multiple of target duration) or ```liveMaxLatencyDuration```
17908 * returns 0 before first playlist is loaded
17909 * @type {number}
17910 */
17911
17912 }, {
17913 key: "maxLatency",
17914 get: function get() {
17915 return this.latencyController.maxLatency;
17916 }
17917 /**
17918 * target distance from the edge as calculated by the latency controller
17919 * @type {number}
17920 */
17921
17922 }, {
17923 key: "targetLatency",
17924 get: function get() {
17925 return this.latencyController.targetLatency;
17926 }
17927 /**
17928 * the rate at which the edge of the current live playlist is advancing or 1 if there is none
17929 * @type {number}
17930 */
17931
17932 }, {
17933 key: "drift",
17934 get: function get() {
17935 return this.latencyController.drift;
17936 }
17937 /**
17938 * set to true when startLoad is called before MANIFEST_PARSED event
17939 * @type {boolean}
17940 */
17941
17942 }, {
17943 key: "forceStartLoad",
17944 get: function get() {
17945 return this.streamController.forceStartLoad;
17946 }
17947 }], [{
17948 key: "version",
17949 get: function get() {
17950 return "1.1.5";
17951 }
17952 }, {
17953 key: "Events",
17954 get: function get() {
17955 return _events__WEBPACK_IMPORTED_MODULE_12__["Events"];
17956 }
17957 }, {
17958 key: "ErrorTypes",
17959 get: function get() {
17960 return _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorTypes"];
17961 }
17962 }, {
17963 key: "ErrorDetails",
17964 get: function get() {
17965 return _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"];
17966 }
17967 }, {
17968 key: "DefaultConfig",
17969 get: function get() {
17970 if (!Hls.defaultConfig) {
17971 return _config__WEBPACK_IMPORTED_MODULE_10__["hlsDefaultConfig"];
17972 }
17973
17974 return Hls.defaultConfig;
17975 }
17976 /**
17977 * @type {HlsConfig}
17978 */
17979 ,
17980 set: function set(defaultConfig) {
17981 Hls.defaultConfig = defaultConfig;
17982 }
17983 }]);
17984
17985 return Hls;
17986}();
17987
17988Hls.defaultConfig = void 0;
17989
17990
17991/***/ }),
17992
17993/***/ "./src/is-supported.ts":
17994/*!*****************************!*\
17995 !*** ./src/is-supported.ts ***!
17996 \*****************************/
17997/*! exports provided: isSupported, changeTypeSupported */
17998/***/ (function(module, __webpack_exports__, __webpack_require__) {
17999__webpack_require__.r(__webpack_exports__);
18000/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isSupported", function() { return isSupported; });
18001/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "changeTypeSupported", function() { return changeTypeSupported; });
18002/* harmony import */ var _utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./utils/mediasource-helper */ "./src/utils/mediasource-helper.ts");
18003
18004
18005function getSourceBuffer() {
18006 return self.SourceBuffer || self.WebKitSourceBuffer;
18007}
18008
18009function isSupported() {
18010 var mediaSource = Object(_utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_0__["getMediaSource"])();
18011
18012 if (!mediaSource) {
18013 return false;
18014 }
18015
18016 var sourceBuffer = getSourceBuffer();
18017 var isTypeSupported = mediaSource && typeof mediaSource.isTypeSupported === 'function' && mediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"'); // if SourceBuffer is exposed ensure its API is valid
18018 // Safari and old versions of Chrome do not expose SourceBuffer globally, so checking SourceBuffer.prototype is impossible
18019
18020 var sourceBufferValidAPI = !sourceBuffer || sourceBuffer.prototype && typeof sourceBuffer.prototype.appendBuffer === 'function' && typeof sourceBuffer.prototype.remove === 'function';
18021 return !!isTypeSupported && !!sourceBufferValidAPI;
18022}
18023function changeTypeSupported() {
18024 var _sourceBuffer$prototy;
18025
18026 var sourceBuffer = getSourceBuffer();
18027 return typeof (sourceBuffer === null || sourceBuffer === void 0 ? void 0 : (_sourceBuffer$prototy = sourceBuffer.prototype) === null || _sourceBuffer$prototy === void 0 ? void 0 : _sourceBuffer$prototy.changeType) === 'function';
18028}
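/*
 * Usage sketch for the feature detection above (a hedged example; `video` is
 * assumed to be an HTMLVideoElement and `src` an HLS manifest URL provided by
 * the embedding page). From application code these checks are reached as
 * `Hls.isSupported()`:
 *
 *   if (Hls.isSupported()) {
 *     var hls = new Hls();
 *     hls.loadSource(src);
 *     hls.attachMedia(video);
 *   } else if (video.canPlayType('application/vnd.apple.mpegurl')) {
 *     // Safari/iOS can play HLS natively without MediaSource
 *     video.src = src;
 *   }
 */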
18029
18030/***/ }),
18031
18032/***/ "./src/loader/fragment-loader.ts":
18033/*!***************************************!*\
18034 !*** ./src/loader/fragment-loader.ts ***!
18035 \***************************************/
18036/*! exports provided: default, LoadError */
18037/***/ (function(module, __webpack_exports__, __webpack_require__) {
18038__webpack_require__.r(__webpack_exports__);
18039/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return FragmentLoader; });
18040/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "LoadError", function() { return LoadError; });
18041/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
18042/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
18043
18044
18045
18046function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
18047
18048function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
18049
18050function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
18051
18052function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {})); return true; } catch (e) { return false; } }
18053
18054function _isNativeFunction(fn) { return Function.toString.call(fn).indexOf("[native code]") !== -1; }
18055
18056function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
18057
18058function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
18059
18060
18061var MIN_CHUNK_SIZE = Math.pow(2, 17); // 128 KB
18062
18063var FragmentLoader = /*#__PURE__*/function () {
18064 function FragmentLoader(config) {
18065 this.config = void 0;
18066 this.loader = null;
18067 this.partLoadTimeout = -1;
18068 this.config = config;
18069 }
18070
18071 var _proto = FragmentLoader.prototype;
18072
18073 _proto.destroy = function destroy() {
18074 if (this.loader) {
18075 this.loader.destroy();
18076 this.loader = null;
18077 }
18078 };
18079
18080 _proto.abort = function abort() {
18081 if (this.loader) {
18082 // Abort the loader for current fragment. Only one may load at any given time
18083 this.loader.abort();
18084 }
18085 };
18086
18087 _proto.load = function load(frag, _onProgress) {
18088 var _this = this;
18089
18090 var url = frag.url;
18091
18092 if (!url) {
18093 return Promise.reject(new LoadError({
18094 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
18095 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_LOAD_ERROR,
18096 fatal: false,
18097 frag: frag,
18098 networkDetails: null
18099 }, "Fragment does not have a " + (url ? 'part list' : 'url')));
18100 }
18101
18102 this.abort();
18103 var config = this.config;
18104 var FragmentILoader = config.fLoader;
18105 var DefaultILoader = config.loader;
18106 return new Promise(function (resolve, reject) {
18107 if (_this.loader) {
18108 _this.loader.destroy();
18109 }
18110
18111 var loader = _this.loader = frag.loader = FragmentILoader ? new FragmentILoader(config) : new DefaultILoader(config);
18112 var loaderContext = createLoaderContext(frag);
18113 var loaderConfig = {
18114 timeout: config.fragLoadingTimeOut,
18115 maxRetry: 0,
18116 retryDelay: 0,
18117 maxRetryDelay: config.fragLoadingMaxRetryTimeout,
18118 highWaterMark: MIN_CHUNK_SIZE
18119 }; // Assign frag stats to the loader's stats reference
18120
18121 frag.stats = loader.stats;
18122 loader.load(loaderContext, loaderConfig, {
18123 onSuccess: function onSuccess(response, stats, context, networkDetails) {
18124 _this.resetLoader(frag, loader);
18125
18126 resolve({
18127 frag: frag,
18128 part: null,
18129 payload: response.data,
18130 networkDetails: networkDetails
18131 });
18132 },
18133 onError: function onError(response, context, networkDetails) {
18134 _this.resetLoader(frag, loader);
18135
18136 reject(new LoadError({
18137 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
18138 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_LOAD_ERROR,
18139 fatal: false,
18140 frag: frag,
18141 response: response,
18142 networkDetails: networkDetails
18143 }));
18144 },
18145 onAbort: function onAbort(stats, context, networkDetails) {
18146 _this.resetLoader(frag, loader);
18147
18148 reject(new LoadError({
18149 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
18150 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].INTERNAL_ABORTED,
18151 fatal: false,
18152 frag: frag,
18153 networkDetails: networkDetails
18154 }));
18155 },
18156 onTimeout: function onTimeout(response, context, networkDetails) {
18157 _this.resetLoader(frag, loader);
18158
18159 reject(new LoadError({
18160 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
18161 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_LOAD_TIMEOUT,
18162 fatal: false,
18163 frag: frag,
18164 networkDetails: networkDetails
18165 }));
18166 },
18167 onProgress: function onProgress(stats, context, data, networkDetails) {
18168 if (_onProgress) {
18169 _onProgress({
18170 frag: frag,
18171 part: null,
18172 payload: data,
18173 networkDetails: networkDetails
18174 });
18175 }
18176 }
18177 });
18178 });
18179 };
18180
18181 _proto.loadPart = function loadPart(frag, part, onProgress) {
18182 var _this2 = this;
18183
18184 this.abort();
18185 var config = this.config;
18186 var FragmentILoader = config.fLoader;
18187 var DefaultILoader = config.loader;
18188 return new Promise(function (resolve, reject) {
18189 if (_this2.loader) {
18190 _this2.loader.destroy();
18191 }
18192
18193 var loader = _this2.loader = frag.loader = FragmentILoader ? new FragmentILoader(config) : new DefaultILoader(config);
18194 var loaderContext = createLoaderContext(frag, part);
18195 var loaderConfig = {
18196 timeout: config.fragLoadingTimeOut,
18197 maxRetry: 0,
18198 retryDelay: 0,
18199 maxRetryDelay: config.fragLoadingMaxRetryTimeout,
18200 highWaterMark: MIN_CHUNK_SIZE
18201 }; // Assign part stats to the loader's stats reference
18202
18203 part.stats = loader.stats;
18204 loader.load(loaderContext, loaderConfig, {
18205 onSuccess: function onSuccess(response, stats, context, networkDetails) {
18206 _this2.resetLoader(frag, loader);
18207
18208 _this2.updateStatsFromPart(frag, part);
18209
18210 var partLoadedData = {
18211 frag: frag,
18212 part: part,
18213 payload: response.data,
18214 networkDetails: networkDetails
18215 };
18216 onProgress(partLoadedData);
18217 resolve(partLoadedData);
18218 },
18219 onError: function onError(response, context, networkDetails) {
18220 _this2.resetLoader(frag, loader);
18221
18222 reject(new LoadError({
18223 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
18224 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_LOAD_ERROR,
18225 fatal: false,
18226 frag: frag,
18227 part: part,
18228 response: response,
18229 networkDetails: networkDetails
18230 }));
18231 },
18232 onAbort: function onAbort(stats, context, networkDetails) {
18233 frag.stats.aborted = part.stats.aborted;
18234
18235 _this2.resetLoader(frag, loader);
18236
18237 reject(new LoadError({
18238 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
18239 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].INTERNAL_ABORTED,
18240 fatal: false,
18241 frag: frag,
18242 part: part,
18243 networkDetails: networkDetails
18244 }));
18245 },
18246 onTimeout: function onTimeout(response, context, networkDetails) {
18247 _this2.resetLoader(frag, loader);
18248
18249 reject(new LoadError({
18250 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
18251 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_LOAD_TIMEOUT,
18252 fatal: false,
18253 frag: frag,
18254 part: part,
18255 networkDetails: networkDetails
18256 }));
18257 }
18258 });
18259 });
18260 };
18261
18262 _proto.updateStatsFromPart = function updateStatsFromPart(frag, part) {
18263 var fragStats = frag.stats;
18264 var partStats = part.stats;
18265 var partTotal = partStats.total;
18266 fragStats.loaded += partStats.loaded;
18267
18268 if (partTotal) {
18269 var estTotalParts = Math.round(frag.duration / part.duration);
18270 var estLoadedParts = Math.min(Math.round(fragStats.loaded / partTotal), estTotalParts);
18271 var estRemainingParts = estTotalParts - estLoadedParts;
18272 var estRemainingBytes = estRemainingParts * Math.round(fragStats.loaded / estLoadedParts);
18273 fragStats.total = fragStats.loaded + estRemainingBytes;
18274 } else {
18275 fragStats.total = Math.max(fragStats.loaded, fragStats.total);
18276 }
18277
18278 var fragLoading = fragStats.loading;
18279 var partLoading = partStats.loading;
18280
18281 if (fragLoading.start) {
18282 // add to fragment loader latency
18283 fragLoading.first += partLoading.first - partLoading.start;
18284 } else {
18285 fragLoading.start = partLoading.start;
18286 fragLoading.first = partLoading.first;
18287 }
18288
18289 fragLoading.end = partLoading.end;
18290 };
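  /*
   * Worked example for the estimate above (illustrative numbers): a 4 s fragment
   * delivered as 1 s parts gives estTotalParts = round(4 / 1) = 4. If the last
   * part reported total = 100000 bytes and 200000 bytes are loaded so far,
   * estLoadedParts = min(round(200000 / 100000), 4) = 2, estRemainingParts = 2,
   * estRemainingBytes = 2 * round(200000 / 2) = 200000, so fragStats.total is
   * estimated as 200000 + 200000 = 400000 bytes.
   */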
18291
18292 _proto.resetLoader = function resetLoader(frag, loader) {
18293 frag.loader = null;
18294
18295 if (this.loader === loader) {
18296 self.clearTimeout(this.partLoadTimeout);
18297 this.loader = null;
18298 }
18299
18300 loader.destroy();
18301 };
18302
18303 return FragmentLoader;
18304}();
18305
18306
18307
18308function createLoaderContext(frag, part) {
18309 if (part === void 0) {
18310 part = null;
18311 }
18312
18313 var segment = part || frag;
18314 var loaderContext = {
18315 frag: frag,
18316 part: part,
18317 responseType: 'arraybuffer',
18318 url: segment.url,
18319 headers: {},
18320 rangeStart: 0,
18321 rangeEnd: 0
18322 };
18323 var start = segment.byteRangeStartOffset;
18324 var end = segment.byteRangeEndOffset;
18325
18326 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(start) && Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(end)) {
18327 loaderContext.rangeStart = start;
18328 loaderContext.rangeEnd = end;
18329 }
18330
18331 return loaderContext;
18332}
18333
18334var LoadError = /*#__PURE__*/function (_Error) {
18335 _inheritsLoose(LoadError, _Error);
18336
18337 function LoadError(data) {
18338 var _this3;
18339
18340 for (var _len = arguments.length, params = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
18341 params[_key - 1] = arguments[_key];
18342 }
18343
18344 _this3 = _Error.call.apply(_Error, [this].concat(params)) || this;
18345 _this3.data = void 0;
18346 _this3.data = data;
18347 return _this3;
18348 }
18349
18350 return LoadError;
18351}( /*#__PURE__*/_wrapNativeSuper(Error));
18352
18353/***/ }),
18354
18355/***/ "./src/loader/fragment.ts":
18356/*!********************************!*\
18357 !*** ./src/loader/fragment.ts ***!
18358 \********************************/
18359/*! exports provided: ElementaryStreamTypes, BaseSegment, Fragment, Part */
18360/***/ (function(module, __webpack_exports__, __webpack_require__) {
18361__webpack_require__.r(__webpack_exports__);
18362/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ElementaryStreamTypes", function() { return ElementaryStreamTypes; });
18363/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "BaseSegment", function() { return BaseSegment; });
18364/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "Fragment", function() { return Fragment; });
18365/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "Part", function() { return Part; });
18366/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
18367/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! url-toolkit */ "./node_modules/url-toolkit/src/url-toolkit.js");
18368/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
18369/* harmony import */ var _level_key__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./level-key */ "./src/loader/level-key.ts");
18370/* harmony import */ var _load_stats__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./load-stats */ "./src/loader/load-stats.ts");
18371
18372
18373
18374function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
18375
18376function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
18377
18378function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
18379
18380function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
18381
18382
18383
18384
18385
18386var ElementaryStreamTypes;
18387
18388(function (ElementaryStreamTypes) {
18389 ElementaryStreamTypes["AUDIO"] = "audio";
18390 ElementaryStreamTypes["VIDEO"] = "video";
18391 ElementaryStreamTypes["AUDIOVIDEO"] = "audiovideo";
18392})(ElementaryStreamTypes || (ElementaryStreamTypes = {}));
18393
18394var BaseSegment = /*#__PURE__*/function () {
18395 // baseurl is the URL to the playlist
18396 // relurl is the portion of the URL that comes from inside the playlist.
18397 // Holds the types of data this fragment supports
18398 function BaseSegment(baseurl) {
18399 var _this$elementaryStrea;
18400
18401 this._byteRange = null;
18402 this._url = null;
18403 this.baseurl = void 0;
18404 this.relurl = void 0;
18405 this.elementaryStreams = (_this$elementaryStrea = {}, _this$elementaryStrea[ElementaryStreamTypes.AUDIO] = null, _this$elementaryStrea[ElementaryStreamTypes.VIDEO] = null, _this$elementaryStrea[ElementaryStreamTypes.AUDIOVIDEO] = null, _this$elementaryStrea);
18406 this.baseurl = baseurl;
18407 } // setByteRange converts an EXT-X-BYTERANGE attribute into a two-element array
18408
18409
18410 var _proto = BaseSegment.prototype;
18411
18412 _proto.setByteRange = function setByteRange(value, previous) {
18413 var params = value.split('@', 2);
18414 var byteRange = [];
18415
18416 if (params.length === 1) {
18417 byteRange[0] = previous ? previous.byteRangeEndOffset : 0;
18418 } else {
18419 byteRange[0] = parseInt(params[1]);
18420 }
18421
18422 byteRange[1] = parseInt(params[0]) + byteRange[0];
18423 this._byteRange = byteRange;
18424 };
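  /*
   * Worked example (hypothetical playlist values): the byte range value has the
   * form "<length>@<offset>".
   *   setByteRange('1000@2000')      -> _byteRange [2000, 3000]
   *                                     (1000 bytes starting at offset 2000)
   *   setByteRange('500', previous)  -> starts where the previous segment ended,
   *                                     e.g. previous.byteRangeEndOffset = 3000
   *                                     gives _byteRange [3000, 3500]
   */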
18425
18426 _createClass(BaseSegment, [{
18427 key: "byteRange",
18428 get: function get() {
18429 if (!this._byteRange) {
18430 return [];
18431 }
18432
18433 return this._byteRange;
18434 }
18435 }, {
18436 key: "byteRangeStartOffset",
18437 get: function get() {
18438 return this.byteRange[0];
18439 }
18440 }, {
18441 key: "byteRangeEndOffset",
18442 get: function get() {
18443 return this.byteRange[1];
18444 }
18445 }, {
18446 key: "url",
18447 get: function get() {
18448 if (!this._url && this.baseurl && this.relurl) {
18449 this._url = Object(url_toolkit__WEBPACK_IMPORTED_MODULE_1__["buildAbsoluteURL"])(this.baseurl, this.relurl, {
18450 alwaysNormalize: true
18451 });
18452 }
18453
18454 return this._url || '';
18455 },
18456 set: function set(value) {
18457 this._url = value;
18458 }
18459 }]);
18460
18461 return BaseSegment;
18462}();
18463var Fragment = /*#__PURE__*/function (_BaseSegment) {
18464 _inheritsLoose(Fragment, _BaseSegment);
18465
18466 // EXTINF has to be present for an m3u8 segment to be considered valid
18467 // sn notates the sequence number for a segment, and if set to a string can be 'initSegment'
18468 // levelkey is the EXT-X-KEY that applies to this segment for decryption
18469 // core difference from the private field _decryptdata is the lack of the initialized IV
18470 // _decryptdata will set the IV for this segment based on the segment number in the fragment
18471 // A string representing the fragment type
18472 // A reference to the loader. Set while the fragment is loading, and removed afterwards. Used to abort fragment loading
18473 // The level/track index to which the fragment belongs
18474 // The continuity counter of the fragment
18475 // The starting Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
18476 // The ending Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
18477 // The latest Presentation Time Stamp (PTS) appended to the buffer.
18478 // The starting Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
18479 // The ending Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
18480 // The start time of the fragment, as listed in the manifest. Updated after transmux complete.
18481 // Set by `updateFragPTSDTS` in level-helper
18482 // The maximum starting Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
18483 // The minimum ending Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
18484 // Load/parse timing information
18485 // A flag indicating whether the segment was downloaded in order to test bitrate, and was not buffered
18486 // #EXTINF segment title
18487 // The Media Initialization Section for this segment
18488 function Fragment(type, baseurl) {
18489 var _this;
18490
18491 _this = _BaseSegment.call(this, baseurl) || this;
18492 _this._decryptdata = null;
18493 _this.rawProgramDateTime = null;
18494 _this.programDateTime = null;
18495 _this.tagList = [];
18496 _this.duration = 0;
18497 _this.sn = 0;
18498 _this.levelkey = void 0;
18499 _this.type = void 0;
18500 _this.loader = null;
18501 _this.level = -1;
18502 _this.cc = 0;
18503 _this.startPTS = void 0;
18504 _this.endPTS = void 0;
18505 _this.appendedPTS = void 0;
18506 _this.startDTS = void 0;
18507 _this.endDTS = void 0;
18508 _this.start = 0;
18509 _this.deltaPTS = void 0;
18510 _this.maxStartPTS = void 0;
18511 _this.minEndPTS = void 0;
18512 _this.stats = new _load_stats__WEBPACK_IMPORTED_MODULE_4__["LoadStats"]();
18513 _this.urlId = 0;
18514 _this.data = void 0;
18515 _this.bitrateTest = false;
18516 _this.title = null;
18517 _this.initSegment = null;
18518 _this.type = type;
18519 return _this;
18520 }
18521
18522 var _proto2 = Fragment.prototype;
18523
18524 /**
18525 * Utility method for parseLevelPlaylist to create an initialization vector for a given segment
18526 * @param {number} segmentNumber - segment number to generate IV with
18527 * @returns {Uint8Array}
18528 */
18529 _proto2.createInitializationVector = function createInitializationVector(segmentNumber) {
18530 var uint8View = new Uint8Array(16);
18531
18532 for (var i = 12; i < 16; i++) {
18533 uint8View[i] = segmentNumber >> 8 * (15 - i) & 0xff;
18534 }
18535
18536 return uint8View;
18537 }
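  /*
   * Worked example: createInitializationVector(5) returns a 16-byte Uint8Array
   * that is all zeros except the last four bytes, which hold the segment number
   * big-endian: [0, ..., 0, 0, 0, 0, 5]. This mirrors the HLS rule of deriving
   * the AES-128 IV from the media sequence number when no IV attribute is given.
   */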
18538 /**
18539 * Utility method for parseLevelPlaylist to get a fragment's decryption data from the currently parsed encryption key data
18540 * @param levelkey - a playlist's encryption info
18541 * @param segmentNumber - the fragment's segment number
18542 * @returns {LevelKey} - an object to be applied as a fragment's decryptdata
18543 */
18544 ;
18545
18546 _proto2.setDecryptDataFromLevelKey = function setDecryptDataFromLevelKey(levelkey, segmentNumber) {
18547 var decryptdata = levelkey;
18548
18549 if ((levelkey === null || levelkey === void 0 ? void 0 : levelkey.method) === 'AES-128' && levelkey.uri && !levelkey.iv) {
18550 decryptdata = _level_key__WEBPACK_IMPORTED_MODULE_3__["LevelKey"].fromURI(levelkey.uri);
18551 decryptdata.method = levelkey.method;
18552 decryptdata.iv = this.createInitializationVector(segmentNumber);
18553 decryptdata.keyFormat = 'identity';
18554 }
18555
18556 return decryptdata;
18557 };
18558
18559 _proto2.setElementaryStreamInfo = function setElementaryStreamInfo(type, startPTS, endPTS, startDTS, endDTS, partial) {
18560 if (partial === void 0) {
18561 partial = false;
18562 }
18563
18564 var elementaryStreams = this.elementaryStreams;
18565 var info = elementaryStreams[type];
18566
18567 if (!info) {
18568 elementaryStreams[type] = {
18569 startPTS: startPTS,
18570 endPTS: endPTS,
18571 startDTS: startDTS,
18572 endDTS: endDTS,
18573 partial: partial
18574 };
18575 return;
18576 }
18577
18578 info.startPTS = Math.min(info.startPTS, startPTS);
18579 info.endPTS = Math.max(info.endPTS, endPTS);
18580 info.startDTS = Math.min(info.startDTS, startDTS);
18581 info.endDTS = Math.max(info.endDTS, endDTS);
18582 };
18583
18584 _proto2.clearElementaryStreamInfo = function clearElementaryStreamInfo() {
18585 var elementaryStreams = this.elementaryStreams;
18586 elementaryStreams[ElementaryStreamTypes.AUDIO] = null;
18587 elementaryStreams[ElementaryStreamTypes.VIDEO] = null;
18588 elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO] = null;
18589 };
18590
18591 _createClass(Fragment, [{
18592 key: "decryptdata",
18593 get: function get() {
18594 if (!this.levelkey && !this._decryptdata) {
18595 return null;
18596 }
18597
18598 if (!this._decryptdata && this.levelkey) {
18599 var sn = this.sn;
18600
18601 if (typeof sn !== 'number') {
18602 // We are fetching decryption data for an initialization segment
18603 // If the segment was encrypted with AES-128
18604 // It must have an IV defined. We cannot substitute the Segment Number in.
18605 if (this.levelkey && this.levelkey.method === 'AES-128' && !this.levelkey.iv) {
18606 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("missing IV for initialization segment with method=\"" + this.levelkey.method + "\" - compliance issue");
18607 }
18608 /*
18609 A non-numeric sn such as 'initSegment' would be implicitly
18610 converted to a Number: 'initSegment' becomes NaN,
18611 and NaN converted through ToInt32() yields +0.
18612 Explicitly set sn to 0 so IV generation matches the result
18613 of that implicit conversion.
18614 */
18615
18616
18617 sn = 0;
18618 }
18619
18620 this._decryptdata = this.setDecryptDataFromLevelKey(this.levelkey, sn);
18621 }
18622
18623 return this._decryptdata;
18624 }
18625 }, {
18626 key: "end",
18627 get: function get() {
18628 return this.start + this.duration;
18629 }
18630 }, {
18631 key: "endProgramDateTime",
18632 get: function get() {
18633 if (this.programDateTime === null) {
18634 return null;
18635 }
18636
18637 if (!Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this.programDateTime)) {
18638 return null;
18639 }
18640
18641 var duration = !Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this.duration) ? 0 : this.duration;
18642 return this.programDateTime + duration * 1000;
18643 }
18644 }, {
18645 key: "encrypted",
18646 get: function get() {
18647 var _this$decryptdata;
18648
18649 // At the m3u8-parser level we need to add support for manifest signalled keyformats
18650 // when we want the fragment to start reporting that it is encrypted.
18651 // Currently, keyFormat will only be set for identity keys
18652 if ((_this$decryptdata = this.decryptdata) !== null && _this$decryptdata !== void 0 && _this$decryptdata.keyFormat && this.decryptdata.uri) {
18653 return true;
18654 }
18655
18656 return false;
18657 }
18658 }]);
18659
18660 return Fragment;
18661}(BaseSegment);
18662var Part = /*#__PURE__*/function (_BaseSegment2) {
18663 _inheritsLoose(Part, _BaseSegment2);
18664
18665 function Part(partAttrs, frag, baseurl, index, previous) {
18666 var _this2;
18667
18668 _this2 = _BaseSegment2.call(this, baseurl) || this;
18669 _this2.fragOffset = 0;
18670 _this2.duration = 0;
18671 _this2.gap = false;
18672 _this2.independent = false;
18673 _this2.relurl = void 0;
18674 _this2.fragment = void 0;
18675 _this2.index = void 0;
18676 _this2.stats = new _load_stats__WEBPACK_IMPORTED_MODULE_4__["LoadStats"]();
18677 _this2.duration = partAttrs.decimalFloatingPoint('DURATION');
18678 _this2.gap = partAttrs.bool('GAP');
18679 _this2.independent = partAttrs.bool('INDEPENDENT');
18680 _this2.relurl = partAttrs.enumeratedString('URI');
18681 _this2.fragment = frag;
18682 _this2.index = index;
18683 var byteRange = partAttrs.enumeratedString('BYTERANGE');
18684
18685 if (byteRange) {
18686 _this2.setByteRange(byteRange, previous);
18687 }
18688
18689 if (previous) {
18690 _this2.fragOffset = previous.fragOffset + previous.duration;
18691 }
18692
18693 return _this2;
18694 }
18695
18696 _createClass(Part, [{
18697 key: "start",
18698 get: function get() {
18699 return this.fragment.start + this.fragOffset;
18700 }
18701 }, {
18702 key: "end",
18703 get: function get() {
18704 return this.start + this.duration;
18705 }
18706 }, {
18707 key: "loaded",
18708 get: function get() {
18709 var elementaryStreams = this.elementaryStreams;
18710 return !!(elementaryStreams.audio || elementaryStreams.video || elementaryStreams.audiovideo);
18711 }
18712 }]);
18713
18714 return Part;
18715}(BaseSegment);
18716
18717/***/ }),
18718
18719/***/ "./src/loader/key-loader.ts":
18720/*!**********************************!*\
18721 !*** ./src/loader/key-loader.ts ***!
18722 \**********************************/
18723/*! exports provided: default */
18724/***/ (function(module, __webpack_exports__, __webpack_require__) {
18725__webpack_require__.r(__webpack_exports__);
18726/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return KeyLoader; });
18727/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
18728/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
18729/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
18730/*
18731 * Decrypt key Loader
18732 */
18733
18734
18735
18736
18737var KeyLoader = /*#__PURE__*/function () {
18738 function KeyLoader(hls) {
18739 this.hls = void 0;
18740 this.loaders = {};
18741 this.decryptkey = null;
18742 this.decrypturl = null;
18743 this.hls = hls;
18744
18745 this._registerListeners();
18746 }
18747
18748 var _proto = KeyLoader.prototype;
18749
18750 _proto._registerListeners = function _registerListeners() {
18751 this.hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].KEY_LOADING, this.onKeyLoading, this);
18752 };
18753
18754 _proto._unregisterListeners = function _unregisterListeners() {
18755 this.hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].KEY_LOADING, this.onKeyLoading);
18756 };
18757
18758 _proto.destroy = function destroy() {
18759 this._unregisterListeners();
18760
18761 for (var loaderName in this.loaders) {
18762 var loader = this.loaders[loaderName];
18763
18764 if (loader) {
18765 loader.destroy();
18766 }
18767 }
18768
18769 this.loaders = {};
18770 };
18771
18772 _proto.onKeyLoading = function onKeyLoading(event, data) {
18773 var frag = data.frag;
18774 var type = frag.type;
18775 var loader = this.loaders[type];
18776
18777 if (!frag.decryptdata) {
18778 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('Missing decryption data on fragment in onKeyLoading');
18779 return;
18780 } // Load the key if the uri is different from the previous one, or if the decrypt key has not yet been retrieved
18781
18782
18783 var uri = frag.decryptdata.uri;
18784
18785 if (uri !== this.decrypturl || this.decryptkey === null) {
18786 var config = this.hls.config;
18787
18788 if (loader) {
18789 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("abort previous key loader for type:" + type);
18790 loader.abort();
18791 }
18792
18793 if (!uri) {
18794 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('key uri is falsy');
18795 return;
18796 }
18797
18798 var Loader = config.loader;
18799 var fragLoader = frag.loader = this.loaders[type] = new Loader(config);
18800 this.decrypturl = uri;
18801 this.decryptkey = null;
18802 var loaderContext = {
18803 url: uri,
18804 frag: frag,
18805 responseType: 'arraybuffer'
18806 }; // maxRetry is 0 so that instead of retrying the same key on the same variant multiple times,
18807 // key-loader will trigger an error and rely on stream-controller to handle retry logic.
18808 // this will also align retry logic with fragment-loader
18809
18810 var loaderConfig = {
18811 timeout: config.fragLoadingTimeOut,
18812 maxRetry: 0,
18813 retryDelay: config.fragLoadingRetryDelay,
18814 maxRetryDelay: config.fragLoadingMaxRetryTimeout,
18815 highWaterMark: 0
18816 };
18817 var loaderCallbacks = {
18818 onSuccess: this.loadsuccess.bind(this),
18819 onError: this.loaderror.bind(this),
18820 onTimeout: this.loadtimeout.bind(this)
18821 };
18822 fragLoader.load(loaderContext, loaderConfig, loaderCallbacks);
18823 } else if (this.decryptkey) {
18824 // Return the key if it's already been loaded
18825 frag.decryptdata.key = this.decryptkey;
18826 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].KEY_LOADED, {
18827 frag: frag
18828 });
18829 }
18830 };
18831
18832 _proto.loadsuccess = function loadsuccess(response, stats, context) {
18833 var frag = context.frag;
18834
18835 if (!frag.decryptdata) {
18836 _utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('after key load, decryptdata unset');
18837 return;
18838 }
18839
18840 this.decryptkey = frag.decryptdata.key = new Uint8Array(response.data); // detach fragment loader on load success
18841
18842 frag.loader = null;
18843 delete this.loaders[frag.type];
18844 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].KEY_LOADED, {
18845 frag: frag
18846 });
18847 };
18848
18849 _proto.loaderror = function loaderror(response, context) {
18850 var frag = context.frag;
18851 var loader = frag.loader;
18852
18853 if (loader) {
18854 loader.abort();
18855 }
18856
18857 delete this.loaders[frag.type];
18858 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
18859 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
18860 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_LOAD_ERROR,
18861 fatal: false,
18862 frag: frag,
18863 response: response
18864 });
18865 };
18866
18867 _proto.loadtimeout = function loadtimeout(stats, context) {
18868 var frag = context.frag;
18869 var loader = frag.loader;
18870
18871 if (loader) {
18872 loader.abort();
18873 }
18874
18875 delete this.loaders[frag.type];
18876 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
18877 type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
18878 details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_LOAD_TIMEOUT,
18879 fatal: false,
18880 frag: frag
18881 });
18882 };
18883
18884 return KeyLoader;
18885}();
18886
18887
18888
18889/***/ }),
18890
18891/***/ "./src/loader/level-details.ts":
18892/*!*************************************!*\
18893 !*** ./src/loader/level-details.ts ***!
18894 \*************************************/
18895/*! exports provided: LevelDetails */
18896/***/ (function(module, __webpack_exports__, __webpack_require__) {
18897__webpack_require__.r(__webpack_exports__);
18898/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "LevelDetails", function() { return LevelDetails; });
18899/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
18900
18901
18902function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
18903
18904function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
18905
18906var DEFAULT_TARGET_DURATION = 10;
18907var LevelDetails = /*#__PURE__*/function () {
18908 // Manifest reload synchronization
18909 function LevelDetails(baseUrl) {
18910 this.PTSKnown = false;
18911 this.alignedSliding = false;
18912 this.averagetargetduration = void 0;
18913 this.endCC = 0;
18914 this.endSN = 0;
18915 this.fragments = void 0;
18916 this.fragmentHint = void 0;
18917 this.partList = null;
18918 this.live = true;
18919 this.ageHeader = 0;
18920 this.advancedDateTime = void 0;
18921 this.updated = true;
18922 this.advanced = true;
18923 this.availabilityDelay = void 0;
18924 this.misses = 0;
18925 this.needSidxRanges = false;
18926 this.startCC = 0;
18927 this.startSN = 0;
18928 this.startTimeOffset = null;
18929 this.targetduration = 0;
18930 this.totalduration = 0;
18931 this.type = null;
18932 this.url = void 0;
18933 this.m3u8 = '';
18934 this.version = null;
18935 this.canBlockReload = false;
18936 this.canSkipUntil = 0;
18937 this.canSkipDateRanges = false;
18938 this.skippedSegments = 0;
18939 this.recentlyRemovedDateranges = void 0;
18940 this.partHoldBack = 0;
18941 this.holdBack = 0;
18942 this.partTarget = 0;
18943 this.preloadHint = void 0;
18944 this.renditionReports = void 0;
18945 this.tuneInGoal = 0;
18946 this.deltaUpdateFailed = void 0;
18947 this.driftStartTime = 0;
18948 this.driftEndTime = 0;
18949 this.driftStart = 0;
18950 this.driftEnd = 0;
18951 this.fragments = [];
18952 this.url = baseUrl;
18953 }
18954
18955 var _proto = LevelDetails.prototype;
18956
18957 _proto.reloaded = function reloaded(previous) {
18958 if (!previous) {
18959 this.advanced = true;
18960 this.updated = true;
18961 return;
18962 }
18963
18964 var partSnDiff = this.lastPartSn - previous.lastPartSn;
18965 var partIndexDiff = this.lastPartIndex - previous.lastPartIndex;
18966 this.updated = this.endSN !== previous.endSN || !!partIndexDiff || !!partSnDiff;
18967 this.advanced = this.endSN > previous.endSN || partSnDiff > 0 || partSnDiff === 0 && partIndexDiff > 0;
18968
18969 if (this.updated || this.advanced) {
18970 this.misses = Math.floor(previous.misses * 0.6);
18971 } else {
18972 this.misses = previous.misses + 1;
18973 }
18974
18975 this.availabilityDelay = previous.availabilityDelay;
18976 };
18977
18978 _createClass(LevelDetails, [{
18979 key: "hasProgramDateTime",
18980 get: function get() {
18981 if (this.fragments.length) {
18982 return Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this.fragments[this.fragments.length - 1].programDateTime);
18983 }
18984
18985 return false;
18986 }
18987 }, {
18988 key: "levelTargetDuration",
18989 get: function get() {
18990 return this.averagetargetduration || this.targetduration || DEFAULT_TARGET_DURATION;
18991 }
18992 }, {
18993 key: "drift",
18994 get: function get() {
18995 var runTime = this.driftEndTime - this.driftStartTime;
18996
18997 if (runTime > 0) {
18998 var runDuration = this.driftEnd - this.driftStart;
18999 return runDuration * 1000 / runTime;
19000 }
19001
19002 return 1;
19003 }
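    /*
     * Worked example: if driftEnd - driftStart = 8 (the playlist end advanced by
     * 8 seconds) over driftEndTime - driftStartTime = 10000 ms of wall-clock time,
     * drift = 8 * 1000 / 10000 = 0.8, i.e. the playlist is advancing at 0.8x real time.
     */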
19004 }, {
19005 key: "edge",
19006 get: function get() {
19007 return this.partEnd || this.fragmentEnd;
19008 }
19009 }, {
19010 key: "partEnd",
19011 get: function get() {
19012 var _this$partList;
19013
19014 if ((_this$partList = this.partList) !== null && _this$partList !== void 0 && _this$partList.length) {
19015 return this.partList[this.partList.length - 1].end;
19016 }
19017
19018 return this.fragmentEnd;
19019 }
19020 }, {
19021 key: "fragmentEnd",
19022 get: function get() {
19023 var _this$fragments;
19024
19025 if ((_this$fragments = this.fragments) !== null && _this$fragments !== void 0 && _this$fragments.length) {
19026 return this.fragments[this.fragments.length - 1].end;
19027 }
19028
19029 return 0;
19030 }
19031 }, {
19032 key: "age",
19033 get: function get() {
19034 if (this.advancedDateTime) {
19035 return Math.max(Date.now() - this.advancedDateTime, 0) / 1000;
19036 }
19037
19038 return 0;
19039 }
19040 }, {
19041 key: "lastPartIndex",
19042 get: function get() {
19043 var _this$partList2;
19044
19045 if ((_this$partList2 = this.partList) !== null && _this$partList2 !== void 0 && _this$partList2.length) {
19046 return this.partList[this.partList.length - 1].index;
19047 }
19048
19049 return -1;
19050 }
19051 }, {
19052 key: "lastPartSn",
19053 get: function get() {
19054 var _this$partList3;
19055
19056 if ((_this$partList3 = this.partList) !== null && _this$partList3 !== void 0 && _this$partList3.length) {
19057 return this.partList[this.partList.length - 1].fragment.sn;
19058 }
19059
19060 return this.endSN;
19061 }
19062 }]);
19063
19064 return LevelDetails;
19065}();
19066
19067/***/ }),
19068
19069/***/ "./src/loader/level-key.ts":
19070/*!*********************************!*\
19071 !*** ./src/loader/level-key.ts ***!
19072 \*********************************/
19073/*! exports provided: LevelKey */
19074/***/ (function(module, __webpack_exports__, __webpack_require__) {
19075__webpack_require__.r(__webpack_exports__);
19076/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "LevelKey", function() { return LevelKey; });
19077/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! url-toolkit */ "./node_modules/url-toolkit/src/url-toolkit.js");
19078function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
19079
19080function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
19081
19082
19083var LevelKey = /*#__PURE__*/function () {
19084 LevelKey.fromURL = function fromURL(baseUrl, relativeUrl) {
19085 return new LevelKey(baseUrl, relativeUrl);
19086 };
19087
19088 LevelKey.fromURI = function fromURI(uri) {
19089 return new LevelKey(uri);
19090 };
19091
19092 function LevelKey(absoluteOrBaseURI, relativeURL) {
19093 this._uri = null;
19094 this.method = null;
19095 this.keyFormat = null;
19096 this.keyFormatVersions = null;
19097 this.keyID = null;
19098 this.key = null;
19099 this.iv = null;
19100
19101 if (relativeURL) {
19102 this._uri = Object(url_toolkit__WEBPACK_IMPORTED_MODULE_0__["buildAbsoluteURL"])(absoluteOrBaseURI, relativeURL, {
19103 alwaysNormalize: true
19104 });
19105 } else {
19106 this._uri = absoluteOrBaseURI;
19107 }
19108 }
19109
19110 _createClass(LevelKey, [{
19111 key: "uri",
19112 get: function get() {
19113 return this._uri;
19114 }
19115 }]);
19116
19117 return LevelKey;
19118}();
19119
19120/***/ }),
19121
19122/***/ "./src/loader/load-stats.ts":
19123/*!**********************************!*\
19124 !*** ./src/loader/load-stats.ts ***!
19125 \**********************************/
19126/*! exports provided: LoadStats */
19127/***/ (function(module, __webpack_exports__, __webpack_require__) {
19128__webpack_require__.r(__webpack_exports__);
19129/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "LoadStats", function() { return LoadStats; });
19130var LoadStats = function LoadStats() {
19131 this.aborted = false;
19132 this.loaded = 0;
19133 this.retry = 0;
19134 this.total = 0;
19135 this.chunkCount = 0;
19136 this.bwEstimate = 0;
19137 this.loading = {
19138 start: 0,
19139 first: 0,
19140 end: 0
19141 };
19142 this.parsing = {
19143 start: 0,
19144 end: 0
19145 };
19146 this.buffering = {
19147 start: 0,
19148 first: 0,
19149 end: 0
19150 };
19151};
19152
19153/***/ }),
19154
19155/***/ "./src/loader/m3u8-parser.ts":
19156/*!***********************************!*\
19157 !*** ./src/loader/m3u8-parser.ts ***!
19158 \***********************************/
19159/*! exports provided: default */
19160/***/ (function(module, __webpack_exports__, __webpack_require__) {
19161__webpack_require__.r(__webpack_exports__);
19162/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return M3U8Parser; });
19163/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
19164/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! url-toolkit */ "./node_modules/url-toolkit/src/url-toolkit.js");
19165/* harmony import */ var _fragment__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./fragment */ "./src/loader/fragment.ts");
19166/* harmony import */ var _level_details__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./level-details */ "./src/loader/level-details.ts");
19167/* harmony import */ var _level_key__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./level-key */ "./src/loader/level-key.ts");
19168/* harmony import */ var _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/attr-list */ "./src/utils/attr-list.ts");
19169/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
19170/* harmony import */ var _utils_codecs__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../utils/codecs */ "./src/utils/codecs.ts");
19171
19172
19173
19174
19175
19176
19177
19178
19179
19180
19181
19182// https://regex101.com is your friend
19183var MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-SESSION-DATA:([^\r\n]*)[\r\n]+/g;
19184var MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
19185var LEVEL_PLAYLIST_REGEX_FAST = new RegExp([/#EXTINF:\s*(\d*(?:\.\d+)?)(?:,(.*)\s+)?/.source, // duration (#EXTINF:<duration>,<title>), group 1 => duration, group 2 => title
19186/(?!#) *(\S[\S ]*)/.source, // segment URI, group 3 => the URI (note newline is not eaten)
19187/#EXT-X-BYTERANGE:*(.+)/.source, // next segment's byterange, group 4 => range spec (x@y)
19188/#EXT-X-PROGRAM-DATE-TIME:(.+)/.source, // next segment's program date/time group 5 => the datetime spec
19189/#.*/.source // All other non-segment oriented tags will match with all groups empty
19190].join('|'), 'g');
19191var LEVEL_PLAYLIST_REGEX_SLOW = new RegExp([/#(EXTM3U)/.source, /#EXT-X-(PLAYLIST-TYPE):(.+)/.source, /#EXT-X-(MEDIA-SEQUENCE): *(\d+)/.source, /#EXT-X-(SKIP):(.+)/.source, /#EXT-X-(TARGETDURATION): *(\d+)/.source, /#EXT-X-(KEY):(.+)/.source, /#EXT-X-(START):(.+)/.source, /#EXT-X-(ENDLIST)/.source, /#EXT-X-(DISCONTINUITY-SEQ)UENCE: *(\d+)/.source, /#EXT-X-(DIS)CONTINUITY/.source, /#EXT-X-(VERSION):(\d+)/.source, /#EXT-X-(MAP):(.+)/.source, /#EXT-X-(SERVER-CONTROL):(.+)/.source, /#EXT-X-(PART-INF):(.+)/.source, /#EXT-X-(GAP)/.source, /#EXT-X-(BITRATE):\s*(\d+)/.source, /#EXT-X-(PART):(.+)/.source, /#EXT-X-(PRELOAD-HINT):(.+)/.source, /#EXT-X-(RENDITION-REPORT):(.+)/.source, /(#)([^:]*):(.*)/.source, /(#)(.*)(?:.*)\r?\n?/.source].join('|'));
19192var MP4_REGEX_SUFFIX = /\.(mp4|m4s|m4v|m4a)$/i;
19193
19194function isMP4Url(url) {
19195 var _URLToolkit$parseURL$, _URLToolkit$parseURL;
19196
19197 return MP4_REGEX_SUFFIX.test((_URLToolkit$parseURL$ = (_URLToolkit$parseURL = url_toolkit__WEBPACK_IMPORTED_MODULE_1__["parseURL"](url)) === null || _URLToolkit$parseURL === void 0 ? void 0 : _URLToolkit$parseURL.path) != null ? _URLToolkit$parseURL$ : '');
19198}
19199
19200var M3U8Parser = /*#__PURE__*/function () {
19201 function M3U8Parser() {}
19202
19203 M3U8Parser.findGroup = function findGroup(groups, mediaGroupId) {
19204 for (var i = 0; i < groups.length; i++) {
19205 var group = groups[i];
19206
19207 if (group.id === mediaGroupId) {
19208 return group;
19209 }
19210 }
19211 };
19212
19213 M3U8Parser.convertAVC1ToAVCOTI = function convertAVC1ToAVCOTI(codec) {
19214 // Convert avc1 codec string from RFC-4281 to RFC-6381 for MediaSource.isTypeSupported
19215 var avcdata = codec.split('.');
19216
19217 if (avcdata.length > 2) {
19218 var result = avcdata.shift() + '.';
19219 result += parseInt(avcdata.shift()).toString(16);
19220 result += ('000' + parseInt(avcdata.shift()).toString(16)).substr(-4);
19221 return result;
19222 }
19223
19224 return codec;
19225 };
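  /*
   * Worked example: convertAVC1ToAVCOTI('avc1.77.30') returns 'avc1.4d001e'
   * (77 -> 0x4d, 30 -> 0x1e padded to four hex digits), the RFC-6381 form that
   * MediaSource.isTypeSupported expects. Codec strings with fewer than three
   * dot-separated parts are returned unchanged.
   */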
19226
19227 M3U8Parser.resolve = function resolve(url, baseUrl) {
19228 return url_toolkit__WEBPACK_IMPORTED_MODULE_1__["buildAbsoluteURL"](baseUrl, url, {
19229 alwaysNormalize: true
19230 });
19231 };
19232
19233 M3U8Parser.parseMasterPlaylist = function parseMasterPlaylist(string, baseurl) {
19234 var levels = [];
19235 var sessionData = {};
19236 var hasSessionData = false;
19237 MASTER_PLAYLIST_REGEX.lastIndex = 0;
19238 var result;
19239
19240 while ((result = MASTER_PLAYLIST_REGEX.exec(string)) != null) {
19241 if (result[1]) {
19242 // '#EXT-X-STREAM-INF' is found, parse level tag in group 1
19243 var attrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](result[1]);
19244 var level = {
19245 attrs: attrs,
19246 bitrate: attrs.decimalInteger('AVERAGE-BANDWIDTH') || attrs.decimalInteger('BANDWIDTH'),
19247 name: attrs.NAME,
19248 url: M3U8Parser.resolve(result[2], baseurl)
19249 };
19250 var resolution = attrs.decimalResolution('RESOLUTION');
19251
19252 if (resolution) {
19253 level.width = resolution.width;
19254 level.height = resolution.height;
19255 }
19256
19257 setCodecs((attrs.CODECS || '').split(/[ ,]+/).filter(function (c) {
19258 return c;
19259 }), level);
19260
19261 if (level.videoCodec && level.videoCodec.indexOf('avc1') !== -1) {
19262 level.videoCodec = M3U8Parser.convertAVC1ToAVCOTI(level.videoCodec);
19263 }
19264
19265 levels.push(level);
19266 } else if (result[3]) {
19267 // '#EXT-X-SESSION-DATA' is found, parse session data in group 3
19268 var sessionAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](result[3]);
19269
19270 if (sessionAttrs['DATA-ID']) {
19271 hasSessionData = true;
19272 sessionData[sessionAttrs['DATA-ID']] = sessionAttrs;
19273 }
19274 }
19275 }
19276
19277 return {
19278 levels: levels,
19279 sessionData: hasSessionData ? sessionData : null
19280 };
19281 };
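  /*
   * Illustrative input/output (hypothetical manifest lines):
   *   #EXT-X-STREAM-INF:BANDWIDTH=1400000,RESOLUTION=1280x720,CODECS="avc1.4d001f,mp4a.40.2"
   *   low/index.m3u8
   * yields one entry in `levels` with bitrate 1400000, width 1280, height 720,
   * url resolved against `baseurl`, and videoCodec/audioCodec filled in from the
   * CODECS attribute by setCodecs.
   */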
19282
19283 M3U8Parser.parseMasterPlaylistMedia = function parseMasterPlaylistMedia(string, baseurl, type, groups) {
19284 if (groups === void 0) {
19285 groups = [];
19286 }
19287
19288 var result;
19289 var medias = [];
19290 var id = 0;
19291 MASTER_PLAYLIST_MEDIA_REGEX.lastIndex = 0;
19292
19293 while ((result = MASTER_PLAYLIST_MEDIA_REGEX.exec(string)) !== null) {
19294 var attrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](result[1]);
19295
19296 if (attrs.TYPE === type) {
19297 var media = {
19298 attrs: attrs,
19299 bitrate: 0,
19300 id: id++,
19301 groupId: attrs['GROUP-ID'],
19302 instreamId: attrs['INSTREAM-ID'],
19303 name: attrs.NAME || attrs.LANGUAGE || '',
19304 type: type,
19305 default: attrs.bool('DEFAULT'),
19306 autoselect: attrs.bool('AUTOSELECT'),
19307 forced: attrs.bool('FORCED'),
19308 lang: attrs.LANGUAGE,
19309 url: attrs.URI ? M3U8Parser.resolve(attrs.URI, baseurl) : ''
19310 };
19311
19312 if (groups.length) {
19313 // If there are audio or text groups signalled in the manifest, let's look for a matching codec string for this track
19314 // If we don't find the track signalled, let's use the first audio group's codec we have
19315 // Acting as a best guess
19316 var groupCodec = M3U8Parser.findGroup(groups, media.groupId) || groups[0];
19317 assignCodec(media, groupCodec, 'audioCodec');
19318 assignCodec(media, groupCodec, 'textCodec');
19319 }
19320
19321 medias.push(media);
19322 }
19323 }
19324
19325 return medias;
19326 };
19327
19328 M3U8Parser.parseLevelPlaylist = function parseLevelPlaylist(string, baseurl, id, type, levelUrlId) {
19329 var level = new _level_details__WEBPACK_IMPORTED_MODULE_3__["LevelDetails"](baseurl);
19330 var fragments = level.fragments; // The most recent init segment seen (applies to all subsequent segments)
19331
19332 var currentInitSegment = null;
19333 var currentSN = 0;
19334 var currentPart = 0;
19335 var totalduration = 0;
19336 var discontinuityCounter = 0;
19337 var prevFrag = null;
19338 var frag = new _fragment__WEBPACK_IMPORTED_MODULE_2__["Fragment"](type, baseurl);
19339 var result;
19340 var i;
19341 var levelkey;
19342 var firstPdtIndex = -1;
19343 var createNextFrag = false;
19344 LEVEL_PLAYLIST_REGEX_FAST.lastIndex = 0;
19345 level.m3u8 = string;
19346
19347 while ((result = LEVEL_PLAYLIST_REGEX_FAST.exec(string)) !== null) {
19348 if (createNextFrag) {
19349 createNextFrag = false;
19350 frag = new _fragment__WEBPACK_IMPORTED_MODULE_2__["Fragment"](type, baseurl); // setup the next fragment for part loading
19351
19352 frag.start = totalduration;
19353 frag.sn = currentSN;
19354 frag.cc = discontinuityCounter;
19355 frag.level = id;
19356
19357 if (currentInitSegment) {
19358 frag.initSegment = currentInitSegment;
19359 frag.rawProgramDateTime = currentInitSegment.rawProgramDateTime;
19360 }
19361 }
19362
19363 var duration = result[1];
19364
19365 if (duration) {
19366 // INF
19367 frag.duration = parseFloat(duration); // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
19368
19369 var title = (' ' + result[2]).slice(1);
19370 frag.title = title || null;
19371 frag.tagList.push(title ? ['INF', duration, title] : ['INF', duration]);
19372 } else if (result[3]) {
19373 // url
19374 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(frag.duration)) {
19375 frag.start = totalduration;
19376
19377 if (levelkey) {
19378 frag.levelkey = levelkey;
19379 }
19380
19381 frag.sn = currentSN;
19382 frag.level = id;
19383 frag.cc = discontinuityCounter;
19384 frag.urlId = levelUrlId;
19385 fragments.push(frag); // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
19386
19387 frag.relurl = (' ' + result[3]).slice(1);
19388 assignProgramDateTime(frag, prevFrag);
19389 prevFrag = frag;
19390 totalduration += frag.duration;
19391 currentSN++;
19392 currentPart = 0;
19393 createNextFrag = true;
19394 }
19395 } else if (result[4]) {
19396 // X-BYTERANGE
19397 var data = (' ' + result[4]).slice(1);
19398
19399 if (prevFrag) {
19400 frag.setByteRange(data, prevFrag);
19401 } else {
19402 frag.setByteRange(data);
19403 }
19404 } else if (result[5]) {
19405 // PROGRAM-DATE-TIME
19406 // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
19407 frag.rawProgramDateTime = (' ' + result[5]).slice(1);
19408 frag.tagList.push(['PROGRAM-DATE-TIME', frag.rawProgramDateTime]);
19409
19410 if (firstPdtIndex === -1) {
19411 firstPdtIndex = fragments.length;
19412 }
19413 } else {
19414 result = result[0].match(LEVEL_PLAYLIST_REGEX_SLOW);
19415
19416 if (!result) {
19417 _utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn('No matches on slow regex match for level playlist!');
19418 continue;
19419 }
19420
19421 for (i = 1; i < result.length; i++) {
19422 if (typeof result[i] !== 'undefined') {
19423 break;
19424 }
19425 } // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
19426
19427
19428 var tag = (' ' + result[i]).slice(1);
19429 var value1 = (' ' + result[i + 1]).slice(1);
19430 var value2 = result[i + 2] ? (' ' + result[i + 2]).slice(1) : '';
19431
19432 switch (tag) {
19433 case 'PLAYLIST-TYPE':
19434 level.type = value1.toUpperCase();
19435 break;
19436
19437 case 'MEDIA-SEQUENCE':
19438 currentSN = level.startSN = parseInt(value1);
19439 break;
19440
19441 case 'SKIP':
19442 {
19443 var skipAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
19444 var skippedSegments = skipAttrs.decimalInteger('SKIPPED-SEGMENTS');
19445
19446 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(skippedSegments)) {
19447 level.skippedSegments = skippedSegments; // This will result in fragments[] containing null placeholders, which are filled in later by `mergeDetails`
19448
19449 for (var _i = skippedSegments; _i--;) {
19450 fragments.unshift(null);
19451 }
19452
19453 currentSN += skippedSegments;
19454 }
19455
19456 var recentlyRemovedDateranges = skipAttrs.enumeratedString('RECENTLY-REMOVED-DATERANGES');
19457
19458 if (recentlyRemovedDateranges) {
19459 level.recentlyRemovedDateranges = recentlyRemovedDateranges.split('\t');
19460 }
19461
19462 break;
19463 }
19464
19465 case 'TARGETDURATION':
19466 level.targetduration = parseFloat(value1);
19467 break;
19468
19469 case 'VERSION':
19470 level.version = parseInt(value1);
19471 break;
19472
19473 case 'EXTM3U':
19474 break;
19475
19476 case 'ENDLIST':
19477 level.live = false;
19478 break;
19479
19480 case '#':
19481 if (value1 || value2) {
19482 frag.tagList.push(value2 ? [value1, value2] : [value1]);
19483 }
19484
19485 break;
19486
19487 case 'DIS':
19488 discontinuityCounter++;
19489
19490 /* falls through */
19491
19492 case 'GAP':
19493 frag.tagList.push([tag]);
19494 break;
19495
19496 case 'BITRATE':
19497 frag.tagList.push([tag, value1]);
19498 break;
19499
19500 case 'DISCONTINUITY-SEQ':
19501 discontinuityCounter = parseInt(value1);
19502 break;
19503
19504 case 'KEY':
19505 {
19506 var _keyAttrs$enumeratedS;
19507
19508 // https://tools.ietf.org/html/rfc8216#section-4.3.2.4
19509 var keyAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
19510 var decryptmethod = keyAttrs.enumeratedString('METHOD');
19511 var decrypturi = keyAttrs.URI;
19512 var decryptiv = keyAttrs.hexadecimalInteger('IV');
19513 var decryptkeyformatversions = keyAttrs.enumeratedString('KEYFORMATVERSIONS');
19514 var decryptkeyid = keyAttrs.enumeratedString('KEYID'); // From RFC: This attribute is OPTIONAL; its absence indicates an implicit value of "identity".
19515
19516 var decryptkeyformat = (_keyAttrs$enumeratedS = keyAttrs.enumeratedString('KEYFORMAT')) != null ? _keyAttrs$enumeratedS : 'identity';
19517 var unsupportedKnownKeyformatsInManifest = ['com.apple.streamingkeydelivery', 'com.microsoft.playready', 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed', // widevine (v2)
19518 'com.widevine' // earlier widevine (v1)
19519 ];
19520
19521 if (unsupportedKnownKeyformatsInManifest.indexOf(decryptkeyformat) > -1) {
19522 _utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn("Keyformat " + decryptkeyformat + " is not supported from the manifest");
19523 continue;
19524 } else if (decryptkeyformat !== 'identity') {
19525 // We are supposed to skip keys we don't understand.
19526 // As we currently only officially support identity keys
19527 // from the manifest, we shouldn't save any other key.
19528 continue;
19529 } // TODO: multiple keys can be defined on a fragment, and we need to support this
19530 // for clients that support both playready and widevine
19531
19532
19533 if (decryptmethod) {
19534 // TODO: need to determine if the level key is actually a relative URL
19535 // if it isn't, then we should instead construct the LevelKey using fromURI.
19536 levelkey = _level_key__WEBPACK_IMPORTED_MODULE_4__["LevelKey"].fromURL(baseurl, decrypturi);
19537
19538 if (decrypturi && ['AES-128', 'SAMPLE-AES', 'SAMPLE-AES-CENC'].indexOf(decryptmethod) >= 0) {
19539 levelkey.method = decryptmethod;
19540 levelkey.keyFormat = decryptkeyformat;
19541
19542 if (decryptkeyid) {
19543 levelkey.keyID = decryptkeyid;
19544 }
19545
19546 if (decryptkeyformatversions) {
19547 levelkey.keyFormatVersions = decryptkeyformatversions;
19548 } // Initialization Vector (IV)
19549
19550
19551 levelkey.iv = decryptiv;
19552 }
19553 }
19554
19555 break;
19556 }
19557
19558 case 'START':
19559 {
19560 var startAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
19561 var startTimeOffset = startAttrs.decimalFloatingPoint('TIME-OFFSET'); // TIME-OFFSET can be 0
19562
19563 if (Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(startTimeOffset)) {
19564 level.startTimeOffset = startTimeOffset;
19565 }
19566
19567 break;
19568 }
19569
19570 case 'MAP':
19571 {
19572 var mapAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
19573 frag.relurl = mapAttrs.URI;
19574
19575 if (mapAttrs.BYTERANGE) {
19576 frag.setByteRange(mapAttrs.BYTERANGE);
19577 }
19578
19579 frag.level = id;
19580 frag.sn = 'initSegment';
19581
19582 if (levelkey) {
19583 frag.levelkey = levelkey;
19584 }
19585
19586 frag.initSegment = null;
19587 currentInitSegment = frag;
19588 createNextFrag = true;
19589 break;
19590 }
19591
19592 case 'SERVER-CONTROL':
19593 {
19594 var serverControlAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
19595 level.canBlockReload = serverControlAttrs.bool('CAN-BLOCK-RELOAD');
19596 level.canSkipUntil = serverControlAttrs.optionalFloat('CAN-SKIP-UNTIL', 0);
19597 level.canSkipDateRanges = level.canSkipUntil > 0 && serverControlAttrs.bool('CAN-SKIP-DATERANGES');
19598 level.partHoldBack = serverControlAttrs.optionalFloat('PART-HOLD-BACK', 0);
19599 level.holdBack = serverControlAttrs.optionalFloat('HOLD-BACK', 0);
19600 break;
19601 }
19602
19603 case 'PART-INF':
19604 {
19605 var partInfAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
19606 level.partTarget = partInfAttrs.decimalFloatingPoint('PART-TARGET');
19607 break;
19608 }
19609
19610 case 'PART':
19611 {
19612 var partList = level.partList;
19613
19614 if (!partList) {
19615 partList = level.partList = [];
19616 }
19617
19618 var previousFragmentPart = currentPart > 0 ? partList[partList.length - 1] : undefined;
19619 var index = currentPart++;
19620 var part = new _fragment__WEBPACK_IMPORTED_MODULE_2__["Part"](new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1), frag, baseurl, index, previousFragmentPart);
19621 partList.push(part);
19622 frag.duration += part.duration;
19623 break;
19624 }
19625
19626 case 'PRELOAD-HINT':
19627 {
19628 var preloadHintAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
19629 level.preloadHint = preloadHintAttrs;
19630 break;
19631 }
19632
19633 case 'RENDITION-REPORT':
19634 {
19635 var renditionReportAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
19636 level.renditionReports = level.renditionReports || [];
19637 level.renditionReports.push(renditionReportAttrs);
19638 break;
19639 }
19640
19641 default:
19642 _utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn("line parsed but not handled: " + result);
19643 break;
19644 }
19645 }
19646 }
19647
19648 if (prevFrag && !prevFrag.relurl) {
19649 fragments.pop();
19650 totalduration -= prevFrag.duration;
19651
19652 if (level.partList) {
19653 level.fragmentHint = prevFrag;
19654 }
19655 } else if (level.partList) {
19656 assignProgramDateTime(frag, prevFrag);
19657 frag.cc = discontinuityCounter;
19658 level.fragmentHint = frag;
19659 }
19660
19661 var fragmentLength = fragments.length;
19662 var firstFragment = fragments[0];
19663 var lastFragment = fragments[fragmentLength - 1];
19664 totalduration += level.skippedSegments * level.targetduration;
19665
19666 if (totalduration > 0 && fragmentLength && lastFragment) {
19667 level.averagetargetduration = totalduration / fragmentLength;
19668 var lastSn = lastFragment.sn;
19669 level.endSN = lastSn !== 'initSegment' ? lastSn : 0;
19670
19671 if (firstFragment) {
19672 level.startCC = firstFragment.cc;
19673
19674 if (!firstFragment.initSegment) {
19675 // this is a bit murky, but HLS really has no other way to tell us
19676 // whether the fragments are TS or MP4, short of downloading them :/
19677 // this is needed to be able to handle SIDX.
19678 if (level.fragments.every(function (frag) {
19679 return frag.relurl && isMP4Url(frag.relurl);
19680 })) {
19681 _utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn('MP4 fragments found but no init segment (probably no MAP, incomplete M3U8), trying to fetch SIDX');
19682 frag = new _fragment__WEBPACK_IMPORTED_MODULE_2__["Fragment"](type, baseurl);
19683 frag.relurl = lastFragment.relurl;
19684 frag.level = id;
19685 frag.sn = 'initSegment';
19686 firstFragment.initSegment = frag;
19687 level.needSidxRanges = true;
19688 }
19689 }
19690 }
19691 } else {
19692 level.endSN = 0;
19693 level.startCC = 0;
19694 }
19695
19696 if (level.fragmentHint) {
19697 totalduration += level.fragmentHint.duration;
19698 }
19699
19700 level.totalduration = totalduration;
19701 level.endCC = discontinuityCounter;
19702 /**
19703 * Backfill any missing PDT values
19704 * "If the first EXT-X-PROGRAM-DATE-TIME tag in a Playlist appears after
19705 * one or more Media Segment URIs, the client SHOULD extrapolate
19706 * backward from that tag (using EXTINF durations and/or media
19707 * timestamps) to associate dates with those segments."
19708 * We have already extrapolated forward, but all fragments up to the first instance of PDT do not have their PDTs
19709 * computed.
19710 */
19711
19712 if (firstPdtIndex > 0) {
19713 backfillProgramDateTimes(fragments, firstPdtIndex);
19714 }
19715
19716 return level;
19717 };
19718
19719 return M3U8Parser;
19720}();
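/*
 * Sketch of parseLevelPlaylist on a minimal VOD media playlist (values are illustrative;
 * 'main' corresponds to PlaylistLevelType.MAIN):
 *
 *   var media =
 *     '#EXTM3U\n#EXT-X-VERSION:3\n#EXT-X-TARGETDURATION:6\n#EXT-X-MEDIA-SEQUENCE:0\n' +
 *     '#EXTINF:6.000,\nseg0.ts\n' +
 *     '#EXTINF:6.000,\nseg1.ts\n' +
 *     '#EXT-X-ENDLIST\n';
 *   var details = M3U8Parser.parseLevelPlaylist(media, 'https://example.com/low/index.m3u8', 0, 'main', 0);
 *   // details.fragments.length -> 2, details.totalduration -> 12
 *   // details.targetduration   -> 6, details.startSN -> 0, details.endSN -> 1
 *   // details.live             -> false (because of #EXT-X-ENDLIST)
 */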
19721
19722
19723
19724function setCodecs(codecs, level) {
19725 ['video', 'audio', 'text'].forEach(function (type) {
19726 var filtered = codecs.filter(function (codec) {
19727 return Object(_utils_codecs__WEBPACK_IMPORTED_MODULE_7__["isCodecType"])(codec, type);
19728 });
19729
19730 if (filtered.length) {
19731 var preferred = filtered.filter(function (codec) {
19732 return codec.lastIndexOf('avc1', 0) === 0 || codec.lastIndexOf('mp4a', 0) === 0;
19733 });
19734 level[type + "Codec"] = preferred.length > 0 ? preferred[0] : filtered[0]; // remove from list
19735
19736 codecs = codecs.filter(function (codec) {
19737 return filtered.indexOf(codec) === -1;
19738 });
19739 }
19740 });
19741 level.unknownCodecs = codecs;
19742}
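/*
 * Sketch of how setCodecs distributes a CODECS attribute list (illustrative; the text
 * entry assumes isCodecType classifies 'stpp.*' as a text codec):
 *
 *   var level = {};
 *   setCodecs(['avc1.42001e', 'mp4a.40.2', 'stpp.ttml.im1t'], level);
 *   // level.videoCodec    -> 'avc1.42001e'
 *   // level.audioCodec    -> 'mp4a.40.2'
 *   // level.textCodec     -> 'stpp.ttml.im1t'
 *   // level.unknownCodecs -> [] (every entry was recognised and removed from the list)
 */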
19743
19744function assignCodec(media, groupItem, codecProperty) {
19745 var codecValue = groupItem[codecProperty];
19746
19747 if (codecValue) {
19748 media[codecProperty] = codecValue;
19749 }
19750}
19751
19752function backfillProgramDateTimes(fragments, firstPdtIndex) {
19753 var fragPrev = fragments[firstPdtIndex];
19754
19755 for (var i = firstPdtIndex; i--;) {
19756 var frag = fragments[i]; // Exit on delta-playlist skipped segments
19757
19758 if (!frag) {
19759 return;
19760 }
19761
19762 frag.programDateTime = fragPrev.programDateTime - frag.duration * 1000;
19763 fragPrev = frag;
19764 }
19765}
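/*
 * Backfill sketch (illustrative numbers): if the first PROGRAM-DATE-TIME appears on the
 * third fragment, the two earlier fragments get dates extrapolated backward by duration.
 *
 *   // fragments[2].programDateTime === 1600000012000, every fragment duration === 6 (s)
 *   backfillProgramDateTimes(fragments, 2);
 *   // fragments[1].programDateTime -> 1600000006000
 *   // fragments[0].programDateTime -> 1600000000000
 */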
19766
19767function assignProgramDateTime(frag, prevFrag) {
19768 if (frag.rawProgramDateTime) {
19769 frag.programDateTime = Date.parse(frag.rawProgramDateTime);
19770 } else if (prevFrag !== null && prevFrag !== void 0 && prevFrag.programDateTime) {
19771 frag.programDateTime = prevFrag.endProgramDateTime;
19772 }
19773
19774 if (!Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(frag.programDateTime)) {
19775 frag.programDateTime = null;
19776 frag.rawProgramDateTime = null;
19777 }
19778}
19779
19780/***/ }),
19781
19782/***/ "./src/loader/playlist-loader.ts":
19783/*!***************************************!*\
19784 !*** ./src/loader/playlist-loader.ts ***!
19785 \***************************************/
19786/*! exports provided: default */
19787/***/ (function(module, __webpack_exports__, __webpack_require__) {
19788__webpack_require__.r(__webpack_exports__);
19789/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
19790/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
19791/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
19792/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
19793/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
19794/* harmony import */ var _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./m3u8-parser */ "./src/loader/m3u8-parser.ts");
19795/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
19796/* harmony import */ var _utils_attr_list__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../utils/attr-list */ "./src/utils/attr-list.ts");
19797
19798
19799
19800/**
19801 * PlaylistLoader - delegate for media manifest/playlist loading tasks. Takes care of parsing media to internal data-models.
19802 *
19803 * Once loaded, dispatches events with parsed data-models of manifest/levels/audio/subtitle tracks.
19804 *
19805 * Uses loader(s) set in config to do actual internal loading of resource tasks.
19806 *
19807 * @module
19808 *
19809 */
19810
19811
19812
19813
19814
19815
19816
19817
19818function mapContextToLevelType(context) {
19819 var type = context.type;
19820
19821 switch (type) {
19822 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK:
19823 return _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO;
19824
19825 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK:
19826 return _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].SUBTITLE;
19827
19828 default:
19829 return _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN;
19830 }
19831}
19832
19833function getResponseUrl(response, context) {
19834 var url = response.url; // responseURL not supported on some browsers (it is used to detect URL redirection)
19835 // data-uri mode also not supported (but no need to detect redirection)
19836
19837 if (url === undefined || url.indexOf('data:') === 0) {
19838 // fallback to initial URL
19839 url = context.url;
19840 }
19841
19842 return url;
19843}
19844
19845var PlaylistLoader = /*#__PURE__*/function () {
19846 function PlaylistLoader(hls) {
19847 this.hls = void 0;
19848 this.loaders = Object.create(null);
19849 this.hls = hls;
19850 this.registerListeners();
19851 }
19852
19853 var _proto = PlaylistLoader.prototype;
19854
19855 _proto.registerListeners = function registerListeners() {
19856 var hls = this.hls;
19857 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
19858 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
19859 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this);
19860 hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this);
19861 };
19862
19863 _proto.unregisterListeners = function unregisterListeners() {
19864 var hls = this.hls;
19865 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
19866 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
19867 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this);
19868 hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this);
19869 }
19870 /**
19871 * Returns defaults or configured loader-type overloads (pLoader and loader config params)
19872 */
19873 ;
19874
19875 _proto.createInternalLoader = function createInternalLoader(context) {
19876 var config = this.hls.config;
19877 var PLoader = config.pLoader;
19878 var Loader = config.loader;
19879 var InternalLoader = PLoader || Loader;
19880 var loader = new InternalLoader(config);
19881 context.loader = loader;
19882 this.loaders[context.type] = loader;
19883 return loader;
19884 };
19885
19886 _proto.getInternalLoader = function getInternalLoader(context) {
19887 return this.loaders[context.type];
19888 };
19889
19890 _proto.resetInternalLoader = function resetInternalLoader(contextType) {
19891 if (this.loaders[contextType]) {
19892 delete this.loaders[contextType];
19893 }
19894 }
19895 /**
19896 * Call `destroy` on all internal loader instances mapped (one per context type)
19897 */
19898 ;
19899
19900 _proto.destroyInternalLoaders = function destroyInternalLoaders() {
19901 for (var contextType in this.loaders) {
19902 var loader = this.loaders[contextType];
19903
19904 if (loader) {
19905 loader.destroy();
19906 }
19907
19908 this.resetInternalLoader(contextType);
19909 }
19910 };
19911
19912 _proto.destroy = function destroy() {
19913 this.unregisterListeners();
19914 this.destroyInternalLoaders();
19915 };
19916
19917 _proto.onManifestLoading = function onManifestLoading(event, data) {
19918 var url = data.url;
19919 this.load({
19920 id: null,
19921 groupId: null,
19922 level: 0,
19923 responseType: 'text',
19924 type: _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST,
19925 url: url,
19926 deliveryDirectives: null
19927 });
19928 };
19929
19930 _proto.onLevelLoading = function onLevelLoading(event, data) {
19931 var id = data.id,
19932 level = data.level,
19933 url = data.url,
19934 deliveryDirectives = data.deliveryDirectives;
19935 this.load({
19936 id: id,
19937 groupId: null,
19938 level: level,
19939 responseType: 'text',
19940 type: _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].LEVEL,
19941 url: url,
19942 deliveryDirectives: deliveryDirectives
19943 });
19944 };
19945
19946 _proto.onAudioTrackLoading = function onAudioTrackLoading(event, data) {
19947 var id = data.id,
19948 groupId = data.groupId,
19949 url = data.url,
19950 deliveryDirectives = data.deliveryDirectives;
19951 this.load({
19952 id: id,
19953 groupId: groupId,
19954 level: null,
19955 responseType: 'text',
19956 type: _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK,
19957 url: url,
19958 deliveryDirectives: deliveryDirectives
19959 });
19960 };
19961
19962 _proto.onSubtitleTrackLoading = function onSubtitleTrackLoading(event, data) {
19963 var id = data.id,
19964 groupId = data.groupId,
19965 url = data.url,
19966 deliveryDirectives = data.deliveryDirectives;
19967 this.load({
19968 id: id,
19969 groupId: groupId,
19970 level: null,
19971 responseType: 'text',
19972 type: _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK,
19973 url: url,
19974 deliveryDirectives: deliveryDirectives
19975 });
19976 };
19977
19978 _proto.load = function load(context) {
19979 var _context$deliveryDire;
19980
19981 var config = this.hls.config; // logger.debug(`[playlist-loader]: Loading playlist of type ${context.type}, level: ${context.level}, id: ${context.id}`);
19982 // Check if a loader for this context already exists
19983
19984 var loader = this.getInternalLoader(context);
19985
19986 if (loader) {
19987 var loaderContext = loader.context;
19988
19989 if (loaderContext && loaderContext.url === context.url) {
19990 // a request for the same URL is already in flight; don't start an overlapping one
19991 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].trace('[playlist-loader]: playlist request ongoing');
19992 return;
19993 }
19994
19995 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log("[playlist-loader]: aborting previous loader for type: " + context.type);
19996 loader.abort();
19997 }
19998
19999 var maxRetry;
20000 var timeout;
20001 var retryDelay;
20002 var maxRetryDelay; // apply different configs for retries depending on
20003 // context (manifest, level, audio/subs playlist)
20004
20005 switch (context.type) {
20006 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST:
20007 maxRetry = config.manifestLoadingMaxRetry;
20008 timeout = config.manifestLoadingTimeOut;
20009 retryDelay = config.manifestLoadingRetryDelay;
20010 maxRetryDelay = config.manifestLoadingMaxRetryTimeout;
20011 break;
20012
20013 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].LEVEL:
20014 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK:
20015 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK:
20016 // Manage retries in Level/Track Controller
20017 maxRetry = 0;
20018 timeout = config.levelLoadingTimeOut;
20019 break;
20020
20021 default:
20022 maxRetry = config.levelLoadingMaxRetry;
20023 timeout = config.levelLoadingTimeOut;
20024 retryDelay = config.levelLoadingRetryDelay;
20025 maxRetryDelay = config.levelLoadingMaxRetryTimeout;
20026 break;
20027 }
20028
20029 loader = this.createInternalLoader(context); // Override level/track timeout for LL-HLS requests
20030 // (the default of 10000ms is counterproductive for blocking playlist reload requests)
20031
20032 if ((_context$deliveryDire = context.deliveryDirectives) !== null && _context$deliveryDire !== void 0 && _context$deliveryDire.part) {
20033 var levelDetails;
20034
20035 if (context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].LEVEL && context.level !== null) {
20036 levelDetails = this.hls.levels[context.level].details;
20037 } else if (context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK && context.id !== null) {
20038 levelDetails = this.hls.audioTracks[context.id].details;
20039 } else if (context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK && context.id !== null) {
20040 levelDetails = this.hls.subtitleTracks[context.id].details;
20041 }
20042
20043 if (levelDetails) {
20044 var partTarget = levelDetails.partTarget;
20045 var targetDuration = levelDetails.targetduration;
20046
20047 if (partTarget && targetDuration) {
20048 timeout = Math.min(Math.max(partTarget * 3, targetDuration * 0.8) * 1000, timeout);
20049 }
20050 }
20051 }
20052
20053 var loaderConfig = {
20054 timeout: timeout,
20055 maxRetry: maxRetry,
20056 retryDelay: retryDelay,
20057 maxRetryDelay: maxRetryDelay,
20058 highWaterMark: 0
20059 };
20060 var loaderCallbacks = {
20061 onSuccess: this.loadsuccess.bind(this),
20062 onError: this.loaderror.bind(this),
20063 onTimeout: this.loadtimeout.bind(this)
20064 }; // logger.debug(`[playlist-loader]: Calling internal loader delegate for URL: ${context.url}`);
20065
20066 loader.load(context, loaderConfig, loaderCallbacks);
20067 };
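/*
 * Timeout override sketch for LL-HLS blocking reloads (illustrative numbers, assuming the
 * default levelLoadingTimeOut of 10000 ms): with partTarget = 1 and targetduration = 4,
 *
 *   Math.min(Math.max(1 * 3, 4 * 0.8) * 1000, 10000) // -> 3200 ms
 *
 * so a blocked playlist request fails much sooner than the generic 10 s timeout.
 */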
20068
20069 _proto.loadsuccess = function loadsuccess(response, stats, context, networkDetails) {
20070 if (networkDetails === void 0) {
20071 networkDetails = null;
20072 }
20073
20074 if (context.isSidxRequest) {
20075 this.handleSidxRequest(response, context);
20076 this.handlePlaylistLoaded(response, stats, context, networkDetails);
20077 return;
20078 }
20079
20080 this.resetInternalLoader(context.type);
20081 var string = response.data; // Validate if it is an M3U8 at all
20082
20083 if (string.indexOf('#EXTM3U') !== 0) {
20084 this.handleManifestParsingError(response, context, 'no EXTM3U delimiter', networkDetails);
20085 return;
20086 }
20087
20088 stats.parsing.start = performance.now(); // Check if chunk-list or master. Handle the empty chunk-list case (no EXTINF signaled yet, but TARGETDURATION present)
20089
20090 if (string.indexOf('#EXTINF:') > 0 || string.indexOf('#EXT-X-TARGETDURATION:') > 0) {
20091 this.handleTrackOrLevelPlaylist(response, stats, context, networkDetails);
20092 } else {
20093 this.handleMasterPlaylist(response, stats, context, networkDetails);
20094 }
20095 };
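/*
 * Detection sketch (illustrative): the branch above decides purely on tags, so
 *
 *   '#EXTM3U\n#EXT-X-TARGETDURATION:6\n...'            // -> handleTrackOrLevelPlaylist (media playlist, even before any EXTINF)
 *   '#EXTM3U\n#EXT-X-STREAM-INF:BANDWIDTH=800000\n...' // -> handleMasterPlaylist
 *
 * Responses that do not start with '#EXTM3U' were already rejected as a parsing error.
 */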
20096
20097 _proto.loaderror = function loaderror(response, context, networkDetails) {
20098 if (networkDetails === void 0) {
20099 networkDetails = null;
20100 }
20101
20102 this.handleNetworkError(context, networkDetails, false, response);
20103 };
20104
20105 _proto.loadtimeout = function loadtimeout(stats, context, networkDetails) {
20106 if (networkDetails === void 0) {
20107 networkDetails = null;
20108 }
20109
20110 this.handleNetworkError(context, networkDetails, true);
20111 };
20112
20113 _proto.handleMasterPlaylist = function handleMasterPlaylist(response, stats, context, networkDetails) {
20114 var hls = this.hls;
20115 var string = response.data;
20116 var url = getResponseUrl(response, context);
20117
20118 var _M3U8Parser$parseMast = _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__["default"].parseMasterPlaylist(string, url),
20119 levels = _M3U8Parser$parseMast.levels,
20120 sessionData = _M3U8Parser$parseMast.sessionData;
20121
20122 if (!levels.length) {
20123 this.handleManifestParsingError(response, context, 'no level found in manifest', networkDetails);
20124 return;
20125 } // multi level playlist, parse level info
20126
20127
20128 var audioGroups = levels.map(function (level) {
20129 return {
20130 id: level.attrs.AUDIO,
20131 audioCodec: level.audioCodec
20132 };
20133 });
20134 var subtitleGroups = levels.map(function (level) {
20135 return {
20136 id: level.attrs.SUBTITLES,
20137 textCodec: level.textCodec
20138 };
20139 });
20140 var audioTracks = _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__["default"].parseMasterPlaylistMedia(string, url, 'AUDIO', audioGroups);
20141 var subtitles = _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__["default"].parseMasterPlaylistMedia(string, url, 'SUBTITLES', subtitleGroups);
20142 var captions = _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__["default"].parseMasterPlaylistMedia(string, url, 'CLOSED-CAPTIONS');
20143
20144 if (audioTracks.length) {
20145 // check if we have found an audio track embedded in main playlist (audio track without URI attribute)
20146 var embeddedAudioFound = audioTracks.some(function (audioTrack) {
20147 return !audioTrack.url;
20148 }); // if no embedded audio track is defined but an audio codec is signaled in the quality level,
20149 // we need to signal this main audio track. This can happen with playlists that have an
20150 // alt audio rendition in which the quality levels (main)
20151 // contain both audio and video, but the mixed-in audio track is not signaled.
20152
20153 if (!embeddedAudioFound && levels[0].audioCodec && !levels[0].attrs.AUDIO) {
20154 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
20155 audioTracks.unshift({
20156 type: 'main',
20157 name: 'main',
20158 default: false,
20159 autoselect: false,
20160 forced: false,
20161 id: -1,
20162 attrs: new _utils_attr_list__WEBPACK_IMPORTED_MODULE_7__["AttrList"]({}),
20163 bitrate: 0,
20164 url: ''
20165 });
20166 }
20167 }
20168
20169 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, {
20170 levels: levels,
20171 audioTracks: audioTracks,
20172 subtitles: subtitles,
20173 captions: captions,
20174 url: url,
20175 stats: stats,
20176 networkDetails: networkDetails,
20177 sessionData: sessionData
20178 });
20179 };
20180
20181 _proto.handleTrackOrLevelPlaylist = function handleTrackOrLevelPlaylist(response, stats, context, networkDetails) {
20182 var hls = this.hls;
20183 var id = context.id,
20184 level = context.level,
20185 type = context.type;
20186 var url = getResponseUrl(response, context);
20187 var levelUrlId = Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(id) ? id : 0;
20188 var levelId = Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(level) ? level : levelUrlId;
20189 var levelType = mapContextToLevelType(context);
20190 var levelDetails = _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__["default"].parseLevelPlaylist(response.data, url, levelId, levelType, levelUrlId);
20191
20192 if (!levelDetails.fragments.length) {
20193 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
20194 type: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorTypes"].NETWORK_ERROR,
20195 details: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_EMPTY_ERROR,
20196 fatal: false,
20197 url: url,
20198 reason: 'no fragments found in level',
20199 level: typeof context.level === 'number' ? context.level : undefined
20200 });
20201 return;
20202 } // We have done our first request (Manifest-type) and received
20203 // not a master playlist but a chunk-list (track/level playlist).
20204 // We fire the manifest-loaded event anyway with the parsed level-details,
20205 // by creating a single-level structure for it.
20206
20207
20208 if (type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST) {
20209 var singleLevel = {
20210 attrs: new _utils_attr_list__WEBPACK_IMPORTED_MODULE_7__["AttrList"]({}),
20211 bitrate: 0,
20212 details: levelDetails,
20213 name: '',
20214 url: url
20215 };
20216 hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, {
20217 levels: [singleLevel],
20218 audioTracks: [],
20219 url: url,
20220 stats: stats,
20221 networkDetails: networkDetails,
20222 sessionData: null
20223 });
20224 } // save parsing time
20225
20226
20227 stats.parsing.end = performance.now(); // in case we need SIDX ranges
20228 // return early after calling load for
20229 // the SIDX box.
20230
20231 if (levelDetails.needSidxRanges) {
20232 var _levelDetails$fragmen;
20233
20234 var sidxUrl = (_levelDetails$fragmen = levelDetails.fragments[0].initSegment) === null || _levelDetails$fragmen === void 0 ? void 0 : _levelDetails$fragmen.url;
20235 this.load({
20236 url: sidxUrl,
20237 isSidxRequest: true,
20238 type: type,
20239 level: level,
20240 levelDetails: levelDetails,
20241 id: id,
20242 groupId: null,
20243 rangeStart: 0,
20244 rangeEnd: 2048,
20245 responseType: 'arraybuffer',
20246 deliveryDirectives: null
20247 });
20248 return;
20249 } // extend the context with the new levelDetails property
20250
20251
20252 context.levelDetails = levelDetails;
20253 this.handlePlaylistLoaded(response, stats, context, networkDetails);
20254 };
20255
20256 _proto.handleSidxRequest = function handleSidxRequest(response, context) {
20257 var sidxInfo = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_4__["parseSegmentIndex"])(new Uint8Array(response.data)); // if provided fragment does not contain sidx, early return
20258
20259 if (!sidxInfo) {
20260 return;
20261 }
20262
20263 var sidxReferences = sidxInfo.references;
20264 var levelDetails = context.levelDetails;
20265 sidxReferences.forEach(function (segmentRef, index) {
20266 var segRefInfo = segmentRef.info;
20267 var frag = levelDetails.fragments[index];
20268
20269 if (frag.byteRange.length === 0) {
20270 frag.setByteRange(String(1 + segRefInfo.end - segRefInfo.start) + '@' + String(segRefInfo.start));
20271 }
20272
20273 if (frag.initSegment) {
20274 frag.initSegment.setByteRange(String(sidxInfo.moovEndOffset) + '@0');
20275 }
20276 });
20277 };
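/*
 * Byte-range sketch (illustrative): setByteRange takes the playlist-style string
 * '<length>@<offset>'. A sidx reference with start = 800 and end = 1599 becomes
 *
 *   frag.setByteRange('800@800');            // 1 + 1599 - 800 = 800 bytes at offset 800
 *   frag.initSegment.setByteRange('768@0');  // assuming sidxInfo.moovEndOffset === 768
 */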
20278
20279 _proto.handleManifestParsingError = function handleManifestParsingError(response, context, reason, networkDetails) {
20280 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
20281 type: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorTypes"].NETWORK_ERROR,
20282 details: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].MANIFEST_PARSING_ERROR,
20283 fatal: context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST,
20284 url: response.url,
20285 reason: reason,
20286 response: response,
20287 context: context,
20288 networkDetails: networkDetails
20289 });
20290 };
20291
20292 _proto.handleNetworkError = function handleNetworkError(context, networkDetails, timeout, response) {
20293 if (timeout === void 0) {
20294 timeout = false;
20295 }
20296
20297 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn("[playlist-loader]: A network " + (timeout ? 'timeout' : 'error') + " occurred while loading " + context.type + " level: " + context.level + " id: " + context.id + " group-id: \"" + context.groupId + "\"");
20298 var details = _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].UNKNOWN;
20299 var fatal = false;
20300 var loader = this.getInternalLoader(context);
20301
20302 switch (context.type) {
20303 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST:
20304 details = timeout ? _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].MANIFEST_LOAD_TIMEOUT : _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].MANIFEST_LOAD_ERROR;
20305 fatal = true;
20306 break;
20307
20308 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].LEVEL:
20309 details = timeout ? _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_LOAD_TIMEOUT : _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_LOAD_ERROR;
20310 fatal = false;
20311 break;
20312
20313 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK:
20314 details = timeout ? _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].AUDIO_TRACK_LOAD_TIMEOUT : _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].AUDIO_TRACK_LOAD_ERROR;
20315 fatal = false;
20316 break;
20317
20318 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK:
20319 details = timeout ? _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].SUBTITLE_TRACK_LOAD_TIMEOUT : _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].SUBTITLE_LOAD_ERROR;
20320 fatal = false;
20321 break;
20322 }
20323
20324 if (loader) {
20325 this.resetInternalLoader(context.type);
20326 }
20327
20328 var errorData = {
20329 type: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorTypes"].NETWORK_ERROR,
20330 details: details,
20331 fatal: fatal,
20332 url: context.url,
20333 loader: loader,
20334 context: context,
20335 networkDetails: networkDetails
20336 };
20337
20338 if (response) {
20339 errorData.response = response;
20340 }
20341
20342 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, errorData);
20343 };
20344
20345 _proto.handlePlaylistLoaded = function handlePlaylistLoaded(response, stats, context, networkDetails) {
20346 var type = context.type,
20347 level = context.level,
20348 id = context.id,
20349 groupId = context.groupId,
20350 loader = context.loader,
20351 levelDetails = context.levelDetails,
20352 deliveryDirectives = context.deliveryDirectives;
20353
20354 if (!(levelDetails !== null && levelDetails !== void 0 && levelDetails.targetduration)) {
20355 this.handleManifestParsingError(response, context, 'invalid target duration', networkDetails);
20356 return;
20357 }
20358
20359 if (!loader) {
20360 return;
20361 }
20362
20363 if (levelDetails.live) {
20364 if (loader.getCacheAge) {
20365 levelDetails.ageHeader = loader.getCacheAge() || 0;
20366 }
20367
20368 if (!loader.getCacheAge || isNaN(levelDetails.ageHeader)) {
20369 levelDetails.ageHeader = 0;
20370 }
20371 }
20372
20373 switch (type) {
20374 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST:
20375 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].LEVEL:
20376 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADED, {
20377 details: levelDetails,
20378 level: level || 0,
20379 id: id || 0,
20380 stats: stats,
20381 networkDetails: networkDetails,
20382 deliveryDirectives: deliveryDirectives
20383 });
20384 break;
20385
20386 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK:
20387 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].AUDIO_TRACK_LOADED, {
20388 details: levelDetails,
20389 id: id || 0,
20390 groupId: groupId || '',
20391 stats: stats,
20392 networkDetails: networkDetails,
20393 deliveryDirectives: deliveryDirectives
20394 });
20395 break;
20396
20397 case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK:
20398 this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACK_LOADED, {
20399 details: levelDetails,
20400 id: id || 0,
20401 groupId: groupId || '',
20402 stats: stats,
20403 networkDetails: networkDetails,
20404 deliveryDirectives: deliveryDirectives
20405 });
20406 break;
20407 }
20408 };
20409
20410 return PlaylistLoader;
20411}();
20412
20413/* harmony default export */ __webpack_exports__["default"] = (PlaylistLoader);
20414
20415/***/ }),
20416
20417/***/ "./src/polyfills/number.ts":
20418/*!*********************************!*\
20419 !*** ./src/polyfills/number.ts ***!
20420 \*********************************/
20421/*! exports provided: isFiniteNumber, MAX_SAFE_INTEGER */
20422/***/ (function(module, __webpack_exports__, __webpack_require__) {
20423__webpack_require__.r(__webpack_exports__);
20424/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isFiniteNumber", function() { return isFiniteNumber; });
20425/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "MAX_SAFE_INTEGER", function() { return MAX_SAFE_INTEGER; });
20426var isFiniteNumber = Number.isFinite || function (value) {
20427 return typeof value === 'number' && isFinite(value);
20428};
20429var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991;
20430
20431/***/ }),
20432
20433/***/ "./src/remux/aac-helper.ts":
20434/*!*********************************!*\
20435 !*** ./src/remux/aac-helper.ts ***!
20436 \*********************************/
20437/*! exports provided: default */
20438/***/ (function(module, __webpack_exports__, __webpack_require__) {
20439__webpack_require__.r(__webpack_exports__);
20440/**
20441 * AAC helper
20442 */
20443var AAC = /*#__PURE__*/function () {
20444 function AAC() {}
20445
20446 AAC.getSilentFrame = function getSilentFrame(codec, channelCount) {
20447 switch (codec) {
20448 case 'mp4a.40.2':
20449 if (channelCount === 1) {
20450 return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x23, 0x80]);
20451 } else if (channelCount === 2) {
20452 return new Uint8Array([0x21, 0x00, 0x49, 0x90, 0x02, 0x19, 0x00, 0x23, 0x80]);
20453 } else if (channelCount === 3) {
20454 return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x8e]);
20455 } else if (channelCount === 4) {
20456 return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x80, 0x2c, 0x80, 0x08, 0x02, 0x38]);
20457 } else if (channelCount === 5) {
20458 return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x38]);
20459 } else if (channelCount === 6) {
20460 return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x00, 0xb2, 0x00, 0x20, 0x08, 0xe0]);
20461 }
20462
20463 break;
20464 // handle HE-AAC below (mp4a.40.5 / mp4a.40.29)
20465
20466 default:
20467 if (channelCount === 1) {
20468 // ffmpeg -y -f lavfi -i "aevalsrc=0:d=0.05" -c:a libfdk_aac -profile:a aac_he -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
20469 return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x4e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x1c, 0x6, 0xf1, 0xc1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]);
20470 } else if (channelCount === 2) {
20471 // ffmpeg -y -f lavfi -i "aevalsrc=0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
20472 return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]);
20473 } else if (channelCount === 3) {
20474 // ffmpeg -y -f lavfi -i "aevalsrc=0|0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
20475 return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]);
20476 }
20477
20478 break;
20479 }
20480
20481 return undefined;
20482 };
20483
20484 return AAC;
20485}();
20486
20487/* harmony default export */ __webpack_exports__["default"] = (AAC);
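/*
 * Usage sketch (illustrative): the remuxer injects these pre-encoded silent frames to
 * fill small audio gaps without re-encoding.
 *
 *   AAC.getSilentFrame('mp4a.40.2', 2); // Uint8Array holding a silent stereo AAC-LC frame
 *   AAC.getSilentFrame('mp4a.40.5', 1); // HE-AAC mono silent frame (default branch)
 *   AAC.getSilentFrame('mp4a.40.2', 8); // undefined - no frame for that channel count
 */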
20488
20489/***/ }),
20490
20491/***/ "./src/remux/mp4-generator.ts":
20492/*!************************************!*\
20493 !*** ./src/remux/mp4-generator.ts ***!
20494 \************************************/
20495/*! exports provided: default */
20496/***/ (function(module, __webpack_exports__, __webpack_require__) {
20497__webpack_require__.r(__webpack_exports__);
20498/**
20499 * Generate MP4 Box
20500 */
20501var UINT32_MAX = Math.pow(2, 32) - 1;
20502
20503var MP4 = /*#__PURE__*/function () {
20504 function MP4() {}
20505
20506 MP4.init = function init() {
20507 MP4.types = {
20508 avc1: [],
20509 // codingname
20510 avcC: [],
20511 btrt: [],
20512 dinf: [],
20513 dref: [],
20514 esds: [],
20515 ftyp: [],
20516 hdlr: [],
20517 mdat: [],
20518 mdhd: [],
20519 mdia: [],
20520 mfhd: [],
20521 minf: [],
20522 moof: [],
20523 moov: [],
20524 mp4a: [],
20525 '.mp3': [],
20526 mvex: [],
20527 mvhd: [],
20528 pasp: [],
20529 sdtp: [],
20530 stbl: [],
20531 stco: [],
20532 stsc: [],
20533 stsd: [],
20534 stsz: [],
20535 stts: [],
20536 tfdt: [],
20537 tfhd: [],
20538 traf: [],
20539 trak: [],
20540 trun: [],
20541 trex: [],
20542 tkhd: [],
20543 vmhd: [],
20544 smhd: []
20545 };
20546 var i;
20547
20548 for (i in MP4.types) {
20549 if (MP4.types.hasOwnProperty(i)) {
20550 MP4.types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
20551 }
20552 }
20553
20554 var videoHdlr = new Uint8Array([0x00, // version 0
20555 0x00, 0x00, 0x00, // flags
20556 0x00, 0x00, 0x00, 0x00, // pre_defined
20557 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
20558 0x00, 0x00, 0x00, 0x00, // reserved
20559 0x00, 0x00, 0x00, 0x00, // reserved
20560 0x00, 0x00, 0x00, 0x00, // reserved
20561 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
20562 ]);
20563 var audioHdlr = new Uint8Array([0x00, // version 0
20564 0x00, 0x00, 0x00, // flags
20565 0x00, 0x00, 0x00, 0x00, // pre_defined
20566 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
20567 0x00, 0x00, 0x00, 0x00, // reserved
20568 0x00, 0x00, 0x00, 0x00, // reserved
20569 0x00, 0x00, 0x00, 0x00, // reserved
20570 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
20571 ]);
20572 MP4.HDLR_TYPES = {
20573 video: videoHdlr,
20574 audio: audioHdlr
20575 };
20576 var dref = new Uint8Array([0x00, // version 0
20577 0x00, 0x00, 0x00, // flags
20578 0x00, 0x00, 0x00, 0x01, // entry_count
20579 0x00, 0x00, 0x00, 0x0c, // entry_size
20580 0x75, 0x72, 0x6c, 0x20, // 'url' type
20581 0x00, // version 0
20582 0x00, 0x00, 0x01 // entry_flags
20583 ]);
20584 var stco = new Uint8Array([0x00, // version
20585 0x00, 0x00, 0x00, // flags
20586 0x00, 0x00, 0x00, 0x00 // entry_count
20587 ]);
20588 MP4.STTS = MP4.STSC = MP4.STCO = stco;
20589 MP4.STSZ = new Uint8Array([0x00, // version
20590 0x00, 0x00, 0x00, // flags
20591 0x00, 0x00, 0x00, 0x00, // sample_size
20592 0x00, 0x00, 0x00, 0x00 // sample_count
20593 ]);
20594 MP4.VMHD = new Uint8Array([0x00, // version
20595 0x00, 0x00, 0x01, // flags
20596 0x00, 0x00, // graphicsmode
20597 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
20598 ]);
20599 MP4.SMHD = new Uint8Array([0x00, // version
20600 0x00, 0x00, 0x00, // flags
20601 0x00, 0x00, // balance
20602 0x00, 0x00 // reserved
20603 ]);
20604 MP4.STSD = new Uint8Array([0x00, // version 0
20605 0x00, 0x00, 0x00, // flags
20606 0x00, 0x00, 0x00, 0x01]); // entry_count
20607
20608 var majorBrand = new Uint8Array([105, 115, 111, 109]); // isom
20609
20610 var avc1Brand = new Uint8Array([97, 118, 99, 49]); // avc1
20611
20612 var minorVersion = new Uint8Array([0, 0, 0, 1]);
20613 MP4.FTYP = MP4.box(MP4.types.ftyp, majorBrand, minorVersion, majorBrand, avc1Brand);
20614 MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, dref));
20615 };
20616
20617 MP4.box = function box(type) {
20618 var size = 8;
20619
20620 for (var _len = arguments.length, payload = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
20621 payload[_key - 1] = arguments[_key];
20622 }
20623
20624 var i = payload.length;
20625 var len = i; // calculate the total size we need to allocate
20626
20627 while (i--) {
20628 size += payload[i].byteLength;
20629 }
20630
20631 var result = new Uint8Array(size);
20632 result[0] = size >> 24 & 0xff;
20633 result[1] = size >> 16 & 0xff;
20634 result[2] = size >> 8 & 0xff;
20635 result[3] = size & 0xff;
20636 result.set(type, 4); // copy the payload into the result
20637
20638 for (i = 0, size = 8; i < len; i++) {
20639 // copy payload[i] array @ offset size
20640 result.set(payload[i], size);
20641 size += payload[i].byteLength;
20642 }
20643
20644 return result;
20645 };
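/*
 * Layout sketch (illustrative): a box is [32-bit size][4-char type][payload...], and the
 * size field includes the 8-byte header itself.
 *
 *   MP4.init(); // populates MP4.types
 *   var box = MP4.mdat(new Uint8Array([1, 2, 3, 4]));
 *   // box.length -> 12
 *   // box -> [0x00, 0x00, 0x00, 0x0c, 0x6d, 0x64, 0x61, 0x74, 0x01, 0x02, 0x03, 0x04]
 *   //         |------ size = 12 -----|------- 'mdat' -------|------- payload ------|
 */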
20646
20647 MP4.hdlr = function hdlr(type) {
20648 return MP4.box(MP4.types.hdlr, MP4.HDLR_TYPES[type]);
20649 };
20650
20651 MP4.mdat = function mdat(data) {
20652 return MP4.box(MP4.types.mdat, data);
20653 };
20654
20655 MP4.mdhd = function mdhd(timescale, duration) {
20656 duration *= timescale;
20657 var upperWordDuration = Math.floor(duration / (UINT32_MAX + 1));
20658 var lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1));
20659 return MP4.box(MP4.types.mdhd, new Uint8Array([0x01, // version 1
20660 0x00, 0x00, 0x00, // flags
20661 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, // creation_time
20662 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, // modification_time
20663 timescale >> 24 & 0xff, timescale >> 16 & 0xff, timescale >> 8 & 0xff, timescale & 0xff, // timescale
20664 upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x55, 0xc4, // 'und' language (undetermined)
20665 0x00, 0x00]));
20666 };
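/*
 * Duration encoding sketch (illustrative): mdhd is written as a version-1 box, so the
 * duration (seconds * timescale) is split into two big-endian 32-bit words.
 * With timescale = 90000 and duration = 2 s, duration * timescale = 180000:
 *
 *   upperWordDuration = Math.floor(180000 / (UINT32_MAX + 1)); // -> 0
 *   lowerWordDuration = Math.floor(180000 % (UINT32_MAX + 1)); // -> 180000
 */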
20667
20668 MP4.mdia = function mdia(track) {
20669 return MP4.box(MP4.types.mdia, MP4.mdhd(track.timescale, track.duration), MP4.hdlr(track.type), MP4.minf(track));
20670 };
20671
20672 MP4.mfhd = function mfhd(sequenceNumber) {
20673 return MP4.box(MP4.types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
20674 sequenceNumber >> 24, sequenceNumber >> 16 & 0xff, sequenceNumber >> 8 & 0xff, sequenceNumber & 0xff // sequence_number
20675 ]));
20676 };
20677
20678 MP4.minf = function minf(track) {
20679 if (track.type === 'audio') {
20680 return MP4.box(MP4.types.minf, MP4.box(MP4.types.smhd, MP4.SMHD), MP4.DINF, MP4.stbl(track));
20681 } else {
20682 return MP4.box(MP4.types.minf, MP4.box(MP4.types.vmhd, MP4.VMHD), MP4.DINF, MP4.stbl(track));
20683 }
20684 };
20685
20686 MP4.moof = function moof(sn, baseMediaDecodeTime, track) {
20687 return MP4.box(MP4.types.moof, MP4.mfhd(sn), MP4.traf(track, baseMediaDecodeTime));
20688 }
20689 /**
20690 * @param tracks... (optional) {array} the tracks associated with this movie
20691 */
20692 ;
20693
20694 MP4.moov = function moov(tracks) {
20695 var i = tracks.length;
20696 var boxes = [];
20697
20698 while (i--) {
20699 boxes[i] = MP4.trak(tracks[i]);
20700 }
20701
20702 return MP4.box.apply(null, [MP4.types.moov, MP4.mvhd(tracks[0].timescale, tracks[0].duration)].concat(boxes).concat(MP4.mvex(tracks)));
20703 };
20704
20705 MP4.mvex = function mvex(tracks) {
20706 var i = tracks.length;
20707 var boxes = [];
20708
20709 while (i--) {
20710 boxes[i] = MP4.trex(tracks[i]);
20711 }
20712
20713 return MP4.box.apply(null, [MP4.types.mvex].concat(boxes));
20714 };
20715
20716 MP4.mvhd = function mvhd(timescale, duration) {
20717 duration *= timescale;
20718 var upperWordDuration = Math.floor(duration / (UINT32_MAX + 1));
20719 var lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1));
20720 var bytes = new Uint8Array([0x01, // version 1
20721 0x00, 0x00, 0x00, // flags
20722 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, // creation_time
20723 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, // modification_time
20724 timescale >> 24 & 0xff, timescale >> 16 & 0xff, timescale >> 8 & 0xff, timescale & 0xff, // timescale
20725 upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x00, 0x01, 0x00, 0x00, // 1.0 rate
20726 0x01, 0x00, // 1.0 volume
20727 0x00, 0x00, // reserved
20728 0x00, 0x00, 0x00, 0x00, // reserved
20729 0x00, 0x00, 0x00, 0x00, // reserved
20730 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
20731 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
20732 0xff, 0xff, 0xff, 0xff // next_track_ID
20733 ]);
20734 return MP4.box(MP4.types.mvhd, bytes);
20735 };
20736
20737 MP4.sdtp = function sdtp(track) {
20738 var samples = track.samples || [];
20739 var bytes = new Uint8Array(4 + samples.length);
20740 var i;
20741 var flags; // leave the full box header (4 bytes) all zero
20742 // write the sample table
20743
20744 for (i = 0; i < samples.length; i++) {
20745 flags = samples[i].flags;
20746 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
20747 }
20748
20749 return MP4.box(MP4.types.sdtp, bytes);
20750 };
20751
20752 MP4.stbl = function stbl(track) {
20753 return MP4.box(MP4.types.stbl, MP4.stsd(track), MP4.box(MP4.types.stts, MP4.STTS), MP4.box(MP4.types.stsc, MP4.STSC), MP4.box(MP4.types.stsz, MP4.STSZ), MP4.box(MP4.types.stco, MP4.STCO));
20754 };
20755
20756 MP4.avc1 = function avc1(track) {
20757 var sps = [];
20758 var pps = [];
20759 var i;
20760 var data;
20761 var len; // assemble the SPSs
20762
20763 for (i = 0; i < track.sps.length; i++) {
20764 data = track.sps[i];
20765 len = data.byteLength;
20766 sps.push(len >>> 8 & 0xff);
20767 sps.push(len & 0xff); // SPS
20768
20769 sps = sps.concat(Array.prototype.slice.call(data));
20770 } // assemble the PPSs
20771
20772
20773 for (i = 0; i < track.pps.length; i++) {
20774 data = track.pps[i];
20775 len = data.byteLength;
20776 pps.push(len >>> 8 & 0xff);
20777 pps.push(len & 0xff);
20778 pps = pps.concat(Array.prototype.slice.call(data));
20779 }
20780
20781 var avcc = MP4.box(MP4.types.avcC, new Uint8Array([0x01, // version
20782 sps[3], // profile
20783 sps[4], // profile compat
20784 sps[5], // level
20785 0xfc | 3, // lengthSizeMinusOne, hard-coded to 4 bytes
20786 0xe0 | track.sps.length // 3bit reserved (111) + numOfSequenceParameterSets
20787 ].concat(sps).concat([track.pps.length // numOfPictureParameterSets
20788 ]).concat(pps))); // "PPS"
20789
20790 var width = track.width;
20791 var height = track.height;
20792 var hSpacing = track.pixelRatio[0];
20793 var vSpacing = track.pixelRatio[1];
20794 return MP4.box(MP4.types.avc1, new Uint8Array([0x00, 0x00, 0x00, // reserved
20795 0x00, 0x00, 0x00, // reserved
20796 0x00, 0x01, // data_reference_index
20797 0x00, 0x00, // pre_defined
20798 0x00, 0x00, // reserved
20799 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
20800 width >> 8 & 0xff, width & 0xff, // width
20801 height >> 8 & 0xff, height & 0xff, // height
20802 0x00, 0x48, 0x00, 0x00, // horizresolution
20803 0x00, 0x48, 0x00, 0x00, // vertresolution
20804 0x00, 0x00, 0x00, 0x00, // reserved
20805 0x00, 0x01, // frame_count
20806 0x12, 0x64, 0x61, 0x69, 0x6c, // dailymotion/hls.js
20807 0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
20808 0x00, 0x18, // depth = 24
20809 0x11, 0x11]), // pre_defined = -1
20810 avcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
20811 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
20812 0x00, 0x2d, 0xc6, 0xc0])), // avgBitrate
20813 MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24, // hSpacing
20814 hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24, // vSpacing
20815 vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
20816 };
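// Illustrative note on the avcC assembly above (not part of the original source):
// each SPS/PPS NAL unit is prefixed with its length as a 2-byte big-endian value,
// e.g. a 25-byte SPS produces sps = [0x00, 0x19, 0x67, profile_idc, constraint_flags, level_idc, ...],
// which is why sps[3], sps[4] and sps[5] can be read back as profile / profile compat / level
// when building the avcC header.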
20817
20818 MP4.esds = function esds(track) {
20819 var configlen = track.config.length;
20820 return new Uint8Array([0x00, // version 0
20821 0x00, 0x00, 0x00, // flags
20822 0x03, // descriptor_type
20823 0x17 + configlen, // length
20824 0x00, 0x01, // es_id
20825 0x00, // stream_priority
20826 0x04, // descriptor_type
20827 0x0f + configlen, // length
20828 0x40, // codec : mpeg4_audio
20829 0x15, // stream_type
20830 0x00, 0x00, 0x00, // buffer_size
20831 0x00, 0x00, 0x00, 0x00, // maxBitrate
20832 0x00, 0x00, 0x00, 0x00, // avgBitrate
20833 0x05 // descriptor_type
20834 ].concat([configlen]).concat(track.config).concat([0x06, 0x01, 0x02])); // GASpecificConfig)); // length + audio config descriptor
20835 };
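// Worked example of the esds length fields above (illustrative): with a typical 2-byte AAC
// AudioSpecificConfig, configlen = 2, so the ES_Descriptor length byte becomes 0x17 + 2 = 0x19
// and the DecoderConfigDescriptor length byte becomes 0x0f + 2 = 0x11; the DecoderSpecificInfo
// descriptor (tag 0x05) then carries the two config bytes, followed by the fixed
// SLConfigDescriptor [0x06, 0x01, 0x02].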
20836
20837 MP4.mp4a = function mp4a(track) {
20838 var samplerate = track.samplerate;
20839 return MP4.box(MP4.types.mp4a, new Uint8Array([0x00, 0x00, 0x00, // reserved
20840 0x00, 0x00, 0x00, // reserved
20841 0x00, 0x01, // data_reference_index
20842 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
20843 0x00, track.channelCount, // channelcount
20844 0x00, 0x10, // sampleSize:16bits
20845 0x00, 0x00, 0x00, 0x00, // reserved2
20846 samplerate >> 8 & 0xff, samplerate & 0xff, //
20847 0x00, 0x00]), MP4.box(MP4.types.esds, MP4.esds(track)));
20848 };
20849
20850 MP4.mp3 = function mp3(track) {
20851 var samplerate = track.samplerate;
20852 return MP4.box(MP4.types['.mp3'], new Uint8Array([0x00, 0x00, 0x00, // reserved
20853 0x00, 0x00, 0x00, // reserved
20854 0x00, 0x01, // data_reference_index
20855 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
20856 0x00, track.channelCount, // channelcount
20857 0x00, 0x10, // sampleSize:16bits
20858 0x00, 0x00, 0x00, 0x00, // reserved2
20859 samplerate >> 8 & 0xff, samplerate & 0xff, //
20860 0x00, 0x00]));
20861 };
20862
20863 MP4.stsd = function stsd(track) {
20864 if (track.type === 'audio') {
20865 if (!track.isAAC && track.codec === 'mp3') {
20866 return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp3(track));
20867 }
20868
20869 return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
20870 } else {
20871 return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
20872 }
20873 };
20874
20875 MP4.tkhd = function tkhd(track) {
20876 var id = track.id;
20877 var duration = track.duration * track.timescale;
20878 var width = track.width;
20879 var height = track.height;
20880 var upperWordDuration = Math.floor(duration / (UINT32_MAX + 1));
20881 var lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1));
20882 return MP4.box(MP4.types.tkhd, new Uint8Array([0x01, // version 1
20883 0x00, 0x00, 0x07, // flags
20884 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, // creation_time
20885 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, // modification_time
20886 id >> 24 & 0xff, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff, // track_ID
20887 0x00, 0x00, 0x00, 0x00, // reserved
20888 upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
20889 0x00, 0x00, // layer
20890 0x00, 0x00, // alternate_group
20891 0x00, 0x00, // non-audio track volume
20892 0x00, 0x00, // reserved
20893 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
20894 width >> 8 & 0xff, width & 0xff, 0x00, 0x00, // width
20895 height >> 8 & 0xff, height & 0xff, 0x00, 0x00 // height
20896 ]));
20897 };
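// Illustrative note on the 64-bit duration split above (not part of the original source):
// tkhd version 1 stores the duration as two 32-bit words, where
// upperWordDuration = Math.floor(duration / 2^32) and lowerWordDuration = duration % 2^32,
// e.g. duration = 2^32 + 5 (4294967301 ticks) is written as upper word 0x00000001 and
// lower word 0x00000005, each serialized big-endian as four bytes.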
20898
20899 MP4.traf = function traf(track, baseMediaDecodeTime) {
20900 var sampleDependencyTable = MP4.sdtp(track);
20901 var id = track.id;
20902 var upperWordBaseMediaDecodeTime = Math.floor(baseMediaDecodeTime / (UINT32_MAX + 1));
20903 var lowerWordBaseMediaDecodeTime = Math.floor(baseMediaDecodeTime % (UINT32_MAX + 1));
20904 return MP4.box(MP4.types.traf, MP4.box(MP4.types.tfhd, new Uint8Array([0x00, // version 0
20905 0x00, 0x00, 0x00, // flags
20906 id >> 24, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff // track_ID
20907 ])), MP4.box(MP4.types.tfdt, new Uint8Array([0x01, // version 1
20908 0x00, 0x00, 0x00, // flags
20909 upperWordBaseMediaDecodeTime >> 24, upperWordBaseMediaDecodeTime >> 16 & 0xff, upperWordBaseMediaDecodeTime >> 8 & 0xff, upperWordBaseMediaDecodeTime & 0xff, lowerWordBaseMediaDecodeTime >> 24, lowerWordBaseMediaDecodeTime >> 16 & 0xff, lowerWordBaseMediaDecodeTime >> 8 & 0xff, lowerWordBaseMediaDecodeTime & 0xff])), MP4.trun(track, sampleDependencyTable.length + 16 + // tfhd
20910 20 + // tfdt
20911 8 + // traf header
20912 16 + // mfhd
20913 8 + // moof header
20914 8), // mdat header
20915 sampleDependencyTable);
20916 }
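// Illustrative breakdown of the data_offset passed to MP4.trun above (not part of the original
// source): the accumulated sizes (sdtp length + 16 for tfhd + 20 for tfdt + 8 for the traf
// header + 16 for mfhd + 8 for the moof header + 8 for the mdat header) plus trun's own size
// (added inside MP4.trun as 8 + arraylen) equal sizeof(moof) + 8, i.e. the distance from the
// start of the moof box to the first sample byte in the following mdat.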
20917 /**
20918 * Generate a track box.
20919 * @param track {object} a track definition
20920 * @return {Uint8Array} the track box
20921 */
20922 ;
20923
20924 MP4.trak = function trak(track) {
20925 track.duration = track.duration || 0xffffffff;
20926 return MP4.box(MP4.types.trak, MP4.tkhd(track), MP4.mdia(track));
20927 };
20928
20929 MP4.trex = function trex(track) {
20930 var id = track.id;
20931 return MP4.box(MP4.types.trex, new Uint8Array([0x00, // version 0
20932 0x00, 0x00, 0x00, // flags
20933 id >> 24, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff, // track_ID
20934 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
20935 0x00, 0x00, 0x00, 0x00, // default_sample_duration
20936 0x00, 0x00, 0x00, 0x00, // default_sample_size
20937 0x00, 0x01, 0x00, 0x01 // default_sample_flags
20938 ]));
20939 };
20940
20941 MP4.trun = function trun(track, offset) {
20942 var samples = track.samples || [];
20943 var len = samples.length;
20944 var arraylen = 12 + 16 * len;
20945 var array = new Uint8Array(arraylen);
20946 var i;
20947 var sample;
20948 var duration;
20949 var size;
20950 var flags;
20951 var cts;
20952 offset += 8 + arraylen;
20953 array.set([0x00, // version 0
20954 0x00, 0x0f, 0x01, // flags
20955 len >>> 24 & 0xff, len >>> 16 & 0xff, len >>> 8 & 0xff, len & 0xff, // sample_count
20956 offset >>> 24 & 0xff, offset >>> 16 & 0xff, offset >>> 8 & 0xff, offset & 0xff // data_offset
20957 ], 0);
20958
20959 for (i = 0; i < len; i++) {
20960 sample = samples[i];
20961 duration = sample.duration;
20962 size = sample.size;
20963 flags = sample.flags;
20964 cts = sample.cts;
20965 array.set([duration >>> 24 & 0xff, duration >>> 16 & 0xff, duration >>> 8 & 0xff, duration & 0xff, // sample_duration
20966 size >>> 24 & 0xff, size >>> 16 & 0xff, size >>> 8 & 0xff, size & 0xff, // sample_size
20967 flags.isLeading << 2 | flags.dependsOn, flags.isDependedOn << 6 | flags.hasRedundancy << 4 | flags.paddingValue << 1 | flags.isNonSync, flags.degradPrio & 0xf0 << 8, flags.degradPrio & 0x0f, // sample_flags
20968 cts >>> 24 & 0xff, cts >>> 16 & 0xff, cts >>> 8 & 0xff, cts & 0xff // sample_composition_time_offset
20969 ], 12 + 16 * i);
20970 }
20971
20972 return MP4.box(MP4.types.trun, array);
20973 };
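// Worked example of the per-sample flag packing above (illustrative): with the Mp4SampleFlags
// defaults, a keyframe (dependsOn = 2, isNonSync = 0) serializes its four sample_flags bytes as
// 0x02, 0x00, 0x00, 0x00, while a dependent frame (dependsOn = 1, isNonSync = 1) serializes as
// 0x01, 0x01, 0x00, 0x00.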
20974
20975 MP4.initSegment = function initSegment(tracks) {
20976 if (!MP4.types) {
20977 MP4.init();
20978 }
20979
20980 var movie = MP4.moov(tracks);
20981 var result = new Uint8Array(MP4.FTYP.byteLength + movie.byteLength);
20982 result.set(MP4.FTYP);
20983 result.set(movie, MP4.FTYP.byteLength);
20984 return result;
20985 };
20986
20987 return MP4;
20988}();
20989
20990MP4.types = void 0;
20991MP4.HDLR_TYPES = void 0;
20992MP4.STTS = void 0;
20993MP4.STSC = void 0;
20994MP4.STCO = void 0;
20995MP4.STSZ = void 0;
20996MP4.VMHD = void 0;
20997MP4.SMHD = void 0;
20998MP4.STSD = void 0;
20999MP4.FTYP = void 0;
21000MP4.DINF = void 0;
21001/* harmony default export */ __webpack_exports__["default"] = (MP4);
21002
21003/***/ }),
21004
21005/***/ "./src/remux/mp4-remuxer.ts":
21006/*!**********************************!*\
21007 !*** ./src/remux/mp4-remuxer.ts ***!
21008 \**********************************/
21009/*! exports provided: default, normalizePts */
21010/***/ (function(module, __webpack_exports__, __webpack_require__) {
21011__webpack_require__.r(__webpack_exports__);
21012/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return MP4Remuxer; });
21013/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "normalizePts", function() { return normalizePts; });
21014/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
21015/* harmony import */ var _aac_helper__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./aac-helper */ "./src/remux/aac-helper.ts");
21016/* harmony import */ var _mp4_generator__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./mp4-generator */ "./src/remux/mp4-generator.ts");
21017/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../events */ "./src/events.ts");
21018/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
21019/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
21020/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
21021/* harmony import */ var _utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../utils/timescale-conversion */ "./src/utils/timescale-conversion.ts");
21022
21023
21024function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
21025
21026
21027
21028
21029
21030
21031
21032
21033var MAX_SILENT_FRAME_DURATION = 10 * 1000; // 10 seconds
21034
21035var AAC_SAMPLES_PER_FRAME = 1024;
21036var MPEG_AUDIO_SAMPLE_PER_FRAME = 1152;
21037var chromeVersion = null;
21038var safariWebkitVersion = null;
21039var requiresPositiveDts = false;
21040
21041var MP4Remuxer = /*#__PURE__*/function () {
21042 function MP4Remuxer(observer, config, typeSupported, vendor) {
21043
21044 this.observer = void 0;
21045 this.config = void 0;
21046 this.typeSupported = void 0;
21047 this.ISGenerated = false;
21048 this._initPTS = void 0;
21049 this._initDTS = void 0;
21050 this.nextAvcDts = null;
21051 this.nextAudioPts = null;
21052 this.isAudioContiguous = false;
21053 this.isVideoContiguous = false;
21054 this.observer = observer;
21055 this.config = config;
21056 this.typeSupported = typeSupported;
21057 this.ISGenerated = false;
21058
21059 if (chromeVersion === null) {
21060 var userAgent = navigator.userAgent || '';
21061 var result = userAgent.match(/Chrome\/(\d+)/i);
21062 chromeVersion = result ? parseInt(result[1]) : 0;
21063 }
21064
21065 if (safariWebkitVersion === null) {
21066 var _result = navigator.userAgent.match(/Safari\/(\d+)/i);
21067
21068 safariWebkitVersion = _result ? parseInt(_result[1]) : 0;
21069 }
21070
21071 requiresPositiveDts = !!chromeVersion && chromeVersion < 75 || !!safariWebkitVersion && safariWebkitVersion < 600;
21072 }
21073
21074 var _proto = MP4Remuxer.prototype;
21075
21076 _proto.destroy = function destroy() {};
21077
21078 _proto.resetTimeStamp = function resetTimeStamp(defaultTimeStamp) {
21079 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log('[mp4-remuxer]: initPTS & initDTS reset');
21080 this._initPTS = this._initDTS = defaultTimeStamp;
21081 };
21082
21083 _proto.resetNextTimestamp = function resetNextTimestamp() {
21084 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log('[mp4-remuxer]: reset next timestamp');
21085 this.isVideoContiguous = false;
21086 this.isAudioContiguous = false;
21087 };
21088
21089 _proto.resetInitSegment = function resetInitSegment() {
21090 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log('[mp4-remuxer]: ISGenerated flag reset');
21091 this.ISGenerated = false;
21092 };
21093
21094 _proto.getVideoStartPts = function getVideoStartPts(videoSamples) {
21095 var rolloverDetected = false;
21096 var startPTS = videoSamples.reduce(function (minPTS, sample) {
21097 var delta = sample.pts - minPTS;
21098
21099 if (delta < -4294967296) {
21100 // 2^32, see PTSNormalize for reasoning, but we're hitting a rollover here, and we don't want that to impact the timeOffset calculation
21101 rolloverDetected = true;
21102 return normalizePts(minPTS, sample.pts);
21103 } else if (delta > 0) {
21104 return minPTS;
21105 } else {
21106 return sample.pts;
21107 }
21108 }, videoSamples[0].pts);
21109
21110 if (rolloverDetected) {
21111 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].debug('PTS rollover detected');
21112 }
21113
21114 return startPTS;
21115 };
21116
21117 _proto.remux = function remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, flush, playlistType) {
21118 var video;
21119 var audio;
21120 var initSegment;
21121 var text;
21122 var id3;
21123 var independent;
21124 var audioTimeOffset = timeOffset;
21125 var videoTimeOffset = timeOffset; // If we're remuxing audio and video progressively, wait until we've received enough samples for each track before proceeding.
21126 // This is done to synchronize the audio and video streams. We know the current segment will have samples if the "pid"
21127 // parameter is greater than -1. The pid is set when the PMT, which contains the track list, is parsed.
21128 // However, if the initSegment has already been generated, or we've reached the end of a segment (flush),
21129 // then we can remux one track without waiting for the other.
21130
21131 var hasAudio = audioTrack.pid > -1;
21132 var hasVideo = videoTrack.pid > -1;
21133 var length = videoTrack.samples.length;
21134 var enoughAudioSamples = audioTrack.samples.length > 0;
21135 var enoughVideoSamples = length > 1;
21136 var canRemuxAvc = (!hasAudio || enoughAudioSamples) && (!hasVideo || enoughVideoSamples) || this.ISGenerated || flush;
21137
21138 if (canRemuxAvc) {
21139 if (!this.ISGenerated) {
21140 initSegment = this.generateIS(audioTrack, videoTrack, timeOffset);
21141 }
21142
21143 var isVideoContiguous = this.isVideoContiguous;
21144 var firstKeyFrameIndex = -1;
21145
21146 if (enoughVideoSamples) {
21147 firstKeyFrameIndex = findKeyframeIndex(videoTrack.samples);
21148
21149 if (!isVideoContiguous && this.config.forceKeyFrameOnDiscontinuity) {
21150 independent = true;
21151
21152 if (firstKeyFrameIndex > 0) {
21153 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("[mp4-remuxer]: Dropped " + firstKeyFrameIndex + " out of " + length + " video samples due to a missing keyframe");
21154 var startPTS = this.getVideoStartPts(videoTrack.samples);
21155 videoTrack.samples = videoTrack.samples.slice(firstKeyFrameIndex);
21156 videoTrack.dropped += firstKeyFrameIndex;
21157 videoTimeOffset += (videoTrack.samples[0].pts - startPTS) / (videoTrack.timescale || 90000);
21158 } else if (firstKeyFrameIndex === -1) {
21159 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("[mp4-remuxer]: No keyframe found out of " + length + " video samples");
21160 independent = false;
21161 }
21162 }
21163 }
21164
21165 if (this.ISGenerated) {
21166 if (enoughAudioSamples && enoughVideoSamples) {
21167 // timeOffset is expected to be the offset of the first timestamp of this fragment (first DTS)
21168 // if first audio DTS is not aligned with first video DTS then we need to take that into account
21169 // when providing timeOffset to remuxAudio / remuxVideo. if we don't do that, there might be a permanent / small
21170 // drift between audio and video streams
21171 var _startPTS = this.getVideoStartPts(videoTrack.samples);
21172
21173 var tsDelta = normalizePts(audioTrack.samples[0].pts, _startPTS) - _startPTS;
21174
21175 var audiovideoTimestampDelta = tsDelta / videoTrack.inputTimeScale;
21176 audioTimeOffset += Math.max(0, audiovideoTimestampDelta);
21177 videoTimeOffset += Math.max(0, -audiovideoTimestampDelta);
21178 } // Purposefully remuxing audio before video, so that remuxVideo can use nextAudioPts, which is calculated in remuxAudio.
21179
21180
21181 if (enoughAudioSamples) {
21182 // if initSegment was generated without audio samples, regenerate it again
21183 if (!audioTrack.samplerate) {
21184 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn('[mp4-remuxer]: regenerate InitSegment as audio detected');
21185 initSegment = this.generateIS(audioTrack, videoTrack, timeOffset);
21186 }
21187
21188 audio = this.remuxAudio(audioTrack, audioTimeOffset, this.isAudioContiguous, accurateTimeOffset, hasVideo || enoughVideoSamples || playlistType === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO ? videoTimeOffset : undefined);
21189
21190 if (enoughVideoSamples) {
21191 var audioTrackLength = audio ? audio.endPTS - audio.startPTS : 0; // if initSegment was generated without video samples, regenerate it again
21192
21193 if (!videoTrack.inputTimeScale) {
21194 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn('[mp4-remuxer]: regenerate InitSegment as video detected');
21195 initSegment = this.generateIS(audioTrack, videoTrack, timeOffset);
21196 }
21197
21198 video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, audioTrackLength);
21199 }
21200 } else if (enoughVideoSamples) {
21201 video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, 0);
21202 }
21203
21204 if (video) {
21205 video.firstKeyFrame = firstKeyFrameIndex;
21206 video.independent = firstKeyFrameIndex !== -1;
21207 }
21208 }
21209 } // Allow ID3 and text to remux, even if more audio/video samples are required
21210
21211
21212 if (this.ISGenerated) {
21213 if (id3Track.samples.length) {
21214 id3 = this.remuxID3(id3Track, timeOffset);
21215 }
21216
21217 if (textTrack.samples.length) {
21218 text = this.remuxText(textTrack, timeOffset);
21219 }
21220 }
21221
21222 return {
21223 audio: audio,
21224 video: video,
21225 initSegment: initSegment,
21226 independent: independent,
21227 text: text,
21228 id3: id3
21229 };
21230 };
21231
21232 _proto.generateIS = function generateIS(audioTrack, videoTrack, timeOffset) {
21233 var audioSamples = audioTrack.samples;
21234 var videoSamples = videoTrack.samples;
21235 var typeSupported = this.typeSupported;
21236 var tracks = {};
21237 var computePTSDTS = !Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this._initPTS);
21238 var container = 'audio/mp4';
21239 var initPTS;
21240 var initDTS;
21241 var timescale;
21242
21243 if (computePTSDTS) {
21244 initPTS = initDTS = Infinity;
21245 }
21246
21247 if (audioTrack.config && audioSamples.length) {
21248 // let's use audio sampling rate as MP4 time scale.
21249 // rationale is that there is an integer number of audio samples per audio frame (1024 for AAC)
21250 // using the audio sampling rate here yields an integer MP4 frame duration
21251 // this avoids potential rounding issues and AV sync issues
21252 audioTrack.timescale = audioTrack.samplerate;
21253
21254 if (!audioTrack.isAAC) {
21255 if (typeSupported.mpeg) {
21256 // Chrome and Safari
21257 container = 'audio/mpeg';
21258 audioTrack.codec = '';
21259 } else if (typeSupported.mp3) {
21260 // Firefox
21261 audioTrack.codec = 'mp3';
21262 }
21263 }
21264
21265 tracks.audio = {
21266 id: 'audio',
21267 container: container,
21268 codec: audioTrack.codec,
21269 initSegment: !audioTrack.isAAC && typeSupported.mpeg ? new Uint8Array(0) : _mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].initSegment([audioTrack]),
21270 metadata: {
21271 channelCount: audioTrack.channelCount
21272 }
21273 };
21274
21275 if (computePTSDTS) {
21276 timescale = audioTrack.inputTimeScale; // remember first PTS of this demuxing context. for audio, PTS = DTS
21277
21278 initPTS = initDTS = audioSamples[0].pts - Math.round(timescale * timeOffset);
21279 }
21280 }
21281
21282 if (videoTrack.sps && videoTrack.pps && videoSamples.length) {
21283 // let's use input time scale as MP4 video timescale
21284 // we use input time scale straight away to avoid rounding issues on frame duration / cts computation
21285 videoTrack.timescale = videoTrack.inputTimeScale;
21286 tracks.video = {
21287 id: 'main',
21288 container: 'video/mp4',
21289 codec: videoTrack.codec,
21290 initSegment: _mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].initSegment([videoTrack]),
21291 metadata: {
21292 width: videoTrack.width,
21293 height: videoTrack.height
21294 }
21295 };
21296
21297 if (computePTSDTS) {
21298 timescale = videoTrack.inputTimeScale;
21299 var startPTS = this.getVideoStartPts(videoSamples);
21300 var startOffset = Math.round(timescale * timeOffset);
21301 initDTS = Math.min(initDTS, normalizePts(videoSamples[0].dts, startPTS) - startOffset);
21302 initPTS = Math.min(initPTS, startPTS - startOffset);
21303 }
21304 }
21305
21306 if (Object.keys(tracks).length) {
21307 this.ISGenerated = true;
21308
21309 if (computePTSDTS) {
21310 this._initPTS = initPTS;
21311 this._initDTS = initDTS;
21312 }
21313
21314 return {
21315 tracks: tracks,
21316 initPTS: initPTS,
21317 timescale: timescale
21318 };
21319 }
21320 };
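// Illustrative example of the initPTS computation above (audio-only case, not part of the
// original source): with inputTimeScale = 90000, timeOffset = 10 s and a first audio sample
// PTS of 910000 ticks, initPTS = 910000 - 900000 = 10000, so when remuxAudio later subtracts
// initPTS the fragment lands at roughly 10 s on the media timeline.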
21321
21322 _proto.remuxVideo = function remuxVideo(track, timeOffset, contiguous, audioTrackLength) {
21323 var timeScale = track.inputTimeScale;
21324 var inputSamples = track.samples;
21325 var outputSamples = [];
21326 var nbSamples = inputSamples.length;
21327 var initPTS = this._initPTS;
21328 var nextAvcDts = this.nextAvcDts;
21329 var offset = 8;
21330 var mp4SampleDuration;
21331 var firstDTS;
21332 var lastDTS;
21333 var minPTS = Number.POSITIVE_INFINITY;
21334 var maxPTS = Number.NEGATIVE_INFINITY;
21335 var ptsDtsShift = 0;
21336 var sortSamples = false; // if parsed fragment is contiguous with last one, let's use last DTS value as reference
21337
21338 if (!contiguous || nextAvcDts === null) {
21339 var pts = timeOffset * timeScale;
21340 var cts = inputSamples[0].pts - normalizePts(inputSamples[0].dts, inputSamples[0].pts); // if not contiguous, let's use target timeOffset
21341
21342 nextAvcDts = pts - cts;
21343 } // PTS is coded on 33 bits and wraps around at 2^33, so consecutive values can appear to jump by up to 2^33
21344 // normalizePts makes PTS/DTS values monotonic; we use the last known DTS value as the reference value
21345
21346
21347 for (var i = 0; i < nbSamples; i++) {
21348 var sample = inputSamples[i];
21349 sample.pts = normalizePts(sample.pts - initPTS, nextAvcDts);
21350 sample.dts = normalizePts(sample.dts - initPTS, nextAvcDts);
21351
21352 if (sample.dts > sample.pts) {
21353 var PTS_DTS_SHIFT_TOLERANCE_90KHZ = 90000 * 0.2;
21354 ptsDtsShift = Math.max(Math.min(ptsDtsShift, sample.pts - sample.dts), -1 * PTS_DTS_SHIFT_TOLERANCE_90KHZ);
21355 }
21356
21357 if (sample.dts < inputSamples[i > 0 ? i - 1 : i].dts) {
21358 sortSamples = true;
21359 }
21360 } // sort video samples by DTS then PTS then demux id order
21361
21362
21363 if (sortSamples) {
21364 inputSamples.sort(function (a, b) {
21365 var deltadts = a.dts - b.dts;
21366 var deltapts = a.pts - b.pts;
21367 return deltadts || deltapts;
21368 });
21369 } // Get first/last DTS
21370
21371
21372 firstDTS = inputSamples[0].dts;
21373 lastDTS = inputSamples[inputSamples.length - 1].dts; // on Safari let's signal the same sample duration for all samples
21374 // sample duration (as expected by trun MP4 boxes) should be the delta between sample DTS values;
21375 // set this constant duration to the average delta between consecutive DTS.
21376
21377 var averageSampleDuration = Math.round((lastDTS - firstDTS) / (nbSamples - 1)); // handle broken streams with PTS < DTS, tolerance up to 0.2 seconds
21378
21379 if (ptsDtsShift < 0) {
21380 if (ptsDtsShift < averageSampleDuration * -2) {
21381 // Fix for "CNN special report, with CC" in test-streams (including Safari browser)
21382 // With large PTS < DTS errors such as this, we want to correct CTS while maintaining increasing DTS values
21383 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("PTS < DTS detected in video samples, offsetting DTS from PTS by " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(-averageSampleDuration, true) + " ms");
21384 var lastDts = ptsDtsShift;
21385
21386 for (var _i = 0; _i < nbSamples; _i++) {
21387 inputSamples[_i].dts = lastDts = Math.max(lastDts, inputSamples[_i].pts - averageSampleDuration);
21388 inputSamples[_i].pts = Math.max(lastDts, inputSamples[_i].pts);
21389 }
21390 } else {
21391 // Fix for "Custom IV with bad PTS DTS" in test-streams
21392 // With smaller PTS < DTS errors we can simply move all DTS back. This increases CTS without causing buffer gaps or decode errors in Safari
21393 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("PTS < DTS detected in video samples, shifting DTS by " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(ptsDtsShift, true) + " ms to overcome this issue");
21394
21395 for (var _i2 = 0; _i2 < nbSamples; _i2++) {
21396 inputSamples[_i2].dts = inputSamples[_i2].dts + ptsDtsShift;
21397 }
21398 }
21399
21400 firstDTS = inputSamples[0].dts;
21401 } // if fragments are contiguous, detect hole/overlap between fragments
21402
21403
21404 if (contiguous) {
21405 // check timestamp continuity across consecutive fragments (this is to remove inter-fragment gap/hole)
21406 var delta = firstDTS - nextAvcDts;
21407 var foundHole = delta > averageSampleDuration;
21408 var foundOverlap = delta < -1;
21409
21410 if (foundHole || foundOverlap) {
21411 if (foundHole) {
21412 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("AVC: " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(delta, true) + " ms (" + delta + "dts) hole between fragments detected, filling it");
21413 } else {
21414 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("AVC: " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(-delta, true) + " ms (" + delta + "dts) overlapping between fragments detected");
21415 }
21416
21417 firstDTS = nextAvcDts;
21418 var firstPTS = inputSamples[0].pts - delta;
21419 inputSamples[0].dts = firstDTS;
21420 inputSamples[0].pts = firstPTS;
21421 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log("Video: First PTS/DTS adjusted: " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(firstPTS, true) + "/" + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(firstDTS, true) + ", delta: " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(delta, true) + " ms");
21422 }
21423 }
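// Worked example of the hole/overlap check above (illustrative): on a 90 kHz timeline at
// roughly 30 fps, averageSampleDuration is about 3000 ticks; a 0.5 s gap between fragments
// gives delta = 45000 ticks > 3000, so the first sample's DTS/PTS are pulled back to
// nextAvcDts to close the hole, whereas delta < -1 tick is reported as an overlap.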
21424
21425 if (requiresPositiveDts) {
21426 firstDTS = Math.max(0, firstDTS);
21427 }
21428
21429 var nbNalu = 0;
21430 var naluLen = 0;
21431
21432 for (var _i3 = 0; _i3 < nbSamples; _i3++) {
21433 // compute total/avc sample length and nb of NAL units
21434 var _sample = inputSamples[_i3];
21435 var units = _sample.units;
21436 var nbUnits = units.length;
21437 var sampleLen = 0;
21438
21439 for (var j = 0; j < nbUnits; j++) {
21440 sampleLen += units[j].data.length;
21441 }
21442
21443 naluLen += sampleLen;
21444 nbNalu += nbUnits;
21445 _sample.length = sampleLen; // normalize PTS/DTS
21446 // ensure sample monotonic DTS
21447
21448 _sample.dts = Math.max(_sample.dts, firstDTS); // ensure that the computed value is greater than or equal to the sample DTS
21449
21450 _sample.pts = Math.max(_sample.pts, _sample.dts, 0);
21451 minPTS = Math.min(_sample.pts, minPTS);
21452 maxPTS = Math.max(_sample.pts, maxPTS);
21453 }
21454
21455 lastDTS = inputSamples[nbSamples - 1].dts;
21456 /* concatenate the video data and construct the mdat in place
21457 (need 8 more bytes to fill length and mdat type) */
21458
21459 var mdatSize = naluLen + 4 * nbNalu + 8;
21460 var mdat;
21461
21462 try {
21463 mdat = new Uint8Array(mdatSize);
21464 } catch (err) {
21465 this.observer.emit(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, _events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, {
21466 type: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorTypes"].MUX_ERROR,
21467 details: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorDetails"].REMUX_ALLOC_ERROR,
21468 fatal: false,
21469 bytes: mdatSize,
21470 reason: "fail allocating video mdat " + mdatSize
21471 });
21472 return;
21473 }
21474
21475 var view = new DataView(mdat.buffer);
21476 view.setUint32(0, mdatSize);
21477 mdat.set(_mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].types.mdat, 4);
21478
21479 for (var _i4 = 0; _i4 < nbSamples; _i4++) {
21480 var avcSample = inputSamples[_i4];
21481 var avcSampleUnits = avcSample.units;
21482 var mp4SampleLength = 0; // convert NALU bitstream to MP4 format (prepend NALU with size field)
21483
21484 for (var _j = 0, _nbUnits = avcSampleUnits.length; _j < _nbUnits; _j++) {
21485 var unit = avcSampleUnits[_j];
21486 var unitData = unit.data;
21487 var unitDataLen = unit.data.byteLength;
21488 view.setUint32(offset, unitDataLen);
21489 offset += 4;
21490 mdat.set(unitData, offset);
21491 offset += unitDataLen;
21492 mp4SampleLength += 4 + unitDataLen;
21493 } // expected sample duration is the Decoding Timestamp diff of consecutive samples
21494
21495
21496 if (_i4 < nbSamples - 1) {
21497 mp4SampleDuration = inputSamples[_i4 + 1].dts - avcSample.dts;
21498 } else {
21499 var config = this.config;
21500 var lastFrameDuration = avcSample.dts - inputSamples[_i4 > 0 ? _i4 - 1 : _i4].dts;
21501
21502 if (config.stretchShortVideoTrack && this.nextAudioPts !== null) {
21503 // In some cases, a segment's audio track duration may exceed the video track duration.
21504 // Since we've already remuxed audio, and we know how long the audio track is, we look to
21505 // see if the delta to the next segment is longer than maxBufferHole.
21506 // If so, playback would potentially get stuck, so we artificially inflate
21507 // the duration of the last frame to minimize any potential gap between segments.
21508 var gapTolerance = Math.floor(config.maxBufferHole * timeScale);
21509 var deltaToFrameEnd = (audioTrackLength ? minPTS + audioTrackLength * timeScale : this.nextAudioPts) - avcSample.pts;
21510
21511 if (deltaToFrameEnd > gapTolerance) {
21512 // We subtract lastFrameDuration from deltaToFrameEnd to try to prevent any video
21513 // frame overlap. maxBufferHole should be >> lastFrameDuration anyway.
21514 mp4SampleDuration = deltaToFrameEnd - lastFrameDuration;
21515
21516 if (mp4SampleDuration < 0) {
21517 mp4SampleDuration = lastFrameDuration;
21518 }
21519
21520 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log("[mp4-remuxer]: It is approximately " + deltaToFrameEnd / 90 + " ms to the next segment; using duration " + mp4SampleDuration / 90 + " ms for the last video frame.");
21521 } else {
21522 mp4SampleDuration = lastFrameDuration;
21523 }
21524 } else {
21525 mp4SampleDuration = lastFrameDuration;
21526 }
21527 }
21528
21529 var compositionTimeOffset = Math.round(avcSample.pts - avcSample.dts);
21530 outputSamples.push(new Mp4Sample(avcSample.key, mp4SampleDuration, mp4SampleLength, compositionTimeOffset));
21531 }
21532
21533 if (outputSamples.length && chromeVersion && chromeVersion < 70) {
21534 // Chrome workaround, mark first sample as being a Random Access Point (keyframe) to avoid sourcebuffer append issue
21535 // https://code.google.com/p/chromium/issues/detail?id=229412
21536 var flags = outputSamples[0].flags;
21537 flags.dependsOn = 2;
21538 flags.isNonSync = 0;
21539 }
21540
21541 console.assert(mp4SampleDuration !== undefined, 'mp4SampleDuration must be computed'); // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
21542
21543 this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
21544 this.isVideoContiguous = true;
21545 var moof = _mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].moof(track.sequenceNumber++, firstDTS, _extends({}, track, {
21546 samples: outputSamples
21547 }));
21548 var type = 'video';
21549 var data = {
21550 data1: moof,
21551 data2: mdat,
21552 startPTS: minPTS / timeScale,
21553 endPTS: (maxPTS + mp4SampleDuration) / timeScale,
21554 startDTS: firstDTS / timeScale,
21555 endDTS: nextAvcDts / timeScale,
21556 type: type,
21557 hasAudio: false,
21558 hasVideo: true,
21559 nb: outputSamples.length,
21560 dropped: track.dropped
21561 };
21562 track.samples = [];
21563 track.dropped = 0;
21564 console.assert(mdat.length, 'MDAT length must not be zero');
21565 return data;
21566 };
21567
21568 _proto.remuxAudio = function remuxAudio(track, timeOffset, contiguous, accurateTimeOffset, videoTimeOffset) {
21569 var inputTimeScale = track.inputTimeScale;
21570 var mp4timeScale = track.samplerate ? track.samplerate : inputTimeScale;
21571 var scaleFactor = inputTimeScale / mp4timeScale;
21572 var mp4SampleDuration = track.isAAC ? AAC_SAMPLES_PER_FRAME : MPEG_AUDIO_SAMPLE_PER_FRAME;
21573 var inputSampleDuration = mp4SampleDuration * scaleFactor;
21574 var initPTS = this._initPTS;
21575 var rawMPEG = !track.isAAC && this.typeSupported.mpeg;
21576 var outputSamples = [];
21577 var inputSamples = track.samples;
21578 var offset = rawMPEG ? 0 : 8;
21579 var nextAudioPts = this.nextAudioPts || -1; // window.audioSamples ? window.audioSamples.push(inputSamples.map(s => s.pts)) : (window.audioSamples = [inputSamples.map(s => s.pts)]);
21580 // for audio samples, also consider consecutive fragments as being contiguous (even if a level switch occurs),
21581 // for the sake of clarity:
21582 // consecutive fragments are frags with
21583 // - less than 100ms gaps between new time offset (if accurate) and next expected PTS OR
21584 // - less than 20 audio frames distance
21585 // contiguous fragments are consecutive fragments from same quality level (same level, new SN = old SN + 1)
21586 // this helps ensure audio continuity
21587 // and also avoids audio glitches/cuts when switching quality, or reporting a wrong duration on the first audio frame
21588
21589 var timeOffsetMpegTS = timeOffset * inputTimeScale;
21590 this.isAudioContiguous = contiguous = contiguous || inputSamples.length && nextAudioPts > 0 && (accurateTimeOffset && Math.abs(timeOffsetMpegTS - nextAudioPts) < 9000 || Math.abs(normalizePts(inputSamples[0].pts - initPTS, timeOffsetMpegTS) - nextAudioPts) < 20 * inputSampleDuration); // compute normalized PTS
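// Illustrative numbers for the contiguity test above (not part of the original source):
// 9000 ticks on the 90 kHz MPEG-TS clock is the 100 ms tolerance mentioned above, and for
// 44.1 kHz AAC inputSampleDuration is 1024 * (90000 / 44100), about 2090 ticks, so the
// 20-frame tolerance corresponds to roughly 464 ms.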
21591
21592 inputSamples.forEach(function (sample) {
21593 sample.pts = normalizePts(sample.pts - initPTS, timeOffsetMpegTS);
21594 });
21595
21596 if (!contiguous || nextAudioPts < 0) {
21597 // filter out samples with negative PTS that are not playable anyway
21598 // if we don't remove these negative samples, they will shift all audio samples forward.
21599 // leading to audio overlap between current / next fragment
21600 inputSamples = inputSamples.filter(function (sample) {
21601 return sample.pts >= 0;
21602 }); // in case all samples have negative PTS, and have been filtered out, return now
21603
21604 if (!inputSamples.length) {
21605 return;
21606 }
21607
21608 if (videoTimeOffset === 0) {
21609 // Set the start to 0 to match video so that start gaps larger than inputSampleDuration are filled with silence
21610 nextAudioPts = 0;
21611 } else if (accurateTimeOffset) {
21612 // When not seeking, not live, and LevelDetails.PTSKnown, use fragment start as predicted next audio PTS
21613 nextAudioPts = Math.max(0, timeOffsetMpegTS);
21614 } else {
21615 // if frags are not contiguous and we can't trust the time offset, let's use the first sample PTS as the next audio PTS
21616 nextAudioPts = inputSamples[0].pts;
21617 }
21618 } // If the audio track is missing samples, the frames seem to get "left-shifted" within the
21619 // resulting mp4 segment, causing sync issues and leaving gaps at the end of the audio segment.
21620 // In an effort to prevent this from happening, we inject frames here where there are gaps.
21621 // When possible, we inject a silent frame; when that's not possible, we duplicate the last
21622 // frame.
21623
21624
21625 if (track.isAAC) {
21626 var alignedWithVideo = videoTimeOffset !== undefined;
21627 var maxAudioFramesDrift = this.config.maxAudioFramesDrift;
21628
21629 for (var i = 0, nextPts = nextAudioPts; i < inputSamples.length; i++) {
21630 // First, let's see how far off this frame is from where we expect it to be
21631 var sample = inputSamples[i];
21632 var pts = sample.pts;
21633 var delta = pts - nextPts;
21634 var duration = Math.abs(1000 * delta / inputTimeScale); // When remuxing with video, if we're overlapping by more than a duration, drop this sample to stay in sync
21635
21636 if (delta <= -maxAudioFramesDrift * inputSampleDuration && alignedWithVideo) {
21637 if (i === 0) {
21638 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("Audio frame @ " + (pts / inputTimeScale).toFixed(3) + "s overlaps nextAudioPts by " + Math.round(1000 * delta / inputTimeScale) + " ms.");
21639 this.nextAudioPts = nextAudioPts = nextPts = pts;
21640 }
21641 } // eslint-disable-line brace-style
21642 // Insert missing frames if:
21643 // 1: We're more than maxAudioFramesDrift frame away
21644 // 2: Not more than MAX_SILENT_FRAME_DURATION away
21645 // 3: currentTime (aka nextPtsNorm) is not 0
21646 // 4: remuxing with video (videoTimeOffset !== undefined)
21647 else if (delta >= maxAudioFramesDrift * inputSampleDuration && duration < MAX_SILENT_FRAME_DURATION && alignedWithVideo) {
21648 var missing = Math.round(delta / inputSampleDuration); // Adjust nextPts so that silent samples are aligned with media pts. This will prevent media samples from
21649 // later being shifted if nextPts is based on timeOffset and delta is not a multiple of inputSampleDuration.
21650
21651 nextPts = pts - missing * inputSampleDuration;
21652
21653 if (nextPts < 0) {
21654 missing--;
21655 nextPts += inputSampleDuration;
21656 }
21657
21658 if (i === 0) {
21659 this.nextAudioPts = nextAudioPts = nextPts;
21660 }
21661
21662 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("[mp4-remuxer]: Injecting " + missing + " audio frame @ " + (nextPts / inputTimeScale).toFixed(3) + "s due to " + Math.round(1000 * delta / inputTimeScale) + " ms gap.");
21663
21664 for (var j = 0; j < missing; j++) {
21665 var newStamp = Math.max(nextPts, 0);
21666 var fillFrame = _aac_helper__WEBPACK_IMPORTED_MODULE_1__["default"].getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
21667
21668 if (!fillFrame) {
21669 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
21670 fillFrame = sample.unit.subarray();
21671 }
21672
21673 inputSamples.splice(i, 0, {
21674 unit: fillFrame,
21675 pts: newStamp
21676 });
21677 nextPts += inputSampleDuration;
21678 i++;
21679 }
21680 }
21681
21682 sample.pts = nextPts;
21683 nextPts += inputSampleDuration;
21684 }
21685 }
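// Worked example of the gap filling above (illustrative): with 44.1 kHz AAC
// (inputSampleDuration of about 2090 ticks) and a frame arriving about 3.4 frame durations
// late, missing = Math.round(3.4) = 3, nextPts is moved back by 3 * 2090 ticks, and three
// silent frames are spliced in at nextPts, nextPts + 2090 and nextPts + 4180 before the real
// sample is re-stamped to follow them.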
21686
21687 var firstPTS = null;
21688 var lastPTS = null;
21689 var mdat;
21690 var mdatSize = 0;
21691 var sampleLength = inputSamples.length;
21692
21693 while (sampleLength--) {
21694 mdatSize += inputSamples[sampleLength].unit.byteLength;
21695 }
21696
21697 for (var _j2 = 0, _nbSamples = inputSamples.length; _j2 < _nbSamples; _j2++) {
21698 var audioSample = inputSamples[_j2];
21699 var unit = audioSample.unit;
21700 var _pts = audioSample.pts;
21701
21702 if (lastPTS !== null) {
21703 // If we have more than one sample, set the duration of the sample to the "real" duration; the PTS diff with
21704 // the previous sample
21705 var prevSample = outputSamples[_j2 - 1];
21706 prevSample.duration = Math.round((_pts - lastPTS) / scaleFactor);
21707 } else {
21708 if (contiguous && track.isAAC) {
21709 // set PTS/DTS to expected PTS/DTS
21710 _pts = nextAudioPts;
21711 } // remember first PTS of our audioSamples
21712
21713
21714 firstPTS = _pts;
21715
21716 if (mdatSize > 0) {
21717 /* concatenate the audio data and construct the mdat in place
21718 (need 8 more bytes to fill length and mdat type) */
21719 mdatSize += offset;
21720
21721 try {
21722 mdat = new Uint8Array(mdatSize);
21723 } catch (err) {
21724 this.observer.emit(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, _events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, {
21725 type: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorTypes"].MUX_ERROR,
21726 details: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorDetails"].REMUX_ALLOC_ERROR,
21727 fatal: false,
21728 bytes: mdatSize,
21729 reason: "fail allocating audio mdat " + mdatSize
21730 });
21731 return;
21732 }
21733
21734 if (!rawMPEG) {
21735 var view = new DataView(mdat.buffer);
21736 view.setUint32(0, mdatSize);
21737 mdat.set(_mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].types.mdat, 4);
21738 }
21739 } else {
21740 // no audio samples
21741 return;
21742 }
21743 }
21744
21745 mdat.set(unit, offset);
21746 var unitLen = unit.byteLength;
21747 offset += unitLen; // Default the sample's duration to the computed mp4SampleDuration, which will either be 1024 for AAC or 1152 for MPEG
21748 // In the case that we have 1 sample, this will be the duration. If we have more than one sample, the duration
21749 // becomes the PTS diff with the previous sample
21750
21751 outputSamples.push(new Mp4Sample(true, mp4SampleDuration, unitLen, 0));
21752 lastPTS = _pts;
21753 } // We could end up with no audio samples if all input samples were overlapping with the previously remuxed ones
21754
21755
21756 var nbSamples = outputSamples.length;
21757
21758 if (!nbSamples) {
21759 return;
21760 } // The next audio sample PTS should be equal to last sample PTS + duration
21761
21762
21763 var lastSample = outputSamples[outputSamples.length - 1];
21764 this.nextAudioPts = nextAudioPts = lastPTS + scaleFactor * lastSample.duration; // Set the track samples from inputSamples to outputSamples before remuxing
21765
21766 var moof = rawMPEG ? new Uint8Array(0) : _mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].moof(track.sequenceNumber++, firstPTS / scaleFactor, _extends({}, track, {
21767 samples: outputSamples
21768 })); // Clear the track samples. This also clears the samples array in the demuxer, since the reference is shared
21769
21770 track.samples = [];
21771 var start = firstPTS / inputTimeScale;
21772 var end = nextAudioPts / inputTimeScale;
21773 var type = 'audio';
21774 var audioData = {
21775 data1: moof,
21776 data2: mdat,
21777 startPTS: start,
21778 endPTS: end,
21779 startDTS: start,
21780 endDTS: end,
21781 type: type,
21782 hasAudio: true,
21783 hasVideo: false,
21784 nb: nbSamples
21785 };
21786 this.isAudioContiguous = true;
21787 console.assert(mdat.length, 'MDAT length must not be zero');
21788 return audioData;
21789 };
21790
21791 _proto.remuxEmptyAudio = function remuxEmptyAudio(track, timeOffset, contiguous, videoData) {
21792 var inputTimeScale = track.inputTimeScale;
21793 var mp4timeScale = track.samplerate ? track.samplerate : inputTimeScale;
21794 var scaleFactor = inputTimeScale / mp4timeScale;
21795 var nextAudioPts = this.nextAudioPts; // sync with video's timestamp
21796
21797 var startDTS = (nextAudioPts !== null ? nextAudioPts : videoData.startDTS * inputTimeScale) + this._initDTS;
21798 var endDTS = videoData.endDTS * inputTimeScale + this._initDTS; // one sample's duration value
21799
21800 var frameDuration = scaleFactor * AAC_SAMPLES_PER_FRAME; // samples count of this segment's duration
21801
21802 var nbSamples = Math.ceil((endDTS - startDTS) / frameDuration); // silent frame
21803
21804 var silentFrame = _aac_helper__WEBPACK_IMPORTED_MODULE_1__["default"].getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
21805 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn('[mp4-remuxer]: remux empty Audio'); // Can't remux if we can't generate a silent frame...
21806
21807 if (!silentFrame) {
21808 _utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].trace('[mp4-remuxer]: Unable to remuxEmptyAudio since we were unable to get a silent frame for given audio codec');
21809 return;
21810 }
21811
21812 var samples = [];
21813
21814 for (var i = 0; i < nbSamples; i++) {
21815 var stamp = startDTS + i * frameDuration;
21816 samples.push({
21817 unit: silentFrame,
21818 pts: stamp,
21819 dts: stamp
21820 });
21821 }
21822
21823 track.samples = samples;
21824 return this.remuxAudio(track, timeOffset, contiguous, false);
21825 };
21826
21827 _proto.remuxID3 = function remuxID3(track, timeOffset) {
21828 var length = track.samples.length;
21829
21830 if (!length) {
21831 return;
21832 }
21833
21834 var inputTimeScale = track.inputTimeScale;
21835 var initPTS = this._initPTS;
21836 var initDTS = this._initDTS;
21837
21838 for (var index = 0; index < length; index++) {
21839 var sample = track.samples[index]; // setting id3 pts, dts to relative time
21840 // using this._initPTS and this._initDTS to calculate relative time
21841
21842 sample.pts = normalizePts(sample.pts - initPTS, timeOffset * inputTimeScale) / inputTimeScale;
21843 sample.dts = normalizePts(sample.dts - initDTS, timeOffset * inputTimeScale) / inputTimeScale;
21844 }
21845
21846 var samples = track.samples;
21847 track.samples = [];
21848 return {
21849 samples: samples
21850 };
21851 };
21852
21853 _proto.remuxText = function remuxText(track, timeOffset) {
21854 var length = track.samples.length;
21855
21856 if (!length) {
21857 return;
21858 }
21859
21860 var inputTimeScale = track.inputTimeScale;
21861 var initPTS = this._initPTS;
21862
21863 for (var index = 0; index < length; index++) {
21864 var sample = track.samples[index]; // setting text pts, dts to relative time
21865 // using this._initPTS and this._initDTS to calculate relative time
21866
21867 sample.pts = normalizePts(sample.pts - initPTS, timeOffset * inputTimeScale) / inputTimeScale;
21868 }
21869
21870 track.samples.sort(function (a, b) {
21871 return a.pts - b.pts;
21872 });
21873 var samples = track.samples;
21874 track.samples = [];
21875 return {
21876 samples: samples
21877 };
21878 };
21879
21880 return MP4Remuxer;
21881}();
21882
21883
21884function normalizePts(value, reference) {
21885 var offset;
21886
21887 if (reference === null) {
21888 return value;
21889 }
21890
21891 if (reference < value) {
21892 // - 2^33
21893 offset = -8589934592;
21894 } else {
21895 // + 2^33
21896 offset = 8589934592;
21897 }
21898 /* PTS is 33 bits (from 0 to 2^33 - 1).
21899 if the difference between value and reference is bigger than half of the amplitude (2^32), it means that
21900 PTS looping occurred; fill the gap */
21901
21902
21903 while (Math.abs(value - reference) > 4294967296) {
21904 value += offset;
21905 }
21906
21907 return value;
21908}
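// Worked example of the rollover handling above (illustrative, not part of the original source):
// with reference = 8589930000 (close to 2^33) and a wrapped value = 100, reference > value
// selects offset = +2^33; the first iteration sees |100 - 8589930000| > 2^32 and adds
// 8589934592, giving 8589934692, which is now within 2^32 of the reference, so that value is
// returned.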
21909
21910function findKeyframeIndex(samples) {
21911 for (var i = 0; i < samples.length; i++) {
21912 if (samples[i].key) {
21913 return i;
21914 }
21915 }
21916
21917 return -1;
21918}
21919
21920var Mp4Sample = function Mp4Sample(isKeyframe, duration, size, cts) {
21921 this.size = void 0;
21922 this.duration = void 0;
21923 this.cts = void 0;
21924 this.flags = void 0;
21925 this.duration = duration;
21926 this.size = size;
21927 this.cts = cts;
21928 this.flags = new Mp4SampleFlags(isKeyframe);
21929};
21930
21931var Mp4SampleFlags = function Mp4SampleFlags(isKeyframe) {
21932 this.isLeading = 0;
21933 this.isDependedOn = 0;
21934 this.hasRedundancy = 0;
21935 this.degradPrio = 0;
21936 this.dependsOn = 1;
21937 this.isNonSync = 1;
21938 this.dependsOn = isKeyframe ? 2 : 1;
21939 this.isNonSync = isKeyframe ? 0 : 1;
21940};
21941
21942/***/ }),
21943
21944/***/ "./src/remux/passthrough-remuxer.ts":
21945/*!******************************************!*\
21946 !*** ./src/remux/passthrough-remuxer.ts ***!
21947 \******************************************/
21948/*! exports provided: default */
21949/***/ (function(module, __webpack_exports__, __webpack_require__) {
21950__webpack_require__.r(__webpack_exports__);
21951/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
21952/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
21953/* harmony import */ var _loader_fragment__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../loader/fragment */ "./src/loader/fragment.ts");
21954/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
21955
21956
21957
21958
21959
21960
21961var PassThroughRemuxer = /*#__PURE__*/function () {
21962 function PassThroughRemuxer() {
21963 this.emitInitSegment = false;
21964 this.audioCodec = void 0;
21965 this.videoCodec = void 0;
21966 this.initData = void 0;
21967 this.initPTS = void 0;
21968 this.initTracks = void 0;
21969 this.lastEndDTS = null;
21970 }
21971
21972 var _proto = PassThroughRemuxer.prototype;
21973
21974 _proto.destroy = function destroy() {};
21975
21976 _proto.resetTimeStamp = function resetTimeStamp(defaultInitPTS) {
21977 this.initPTS = defaultInitPTS;
21978 this.lastEndDTS = null;
21979 };
21980
21981 _proto.resetNextTimestamp = function resetNextTimestamp() {
21982 this.lastEndDTS = null;
21983 };
21984
21985 _proto.resetInitSegment = function resetInitSegment(initSegment, audioCodec, videoCodec) {
21986 this.audioCodec = audioCodec;
21987 this.videoCodec = videoCodec;
21988 this.generateInitSegment(initSegment);
21989 this.emitInitSegment = true;
21990 };
21991
21992 _proto.generateInitSegment = function generateInitSegment(initSegment) {
21993 var audioCodec = this.audioCodec,
21994 videoCodec = this.videoCodec;
21995
21996 if (!initSegment || !initSegment.byteLength) {
21997 this.initTracks = undefined;
21998 this.initData = undefined;
21999 return;
22000 }
22001
22002 var initData = this.initData = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["parseInitSegment"])(initSegment); // Get codec from initSegment or fallback to default
22003
22004 if (!audioCodec) {
22005 audioCodec = getParsedTrackCodec(initData.audio, _loader_fragment__WEBPACK_IMPORTED_MODULE_2__["ElementaryStreamTypes"].AUDIO);
22006 }
22007
22008 if (!videoCodec) {
22009 videoCodec = getParsedTrackCodec(initData.video, _loader_fragment__WEBPACK_IMPORTED_MODULE_2__["ElementaryStreamTypes"].VIDEO);
22010 }
22011
22012 var tracks = {};
22013
22014 if (initData.audio && initData.video) {
22015 tracks.audiovideo = {
22016 container: 'video/mp4',
22017 codec: audioCodec + ',' + videoCodec,
22018 initSegment: initSegment,
22019 id: 'main'
22020 };
22021 } else if (initData.audio) {
22022 tracks.audio = {
22023 container: 'audio/mp4',
22024 codec: audioCodec,
22025 initSegment: initSegment,
22026 id: 'audio'
22027 };
22028 } else if (initData.video) {
22029 tracks.video = {
22030 container: 'video/mp4',
22031 codec: videoCodec,
22032 initSegment: initSegment,
22033 id: 'main'
22034 };
22035 } else {
22036 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('[passthrough-remuxer.ts]: initSegment does not contain moov or trak boxes.');
22037 }
22038
22039 this.initTracks = tracks;
22040 };
22041
22042 _proto.remux = function remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset) {
22043 var initPTS = this.initPTS,
22044 lastEndDTS = this.lastEndDTS;
22045 var result = {
22046 audio: undefined,
22047 video: undefined,
22048 text: textTrack,
22049 id3: id3Track,
22050 initSegment: undefined
22051 }; // If we haven't yet set a lastEndDTS, or it was reset, set it to the provided timeOffset. We want to use the
22052 // lastEndDTS over timeOffset whenever possible; during progressive playback, the media source will not update
22053 // the media duration (which is what timeOffset is provided as) before we need to process the next chunk.
22054
22055 if (!Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(lastEndDTS)) {
22056 lastEndDTS = this.lastEndDTS = timeOffset || 0;
22057 } // The binary segment data is added to the videoTrack in the mp4demuxer. We don't check to see if the data is only
22058 // audio or video (or both); adding it to video was an arbitrary choice.
22059
22060
22061 var data = videoTrack.samples;
22062
22063 if (!data || !data.length) {
22064 return result;
22065 }
22066
22067 var initSegment = {
22068 initPTS: undefined,
22069 timescale: 1
22070 };
22071 var initData = this.initData;
22072
22073 if (!initData || !initData.length) {
22074 this.generateInitSegment(data);
22075 initData = this.initData;
22076 }
22077
22078 if (!initData || !initData.length) {
22079 // We can't remux if the initSegment could not be generated
22080 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('[passthrough-remuxer.ts]: Failed to generate initSegment.');
22081 return result;
22082 }
22083
22084 if (this.emitInitSegment) {
22085 initSegment.tracks = this.initTracks;
22086 this.emitInitSegment = false;
22087 }
22088
22089 if (!Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(initPTS)) {
22090 this.initPTS = initSegment.initPTS = initPTS = computeInitPTS(initData, data, lastEndDTS);
22091 }
22092
22093 var duration = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["getDuration"])(data, initData);
22094 var startDTS = lastEndDTS;
22095 var endDTS = duration + startDTS;
22096 Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["offsetStartDTS"])(initData, data, initPTS);
22097
22098 if (duration > 0) {
22099 this.lastEndDTS = endDTS;
22100 } else {
22101 _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('Duration parsed from mp4 should be greater than zero');
22102 this.resetNextTimestamp();
22103 }
22104
22105 var hasAudio = !!initData.audio;
22106 var hasVideo = !!initData.video;
22107 var type = '';
22108
22109 if (hasAudio) {
22110 type += 'audio';
22111 }
22112
22113 if (hasVideo) {
22114 type += 'video';
22115 }
22116
22117 var track = {
22118 data1: data,
22119 startPTS: startDTS,
22120 startDTS: startDTS,
22121 endPTS: endDTS,
22122 endDTS: endDTS,
22123 type: type,
22124 hasAudio: hasAudio,
22125 hasVideo: hasVideo,
22126 nb: 1,
22127 dropped: 0
22128 };
22129 result.audio = track.type === 'audio' ? track : undefined;
22130 result.video = track.type !== 'audio' ? track : undefined;
22131 result.text = textTrack;
22132 result.id3 = id3Track;
22133 result.initSegment = initSegment;
22134 return result;
22135 };
22136
22137 return PassThroughRemuxer;
22138}();
22139
22140var computeInitPTS = function computeInitPTS(initData, data, timeOffset) {
22141 return Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["getStartDTS"])(initData, data) - timeOffset;
22142};
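// Illustrative example of the initPTS alignment above (not part of the original source): if the
// fragment's earliest decode time parses to 100 while lastEndDTS says playback should resume at
// 25, computeInitPTS returns 75, and offsetStartDTS later shifts the fragment's decode times
// back by that amount so it lines up with the expected position on the timeline.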
22143
22144function getParsedTrackCodec(track, type) {
22145 var parsedCodec = track === null || track === void 0 ? void 0 : track.codec;
22146
22147 if (parsedCodec && parsedCodec.length > 4) {
22148 return parsedCodec;
22149 } // Since mp4-tools cannot parse full codec string (see 'TODO: Parse codec details'... in mp4-tools)
22150 // Provide defaults based on codec type
22151 // This allows for some playback of some fmp4 playlists without CODECS defined in manifest
22152
22153
22154 if (parsedCodec === 'hvc1') {
22155 return 'hvc1.1.c.L120.90';
22156 }
22157
22158 if (parsedCodec === 'av01') {
22159 return 'av01.0.04M.08';
22160 }
22161
22162 if (parsedCodec === 'avc1' || type === _loader_fragment__WEBPACK_IMPORTED_MODULE_2__["ElementaryStreamTypes"].VIDEO) {
22163 return 'avc1.42e01e';
22164 }
22165
22166 return 'mp4a.40.5';
22167}
22168
22169/* harmony default export */ __webpack_exports__["default"] = (PassThroughRemuxer);
22170
22171/***/ }),
22172
22173/***/ "./src/task-loop.ts":
22174/*!**************************!*\
22175 !*** ./src/task-loop.ts ***!
22176 \**************************/
22177/*! exports provided: default */
22178/***/ (function(module, __webpack_exports__, __webpack_require__) {
22179__webpack_require__.r(__webpack_exports__);
22180/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return TaskLoop; });
22181/**
22182 * Sub-class specialization of EventHandler base class.
22183 *
22184 * TaskLoop allows a task function to be scheduled for execution (optionally repeatedly) on the main loop,
22185 * scheduled asynchronously, avoiding recursive calls in the same tick.
22186 *
22187 * The task itself is implemented in `doTick`. It can be requested and called for single execution
22188 * using the `tick` method.
22189 *
22190 * It is guaranteed that the task execution method (`tick`) only gets called once per main loop "tick",
22191 * no matter how often it gets requested for execution. Execution in further ticks will be scheduled accordingly.
22192 *
22193 * If further execution requests have already been scheduled on the next tick, it can be checked with `hasNextTick`,
22194 * and cancelled with `clearNextTick`.
22195 *
22196 * The task can be scheduled as an interval repeatedly with a period as parameter (see `setInterval`, `clearInterval`).
22197 *
22198 * Sub-classes need to implement the `doTick` method which will effectively have the task execution routine.
22199 *
22200 * Further explanations:
22201 *
22202 * The base class has a `tick` method that will schedule the doTick call. It may be called synchronously
22203 * only for a stack depth of one. On re-entrant calls, subsequent calls are scheduled for the next main loop ticks.
22204 *
22205 * When the task execution (`tick` method) is called in a re-entrant way, this is detected and
22206 * task execution is limited to exactly one per call stack, scheduling/postponing further
22207 * task processing on the next main loop iteration (also known as "next tick" in Node/JS runtime lingo).
22208 */
22209var TaskLoop = /*#__PURE__*/function () {
22210 function TaskLoop() {
22211 this._boundTick = void 0;
22212 this._tickTimer = null;
22213 this._tickInterval = null;
22214 this._tickCallCount = 0;
22215 this._boundTick = this.tick.bind(this);
22216 }
22217
22218 var _proto = TaskLoop.prototype;
22219
22220 _proto.destroy = function destroy() {
22221 this.onHandlerDestroying();
22222 this.onHandlerDestroyed();
22223 };
22224
22225 _proto.onHandlerDestroying = function onHandlerDestroying() {
22226 // clear all timers before unregistering from event bus
22227 this.clearNextTick();
22228 this.clearInterval();
22229 };
22230
22231 _proto.onHandlerDestroyed = function onHandlerDestroyed() {}
22232 /**
22233 * @returns {boolean}
22234 */
22235 ;
22236
22237 _proto.hasInterval = function hasInterval() {
22238 return !!this._tickInterval;
22239 }
22240 /**
22241 * @returns {boolean}
22242 */
22243 ;
22244
22245 _proto.hasNextTick = function hasNextTick() {
22246 return !!this._tickTimer;
22247 }
22248 /**
22249 * @param {number} millis Interval time (ms)
22250 * @returns {boolean} True when interval has been scheduled, false when already scheduled (no effect)
22251 */
22252 ;
22253
22254 _proto.setInterval = function setInterval(millis) {
22255 if (!this._tickInterval) {
22256 this._tickInterval = self.setInterval(this._boundTick, millis);
22257 return true;
22258 }
22259
22260 return false;
22261 }
22262 /**
22263 * @returns {boolean} True when interval was cleared, false when none was set (no effect)
22264 */
22265 ;
22266
22267 _proto.clearInterval = function clearInterval() {
22268 if (this._tickInterval) {
22269 self.clearInterval(this._tickInterval);
22270 this._tickInterval = null;
22271 return true;
22272 }
22273
22274 return false;
22275 }
22276 /**
22277 * @returns {boolean} True when timeout was cleared, false when none was set (no effect)
22278 */
22279 ;
22280
22281 _proto.clearNextTick = function clearNextTick() {
22282 if (this._tickTimer) {
22283 self.clearTimeout(this._tickTimer);
22284 this._tickTimer = null;
22285 return true;
22286 }
22287
22288 return false;
22289 }
22290 /**
22291 * Will call the subclass doTick implementation in this main loop tick
22292 * or in the next one (via setTimeout(,0)) in case it has already been called
22293 * in this tick (in case this is a re-entrant call).
22294 */
22295 ;
22296
22297 _proto.tick = function tick() {
22298 this._tickCallCount++;
22299
22300 if (this._tickCallCount === 1) {
22301 this.doTick(); // re-entrant call to tick from previous doTick call stack
22302 // -> schedule a call on the next main loop iteration to process this task processing request
22303
22304 if (this._tickCallCount > 1) {
22305 // make sure only one timer exists at any time at max
22306 this.tickImmediate();
22307 }
22308
22309 this._tickCallCount = 0;
22310 }
22311 };
22312
22313 _proto.tickImmediate = function tickImmediate() {
22314 this.clearNextTick();
22315 this._tickTimer = self.setTimeout(this._boundTick, 0);
22316 }
22317 /**
22318 * For subclass to implement task logic
22319 * @abstract
22320 */
22321 ;
22322
22323 _proto.doTick = function doTick() {};
22324
22325 return TaskLoop;
22326}();
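// --- Usage sketch (illustrative only, not part of the original bundle) -------
// A consumer subclasses TaskLoop and implements doTick(); work can then be
// requested on demand with tick() (de-duplicated per main-loop tick) or run
// periodically with setInterval(ms). The subclass below is declared but never
// instantiated here.
function ExampleLoop() {
  TaskLoop.call(this);
}
ExampleLoop.prototype = Object.create(TaskLoop.prototype);
ExampleLoop.prototype.constructor = ExampleLoop;
ExampleLoop.prototype.doTick = function doTick() {
  // periodic or on-demand work goes here (e.g. polling some buffer state)
};
// var loop = new ExampleLoop();
// loop.setInterval(100); // run doTick() every 100 ms
// loop.tick();           // or request a single, de-duplicated execution
// loop.destroy();        // clears any pending timer and interval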
22327
22328
22329
22330/***/ }),
22331
22332/***/ "./src/types/cmcd.ts":
22333/*!***************************!*\
22334 !*** ./src/types/cmcd.ts ***!
22335 \***************************/
22336/*! exports provided: CMCDVersion, CMCDObjectType, CMCDStreamingFormat, CMCDStreamType */
22337/***/ (function(module, __webpack_exports__, __webpack_require__) {
22338__webpack_require__.r(__webpack_exports__);
22339/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "CMCDVersion", function() { return CMCDVersion; });
22340/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "CMCDObjectType", function() { return CMCDObjectType; });
22341/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "CMCDStreamingFormat", function() { return CMCDStreamingFormat; });
22342/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "CMCDStreamType", function() { return CMCDStreamType; });
22343/**
22344 * CMCD spec version
22345 */
22346var CMCDVersion = 1;
22347/**
22348 * CMCD Object Type
22349 */
22350
22351var CMCDObjectType;
22352/**
22353 * CMCD Streaming Format
22354 */
22355
22356(function (CMCDObjectType) {
22357 CMCDObjectType["MANIFEST"] = "m";
22358 CMCDObjectType["AUDIO"] = "a";
22359 CMCDObjectType["VIDEO"] = "v";
22360 CMCDObjectType["MUXED"] = "av";
22361 CMCDObjectType["INIT"] = "i";
22362 CMCDObjectType["CAPTION"] = "c";
22363 CMCDObjectType["TIMED_TEXT"] = "tt";
22364 CMCDObjectType["KEY"] = "k";
22365 CMCDObjectType["OTHER"] = "o";
22366})(CMCDObjectType || (CMCDObjectType = {}));
22367
22368var CMCDStreamingFormat;
22369/**
22370 * CMCD Streaming Type
22371 */
22372
22373(function (CMCDStreamingFormat) {
22374 CMCDStreamingFormat["DASH"] = "d";
22375 CMCDStreamingFormat["HLS"] = "h";
22376 CMCDStreamingFormat["SMOOTH"] = "s";
22377 CMCDStreamingFormat["OTHER"] = "o";
22378})(CMCDStreamingFormat || (CMCDStreamingFormat = {}));
22379
22380var CMCDStreamType;
22381/**
22382 * CMCD Headers
22383 */
22384
22385(function (CMCDStreamType) {
22386 CMCDStreamType["VOD"] = "v";
22387 CMCDStreamType["LIVE"] = "l";
22388})(CMCDStreamType || (CMCDStreamType = {}));
22389
22390/***/ }),
22391
22392/***/ "./src/types/level.ts":
22393/*!****************************!*\
22394 !*** ./src/types/level.ts ***!
22395 \****************************/
22396/*! exports provided: HlsSkip, getSkipValue, HlsUrlParameters, Level */
22397/***/ (function(module, __webpack_exports__, __webpack_require__) {
22398__webpack_require__.r(__webpack_exports__);
22399/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "HlsSkip", function() { return HlsSkip; });
22400/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getSkipValue", function() { return getSkipValue; });
22401/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "HlsUrlParameters", function() { return HlsUrlParameters; });
22402/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "Level", function() { return Level; });
22403function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
22404
22405function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
22406
22407var HlsSkip;
22408
22409(function (HlsSkip) {
22410 HlsSkip["No"] = "";
22411 HlsSkip["Yes"] = "YES";
22412 HlsSkip["v2"] = "v2";
22413})(HlsSkip || (HlsSkip = {}));
22414
22415function getSkipValue(details, msn) {
22416 var canSkipUntil = details.canSkipUntil,
22417 canSkipDateRanges = details.canSkipDateRanges,
22418 endSN = details.endSN;
22419 var snChangeGoal = msn !== undefined ? msn - endSN : 0;
22420
22421 if (canSkipUntil && snChangeGoal < canSkipUntil) {
22422 if (canSkipDateRanges) {
22423 return HlsSkip.v2;
22424 }
22425
22426 return HlsSkip.Yes;
22427 }
22428
22429 return HlsSkip.No;
22430}
22431var HlsUrlParameters = /*#__PURE__*/function () {
22432 function HlsUrlParameters(msn, part, skip) {
22433 this.msn = void 0;
22434 this.part = void 0;
22435 this.skip = void 0;
22436 this.msn = msn;
22437 this.part = part;
22438 this.skip = skip;
22439 }
22440
22441 var _proto = HlsUrlParameters.prototype;
22442
22443 _proto.addDirectives = function addDirectives(uri) {
22444 var url = new self.URL(uri);
22445
22446 if (this.msn !== undefined) {
22447 url.searchParams.set('_HLS_msn', this.msn.toString());
22448 }
22449
22450 if (this.part !== undefined) {
22451 url.searchParams.set('_HLS_part', this.part.toString());
22452 }
22453
22454 if (this.skip) {
22455 url.searchParams.set('_HLS_skip', this.skip);
22456 }
22457
22458 return url.toString();
22459 };
22460
22461 return HlsUrlParameters;
22462}();
22463var Level = /*#__PURE__*/function () {
22464 function Level(data) {
22465 this.attrs = void 0;
22466 this.audioCodec = void 0;
22467 this.bitrate = void 0;
22468 this.codecSet = void 0;
22469 this.height = void 0;
22470 this.id = void 0;
22471 this.name = void 0;
22472 this.videoCodec = void 0;
22473 this.width = void 0;
22474 this.unknownCodecs = void 0;
22475 this.audioGroupIds = void 0;
22476 this.details = void 0;
22477 this.fragmentError = 0;
22478 this.loadError = 0;
22479 this.loaded = void 0;
22480 this.realBitrate = 0;
22481 this.textGroupIds = void 0;
22482 this.url = void 0;
22483 this._urlId = 0;
22484 this.url = [data.url];
22485 this.attrs = data.attrs;
22486 this.bitrate = data.bitrate;
22487
22488 if (data.details) {
22489 this.details = data.details;
22490 }
22491
22492 this.id = data.id || 0;
22493 this.name = data.name;
22494 this.width = data.width || 0;
22495 this.height = data.height || 0;
22496 this.audioCodec = data.audioCodec;
22497 this.videoCodec = data.videoCodec;
22498 this.unknownCodecs = data.unknownCodecs;
22499 this.codecSet = [data.videoCodec, data.audioCodec].filter(function (c) {
22500 return c;
22501 }).join(',').replace(/\.[^.,]+/g, '');
22502 }
22503
22504 _createClass(Level, [{
22505 key: "maxBitrate",
22506 get: function get() {
22507 return Math.max(this.realBitrate, this.bitrate);
22508 }
22509 }, {
22510 key: "uri",
22511 get: function get() {
22512 return this.url[this._urlId] || '';
22513 }
22514 }, {
22515 key: "urlId",
22516 get: function get() {
22517 return this._urlId;
22518 },
22519 set: function set(value) {
22520 var newValue = value % this.url.length;
22521
22522 if (this._urlId !== newValue) {
22523 this.details = undefined;
22524 this._urlId = newValue;
22525 }
22526 }
22527 }]);
22528
22529 return Level;
22530}();
22531
22532/***/ }),
22533
22534/***/ "./src/types/loader.ts":
22535/*!*****************************!*\
22536 !*** ./src/types/loader.ts ***!
22537 \*****************************/
22538/*! exports provided: PlaylistContextType, PlaylistLevelType */
22539/***/ (function(module, __webpack_exports__, __webpack_require__) {
22540__webpack_require__.r(__webpack_exports__);
22541/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "PlaylistContextType", function() { return PlaylistContextType; });
22542/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "PlaylistLevelType", function() { return PlaylistLevelType; });
22543var PlaylistContextType;
22544
22545(function (PlaylistContextType) {
22546 PlaylistContextType["MANIFEST"] = "manifest";
22547 PlaylistContextType["LEVEL"] = "level";
22548 PlaylistContextType["AUDIO_TRACK"] = "audioTrack";
22549 PlaylistContextType["SUBTITLE_TRACK"] = "subtitleTrack";
22550})(PlaylistContextType || (PlaylistContextType = {}));
22551
22552var PlaylistLevelType;
22553
22554(function (PlaylistLevelType) {
22555 PlaylistLevelType["MAIN"] = "main";
22556 PlaylistLevelType["AUDIO"] = "audio";
22557 PlaylistLevelType["SUBTITLE"] = "subtitle";
22558})(PlaylistLevelType || (PlaylistLevelType = {}));
22559
22560/***/ }),
22561
22562/***/ "./src/types/transmuxer.ts":
22563/*!*********************************!*\
22564 !*** ./src/types/transmuxer.ts ***!
22565 \*********************************/
22566/*! exports provided: ChunkMetadata */
22567/***/ (function(module, __webpack_exports__, __webpack_require__) {
22568__webpack_require__.r(__webpack_exports__);
22569/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ChunkMetadata", function() { return ChunkMetadata; });
22570var ChunkMetadata = function ChunkMetadata(level, sn, id, size, part, partial) {
22571 if (size === void 0) {
22572 size = 0;
22573 }
22574
22575 if (part === void 0) {
22576 part = -1;
22577 }
22578
22579 if (partial === void 0) {
22580 partial = false;
22581 }
22582
22583 this.level = void 0;
22584 this.sn = void 0;
22585 this.part = void 0;
22586 this.id = void 0;
22587 this.size = void 0;
22588 this.partial = void 0;
22589 this.transmuxing = getNewPerformanceTiming();
22590 this.buffering = {
22591 audio: getNewPerformanceTiming(),
22592 video: getNewPerformanceTiming(),
22593 audiovideo: getNewPerformanceTiming()
22594 };
22595 this.level = level;
22596 this.sn = sn;
22597 this.id = id;
22598 this.size = size;
22599 this.part = part;
22600 this.partial = partial;
22601};
22602
22603function getNewPerformanceTiming() {
22604 return {
22605 start: 0,
22606 executeStart: 0,
22607 executeEnd: 0,
22608 end: 0
22609 };
22610}
22611
22612/***/ }),
22613
22614/***/ "./src/utils/attr-list.ts":
22615/*!********************************!*\
22616 !*** ./src/utils/attr-list.ts ***!
22617 \********************************/
22618/*! exports provided: AttrList */
22619/***/ (function(module, __webpack_exports__, __webpack_require__) {
22620__webpack_require__.r(__webpack_exports__);
22621/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "AttrList", function() { return AttrList; });
22622var DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/; // eslint-disable-line no-useless-escape
22623
22624var ATTR_LIST_REGEX = /\s*(.+?)\s*=((?:\".*?\")|.*?)(?:,|$)/g; // eslint-disable-line no-useless-escape
22625// adapted from https://github.com/kanongil/node-m3u8parse/blob/master/attrlist.js
22626
22627var AttrList = /*#__PURE__*/function () {
22628 function AttrList(attrs) {
22629 if (typeof attrs === 'string') {
22630 attrs = AttrList.parseAttrList(attrs);
22631 }
22632
22633 for (var attr in attrs) {
22634 if (attrs.hasOwnProperty(attr)) {
22635 this[attr] = attrs[attr];
22636 }
22637 }
22638 }
22639
22640 var _proto = AttrList.prototype;
22641
22642 _proto.decimalInteger = function decimalInteger(attrName) {
22643 var intValue = parseInt(this[attrName], 10);
22644
22645 if (intValue > Number.MAX_SAFE_INTEGER) {
22646 return Infinity;
22647 }
22648
22649 return intValue;
22650 };
22651
22652 _proto.hexadecimalInteger = function hexadecimalInteger(attrName) {
22653 if (this[attrName]) {
22654 var stringValue = (this[attrName] || '0x').slice(2);
22655 stringValue = (stringValue.length & 1 ? '0' : '') + stringValue;
22656 var value = new Uint8Array(stringValue.length / 2);
22657
22658 for (var i = 0; i < stringValue.length / 2; i++) {
22659 value[i] = parseInt(stringValue.slice(i * 2, i * 2 + 2), 16);
22660 }
22661
22662 return value;
22663 } else {
22664 return null;
22665 }
22666 };
22667
22668 _proto.hexadecimalIntegerAsNumber = function hexadecimalIntegerAsNumber(attrName) {
22669 var intValue = parseInt(this[attrName], 16);
22670
22671 if (intValue > Number.MAX_SAFE_INTEGER) {
22672 return Infinity;
22673 }
22674
22675 return intValue;
22676 };
22677
22678 _proto.decimalFloatingPoint = function decimalFloatingPoint(attrName) {
22679 return parseFloat(this[attrName]);
22680 };
22681
22682 _proto.optionalFloat = function optionalFloat(attrName, defaultValue) {
22683 var value = this[attrName];
22684 return value ? parseFloat(value) : defaultValue;
22685 };
22686
22687 _proto.enumeratedString = function enumeratedString(attrName) {
22688 return this[attrName];
22689 };
22690
22691 _proto.bool = function bool(attrName) {
22692 return this[attrName] === 'YES';
22693 };
22694
22695 _proto.decimalResolution = function decimalResolution(attrName) {
22696 var res = DECIMAL_RESOLUTION_REGEX.exec(this[attrName]);
22697
22698 if (res === null) {
22699 return undefined;
22700 }
22701
22702 return {
22703 width: parseInt(res[1], 10),
22704 height: parseInt(res[2], 10)
22705 };
22706 };
22707
22708 AttrList.parseAttrList = function parseAttrList(input) {
22709 var match;
22710 var attrs = {};
22711 var quote = '"';
22712 ATTR_LIST_REGEX.lastIndex = 0;
22713
22714 while ((match = ATTR_LIST_REGEX.exec(input)) !== null) {
22715 var value = match[2];
22716
22717 if (value.indexOf(quote) === 0 && value.lastIndexOf(quote) === value.length - 1) {
22718 value = value.slice(1, -1);
22719 }
22720
22721 attrs[match[1]] = value;
22722 }
22723
22724 return attrs;
22725 };
22726
22727 return AttrList;
22728}();
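// --- Usage sketch (illustrative only, not part of the original bundle) -------
// Parsing a typical EXT-X-STREAM-INF attribute list; the attribute string here
// is a made-up example. Declared for documentation purposes and never called.
function exampleParseStreamInf() {
  var attrs = new AttrList('BANDWIDTH=1280000,RESOLUTION=640x360,CODECS="avc1.42e01e,mp4a.40.2"');
  return {
    bandwidth: attrs.decimalInteger('BANDWIDTH'),      // 1280000
    resolution: attrs.decimalResolution('RESOLUTION'), // { width: 640, height: 360 }
    codecs: attrs.enumeratedString('CODECS')           // 'avc1.42e01e,mp4a.40.2'
  };
}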
22729
22730/***/ }),
22731
22732/***/ "./src/utils/binary-search.ts":
22733/*!************************************!*\
22734 !*** ./src/utils/binary-search.ts ***!
22735 \************************************/
22736/*! exports provided: default */
22737/***/ (function(module, __webpack_exports__, __webpack_require__) {
22738__webpack_require__.r(__webpack_exports__);
22739var BinarySearch = {
22740 /**
22741 * Searches for an item in an array which matches a certain condition.
22742 * This requires the condition to only match one item in the array,
22743 * and for the array to be ordered.
22744 *
22745 * @param {Array<T>} list The array to search.
22746 * @param {BinarySearchComparison<T>} comparisonFn
22747 * Called and provided a candidate item as the first argument.
22748 * Should return:
22749 * > -1 if the item should be located at a lower index than the provided item.
22750 * > 1 if the item should be located at a higher index than the provided item.
22751 * > 0 if the item is the item you're looking for.
22752 *
22753 * @return {T | null} The object if it is found or null otherwise.
22754 */
22755 search: function search(list, comparisonFn) {
22756 var minIndex = 0;
22757 var maxIndex = list.length - 1;
22758 var currentIndex = null;
22759 var currentElement = null;
22760
22761 while (minIndex <= maxIndex) {
22762 currentIndex = (minIndex + maxIndex) / 2 | 0;
22763 currentElement = list[currentIndex];
22764 var comparisonResult = comparisonFn(currentElement);
22765
22766 if (comparisonResult > 0) {
22767 minIndex = currentIndex + 1;
22768 } else if (comparisonResult < 0) {
22769 maxIndex = currentIndex - 1;
22770 } else {
22771 return currentElement;
22772 }
22773 }
22774
22775 return null;
22776 }
22777};
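// --- Usage sketch (illustrative only, not part of the original bundle) -------
// Locating the entry of a sorted fragment list whose [start, start + duration)
// interval contains a playback position. The fragment shape is an assumption
// made for this example; the helper is never called by the bundle.
function exampleFindFragmentAt(sortedFragments, position) {
  return BinarySearch.search(sortedFragments, function (fragment) {
    if (position < fragment.start) {
      return -1; // sought item lies at a lower index
    }
    if (position >= fragment.start + fragment.duration) {
      return 1; // sought item lies at a higher index
    }
    return 0; // this fragment contains the position
  });
}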
22778/* harmony default export */ __webpack_exports__["default"] = (BinarySearch);
22779
22780/***/ }),
22781
22782/***/ "./src/utils/buffer-helper.ts":
22783/*!************************************!*\
22784 !*** ./src/utils/buffer-helper.ts ***!
22785 \************************************/
22786/*! exports provided: BufferHelper */
22787/***/ (function(module, __webpack_exports__, __webpack_require__) {
22788__webpack_require__.r(__webpack_exports__);
22789/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "BufferHelper", function() { return BufferHelper; });
22790/* harmony import */ var _logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./logger */ "./src/utils/logger.ts");
22791/**
22792 * @module BufferHelper
22793 *
22794 * Providing methods dealing with buffer length retrieval for example.
22795 *
22796 * In general, a helper around HTML5 MediaElement TimeRanges gathered from `buffered` property.
22797 *
22798 * Also @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/buffered
22799 */
22800
22801var noopBuffered = {
22802 length: 0,
22803 start: function start() {
22804 return 0;
22805 },
22806 end: function end() {
22807 return 0;
22808 }
22809};
22810var BufferHelper = /*#__PURE__*/function () {
22811 function BufferHelper() {}
22812
22813 /**
22814 * Return true if `media`'s buffered includes `position`
22815 * @param {Bufferable} media
22816 * @param {number} position
22817 * @returns {boolean}
22818 */
22819 BufferHelper.isBuffered = function isBuffered(media, position) {
22820 try {
22821 if (media) {
22822 var buffered = BufferHelper.getBuffered(media);
22823
22824 for (var i = 0; i < buffered.length; i++) {
22825 if (position >= buffered.start(i) && position <= buffered.end(i)) {
22826 return true;
22827 }
22828 }
22829 }
22830 } catch (error) {// this is to catch
22831 // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
22832 // This SourceBuffer has been removed from the parent media source
22833 }
22834
22835 return false;
22836 };
22837
22838 BufferHelper.bufferInfo = function bufferInfo(media, pos, maxHoleDuration) {
22839 try {
22840 if (media) {
22841 var vbuffered = BufferHelper.getBuffered(media);
22842 var buffered = [];
22843 var i;
22844
22845 for (i = 0; i < vbuffered.length; i++) {
22846 buffered.push({
22847 start: vbuffered.start(i),
22848 end: vbuffered.end(i)
22849 });
22850 }
22851
22852 return this.bufferedInfo(buffered, pos, maxHoleDuration);
22853 }
22854 } catch (error) {// this is to catch
22855 // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
22856 // This SourceBuffer has been removed from the parent media source
22857 }
22858
22859 return {
22860 len: 0,
22861 start: pos,
22862 end: pos,
22863 nextStart: undefined
22864 };
22865 };
22866
22867 BufferHelper.bufferedInfo = function bufferedInfo(buffered, pos, maxHoleDuration) {
22868 pos = Math.max(0, pos); // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
22869
22870 buffered.sort(function (a, b) {
22871 var diff = a.start - b.start;
22872
22873 if (diff) {
22874 return diff;
22875 } else {
22876 return b.end - a.end;
22877 }
22878 });
22879 var buffered2 = [];
22880
22881 if (maxHoleDuration) {
22882 // there might be some small holes between buffer time range
22883 // consider that holes smaller than maxHoleDuration are irrelevant and build another
22884 // buffer time range representations that discards those holes
22885 for (var i = 0; i < buffered.length; i++) {
22886 var buf2len = buffered2.length;
22887
22888 if (buf2len) {
22889 var buf2end = buffered2[buf2len - 1].end; // if small hole (value between 0 and maxHoleDuration) or overlapping (negative)
22890
22891 if (buffered[i].start - buf2end < maxHoleDuration) {
22892 // merge overlapping time ranges
22893 // update lastRange.end only if smaller than item.end
22894 // e.g. [ 1, 15] with [ 2,8] => [ 1,15] (no need to modify lastRange.end)
22895 // whereas [ 1, 8] with [ 2,15] => [ 1,15] ( lastRange should switch from [1,8] to [1,15])
22896 if (buffered[i].end > buf2end) {
22897 buffered2[buf2len - 1].end = buffered[i].end;
22898 }
22899 } else {
22900 // big hole
22901 buffered2.push(buffered[i]);
22902 }
22903 } else {
22904 // first value
22905 buffered2.push(buffered[i]);
22906 }
22907 }
22908 } else {
22909 buffered2 = buffered;
22910 }
22911
22912 var bufferLen = 0; // bufferStartNext can possibly be undefined based on the conditional logic below
22913
22914 var bufferStartNext; // bufferStart and bufferEnd are buffer boundaries around current video position
22915
22916 var bufferStart = pos;
22917 var bufferEnd = pos;
22918
22919 for (var _i = 0; _i < buffered2.length; _i++) {
22920 var start = buffered2[_i].start;
22921 var end = buffered2[_i].end; // logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i));
22922
22923 if (pos + maxHoleDuration >= start && pos < end) {
22924 // play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
22925 bufferStart = start;
22926 bufferEnd = end;
22927 bufferLen = bufferEnd - pos;
22928 } else if (pos + maxHoleDuration < start) {
22929 bufferStartNext = start;
22930 break;
22931 }
22932 }
22933
22934 return {
22935 len: bufferLen,
22936 start: bufferStart || 0,
22937 end: bufferEnd || 0,
22938 nextStart: bufferStartNext
22939 };
22940 }
22941 /**
22942 * Safe method to get buffered property.
22943 * SourceBuffer.buffered may throw if SourceBuffer is removed from its MediaSource
22944 */
22945 ;
22946
22947 BufferHelper.getBuffered = function getBuffered(media) {
22948 try {
22949 return media.buffered;
22950 } catch (e) {
22951 _logger__WEBPACK_IMPORTED_MODULE_0__["logger"].log('failed to get media.buffered', e);
22952 return noopBuffered;
22953 }
22954 };
22955
22956 return BufferHelper;
22957}();
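// --- Usage sketch (illustrative only, not part of the original bundle) -------
// Reporting how many seconds are buffered ahead of a media element's current
// playback position, treating holes shorter than 0.5 s as contiguous, and
// checking whether a given position is buffered at all. Declared for
// documentation purposes; never called here.
function exampleBufferedAhead(mediaElement) {
  var info = BufferHelper.bufferInfo(mediaElement, mediaElement.currentTime, 0.5);
  return info.len; // seconds buffered forward from currentTime (0 when nothing is buffered)
}
function exampleIsPositionBuffered(mediaElement, position) {
  return BufferHelper.isBuffered(mediaElement, position); // true when position falls inside a buffered range
}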
22958
22959/***/ }),
22960
22961/***/ "./src/utils/cea-608-parser.ts":
22962/*!*************************************!*\
22963 !*** ./src/utils/cea-608-parser.ts ***!
22964 \*************************************/
22965/*! exports provided: Row, CaptionScreen, default */
22966/***/ (function(module, __webpack_exports__, __webpack_require__) {
22967__webpack_require__.r(__webpack_exports__);
22968/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "Row", function() { return Row; });
22969/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "CaptionScreen", function() { return CaptionScreen; });
22970/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
22971
22972/**
22973 *
22974 * This code was ported from the dash.js project at:
22975 * https://github.com/Dash-Industry-Forum/dash.js/blob/development/externals/cea608-parser.js
22976 * https://github.com/Dash-Industry-Forum/dash.js/commit/8269b26a761e0853bb21d78780ed945144ecdd4d#diff-71bc295a2d6b6b7093a1d3290d53a4b2
22977 *
22978 * The original copyright appears below:
22979 *
22980 * The copyright in this software is being made available under the BSD License,
22981 * included below. This software may be subject to other third party and contributor
22982 * rights, including patent rights, and no such rights are granted under this license.
22983 *
22984 * Copyright (c) 2015-2016, DASH Industry Forum.
22985 * All rights reserved.
22986 *
22987 * Redistribution and use in source and binary forms, with or without modification,
22988 * are permitted provided that the following conditions are met:
22989 * 1. Redistributions of source code must retain the above copyright notice, this
22990 * list of conditions and the following disclaimer.
22991 * * Redistributions in binary form must reproduce the above copyright notice,
22992 * this list of conditions and the following disclaimer in the documentation and/or
22993 * other materials provided with the distribution.
22994 * 2. Neither the name of Dash Industry Forum nor the names of its
22995 * contributors may be used to endorse or promote products derived from this software
22996 * without specific prior written permission.
22997 *
22998 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY
22999 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
23000 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
23001 * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
23002 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
23003 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
23004 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23005 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
23006 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
23007 * POSSIBILITY OF SUCH DAMAGE.
23008 */
23009
23010/**
23011 * Exceptions from regular ASCII. CodePoints are mapped to UTF-16 codes
23012 */
23013
23014var specialCea608CharsCodes = {
23015 0x2a: 0xe1,
23016 // lowercase a, acute accent
23017 0x5c: 0xe9,
23018 // lowercase e, acute accent
23019 0x5e: 0xed,
23020 // lowercase i, acute accent
23021 0x5f: 0xf3,
23022 // lowercase o, acute accent
23023 0x60: 0xfa,
23024 // lowercase u, acute accent
23025 0x7b: 0xe7,
23026 // lowercase c with cedilla
23027 0x7c: 0xf7,
23028 // division symbol
23029 0x7d: 0xd1,
23030 // uppercase N tilde
23031 0x7e: 0xf1,
23032 // lowercase n tilde
23033 0x7f: 0x2588,
23034 // Full block
23035 // THIS BLOCK INCLUDES THE 16 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS
23036 // THAT COME FROM HI BYTE=0x11 AND LOW BETWEEN 0x30 AND 0x3F
23037 // THIS MEANS THAT \x50 MUST BE ADDED TO THE VALUES
23038 0x80: 0xae,
23039 // Registered symbol (R)
23040 0x81: 0xb0,
23041 // degree sign
23042 0x82: 0xbd,
23043 // 1/2 symbol
23044 0x83: 0xbf,
23045 // Inverted (open) question mark
23046 0x84: 0x2122,
23047 // Trademark symbol (TM)
23048 0x85: 0xa2,
23049 // Cents symbol
23050 0x86: 0xa3,
23051 // Pounds sterling
23052 0x87: 0x266a,
23053 // Music eighth note
23054 0x88: 0xe0,
23055 // lowercase a, grave accent
23056 0x89: 0x20,
23057 // transparent space (regular)
23058 0x8a: 0xe8,
23059 // lowercase e, grave accent
23060 0x8b: 0xe2,
23061 // lowercase a, circumflex accent
23062 0x8c: 0xea,
23063 // lowercase e, circumflex accent
23064 0x8d: 0xee,
23065 // lowercase i, circumflex accent
23066 0x8e: 0xf4,
23067 // lowercase o, circumflex accent
23068 0x8f: 0xfb,
23069 // lowercase u, circumflex accent
23070 // THIS BLOCK INCLUDES THE 32 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS
23071 // THAT COME FROM HI BYTE=0x12 AND LOW BETWEEN 0x20 AND 0x3F
23072 0x90: 0xc1,
23073 // capital letter A with acute
23074 0x91: 0xc9,
23075 // capital letter E with acute
23076 0x92: 0xd3,
23077 // capital letter O with acute
23078 0x93: 0xda,
23079 // capital letter U with acute
23080 0x94: 0xdc,
23081 // capital letter U with diaeresis
23082 0x95: 0xfc,
23083 // lowercase letter U with diaeresis
23084 0x96: 0x2018,
23085 // opening single quote
23086 0x97: 0xa1,
23087 // inverted exclamation mark
23088 0x98: 0x2a,
23089 // asterisk
23090 0x99: 0x2019,
23091 // closing single quote
23092 0x9a: 0x2501,
23093 // box drawings heavy horizontal
23094 0x9b: 0xa9,
23095 // copyright sign
23096 0x9c: 0x2120,
23097 // Service mark
23098 0x9d: 0x2022,
23099 // (round) bullet
23100 0x9e: 0x201c,
23101 // Left double quotation mark
23102 0x9f: 0x201d,
23103 // Right double quotation mark
23104 0xa0: 0xc0,
23105 // uppercase A, grave accent
23106 0xa1: 0xc2,
23107 // uppercase A, circumflex
23108 0xa2: 0xc7,
23109 // uppercase C with cedilla
23110 0xa3: 0xc8,
23111 // uppercase E, grave accent
23112 0xa4: 0xca,
23113 // uppercase E, circumflex
23114 0xa5: 0xcb,
23115 // capital letter E with diaresis
23116 0xa6: 0xeb,
23117 // lowercase letter e with diaresis
23118 0xa7: 0xce,
23119 // uppercase I, circumflex
23120 0xa8: 0xcf,
23121 // uppercase I, with diaresis
23122 0xa9: 0xef,
23123 // lowercase i, with diaresis
23124 0xaa: 0xd4,
23125 // uppercase O, circumflex
23126 0xab: 0xd9,
23127 // uppercase U, grave accent
23128 0xac: 0xf9,
23129 // lowercase u, grave accent
23130 0xad: 0xdb,
23131 // uppercase U, circumflex
23132 0xae: 0xab,
23133 // left-pointing double angle quotation mark
23134 0xaf: 0xbb,
23135 // right-pointing double angle quotation mark
23136 // THIS BLOCK INCLUDES THE 32 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS
23137 // THAT COME FROM HI BYTE=0x13 AND LOW BETWEEN 0x20 AND 0x3F
23138 0xb0: 0xc3,
23139 // Uppercase A, tilde
23140 0xb1: 0xe3,
23141 // Lowercase a, tilde
23142 0xb2: 0xcd,
23143 // Uppercase I, acute accent
23144 0xb3: 0xcc,
23145 // Uppercase I, grave accent
23146 0xb4: 0xec,
23147 // Lowercase i, grave accent
23148 0xb5: 0xd2,
23149 // Uppercase O, grave accent
23150 0xb6: 0xf2,
23151 // Lowercase o, grave accent
23152 0xb7: 0xd5,
23153 // Uppercase O, tilde
23154 0xb8: 0xf5,
23155 // Lowercase o, tilde
23156 0xb9: 0x7b,
23157 // Open curly brace
23158 0xba: 0x7d,
23159 // Closing curly brace
23160 0xbb: 0x5c,
23161 // Backslash
23162 0xbc: 0x5e,
23163 // Caret
23164 0xbd: 0x5f,
23165 // Underscore
23166 0xbe: 0x7c,
23167 // Pipe (vertical line)
23168 0xbf: 0x223c,
23169 // Tilde operator
23170 0xc0: 0xc4,
23171 // Uppercase A, umlaut
23172 0xc1: 0xe4,
23173 // Lowercase A, umlaut
23174 0xc2: 0xd6,
23175 // Uppercase O, umlaut
23176 0xc3: 0xf6,
23177 // Lowercase o, umlaut
23178 0xc4: 0xdf,
23179 // Esszett (sharp S)
23180 0xc5: 0xa5,
23181 // Yen symbol
23182 0xc6: 0xa4,
23183 // Generic currency sign
23184 0xc7: 0x2503,
23185 // Box drawings heavy vertical
23186 0xc8: 0xc5,
23187 // Uppercase A, ring
23188 0xc9: 0xe5,
23189 // Lowercase A, ring
23190 0xca: 0xd8,
23191 // Uppercase O, stroke
23192 0xcb: 0xf8,
23193 // Lowercase o, stroke
23194 0xcc: 0x250f,
23195 // Box drawings heavy down and right
23196 0xcd: 0x2513,
23197 // Box drawings heavy down and left
23198 0xce: 0x2517,
23199 // Box drawings heavy up and right
23200 0xcf: 0x251b // Box drawings heavy up and left
23201
23202};
23203/**
23204 * Utils
23205 */
23206
23207var getCharForByte = function getCharForByte(_byte) {
23208 var charCode = _byte;
23209
23210 if (specialCea608CharsCodes.hasOwnProperty(_byte)) {
23211 charCode = specialCea608CharsCodes[_byte];
23212 }
23213
23214 return String.fromCharCode(charCode);
23215};
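// Quick illustration (not part of the original source): bytes absent from the
// table above pass through as plain ASCII, while bytes present in it are
// remapped, e.g. getCharForByte(0x41) === 'A' but getCharForByte(0x7e) === '\u00f1' (ñ).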
23216
23217var NR_ROWS = 15;
23218var NR_COLS = 100; // Tables to look up row from PAC data
23219
23220var rowsLowCh1 = {
23221 0x11: 1,
23222 0x12: 3,
23223 0x15: 5,
23224 0x16: 7,
23225 0x17: 9,
23226 0x10: 11,
23227 0x13: 12,
23228 0x14: 14
23229};
23230var rowsHighCh1 = {
23231 0x11: 2,
23232 0x12: 4,
23233 0x15: 6,
23234 0x16: 8,
23235 0x17: 10,
23236 0x13: 13,
23237 0x14: 15
23238};
23239var rowsLowCh2 = {
23240 0x19: 1,
23241 0x1a: 3,
23242 0x1d: 5,
23243 0x1e: 7,
23244 0x1f: 9,
23245 0x18: 11,
23246 0x1b: 12,
23247 0x1c: 14
23248};
23249var rowsHighCh2 = {
23250 0x19: 2,
23251 0x1a: 4,
23252 0x1d: 6,
23253 0x1e: 8,
23254 0x1f: 10,
23255 0x1b: 13,
23256 0x1c: 15
23257};
23258var backgroundColors = ['white', 'green', 'blue', 'cyan', 'red', 'yellow', 'magenta', 'black', 'transparent'];
23259var VerboseLevel;
23260
23261(function (VerboseLevel) {
23262 VerboseLevel[VerboseLevel["ERROR"] = 0] = "ERROR";
23263 VerboseLevel[VerboseLevel["TEXT"] = 1] = "TEXT";
23264 VerboseLevel[VerboseLevel["WARNING"] = 2] = "WARNING";
23265 VerboseLevel[VerboseLevel["INFO"] = 2] = "INFO";
23266 VerboseLevel[VerboseLevel["DEBUG"] = 3] = "DEBUG";
23267 VerboseLevel[VerboseLevel["DATA"] = 3] = "DATA";
23268})(VerboseLevel || (VerboseLevel = {}));
23269
23270var CaptionsLogger = /*#__PURE__*/function () {
23271 function CaptionsLogger() {
23272 this.time = null;
23273 this.verboseLevel = VerboseLevel.ERROR;
23274 }
23275
23276 var _proto = CaptionsLogger.prototype;
23277
23278 _proto.log = function log(severity, msg) {
23279 if (this.verboseLevel >= severity) {
23280 _utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].log(this.time + " [" + severity + "] " + msg);
23281 }
23282 };
23283
23284 return CaptionsLogger;
23285}();
23286
23287var numArrayToHexArray = function numArrayToHexArray(numArray) {
23288 var hexArray = [];
23289
23290 for (var j = 0; j < numArray.length; j++) {
23291 hexArray.push(numArray[j].toString(16));
23292 }
23293
23294 return hexArray;
23295};
23296
23297var PenState = /*#__PURE__*/function () {
23298 function PenState(foreground, underline, italics, background, flash) {
23299 this.foreground = void 0;
23300 this.underline = void 0;
23301 this.italics = void 0;
23302 this.background = void 0;
23303 this.flash = void 0;
23304 this.foreground = foreground || 'white';
23305 this.underline = underline || false;
23306 this.italics = italics || false;
23307 this.background = background || 'black';
23308 this.flash = flash || false;
23309 }
23310
23311 var _proto2 = PenState.prototype;
23312
23313 _proto2.reset = function reset() {
23314 this.foreground = 'white';
23315 this.underline = false;
23316 this.italics = false;
23317 this.background = 'black';
23318 this.flash = false;
23319 };
23320
23321 _proto2.setStyles = function setStyles(styles) {
23322 var attribs = ['foreground', 'underline', 'italics', 'background', 'flash'];
23323
23324 for (var i = 0; i < attribs.length; i++) {
23325 var style = attribs[i];
23326
23327 if (styles.hasOwnProperty(style)) {
23328 this[style] = styles[style];
23329 }
23330 }
23331 };
23332
23333 _proto2.isDefault = function isDefault() {
23334 return this.foreground === 'white' && !this.underline && !this.italics && this.background === 'black' && !this.flash;
23335 };
23336
23337 _proto2.equals = function equals(other) {
23338 return this.foreground === other.foreground && this.underline === other.underline && this.italics === other.italics && this.background === other.background && this.flash === other.flash;
23339 };
23340
23341 _proto2.copy = function copy(newPenState) {
23342 this.foreground = newPenState.foreground;
23343 this.underline = newPenState.underline;
23344 this.italics = newPenState.italics;
23345 this.background = newPenState.background;
23346 this.flash = newPenState.flash;
23347 };
23348
23349 _proto2.toString = function toString() {
23350 return 'color=' + this.foreground + ', underline=' + this.underline + ', italics=' + this.italics + ', background=' + this.background + ', flash=' + this.flash;
23351 };
23352
23353 return PenState;
23354}();
23355/**
23356 * Unicode character with styling and background.
23357 * @constructor
23358 */
23359
23360
23361var StyledUnicodeChar = /*#__PURE__*/function () {
23362 function StyledUnicodeChar(uchar, foreground, underline, italics, background, flash) {
23363 this.uchar = void 0;
23364 this.penState = void 0;
23365 this.uchar = uchar || ' '; // unicode character
23366
23367 this.penState = new PenState(foreground, underline, italics, background, flash);
23368 }
23369
23370 var _proto3 = StyledUnicodeChar.prototype;
23371
23372 _proto3.reset = function reset() {
23373 this.uchar = ' ';
23374 this.penState.reset();
23375 };
23376
23377 _proto3.setChar = function setChar(uchar, newPenState) {
23378 this.uchar = uchar;
23379 this.penState.copy(newPenState);
23380 };
23381
23382 _proto3.setPenState = function setPenState(newPenState) {
23383 this.penState.copy(newPenState);
23384 };
23385
23386 _proto3.equals = function equals(other) {
23387 return this.uchar === other.uchar && this.penState.equals(other.penState);
23388 };
23389
23390 _proto3.copy = function copy(newChar) {
23391 this.uchar = newChar.uchar;
23392 this.penState.copy(newChar.penState);
23393 };
23394
23395 _proto3.isEmpty = function isEmpty() {
23396 return this.uchar === ' ' && this.penState.isDefault();
23397 };
23398
23399 return StyledUnicodeChar;
23400}();
23401/**
23402 * CEA-608 row consisting of NR_COLS instances of StyledUnicodeChar.
23403 * @constructor
23404 */
23405
23406
23407var Row = /*#__PURE__*/function () {
23408 function Row(logger) {
23409 this.chars = void 0;
23410 this.pos = void 0;
23411 this.currPenState = void 0;
23412 this.cueStartTime = void 0;
23413 this.logger = void 0;
23414 this.chars = [];
23415
23416 for (var i = 0; i < NR_COLS; i++) {
23417 this.chars.push(new StyledUnicodeChar());
23418 }
23419
23420 this.logger = logger;
23421 this.pos = 0;
23422 this.currPenState = new PenState();
23423 }
23424
23425 var _proto4 = Row.prototype;
23426
23427 _proto4.equals = function equals(other) {
23428 var equal = true;
23429
23430 for (var i = 0; i < NR_COLS; i++) {
23431 if (!this.chars[i].equals(other.chars[i])) {
23432 equal = false;
23433 break;
23434 }
23435 }
23436
23437 return equal;
23438 };
23439
23440 _proto4.copy = function copy(other) {
23441 for (var i = 0; i < NR_COLS; i++) {
23442 this.chars[i].copy(other.chars[i]);
23443 }
23444 };
23445
23446 _proto4.isEmpty = function isEmpty() {
23447 var empty = true;
23448
23449 for (var i = 0; i < NR_COLS; i++) {
23450 if (!this.chars[i].isEmpty()) {
23451 empty = false;
23452 break;
23453 }
23454 }
23455
23456 return empty;
23457 }
23458 /**
23459 * Set the cursor to a valid column.
23460 */
23461 ;
23462
23463 _proto4.setCursor = function setCursor(absPos) {
23464 if (this.pos !== absPos) {
23465 this.pos = absPos;
23466 }
23467
23468 if (this.pos < 0) {
23469 this.logger.log(VerboseLevel.DEBUG, 'Negative cursor position ' + this.pos);
23470 this.pos = 0;
23471 } else if (this.pos > NR_COLS) {
23472 this.logger.log(VerboseLevel.DEBUG, 'Too large cursor position ' + this.pos);
23473 this.pos = NR_COLS;
23474 }
23475 }
23476 /**
23477 * Move the cursor relative to current position.
23478 */
23479 ;
23480
23481 _proto4.moveCursor = function moveCursor(relPos) {
23482 var newPos = this.pos + relPos;
23483
23484 if (relPos > 1) {
23485 for (var i = this.pos + 1; i < newPos + 1; i++) {
23486 this.chars[i].setPenState(this.currPenState);
23487 }
23488 }
23489
23490 this.setCursor(newPos);
23491 }
23492 /**
23493 * Backspace, move one step back and clear character.
23494 */
23495 ;
23496
23497 _proto4.backSpace = function backSpace() {
23498 this.moveCursor(-1);
23499 this.chars[this.pos].setChar(' ', this.currPenState);
23500 };
23501
23502 _proto4.insertChar = function insertChar(_byte2) {
23503 if (_byte2 >= 0x90) {
23504 // Extended char
23505 this.backSpace();
23506 }
23507
23508 var _char = getCharForByte(_byte2);
23509
23510 if (this.pos >= NR_COLS) {
23511 this.logger.log(VerboseLevel.ERROR, 'Cannot insert ' + _byte2.toString(16) + ' (' + _char + ') at position ' + this.pos + '. Skipping it!');
23512 return;
23513 }
23514
23515 this.chars[this.pos].setChar(_char, this.currPenState);
23516 this.moveCursor(1);
23517 };
23518
23519 _proto4.clearFromPos = function clearFromPos(startPos) {
23520 var i;
23521
23522 for (i = startPos; i < NR_COLS; i++) {
23523 this.chars[i].reset();
23524 }
23525 };
23526
23527 _proto4.clear = function clear() {
23528 this.clearFromPos(0);
23529 this.pos = 0;
23530 this.currPenState.reset();
23531 };
23532
23533 _proto4.clearToEndOfRow = function clearToEndOfRow() {
23534 this.clearFromPos(this.pos);
23535 };
23536
23537 _proto4.getTextString = function getTextString() {
23538 var chars = [];
23539 var empty = true;
23540
23541 for (var i = 0; i < NR_COLS; i++) {
23542 var _char2 = this.chars[i].uchar;
23543
23544 if (_char2 !== ' ') {
23545 empty = false;
23546 }
23547
23548 chars.push(_char2);
23549 }
23550
23551 if (empty) {
23552 return '';
23553 } else {
23554 return chars.join('');
23555 }
23556 };
23557
23558 _proto4.setPenStyles = function setPenStyles(styles) {
23559 this.currPenState.setStyles(styles);
23560 var currChar = this.chars[this.pos];
23561 currChar.setPenState(this.currPenState);
23562 };
23563
23564 return Row;
23565}();
23566/**
23567 * Keep a CEA-608 screen of NR_ROWS x NR_COLS (15 rows of 100 columns here) styled characters
23568 * @constructor
23569 */
23570
23571var CaptionScreen = /*#__PURE__*/function () {
23572 function CaptionScreen(logger) {
23573 this.rows = void 0;
23574 this.currRow = void 0;
23575 this.nrRollUpRows = void 0;
23576 this.lastOutputScreen = void 0;
23577 this.logger = void 0;
23578 this.rows = [];
23579
23580 for (var i = 0; i < NR_ROWS; i++) {
23581 this.rows.push(new Row(logger));
23582 } // Note that we use zero-based numbering (0-14)
23583
23584
23585 this.logger = logger;
23586 this.currRow = NR_ROWS - 1;
23587 this.nrRollUpRows = null;
23588 this.lastOutputScreen = null;
23589 this.reset();
23590 }
23591
23592 var _proto5 = CaptionScreen.prototype;
23593
23594 _proto5.reset = function reset() {
23595 for (var i = 0; i < NR_ROWS; i++) {
23596 this.rows[i].clear();
23597 }
23598
23599 this.currRow = NR_ROWS - 1;
23600 };
23601
23602 _proto5.equals = function equals(other) {
23603 var equal = true;
23604
23605 for (var i = 0; i < NR_ROWS; i++) {
23606 if (!this.rows[i].equals(other.rows[i])) {
23607 equal = false;
23608 break;
23609 }
23610 }
23611
23612 return equal;
23613 };
23614
23615 _proto5.copy = function copy(other) {
23616 for (var i = 0; i < NR_ROWS; i++) {
23617 this.rows[i].copy(other.rows[i]);
23618 }
23619 };
23620
23621 _proto5.isEmpty = function isEmpty() {
23622 var empty = true;
23623
23624 for (var i = 0; i < NR_ROWS; i++) {
23625 if (!this.rows[i].isEmpty()) {
23626 empty = false;
23627 break;
23628 }
23629 }
23630
23631 return empty;
23632 };
23633
23634 _proto5.backSpace = function backSpace() {
23635 var row = this.rows[this.currRow];
23636 row.backSpace();
23637 };
23638
23639 _proto5.clearToEndOfRow = function clearToEndOfRow() {
23640 var row = this.rows[this.currRow];
23641 row.clearToEndOfRow();
23642 }
23643 /**
23644 * Insert a character (without styling) in the current row.
23645 */
23646 ;
23647
23648 _proto5.insertChar = function insertChar(_char3) {
23649 var row = this.rows[this.currRow];
23650 row.insertChar(_char3);
23651 };
23652
23653 _proto5.setPen = function setPen(styles) {
23654 var row = this.rows[this.currRow];
23655 row.setPenStyles(styles);
23656 };
23657
23658 _proto5.moveCursor = function moveCursor(relPos) {
23659 var row = this.rows[this.currRow];
23660 row.moveCursor(relPos);
23661 };
23662
23663 _proto5.setCursor = function setCursor(absPos) {
23664 this.logger.log(VerboseLevel.INFO, 'setCursor: ' + absPos);
23665 var row = this.rows[this.currRow];
23666 row.setCursor(absPos);
23667 };
23668
23669 _proto5.setPAC = function setPAC(pacData) {
23670 this.logger.log(VerboseLevel.INFO, 'pacData = ' + JSON.stringify(pacData));
23671 var newRow = pacData.row - 1;
23672
23673 if (this.nrRollUpRows && newRow < this.nrRollUpRows - 1) {
23674 newRow = this.nrRollUpRows - 1;
23675 } // Make sure this only affects Roll-up Captions by checking this.nrRollUpRows
23676
23677
23678 if (this.nrRollUpRows && this.currRow !== newRow) {
23679 // clear all rows first
23680 for (var i = 0; i < NR_ROWS; i++) {
23681 this.rows[i].clear();
23682 } // Copy this.nrRollUpRows rows from lastOutputScreen and place them in the newRow location
23683 // topRowIndex - the start of rows to copy (inclusive index)
23684
23685
23686 var topRowIndex = this.currRow + 1 - this.nrRollUpRows; // We only copy if the last position was already shown.
23687 // We use the cueStartTime value to check this.
23688
23689 var lastOutputScreen = this.lastOutputScreen;
23690
23691 if (lastOutputScreen) {
23692 var prevLineTime = lastOutputScreen.rows[topRowIndex].cueStartTime;
23693 var time = this.logger.time;
23694
23695 if (prevLineTime && time !== null && prevLineTime < time) {
23696 for (var _i = 0; _i < this.nrRollUpRows; _i++) {
23697 this.rows[newRow - this.nrRollUpRows + _i + 1].copy(lastOutputScreen.rows[topRowIndex + _i]);
23698 }
23699 }
23700 }
23701 }
23702
23703 this.currRow = newRow;
23704 var row = this.rows[this.currRow];
23705
23706 if (pacData.indent !== null) {
23707 var indent = pacData.indent;
23708 var prevPos = Math.max(indent - 1, 0);
23709 row.setCursor(pacData.indent);
23710 pacData.color = row.chars[prevPos].penState.foreground;
23711 }
23712
23713 var styles = {
23714 foreground: pacData.color,
23715 underline: pacData.underline,
23716 italics: pacData.italics,
23717 background: 'black',
23718 flash: false
23719 };
23720 this.setPen(styles);
23721 }
23722 /**
23723 * Set background/extra foreground, but first do back_space, and then insert space (backwards compatibility).
23724 */
23725 ;
23726
23727 _proto5.setBkgData = function setBkgData(bkgData) {
23728 this.logger.log(VerboseLevel.INFO, 'bkgData = ' + JSON.stringify(bkgData));
23729 this.backSpace();
23730 this.setPen(bkgData);
23731 this.insertChar(0x20); // Space
23732 };
23733
23734 _proto5.setRollUpRows = function setRollUpRows(nrRows) {
23735 this.nrRollUpRows = nrRows;
23736 };
23737
23738 _proto5.rollUp = function rollUp() {
23739 if (this.nrRollUpRows === null) {
23740 this.logger.log(VerboseLevel.DEBUG, 'roll_up but nrRollUpRows not set yet');
23741 return; // Not properly setup
23742 }
23743
23744 this.logger.log(VerboseLevel.TEXT, this.getDisplayText());
23745 var topRowIndex = this.currRow + 1 - this.nrRollUpRows;
23746 var topRow = this.rows.splice(topRowIndex, 1)[0];
23747 topRow.clear();
23748 this.rows.splice(this.currRow, 0, topRow);
23749 this.logger.log(VerboseLevel.INFO, 'Rolling up'); // this.logger.log(VerboseLevel.TEXT, this.get_display_text())
23750 }
23751 /**
23752 * Get all non-empty rows as unicode text.
23753 */
23754 ;
23755
23756 _proto5.getDisplayText = function getDisplayText(asOneRow) {
23757 asOneRow = asOneRow || false;
23758 var displayText = [];
23759 var text = '';
23760 var rowNr = -1;
23761
23762 for (var i = 0; i < NR_ROWS; i++) {
23763 var rowText = this.rows[i].getTextString();
23764
23765 if (rowText) {
23766 rowNr = i + 1;
23767
23768 if (asOneRow) {
23769 displayText.push('Row ' + rowNr + ": '" + rowText + "'");
23770 } else {
23771 displayText.push(rowText.trim());
23772 }
23773 }
23774 }
23775
23776 if (displayText.length > 0) {
23777 if (asOneRow) {
23778 text = '[' + displayText.join(' | ') + ']';
23779 } else {
23780 text = displayText.join('\n');
23781 }
23782 }
23783
23784 return text;
23785 };
23786
23787 _proto5.getTextAndFormat = function getTextAndFormat() {
23788 return this.rows;
23789 };
23790
23791 return CaptionScreen;
23792}(); // var modes = ['MODE_ROLL-UP', 'MODE_POP-ON', 'MODE_PAINT-ON', 'MODE_TEXT'];
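// --- Usage sketch (illustrative only, not part of the original bundle) -------
// Writing two characters on the bottom row of a CaptionScreen and reading the
// screen back as text. Declared for documentation purposes; never called here.
function exampleCaptionScreen() {
  var screen = new CaptionScreen(new CaptionsLogger());
  screen.setPAC({ row: 15, indent: null, color: 'white', underline: false, italics: false });
  screen.insertChar(0x48); // 'H'
  screen.insertChar(0x49); // 'I'
  return screen.getDisplayText(); // -> 'HI'
}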
23793
23794var Cea608Channel = /*#__PURE__*/function () {
23795 function Cea608Channel(channelNumber, outputFilter, logger) {
23796 this.chNr = void 0;
23797 this.outputFilter = void 0;
23798 this.mode = void 0;
23799 this.verbose = void 0;
23800 this.displayedMemory = void 0;
23801 this.nonDisplayedMemory = void 0;
23802 this.lastOutputScreen = void 0;
23803 this.currRollUpRow = void 0;
23804 this.writeScreen = void 0;
23805 this.cueStartTime = void 0;
23806 this.logger = void 0;
23807 this.chNr = channelNumber;
23808 this.outputFilter = outputFilter;
23809 this.mode = null;
23810 this.verbose = 0;
23811 this.displayedMemory = new CaptionScreen(logger);
23812 this.nonDisplayedMemory = new CaptionScreen(logger);
23813 this.lastOutputScreen = new CaptionScreen(logger);
23814 this.currRollUpRow = this.displayedMemory.rows[NR_ROWS - 1];
23815 this.writeScreen = this.displayedMemory;
23816 this.mode = null;
23817 this.cueStartTime = null; // Keeps track of where a cue started.
23818
23819 this.logger = logger;
23820 }
23821
23822 var _proto6 = Cea608Channel.prototype;
23823
23824 _proto6.reset = function reset() {
23825 this.mode = null;
23826 this.displayedMemory.reset();
23827 this.nonDisplayedMemory.reset();
23828 this.lastOutputScreen.reset();
23829 this.outputFilter.reset();
23830 this.currRollUpRow = this.displayedMemory.rows[NR_ROWS - 1];
23831 this.writeScreen = this.displayedMemory;
23832 this.mode = null;
23833 this.cueStartTime = null;
23834 };
23835
23836 _proto6.getHandler = function getHandler() {
23837 return this.outputFilter;
23838 };
23839
23840 _proto6.setHandler = function setHandler(newHandler) {
23841 this.outputFilter = newHandler;
23842 };
23843
23844 _proto6.setPAC = function setPAC(pacData) {
23845 this.writeScreen.setPAC(pacData);
23846 };
23847
23848 _proto6.setBkgData = function setBkgData(bkgData) {
23849 this.writeScreen.setBkgData(bkgData);
23850 };
23851
23852 _proto6.setMode = function setMode(newMode) {
23853 if (newMode === this.mode) {
23854 return;
23855 }
23856
23857 this.mode = newMode;
23858 this.logger.log(VerboseLevel.INFO, 'MODE=' + newMode);
23859
23860 if (this.mode === 'MODE_POP-ON') {
23861 this.writeScreen = this.nonDisplayedMemory;
23862 } else {
23863 this.writeScreen = this.displayedMemory;
23864 this.writeScreen.reset();
23865 }
23866
23867 if (this.mode !== 'MODE_ROLL-UP') {
23868 this.displayedMemory.nrRollUpRows = null;
23869 this.nonDisplayedMemory.nrRollUpRows = null;
23870 }
23871
23872 this.mode = newMode;
23873 };
23874
23875 _proto6.insertChars = function insertChars(chars) {
23876 for (var i = 0; i < chars.length; i++) {
23877 this.writeScreen.insertChar(chars[i]);
23878 }
23879
23880 var screen = this.writeScreen === this.displayedMemory ? 'DISP' : 'NON_DISP';
23881 this.logger.log(VerboseLevel.INFO, screen + ': ' + this.writeScreen.getDisplayText(true));
23882
23883 if (this.mode === 'MODE_PAINT-ON' || this.mode === 'MODE_ROLL-UP') {
23884 this.logger.log(VerboseLevel.TEXT, 'DISPLAYED: ' + this.displayedMemory.getDisplayText(true));
23885 this.outputDataUpdate();
23886 }
23887 };
23888
23889 _proto6.ccRCL = function ccRCL() {
23890 // Resume Caption Loading (switch mode to Pop On)
23891 this.logger.log(VerboseLevel.INFO, 'RCL - Resume Caption Loading');
23892 this.setMode('MODE_POP-ON');
23893 };
23894
23895 _proto6.ccBS = function ccBS() {
23896 // BackSpace
23897 this.logger.log(VerboseLevel.INFO, 'BS - BackSpace');
23898
23899 if (this.mode === 'MODE_TEXT') {
23900 return;
23901 }
23902
23903 this.writeScreen.backSpace();
23904
23905 if (this.writeScreen === this.displayedMemory) {
23906 this.outputDataUpdate();
23907 }
23908 };
23909
23910 _proto6.ccAOF = function ccAOF() {// Reserved (formerly Alarm Off)
23911 };
23912
23913 _proto6.ccAON = function ccAON() {// Reserved (formerly Alarm On)
23914 };
23915
23916 _proto6.ccDER = function ccDER() {
23917 // Delete to End of Row
23918 this.logger.log(VerboseLevel.INFO, 'DER- Delete to End of Row');
23919 this.writeScreen.clearToEndOfRow();
23920 this.outputDataUpdate();
23921 };
23922
23923 _proto6.ccRU = function ccRU(nrRows) {
23924 // Roll-Up Captions-2,3,or 4 Rows
23925 this.logger.log(VerboseLevel.INFO, 'RU(' + nrRows + ') - Roll Up');
23926 this.writeScreen = this.displayedMemory;
23927 this.setMode('MODE_ROLL-UP');
23928 this.writeScreen.setRollUpRows(nrRows);
23929 };
23930
23931 _proto6.ccFON = function ccFON() {
23932 // Flash On
23933 this.logger.log(VerboseLevel.INFO, 'FON - Flash On');
23934 this.writeScreen.setPen({
23935 flash: true
23936 });
23937 };
23938
23939 _proto6.ccRDC = function ccRDC() {
23940 // Resume Direct Captioning (switch mode to PaintOn)
23941 this.logger.log(VerboseLevel.INFO, 'RDC - Resume Direct Captioning');
23942 this.setMode('MODE_PAINT-ON');
23943 };
23944
23945 _proto6.ccTR = function ccTR() {
23946 // Text Restart in text mode (not supported, however)
23947 this.logger.log(VerboseLevel.INFO, 'TR');
23948 this.setMode('MODE_TEXT');
23949 };
23950
23951 _proto6.ccRTD = function ccRTD() {
23952 // Resume Text Display in Text mode (not supported, however)
23953 this.logger.log(VerboseLevel.INFO, 'RTD');
23954 this.setMode('MODE_TEXT');
23955 };
23956
23957 _proto6.ccEDM = function ccEDM() {
23958 // Erase Displayed Memory
23959 this.logger.log(VerboseLevel.INFO, 'EDM - Erase Displayed Memory');
23960 this.displayedMemory.reset();
23961 this.outputDataUpdate(true);
23962 };
23963
23964 _proto6.ccCR = function ccCR() {
23965 // Carriage Return
23966 this.logger.log(VerboseLevel.INFO, 'CR - Carriage Return');
23967 this.writeScreen.rollUp();
23968 this.outputDataUpdate(true);
23969 };
23970
23971 _proto6.ccENM = function ccENM() {
23972 // Erase Non-Displayed Memory
23973 this.logger.log(VerboseLevel.INFO, 'ENM - Erase Non-displayed Memory');
23974 this.nonDisplayedMemory.reset();
23975 };
23976
23977 _proto6.ccEOC = function ccEOC() {
23978 // End of Caption (Flip Memories)
23979 this.logger.log(VerboseLevel.INFO, 'EOC - End Of Caption');
23980
23981 if (this.mode === 'MODE_POP-ON') {
23982 var tmp = this.displayedMemory;
23983 this.displayedMemory = this.nonDisplayedMemory;
23984 this.nonDisplayedMemory = tmp;
23985 this.writeScreen = this.nonDisplayedMemory;
23986 this.logger.log(VerboseLevel.TEXT, 'DISP: ' + this.displayedMemory.getDisplayText());
23987 }
23988
23989 this.outputDataUpdate(true);
23990 };
23991
23992 _proto6.ccTO = function ccTO(nrCols) {
23993 // Tab Offset 1,2, or 3 columns
23994 this.logger.log(VerboseLevel.INFO, 'TO(' + nrCols + ') - Tab Offset');
23995 this.writeScreen.moveCursor(nrCols);
23996 };
23997
23998 _proto6.ccMIDROW = function ccMIDROW(secondByte) {
23999 // Parse MIDROW command
24000 var styles = {
24001 flash: false
24002 };
24003 styles.underline = secondByte % 2 === 1;
24004 styles.italics = secondByte >= 0x2e;
24005
24006 if (!styles.italics) {
24007 var colorIndex = Math.floor(secondByte / 2) - 0x10;
24008 var colors = ['white', 'green', 'blue', 'cyan', 'red', 'yellow', 'magenta'];
24009 styles.foreground = colors[colorIndex];
24010 } else {
24011 styles.foreground = 'white';
24012 }
24013
24014 this.logger.log(VerboseLevel.INFO, 'MIDROW: ' + JSON.stringify(styles));
24015 this.writeScreen.setPen(styles);
24016 };
24017
24018 _proto6.outputDataUpdate = function outputDataUpdate(dispatch) {
24019 if (dispatch === void 0) {
24020 dispatch = false;
24021 }
24022
24023 var time = this.logger.time;
24024
24025 if (time === null) {
24026 return;
24027 }
24028
24029 if (this.outputFilter) {
24030 if (this.cueStartTime === null && !this.displayedMemory.isEmpty()) {
24031 // Start of a new cue
24032 this.cueStartTime = time;
24033 } else {
24034 if (!this.displayedMemory.equals(this.lastOutputScreen)) {
24035 this.outputFilter.newCue(this.cueStartTime, time, this.lastOutputScreen);
24036
24037 if (dispatch && this.outputFilter.dispatchCue) {
24038 this.outputFilter.dispatchCue();
24039 }
24040
24041 this.cueStartTime = this.displayedMemory.isEmpty() ? null : time;
24042 }
24043 }
24044
24045 this.lastOutputScreen.copy(this.displayedMemory);
24046 }
24047 };
24048
24049 _proto6.cueSplitAtTime = function cueSplitAtTime(t) {
24050 if (this.outputFilter) {
24051 if (!this.displayedMemory.isEmpty()) {
24052 if (this.outputFilter.newCue) {
24053 this.outputFilter.newCue(this.cueStartTime, t, this.displayedMemory);
24054 }
24055
24056 this.cueStartTime = t;
24057 }
24058 }
24059 };
24060
24061 return Cea608Channel;
24062}();
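// Summary of the two caption memories used above (illustrative, inferred from the handlers
// in this class): in roll-up mode ccRU points writeScreen at displayedMemory so characters
// appear as they arrive, while in pop-on mode text is composed off-screen and ccEOC swaps
// displayedMemory and nonDisplayedMemory to reveal a finished caption; ccEDM and ccENM
// erase the displayed and non-displayed memories respectively.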
24063
24064var Cea608Parser = /*#__PURE__*/function () {
24065 function Cea608Parser(field, out1, out2) {
24066 this.channels = void 0;
24067 this.currentChannel = 0;
24068 this.cmdHistory = void 0;
24069 this.logger = void 0;
24070 var logger = new CaptionsLogger();
24071 this.channels = [null, new Cea608Channel(field, out1, logger), new Cea608Channel(field + 1, out2, logger)];
24072 this.cmdHistory = createCmdHistory();
24073 this.logger = logger;
24074 }
24075
24076 var _proto7 = Cea608Parser.prototype;
24077
24078 _proto7.getHandler = function getHandler(channel) {
24079 return this.channels[channel].getHandler();
24080 };
24081
24082 _proto7.setHandler = function setHandler(channel, newHandler) {
24083 this.channels[channel].setHandler(newHandler);
24084 }
24085 /**
24086 * Add data for time t in the form of a list of bytes (unsigned ints). The bytes are treated as pairs.
24087 */
24088 ;
24089
24090 _proto7.addData = function addData(time, byteList) {
24091 var cmdFound;
24092 var a;
24093 var b;
24094 var charsFound = false;
24095 this.logger.time = time;
24096
24097 for (var i = 0; i < byteList.length; i += 2) {
24098 a = byteList[i] & 0x7f;
24099 b = byteList[i + 1] & 0x7f;
24100
24101 if (a === 0 && b === 0) {
24102 continue;
24103 } else {
24104 this.logger.log(VerboseLevel.DATA, '[' + numArrayToHexArray([byteList[i], byteList[i + 1]]) + '] -> (' + numArrayToHexArray([a, b]) + ')');
24105 }
24106
24107 cmdFound = this.parseCmd(a, b);
24108
24109 if (!cmdFound) {
24110 cmdFound = this.parseMidrow(a, b);
24111 }
24112
24113 if (!cmdFound) {
24114 cmdFound = this.parsePAC(a, b);
24115 }
24116
24117 if (!cmdFound) {
24118 cmdFound = this.parseBackgroundAttributes(a, b);
24119 }
24120
24121 if (!cmdFound) {
24122 charsFound = this.parseChars(a, b);
24123
24124 if (charsFound) {
24125 var currChNr = this.currentChannel;
24126
24127 if (currChNr && currChNr > 0) {
24128 var channel = this.channels[currChNr];
24129 channel.insertChars(charsFound);
24130 } else {
24131 this.logger.log(VerboseLevel.WARNING, 'No channel found yet. TEXT-MODE?');
24132 }
24133 }
24134 }
24135
24136 if (!cmdFound && !charsFound) {
24137 this.logger.log(VerboseLevel.WARNING, "Couldn't parse cleaned data " + numArrayToHexArray([a, b]) + ' orig: ' + numArrayToHexArray([byteList[i], byteList[i + 1]]));
24138 }
24139 }
24140 }
24141 /**
24142 * Parse Command.
24143 * @returns {Boolean} Tells if a command was found
24144 */
24145 ;
24146
24147 _proto7.parseCmd = function parseCmd(a, b) {
24148 var cmdHistory = this.cmdHistory;
24149 var cond1 = (a === 0x14 || a === 0x1c || a === 0x15 || a === 0x1d) && b >= 0x20 && b <= 0x2f;
24150 var cond2 = (a === 0x17 || a === 0x1f) && b >= 0x21 && b <= 0x23;
24151
24152 if (!(cond1 || cond2)) {
24153 return false;
24154 }
24155
24156 if (hasCmdRepeated(a, b, cmdHistory)) {
24157 setLastCmd(null, null, cmdHistory);
24158 this.logger.log(VerboseLevel.DEBUG, 'Repeated command (' + numArrayToHexArray([a, b]) + ') is dropped');
24159 return true;
24160 }
24161
24162 var chNr = a === 0x14 || a === 0x15 || a === 0x17 ? 1 : 2;
24163 var channel = this.channels[chNr];
24164
24165 if (a === 0x14 || a === 0x15 || a === 0x1c || a === 0x1d) {
24166 if (b === 0x20) {
24167 channel.ccRCL();
24168 } else if (b === 0x21) {
24169 channel.ccBS();
24170 } else if (b === 0x22) {
24171 channel.ccAOF();
24172 } else if (b === 0x23) {
24173 channel.ccAON();
24174 } else if (b === 0x24) {
24175 channel.ccDER();
24176 } else if (b === 0x25) {
24177 channel.ccRU(2);
24178 } else if (b === 0x26) {
24179 channel.ccRU(3);
24180 } else if (b === 0x27) {
24181 channel.ccRU(4);
24182 } else if (b === 0x28) {
24183 channel.ccFON();
24184 } else if (b === 0x29) {
24185 channel.ccRDC();
24186 } else if (b === 0x2a) {
24187 channel.ccTR();
24188 } else if (b === 0x2b) {
24189 channel.ccRTD();
24190 } else if (b === 0x2c) {
24191 channel.ccEDM();
24192 } else if (b === 0x2d) {
24193 channel.ccCR();
24194 } else if (b === 0x2e) {
24195 channel.ccENM();
24196 } else if (b === 0x2f) {
24197 channel.ccEOC();
24198 }
24199 } else {
24200 // a == 0x17 || a == 0x1F
24201 channel.ccTO(b - 0x20);
24202 }
24203
24204 setLastCmd(a, b, cmdHistory);
24205 this.currentChannel = chNr;
24206 return true;
24207 }
24208 /**
24209 * Parse midrow styling command
24210 * @returns {Boolean}
24211 */
24212 ;
24213
24214 _proto7.parseMidrow = function parseMidrow(a, b) {
24215 var chNr = 0;
24216
24217 if ((a === 0x11 || a === 0x19) && b >= 0x20 && b <= 0x2f) {
24218 if (a === 0x11) {
24219 chNr = 1;
24220 } else {
24221 chNr = 2;
24222 }
24223
24224 if (chNr !== this.currentChannel) {
24225 this.logger.log(VerboseLevel.ERROR, 'Mismatch channel in midrow parsing');
24226 return false;
24227 }
24228
24229 var channel = this.channels[chNr];
24230
24231 if (!channel) {
24232 return false;
24233 }
24234
24235 channel.ccMIDROW(b);
24236 this.logger.log(VerboseLevel.DEBUG, 'MIDROW (' + numArrayToHexArray([a, b]) + ')');
24237 return true;
24238 }
24239
24240 return false;
24241 }
24242 /**
24243 * Parse Preamble Access Codes (Table 53).
24244 * @returns {Boolean} Tells if PAC found
24245 */
24246 ;
24247
24248 _proto7.parsePAC = function parsePAC(a, b) {
24249 var row;
24250 var cmdHistory = this.cmdHistory;
24251 var case1 = (a >= 0x11 && a <= 0x17 || a >= 0x19 && a <= 0x1f) && b >= 0x40 && b <= 0x7f;
24252 var case2 = (a === 0x10 || a === 0x18) && b >= 0x40 && b <= 0x5f;
24253
24254 if (!(case1 || case2)) {
24255 return false;
24256 }
24257
24258 if (hasCmdRepeated(a, b, cmdHistory)) {
24259 setLastCmd(null, null, cmdHistory);
24260 return true; // Repeated commands are dropped (once)
24261 }
24262
24263 var chNr = a <= 0x17 ? 1 : 2;
24264
24265 if (b >= 0x40 && b <= 0x5f) {
24266 row = chNr === 1 ? rowsLowCh1[a] : rowsLowCh2[a];
24267 } else {
24268 // 0x60 <= b <= 0x7F
24269 row = chNr === 1 ? rowsHighCh1[a] : rowsHighCh2[a];
24270 }
24271
24272 var channel = this.channels[chNr];
24273
24274 if (!channel) {
24275 return false;
24276 }
24277
24278 channel.setPAC(this.interpretPAC(row, b));
24279 setLastCmd(a, b, cmdHistory);
24280 this.currentChannel = chNr;
24281 return true;
24282 }
24283 /**
24284 * Interpret the second byte of the pac, and return the information.
24285 * @returns {Object} pacData with style parameters.
24286 */
24287 ;
24288
24289 _proto7.interpretPAC = function interpretPAC(row, _byte3) {
24290 var pacIndex;
24291 var pacData = {
24292 color: null,
24293 italics: false,
24294 indent: null,
24295 underline: false,
24296 row: row
24297 };
24298
24299 if (_byte3 > 0x5f) {
24300 pacIndex = _byte3 - 0x60;
24301 } else {
24302 pacIndex = _byte3 - 0x40;
24303 }
24304
24305 pacData.underline = (pacIndex & 1) === 1;
24306
24307 if (pacIndex <= 0xd) {
24308 pacData.color = ['white', 'green', 'blue', 'cyan', 'red', 'yellow', 'magenta', 'white'][Math.floor(pacIndex / 2)];
24309 } else if (pacIndex <= 0xf) {
24310 pacData.italics = true;
24311 pacData.color = 'white';
24312 } else {
24313 pacData.indent = Math.floor((pacIndex - 0x10) / 2) * 4;
24314 }
24315
24316 return pacData; // Note that row has zero offset. The spec uses 1.
24317 }
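  // Illustrative values derived from the mapping above: _byte3 0x42 -> pacIndex 0x02,
  // i.e. color 'green' with no underline; _byte3 0x6e -> pacIndex 0x0e, i.e. italic white;
  // _byte3 0x52 -> pacIndex 0x12, i.e. an indent of 4 columns.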
24318 /**
24319 * Parse characters.
24320 * @returns An array with 1 to 2 codes corresponding to chars, if found. null otherwise.
24321 */
24322 ;
24323
24324 _proto7.parseChars = function parseChars(a, b) {
24325 var channelNr;
24326 var charCodes = null;
24327 var charCode1 = null;
24328
24329 if (a >= 0x19) {
24330 channelNr = 2;
24331 charCode1 = a - 8;
24332 } else {
24333 channelNr = 1;
24334 charCode1 = a;
24335 }
24336
24337 if (charCode1 >= 0x11 && charCode1 <= 0x13) {
24338 // Special character
24339 var oneCode;
24340
24341 if (charCode1 === 0x11) {
24342 oneCode = b + 0x50;
24343 } else if (charCode1 === 0x12) {
24344 oneCode = b + 0x70;
24345 } else {
24346 oneCode = b + 0x90;
24347 }
24348
24349 this.logger.log(VerboseLevel.INFO, "Special char '" + getCharForByte(oneCode) + "' in channel " + channelNr);
24350 charCodes = [oneCode];
24351 } else if (a >= 0x20 && a <= 0x7f) {
24352 charCodes = b === 0 ? [a] : [a, b];
24353 }
24354
24355 if (charCodes) {
24356 var hexCodes = numArrayToHexArray(charCodes);
24357 this.logger.log(VerboseLevel.DEBUG, 'Char codes = ' + hexCodes.join(','));
24358 setLastCmd(a, b, this.cmdHistory);
24359 }
24360
24361 return charCodes;
24362 }
24363 /**
24364 * Parse extended background attributes as well as new foreground color black.
24365 * @returns {Boolean} Tells if background attributes are found
24366 */
24367 ;
24368
24369 _proto7.parseBackgroundAttributes = function parseBackgroundAttributes(a, b) {
24370 var case1 = (a === 0x10 || a === 0x18) && b >= 0x20 && b <= 0x2f;
24371 var case2 = (a === 0x17 || a === 0x1f) && b >= 0x2d && b <= 0x2f;
24372
24373 if (!(case1 || case2)) {
24374 return false;
24375 }
24376
24377 var index;
24378 var bkgData = {};
24379
24380 if (a === 0x10 || a === 0x18) {
24381 index = Math.floor((b - 0x20) / 2);
24382 bkgData.background = backgroundColors[index];
24383
24384 if (b % 2 === 1) {
24385 bkgData.background = bkgData.background + '_semi';
24386 }
24387 } else if (b === 0x2d) {
24388 bkgData.background = 'transparent';
24389 } else {
24390 bkgData.foreground = 'black';
24391
24392 if (b === 0x2f) {
24393 bkgData.underline = true;
24394 }
24395 }
24396
24397 var chNr = a <= 0x17 ? 1 : 2;
24398 var channel = this.channels[chNr];
24399 channel.setBkgData(bkgData);
24400 setLastCmd(a, b, this.cmdHistory);
24401 return true;
24402 }
24403 /**
24404 * Reset state of parser and its channels.
24405 */
24406 ;
24407
24408 _proto7.reset = function reset() {
24409 for (var i = 0; i < Object.keys(this.channels).length; i++) {
24410 var channel = this.channels[i];
24411
24412 if (channel) {
24413 channel.reset();
24414 }
24415 }
24416
24417 this.cmdHistory = createCmdHistory();
24418 }
24419 /**
24420 * Trigger the generation of a cue, and the start of a new one if the display screens are not empty.
24421 */
24422 ;
24423
24424 _proto7.cueSplitAtTime = function cueSplitAtTime(t) {
24425 for (var i = 0; i < this.channels.length; i++) {
24426 var channel = this.channels[i];
24427
24428 if (channel) {
24429 channel.cueSplitAtTime(t);
24430 }
24431 }
24432 };
24433
24434 return Cea608Parser;
24435}();
24436
24437function setLastCmd(a, b, cmdHistory) {
24438 cmdHistory.a = a;
24439 cmdHistory.b = b;
24440}
24441
24442function hasCmdRepeated(a, b, cmdHistory) {
24443 return cmdHistory.a === a && cmdHistory.b === b;
24444}
24445
24446function createCmdHistory() {
24447 return {
24448 a: null,
24449 b: null
24450 };
24451}
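/*
 * Illustrative usage sketch (not executed here): feeding byte pairs to the parser,
 * assuming `out1`/`out2` are output filters exposing the newCue/dispatchCue hooks that
 * outputDataUpdate above expects.
 *
 *   var parser = new Cea608Parser(1, out1, out2);
 *   // 0x14 0x20 -> RCL (Resume Caption Loading) on channel 1, i.e. pop-on mode
 *   // 0x48 0x69 -> the characters "Hi"
 *   // 0x14 0x2f -> EOC (End Of Caption), which flips displayed/non-displayed memory
 *   parser.addData(0, [0x14, 0x20, 0x48, 0x69, 0x14, 0x2f]);
 */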
24452
24453/* harmony default export */ __webpack_exports__["default"] = (Cea608Parser);
24454
24455/***/ }),
24456
24457/***/ "./src/utils/codecs.ts":
24458/*!*****************************!*\
24459 !*** ./src/utils/codecs.ts ***!
24460 \*****************************/
24461/*! exports provided: isCodecType, isCodecSupportedInMp4 */
24462/***/ (function(module, __webpack_exports__, __webpack_require__) {
24463__webpack_require__.r(__webpack_exports__);
24464/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isCodecType", function() { return isCodecType; });
24465/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isCodecSupportedInMp4", function() { return isCodecSupportedInMp4; });
24466// from http://mp4ra.org/codecs.html
24467var sampleEntryCodesISO = {
24468 audio: {
24469 a3ds: true,
24470 'ac-3': true,
24471 'ac-4': true,
24472 alac: true,
24473 alaw: true,
24474 dra1: true,
24475 'dts+': true,
24476 'dts-': true,
24477 dtsc: true,
24478 dtse: true,
24479 dtsh: true,
24480 'ec-3': true,
24481 enca: true,
24482 g719: true,
24483 g726: true,
24484 m4ae: true,
24485 mha1: true,
24486 mha2: true,
24487 mhm1: true,
24488 mhm2: true,
24489 mlpa: true,
24490 mp4a: true,
24491 'raw ': true,
24492 Opus: true,
24493 samr: true,
24494 sawb: true,
24495 sawp: true,
24496 sevc: true,
24497 sqcp: true,
24498 ssmv: true,
24499 twos: true,
24500 ulaw: true
24501 },
24502 video: {
24503 avc1: true,
24504 avc2: true,
24505 avc3: true,
24506 avc4: true,
24507 avcp: true,
24508 av01: true,
24509 drac: true,
24510 dvav: true,
24511 dvhe: true,
24512 encv: true,
24513 hev1: true,
24514 hvc1: true,
24515 mjp2: true,
24516 mp4v: true,
24517 mvc1: true,
24518 mvc2: true,
24519 mvc3: true,
24520 mvc4: true,
24521 resv: true,
24522 rv60: true,
24523 s263: true,
24524 svc1: true,
24525 svc2: true,
24526 'vc-1': true,
24527 vp08: true,
24528 vp09: true
24529 },
24530 text: {
24531 stpp: true,
24532 wvtt: true
24533 }
24534};
24535function isCodecType(codec, type) {
24536 var typeCodes = sampleEntryCodesISO[type];
24537 return !!typeCodes && typeCodes[codec.slice(0, 4)] === true;
24538}
24539function isCodecSupportedInMp4(codec, type) {
24540 return MediaSource.isTypeSupported((type || 'video') + "/mp4;codecs=\"" + codec + "\"");
24541}
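// Illustrative examples (not part of the module): only the four-character sample entry
// code is compared, so codec parameters after the dot are ignored.
//
//   isCodecType('avc1.640028', 'video');   // true  - 'avc1' is listed above
//   isCodecType('mp4a.40.2', 'audio');     // true
//   isCodecType('avc1.640028', 'audio');   // false - wrong type bucket
//   // isCodecSupportedInMp4('hvc1.1.6.L93.B0', 'video') defers to MediaSource.isTypeSupported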
24542
24543/***/ }),
24544
24545/***/ "./src/utils/cues.ts":
24546/*!***************************!*\
24547 !*** ./src/utils/cues.ts ***!
24548 \***************************/
24549/*! exports provided: default */
24550/***/ (function(module, __webpack_exports__, __webpack_require__) {
24551__webpack_require__.r(__webpack_exports__);
24552/* harmony import */ var _vttparser__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./vttparser */ "./src/utils/vttparser.ts");
24553/* harmony import */ var _webvtt_parser__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./webvtt-parser */ "./src/utils/webvtt-parser.ts");
24554/* harmony import */ var _texttrack_utils__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./texttrack-utils */ "./src/utils/texttrack-utils.ts");
24555
24556
24557
24558var WHITESPACE_CHAR = /\s/;
24559var Cues = {
24560 newCue: function newCue(track, startTime, endTime, captionScreen) {
24561 var result = [];
24562 var row; // the type data states this is VTTCue, but it can potentially be a TextTrackCue on old browsers
24563
24564 var cue;
24565 var indenting;
24566 var indent;
24567 var text;
24568 var Cue = self.VTTCue || self.TextTrackCue;
24569
24570 for (var r = 0; r < captionScreen.rows.length; r++) {
24571 row = captionScreen.rows[r];
24572 indenting = true;
24573 indent = 0;
24574 text = '';
24575
24576 if (!row.isEmpty()) {
24577 for (var c = 0; c < row.chars.length; c++) {
24578 if (WHITESPACE_CHAR.test(row.chars[c].uchar) && indenting) {
24579 indent++;
24580 } else {
24581 text += row.chars[c].uchar;
24582 indenting = false;
24583 }
24584 } // To be used for cleaning-up orphaned roll-up captions
24585
24586
24587 row.cueStartTime = startTime; // Give a slight bump to the endTime if it's equal to startTime to avoid a SyntaxError in IE
24588
24589 if (startTime === endTime) {
24590 endTime += 0.0001;
24591 }
24592
24593 if (indent >= 16) {
24594 indent--;
24595 } else {
24596 indent++;
24597 }
24598
24599 var cueText = Object(_vttparser__WEBPACK_IMPORTED_MODULE_0__["fixLineBreaks"])(text.trim());
24600 var id = Object(_webvtt_parser__WEBPACK_IMPORTED_MODULE_1__["generateCueId"])(startTime, endTime, cueText); // If this cue already exists in the track do not push it
24601
24602 if (!track || !track.cues || !track.cues.getCueById(id)) {
24603 cue = new Cue(startTime, endTime, cueText);
24604 cue.id = id;
24605 cue.line = r + 1;
24606 cue.align = 'left'; // Clamp the position between 10 and 80 percent (CEA-608 PAC indent code)
24607 // https://dvcs.w3.org/hg/text-tracks/raw-file/default/608toVTT/608toVTT.html#positioning-in-cea-608
24608 // Firefox throws an exception and captions break with out of bounds 0-100 values
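 // Worked example of the formula below: every 4 columns of indent add 10 to cue.position
 // (indent 0 -> 10, 4 -> 20, 8 -> 30, ...), with the indent contribution capped at 80.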
24609
24610 cue.position = 10 + Math.min(80, Math.floor(indent * 8 / 32) * 10);
24611 result.push(cue);
24612 }
24613 }
24614 }
24615
24616 if (track && result.length) {
24617 // Sort bottom cues in reverse order so that they render in line order when overlapping in Chrome
24618 result.sort(function (cueA, cueB) {
24619 if (cueA.line === 'auto' || cueB.line === 'auto') {
24620 return 0;
24621 }
24622
24623 if (cueA.line > 8 && cueB.line > 8) {
24624 return cueB.line - cueA.line;
24625 }
24626
24627 return cueA.line - cueB.line;
24628 });
24629 result.forEach(function (cue) {
24630 return Object(_texttrack_utils__WEBPACK_IMPORTED_MODULE_2__["addCueToTrack"])(track, cue);
24631 });
24632 }
24633
24634 return result;
24635 }
24636};
24637/* harmony default export */ __webpack_exports__["default"] = (Cues);
24638
24639/***/ }),
24640
24641/***/ "./src/utils/discontinuities.ts":
24642/*!**************************************!*\
24643 !*** ./src/utils/discontinuities.ts ***!
24644 \**************************************/
24645/*! exports provided: findFirstFragWithCC, shouldAlignOnDiscontinuities, findDiscontinuousReferenceFrag, adjustSlidingStart, alignStream, alignPDT, alignFragmentByPDTDelta, alignMediaPlaylistByPDT */
24646/***/ (function(module, __webpack_exports__, __webpack_require__) {
24647__webpack_require__.r(__webpack_exports__);
24648/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findFirstFragWithCC", function() { return findFirstFragWithCC; });
24649/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "shouldAlignOnDiscontinuities", function() { return shouldAlignOnDiscontinuities; });
24650/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findDiscontinuousReferenceFrag", function() { return findDiscontinuousReferenceFrag; });
24651/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "adjustSlidingStart", function() { return adjustSlidingStart; });
24652/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "alignStream", function() { return alignStream; });
24653/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "alignPDT", function() { return alignPDT; });
24654/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "alignFragmentByPDTDelta", function() { return alignFragmentByPDTDelta; });
24655/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "alignMediaPlaylistByPDT", function() { return alignMediaPlaylistByPDT; });
24656/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
24657/* harmony import */ var _logger__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./logger */ "./src/utils/logger.ts");
24658/* harmony import */ var _controller_level_helper__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../controller/level-helper */ "./src/controller/level-helper.ts");
24659
24660
24661
24662
24663function findFirstFragWithCC(fragments, cc) {
24664 var firstFrag = null;
24665
24666 for (var i = 0, len = fragments.length; i < len; i++) {
24667 var currentFrag = fragments[i];
24668
24669 if (currentFrag && currentFrag.cc === cc) {
24670 firstFrag = currentFrag;
24671 break;
24672 }
24673 }
24674
24675 return firstFrag;
24676}
24677function shouldAlignOnDiscontinuities(lastFrag, lastLevel, details) {
24678 if (lastLevel.details) {
24679 if (details.endCC > details.startCC || lastFrag && lastFrag.cc < details.startCC) {
24680 return true;
24681 }
24682 }
24683
24684 return false;
24685} // Find the first frag in the previous level which matches the CC of the first frag of the new level
24686
24687function findDiscontinuousReferenceFrag(prevDetails, curDetails) {
24688 var prevFrags = prevDetails.fragments;
24689 var curFrags = curDetails.fragments;
24690
24691 if (!curFrags.length || !prevFrags.length) {
24692 _logger__WEBPACK_IMPORTED_MODULE_1__["logger"].log('No fragments to align');
24693 return;
24694 }
24695
24696 var prevStartFrag = findFirstFragWithCC(prevFrags, curFrags[0].cc);
24697
24698 if (!prevStartFrag || prevStartFrag && !prevStartFrag.startPTS) {
24699 _logger__WEBPACK_IMPORTED_MODULE_1__["logger"].log('No frag in previous level to align on');
24700 return;
24701 }
24702
24703 return prevStartFrag;
24704}
24705
24706function adjustFragmentStart(frag, sliding) {
24707 if (frag) {
24708 var start = frag.start + sliding;
24709 frag.start = frag.startPTS = start;
24710 frag.endPTS = start + frag.duration;
24711 }
24712}
24713
24714function adjustSlidingStart(sliding, details) {
24715 // Update segments
24716 var fragments = details.fragments;
24717
24718 for (var i = 0, len = fragments.length; i < len; i++) {
24719 adjustFragmentStart(fragments[i], sliding);
24720 } // Update LL-HLS parts at the end of the playlist
24721
24722
24723 if (details.fragmentHint) {
24724 adjustFragmentStart(details.fragmentHint, sliding);
24725 }
24726
24727 details.alignedSliding = true;
24728}
24729/**
24730 * Using the parameters of the last level, this function computes PTS' of the new fragments so that they form a
24731 * contiguous stream with the last fragments.
24732 * The PTS of a fragment lets Hls.js know where it fits into a stream - by knowing every PTS, we know which fragment to
24733 * download at any given time. PTS is normally computed when the fragment is demuxed, so taking this step saves us time
24734 * and an extra download.
24735 * @param lastFrag
24736 * @param lastLevel
24737 * @param details
24738 */
24739
24740function alignStream(lastFrag, lastLevel, details) {
24741 if (!lastLevel) {
24742 return;
24743 }
24744
24745 alignDiscontinuities(lastFrag, details, lastLevel);
24746
24747 if (!details.alignedSliding && lastLevel.details) {
24748 // If the PTS wasn't figured out via discontinuity sequence that means there was no CC increase within the level.
24749 // Aligning via Program Date Time should therefore be reliable, since PDT should be the same within the same
24750 // discontinuity sequence.
24751 alignPDT(details, lastLevel.details);
24752 }
24753
24754 if (!details.alignedSliding && lastLevel.details && !details.skippedSegments) {
24755 // Try to align on sn so that we pick a better start fragment.
24756 // Do not perform this on playlists with delta updates as this is only to align levels on switch
24757 // and adjustSliding only adjusts fragments after skippedSegments.
24758 Object(_controller_level_helper__WEBPACK_IMPORTED_MODULE_2__["adjustSliding"])(lastLevel.details, details);
24759 }
24760}
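// Illustrative summary of the fallback order above: (1) alignDiscontinuities matches a
// reference fragment by discontinuity sequence (CC), (2) alignPDT uses the
// EXT-X-PROGRAM-DATE-TIME delta, (3) adjustSliding matches by sequence number; steps 2
// and 3 only run while details.alignedSliding is still falsy. For example, if the
// reference fragment from the last level starts at 1000 s, adjustSlidingStart(1000, details)
// shifts every new fragment (and details.fragmentHint) forward by 1000 s and then marks
// details.alignedSliding = true.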
24761/**
24762 * Computes the PTS of a new level's fragments using the PTS of a fragment in the last level which shares the same
24763 * discontinuity sequence.
24764 * @param lastFrag - The last Fragment which shares the same discontinuity sequence
24765 * @param lastLevel - The details of the last loaded level
24766 * @param details - The details of the new level
24767 */
24768
24769function alignDiscontinuities(lastFrag, details, lastLevel) {
24770 if (shouldAlignOnDiscontinuities(lastFrag, lastLevel, details)) {
24771 var referenceFrag = findDiscontinuousReferenceFrag(lastLevel.details, details);
24772
24773 if (referenceFrag && Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(referenceFrag.start)) {
24774 _logger__WEBPACK_IMPORTED_MODULE_1__["logger"].log("Adjusting PTS using last level due to CC increase within current level " + details.url);
24775 adjustSlidingStart(referenceFrag.start, details);
24776 }
24777 }
24778}
24779/**
24780 * Computes the PTS of a new level's fragments using the difference in Program Date Time from the last level.
24781 * @param details - The details of the new level
24782 * @param lastDetails - The details of the last loaded level
24783 */
24784
24785
24786function alignPDT(details, lastDetails) {
24787 // This check protects the unsafe "!" usage below for null program date time access.
24788 if (!lastDetails.fragments.length || !details.hasProgramDateTime || !lastDetails.hasProgramDateTime) {
24789 return;
24790 } // if last level sliding is 1000 and its first frag PROGRAM-DATE-TIME is 2017-08-20 1:10:00 AM
24791 // and if new details first frag PROGRAM DATE-TIME is 2017-08-20 1:10:08 AM
24792 // then we can deduce that playlist B sliding is 1000+8 = 1008s
24793
24794
24795 var lastPDT = lastDetails.fragments[0].programDateTime; // hasProgramDateTime check above makes this safe.
24796
24797 var newPDT = details.fragments[0].programDateTime; // date diff is in ms. frag.start is in seconds
24798
24799 var sliding = (newPDT - lastPDT) / 1000 + lastDetails.fragments[0].start;
24800
24801 if (sliding && Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(sliding)) {
24802 _logger__WEBPACK_IMPORTED_MODULE_1__["logger"].log("Adjusting PTS using programDateTime delta " + (newPDT - lastPDT) + "ms, sliding:" + sliding.toFixed(3) + " " + details.url + " ");
24803 adjustSlidingStart(sliding, details);
24804 }
24805}
24806function alignFragmentByPDTDelta(frag, delta) {
24807 var programDateTime = frag.programDateTime;
24808 if (!programDateTime) return;
24809 var start = (programDateTime - delta) / 1000;
24810 frag.start = frag.startPTS = start;
24811 frag.endPTS = start + frag.duration;
24812}
24813/**
24814 * Ensures appropriate time-alignment between renditions based on PDT. Unlike `alignPDT`, which adjusts
24815 * the timeline based on the delta between PDTs of the 0th fragment of two playlists/`LevelDetails`,
24816 * this function assumes the timelines represented in `refDetails` are accurate, including the PDTs,
24817 * and uses the "wallclock"/PDT timeline as a cross-reference to `details`, adjusting the presentation
24818 * times/timelines of `details` accordingly.
24819 * Given the asynchronous nature of fetches and initial loads of live `main` and audio/subtitle tracks,
24820 * the primary purpose of this function is to ensure the "local timelines" of audio/subtitle tracks
24821 * are aligned to the main/video timeline, using PDT as the cross-reference/"anchor" that should
24822 * be consistent across playlists, per the HLS spec.
24823 * @param details - The details of the rendition you'd like to time-align (e.g. an audio rendition).
24824 * @param refDetails - The details of the reference rendition with start and PDT times for alignment.
24825 */
24826
24827function alignMediaPlaylistByPDT(details, refDetails) {
24828 // This check protects the unsafe "!" usage below for null program date time access.
24829 if (!refDetails.fragments.length || !details.hasProgramDateTime || !refDetails.hasProgramDateTime) {
24830 return;
24831 }
24832
24833 var refPDT = refDetails.fragments[0].programDateTime; // hasProgramDateTime check above makes this safe.
24834
24835 var refStart = refDetails.fragments[0].start; // Use the delta between the reference details' presentation timeline's start time and its PDT
24836 // to align the other rendition's timeline.
24837
24838 var delta = refPDT - refStart * 1000; // Per spec: "If any Media Playlist in a Master Playlist contains an EXT-X-PROGRAM-DATE-TIME tag, then all
24839 // Media Playlists in that Master Playlist MUST contain EXT-X-PROGRAM-DATE-TIME tags with consistent mappings
24840 // of date and time to media timestamps."
24841 // So we should be able to use each rendition's PDT as a reference time and use the delta to compute our relevant
24842 // start and end times.
24843 // NOTE: This code assumes each level's/details' timelines have already been made "internally consistent"
24844
24845 details.fragments.forEach(function (frag) {
24846 alignFragmentByPDTDelta(frag, delta);
24847 });
24848
24849 if (details.fragmentHint) {
24850 alignFragmentByPDTDelta(details.fragmentHint, delta);
24851 }
24852
24853 details.alignedSliding = true;
24854}
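// Worked example (illustrative numbers): if the reference rendition's first fragment has
// programDateTime 1502931000000 ms and start 1000 s, then delta = 1502930000000. A subtitle
// fragment whose programDateTime is 1502931008000 ms is therefore placed at
// start = (1502931008000 - 1502930000000) / 1000 = 1008 s on the shared timeline.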
24855
24856/***/ }),
24857
24858/***/ "./src/utils/ewma-bandwidth-estimator.ts":
24859/*!***********************************************!*\
24860 !*** ./src/utils/ewma-bandwidth-estimator.ts ***!
24861 \***********************************************/
24862/*! exports provided: default */
24863/***/ (function(module, __webpack_exports__, __webpack_require__) {
24864__webpack_require__.r(__webpack_exports__);
24865/* harmony import */ var _utils_ewma__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/ewma */ "./src/utils/ewma.ts");
24866/*
24867 * EWMA Bandwidth Estimator
24868 * - heavily inspired by shaka-player
24869 * Tracks bandwidth samples and estimates available bandwidth.
24870 * Based on the minimum of two exponentially-weighted moving averages with
24871 * different half-lives.
24872 */
24873
24874
24875var EwmaBandWidthEstimator = /*#__PURE__*/function () {
24876 function EwmaBandWidthEstimator(slow, fast, defaultEstimate) {
24877 this.defaultEstimate_ = void 0;
24878 this.minWeight_ = void 0;
24879 this.minDelayMs_ = void 0;
24880 this.slow_ = void 0;
24881 this.fast_ = void 0;
24882 this.defaultEstimate_ = defaultEstimate;
24883 this.minWeight_ = 0.001;
24884 this.minDelayMs_ = 50;
24885 this.slow_ = new _utils_ewma__WEBPACK_IMPORTED_MODULE_0__["default"](slow);
24886 this.fast_ = new _utils_ewma__WEBPACK_IMPORTED_MODULE_0__["default"](fast);
24887 }
24888
24889 var _proto = EwmaBandWidthEstimator.prototype;
24890
24891 _proto.update = function update(slow, fast) {
24892 var slow_ = this.slow_,
24893 fast_ = this.fast_;
24894
24895 if (this.slow_.halfLife !== slow) {
24896 this.slow_ = new _utils_ewma__WEBPACK_IMPORTED_MODULE_0__["default"](slow, slow_.getEstimate(), slow_.getTotalWeight());
24897 }
24898
24899 if (this.fast_.halfLife !== fast) {
24900 this.fast_ = new _utils_ewma__WEBPACK_IMPORTED_MODULE_0__["default"](fast, fast_.getEstimate(), fast_.getTotalWeight());
24901 }
24902 };
24903
24904 _proto.sample = function sample(durationMs, numBytes) {
24905 durationMs = Math.max(durationMs, this.minDelayMs_);
24906 var numBits = 8 * numBytes; // weight is duration in seconds
24907
24908 var durationS = durationMs / 1000; // value is bandwidth in bits/s
24909
24910 var bandwidthInBps = numBits / durationS;
24911 this.fast_.sample(durationS, bandwidthInBps);
24912 this.slow_.sample(durationS, bandwidthInBps);
24913 };
24914
24915 _proto.canEstimate = function canEstimate() {
24916 var fast = this.fast_;
24917 return fast && fast.getTotalWeight() >= this.minWeight_;
24918 };
24919
24920 _proto.getEstimate = function getEstimate() {
24921 if (this.canEstimate()) {
24922 // console.log('slow estimate:'+ Math.round(this.slow_.getEstimate()));
24923 // console.log('fast estimate:'+ Math.round(this.fast_.getEstimate()));
24924 // Take the minimum of these two estimates. This should have the effect of
24925 // adapting down quickly, but up more slowly.
24926 return Math.min(this.fast_.getEstimate(), this.slow_.getEstimate());
24927 } else {
24928 return this.defaultEstimate_;
24929 }
24930 };
24931
24932 _proto.destroy = function destroy() {};
24933
24934 return EwmaBandWidthEstimator;
24935}();
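/*
 * Illustrative usage sketch (the half-life values here are assumptions, not necessarily the
 * hls.js defaults): samples are weighted by download duration in seconds and measured in bits/s.
 *
 *   var bwe = new EwmaBandWidthEstimator(9, 3, 5e5); // slow/fast half-lives, 500 kbps default
 *   bwe.sample(500, 1e6);  // 1,000,000 bytes in 500 ms -> a 16 Mbps sample
 *   if (bwe.canEstimate()) {
 *     var bps = bwe.getEstimate(); // min(fast, slow): adapts down quickly, up slowly
 *   }
 */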
24936
24937/* harmony default export */ __webpack_exports__["default"] = (EwmaBandWidthEstimator);
24938
24939/***/ }),
24940
24941/***/ "./src/utils/ewma.ts":
24942/*!***************************!*\
24943 !*** ./src/utils/ewma.ts ***!
24944 \***************************/
24945/*! exports provided: default */
24946/***/ (function(module, __webpack_exports__, __webpack_require__) {
24947__webpack_require__.r(__webpack_exports__);
24948/*
24949 * Compute an exponentially weighted moving average
24950 * - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
24951 * - heavily inspired by shaka-player
24952 */
24953var EWMA = /*#__PURE__*/function () {
24954 // About half of the estimated value will be from the last |halfLife| samples by weight.
24955 function EWMA(halfLife, estimate, weight) {
24956 if (estimate === void 0) {
24957 estimate = 0;
24958 }
24959
24960 if (weight === void 0) {
24961 weight = 0;
24962 }
24963
24964 this.halfLife = void 0;
24965 this.alpha_ = void 0;
24966 this.estimate_ = void 0;
24967 this.totalWeight_ = void 0;
24968 this.halfLife = halfLife; // Larger values of alpha expire historical data more slowly.
24969
24970 this.alpha_ = halfLife ? Math.exp(Math.log(0.5) / halfLife) : 0;
24971 this.estimate_ = estimate;
24972 this.totalWeight_ = weight;
24973 }
24974
24975 var _proto = EWMA.prototype;
24976
24977 _proto.sample = function sample(weight, value) {
24978 var adjAlpha = Math.pow(this.alpha_, weight);
24979 this.estimate_ = value * (1 - adjAlpha) + adjAlpha * this.estimate_;
24980 this.totalWeight_ += weight;
24981 };
24982
24983 _proto.getTotalWeight = function getTotalWeight() {
24984 return this.totalWeight_;
24985 };
24986
24987 _proto.getEstimate = function getEstimate() {
24988 if (this.alpha_) {
24989 var zeroFactor = 1 - Math.pow(this.alpha_, this.totalWeight_);
24990
24991 if (zeroFactor) {
24992 return this.estimate_ / zeroFactor;
24993 }
24994 }
24995
24996 return this.estimate_;
24997 };
24998
24999 return EWMA;
25000}();
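/*
 * Worked example (illustrative): alpha_ = exp(ln(0.5) / halfLife), so alpha_^halfLife = 0.5
 * and a sample's influence is halved once `halfLife` units of weight have been added after it.
 * The (1 - alpha_^totalWeight) factor in getEstimate() is the usual zero-bias correction:
 * for halfLife 4, a single sample(1, 100) stores estimate_ ~= 15.9, but getEstimate()
 * returns 15.9 / (1 - 0.8409) ~= 100, i.e. exactly the sampled value.
 */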
25001
25002/* harmony default export */ __webpack_exports__["default"] = (EWMA);
25003
25004/***/ }),
25005
25006/***/ "./src/utils/fetch-loader.ts":
25007/*!***********************************!*\
25008 !*** ./src/utils/fetch-loader.ts ***!
25009 \***********************************/
25010/*! exports provided: fetchSupported, default */
25011/***/ (function(module, __webpack_exports__, __webpack_require__) {
25012__webpack_require__.r(__webpack_exports__);
25013/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "fetchSupported", function() { return fetchSupported; });
25014/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
25015/* harmony import */ var _loader_load_stats__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../loader/load-stats */ "./src/loader/load-stats.ts");
25016/* harmony import */ var _demux_chunk_cache__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../demux/chunk-cache */ "./src/demux/chunk-cache.ts");
25017
25018
25019
25020function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
25021
25022function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
25023
25024function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
25025
25026function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {})); return true; } catch (e) { return false; } }
25027
25028function _isNativeFunction(fn) { return Function.toString.call(fn).indexOf("[native code]") !== -1; }
25029
25030function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
25031
25032function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
25033
25034function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
25035
25036
25037
25038function fetchSupported() {
25039 if ( // @ts-ignore
25040 self.fetch && self.AbortController && self.ReadableStream && self.Request) {
25041 try {
25042 new self.ReadableStream({}); // eslint-disable-line no-new
25043
25044 return true;
25045 } catch (e) {
25046 /* noop */
25047 }
25048 }
25049
25050 return false;
25051}
25052
25053var FetchLoader = /*#__PURE__*/function () {
25054 function FetchLoader(config
25055 /* HlsConfig */
25056 ) {
25057 this.fetchSetup = void 0;
25058 this.requestTimeout = void 0;
25059 this.request = void 0;
25060 this.response = void 0;
25061 this.controller = void 0;
25062 this.context = void 0;
25063 this.config = null;
25064 this.callbacks = null;
25065 this.stats = void 0;
25066 this.loader = null;
25067 this.fetchSetup = config.fetchSetup || getRequest;
25068 this.controller = new self.AbortController();
25069 this.stats = new _loader_load_stats__WEBPACK_IMPORTED_MODULE_1__["LoadStats"]();
25070 }
25071
25072 var _proto = FetchLoader.prototype;
25073
25074 _proto.destroy = function destroy() {
25075 this.loader = this.callbacks = null;
25076 this.abortInternal();
25077 };
25078
25079 _proto.abortInternal = function abortInternal() {
25080 var response = this.response;
25081
25082 if (!response || !response.ok) {
25083 this.stats.aborted = true;
25084 this.controller.abort();
25085 }
25086 };
25087
25088 _proto.abort = function abort() {
25089 var _this$callbacks;
25090
25091 this.abortInternal();
25092
25093 if ((_this$callbacks = this.callbacks) !== null && _this$callbacks !== void 0 && _this$callbacks.onAbort) {
25094 this.callbacks.onAbort(this.stats, this.context, this.response);
25095 }
25096 };
25097
25098 _proto.load = function load(context, config, callbacks) {
25099 var _this = this;
25100
25101 var stats = this.stats;
25102
25103 if (stats.loading.start) {
25104 throw new Error('Loader can only be used once.');
25105 }
25106
25107 stats.loading.start = self.performance.now();
25108 var initParams = getRequestParameters(context, this.controller.signal);
25109 var onProgress = callbacks.onProgress;
25110 var isArrayBuffer = context.responseType === 'arraybuffer';
25111 var LENGTH = isArrayBuffer ? 'byteLength' : 'length';
25112 this.context = context;
25113 this.config = config;
25114 this.callbacks = callbacks;
25115 this.request = this.fetchSetup(context, initParams);
25116 self.clearTimeout(this.requestTimeout);
25117 this.requestTimeout = self.setTimeout(function () {
25118 _this.abortInternal();
25119
25120 callbacks.onTimeout(stats, context, _this.response);
25121 }, config.timeout);
25122 self.fetch(this.request).then(function (response) {
25123 _this.response = _this.loader = response;
25124
25125 if (!response.ok) {
25126 var status = response.status,
25127 statusText = response.statusText;
25128 throw new FetchError(statusText || 'fetch, bad network response', status, response);
25129 }
25130
25131 stats.loading.first = Math.max(self.performance.now(), stats.loading.start);
25132 stats.total = parseInt(response.headers.get('Content-Length') || '0');
25133
25134 if (onProgress && Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(config.highWaterMark)) {
25135 return _this.loadProgressively(response, stats, context, config.highWaterMark, onProgress);
25136 }
25137
25138 if (isArrayBuffer) {
25139 return response.arrayBuffer();
25140 }
25141
25142 return response.text();
25143 }).then(function (responseData) {
25144 var response = _this.response;
25145 self.clearTimeout(_this.requestTimeout);
25146 stats.loading.end = Math.max(self.performance.now(), stats.loading.first);
25147 stats.loaded = stats.total = responseData[LENGTH];
25148 var loaderResponse = {
25149 url: response.url,
25150 data: responseData
25151 };
25152
25153 if (onProgress && !Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(config.highWaterMark)) {
25154 onProgress(stats, context, responseData, response);
25155 }
25156
25157 callbacks.onSuccess(loaderResponse, stats, context, response);
25158 }).catch(function (error) {
25159 self.clearTimeout(_this.requestTimeout);
25160
25161 if (stats.aborted) {
25162 return;
25163 } // CORS errors result in an undefined code. Set it to 0 here to align with XHR's behavior
25164
25165
25166 var code = error.code || 0;
25167 callbacks.onError({
25168 code: code,
25169 text: error.message
25170 }, context, error.details);
25171 });
25172 };
25173
25174 _proto.getCacheAge = function getCacheAge() {
25175 var result = null;
25176
25177 if (this.response) {
25178 var ageHeader = this.response.headers.get('age');
25179 result = ageHeader ? parseFloat(ageHeader) : null;
25180 }
25181
25182 return result;
25183 };
25184
25185 _proto.loadProgressively = function loadProgressively(response, stats, context, highWaterMark, onProgress) {
25186 if (highWaterMark === void 0) {
25187 highWaterMark = 0;
25188 }
25189
25190 var chunkCache = new _demux_chunk_cache__WEBPACK_IMPORTED_MODULE_2__["default"]();
25191 var reader = response.body.getReader();
25192
25193 var pump = function pump() {
25194 return reader.read().then(function (data) {
25195 if (data.done) {
25196 if (chunkCache.dataLength) {
25197 onProgress(stats, context, chunkCache.flush(), response);
25198 }
25199
25200 return Promise.resolve(new ArrayBuffer(0));
25201 }
25202
25203 var chunk = data.value;
25204 var len = chunk.length;
25205 stats.loaded += len;
25206
25207 if (len < highWaterMark || chunkCache.dataLength) {
25208 // The current chunk is too small to be emitted or the cache already has data
25209 // Push it to the cache
25210 chunkCache.push(chunk);
25211
25212 if (chunkCache.dataLength >= highWaterMark) {
25213 // flush in order to join the typed arrays
25214 onProgress(stats, context, chunkCache.flush(), response);
25215 }
25216 } else {
25217 // If there's nothing cached already, and the chunk is large enough
25218 // just emit the progress event
25219 onProgress(stats, context, chunk, response);
25220 }
25221
25222 return pump();
25223 }).catch(function () {
25224 /* aborted */
25225 return Promise.reject();
25226 });
25227 };
25228
25229 return pump();
25230 };
25231
25232 return FetchLoader;
25233}();
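/*
 * Illustrative usage sketch (URL and values are placeholders): with onProgress plus a finite
 * config.highWaterMark the body is streamed via loadProgressively(), buffered until at least
 * highWaterMark bytes are available (the final chunk may be smaller); otherwise the whole
 * response is resolved in one piece.
 *
 *   var loader = new FetchLoader({});            // config.fetchSetup is optional
 *   loader.load(
 *     { url: 'https://example.com/segment.ts', responseType: 'arraybuffer' },
 *     { timeout: 10000, highWaterMark: 128 * 1024 },
 *     {
 *       onSuccess: function (response, stats, context) {},
 *       onError: function (error, context) {},
 *       onTimeout: function (stats, context) {},
 *       onProgress: function (stats, context, chunk) {} // receives Uint8Array chunks
 *     }
 *   );
 */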
25234
25235function getRequestParameters(context, signal) {
25236 var initParams = {
25237 method: 'GET',
25238 mode: 'cors',
25239 credentials: 'same-origin',
25240 signal: signal,
25241 headers: new self.Headers(_extends({}, context.headers))
25242 };
25243
25244 if (context.rangeEnd) {
25245 initParams.headers.set('Range', 'bytes=' + context.rangeStart + '-' + String(context.rangeEnd - 1));
25246 }
25247
25248 return initParams;
25249}
25250
25251function getRequest(context, initParams) {
25252 return new self.Request(context.url, initParams);
25253}
25254
25255var FetchError = /*#__PURE__*/function (_Error) {
25256 _inheritsLoose(FetchError, _Error);
25257
25258 function FetchError(message, code, details) {
25259 var _this2;
25260
25261 _this2 = _Error.call(this, message) || this;
25262 _this2.code = void 0;
25263 _this2.details = void 0;
25264 _this2.code = code;
25265 _this2.details = details;
25266 return _this2;
25267 }
25268
25269 return FetchError;
25270}( /*#__PURE__*/_wrapNativeSuper(Error));
25271
25272/* harmony default export */ __webpack_exports__["default"] = (FetchLoader);
25273
25274/***/ }),
25275
25276/***/ "./src/utils/imsc1-ttml-parser.ts":
25277/*!****************************************!*\
25278 !*** ./src/utils/imsc1-ttml-parser.ts ***!
25279 \****************************************/
25280/*! exports provided: IMSC1_CODEC, parseIMSC1 */
25281/***/ (function(module, __webpack_exports__, __webpack_require__) {
25282__webpack_require__.r(__webpack_exports__);
25283/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "IMSC1_CODEC", function() { return IMSC1_CODEC; });
25284/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseIMSC1", function() { return parseIMSC1; });
25285/* harmony import */ var _mp4_tools__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./mp4-tools */ "./src/utils/mp4-tools.ts");
25286/* harmony import */ var _vttparser__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./vttparser */ "./src/utils/vttparser.ts");
25287/* harmony import */ var _vttcue__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./vttcue */ "./src/utils/vttcue.ts");
25288/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
25289/* harmony import */ var _timescale_conversion__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./timescale-conversion */ "./src/utils/timescale-conversion.ts");
25290/* harmony import */ var _webvtt_parser__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./webvtt-parser */ "./src/utils/webvtt-parser.ts");
25291function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
25292
25293
25294
25295
25296
25297
25298
25299var IMSC1_CODEC = 'stpp.ttml.im1t'; // Time format: h:m:s:frames(.subframes)
25300
25301var HMSF_REGEX = /^(\d{2,}):(\d{2}):(\d{2}):(\d{2})\.?(\d+)?$/; // Time format: hours, minutes, seconds, milliseconds, frames, ticks
25302
25303var TIME_UNIT_REGEX = /^(\d*(?:\.\d*)?)(h|m|s|ms|f|t)$/;
25304var textAlignToLineAlign = {
25305 left: 'start',
25306 center: 'center',
25307 right: 'end',
25308 start: 'start',
25309 end: 'end'
25310};
25311function parseIMSC1(payload, initPTS, timescale, callBack, errorCallBack) {
25312 var results = Object(_mp4_tools__WEBPACK_IMPORTED_MODULE_0__["findBox"])(new Uint8Array(payload), ['mdat']);
25313
25314 if (results.length === 0) {
25315 errorCallBack(new Error('Could not parse IMSC1 mdat'));
25316 return;
25317 }
25318
25319 var mdat = results[0];
25320 var ttml = Object(_demux_id3__WEBPACK_IMPORTED_MODULE_3__["utf8ArrayToStr"])(new Uint8Array(payload, mdat.start, mdat.end - mdat.start));
25321 var syncTime = Object(_timescale_conversion__WEBPACK_IMPORTED_MODULE_4__["toTimescaleFromScale"])(initPTS, 1, timescale);
25322
25323 try {
25324 callBack(parseTTML(ttml, syncTime));
25325 } catch (error) {
25326 errorCallBack(error);
25327 }
25328}
25329
25330function parseTTML(ttml, syncTime) {
25331 var parser = new DOMParser();
25332 var xmlDoc = parser.parseFromString(ttml, 'text/xml');
25333 var tt = xmlDoc.getElementsByTagName('tt')[0];
25334
25335 if (!tt) {
25336 throw new Error('Invalid ttml');
25337 }
25338
25339 var defaultRateInfo = {
25340 frameRate: 30,
25341 subFrameRate: 1,
25342 frameRateMultiplier: 0,
25343 tickRate: 0
25344 };
25345 var rateInfo = Object.keys(defaultRateInfo).reduce(function (result, key) {
25346 result[key] = tt.getAttribute("ttp:" + key) || defaultRateInfo[key];
25347 return result;
25348 }, {});
25349 var trim = tt.getAttribute('xml:space') !== 'preserve';
25350 var styleElements = collectionToDictionary(getElementCollection(tt, 'styling', 'style'));
25351 var regionElements = collectionToDictionary(getElementCollection(tt, 'layout', 'region'));
25352 var cueElements = getElementCollection(tt, 'body', '[begin]');
25353 return [].map.call(cueElements, function (cueElement) {
25354 var cueText = getTextContent(cueElement, trim);
25355
25356 if (!cueText || !cueElement.hasAttribute('begin')) {
25357 return null;
25358 }
25359
25360 var startTime = parseTtmlTime(cueElement.getAttribute('begin'), rateInfo);
25361 var duration = parseTtmlTime(cueElement.getAttribute('dur'), rateInfo);
25362 var endTime = parseTtmlTime(cueElement.getAttribute('end'), rateInfo);
25363
25364 if (startTime === null) {
25365 throw timestampParsingError(cueElement);
25366 }
25367
25368 if (endTime === null) {
25369 if (duration === null) {
25370 throw timestampParsingError(cueElement);
25371 }
25372
25373 endTime = startTime + duration;
25374 }
25375
25376 var cue = new _vttcue__WEBPACK_IMPORTED_MODULE_2__["default"](startTime - syncTime, endTime - syncTime, cueText);
25377 cue.id = Object(_webvtt_parser__WEBPACK_IMPORTED_MODULE_5__["generateCueId"])(cue.startTime, cue.endTime, cue.text);
25378 var region = regionElements[cueElement.getAttribute('region')];
25379 var style = styleElements[cueElement.getAttribute('style')]; // TODO: Add regions to track and cue (origin and extent)
25380 // These values are hard-coded (for now) to simulate region settings in the demo
25381
25382 cue.position = 10;
25383 cue.size = 80; // Apply styles to cue
25384
25385 var styles = getTtmlStyles(region, style);
25386 var textAlign = styles.textAlign;
25387
25388 if (textAlign) {
25389 // cue.positionAlign not settable in FF~2016
25390 var lineAlign = textAlignToLineAlign[textAlign];
25391
25392 if (lineAlign) {
25393 cue.lineAlign = lineAlign;
25394 }
25395
25396 cue.align = textAlign;
25397 }
25398
25399 _extends(cue, styles);
25400
25401 return cue;
25402 }).filter(function (cue) {
25403 return cue !== null;
25404 });
25405}
25406
25407function getElementCollection(fromElement, parentName, childName) {
25408 var parent = fromElement.getElementsByTagName(parentName)[0];
25409
25410 if (parent) {
25411 return [].slice.call(parent.querySelectorAll(childName));
25412 }
25413
25414 return [];
25415}
25416
25417function collectionToDictionary(elementsWithId) {
25418 return elementsWithId.reduce(function (dict, element) {
25419 var id = element.getAttribute('xml:id');
25420
25421 if (id) {
25422 dict[id] = element;
25423 }
25424
25425 return dict;
25426 }, {});
25427}
25428
25429function getTextContent(element, trim) {
25430 return [].slice.call(element.childNodes).reduce(function (str, node, i) {
25431 var _node$childNodes;
25432
25433 if (node.nodeName === 'br' && i) {
25434 return str + '\n';
25435 }
25436
25437 if ((_node$childNodes = node.childNodes) !== null && _node$childNodes !== void 0 && _node$childNodes.length) {
25438 return getTextContent(node, trim);
25439 } else if (trim) {
25440 return str + node.textContent.trim().replace(/\s+/g, ' ');
25441 }
25442
25443 return str + node.textContent;
25444 }, '');
25445}
25446
25447function getTtmlStyles(region, style) {
25448 var ttsNs = 'http://www.w3.org/ns/ttml#styling';
25449 var styleAttributes = ['displayAlign', 'textAlign', 'color', 'backgroundColor', 'fontSize', 'fontFamily' // 'fontWeight',
25450 // 'lineHeight',
25451 // 'wrapOption',
25452 // 'fontStyle',
25453 // 'direction',
25454 // 'writingMode'
25455 ];
25456 return styleAttributes.reduce(function (styles, name) {
25457 var value = getAttributeNS(style, ttsNs, name) || getAttributeNS(region, ttsNs, name);
25458
25459 if (value) {
25460 styles[name] = value;
25461 }
25462
25463 return styles;
25464 }, {});
25465}
25466
25467function getAttributeNS(element, ns, name) {
25468 return element.hasAttributeNS(ns, name) ? element.getAttributeNS(ns, name) : null;
25469}
25470
25471function timestampParsingError(node) {
25472 return new Error("Could not parse ttml timestamp " + node);
25473}
25474
25475function parseTtmlTime(timeAttributeValue, rateInfo) {
25476 if (!timeAttributeValue) {
25477 return null;
25478 }
25479
25480 var seconds = Object(_vttparser__WEBPACK_IMPORTED_MODULE_1__["parseTimeStamp"])(timeAttributeValue);
25481
25482 if (seconds === null) {
25483 if (HMSF_REGEX.test(timeAttributeValue)) {
25484 seconds = parseHoursMinutesSecondsFrames(timeAttributeValue, rateInfo);
25485 } else if (TIME_UNIT_REGEX.test(timeAttributeValue)) {
25486 seconds = parseTimeUnits(timeAttributeValue, rateInfo);
25487 }
25488 }
25489
25490 return seconds;
25491}
25492
25493function parseHoursMinutesSecondsFrames(timeAttributeValue, rateInfo) {
25494 var m = HMSF_REGEX.exec(timeAttributeValue);
25495 var frames = (m[4] | 0) + (m[5] | 0) / rateInfo.subFrameRate;
25496 return (m[1] | 0) * 3600 + (m[2] | 0) * 60 + (m[3] | 0) + frames / rateInfo.frameRate;
25497}
25498
25499function parseTimeUnits(timeAttributeValue, rateInfo) {
25500 var m = TIME_UNIT_REGEX.exec(timeAttributeValue);
25501 var value = Number(m[1]);
25502 var unit = m[2];
25503
25504 switch (unit) {
25505 case 'h':
25506 return value * 3600;
25507
25508 case 'm':
25509 return value * 60;
25510
25511 case 'ms':
25512 return value / 1000; // milliseconds -> seconds
25513
25514 case 'f':
25515 return value / rateInfo.frameRate;
25516
25517 case 't':
25518 return value / rateInfo.tickRate;
25519 }
25520
25521 return value;
25522}
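// Worked examples (illustrative, using the default rateInfo above with frameRate 30):
//   parseTtmlTime('00:00:05:15', rateInfo) -> 5 + 15/30 = 5.5 s   (HMSF form)
//   parseTtmlTime('90f', rateInfo)         -> 90 / 30    = 3 s    (frame units)
//   parseTtmlTime('2m', rateInfo)          -> 120 s, parseTtmlTime('1.5s', rateInfo) -> 1.5 s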
25523
25524/***/ }),
25525
25526/***/ "./src/utils/logger.ts":
25527/*!*****************************!*\
25528 !*** ./src/utils/logger.ts ***!
25529 \*****************************/
25530/*! exports provided: enableLogs, logger */
25531/***/ (function(module, __webpack_exports__, __webpack_require__) {
25532__webpack_require__.r(__webpack_exports__);
25533/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "enableLogs", function() { return enableLogs; });
25534/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "logger", function() { return logger; });
25535var noop = function noop() {};
25536
25537var fakeLogger = {
25538 trace: noop,
25539 debug: noop,
25540 log: noop,
25541 warn: noop,
25542 info: noop,
25543 error: noop
25544};
25545var exportedLogger = fakeLogger; // let lastCallTime;
25546// function formatMsgWithTimeInfo(type, msg) {
25547// const now = Date.now();
25548// const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
25549// lastCallTime = now;
25550// msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
25551// return msg;
25552// }
25553
25554function consolePrintFn(type) {
25555 var func = self.console[type];
25556
25557 if (func) {
25558 return func.bind(self.console, "[" + type + "] >");
25559 }
25560
25561 return noop;
25562}
25563
25564function exportLoggerFunctions(debugConfig) {
25565 for (var _len = arguments.length, functions = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
25566 functions[_key - 1] = arguments[_key];
25567 }
25568
25569 functions.forEach(function (type) {
25570 exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
25571 });
25572}
25573
25574function enableLogs(debugConfig) {
25575 // check that console is available
25576 if (self.console && debugConfig === true || typeof debugConfig === 'object') {
25577 exportLoggerFunctions(debugConfig, // Remove a level from the list here to hard-disable it
25578 // 'trace',
25579 'debug', 'log', 'info', 'warn', 'error'); // Some browsers don't allow using bind on the console object anyway
25580 // fallback to default if needed
25581
25582 try {
25583 exportedLogger.log();
25584 } catch (e) {
25585 exportedLogger = fakeLogger;
25586 }
25587 } else {
25588 exportedLogger = fakeLogger;
25589 }
25590}
25591var logger = exportedLogger;
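// Usage sketch (illustrative, not part of the original module): enableLogs(true)
// binds debug/log/info/warn/error to self.console with a "[level] >" prefix, and a
// config object may supply its own functions per level (others fall back to the console):
// enableLogs({ warn: console.warn.bind(console), error: console.error.bind(console) });
// logger.warn('something to report');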
25592
25593/***/ }),
25594
25595/***/ "./src/utils/mediakeys-helper.ts":
25596/*!***************************************!*\
25597 !*** ./src/utils/mediakeys-helper.ts ***!
25598 \***************************************/
25599/*! exports provided: KeySystems, requestMediaKeySystemAccess */
25600/***/ (function(module, __webpack_exports__, __webpack_require__) {
25601__webpack_require__.r(__webpack_exports__);
25602/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "KeySystems", function() { return KeySystems; });
25603/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "requestMediaKeySystemAccess", function() { return requestMediaKeySystemAccess; });
25604/**
25605 * @see https://developer.mozilla.org/en-US/docs/Web/API/Navigator/requestMediaKeySystemAccess
25606 */
25607var KeySystems;
25608
25609(function (KeySystems) {
25610 KeySystems["WIDEVINE"] = "com.widevine.alpha";
25611 KeySystems["PLAYREADY"] = "com.microsoft.playready";
25612})(KeySystems || (KeySystems = {}));
25613
25614var requestMediaKeySystemAccess = function () {
25615 if (typeof self !== 'undefined' && self.navigator && self.navigator.requestMediaKeySystemAccess) {
25616 return self.navigator.requestMediaKeySystemAccess.bind(self.navigator);
25617 } else {
25618 return null;
25619 }
25620}();
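// Usage sketch (illustrative; the configuration below is an assumption, not taken
// from this module): when EME is available this export is simply a bound
// navigator.requestMediaKeySystemAccess, so a caller might do:
// if (requestMediaKeySystemAccess) {
//   requestMediaKeySystemAccess(KeySystems.WIDEVINE, [{
//     initDataTypes: ['cenc'],
//     videoCapabilities: [{ contentType: 'video/mp4; codecs="avc1.42E01E"' }]
//   }]).then(function (access) { return access.createMediaKeys(); });
// }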
25621
25622
25623
25624/***/ }),
25625
25626/***/ "./src/utils/mediasource-helper.ts":
25627/*!*****************************************!*\
25628 !*** ./src/utils/mediasource-helper.ts ***!
25629 \*****************************************/
25630/*! exports provided: getMediaSource */
25631/***/ (function(module, __webpack_exports__, __webpack_require__) {
25632__webpack_require__.r(__webpack_exports__);
25633/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getMediaSource", function() { return getMediaSource; });
25634/**
25635 * MediaSource helper
25636 */
25637function getMediaSource() {
25638 return self.MediaSource || self.WebKitMediaSource;
25639}
25640
25641/***/ }),
25642
25643/***/ "./src/utils/mp4-tools.ts":
25644/*!********************************!*\
25645 !*** ./src/utils/mp4-tools.ts ***!
25646 \********************************/
25647/*! exports provided: bin2str, readUint16, readUint32, writeUint32, findBox, parseSegmentIndex, parseInitSegment, getStartDTS, getDuration, computeRawDurationFromSamples, offsetStartDTS, segmentValidRange, appendUint8Array */
25648/***/ (function(module, __webpack_exports__, __webpack_require__) {
25649__webpack_require__.r(__webpack_exports__);
25650/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "bin2str", function() { return bin2str; });
25651/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "readUint16", function() { return readUint16; });
25652/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "readUint32", function() { return readUint32; });
25653/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "writeUint32", function() { return writeUint32; });
25654/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findBox", function() { return findBox; });
25655/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseSegmentIndex", function() { return parseSegmentIndex; });
25656/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseInitSegment", function() { return parseInitSegment; });
25657/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getStartDTS", function() { return getStartDTS; });
25658/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getDuration", function() { return getDuration; });
25659/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "computeRawDurationFromSamples", function() { return computeRawDurationFromSamples; });
25660/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "offsetStartDTS", function() { return offsetStartDTS; });
25661/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "segmentValidRange", function() { return segmentValidRange; });
25662/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "appendUint8Array", function() { return appendUint8Array; });
25663/* harmony import */ var _typed_array__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./typed-array */ "./src/utils/typed-array.ts");
25664/* harmony import */ var _loader_fragment__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../loader/fragment */ "./src/loader/fragment.ts");
25665
25666
25667var UINT32_MAX = Math.pow(2, 32) - 1;
25668var push = [].push;
25669function bin2str(data) {
25670 return String.fromCharCode.apply(null, data);
25671}
25672function readUint16(buffer, offset) {
25673 if ('data' in buffer) {
25674 offset += buffer.start;
25675 buffer = buffer.data;
25676 }
25677
25678 var val = buffer[offset] << 8 | buffer[offset + 1];
25679 return val < 0 ? 65536 + val : val;
25680}
25681function readUint32(buffer, offset) {
25682 if ('data' in buffer) {
25683 offset += buffer.start;
25684 buffer = buffer.data;
25685 }
25686
25687 var val = buffer[offset] << 24 | buffer[offset + 1] << 16 | buffer[offset + 2] << 8 | buffer[offset + 3];
25688 return val < 0 ? 4294967296 + val : val;
25689}
25690function writeUint32(buffer, offset, value) {
25691 if ('data' in buffer) {
25692 offset += buffer.start;
25693 buffer = buffer.data;
25694 }
25695
25696 buffer[offset] = value >> 24;
25697 buffer[offset + 1] = value >> 16 & 0xff;
25698 buffer[offset + 2] = value >> 8 & 0xff;
25699 buffer[offset + 3] = value & 0xff;
25700} // Find the data for a box specified by its path
25701
25702function findBox(input, path) {
25703 var results = [];
25704
25705 if (!path.length) {
25706 // short-circuit the search for empty paths
25707 return results;
25708 }
25709
25710 var data;
25711 var start;
25712 var end;
25713
25714 if ('data' in input) {
25715 data = input.data;
25716 start = input.start;
25717 end = input.end;
25718 } else {
25719 data = input;
25720 start = 0;
25721 end = data.byteLength;
25722 }
25723
25724 for (var i = start; i < end;) {
25725 var size = readUint32(data, i);
25726 var type = bin2str(data.subarray(i + 4, i + 8));
25727 var endbox = size > 1 ? i + size : end;
25728
25729 if (type === path[0]) {
25730 if (path.length === 1) {
25731 // this is the end of the path and we've found the box we were
25732 // looking for
25733 results.push({
25734 data: data,
25735 start: i + 8,
25736 end: endbox
25737 });
25738 } else {
25739 // recursively search for the next box along the path
25740 var subresults = findBox({
25741 data: data,
25742 start: i + 8,
25743 end: endbox
25744 }, path.slice(1));
25745
25746 if (subresults.length) {
25747 push.apply(results, subresults);
25748 }
25749 }
25750 }
25751
25752 i = endbox;
25753 } // we've finished searching all of data
25754
25755
25756 return results;
25757}
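// Usage sketch (illustrative): given a Uint8Array `segment` holding fmp4 data,
// findBox returns one { data, start, end } entry per matching box, with `start`
// pointing just past the 8-byte size/type header, e.g.:
// var trafs = findBox(segment, ['moof', 'traf']);
// var tfhd = trafs.length ? findBox(trafs[0], ['tfhd'])[0] : undefined;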
25758function parseSegmentIndex(initSegment) {
25759 var moovBox = findBox(initSegment, ['moov']);
25760 var moov = moovBox[0];
25761 var moovEndOffset = moov ? moov.end : null; // we need this in case we need to chop off garbage at the end of the current data
25762
25763 var sidxBox = findBox(initSegment, ['sidx']);
25764
25765 if (!sidxBox || !sidxBox[0]) {
25766 return null;
25767 }
25768
25769 var references = [];
25770 var sidx = sidxBox[0];
25771 var version = sidx.data[0]; // set initial offset, we skip the reference ID (not needed)
25772
25773 var index = version === 0 ? 8 : 16;
25774 var timescale = readUint32(sidx, index);
25775 index += 4; // TODO: parse earliestPresentationTime and firstOffset
25776 // usually zero in our case
25777
25778 var earliestPresentationTime = 0;
25779 var firstOffset = 0;
25780
25781 if (version === 0) {
25782 index += 8;
25783 } else {
25784 index += 16;
25785 } // skip reserved
25786
25787
25788 index += 2;
25789 var startByte = sidx.end + firstOffset;
25790 var referencesCount = readUint16(sidx, index);
25791 index += 2;
25792
25793 for (var i = 0; i < referencesCount; i++) {
25794 var referenceIndex = index;
25795 var referenceInfo = readUint32(sidx, referenceIndex);
25796 referenceIndex += 4;
25797 var referenceSize = referenceInfo & 0x7fffffff;
25798 var referenceType = (referenceInfo & 0x80000000) >>> 31;
25799
25800 if (referenceType === 1) {
25801 // eslint-disable-next-line no-console
25802 console.warn('SIDX has hierarchical references (not supported)');
25803 return null;
25804 }
25805
25806 var subsegmentDuration = readUint32(sidx, referenceIndex);
25807 referenceIndex += 4;
25808 references.push({
25809 referenceSize: referenceSize,
25810 subsegmentDuration: subsegmentDuration,
25811 // unscaled
25812 info: {
25813 duration: subsegmentDuration / timescale,
25814 start: startByte,
25815 end: startByte + referenceSize - 1
25816 }
25817 });
25818 startByte += referenceSize; // Skipping 1 bit for |startsWithSap|, 3 bits for |sapType|, and 28 bits
25819 // for |sapDelta|.
25820
25821 referenceIndex += 4; // skip to next ref
25822
25823 index = referenceIndex;
25824 }
25825
25826 return {
25827 earliestPresentationTime: earliestPresentationTime,
25828 timescale: timescale,
25829 version: version,
25830 referencesCount: referencesCount,
25831 references: references,
25832 moovEndOffset: moovEndOffset
25833 };
25834}
25835/**
25836 * Parses an MP4 initialization segment and extracts stream type and
25837 * timescale values for any declared tracks. Timescale values indicate the
25838 * number of clock ticks per second to assume for time-based values
25839 * elsewhere in the MP4.
25840 *
25841 * To determine the start time of an MP4, you need two pieces of
25842 * information: the timescale unit and the earliest base media decode
25843 * time. Multiple timescales can be specified within an MP4 but the
25844 * base media decode time is always expressed in the timescale from
25845 * the media header box for the track:
25846 * ```
25847 * moov > trak > mdia > mdhd.timescale
25848 * moov > trak > mdia > hdlr
25849 * ```
25850 * @param initSegment {Uint8Array} the bytes of the init segment
25851 * @return {InitData} a hash of track type to timescale values or null if
25852 * the init segment is malformed.
25853 */
25854
25855function parseInitSegment(initSegment) {
25856 var result = [];
25857 var traks = findBox(initSegment, ['moov', 'trak']);
25858
25859 for (var i = 0; i < traks.length; i++) {
25860 var trak = traks[i];
25861 var tkhd = findBox(trak, ['tkhd'])[0];
25862
25863 if (tkhd) {
25864 var version = tkhd.data[tkhd.start];
25865
25866 var _index = version === 0 ? 12 : 20;
25867
25868 var trackId = readUint32(tkhd, _index);
25869 var mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
25870
25871 if (mdhd) {
25872 version = mdhd.data[mdhd.start];
25873 _index = version === 0 ? 12 : 20;
25874 var timescale = readUint32(mdhd, _index);
25875 var hdlr = findBox(trak, ['mdia', 'hdlr'])[0];
25876
25877 if (hdlr) {
25878 var hdlrType = bin2str(hdlr.data.subarray(hdlr.start + 8, hdlr.start + 12));
25879 var type = {
25880 soun: _loader_fragment__WEBPACK_IMPORTED_MODULE_1__["ElementaryStreamTypes"].AUDIO,
25881 vide: _loader_fragment__WEBPACK_IMPORTED_MODULE_1__["ElementaryStreamTypes"].VIDEO
25882 }[hdlrType];
25883
25884 if (type) {
25885 // Parse codec details
25886 var stsd = findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
25887 var codec = void 0;
25888
25889 if (stsd) {
25890 codec = bin2str(stsd.data.subarray(stsd.start + 12, stsd.start + 16)); // TODO: Parse codec details to be able to build MIME type.
25891 // stsd.start += 8;
25892 // const codecBox = findBox(stsd, [codec])[0];
25893 // if (codecBox) {
25894 // TODO: Codec parsing support for avc1, mp4a, hevc, av01...
25895 // }
25896 }
25897
25898 result[trackId] = {
25899 timescale: timescale,
25900 type: type
25901 };
25902 result[type] = {
25903 timescale: timescale,
25904 id: trackId,
25905 codec: codec
25906 };
25907 }
25908 }
25909 }
25910 }
25911 }
25912
25913 var trex = findBox(initSegment, ['moov', 'mvex', 'trex']);
25914 trex.forEach(function (trex) {
25915 var trackId = readUint32(trex, 4);
25916 var track = result[trackId];
25917
25918 if (track) {
25919 track.default = {
25920 duration: readUint32(trex, 12),
25921 flags: readUint32(trex, 20)
25922 };
25923 }
25924 });
25925 return result;
25926}
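// Usage sketch (illustrative values): for an init segment with a single video track
// whose track_ID is 1 and mdhd timescale is 90000, parseInitSegment returns roughly:
// result[1]       -> { timescale: 90000, type: 'video' }
// result['video'] -> { timescale: 90000, id: 1, codec: 'avc1' }
// plus a `default: { duration, flags }` entry on the track when a trex box is present.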
25927/**
25928 * Determine the base media decode start time, in seconds, for an MP4
25929 * fragment. If multiple fragments are specified, the earliest time is
25930 * returned.
25931 *
25932 * The base media decode time can be parsed from track fragment
25933 * metadata:
25934 * ```
25935 * moof > traf > tfdt.baseMediaDecodeTime
25936 * ```
25937 * It requires the timescale value from the mdhd to interpret.
25938 *
25939 * @param initData {InitData} a hash of track type to timescale values
25940 * @param fmp4 {Uint8Array} the bytes of the mp4 fragment
25941 * @return {number} the earliest base media decode start time for the
25942 * fragment, in seconds
25943 */
25944
25945function getStartDTS(initData, fmp4) {
25946 // we need info from two children of each track fragment box
25947 return findBox(fmp4, ['moof', 'traf']).reduce(function (result, traf) {
25948 var tfdt = findBox(traf, ['tfdt'])[0];
25949 var version = tfdt.data[tfdt.start];
25950 var start = findBox(traf, ['tfhd']).reduce(function (result, tfhd) {
25951 // get the track id from the tfhd
25952 var id = readUint32(tfhd, 4);
25953 var track = initData[id];
25954
25955 if (track) {
25956 var baseTime = readUint32(tfdt, 4);
25957
25958 if (version === 1) {
25959 baseTime *= Math.pow(2, 32);
25960 baseTime += readUint32(tfdt, 8);
25961 } // assume a 90kHz clock if no timescale was specified
25962
25963
25964 var scale = track.timescale || 90e3; // convert base time to seconds
25965
25966 var startTime = baseTime / scale;
25967
25968 if (isFinite(startTime) && (result === null || startTime < result)) {
25969 return startTime;
25970 }
25971 }
25972
25973 return result;
25974 }, null);
25975
25976 if (start !== null && isFinite(start) && (result === null || start < result)) {
25977 return start;
25978 }
25979
25980 return result;
25981 }, null) || 0;
25982}
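// Worked example (illustrative): with initData from parseInitSegment() and a fragment
// whose tfdt.baseMediaDecodeTime is 900000 on a 90000 Hz track,
// getStartDTS(initData, fragment) === 10 seconds; it falls back to 0 when no tfhd
// track id matches the init data.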
25983/*
25984 For Reference:
25985 aligned(8) class TrackFragmentHeaderBox
25986 extends FullBox(‘tfhd’, 0, tf_flags){
25987 unsigned int(32) track_ID;
25988 // all the following are optional fields
25989 unsigned int(64) base_data_offset;
25990 unsigned int(32) sample_description_index;
25991 unsigned int(32) default_sample_duration;
25992 unsigned int(32) default_sample_size;
25993 unsigned int(32) default_sample_flags
25994 }
25995 */
25996
25997function getDuration(data, initData) {
25998 var rawDuration = 0;
25999 var videoDuration = 0;
26000 var audioDuration = 0;
26001 var trafs = findBox(data, ['moof', 'traf']);
26002
26003 for (var i = 0; i < trafs.length; i++) {
26004 var traf = trafs[i]; // There is only one tfhd & trun per traf
26005 // This is true for CMAF style content, and we should perhaps check the ftyp
26006 // and only look for a single trun then, but for ISOBMFF we should check
26007 // for multiple track runs.
26008
26009 var tfhd = findBox(traf, ['tfhd'])[0]; // get the track id from the tfhd
26010
26011 var id = readUint32(tfhd, 4);
26012 var track = initData[id];
26013
26014 if (!track) {
26015 continue;
26016 }
26017
26018 var trackDefault = track.default;
26019 var tfhdFlags = readUint32(tfhd, 0) | (trackDefault === null || trackDefault === void 0 ? void 0 : trackDefault.flags);
26020 var sampleDuration = trackDefault === null || trackDefault === void 0 ? void 0 : trackDefault.duration;
26021
26022 if (tfhdFlags & 0x000008) {
26023 // 0x000008 indicates the presence of the default_sample_duration field
26024 if (tfhdFlags & 0x000002) {
26025 // 0x000002 indicates the presence of the sample_description_index field, which precedes default_sample_duration
26026 // If present, the default_sample_duration exists at byte offset 12
26027 sampleDuration = readUint32(tfhd, 12);
26028 } else {
26029 // Otherwise, the duration is at byte offset 8
26030 sampleDuration = readUint32(tfhd, 8);
26031 }
26032 } // assume a 90kHz clock if no timescale was specified
26033
26034
26035 var timescale = track.timescale || 90e3;
26036 var truns = findBox(traf, ['trun']);
26037
26038 for (var j = 0; j < truns.length; j++) {
26039 rawDuration = computeRawDurationFromSamples(truns[j]);
26040
26041 if (!rawDuration && sampleDuration) {
26042 var sampleCount = readUint32(truns[j], 4);
26043 rawDuration = sampleDuration * sampleCount;
26044 }
26045
26046 if (track.type === _loader_fragment__WEBPACK_IMPORTED_MODULE_1__["ElementaryStreamTypes"].VIDEO) {
26047 videoDuration += rawDuration / timescale;
26048 } else if (track.type === _loader_fragment__WEBPACK_IMPORTED_MODULE_1__["ElementaryStreamTypes"].AUDIO) {
26049 audioDuration += rawDuration / timescale;
26050 }
26051 }
26052 }
26053
26054 if (videoDuration === 0 && audioDuration === 0) {
26055 // If duration samples are not available in the traf use sidx subsegment_duration
26056 var sidx = parseSegmentIndex(data);
26057
26058 if (sidx !== null && sidx !== void 0 && sidx.references) {
26059 return sidx.references.reduce(function (dur, ref) {
26060 return dur + ref.info.duration || 0;
26061 }, 0);
26062 }
26063 }
26064
26065 if (videoDuration) {
26066 return videoDuration;
26067 }
26068
26069 return audioDuration;
26070}
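// Worked example (illustrative): with tfhdFlags = 0x0A both sample_description_index
// (0x02) and default_sample_duration (0x08) are present, so the default duration is
// read at byte offset 12 (0: version/flags, 4: track_ID, 8: sample_description_index);
// with tfhdFlags = 0x08 alone it sits at offset 8, matching the branches above.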
26071/*
26072 For Reference:
26073 aligned(8) class TrackRunBox
26074 extends FullBox(‘trun’, version, tr_flags) {
26075 unsigned int(32) sample_count;
26076 // the following are optional fields
26077 signed int(32) data_offset;
26078 unsigned int(32) first_sample_flags;
26079 // all fields in the following array are optional
26080 {
26081 unsigned int(32) sample_duration;
26082 unsigned int(32) sample_size;
26083 unsigned int(32) sample_flags
26084 if (version == 0)
26085 { unsigned int(32) sample_composition_time_offset; }
26086 else
26087 { signed int(32) sample_composition_time_offset; }
26088 }[ sample_count ]
26089 }
26090 */
26091
26092function computeRawDurationFromSamples(trun) {
26093 var flags = readUint32(trun, 0); // Flags are at offset 0, non-optional sample_count is at offset 4. Therefore we start 8 bytes in.
26094 // Each field is an int32, which is 4 bytes
26095
26096 var offset = 8; // data-offset-present flag
26097
26098 if (flags & 0x000001) {
26099 offset += 4;
26100 } // first-sample-flags-present flag
26101
26102
26103 if (flags & 0x000004) {
26104 offset += 4;
26105 }
26106
26107 var duration = 0;
26108 var sampleCount = readUint32(trun, 4);
26109
26110 for (var i = 0; i < sampleCount; i++) {
26111 // sample-duration-present flag
26112 if (flags & 0x000100) {
26113 var sampleDuration = readUint32(trun, offset);
26114 duration += sampleDuration;
26115 offset += 4;
26116 } // sample-size-present flag
26117
26118
26119 if (flags & 0x000200) {
26120 offset += 4;
26121 } // sample-flags-present flag
26122
26123
26124 if (flags & 0x000400) {
26125 offset += 4;
26126 } // sample-composition-time-offsets-present flag
26127
26128
26129 if (flags & 0x000800) {
26130 offset += 4;
26131 }
26132 }
26133
26134 return duration;
26135}
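// Worked example (illustrative): with trun flags = 0x000301 (data-offset,
// sample-duration and sample-size present), the per-sample records start at offset
// 12 (8 + 4 for data_offset); each iteration reads the duration, then skips the
// 4-byte size, so durations are summed from offsets 12, 20, 28, ...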
26136function offsetStartDTS(initData, fmp4, timeOffset) {
26137 findBox(fmp4, ['moof', 'traf']).forEach(function (traf) {
26138 findBox(traf, ['tfhd']).forEach(function (tfhd) {
26139 // get the track id from the tfhd
26140 var id = readUint32(tfhd, 4);
26141 var track = initData[id];
26142
26143 if (!track) {
26144 return;
26145 } // assume a 90kHz clock if no timescale was specified
26146
26147
26148 var timescale = track.timescale || 90e3; // get the base media decode time from the tfdt
26149
26150 findBox(traf, ['tfdt']).forEach(function (tfdt) {
26151 var version = tfdt.data[tfdt.start];
26152 var baseMediaDecodeTime = readUint32(tfdt, 4);
26153
26154 if (version === 0) {
26155 writeUint32(tfdt, 4, baseMediaDecodeTime - timeOffset * timescale);
26156 } else {
26157 baseMediaDecodeTime *= Math.pow(2, 32);
26158 baseMediaDecodeTime += readUint32(tfdt, 8);
26159 baseMediaDecodeTime -= timeOffset * timescale;
26160 baseMediaDecodeTime = Math.max(baseMediaDecodeTime, 0);
26161 var upper = Math.floor(baseMediaDecodeTime / (UINT32_MAX + 1));
26162 var lower = Math.floor(baseMediaDecodeTime % (UINT32_MAX + 1));
26163 writeUint32(tfdt, 4, upper);
26164 writeUint32(tfdt, 8, lower);
26165 }
26166 });
26167 });
26168 });
26169} // TODO: Check if the last moof+mdat pair is part of the valid range
26170
26171function segmentValidRange(data) {
26172 var segmentedRange = {
26173 valid: null,
26174 remainder: null
26175 };
26176 var moofs = findBox(data, ['moof']);
26177
26178 if (!moofs) {
26179 return segmentedRange;
26180 } else if (moofs.length < 2) {
26181 segmentedRange.remainder = data;
26182 return segmentedRange;
26183 }
26184
26185 var last = moofs[moofs.length - 1]; // Offset by 8 bytes; findBox offsets the start by as much
26186
26187 segmentedRange.valid = Object(_typed_array__WEBPACK_IMPORTED_MODULE_0__["sliceUint8"])(data, 0, last.start - 8);
26188 segmentedRange.remainder = Object(_typed_array__WEBPACK_IMPORTED_MODULE_0__["sliceUint8"])(data, last.start - 8);
26189 return segmentedRange;
26190}
26191function appendUint8Array(data1, data2) {
26192 var temp = new Uint8Array(data1.length + data2.length);
26193 temp.set(data1);
26194 temp.set(data2, data1.length);
26195 return temp;
26196}
26197
26198/***/ }),
26199
26200/***/ "./src/utils/output-filter.ts":
26201/*!************************************!*\
26202 !*** ./src/utils/output-filter.ts ***!
26203 \************************************/
26204/*! exports provided: default */
26205/***/ (function(module, __webpack_exports__, __webpack_require__) {
26206__webpack_require__.r(__webpack_exports__);
26207/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return OutputFilter; });
26208var OutputFilter = /*#__PURE__*/function () {
26209 function OutputFilter(timelineController, trackName) {
26210 this.timelineController = void 0;
26211 this.cueRanges = [];
26212 this.trackName = void 0;
26213 this.startTime = null;
26214 this.endTime = null;
26215 this.screen = null;
26216 this.timelineController = timelineController;
26217 this.trackName = trackName;
26218 }
26219
26220 var _proto = OutputFilter.prototype;
26221
26222 _proto.dispatchCue = function dispatchCue() {
26223 if (this.startTime === null) {
26224 return;
26225 }
26226
26227 this.timelineController.addCues(this.trackName, this.startTime, this.endTime, this.screen, this.cueRanges);
26228 this.startTime = null;
26229 };
26230
26231 _proto.newCue = function newCue(startTime, endTime, screen) {
26232 if (this.startTime === null || this.startTime > startTime) {
26233 this.startTime = startTime;
26234 }
26235
26236 this.endTime = endTime;
26237 this.screen = screen;
26238 this.timelineController.createCaptionsTrack(this.trackName);
26239 };
26240
26241 _proto.reset = function reset() {
26242 this.cueRanges = [];
26243 this.startTime = null;
26244 };
26245
26246 return OutputFilter;
26247}();
26248
26249
26250
26251/***/ }),
26252
26253/***/ "./src/utils/texttrack-utils.ts":
26254/*!**************************************!*\
26255 !*** ./src/utils/texttrack-utils.ts ***!
26256 \**************************************/
26257/*! exports provided: sendAddTrackEvent, addCueToTrack, clearCurrentCues, removeCuesInRange, getCuesInRange */
26258/***/ (function(module, __webpack_exports__, __webpack_require__) {
26259__webpack_require__.r(__webpack_exports__);
26260/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "sendAddTrackEvent", function() { return sendAddTrackEvent; });
26261/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "addCueToTrack", function() { return addCueToTrack; });
26262/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "clearCurrentCues", function() { return clearCurrentCues; });
26263/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "removeCuesInRange", function() { return removeCuesInRange; });
26264/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getCuesInRange", function() { return getCuesInRange; });
26265/* harmony import */ var _logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./logger */ "./src/utils/logger.ts");
26266
26267function sendAddTrackEvent(track, videoEl) {
26268 var event;
26269
26270 try {
26271 event = new Event('addtrack');
26272 } catch (err) {
26273 // for IE11
26274 event = document.createEvent('Event');
26275 event.initEvent('addtrack', false, false);
26276 }
26277
26278 event.track = track;
26279 videoEl.dispatchEvent(event);
26280}
26281function addCueToTrack(track, cue) {
26282 // Sometimes there are cue overlaps on segmented vtts so the same
26283 // cue can appear more than once in different vtt files.
26284 // This avoids showing duplicated cues with the same timecode and text.
26285 var mode = track.mode;
26286
26287 if (mode === 'disabled') {
26288 track.mode = 'hidden';
26289 }
26290
26291 if (track.cues && !track.cues.getCueById(cue.id)) {
26292 try {
26293 track.addCue(cue);
26294
26295 if (!track.cues.getCueById(cue.id)) {
26296 throw new Error("addCue failed for: " + cue);
26297 }
26298 } catch (err) {
26299 _logger__WEBPACK_IMPORTED_MODULE_0__["logger"].debug("[texttrack-utils]: " + err);
26300 var textTrackCue = new self.TextTrackCue(cue.startTime, cue.endTime, cue.text);
26301 textTrackCue.id = cue.id;
26302 track.addCue(textTrackCue);
26303 }
26304 }
26305
26306 if (mode === 'disabled') {
26307 track.mode = mode;
26308 }
26309}
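// Usage sketch (illustrative): callers typically assign each cue a stable id first
// (e.g. via generateCueId in webvtt-parser) so that addCueToTrack(track, cue) can
// skip duplicates from overlapping segmented VTTs through the getCueById check above.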
26310function clearCurrentCues(track) {
26311 // When track.mode is disabled, track.cues will be null.
26312 // To guarantee the removal of cues, we need to temporarily
26313 // change the mode to hidden
26314 var mode = track.mode;
26315
26316 if (mode === 'disabled') {
26317 track.mode = 'hidden';
26318 }
26319
26320 if (track.cues) {
26321 for (var i = track.cues.length; i--;) {
26322 track.removeCue(track.cues[i]);
26323 }
26324 }
26325
26326 if (mode === 'disabled') {
26327 track.mode = mode;
26328 }
26329}
26330function removeCuesInRange(track, start, end) {
26331 var mode = track.mode;
26332
26333 if (mode === 'disabled') {
26334 track.mode = 'hidden';
26335 }
26336
26337 if (track.cues && track.cues.length > 0) {
26338 var cues = getCuesInRange(track.cues, start, end);
26339
26340 for (var i = 0; i < cues.length; i++) {
26341 track.removeCue(cues[i]);
26342 }
26343 }
26344
26345 if (mode === 'disabled') {
26346 track.mode = mode;
26347 }
26348} // Find first cue starting after given time.
26349// Modified version of binary search O(log(n)).
26350
26351function getFirstCueIndexAfterTime(cues, time) {
26352 // If first cue starts after time, start there
26353 if (time < cues[0].startTime) {
26354 return 0;
26355 } // If the last cue ends before time there is no overlap
26356
26357
26358 var len = cues.length - 1;
26359
26360 if (time > cues[len].endTime) {
26361 return -1;
26362 }
26363
26364 var left = 0;
26365 var right = len;
26366
26367 while (left <= right) {
26368 var mid = Math.floor((right + left) / 2);
26369
26370 if (time < cues[mid].startTime) {
26371 right = mid - 1;
26372 } else if (time > cues[mid].startTime && left < len) {
26373 left = mid + 1;
26374 } else {
26375 // If it's not lower or higher, it must be equal.
26376 return mid;
26377 }
26378 } // At this point, left and right have swapped.
26379 // No direct match was found, left or right element must be the closest. Check which one has the smallest diff.
26380
26381
26382 return cues[left].startTime - time < time - cues[right].startTime ? left : right;
26383}
26384
26385function getCuesInRange(cues, start, end) {
26386 var cuesFound = [];
26387 var firstCueInRange = getFirstCueIndexAfterTime(cues, start);
26388
26389 if (firstCueInRange > -1) {
26390 for (var i = firstCueInRange, len = cues.length; i < len; i++) {
26391 var cue = cues[i];
26392
26393 if (cue.startTime >= start && cue.endTime <= end) {
26394 cuesFound.push(cue);
26395 } else if (cue.startTime > end) {
26396 return cuesFound;
26397 }
26398 }
26399 }
26400
26401 return cuesFound;
26402}
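// Usage sketch (illustrative): removeCuesInRange(track, 10, 20) above relies on
// getCuesInRange(track.cues, 10, 20) to collect only cues fully contained in the
// 10-20 s window; getFirstCueIndexAfterTime keeps the lookup near O(log n).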
26403
26404/***/ }),
26405
26406/***/ "./src/utils/time-ranges.ts":
26407/*!**********************************!*\
26408 !*** ./src/utils/time-ranges.ts ***!
26409 \**********************************/
26410/*! exports provided: default */
26411/***/ (function(module, __webpack_exports__, __webpack_require__) {
26412__webpack_require__.r(__webpack_exports__);
26413/**
26414 * TimeRanges to string helper
26415 */
26416var TimeRanges = {
26417 toString: function toString(r) {
26418 var log = '';
26419 var len = r.length;
26420
26421 for (var i = 0; i < len; i++) {
26422 log += '[' + r.start(i).toFixed(3) + ',' + r.end(i).toFixed(3) + ']';
26423 }
26424
26425 return log;
26426 }
26427};
26428/* harmony default export */ __webpack_exports__["default"] = (TimeRanges);
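// Usage sketch (illustrative): TimeRanges.toString(video.buffered) renders buffered
// ranges for logging, e.g. "[0.000,4.000][8.000,12.000]".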
26429
26430/***/ }),
26431
26432/***/ "./src/utils/timescale-conversion.ts":
26433/*!*******************************************!*\
26434 !*** ./src/utils/timescale-conversion.ts ***!
26435 \*******************************************/
26436/*! exports provided: toTimescaleFromBase, toTimescaleFromScale, toMsFromMpegTsClock, toMpegTsClockFromTimescale */
26437/***/ (function(module, __webpack_exports__, __webpack_require__) {
26438__webpack_require__.r(__webpack_exports__);
26439/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "toTimescaleFromBase", function() { return toTimescaleFromBase; });
26440/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "toTimescaleFromScale", function() { return toTimescaleFromScale; });
26441/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "toMsFromMpegTsClock", function() { return toMsFromMpegTsClock; });
26442/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "toMpegTsClockFromTimescale", function() { return toMpegTsClockFromTimescale; });
26443var MPEG_TS_CLOCK_FREQ_HZ = 90000;
26444function toTimescaleFromBase(value, destScale, srcBase, round) {
26445 if (srcBase === void 0) {
26446 srcBase = 1;
26447 }
26448
26449 if (round === void 0) {
26450 round = false;
26451 }
26452
26453 var result = value * destScale * srcBase; // equivalent to `(value * scale) / (1 / base)`
26454
26455 return round ? Math.round(result) : result;
26456}
26457function toTimescaleFromScale(value, destScale, srcScale, round) {
26458 if (srcScale === void 0) {
26459 srcScale = 1;
26460 }
26461
26462 if (round === void 0) {
26463 round = false;
26464 }
26465
26466 return toTimescaleFromBase(value, destScale, 1 / srcScale, round);
26467}
26468function toMsFromMpegTsClock(value, round) {
26469 if (round === void 0) {
26470 round = false;
26471 }
26472
26473 return toTimescaleFromBase(value, 1000, 1 / MPEG_TS_CLOCK_FREQ_HZ, round);
26474}
26475function toMpegTsClockFromTimescale(value, srcScale) {
26476 if (srcScale === void 0) {
26477 srcScale = 1;
26478 }
26479
26480 return toTimescaleFromBase(value, MPEG_TS_CLOCK_FREQ_HZ, 1 / srcScale);
26481}
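// Worked examples (illustrative):
// toMsFromMpegTsClock(90000) === 1000            // 90,000 ticks of the 90 kHz clock = 1000 ms
// toTimescaleFromScale(2, 90000, 1) === 180000   // 2 s expressed in a 90 kHz timescale
// toMpegTsClockFromTimescale(3, 1) === 270000    // 3 s expressed in MPEG-TS clock ticks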
26482
26483/***/ }),
26484
26485/***/ "./src/utils/typed-array.ts":
26486/*!**********************************!*\
26487 !*** ./src/utils/typed-array.ts ***!
26488 \**********************************/
26489/*! exports provided: sliceUint8 */
26490/***/ (function(module, __webpack_exports__, __webpack_require__) {
26491__webpack_require__.r(__webpack_exports__);
26492/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "sliceUint8", function() { return sliceUint8; });
26493function sliceUint8(array, start, end) {
26494 // @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
26495 // It always exists in the TypeScript definition so fails, but it fails at runtime on IE11.
26496 return Uint8Array.prototype.slice ? array.slice(start, end) : new Uint8Array(Array.prototype.slice.call(array, start, end));
26497}
26498
26499/***/ }),
26500
26501/***/ "./src/utils/vttcue.ts":
26502/*!*****************************!*\
26503 !*** ./src/utils/vttcue.ts ***!
26504 \*****************************/
26505/*! exports provided: default */
26506/***/ (function(module, __webpack_exports__, __webpack_require__) {
26507__webpack_require__.r(__webpack_exports__);
26508/**
26509 * Copyright 2013 vtt.js Contributors
26510 *
26511 * Licensed under the Apache License, Version 2.0 (the 'License');
26512 * you may not use this file except in compliance with the License.
26513 * You may obtain a copy of the License at
26514 *
26515 * http://www.apache.org/licenses/LICENSE-2.0
26516 *
26517 * Unless required by applicable law or agreed to in writing, software
26518 * distributed under the License is distributed on an 'AS IS' BASIS,
26519 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
26520 * See the License for the specific language governing permissions and
26521 * limitations under the License.
26522 */
26523/* harmony default export */ __webpack_exports__["default"] = ((function () {
26524 if (typeof self !== 'undefined' && self.VTTCue) {
26525 return self.VTTCue;
26526 }
26527
26528 var AllowedDirections = ['', 'lr', 'rl'];
26529 var AllowedAlignments = ['start', 'middle', 'end', 'left', 'right'];
26530
26531 function isAllowedValue(allowed, value) {
26532 if (typeof value !== 'string') {
26533 return false;
26534 } // necessary for assuring the generic conforms to the Array interface
26535
26536
26537 if (!Array.isArray(allowed)) {
26538 return false;
26539 } // reset the type so that the next narrowing works well
26540
26541
26542 var lcValue = value.toLowerCase(); // use the allow list to narrow the type to a specific subset of strings
26543
26544 if (~allowed.indexOf(lcValue)) {
26545 return lcValue;
26546 }
26547
26548 return false;
26549 }
26550
26551 function findDirectionSetting(value) {
26552 return isAllowedValue(AllowedDirections, value);
26553 }
26554
26555 function findAlignSetting(value) {
26556 return isAllowedValue(AllowedAlignments, value);
26557 }
26558
26559 function extend(obj) {
26560 for (var _len = arguments.length, rest = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
26561 rest[_key - 1] = arguments[_key];
26562 }
26563
26564 var i = 1;
26565
26566 for (; i < arguments.length; i++) {
26567 var cobj = arguments[i];
26568
26569 for (var p in cobj) {
26570 obj[p] = cobj[p];
26571 }
26572 }
26573
26574 return obj;
26575 }
26576
26577 function VTTCue(startTime, endTime, text) {
26578 var cue = this;
26579 var baseObj = {
26580 enumerable: true
26581 };
26582 /**
26583 * Shim implementation specific properties. These properties are not in
26584 * the spec.
26585 */
26586 // Lets us know when the VTTCue's data has changed in such a way that we need
26587 // to recompute its display state. This lets us compute its display state
26588 // lazily.
26589
26590 cue.hasBeenReset = false;
26591 /**
26592 * VTTCue and TextTrackCue properties
26593 * http://dev.w3.org/html5/webvtt/#vttcue-interface
26594 */
26595
26596 var _id = '';
26597 var _pauseOnExit = false;
26598 var _startTime = startTime;
26599 var _endTime = endTime;
26600 var _text = text;
26601 var _region = null;
26602 var _vertical = '';
26603 var _snapToLines = true;
26604 var _line = 'auto';
26605 var _lineAlign = 'start';
26606 var _position = 50;
26607 var _positionAlign = 'middle';
26608 var _size = 50;
26609 var _align = 'middle';
26610 Object.defineProperty(cue, 'id', extend({}, baseObj, {
26611 get: function get() {
26612 return _id;
26613 },
26614 set: function set(value) {
26615 _id = '' + value;
26616 }
26617 }));
26618 Object.defineProperty(cue, 'pauseOnExit', extend({}, baseObj, {
26619 get: function get() {
26620 return _pauseOnExit;
26621 },
26622 set: function set(value) {
26623 _pauseOnExit = !!value;
26624 }
26625 }));
26626 Object.defineProperty(cue, 'startTime', extend({}, baseObj, {
26627 get: function get() {
26628 return _startTime;
26629 },
26630 set: function set(value) {
26631 if (typeof value !== 'number') {
26632 throw new TypeError('Start time must be set to a number.');
26633 }
26634
26635 _startTime = value;
26636 this.hasBeenReset = true;
26637 }
26638 }));
26639 Object.defineProperty(cue, 'endTime', extend({}, baseObj, {
26640 get: function get() {
26641 return _endTime;
26642 },
26643 set: function set(value) {
26644 if (typeof value !== 'number') {
26645 throw new TypeError('End time must be set to a number.');
26646 }
26647
26648 _endTime = value;
26649 this.hasBeenReset = true;
26650 }
26651 }));
26652 Object.defineProperty(cue, 'text', extend({}, baseObj, {
26653 get: function get() {
26654 return _text;
26655 },
26656 set: function set(value) {
26657 _text = '' + value;
26658 this.hasBeenReset = true;
26659 }
26660 })); // todo: implement VTTRegion polyfill?
26661
26662 Object.defineProperty(cue, 'region', extend({}, baseObj, {
26663 get: function get() {
26664 return _region;
26665 },
26666 set: function set(value) {
26667 _region = value;
26668 this.hasBeenReset = true;
26669 }
26670 }));
26671 Object.defineProperty(cue, 'vertical', extend({}, baseObj, {
26672 get: function get() {
26673 return _vertical;
26674 },
26675 set: function set(value) {
26676 var setting = findDirectionSetting(value); // Have to check for false because the setting can be an empty string.
26677
26678 if (setting === false) {
26679 throw new SyntaxError('An invalid or illegal string was specified.');
26680 }
26681
26682 _vertical = setting;
26683 this.hasBeenReset = true;
26684 }
26685 }));
26686 Object.defineProperty(cue, 'snapToLines', extend({}, baseObj, {
26687 get: function get() {
26688 return _snapToLines;
26689 },
26690 set: function set(value) {
26691 _snapToLines = !!value;
26692 this.hasBeenReset = true;
26693 }
26694 }));
26695 Object.defineProperty(cue, 'line', extend({}, baseObj, {
26696 get: function get() {
26697 return _line;
26698 },
26699 set: function set(value) {
26700 if (typeof value !== 'number' && value !== 'auto') {
26701 throw new SyntaxError('An invalid number or illegal string was specified.');
26702 }
26703
26704 _line = value;
26705 this.hasBeenReset = true;
26706 }
26707 }));
26708 Object.defineProperty(cue, 'lineAlign', extend({}, baseObj, {
26709 get: function get() {
26710 return _lineAlign;
26711 },
26712 set: function set(value) {
26713 var setting = findAlignSetting(value);
26714
26715 if (!setting) {
26716 throw new SyntaxError('An invalid or illegal string was specified.');
26717 }
26718
26719 _lineAlign = setting;
26720 this.hasBeenReset = true;
26721 }
26722 }));
26723 Object.defineProperty(cue, 'position', extend({}, baseObj, {
26724 get: function get() {
26725 return _position;
26726 },
26727 set: function set(value) {
26728 if (value < 0 || value > 100) {
26729 throw new Error('Position must be between 0 and 100.');
26730 }
26731
26732 _position = value;
26733 this.hasBeenReset = true;
26734 }
26735 }));
26736 Object.defineProperty(cue, 'positionAlign', extend({}, baseObj, {
26737 get: function get() {
26738 return _positionAlign;
26739 },
26740 set: function set(value) {
26741 var setting = findAlignSetting(value);
26742
26743 if (!setting) {
26744 throw new SyntaxError('An invalid or illegal string was specified.');
26745 }
26746
26747 _positionAlign = setting;
26748 this.hasBeenReset = true;
26749 }
26750 }));
26751 Object.defineProperty(cue, 'size', extend({}, baseObj, {
26752 get: function get() {
26753 return _size;
26754 },
26755 set: function set(value) {
26756 if (value < 0 || value > 100) {
26757 throw new Error('Size must be between 0 and 100.');
26758 }
26759
26760 _size = value;
26761 this.hasBeenReset = true;
26762 }
26763 }));
26764 Object.defineProperty(cue, 'align', extend({}, baseObj, {
26765 get: function get() {
26766 return _align;
26767 },
26768 set: function set(value) {
26769 var setting = findAlignSetting(value);
26770
26771 if (!setting) {
26772 throw new SyntaxError('An invalid or illegal string was specified.');
26773 }
26774
26775 _align = setting;
26776 this.hasBeenReset = true;
26777 }
26778 }));
26779 /**
26780 * Other <track> spec defined properties
26781 */
26782 // http://www.whatwg.org/specs/web-apps/current-work/multipage/the-video-element.html#text-track-cue-display-state
26783
26784 cue.displayState = undefined;
26785 }
26786 /**
26787 * VTTCue methods
26788 */
26789
26790
26791 VTTCue.prototype.getCueAsHTML = function () {
26792 // Assume WebVTT.convertCueToDOMTree is on the global.
26793 var WebVTT = self.WebVTT;
26794 return WebVTT.convertCueToDOMTree(self, this.text);
26795 }; // this is a polyfill hack
26796
26797
26798 return VTTCue;
26799})());
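// Usage sketch (illustrative): the exported constructor mirrors the native VTTCue, e.g.
// var cue = new VTTCue(0, 5, 'Hello'); cue.align = 'start'; cue.position = 10;
// Out-of-spec values throw, as in the setters above (e.g. cue.align = 'bogus' -> SyntaxError).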
26800
26801/***/ }),
26802
26803/***/ "./src/utils/vttparser.ts":
26804/*!********************************!*\
26805 !*** ./src/utils/vttparser.ts ***!
26806 \********************************/
26807/*! exports provided: parseTimeStamp, fixLineBreaks, VTTParser */
26808/***/ (function(module, __webpack_exports__, __webpack_require__) {
26809__webpack_require__.r(__webpack_exports__);
26810/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseTimeStamp", function() { return parseTimeStamp; });
26811/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "fixLineBreaks", function() { return fixLineBreaks; });
26812/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "VTTParser", function() { return VTTParser; });
26813/* harmony import */ var _vttcue__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./vttcue */ "./src/utils/vttcue.ts");
26814/*
26815 * Source: https://github.com/mozilla/vtt.js/blob/master/dist/vtt.js
26816 */
26817
26818
26819var StringDecoder = /*#__PURE__*/function () {
26820 function StringDecoder() {}
26821
26822 var _proto = StringDecoder.prototype;
26823
26824 // eslint-disable-next-line @typescript-eslint/no-unused-vars
26825 _proto.decode = function decode(data, options) {
26826 if (!data) {
26827 return '';
26828 }
26829
26830 if (typeof data !== 'string') {
26831 throw new Error('Error - expected string data.');
26832 }
26833
26834 return decodeURIComponent(encodeURIComponent(data));
26835 };
26836
26837 return StringDecoder;
26838}(); // Try to parse input as a time stamp.
26839
26840
26841function parseTimeStamp(input) {
26842 function computeSeconds(h, m, s, f) {
26843 return (h | 0) * 3600 + (m | 0) * 60 + (s | 0) + parseFloat(f || 0);
26844 }
26845
26846 var m = input.match(/^(?:(\d+):)?(\d{2}):(\d{2})(\.\d+)?/);
26847
26848 if (!m) {
26849 return null;
26850 }
26851
26852 if (parseFloat(m[2]) > 59) {
26853 // Timestamp takes the form of [hours]:[minutes].[milliseconds]
26854 // First position is hours as it's over 59.
26855 return computeSeconds(m[2], m[3], 0, m[4]);
26856 } // Timestamp takes the form of [hours (optional)]:[minutes]:[seconds].[milliseconds]
26857
26858
26859 return computeSeconds(m[1], m[2], m[3], m[4]);
26860} // A settings object holds key/value pairs and will ignore anything but the first
26861// assignment to a specific key.
26862
26863var Settings = /*#__PURE__*/function () {
26864 function Settings() {
26865 this.values = Object.create(null);
26866 }
26867
26868 var _proto2 = Settings.prototype;
26869
26870 // Only accept the first assignment to any key.
26871 _proto2.set = function set(k, v) {
26872 if (!this.get(k) && v !== '') {
26873 this.values[k] = v;
26874 }
26875 } // Return the value for a key, or a default value.
26876 // If 'defaultKey' is passed then 'dflt' is assumed to be an object with
26877 // a number of possible default values as properties where 'defaultKey' is
26878 // the key of the property that will be chosen; otherwise it's assumed to be
26879 // a single value.
26880 ;
26881
26882 _proto2.get = function get(k, dflt, defaultKey) {
26883 if (defaultKey) {
26884 return this.has(k) ? this.values[k] : dflt[defaultKey];
26885 }
26886
26887 return this.has(k) ? this.values[k] : dflt;
26888 } // Check whether we have a value for a key.
26889 ;
26890
26891 _proto2.has = function has(k) {
26892 return k in this.values;
26893 } // Accept a setting if it's one of the given alternatives.
26894 ;
26895
26896 _proto2.alt = function alt(k, v, a) {
26897 for (var n = 0; n < a.length; ++n) {
26898 if (v === a[n]) {
26899 this.set(k, v);
26900 break;
26901 }
26902 }
26903 } // Accept a setting if it's a valid (signed) integer.
26904 ;
26905
26906 _proto2.integer = function integer(k, v) {
26907 if (/^-?\d+$/.test(v)) {
26908 // integer
26909 this.set(k, parseInt(v, 10));
26910 }
26911 } // Accept a setting if it's a valid percentage.
26912 ;
26913
26914 _proto2.percent = function percent(k, v) {
26915 if (/^([\d]{1,3})(\.[\d]*)?%$/.test(v)) {
26916 var percent = parseFloat(v);
26917
26918 if (percent >= 0 && percent <= 100) {
26919 this.set(k, percent);
26920 return true;
26921 }
26922 }
26923
26924 return false;
26925 };
26926
26927 return Settings;
26928}(); // Helper function to parse input into groups separated by 'groupDelim', and
26929// interpret each group as a key/value pair separated by 'keyValueDelim'.
26930
26931
26932function parseOptions(input, callback, keyValueDelim, groupDelim) {
26933 var groups = groupDelim ? input.split(groupDelim) : [input];
26934
26935 for (var i in groups) {
26936 if (typeof groups[i] !== 'string') {
26937 continue;
26938 }
26939
26940 var kv = groups[i].split(keyValueDelim);
26941
26942 if (kv.length !== 2) {
26943 continue;
26944 }
26945
26946 var _k = kv[0];
26947 var _v = kv[1];
26948 callback(_k, _v);
26949 }
26950}
26951
26952var defaults = new _vttcue__WEBPACK_IMPORTED_MODULE_0__["default"](0, 0, ''); // 'middle' was changed to 'center' in the spec: https://github.com/w3c/webvtt/pull/244
26953// Safari doesn't yet support this change, but FF and Chrome do.
26954
26955var center = defaults.align === 'middle' ? 'middle' : 'center';
26956
26957function parseCue(input, cue, regionList) {
26958 // Remember the original input if we need to throw an error.
26959 var oInput = input; // 4.1 WebVTT timestamp
26960
26961 function consumeTimeStamp() {
26962 var ts = parseTimeStamp(input);
26963
26964 if (ts === null) {
26965 throw new Error('Malformed timestamp: ' + oInput);
26966 } // Remove time stamp from input.
26967
26968
26969 input = input.replace(/^[^\sa-zA-Z-]+/, '');
26970 return ts;
26971 } // 4.4.2 WebVTT cue settings
26972
26973
26974 function consumeCueSettings(input, cue) {
26975 var settings = new Settings();
26976 parseOptions(input, function (k, v) {
26977 var vals;
26978
26979 switch (k) {
26980 case 'region':
26981 // Find the last region we parsed with the same region id.
26982 for (var i = regionList.length - 1; i >= 0; i--) {
26983 if (regionList[i].id === v) {
26984 settings.set(k, regionList[i].region);
26985 break;
26986 }
26987 }
26988
26989 break;
26990
26991 case 'vertical':
26992 settings.alt(k, v, ['rl', 'lr']);
26993 break;
26994
26995 case 'line':
26996 vals = v.split(',');
26997 settings.integer(k, vals[0]);
26998
26999 if (settings.percent(k, vals[0])) {
27000 settings.set('snapToLines', false);
27001 }
27002
27003 settings.alt(k, vals[0], ['auto']);
27004
27005 if (vals.length === 2) {
27006 settings.alt('lineAlign', vals[1], ['start', center, 'end']);
27007 }
27008
27009 break;
27010
27011 case 'position':
27012 vals = v.split(',');
27013 settings.percent(k, vals[0]);
27014
27015 if (vals.length === 2) {
27016 settings.alt('positionAlign', vals[1], ['start', center, 'end', 'line-left', 'line-right', 'auto']);
27017 }
27018
27019 break;
27020
27021 case 'size':
27022 settings.percent(k, v);
27023 break;
27024
27025 case 'align':
27026 settings.alt(k, v, ['start', center, 'end', 'left', 'right']);
27027 break;
27028 }
27029 }, /:/, /\s/); // Apply default values for any missing fields.
27030
27031 cue.region = settings.get('region', null);
27032 cue.vertical = settings.get('vertical', '');
27033 var line = settings.get('line', 'auto');
27034
27035 if (line === 'auto' && defaults.line === -1) {
27036 // set numeric line number for Safari
27037 line = -1;
27038 }
27039
27040 cue.line = line;
27041 cue.lineAlign = settings.get('lineAlign', 'start');
27042 cue.snapToLines = settings.get('snapToLines', true);
27043 cue.size = settings.get('size', 100);
27044 cue.align = settings.get('align', center);
27045 var position = settings.get('position', 'auto');
27046
27047 if (position === 'auto' && defaults.position === 50) {
27048 // set numeric position for Safari
27049 position = cue.align === 'start' || cue.align === 'left' ? 0 : cue.align === 'end' || cue.align === 'right' ? 100 : 50;
27050 }
27051
27052 cue.position = position;
27053 }
27054
27055 function skipWhitespace() {
27056 input = input.replace(/^\s+/, '');
27057 } // 4.1 WebVTT cue timings.
27058
27059
27060 skipWhitespace();
27061 cue.startTime = consumeTimeStamp(); // (1) collect cue start time
27062
27063 skipWhitespace();
27064
27065 if (input.substr(0, 3) !== '-->') {
27066 // (3) next characters must match '-->'
27067 throw new Error("Malformed time stamp (time stamps must be separated by '-->'): " + oInput);
27068 }
27069
27070 input = input.substr(3);
27071 skipWhitespace();
27072 cue.endTime = consumeTimeStamp(); // (5) collect cue end time
27073 // 4.1 WebVTT cue settings list.
27074
27075 skipWhitespace();
27076 consumeCueSettings(input, cue);
27077}
27078
27079function fixLineBreaks(input) {
27080 return input.replace(/<br(?: \/)?>/gi, '\n');
27081}
27082var VTTParser = /*#__PURE__*/function () {
27083 function VTTParser() {
27084 this.state = 'INITIAL';
27085 this.buffer = '';
27086 this.decoder = new StringDecoder();
27087 this.regionList = [];
27088 this.cue = null;
27089 this.oncue = void 0;
27090 this.onparsingerror = void 0;
27091 this.onflush = void 0;
27092 }
27093
27094 var _proto3 = VTTParser.prototype;
27095
27096 _proto3.parse = function parse(data) {
27097 var _this = this; // If there is no data then we won't decode it, but will just try to parse
27098 // whatever is in the buffer already. This may occur, for example,
27099 // when flush() is called.
27100
27101
27102 if (data) {
27103 // Try to decode the data that we received.
27104 _this.buffer += _this.decoder.decode(data, {
27105 stream: true
27106 });
27107 }
27108
27109 function collectNextLine() {
27110 var buffer = _this.buffer;
27111 var pos = 0;
27112 buffer = fixLineBreaks(buffer);
27113
27114 while (pos < buffer.length && buffer[pos] !== '\r' && buffer[pos] !== '\n') {
27115 ++pos;
27116 }
27117
27118 var line = buffer.substr(0, pos); // Advance the buffer early in case we fail below.
27119
27120 if (buffer[pos] === '\r') {
27121 ++pos;
27122 }
27123
27124 if (buffer[pos] === '\n') {
27125 ++pos;
27126 }
27127
27128 _this.buffer = buffer.substr(pos);
27129 return line;
27130 } // 3.2 WebVTT metadata header syntax
27131
27132
27133 function parseHeader(input) {
27134 parseOptions(input, function (k, v) {// switch (k) {
27135 // case 'region':
27136 // 3.3 WebVTT region metadata header syntax
27137 // console.log('parse region', v);
27138 // parseRegion(v);
27139 // break;
27140 // }
27141 }, /:/);
27142 } // 5.1 WebVTT file parsing.
27143
27144
27145 try {
27146 var line = '';
27147
27148 if (_this.state === 'INITIAL') {
27149 // We can't start parsing until we have the first line.
27150 if (!/\r\n|\n/.test(_this.buffer)) {
27151 return this;
27152 }
27153
27154 line = collectNextLine(); // strip off the UTF-8 BOM if any
27155 // https://en.wikipedia.org/wiki/Byte_order_mark#UTF-8
27156
27157 var m = line.match(/^(\uFEFF)?WEBVTT([ \t].*)?$/);
27158
27159 if (!m || !m[0]) {
27160 throw new Error('Malformed WebVTT signature.');
27161 }
27162
27163 _this.state = 'HEADER';
27164 }
27165
27166 var alreadyCollectedLine = false;
27167
27168 while (_this.buffer) {
27169 // We can't parse a line until we have the full line.
27170 if (!/\r\n|\n/.test(_this.buffer)) {
27171 return this;
27172 }
27173
27174 if (!alreadyCollectedLine) {
27175 line = collectNextLine();
27176 } else {
27177 alreadyCollectedLine = false;
27178 }
27179
27180 switch (_this.state) {
27181 case 'HEADER':
27182 // 13-18 - Allow a header (metadata) under the WEBVTT line.
27183 if (/:/.test(line)) {
27184 parseHeader(line);
27185 } else if (!line) {
27186 // An empty line terminates the header and starts the body (cues).
27187 _this.state = 'ID';
27188 }
27189
27190 continue;
27191
27192 case 'NOTE':
27193 // Ignore NOTE blocks.
27194 if (!line) {
27195 _this.state = 'ID';
27196 }
27197
27198 continue;
27199
27200 case 'ID':
27201 // Check for the start of NOTE blocks.
27202 if (/^NOTE($|[ \t])/.test(line)) {
27203 _this.state = 'NOTE';
27204 break;
27205 } // 19-29 - Allow any number of line terminators, then initialize new cue values.
27206
27207
27208 if (!line) {
27209 continue;
27210 }
27211
27212 _this.cue = new _vttcue__WEBPACK_IMPORTED_MODULE_0__["default"](0, 0, '');
27213 _this.state = 'CUE'; // 30-39 - Check if this line contains an optional identifier or timing data.
27214
27215 if (line.indexOf('-->') === -1) {
27216 _this.cue.id = line;
27217 continue;
27218 }
27219
27220 // Process line as start of a cue.
27221
27222 /* falls through */
27223
27224 case 'CUE':
27225 // 40 - Collect cue timings and settings.
27226 if (!_this.cue) {
27227 _this.state = 'BADCUE';
27228 continue;
27229 }
27230
27231 try {
27232 parseCue(line, _this.cue, _this.regionList);
27233 } catch (e) {
27234 // In case of an error ignore rest of the cue.
27235 _this.cue = null;
27236 _this.state = 'BADCUE';
27237 continue;
27238 }
27239
27240 _this.state = 'CUETEXT';
27241 continue;
27242
27243 case 'CUETEXT':
27244 {
27245 var hasSubstring = line.indexOf('-->') !== -1; // 34 - If we have an empty line then report the cue.
27246 // 35 - If we have the special substring '-->' then report the cue,
27247 // but do not collect the line as we need to process the current
27248 // one as a new cue.
27249
27250 if (!line || hasSubstring && (alreadyCollectedLine = true)) {
27251 // We are done parsing this cue.
27252 if (_this.oncue && _this.cue) {
27253 _this.oncue(_this.cue);
27254 }
27255
27256 _this.cue = null;
27257 _this.state = 'ID';
27258 continue;
27259 }
27260
27261 if (_this.cue === null) {
27262 continue;
27263 }
27264
27265 if (_this.cue.text) {
27266 _this.cue.text += '\n';
27267 }
27268
27269 _this.cue.text += line;
27270 }
27271 continue;
27272
27273 case 'BADCUE':
27274 // 54-62 - Collect and discard the remaining cue.
27275 if (!line) {
27276 _this.state = 'ID';
27277 }
27278
27279 }
27280 }
27281 } catch (e) {
27282 // If we are currently parsing a cue, report what we have.
27283 if (_this.state === 'CUETEXT' && _this.cue && _this.oncue) {
27284 _this.oncue(_this.cue);
27285 }
27286
27287 _this.cue = null; // Enter BADWEBVTT state if header was not parsed correctly otherwise
27288 // another exception occurred so enter BADCUE state.
27289
27290 _this.state = _this.state === 'INITIAL' ? 'BADWEBVTT' : 'BADCUE';
27291 }
27292
27293 return this;
27294 };
27295
27296 _proto3.flush = function flush() {
27297 var _this = this;
27298
27299 try {
27300 // Finish decoding the stream.
27301 // _this.buffer += _this.decoder.decode();
27302 // Synthesize the end of the current cue or region.
27303 if (_this.cue || _this.state === 'HEADER') {
27304 _this.buffer += '\n\n';
27305
27306 _this.parse();
27307 } // If we've flushed, parsed, and we're still on the INITIAL state then
27308 // that means we don't have enough of the stream to parse the first
27309 // line.
27310
27311
27312 if (_this.state === 'INITIAL' || _this.state === 'BADWEBVTT') {
27313 throw new Error('Malformed WebVTT signature.');
27314 }
27315 } catch (e) {
27316 if (_this.onparsingerror) {
27317 _this.onparsingerror(e);
27318 }
27319 }
27320
27321 if (_this.onflush) {
27322 _this.onflush();
27323 }
27324
27325 return this;
27326 };
27327
27328 return VTTParser;
27329}();
27330
27331/***/ }),
27332
27333/***/ "./src/utils/webvtt-parser.ts":
27334/*!************************************!*\
27335 !*** ./src/utils/webvtt-parser.ts ***!
27336 \************************************/
27337/*! exports provided: generateCueId, parseWebVTT */
27338/***/ (function(module, __webpack_exports__, __webpack_require__) {
27339__webpack_require__.r(__webpack_exports__);
27340/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "generateCueId", function() { return generateCueId; });
27341/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseWebVTT", function() { return parseWebVTT; });
27342/* harmony import */ var _home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
27343/* harmony import */ var _vttparser__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./vttparser */ "./src/utils/vttparser.ts");
27344/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
27345/* harmony import */ var _timescale_conversion__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./timescale-conversion */ "./src/utils/timescale-conversion.ts");
27346/* harmony import */ var _remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../remux/mp4-remuxer */ "./src/remux/mp4-remuxer.ts");
27347
27348
27349
27350
27351
27352
27353
27354
27355var LINEBREAKS = /\r\n|\n\r|\n|\r/g; // String.prototype.startsWith is not supported in IE11
27356
27357var startsWith = function startsWith(inputString, searchString, position) {
27358 if (position === void 0) {
27359 position = 0;
27360 }
27361
27362 return inputString.substr(position, searchString.length) === searchString;
27363};
27364
27365var cueString2millis = function cueString2millis(timeString) {
27366 var ts = parseInt(timeString.substr(-3));
27367 var secs = parseInt(timeString.substr(-6, 2));
27368 var mins = parseInt(timeString.substr(-9, 2));
27369 var hours = timeString.length > 9 ? parseInt(timeString.substr(0, timeString.indexOf(':'))) : 0;
27370
27371 if (!Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(ts) || !Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(secs) || !Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(mins) || !Object(_home_runner_work_hls_js_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(hours)) {
27372 throw Error("Malformed X-TIMESTAMP-MAP: Local:" + timeString);
27373 }
27374
27375 ts += 1000 * secs;
27376 ts += 60 * 1000 * mins;
27377 ts += 60 * 60 * 1000 * hours;
27378 return ts;
27379}; // From https://github.com/darkskyapp/string-hash
27380
27381
27382var hash = function hash(text) {
27383 var hash = 5381;
27384 var i = text.length;
27385
27386 while (i) {
27387 hash = hash * 33 ^ text.charCodeAt(--i);
27388 }
27389
27390 return (hash >>> 0).toString();
27391}; // Create a unique hash id for a cue based on start/end times and text.
27392// This helps timeline-controller to avoid showing repeated captions.
27393
27394
27395function generateCueId(startTime, endTime, text) {
27396 return hash(startTime.toString()) + hash(endTime.toString()) + hash(text);
27397}
27398
27399var calculateOffset = function calculateOffset(vttCCs, cc, presentationTime) {
27400 var currCC = vttCCs[cc];
27401 var prevCC = vttCCs[currCC.prevCC]; // This is the first discontinuity or cues have been processed since the last discontinuity
27402 // Offset = current discontinuity time
27403
27404 if (!prevCC || !prevCC.new && currCC.new) {
27405 vttCCs.ccOffset = vttCCs.presentationOffset = currCC.start;
27406 currCC.new = false;
27407 return;
27408 } // There have been discontinuities since cues were last parsed.
27409 // Offset = time elapsed
27410
27411
27412 while ((_prevCC = prevCC) !== null && _prevCC !== void 0 && _prevCC.new) {
27413 var _prevCC;
27414
27415 vttCCs.ccOffset += currCC.start - prevCC.start;
27416 currCC.new = false;
27417 currCC = prevCC;
27418 prevCC = vttCCs[currCC.prevCC];
27419 }
27420
27421 vttCCs.presentationOffset = presentationTime;
27422};
27423
27424function parseWebVTT(vttByteArray, initPTS, timescale, vttCCs, cc, timeOffset, callBack, errorCallBack) {
27425 var parser = new _vttparser__WEBPACK_IMPORTED_MODULE_1__["VTTParser"](); // Convert byteArray into string, replacing any somewhat exotic linefeeds with "\n", then split on that character.
27426 // Uint8Array.prototype.reduce is not implemented in IE11
27427
27428 var vttLines = Object(_demux_id3__WEBPACK_IMPORTED_MODULE_2__["utf8ArrayToStr"])(new Uint8Array(vttByteArray)).trim().replace(LINEBREAKS, '\n').split('\n');
27429 var cues = [];
27430 var initPTS90Hz = Object(_timescale_conversion__WEBPACK_IMPORTED_MODULE_3__["toMpegTsClockFromTimescale"])(initPTS, timescale);
27431 var cueTime = '00:00.000';
27432 var timestampMapMPEGTS = 0;
27433 var timestampMapLOCAL = 0;
27434 var parsingError;
27435 var inHeader = true;
27436 var timestampMap = false;
27437
27438 parser.oncue = function (cue) {
27439 // Adjust cue timing; clamp cues to start no earlier than (and drop cues that don't end after) 0 on the timeline.
27440 var currCC = vttCCs[cc];
27441 var cueOffset = vttCCs.ccOffset; // Calculate subtitle PTS offset
27442
27443 var webVttMpegTsMapOffset = (timestampMapMPEGTS - initPTS90Hz) / 90000; // Update offsets for new discontinuities
27444
27445 if (currCC !== null && currCC !== void 0 && currCC.new) {
27446 if (timestampMapLOCAL !== undefined) {
27447 // When local time is provided, offset = discontinuity start time - local time
27448 cueOffset = vttCCs.ccOffset = currCC.start;
27449 } else {
27450 calculateOffset(vttCCs, cc, webVttMpegTsMapOffset);
27451 }
27452 }
27453
27454 if (webVttMpegTsMapOffset) {
27455 // If we have MPEGTS, offset = presentation time + discontinuity offset
27456 cueOffset = webVttMpegTsMapOffset - vttCCs.presentationOffset;
27457 }
27458
27459 if (timestampMap) {
27460 var duration = cue.endTime - cue.startTime;
27461 var startTime = Object(_remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_4__["normalizePts"])((cue.startTime + cueOffset - timestampMapLOCAL) * 90000, timeOffset * 90000) / 90000;
27462 cue.startTime = startTime;
27463 cue.endTime = startTime + duration;
27464 } //trim trailing webvtt block whitespaces
27465
27466
27467 var text = cue.text.trim(); // Fix encoding of special characters
27468
27469 cue.text = decodeURIComponent(encodeURIComponent(text)); // If the cue was not assigned an id from the VTT file (line above the content), create one.
27470
27471 if (!cue.id) {
27472 cue.id = generateCueId(cue.startTime, cue.endTime, text);
27473 }
27474
27475 if (cue.endTime > 0) {
27476 cues.push(cue);
27477 }
27478 };
27479
27480 parser.onparsingerror = function (error) {
27481 parsingError = error;
27482 };
27483
27484 parser.onflush = function () {
27485 if (parsingError) {
27486 errorCallBack(parsingError);
27487 return;
27488 }
27489
27490 callBack(cues);
27491 }; // Go through contents line by line.
27492
27493
27494 vttLines.forEach(function (line) {
27495 if (inHeader) {
27496 // Look for X-TIMESTAMP-MAP in header.
27497 if (startsWith(line, 'X-TIMESTAMP-MAP=')) {
27498 // Once found, no more are allowed anyway, so stop searching.
27499 inHeader = false;
27500 timestampMap = true; // Extract LOCAL and MPEGTS.
27501
27502 line.substr(16).split(',').forEach(function (timestamp) {
27503 if (startsWith(timestamp, 'LOCAL:')) {
27504 cueTime = timestamp.substr(6);
27505 } else if (startsWith(timestamp, 'MPEGTS:')) {
27506 timestampMapMPEGTS = parseInt(timestamp.substr(7));
27507 }
27508 });
27509
27510 try {
27511 // Convert cue time to seconds
27512 timestampMapLOCAL = cueString2millis(cueTime) / 1000;
27513 } catch (error) {
27514 timestampMap = false;
27515 parsingError = error;
27516 } // Return without parsing X-TIMESTAMP-MAP line.
27517
27518
27519 return;
27520 } else if (line === '') {
27521 inHeader = false;
27522 }
27523 } // Parse line by default.
27524
27525
27526 parser.parse(line + '\n');
27527 });
27528 parser.flush();
27529}
27530
27531/***/ }),
27532
27533/***/ "./src/utils/xhr-loader.ts":
27534/*!*********************************!*\
27535 !*** ./src/utils/xhr-loader.ts ***!
27536 \*********************************/
27537/*! exports provided: default */
27538/***/ (function(module, __webpack_exports__, __webpack_require__) {
27539__webpack_require__.r(__webpack_exports__);
27540/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
27541/* harmony import */ var _loader_load_stats__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../loader/load-stats */ "./src/loader/load-stats.ts");
27542
27543
27544var AGE_HEADER_LINE_REGEX = /^age:\s*[\d.]+\s*$/m;
27545
27546var XhrLoader = /*#__PURE__*/function () {
27547 function XhrLoader(config
27548 /* HlsConfig */
27549 ) {
27550 this.xhrSetup = void 0;
27551 this.requestTimeout = void 0;
27552 this.retryTimeout = void 0;
27553 this.retryDelay = void 0;
27554 this.config = null;
27555 this.callbacks = null;
27556 this.context = void 0;
27557 this.loader = null;
27558 this.stats = void 0;
27559 this.xhrSetup = config ? config.xhrSetup : null;
27560 this.stats = new _loader_load_stats__WEBPACK_IMPORTED_MODULE_1__["LoadStats"]();
27561 this.retryDelay = 0;
27562 }
27563
27564 var _proto = XhrLoader.prototype;
27565
27566 _proto.destroy = function destroy() {
27567 this.callbacks = null;
27568 this.abortInternal();
27569 this.loader = null;
27570 this.config = null;
27571 };
27572
27573 _proto.abortInternal = function abortInternal() {
27574 var loader = this.loader;
27575 self.clearTimeout(this.requestTimeout);
27576 self.clearTimeout(this.retryTimeout);
27577
27578 if (loader) {
27579 loader.onreadystatechange = null;
27580 loader.onprogress = null;
27581
27582 if (loader.readyState !== 4) {
27583 this.stats.aborted = true;
27584 loader.abort();
27585 }
27586 }
27587 };
27588
27589 _proto.abort = function abort() {
27590 var _this$callbacks;
27591
27592 this.abortInternal();
27593
27594 if ((_this$callbacks = this.callbacks) !== null && _this$callbacks !== void 0 && _this$callbacks.onAbort) {
27595 this.callbacks.onAbort(this.stats, this.context, this.loader);
27596 }
27597 };
27598
27599 _proto.load = function load(context, config, callbacks) {
27600 if (this.stats.loading.start) {
27601 throw new Error('Loader can only be used once.');
27602 }
27603
27604 this.stats.loading.start = self.performance.now();
27605 this.context = context;
27606 this.config = config;
27607 this.callbacks = callbacks;
27608 this.retryDelay = config.retryDelay;
27609 this.loadInternal();
27610 };
27611
27612 _proto.loadInternal = function loadInternal() {
27613 var config = this.config,
27614 context = this.context;
27615
27616 if (!config) {
27617 return;
27618 }
27619
27620 var xhr = this.loader = new self.XMLHttpRequest();
27621 var stats = this.stats;
27622 stats.loading.first = 0;
27623 stats.loaded = 0;
27624 var xhrSetup = this.xhrSetup;
27625
27626 try {
27627 if (xhrSetup) {
27628 try {
27629 xhrSetup(xhr, context.url);
27630 } catch (e) {
27631 // fix xhrSetup: (xhr, url) => {xhr.setRequestHeader("Content-Language", "test");}
27632 // not working, as xhr.setRequestHeader expects xhr.readyState === OPEN
27633 xhr.open('GET', context.url, true);
27634 xhrSetup(xhr, context.url);
27635 }
27636 }
27637
27638 if (!xhr.readyState) {
27639 xhr.open('GET', context.url, true);
27640 }
27641
27642 var headers = this.context.headers;
27643
27644 if (headers) {
27645 for (var header in headers) {
27646 xhr.setRequestHeader(header, headers[header]);
27647 }
27648 }
27649 } catch (e) {
27650 // IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS
27651 this.callbacks.onError({
27652 code: xhr.status,
27653 text: e.message
27654 }, context, xhr);
27655 return;
27656 }
27657
27658 if (context.rangeEnd) {
27659 xhr.setRequestHeader('Range', 'bytes=' + context.rangeStart + '-' + (context.rangeEnd - 1));
27660 }
27661
27662 xhr.onreadystatechange = this.readystatechange.bind(this);
27663 xhr.onprogress = this.loadprogress.bind(this);
27664 xhr.responseType = context.responseType; // setup timeout before we perform request
27665
27666 self.clearTimeout(this.requestTimeout);
27667 this.requestTimeout = self.setTimeout(this.loadtimeout.bind(this), config.timeout);
27668 xhr.send();
27669 };
27670
27671 _proto.readystatechange = function readystatechange() {
27672 var context = this.context,
27673 xhr = this.loader,
27674 stats = this.stats;
27675
27676 if (!context || !xhr) {
27677 return;
27678 }
27679
27680 var readyState = xhr.readyState;
27681 var config = this.config; // don't proceed if xhr has been aborted
27682
27683 if (stats.aborted) {
27684 return;
27685 } // >= HEADERS_RECEIVED
27686
27687
27688 if (readyState >= 2) {
27689 // clear xhr timeout and rearm it if readyState less than 4
27690 self.clearTimeout(this.requestTimeout);
27691
27692 if (stats.loading.first === 0) {
27693 stats.loading.first = Math.max(self.performance.now(), stats.loading.start);
27694 }
27695
27696 if (readyState === 4) {
27697 xhr.onreadystatechange = null;
27698 xhr.onprogress = null;
27699 var status = xhr.status; // http status between 200 to 299 are all successful
27700
27701 if (status >= 200 && status < 300) {
27702 stats.loading.end = Math.max(self.performance.now(), stats.loading.first);
27703 var data;
27704 var len;
27705
27706 if (context.responseType === 'arraybuffer') {
27707 data = xhr.response;
27708 len = data.byteLength;
27709 } else {
27710 data = xhr.responseText;
27711 len = data.length;
27712 }
27713
27714 stats.loaded = stats.total = len;
27715
27716 if (!this.callbacks) {
27717 return;
27718 }
27719
27720 var onProgress = this.callbacks.onProgress;
27721
27722 if (onProgress) {
27723 onProgress(stats, context, data, xhr);
27724 }
27725
27726 if (!this.callbacks) {
27727 return;
27728 }
27729
27730 var response = {
27731 url: xhr.responseURL,
27732 data: data
27733 };
27734 this.callbacks.onSuccess(response, stats, context, xhr);
27735 } else {
27736 // if max nb of retries reached or if http status between 400 and 499 (such error cannot be recovered, retrying is useless), return error
27737 if (stats.retry >= config.maxRetry || status >= 400 && status < 499) {
27738 _utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].error(status + " while loading " + context.url);
27739 this.callbacks.onError({
27740 code: status,
27741 text: xhr.statusText
27742 }, context, xhr);
27743 } else {
27744 // retry
27745 _utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].warn(status + " while loading " + context.url + ", retrying in " + this.retryDelay + "..."); // abort and reset internal state
27746
27747 this.abortInternal();
27748 this.loader = null; // schedule retry
27749
27750 self.clearTimeout(this.retryTimeout);
27751 this.retryTimeout = self.setTimeout(this.loadInternal.bind(this), this.retryDelay); // set exponential backoff
27752
27753 this.retryDelay = Math.min(2 * this.retryDelay, config.maxRetryDelay);
27754 stats.retry++;
27755 }
27756 }
27757 } else {
27758 // readyState >= 2 AND readyState !==4 (readyState = HEADERS_RECEIVED || LOADING) rearm timeout as xhr not finished yet
27759 self.clearTimeout(this.requestTimeout);
27760 this.requestTimeout = self.setTimeout(this.loadtimeout.bind(this), config.timeout);
27761 }
27762 }
27763 };
27764
27765 _proto.loadtimeout = function loadtimeout() {
27766 _utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].warn("timeout while loading " + this.context.url);
27767 var callbacks = this.callbacks;
27768
27769 if (callbacks) {
27770 this.abortInternal();
27771 callbacks.onTimeout(this.stats, this.context, this.loader);
27772 }
27773 };
27774
27775 _proto.loadprogress = function loadprogress(event) {
27776 var stats = this.stats;
27777 stats.loaded = event.loaded;
27778
27779 if (event.lengthComputable) {
27780 stats.total = event.total;
27781 }
27782 };
27783
27784 _proto.getCacheAge = function getCacheAge() {
27785 var result = null;
27786
27787 if (this.loader && AGE_HEADER_LINE_REGEX.test(this.loader.getAllResponseHeaders())) {
27788 var ageHeader = this.loader.getResponseHeader('age');
27789 result = ageHeader ? parseFloat(ageHeader) : null;
27790 }
27791
27792 return result;
27793 };
27794
27795 return XhrLoader;
27796}();
27797
27798/* harmony default export */ __webpack_exports__["default"] = (XhrLoader);
27799
27800/***/ })
27801
27802/******/ })["default"];
27803});
27804//# sourceMappingURL=hls.js.map
27805});
27806
27807const Hls = /*@__PURE__*/getDefaultExportFromCjs(hls);
27808
27809const indexCss = ".taro-video{display:inline-block;overflow:hidden;position:relative;width:100%;height:225px;line-height:0}.taro-video[hidden]{display:none}.taro-video-container{display:inline-block;position:absolute;left:0;top:0;width:100%;height:100%;background-color:#000;-o-object-position:inherit;object-position:inherit}.taro-video-container.taro-video-type-fullscreen{position:fixed;left:0;right:0;top:0;bottom:0;z-index:999}.taro-video-container.taro-video-type-fullscreen.taro-video-type-rotate-left{-webkit-transform:translate(-50%, -50%) rotate(-90deg);transform:translate(-50%, -50%) rotate(-90deg)}.taro-video-container.taro-video-type-fullscreen.taro-video-type-rotate-right{-webkit-transform:translate(-50%, -50%) rotate(90deg);transform:translate(-50%, -50%) rotate(90deg)}.taro-video-video{width:100%;height:100%;-o-object-position:inherit;object-position:inherit}.taro-video-cover{display:-ms-flexbox;display:flex;position:absolute;left:0;top:0;bottom:0;z-index:1;-ms-flex-direction:column;flex-direction:column;-ms-flex-pack:center;justify-content:center;-ms-flex-align:center;align-items:center;width:100%;background-color:rgba(1, 1, 1, 0.5);-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-box-pack:center;-webkit-box-align:center}.taro-video-cover-play-button{width:40px;height:40px;background-repeat:no-repeat;background-position:50% 50%;background-size:50%}.taro-video-cover-duration{margin-top:10px;line-height:1;font-size:16px;color:#fff}.taro-video-bar{display:-ms-flexbox;display:flex;visibility:hidden;overflow:hidden;position:absolute;right:0;bottom:0;z-index:1;-ms-flex-align:center;align-items:center;padding:0 10px;height:44px;background-color:rgba(0, 0, 0, 0.5);-webkit-box-align:center}.taro-video-bar.taro-video-bar-full{left:0}.taro-video-controls{display:-ms-flexbox;display:flex;-webkit-box-flex:1;-ms-flex-positive:1;flex-grow:1;margin:0 8.5px}.taro-video-control-button{-webkit-box-sizing:content-box;box-sizing:content-box;margin-left:-8.5px;padding:14.5px 12.5px;width:13px;height:15px}.taro-video-control-button::after{display:block;width:100%;height:100%;background-repeat:no-repeat;background-position:50% 50%;background-size:100%;content:\"\"}.taro-video-control-button.taro-video-control-button-play::after,.taro-video-cover-play-button{background-image:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABoAAAAeCAYAAAAy2w7YAAAAAXNSR0IArs4c6QAAAWhJREFUSA1j+P///0cgBoHjQGzCQCsAtgJB/AMy5wCxGNXtQ9iBwvoA5BUCMQvVLEQxHpNzDSjkRhXLMM3GKrIeKKpEkYVYjcUu+AMo3ALE3GRZiN1MvKKPgbIRJFuG10j8koeA0gZEW4jfLIKyf4EqpgOxMEELCRpFnIJ3QGU5QMyM00LizCFa1SWgSkeslhFtBGkKVwGVy6FYSJp+klR/A6quB2JOkIWMIK0oNlOf8xBoZDE9LAI7nYn6HsBq4l96WHQEaLUpAyiOaASeAM2NgvuPBpaACt82IEYtfKls0UagecpwXyAzqGTRdaA57sjmYrAptAjUsCkGYlYMg9EFyLQI1IiZB8Ti6Obh5JNh0QmgHlOcBuKSIMGi50C18UDMiMssvOJEWPQLqKYbiHnxGkRIkoBF24DyaoTMIEoeh0W3geI+RBlArCI0iz4D+RVAzEasfqLVAQ19AcSg5LoYiKWI1kiiQgCMBLnEEcfDSgAAAABJRU5ErkJggg==\")}.taro-video-control-button.taro-video-control-button-pause::after{background-image:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABIAAAAgCAYAAAAffCjxAAAAAXNSR0IArs4c6QAAAFlJREFUSA3tksEKACAIQ7X//5zq98wOgQayum8QaGweHhMzG/6OujzKAymn+0LMqivu1XznWmX8/echTIyMyAgTwA72iIwwAexgj8gIE8CO3aMRbDPMaEy5BRGaKcZv8YxRAAAAAElFTkSuQmCC\")}.taro-video-current-time,.taro-video-duration{margin-top:15px;margin-bottom:14.5px;height:14.5px;line-height:14.5px;font-size:12px;color:#cbcbcb}.taro-video-progress-container{position:relative;-ms-flex-positive:2;flex-grow:2;-webkit-box-flex:2}.taro-video-progress{position:relative;margin:21px 
12px;height:2px;background-color:rgba(255, 255, 255, 0.4)}.taro-video-progress-buffered{position:absolute;left:0;top:0;width:0;height:100%;background-color:rgba(255, 255, 255, 0.8);-webkit-transition:width 0.1s;transition:width 0.1s}.taro-video-ball{position:absolute;left:0;top:-21px;-webkit-box-sizing:content-box;box-sizing:content-box;margin-left:-22px;padding:14px;width:16px;height:16px}.taro-video-inner{border-radius:50%;width:100%;height:100%;background-color:#fff}.taro-video-danmu-button{margin:0 8.5px;padding:2px 10px;border:1px solid #fff;border-radius:5px;line-height:1;font-size:13px;color:#fff;white-space:nowrap}.taro-video-danmu-button.taro-video-danmu-button-active{border-color:#48c23d;color:#48c23d}.taro-video-fullscreen,.taro-video-mute{-webkit-box-sizing:content-box;box-sizing:content-box;padding:8.5px;width:17px;height:17px;background-repeat:no-repeat;background-position:50% 50%;background-size:50%}.taro-video-fullscreen{background-image:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAhUlEQVRYR+2WSwrAMAhEnZO3PfmULLooGEFTiIXJ2s/kRY2wzQeb85sE9CRA8jSzY1YfAFzhJBnU1AVgxH2dSiArCnD9QgGzRNnOech48SRABHoSyFb5in3PSbhyo6yvCPQkEM3u7BsPe/0FIvBfAh/vhKmVbO9SWun1qk/PSVi9TcVPBG6R1YIhgWwNpQAAAABJRU5ErkJggg==\")}.taro-video-fullscreen.taro-video-type-fullscreen{background-image:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAABPUlEQVRYR+2Xu0pDURBF1/ZLxNcHKNiIlfhA7C0UBSEE8RNEBNFPUEQEEbGxFiSSSrCwEHsf5E/ccsSUuWfUhKQ40947+y42Z8+ZK/pcinzf9hhwD1xJ2q/qsb0JHAOzkl5y+lGAGnCWICQtZgAS6DxQk3TeLYA6cAo0JSXxjmW7CcwBdUkJurKiDhSA4kBvHbA9CqwBQx2O7BSw8ssU3ALPFRF4knT3nQLbr8B4LjLBOdAAFgJaLUkjbYC9n+zm+i4kXWbmwCqwnRMCHiXthuZAQOzPrxSA4kBxYDAcsH0EzATCfCLpJjOINoCtgFZabg7bk7AFDAeaGpKWgitZTu5N0kQbYBmYrujo9mX0CVxL+gidAdu9vY5zXhWA4sAgOND3X7NJ4AHYCaxkB8B62gslvecSFpoDOZH/PP8Cnt7hIaM5xCEAAAAASUVORK5CYII=\")}.taro-video-mute{background-image:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAACXBIWXMAAAsTAAALEwEAmpwYAAAGAGlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS42LWMxNDAgNzkuMTYwNDUxLCAyMDE3LzA1LzA2LTAxOjA4OjIxICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOmRjPSJodHRwOi8vcHVybC5vcmcvZGMvZWxlbWVudHMvMS4xLyIgeG1sbnM6cGhvdG9zaG9wPSJodHRwOi8vbnMuYWRvYmUuY29tL3Bob3Rvc2hvcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RFdnQ9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZUV2ZW50IyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOCAoTWFjaW50b3NoKSIgeG1wOkNyZWF0ZURhdGU9IjIwMTktMDQtMTFUMTA6MTg6MjArMDg6MDAiIHhtcDpNb2RpZnlEYXRlPSIyMDE5LTA0LTExVDEwOjIyOjIyKzA4OjAwIiB4bXA6TWV0YWRhdGFEYXRlPSIyMDE5LTA0LTExVDEwOjIyOjIyKzA4OjAwIiBkYzpmb3JtYXQ9ImltYWdlL3BuZyIgcGhvdG9zaG9wOkNvbG9yTW9kZT0iMyIgcGhvdG9zaG9wOklDQ1Byb2ZpbGU9InNSR0IgSUVDNjE5NjYtMi4xIiB4bXBNTTpJbnN0YW5jZUlEPSJ4bXAuaWlkOjk3YmE4Yjg0LTFhNTYtNGM1MS04NDVkLTNiZmYyMGI0ZDc0ZiIgeG1wTU06RG9jdW1lbnRJRD0iYWRvYmU6ZG9jaWQ6cGhvdG9zaG9wOjg1NGQ3MjlkLWUwNjctZjU0OC1hMTlhLTBlZjQ4OGRkYjJiOSIgeG1wTU06T3JpZ2luYWxEb2N1bWVudElEPSJ4bXAuZGlkOjA1ODY3ZDFlLWQ3NGEtNDgyNC04MDU3LTYzYmRmMTdjODk5ZSI+IDx4bXBNTTpIaXN0b3J5PiA8cmRmOlNlcT4gPHJkZjpsaSBzdEV2dDphY3Rpb249ImNyZWF0ZWQiIHN0RXZ0Omluc3RhbmNlSUQ9InhtcC5paWQ6MDU4NjdkMWUtZDc0YS00ODI0LTgwNTctNjNiZGYxN2M4OTllIiBzdEV2dDp3aGVuPSIyMDE5LTA0LTExVDEwOjE4Oj
IwKzA4OjAwIiBzdEV2dDpzb2Z0d2FyZUFnZW50PSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOCAoTWFjaW50b3NoKSIvPiA8cmRmOmxpIHN0RXZ0OmFjdGlvbj0ic2F2ZWQiIHN0RXZ0Omluc3RhbmNlSUQ9InhtcC5paWQ6OTdiYThiODQtMWE1Ni00YzUxLTg0NWQtM2JmZjIwYjRkNzRmIiBzdEV2dDp3aGVuPSIyMDE5LTA0LTExVDEwOjIyOjIyKzA4OjAwIiBzdEV2dDpzb2Z0d2FyZUFnZW50PSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOCAoTWFjaW50b3NoKSIgc3RFdnQ6Y2hhbmdlZD0iLyIvPiA8L3JkZjpTZXE+IDwveG1wTU06SGlzdG9yeT4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz459+FoAAABqElEQVRYhc2XPWsVQRSGnxPjF4oGRfxoRQKGWCU2Ft7CykrQWosEyf/If0hhIPgHDEmbNJZqCFxiQEgTUGxsBUVEHgvnyrjZZJO92V1fGIaZnTPvszPszNlQ6VIjnbr/DwCoDLMNak/dUVfUK0f2rQugnlcX/FevWgFQH6gf3autRgHUC+piiXHzAOmtPx9gXgug8itQx9SXwDpw47AGKXZWvXvQmNFCwE3gCXA2dY0Az4GrRzHONA9cU/vAbERsllEOyh31e8USV2mrMPdG9uyn+rDom2/BHHCm5puWKiKmgdtAnz+rvaxO5mNygEvHaZ5BfADuARvAaWBpP4DGFBHfgBngFzClTrUKkCDeA+9S837rAEnbqb7VFcCpVJ/oCmCw959aB1AfAROpudYqgDoOLKRmPyLelAF8bcD4pPoMeAtcB34AT4uDBqXXwFG8XXUU/72MIuK1OgE8Bs6l7mEvo8up7lN1Ge0n9aK6VHMFZvJTr9S3CiALaCQhqZOSvegMIAvu2UVSWpigLC1fbQ0gm6in7qpfLCQbhwGIYcyPQ53/G3YO8BtUtd35bvKcVwAAAABJRU5ErkJggg==\")}.taro-video-mute.taro-video-type-mute{background-image:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACIAAAAgCAYAAAB3j6rJAAAACXBIWXMAAAsTAAALEwEAmpwYAAAGAGlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS42LWMxNDAgNzkuMTYwNDUxLCAyMDE3LzA1LzA2LTAxOjA4OjIxICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOmRjPSJodHRwOi8vcHVybC5vcmcvZGMvZWxlbWVudHMvMS4xLyIgeG1sbnM6cGhvdG9zaG9wPSJodHRwOi8vbnMuYWRvYmUuY29tL3Bob3Rvc2hvcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RFdnQ9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZUV2ZW50IyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOCAoTWFjaW50b3NoKSIgeG1wOkNyZWF0ZURhdGU9IjIwMTktMDQtMTFUMTA6MTk6MDMrMDg6MDAiIHhtcDpNb2RpZnlEYXRlPSIyMDE5LTA0LTExVDEwOjIyOjMzKzA4OjAwIiB4bXA6TWV0YWRhdGFEYXRlPSIyMDE5LTA0LTExVDEwOjIyOjMzKzA4OjAwIiBkYzpmb3JtYXQ9ImltYWdlL3BuZyIgcGhvdG9zaG9wOkNvbG9yTW9kZT0iMyIgcGhvdG9zaG9wOklDQ1Byb2ZpbGU9InNSR0IgSUVDNjE5NjYtMi4xIiB4bXBNTTpJbnN0YW5jZUlEPSJ4bXAuaWlkOjAzYjJmNjE2LTZmZTUtNDJjNC1iNTgwLTczNzZjZjI2NzdmNSIgeG1wTU06RG9jdW1lbnRJRD0iYWRvYmU6ZG9jaWQ6cGhvdG9zaG9wOjYzZjQ2NTYzLWE0ZjktOGQ0Mi1hM2FhLTY3ODJhNDBhYWNjMSIgeG1wTU06T3JpZ2luYWxEb2N1bWVudElEPSJ4bXAuZGlkOjIyYWNjMWFlLTg4ZmMtNDBlZi1iMWM1LTNmODgwY2QzYWI2MiI+IDx4bXBNTTpIaXN0b3J5PiA8cmRmOlNlcT4gPHJkZjpsaSBzdEV2dDphY3Rpb249ImNyZWF0ZWQiIHN0RXZ0Omluc3RhbmNlSUQ9InhtcC5paWQ6MjJhY2MxYWUtODhmYy00MGVmLWIxYzUtM2Y4ODBjZDNhYjYyIiBzdEV2dDp3aGVuPSIyMDE5LTA0LTExVDEwOjE5OjAzKzA4OjAwIiBzdEV2dDpzb2Z0d2FyZUFnZW50PSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOCAoTWFjaW50b3NoKSIvPiA8cmRmOmxpIHN0RXZ0OmFjdGlvbj0ic2F2ZWQiIHN0RXZ0Omluc3RhbmNlSUQ9InhtcC5paWQ6MDNiMmY2MTYtNmZlNS00MmM0LWI1ODAtNzM3NmNmMjY3N2Y1IiBzdEV2dDp3aGVuPSIyMDE5LTA0LTExVDEwOjIyOjMzKzA4OjAwIiBzdEV2dDpzb2Z0d2FyZUFnZW50PSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOCAoTWFjaW50b3NoKSIgc3RFdnQ6Y2hhbmdlZD0iLyIvPiA8L3JkZjpTZXE+IDwveG1wTU06SGlzdG9yeT4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz5PmxYVAAACLklEQVRYhc2XP2sVQRRHz40hKoqaQgVBCy1EozFlGiVFxMLGh4piYWEh+hkEP4YKAVFEEFTyughaCH6DqIVpAhYWEgIxoJE8cywyi5tNHu/tJmvyg2WZO3dmzt47/zZUtoJ6Nhsg09YDiYhKDzACTAFNYH9lEpUq80TdrT5wpV5n/ZV9KoGoo+pXV2uyKkipOaLuUceAt8DhUvQd1FsCYhR4ChzaSIBMHSOi7l
OfsByFWiCgEBH1GHAF2JlMPcBt4GC3HUYEaj9wF3gVEVPtfNVTwAXgWX7CDKq/1piAZTSZBmim8qJ6sQ3EgDqb/L7kU3MH2NHtl3dQX3r3Ak21UYAYAj4A/cl0JB+RF+uMRj4iQ+p8zt7KYFLdXKHuRi0gacBhV6a6pd5bA6KRNagFJPU9qv5u47toLmW1HnoR8Q5oAK1CVQu4FBHj/wUkaXsb+4pzpVaQFPqXrN7Be4Fx9VztIOr1BLEtmX4A94E/qdwHTKjDWYM6lu81dSlnn3V570BtuLxaMs2rZ/IgYxsBovaoPwsQA4VoFWEm8ql5DiysNyURsQTMpOIMcDYiPhd8xoGr/FtNC2G6FKXD6ihwGdiVHMoeeh8jYlA9ANwE3kTEp3bO6vE03qOONzR1r/q4RGrquaFFxFxE3ALOA9+6jExpdb180y55AhirhaRTatq0GXEzL8+ZIuI9cBJ4WKiartJf9nWV/mty7UfUafW7erpqRGI9EBuprffvu9n6C1KOmsqwI5A1AAAAAElFTkSuQmCC\")}.taro-video-danmu{overflow:visible;position:absolute;left:0;top:0;bottom:0;margin-top:14px;margin-bottom:44px;width:100%;line-height:14px;font-size:14px}.taro-video-danmu-item{position:absolute;left:100%;line-height:1;color:#fff;white-space:nowrap;-webkit-transform:translateX(0);transform:translateX(0);-webkit-transition-property:left, -webkit-transform;transition-property:left, -webkit-transform;transition-property:left, transform;transition-property:left, transform, -webkit-transform;-webkit-transition-duration:3s;transition-duration:3s;-webkit-transition-timing-function:linear;transition-timing-function:linear}.taro-video-toast{display:block;visibility:hidden;position:absolute;left:50%;top:50%;border-radius:5px;background-color:rgba(255, 255, 255, 0.8);pointer-events:none;color:#000;-webkit-transform:translate(-50%, -50%);transform:translate(-50%, -50%)}.taro-video-toast.taro-video-toast-volume{display:block;width:100px;height:100px}.taro-video-toast-volume .taro-video-toast-title{display:block;margin-top:10px;width:100%;line-height:16px;text-align:center;font-size:12px}.taro-video-toast-volume .taro-video-toast-icon{display:block;margin-left:25%;width:50%;height:50%;background-image:url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAFhklEQVR4Xu2aeaxfQxTHP1VBES0NIQitWtpaaxeCUkQtaYVa0tiClAq1ExIiQTVppaWxt8RWSa2tWkJQRGgtQaSIpUKEpG0ssbbk28yV2+mZO/e9e3vvu/e98897mZnfzPl+75mZs0wvurn06ub46SGgxwLqZaA3sB/wO/A+8G/V6tS5BU4BJgJbO9DvAMOB36okoQ4CNgAeBEYbQK8Bbm4zAdsBc4EdAyA/APZoKwHHAA8DG2UA/AnYrG0ErAVcD1yXA9gfQJ8c40obsqbPgH7AY8CROTVuFQFD3X7fJid4DWsNAWOAGZ0w58YTsDZwKzChA189PbTRBGwKzAYO6iT4srbAMHfTfAksjulS1iG4JzAH2Dy2YKS/qAVcCdzi1vgZOBZ4PWvNGAG6wgYCW0IwctzNmf06BcEXtYABwOeA4otEfgEOdnGGqV6IAAUopwM6zDYuAVjeKYpYwCjgCWOhH513+b2lhE+Avrj89NPyalzyuCIE6EN9BfQ1dHoPOAD40+9LE6D/5aoqSqtLsgiQC60DTiHzfGCFoaSsQAexZdlTgEuyCDgLuK8u5G7dEAHyKN8CBrtxLwDHAX8Z+l4N3GS0L3db4aN0X5opmc+2XZQAfb2LPd2eciG1lUSRJZ9qYHkZONwiYAjwSc3gs24BXWWWfzEVuMjQez13HljX8v7A28lvEgsInaBVcxLaAvr6sgJLtBWeNTrOB+4w2p8DRvoEnAPcXTVaY70QAbrbXwIONX7zBSAL/tvrk1+iviTllnRry2irr/QSEws4D7izCxMg1XQLLAQGGXpeCkw22kOWI49RMUujCJC+uwAfGtfcImAng4AtAMsBeg04pIkESOfbgQsMsNoGnxrtSrfv7rXrSlwXWN6kLZBgEBiB8iWUUZZPIN/AF1nMoiYSICDfAlt5iB4CxhpAzwXuMtqPBuY1lYB5wFEeqFeAwwyguvIUqvuiQO/xphJwP3Cmhyh0EKrOoGDIF7n+M5pKwEwXrqdBCaQSM77Ig7SSIicDs5pKwIvACA+pzFwZIF9OlKm36QyQV7jEqDDJk5VD58uFgGIGX1Se+6yJFqAKsqI6X84GdDb4Mh0Y5zUqjFYFakUTCXja5QLSmBQHbAL8ahAgT1AeYVreSKLLphGwF/CuAfLJQLldGSTFD75c5d4mNCoWyAqG9gYWGED1AOMKo12ZbiWA/idA++deY2DVTZ0Jh5UJPsFQtD/wDaAHGWl5EzgwaUi2gK4UXS11S0cTIjrMFBtYQdAk4DID0BnAAz4B6wNLgTKKG0VIDBHwqitwpOcW+OOB540FdeipNKbUWFrUtn06o5xOij4KyDuqU0IE3Obl/rLA63pTzm9XA8jKACjdniZAaSKFmUpB1yUhAlTsUEpMh913wEkuTe7rKTzPAHqO44vpKPkFhH3cWWBVV6ogJVYZUgz/tXtIYelzI3Ct0fExIGx6j7iKWBUUveyYBehv1RIjIEsfqziq8Xp4pSBJOYTVJFQcVY3wCFeKUjVGyYfQWB00+5bEVBECrNS+qsOKBpVHNCVWHs+LS7H5PV5pOu9v0+OKEOAXR39w1e1C7wM6AkJ1eLmkRcrpRQiQrqobXO5S3vL3/4kBKMsCknV0k+iasVLUMV3UX5SAPGtED8EOT+L9YENnCasUIXNO2goChFWHqAIRyxXN4qI1BCQg9dJESYq8LnbrCBAR8t50Lig6i0krCRBoVWhVlt4
5wkBrCRBuRZyPuAguxIPe9lXqhpd9DcZMXOvdkPF0Xu/8dohNUmZ/1QQkuitXr+d4fryuFx3jywQYm6suAqSX8vLTXKJDt4QqO6rtLYspXWZ/nQQkOJTAUJZGIav19q9MvKvN1RUIWKMAY5P3EBBjqO393d4C/gMVHwRQlpx21QAAAABJRU5ErkJggg==\");background-repeat:no-repeat;background-position:50% 50%;background-size:50%;fill:#000}.taro-video-toast-volume .taro-video-toast-value{margin-left:10px;margin-top:5px;width:80px;height:5px}.taro-video-toast-volume .taro-video-toast-value>.taro-video-toast-value-content{overflow:hidden}.taro-video-toast-volume-grids{width:80px;height:5px}.taro-video-toast-volume-grids-item{float:left;width:7.1px;height:5px;background-color:#000}.taro-video-toast-volume-grids-item:not(:first-child){margin-left:1px}.taro-video-toast.taro-video-toast-progress{padding:6px;background-color:rgba(0, 0, 0, 0.8);line-height:18px;font-size:14px;color:#fff}";
27810
27811let Video = class {
27812 constructor(hostRef) {
27813 registerInstance(this, hostRef);
27814 this.onPlay = createEvent(this, "play", 7);
27815 this.onPause = createEvent(this, "pause", 7);
27816 this.onEnded = createEvent(this, "ended", 7);
27817 this.onTimeUpdate = createEvent(this, "timeupdate", 7);
27818 this.onError = createEvent(this, "error", 7);
27819 this.onFullScreenChange = createEvent(this, "fullscreenchange", 7);
27820 this.onProgress = createEvent(this, "progress", 7);
27821 this.onLoadedMetaData = createEvent(this, "loadedmetadata", 7);
27822 this.currentTime = 0;
27823 this.isDraggingProgress = false;
27824 this.gestureType = 'none';
27825 /**
27826 * Whether to show the default playback controls (play/pause button, progress bar, time)
27827 */
27828 this.controls = true;
27829 /**
27830 * Whether to autoplay
27831 */
27832 this.autoplay = false;
27833 /**
27834 * Whether to loop playback
27835 */
27836 this.loop = false;
27837 /**
27838 * Whether to play muted
27839 */
27840 this.muted = false;
27841 /**
27842 * Initial playback position of the video
27843 */
27844 this.initialTime = 0;
27845 /**
27846 * How the video is presented when its size differs from the size of the video container
27847 */
27848 this.objectFit = 'contain';
27849 /**
27850 * If not set, the progress bar is only shown when the width is greater than 240
27851 */
27852 this.showProgress = true;
27853 /**
27854 * Whether to show the fullscreen button
27855 */
27856 this.showFullscreenBtn = true;
27857 /**
27858 * Whether to show the play button in the bottom control bar
27859 */
27860 this.showPlayBtn = true;
27861 /**
27862 * Whether to show the play button in the center of the video
27863 */
27864 this.showCenterPlayBtn = true;
27865 /**
27866 * Whether to show the mute button
27867 */
27868 this.showMuteBtn = false;
27869 /**
27870 * Whether to show the danmu (bullet comment) button
27871 */
27872 this.danmuBtn = false;
27873 /**
27874 * Whether to display danmu
27875 */
27876 this.enableDanmu = false;
27877 /**
27878 * Whether to enable the play gesture, i.e. double-tap to toggle play/pause
27879 */
27880 this.enablePlayGesture = false;
27881 /**
27882 * Whether to enable the progress-control gesture
27883 */
27884 this.enableProgressGesture = true;
27885 /**
27886 * Whether to enable brightness and volume adjustment gestures when not in fullscreen
27887 */
27888 this.vslideGesture = false;
27889 /**
27890 * Whether to enable brightness and volume adjustment gestures in fullscreen
27891 */
27892 this.vslideGestureInFullscreen = true;
27893 this.nativeProps = {};
27894 this._enableDanmu = false;
27895 this.isPlaying = false;
27896 this.isFirst = true;
27897 this.isFullScreen = false;
27898 this.fullScreenTimestamp = new Date().getTime();
27899 this.isMute = false;
27900 this.analyzeGesture = (e) => {
27901 var _a;
27902 const obj = {
27903 type: 'none'
27904 };
27905 const nowX = e.touches[0].screenX;
27906 const nowY = e.touches[0].screenY;
27907 const distX = nowX - this.lastTouchScreenX;
27908 const distY = nowY - this.lastTouchScreenY;
27909 const enableVslideGesture = this.isFullScreen ? this.vslideGestureInFullscreen : this.vslideGesture;
27910 if (this.gestureType === 'none') {
27911 // Distance between the two points
27912 const dist = calcDist(distX, distY);
27913 // No movement
27914 if (dist < 10)
27915 return obj;
27916 if (Math.abs(distY) >= Math.abs(distX)) {
27917 // Vertical movement: adjust the volume
27918 if (enableVslideGesture) {
27919 this.gestureType = 'adjustVolume';
27920 this.lastVolume = this.videoRef.volume;
27921 }
27922 else {
27923 return obj;
27924 }
27925 }
27926 else if (Math.abs(distY) < Math.abs(distX)) {
27927 // Horizontal movement: adjust the playback progress
27928 if (this.enableProgressGesture) {
27929 this.gestureType = 'adjustProgress';
27930 this.lastPercentage = this.currentTime / ((_a = this.duration) !== null && _a !== void 0 ? _a : this._duration);
27931 }
27932 else {
27933 return obj;
27934 }
27935 }
27936 }
27937 obj.type = this.gestureType;
27938 obj.dataX = normalizeNumber(distX / 200);
27939 obj.dataY = normalizeNumber(distY / 200);
27940 return obj;
27941 };
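// Note (added): a rough worked example of the gesture math above, for a vertical swipe.
// dataY = normalizeNumber(distY / 200), so dragging the finger about 100px upward gives
// distY ≈ -100 and dataY ≈ -0.5; the touch-move handler below then raises the volume via
// lastVolume - dataY, i.e. by roughly 0.5.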
27942 this.loadNativePlayer = () => {
27943 if (this.videoRef) {
27944 this.videoRef.src = this.src;
27945 this.videoRef.load();
27946 }
27947 };
27948 this.init = () => {
27949 const { src, videoRef } = this;
27950 if (isHls(src)) {
27951 if (Hls.isSupported()) {
27952 if (this.hls) {
27953 this.hls.destroy();
27954 }
27955 this.hls = new Hls();
27956 this.hls.loadSource(src);
27957 this.hls.attachMedia(videoRef);
27958 this.hls.on(Hls.Events.MANIFEST_PARSED, () => {
27959 this.autoplay && this.play();
27960 });
27961 this.hls.on(Hls.Events.ERROR, (_, data) => {
27962 this.handleError(data);
27963 });
27964 }
27965 else if (videoRef.canPlayType('application/vnd.apple.mpegurl')) {
27966 this.loadNativePlayer();
27967 }
27968 else {
27969 console.error('该浏览器不支持 HLS 播放');
27970 }
27971 }
27972 else {
27973 this.loadNativePlayer();
27974 }
27975 };
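// Note (added): init() chooses the playback path — hls.js Media Source playback when the
// src looks like an HLS manifest and Hls.isSupported(), the native <video> element when the
// browser can play 'application/vnd.apple.mpegurl' directly (Safari/iOS), and a plain src
// assignment via loadNativePlayer() for all other sources.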
27976 this.handlePlay = () => {
27977 this.isPlaying = true;
27978 this.isFirst = false;
27979 this.controlsRef.toggleVisibility(true);
27980 this.onPlay.emit();
27981 };
27982 this.handlePause = () => {
27983 this.isPlaying = false;
27984 this.controlsRef.toggleVisibility(true);
27985 this.onPause.emit();
27986 };
27987 this.handleEnded = () => {
27988 this.isFirst = true;
27989 this.pause();
27990 this.controlsRef.toggleVisibility();
27991 this.onEnded.emit();
27992 };
27993 this.handleTimeUpdate = throttle(async (e) => {
27994 var _a, _b;
27995 this.currentTime = this.videoRef.currentTime;
27996 const duration = this.duration || this._duration;
27997 const isControlDragging = await this.controlsRef.getIsDraggingProgressBall();
27998 if (this.controls && this.showProgress) {
27999 if (!isControlDragging && !this.isDraggingProgress) {
28000 this.controlsRef.setProgressBall(this.currentTime / duration);
28001 this.controlsRef.setCurrentTime(this.currentTime);
28002 }
28003 }
28004 this.danmuRef.tick(this.currentTime);
28005 this.onTimeUpdate.emit({
28006 duration: (_a = e.target) === null || _a === void 0 ? void 0 : _a.duration,
28007 currentTime: (_b = e.target) === null || _b === void 0 ? void 0 : _b.currentTime
28008 });
28009 if (this.duration) {
28010 if (this.currentTime >= this.duration) {
28011 this.seek(0);
28012 this.handleEnded();
28013 }
28014 }
28015 }, 250);
28016 this.handleError = e => {
28017 var _a, _b;
28018 if (this.hls) {
28019 switch (e.type) {
28020 case Hls.ErrorTypes.NETWORK_ERROR:
28021 // try to recover network error
28022 this.onError.emit({ errMsg: e.response });
28023 this.hls.startLoad();
28024 break;
28025 case Hls.ErrorTypes.MEDIA_ERROR:
28026 this.onError.emit({ errMsg: e.reason || '媒体错误,请重试' });
28027 this.hls.recoverMediaError();
28028 break;
28029 }
28030 }
28031 else {
28032 this.onError.emit({
28033 errMsg: (_b = (_a = e.target) === null || _a === void 0 ? void 0 : _a.error) === null || _b === void 0 ? void 0 : _b.message,
28034 });
28035 }
28036 };
28037 this.handleDurationChange = () => {
28038 this._duration = this.videoRef.duration;
28039 };
28040 this.handleProgress = () => {
28041 this.onProgress.emit();
28042 };
28043 this.handleLoadedMetaData = (e) => {
28044 const target = e.target;
28045 this.onLoadedMetaData.emit({
28046 width: target.videoWidth,
28047 height: target.videoHeight,
28048 duration: target.duration
28049 });
28050 };
28051 this._play = () => this.videoRef.play();
28052 this._pause = () => this.videoRef.pause();
28053 this._stop = () => {
28054 this.videoRef.pause();
28055 this._seek(0);
28056 };
28057 this._seek = (position) => {
28058 this.videoRef.currentTime = position;
28059 };
28060 this.onTouchStartContainer = (e) => {
28061 this.lastTouchScreenX = e.touches[0].screenX;
28062 this.lastTouchScreenY = e.touches[0].screenY;
28063 };
28064 this.onClickContainer = () => {
28065 if (this.enablePlayGesture) {
28066 const now = Date.now();
28067 if (now - this.lastClickedTime < 300) {
28068 // Double tap
28069 this.isPlaying ? this.pause() : this.play();
28070 }
28071 this.lastClickedTime = now;
28072 }
28073 this.controlsRef.toggleVisibility();
28074 };
28075 this.onClickFullScreenBtn = (e) => {
28076 e.stopPropagation();
28077 this.toggleFullScreen();
28078 };
28079 this.handleFullScreenChange = e => {
28080 // After entering fullscreen, exiting is driven by a browser event, so sync the state here
28081 const timestamp = new Date().getTime();
28082 if (!e.detail && this.isFullScreen && !document[screenFn.fullscreenElement] && timestamp - this.fullScreenTimestamp > 100) {
28083 this.toggleFullScreen(false);
28084 }
28085 };
28086 this.toggleFullScreen = (isFullScreen = !this.isFullScreen) => {
28087 this.isFullScreen = isFullScreen; // this.videoRef?.['webkitDisplayingFullscreen']
28088 this.controlsRef.toggleVisibility(true);
28089 this.fullScreenTimestamp = new Date().getTime();
28090 this.onFullScreenChange.emit({
28091 fullScreen: this.isFullScreen,
28092 direction: 'vertical'
28093 });
28094 if (this.isFullScreen && !document[screenFn.fullscreenElement]) {
28095 setTimeout(() => {
28096 this.videoRef[screenFn.requestFullscreen]({ navigationUI: 'auto' });
28097 }, 0);
28098 }
28099 };
28100 this.toggleMute = (e) => {
28101 e.stopPropagation();
28102 this.videoRef.muted = !this.isMute;
28103 this.controlsRef.toggleVisibility(true);
28104 this.isMute = !this.isMute;
28105 };
28106 this.toggleDanmu = (e) => {
28107 e.stopPropagation();
28108 this.controlsRef.toggleVisibility(true);
28109 this._enableDanmu = !this._enableDanmu;
28110 };
28111 }
28112 componentWillLoad() {
28113 this._enableDanmu = this.enableDanmu;
28114 }
28115 componentDidLoad() {
28116 this.init();
28117 if (this.initialTime) {
28118 this.videoRef.currentTime = this.initialTime;
28119 }
28120 // Currently danmuList only supports initializing the danmu list; updating the list later is not yet supported
28121 this.danmuRef.sendDanmu(this.danmuList);
28122 if (document.addEventListener) {
28123 document.addEventListener(screenFn.fullscreenchange, this.handleFullScreenChange);
28124 }
28125 if (this.videoRef && scene === 'iOS') {
28126 // NOTE: On iOS, fullscreenchange does not fire when exiting fullscreen; only webkitpresentationmodechanged and webkitendfullscreen can be used instead
28127 this.videoRef.addEventListener('webkitendfullscreen', this.handleFullScreenChange);
28128 }
28129 }
28130 componentDidRender() {
28131 }
28132 disconnectedCallback() {
28133 if (document.removeEventListener) {
28134 document.removeEventListener(screenFn.fullscreenchange, this.handleFullScreenChange);
28135 }
28136 if (this.videoRef && scene === 'iOS') {
28137 this.videoRef.removeEventListener('webkitendfullscreen', this.handleFullScreenChange);
28138 }
28139 }
28140 watchEnableDanmu(newVal) {
28141 this._enableDanmu = newVal;
28142 }
28143 watchSrc() {
28144 this.init();
28145 }
28146 async onDocumentTouchMove(e) {
28147 if (this.lastTouchScreenX === undefined || this.lastTouchScreenY === undefined)
28148 return;
28149 if (await this.controlsRef.getIsDraggingProgressBall())
28150 return;
28151 const gestureObj = this.analyzeGesture(e);
28152 if (gestureObj.type === 'adjustVolume') {
28153 this.toastVolumeRef.style.visibility = 'visible';
28154 const nextVolume = Math.max(Math.min(this.lastVolume - gestureObj.dataY, 1), 0);
28155 this.videoRef.volume = nextVolume;
28156 this.toastVolumeBarRef.style.width = `${nextVolume * 100}%`;
28157 }
28158 else if (gestureObj.type === 'adjustProgress') {
28159 this.isDraggingProgress = true;
28160 this.nextPercentage = Math.max(Math.min(this.lastPercentage + gestureObj.dataX, 1), 0);
28161 if (this.controls && this.showProgress) {
28162 this.controlsRef.setProgressBall(this.nextPercentage);
28163 this.controlsRef.toggleVisibility(true);
28164 }
28165 const duration = this.duration || this._duration;
28166 this.toastProgressTitleRef.innerHTML = `${formatTime(this.nextPercentage * duration)} / ${formatTime(duration)}`;
28167 this.toastProgressRef.style.visibility = 'visible';
28168 }
28169 }
28170 onDocumentTouchEnd() {
28171 var _a;
28172 if (this.gestureType === 'adjustVolume') {
28173 this.toastVolumeRef.style.visibility = 'hidden';
28174 }
28175 else if (this.gestureType === 'adjustProgress') {
28176 this.toastProgressRef.style.visibility = 'hidden';
28177 }
28178 if (this.isDraggingProgress) {
28179 this.isDraggingProgress = false;
28180 this.seek(this.nextPercentage * ((_a = this.duration) !== null && _a !== void 0 ? _a : this._duration));
28181 }
28182 this.gestureType = 'none';
28183 this.lastTouchScreenX = undefined;
28184 this.lastTouchScreenY = undefined;
28185 }
28186 /** Play the video */
28187 async play() {
28188 this._play();
28189 }
28190 /** Pause the video */
28191 async pause() {
28192 this._pause();
28193 }
28194 /** Stop the video */
28195 async stop() {
28196 this._stop();
28197 }
28198 /** Seek to the specified position */
28199 async seek(position) {
28200 this._seek(position);
28201 }
28202 /** Enter fullscreen. To show custom content while in fullscreen, place the content nodes inside the video node. */
28203 async requestFullScreen() {
28204 this.toggleFullScreen(true);
28205 }
28206 /** Exit fullscreen */
28207 async exitFullScreen() {
28208 this.toggleFullScreen(false);
28209 }
28210 render() {
28211 const { controls, autoplay, loop, muted, poster, objectFit, isFirst, isMute, isFullScreen, showCenterPlayBtn, isPlaying, _enableDanmu, showMuteBtn, danmuBtn, showFullscreenBtn, nativeProps } = this;
28212 const duration = this.duration || this._duration;
28213 const durationTime = formatTime(duration);
28214 return (h(Host, { class: classnames('taro-video-container', {
28215 'taro-video-type-fullscreen': isFullScreen
28216 }), onTouchStart: this.onTouchStartContainer, onClick: this.onClickContainer }, h("video", Object.assign({ class: 'taro-video-video', style: {
28217 'object-fit': objectFit
28218 }, ref: dom => {
28219 if (dom) {
28220 this.videoRef = dom;
28221 }
28222 }, autoplay: autoplay, loop: loop, muted: muted, poster: controls ? poster : undefined, playsinline: true, "webkit-playsinline": true, onPlay: this.handlePlay, onPause: this.handlePause, onEnded: this.handleEnded, onTimeUpdate: this.handleTimeUpdate, onError: this.handleError, onDurationChange: this.handleDurationChange, onProgress: this.handleProgress, onLoadedMetaData: this.handleLoadedMetaData }, nativeProps), "\u6682\u65F6\u4E0D\u652F\u6301\u64AD\u653E\u8BE5\u89C6\u9891"), h("taro-video-danmu", { ref: dom => {
28223 if (dom) {
28224 this.danmuRef = dom;
28225 }
28226 }, enable: _enableDanmu }), isFirst && showCenterPlayBtn && !isPlaying && (h("div", { class: 'taro-video-cover' }, h("div", { class: 'taro-video-cover-play-button', onClick: () => this.play() }), h("p", { class: 'taro-video-cover-duration' }, durationTime))), h("taro-video-control", { ref: dom => {
28227 if (dom) {
28228 this.controlsRef = dom;
28229 }
28230 }, controls: controls, currentTime: this.currentTime, duration: duration, isPlaying: this.isPlaying, pauseFunc: this._pause, playFunc: this._play, seekFunc: this._seek, showPlayBtn: this.showPlayBtn, showProgress: this.showProgress }, showMuteBtn && (h("div", { class: classnames('taro-video-mute', {
28231 'taro-video-type-mute': isMute
28232 }), onClick: this.toggleMute })), danmuBtn && (h("div", { class: classnames('taro-video-danmu-button', {
28233 'taro-video-danmu-button-active': _enableDanmu
28234 }), onClick: this.toggleDanmu }, "\u5F39\u5E55")), showFullscreenBtn && (h("div", { class: classnames('taro-video-fullscreen', {
28235 'taro-video-type-fullscreen': isFullScreen
28236 }), onClick: this.onClickFullScreenBtn }))), h("div", { class: 'taro-video-toast taro-video-toast-volume', ref: dom => {
28237 if (dom) {
28238 this.toastVolumeRef = dom;
28239 }
28240 } }, h("div", { class: 'taro-video-toast-title' }, "\u97F3\u91CF"), h("div", { class: 'taro-video-toast-icon' }), h("div", { class: 'taro-video-toast-value' }, h("div", { class: 'taro-video-toast-value-content', ref: dom => {
28241 if (dom) {
28242 this.toastVolumeBarRef = dom;
28243 }
28244 } }, h("div", { class: 'taro-video-toast-volume-grids' }, Array(10).fill(1).map(() => (h("div", { class: 'taro-video-toast-volume-grids-item' }))))))), h("div", { class: 'taro-video-toast taro-video-toast-progress', ref: dom => {
28245 if (dom) {
28246 this.toastProgressRef = dom;
28247 }
28248 } }, h("div", { class: 'taro-video-toast-title', ref: dom => {
28249 if (dom) {
28250 this.toastProgressTitleRef = dom;
28251 }
28252 } }))));
28253 }
28254 get el() { return getElement(this); }
28255 static get watchers() { return {
28256 "enableDanmu": ["watchEnableDanmu"],
28257 "src": ["watchSrc"]
28258 }; }
28259};
28260Video.style = indexCss;
28261
28262let VideoDanmu = class {
28263 constructor(hostRef) {
28264 registerInstance(this, hostRef);
28265 this.list = [];
28266 this.danmuElList = [];
28267 this.currentTime = 0;
28268 this.enable = false;
28269 this.danmuList = [];
28270 }
28271 ensureProperties(danmu) {
28272 const clonedDanmu = Object.assign({}, danmu);
28273 if (!('time' in danmu)) {
28274 clonedDanmu.time = this.currentTime;
28275 }
28276 clonedDanmu.key = Math.random();
28277 clonedDanmu.bottom = `${Math.random() * 90 + 5}%`;
28278 return clonedDanmu;
28279 }
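// Note (added): ensureProperties defaults a danmu's time to the current playback time,
// assigns it a random key, and picks a random vertical position between 5% and 95% from
// the bottom (Math.random() * 90 + 5).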
28280 async sendDanmu(danmuList = []) {
28281 if (Array.isArray(danmuList)) {
28282 this.list = [
28283 ...this.list,
28284 ...danmuList.map(danmu => this.ensureProperties(danmu))
28285 ];
28286 }
28287 else {
28288 const danmu = danmuList;
28289 this.list = [
28290 ...this.list,
28291 Object.assign({}, this.ensureProperties(danmu))
28292 ];
28293 }
28294 }
28295 async tick(currentTime) {
28296 this.currentTime = currentTime;
28297 if (!this.enable)
28298 return;
28299 const danmuList = this.list;
28300 /**
28301 * @todo This check does not handle dragging the progress bar rigorously
28302 */
28303 const newDanmuList = danmuList.filter(({ time }) => {
28304 return currentTime - time < 4 && currentTime > time;
28305 });
28306 let shouldUpdate = false;
28307 const oldDanmuList = this.danmuList;
28308 if (newDanmuList.length !== oldDanmuList.length) {
28309 shouldUpdate = true;
28310 }
28311 else {
28312 shouldUpdate = newDanmuList.some(({ key }) => {
28313 return oldDanmuList.every((danmu) => {
28314 return key !== danmu.key;
28315 });
28316 });
28317 }
28318 if (shouldUpdate) {
28319 this.danmuList = newDanmuList;
28320 }
28321 }
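// Note (added): tick() keeps a danmu visible for a fixed 4-second window, e.g. a danmu
// with time 10 is rendered while currentTime is in (10, 14). The shouldUpdate check only
// swaps this.danmuList when the visible set actually changes, avoiding needless re-renders.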
28322 componentDidUpdate() {
28323 requestAnimationFrame(() => {
28324 setTimeout(() => {
28325 const danmuElList = this.danmuElList.splice(0);
28326 danmuElList.forEach(danmu => {
28327 danmu.style.left = '0';
28328 danmu.style.webkitTransform = 'translateX(-100%)';
28329 danmu.style.transform = 'translateX(-100%)';
28330 });
28331 });
28332 });
28333 }
28334 render() {
28335 if (!this.enable)
28336 return '';
28337 return (h(Host, { class: 'taro-video-danmu' }, this.danmuList.map(({ text, color, bottom, key }) => (h("p", { class: 'taro-video-danmu-item', key: key, style: {
28338 color,
28339 bottom
28340 }, ref: ref => {
28341 if (ref) {
28342 this.danmuElList.push(ref);
28343 }
28344 } }, text)))));
28345 }
28346};
28347
28348export { VideoControl as taro_video_control, Video as taro_video_core, VideoDanmu as taro_video_danmu };