/*! @name @videojs/http-streaming @version 2.11.1 @license Apache-2.0 */
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('video.js'), require('@xmldom/xmldom')) :
  typeof define === 'function' && define.amd ? define(['exports', 'video.js', '@xmldom/xmldom'], factory) :
  (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.httpStreaming = {}, global.videojs, global.window));
})(this, (function (exports, videojs, xmldom) { 'use strict';

  function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

  var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);

  function createCommonjsModule(fn, basedir, module) {
    return module = {
      path: basedir,
      exports: {},
      require: function (path, base) {
        return commonjsRequire(path, (base === undefined || base === null) ? module.path : base);
      }
    }, fn(module, module.exports), module.exports;
  }

  function commonjsRequire () {
    throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
  }

  var assertThisInitialized = createCommonjsModule(function (module) {
    function _assertThisInitialized(self) {
      if (self === void 0) {
        throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
      }

      return self;
    }

    module.exports = _assertThisInitialized;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  var setPrototypeOf = createCommonjsModule(function (module) {
    function _setPrototypeOf(o, p) {
      module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
        o.__proto__ = p;
        return o;
      };

      module.exports["default"] = module.exports, module.exports.__esModule = true;
      return _setPrototypeOf(o, p);
    }

    module.exports = _setPrototypeOf;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  var inheritsLoose = createCommonjsModule(function (module) {
    function _inheritsLoose(subClass, superClass) {
      subClass.prototype = Object.create(superClass.prototype);
      subClass.prototype.constructor = subClass;
      setPrototypeOf(subClass, superClass);
    }

    module.exports = _inheritsLoose;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  var urlToolkit = createCommonjsModule(function (module, exports) {
    // see https://tools.ietf.org/html/rfc1808
    (function (root) {
      var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#[^]*)?$/;
      var FIRST_SEGMENT_REGEX = /^([^\/?#]*)([^]*)$/;
      var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
      var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
      var URLToolkit = {
        // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
        // E.g.
        // With opts.alwaysNormalize = false (default, spec compliant)
        // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
        // With opts.alwaysNormalize = true (not spec compliant)
        // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
        buildAbsoluteURL: function buildAbsoluteURL(baseURL, relativeURL, opts) {
          opts = opts || {}; // remove any remaining space and CRLF

          baseURL = baseURL.trim();
          relativeURL = relativeURL.trim();

          if (!relativeURL) {
            // 2a) If the embedded URL is entirely empty, it inherits the
            // entire base URL (i.e., is set equal to the base URL)
            // and we are done.
            if (!opts.alwaysNormalize) {
              return baseURL;
            }

            var basePartsForNormalise = URLToolkit.parseURL(baseURL);

            if (!basePartsForNormalise) {
              throw new Error('Error trying to parse base URL.');
            }

            basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
            return URLToolkit.buildURLFromParts(basePartsForNormalise);
          }

          var relativeParts = URLToolkit.parseURL(relativeURL);

          if (!relativeParts) {
            throw new Error('Error trying to parse relative URL.');
          }

          if (relativeParts.scheme) {
            // 2b) If the embedded URL starts with a scheme name, it is
            // interpreted as an absolute URL and we are done.
            if (!opts.alwaysNormalize) {
              return relativeURL;
            }

            relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
            return URLToolkit.buildURLFromParts(relativeParts);
          }

          var baseParts = URLToolkit.parseURL(baseURL);

          if (!baseParts) {
            throw new Error('Error trying to parse base URL.');
          }

          if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
            // If netLoc missing and path doesn't start with '/', assume everything before the first '/' is the netLoc
            // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
            var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
            baseParts.netLoc = pathParts[1];
            baseParts.path = pathParts[2];
          }

          if (baseParts.netLoc && !baseParts.path) {
            baseParts.path = '/';
          }

          var builtParts = {
            // 2c) Otherwise, the embedded URL inherits the scheme of
            // the base URL.
            scheme: baseParts.scheme,
            netLoc: relativeParts.netLoc,
            path: null,
            params: relativeParts.params,
            query: relativeParts.query,
            fragment: relativeParts.fragment
          };

          if (!relativeParts.netLoc) {
            // 3) If the embedded URL's <net_loc> is non-empty, we skip to
            // Step 7. Otherwise, the embedded URL inherits the <net_loc>
            // (if any) of the base URL.
            builtParts.netLoc = baseParts.netLoc; // 4) If the embedded URL path is preceded by a slash "/", the
            // path is not relative and we skip to Step 7.

            if (relativeParts.path[0] !== '/') {
              if (!relativeParts.path) {
                // 5) If the embedded URL path is empty (and not preceded by a
                // slash), then the embedded URL inherits the base URL path
                builtParts.path = baseParts.path; // 5a) if the embedded URL's <params> is non-empty, we skip to
                // step 7; otherwise, it inherits the <params> of the base
                // URL (if any) and

                if (!relativeParts.params) {
                  builtParts.params = baseParts.params; // 5b) if the embedded URL's <query> is non-empty, we skip to
                  // step 7; otherwise, it inherits the <query> of the base
                  // URL (if any) and we skip to step 7.

                  if (!relativeParts.query) {
                    builtParts.query = baseParts.query;
                  }
                }
              } else {
                // 6) The last segment of the base URL's path (anything
                // following the rightmost slash "/", or the entire path if no
                // slash is present) is removed and the embedded URL's path is
                // appended in its place.
                var baseURLPath = baseParts.path;
                var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
                builtParts.path = URLToolkit.normalizePath(newPath);
              }
            }
          }

          if (builtParts.path === null) {
            builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
          }

          return URLToolkit.buildURLFromParts(builtParts);
        },
        parseURL: function parseURL(url) {
          var parts = URL_REGEX.exec(url);

          if (!parts) {
            return null;
          }

          return {
            scheme: parts[1] || '',
            netLoc: parts[2] || '',
            path: parts[3] || '',
            params: parts[4] || '',
            query: parts[5] || '',
            fragment: parts[6] || ''
          };
        },
        normalizePath: function normalizePath(path) {
          // The following operations are
          // then applied, in order, to the new path:
          // 6a) All occurrences of "./", where "." is a complete path
          // segment, are removed.
          // 6b) If the path ends with "." as a complete path segment,
          // that "." is removed.
          path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, ''); // 6c) All occurrences of "<segment>/../", where <segment> is a
          // complete path segment not equal to "..", are removed.
          // Removal of these path segments is performed iteratively,
          // removing the leftmost matching pattern on each iteration,
          // until no matching pattern remains.
          // 6d) If the path ends with "<segment>/..", where <segment> is a
          // complete path segment not equal to "..", that
          // "<segment>/.." is removed.

          while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {}

          return path.split('').reverse().join('');
        },
        buildURLFromParts: function buildURLFromParts(parts) {
          return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
        }
      };
      module.exports = URLToolkit;
    })();
  });

  var DEFAULT_LOCATION = 'http://example.com';

  var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
    // return early if we don't need to resolve
    if (/^[a-z]+:/i.test(relativeUrl)) {
      return relativeUrl;
    } // if baseUrl is a data URI, ignore it and resolve everything relative to window.location


    if (/^data:/.test(baseUrl)) {
      baseUrl = window.location && window.location.href || '';
    } // IE11 supports URL but not the URL constructor
    // feature detect the behavior we want


    var nativeURL = typeof window.URL === 'function';
    var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
    // and if baseUrl isn't an absolute url

    var removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location

    if (nativeURL) {
      baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
    } else if (!/\/\//i.test(baseUrl)) {
      baseUrl = urlToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
    }

    if (nativeURL) {
      var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
      // and if we're location-less, remove the location
      // otherwise, return the url unmodified

      if (removeLocation) {
        return newUrl.href.slice(DEFAULT_LOCATION.length);
      } else if (protocolLess) {
        return newUrl.href.slice(newUrl.protocol.length);
      }

      return newUrl.href;
    }

    return urlToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
  };
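
  // Illustrative sketch (not part of the original bundle): resolveUrl$1
  // combines a playlist URL with a segment or rendition URI, preferring the
  // native URL constructor and falling back to url-toolkit. The URLs below
  // are hypothetical; results shown assume a browser with a native URL
  // constructor.
  //
  //   resolveUrl$1('https://cdn.example.com/hls/main.m3u8', 'segment0.ts');
  //   // => 'https://cdn.example.com/hls/segment0.ts'
  //   resolveUrl$1('https://cdn.example.com/hls/main.m3u8', '/other/seg.ts');
  //   // => 'https://cdn.example.com/other/seg.ts'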

  /**
   * @file resolve-url.js - Handling how URLs are resolved and manipulated
   */
  var resolveUrl = resolveUrl$1;
  /**
   * Checks whether xhr request was redirected and returns correct url depending
   * on `handleManifestRedirects` option
   *
   * @api private
   *
   * @param {string} url - a URL being requested
   * @param {XMLHttpRequest} req - xhr request result
   *
   * @return {string}
   */

  var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
    // To understand how the responseURL below is set and generated:
    // - https://fetch.spec.whatwg.org/#concept-response-url
    // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
    if (handleManifestRedirect && req && req.responseURL && url !== req.responseURL) {
      return req.responseURL;
    }

    return url;
  };

  var logger = function logger(source) {
    if (videojs__default["default"].log.debug) {
      return videojs__default["default"].log.debug.bind(videojs__default["default"], 'VHS:', source + " >");
    }

    return function () {};
  };

  var _extends_1 = createCommonjsModule(function (module) {
    function _extends() {
      module.exports = _extends = Object.assign || function (target) {
        for (var i = 1; i < arguments.length; i++) {
          var source = arguments[i];

          for (var key in source) {
            if (Object.prototype.hasOwnProperty.call(source, key)) {
              target[key] = source[key];
            }
          }
        }

        return target;
      };

      module.exports["default"] = module.exports, module.exports.__esModule = true;
      return _extends.apply(this, arguments);
    }

    module.exports = _extends;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  /**
   * @file stream.js
   */

  /**
   * A lightweight readable stream implementation that handles event dispatching.
   *
   * @class Stream
   */
  var Stream = /*#__PURE__*/function () {
    function Stream() {
      this.listeners = {};
    }
    /**
     * Add a listener for a specified event type.
     *
     * @param {string} type the event name
     * @param {Function} listener the callback to be invoked when an event of
     * the specified type occurs
     */


    var _proto = Stream.prototype;

    _proto.on = function on(type, listener) {
      if (!this.listeners[type]) {
        this.listeners[type] = [];
      }

      this.listeners[type].push(listener);
    }
    /**
     * Remove a listener for a specified event type.
     *
     * @param {string} type the event name
     * @param {Function} listener a function previously registered for this
     * type of event through `on`
     * @return {boolean} if we could turn it off or not
     */
    ;

    _proto.off = function off(type, listener) {
      if (!this.listeners[type]) {
        return false;
      }

      var index = this.listeners[type].indexOf(listener); // TODO: which is better?
      // In Video.js we slice listener functions
      // on trigger so that it does not mess up the order
      // while we loop through.
      //
      // Here we slice on off so that the loop in trigger
      // can continue using its old reference to loop without
      // messing up the order.

      this.listeners[type] = this.listeners[type].slice(0);
      this.listeners[type].splice(index, 1);
      return index > -1;
    }
    /**
     * Trigger an event of the specified type on this stream. Any additional
     * arguments to this function are passed as parameters to event listeners.
     *
     * @param {string} type the event name
     */
    ;

    _proto.trigger = function trigger(type) {
      var callbacks = this.listeners[type];

      if (!callbacks) {
        return;
      } // Slicing the arguments on every invocation of this method
      // can add a significant amount of overhead. Avoid the
      // intermediate object creation for the common case of a
      // single callback argument


      if (arguments.length === 2) {
        var length = callbacks.length;

        for (var i = 0; i < length; ++i) {
          callbacks[i].call(this, arguments[1]);
        }
      } else {
        var args = Array.prototype.slice.call(arguments, 1);
        var _length = callbacks.length;

        for (var _i = 0; _i < _length; ++_i) {
          callbacks[_i].apply(this, args);
        }
      }
    }
    /**
     * Destroys the stream and cleans up.
     */
    ;

    _proto.dispose = function dispose() {
      this.listeners = {};
    }
    /**
     * Forwards all `data` events on this stream to the destination stream. The
     * destination stream should provide a method `push` to receive the data
     * events as they arrive.
     *
     * @param {Stream} destination the stream that will receive all `data` events
     * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
     */
    ;

    _proto.pipe = function pipe(destination) {
      this.on('data', function (data) {
        destination.push(data);
      });
    };

    return Stream;
  }();
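
  // Illustrative sketch (not part of the original bundle): the minimal
  // Stream event API used throughout this file.
  //
  //   var s = new Stream();
  //   var onData = function (x) { console.log('got', x); };
  //   s.on('data', onData);
  //   s.trigger('data', 1); // logs: got 1
  //   s.off('data', onData);
  //   s.trigger('data', 2); // no listener registered, nothing logged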

  var atob = function atob(s) {
    return window.atob ? window.atob(s) : Buffer.from(s, 'base64').toString('binary');
  };

  function decodeB64ToUint8Array(b64Text) {
    var decodedString = atob(b64Text);
    var array = new Uint8Array(decodedString.length);

    for (var i = 0; i < decodedString.length; i++) {
      array[i] = decodedString.charCodeAt(i);
    }

    return array;
  }
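
  // Illustrative sketch (not part of the original bundle): base64 text to
  // raw bytes, as used further down for Widevine PSSH data.
  //
  //   decodeB64ToUint8Array('AQID'); // => Uint8Array [1, 2, 3]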

  /*! @name m3u8-parser @version 4.7.0 @license Apache-2.0 */
  /**
   * A stream that buffers string input and generates a `data` event for each
   * line.
   *
   * @class LineStream
   * @extends Stream
   */

  var LineStream = /*#__PURE__*/function (_Stream) {
    inheritsLoose(LineStream, _Stream);

    function LineStream() {
      var _this;

      _this = _Stream.call(this) || this;
      _this.buffer = '';
      return _this;
    }
    /**
     * Add new data to be parsed.
     *
     * @param {string} data the text to process
     */


    var _proto = LineStream.prototype;

    _proto.push = function push(data) {
      var nextNewline;
      this.buffer += data;
      nextNewline = this.buffer.indexOf('\n');

      for (; nextNewline > -1; nextNewline = this.buffer.indexOf('\n')) {
        this.trigger('data', this.buffer.substring(0, nextNewline));
        this.buffer = this.buffer.substring(nextNewline + 1);
      }
    };

    return LineStream;
  }(Stream);
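
  // Illustrative sketch (not part of the original bundle): LineStream
  // buffers partial input until a newline arrives.
  //
  //   var lines = new LineStream();
  //   lines.on('data', function (line) { console.log(line); });
  //   lines.push('#EXTM3U\n#EXT'); // logs: #EXTM3U
  //   lines.push('INF:10,\n');     // logs: #EXTINF:10,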

  var TAB = String.fromCharCode(0x09);

  var parseByterange = function parseByterange(byterangeString) {
    // optionally match and capture 0+ digits before `@`
    // optionally match and capture 0+ digits after `@`
    var match = /([0-9.]*)?@?([0-9.]*)?/.exec(byterangeString || '');
    var result = {};

    if (match[1]) {
      result.length = parseInt(match[1], 10);
    }

    if (match[2]) {
      result.offset = parseInt(match[2], 10);
    }

    return result;
  };
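
  // Illustrative sketch (not part of the original bundle): the
  // `<length>[@<offset>]` form used by EXT-X-BYTERANGE.
  //
  //   parseByterange('512@0'); // => { length: 512, offset: 0 }
  //   parseByterange('512');   // => { length: 512 }
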
  /**
   * "forgiving" attribute list pseudo-grammar:
   * attributes -> keyvalue (',' keyvalue)*
   * keyvalue -> key '=' value
   * key -> [^=]*
   * value -> '"' [^"]* '"' | [^,]*
   */


  var attributeSeparator = function attributeSeparator() {
    var key = '[^=]*';
    var value = '"[^"]*"|[^,]*';
    var keyvalue = '(?:' + key + ')=(?:' + value + ')';
    return new RegExp('(?:^|,)(' + keyvalue + ')');
  };
  /**
   * Parse attributes from a line given the separator
   *
   * @param {string} attributes the attribute line to parse
   */


  var parseAttributes$1 = function parseAttributes(attributes) {
    // split the string using attributes as the separator
    var attrs = attributes.split(attributeSeparator());
    var result = {};
    var i = attrs.length;
    var attr;

    while (i--) {
      // filter out unmatched portions of the string
      if (attrs[i] === '') {
        continue;
      } // split the key and value


      attr = /([^=]*)=(.*)/.exec(attrs[i]).slice(1); // trim whitespace and remove optional quotes around the value

      attr[0] = attr[0].replace(/^\s+|\s+$/g, '');
      attr[1] = attr[1].replace(/^\s+|\s+$/g, '');
      attr[1] = attr[1].replace(/^['"](.*)['"]$/g, '$1');
      result[attr[0]] = attr[1];
    }

    return result;
  };
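
  // Illustrative sketch (not part of the original bundle): attribute-list
  // parsing tolerates commas inside quoted values, and quotes are stripped.
  //
  //   parseAttributes$1('BANDWIDTH=2000000,CODECS="avc1.42001e,mp4a.40.2"');
  //   // => { BANDWIDTH: '2000000', CODECS: 'avc1.42001e,mp4a.40.2' }
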
  /**
   * A line-level M3U8 parser event stream. It expects to receive input one
   * line at a time and performs a context-free parse of its contents. A stream
   * interpretation of a manifest can be useful if the manifest is expected to
   * be too large to fit comfortably into memory or the entirety of the input
   * is not immediately available. Otherwise, it's probably much easier to work
   * with a regular `Parser` object.
   *
   * Produces `data` events with an object that captures the parser's
   * interpretation of the input. That object has a property `type` that is one
   * of `uri`, `comment`, or `tag`. URIs only have a single additional
   * property, `uri`, which captures the entirety of the input without
   * interpretation. Comments similarly have a single additional property
   * `text` which is the input without the leading `#`.
   *
   * Tags always have a property `tagType` which is the lower-cased version of
   * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
   * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
   * tags carry a single additional property `data` with the remainder of the
   * input.
   *
   * @class ParseStream
   * @extends Stream
   */


  var ParseStream = /*#__PURE__*/function (_Stream) {
    inheritsLoose(ParseStream, _Stream);

    function ParseStream() {
      var _this;

      _this = _Stream.call(this) || this;
      _this.customParsers = [];
      _this.tagMappers = [];
      return _this;
    }
    /**
     * Parses an additional line of input.
     *
     * @param {string} line a single line of an M3U8 file to parse
     */


    var _proto = ParseStream.prototype;

    _proto.push = function push(line) {
      var _this2 = this;

      var match;
      var event; // strip whitespace

      line = line.trim();

      if (line.length === 0) {
        // ignore empty lines
        return;
      } // URIs


      if (line[0] !== '#') {
        this.trigger('data', {
          type: 'uri',
          uri: line
        });
        return;
      } // map tags


      var newLines = this.tagMappers.reduce(function (acc, mapper) {
        var mappedLine = mapper(line); // skip if unchanged

        if (mappedLine === line) {
          return acc;
        }

        return acc.concat([mappedLine]);
      }, [line]);
      newLines.forEach(function (newLine) {
        for (var i = 0; i < _this2.customParsers.length; i++) {
          if (_this2.customParsers[i].call(_this2, newLine)) {
            return;
          }
        } // Comments


        if (newLine.indexOf('#EXT') !== 0) {
          _this2.trigger('data', {
            type: 'comment',
            text: newLine.slice(1)
          });

          return;
        } // strip off any carriage returns here so the regex matching
        // doesn't have to account for them.


        newLine = newLine.replace('\r', ''); // Tags

        match = /^#EXTM3U/.exec(newLine);

        if (match) {
          _this2.trigger('data', {
            type: 'tag',
            tagType: 'm3u'
          });

          return;
        }

        match = /^#EXTINF:?([0-9\.]*)?,?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'inf'
          };

          if (match[1]) {
            event.duration = parseFloat(match[1]);
          }

          if (match[2]) {
            event.title = match[2];
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-TARGETDURATION:?([0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'targetduration'
          };

          if (match[1]) {
            event.duration = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'version'
          };

          if (match[1]) {
            event.version = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-MEDIA-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'media-sequence'
          };

          if (match[1]) {
            event.number = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-DISCONTINUITY-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'discontinuity-sequence'
          };

          if (match[1]) {
            event.number = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PLAYLIST-TYPE:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'playlist-type'
          };

          if (match[1]) {
            event.playlistType = match[1];
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-BYTERANGE:?(.*)?$/.exec(newLine);

        if (match) {
          event = _extends_1(parseByterange(match[1]), {
            type: 'tag',
            tagType: 'byterange'
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-ALLOW-CACHE:?(YES|NO)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'allow-cache'
          };

          if (match[1]) {
            event.allowed = !/NO/.test(match[1]);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-MAP:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'map'
          };

          if (match[1]) {
            var attributes = parseAttributes$1(match[1]);

            if (attributes.URI) {
              event.uri = attributes.URI;
            }

            if (attributes.BYTERANGE) {
              event.byterange = parseByterange(attributes.BYTERANGE);
            }
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-STREAM-INF:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'stream-inf'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);

            if (event.attributes.RESOLUTION) {
              var split = event.attributes.RESOLUTION.split('x');
              var resolution = {};

              if (split[0]) {
                resolution.width = parseInt(split[0], 10);
              }

              if (split[1]) {
                resolution.height = parseInt(split[1], 10);
              }

              event.attributes.RESOLUTION = resolution;
            }

            if (event.attributes.BANDWIDTH) {
              event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
            }

            if (event.attributes['PROGRAM-ID']) {
              event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
            }
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-MEDIA:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'media'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-ENDLIST/.exec(newLine);

        if (match) {
          _this2.trigger('data', {
            type: 'tag',
            tagType: 'endlist'
          });

          return;
        }

        match = /^#EXT-X-DISCONTINUITY/.exec(newLine);

        if (match) {
          _this2.trigger('data', {
            type: 'tag',
            tagType: 'discontinuity'
          });

          return;
        }

        match = /^#EXT-X-PROGRAM-DATE-TIME:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'program-date-time'
          };

          if (match[1]) {
            event.dateTimeString = match[1];
            event.dateTimeObject = new Date(match[1]);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-KEY:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'key'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]); // parse the IV string into a Uint32Array

            if (event.attributes.IV) {
              if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
                event.attributes.IV = event.attributes.IV.substring(2);
              }

              event.attributes.IV = event.attributes.IV.match(/.{8}/g);
              event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
              event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
              event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
              event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
              event.attributes.IV = new Uint32Array(event.attributes.IV);
            }
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-START:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'start'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);
            event.attributes['TIME-OFFSET'] = parseFloat(event.attributes['TIME-OFFSET']);
            event.attributes.PRECISE = /YES/.test(event.attributes.PRECISE);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-CUE-OUT-CONT:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-out-cont'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-CUE-OUT:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-out'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-in'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-SKIP:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'skip'
          };
          event.attributes = parseAttributes$1(match[1]);

          if (event.attributes.hasOwnProperty('SKIPPED-SEGMENTS')) {
            event.attributes['SKIPPED-SEGMENTS'] = parseInt(event.attributes['SKIPPED-SEGMENTS'], 10);
          }

          if (event.attributes.hasOwnProperty('RECENTLY-REMOVED-DATERANGES')) {
            event.attributes['RECENTLY-REMOVED-DATERANGES'] = event.attributes['RECENTLY-REMOVED-DATERANGES'].split(TAB);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PART:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'part'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['DURATION'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });
          ['INDEPENDENT', 'GAP'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = /YES/.test(event.attributes[key]);
            }
          });

          if (event.attributes.hasOwnProperty('BYTERANGE')) {
            event.attributes.byterange = parseByterange(event.attributes.BYTERANGE);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-SERVER-CONTROL:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'server-control'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['CAN-SKIP-UNTIL', 'PART-HOLD-BACK', 'HOLD-BACK'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });
          ['CAN-SKIP-DATERANGES', 'CAN-BLOCK-RELOAD'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = /YES/.test(event.attributes[key]);
            }
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PART-INF:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'part-inf'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['PART-TARGET'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PRELOAD-HINT:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'preload-hint'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['BYTERANGE-START', 'BYTERANGE-LENGTH'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseInt(event.attributes[key], 10);
              var subkey = key === 'BYTERANGE-LENGTH' ? 'length' : 'offset';
              event.attributes.byterange = event.attributes.byterange || {};
              event.attributes.byterange[subkey] = event.attributes[key]; // only keep the parsed byterange object.

              delete event.attributes[key];
            }
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-RENDITION-REPORT:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'rendition-report'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['LAST-MSN', 'LAST-PART'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseInt(event.attributes[key], 10);
            }
          });

          _this2.trigger('data', event);

          return;
        } // unknown tag type


        _this2.trigger('data', {
          type: 'tag',
          data: newLine.slice(4)
        });
      });
    }
    /**
     * Add a parser for custom headers
     *
     * @param {Object} options a map of options for the added parser
     * @param {RegExp} options.expression a regular expression to match the custom header
     * @param {string} options.customType the custom type to register to the output
     * @param {Function} [options.dataParser] function to parse the line into an object
     * @param {boolean} [options.segment] should tag data be attached to the segment object
     */
    ;

    _proto.addParser = function addParser(_ref) {
      var _this3 = this;

      var expression = _ref.expression,
          customType = _ref.customType,
          dataParser = _ref.dataParser,
          segment = _ref.segment;

      if (typeof dataParser !== 'function') {
        dataParser = function dataParser(line) {
          return line;
        };
      }

      this.customParsers.push(function (line) {
        var match = expression.exec(line);

        if (match) {
          _this3.trigger('data', {
            type: 'custom',
            data: dataParser(line),
            customType: customType,
            segment: segment
          });

          return true;
        }
      });
    }
    /**
     * Add a custom header mapper
     *
     * @param {Object} options
     * @param {RegExp} options.expression a regular expression to match the custom header
     * @param {Function} options.map function to translate tag into a different tag
     */
    ;

    _proto.addTagMapper = function addTagMapper(_ref2) {
      var expression = _ref2.expression,
          map = _ref2.map;

      var mapFn = function mapFn(line) {
        if (expression.test(line)) {
          return map(line);
        }

        return line;
      };

      this.tagMappers.push(mapFn);
    };

    return ParseStream;
  }(Stream);
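
  // Illustrative sketch (not part of the original bundle): feeding single
  // lines to a ParseStream and observing the parsed events.
  //
  //   var parseStream = new ParseStream();
  //   parseStream.on('data', function (entry) { console.log(entry); });
  //   parseStream.push('#EXT-X-TARGETDURATION:10');
  //   // logs: { type: 'tag', tagType: 'targetduration', duration: 10 }
  //   parseStream.push('segment0.ts');
  //   // logs: { type: 'uri', uri: 'segment0.ts' }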

  var camelCase = function camelCase(str) {
    return str.toLowerCase().replace(/-(\w)/g, function (a) {
      return a[1].toUpperCase();
    });
  };

  var camelCaseKeys = function camelCaseKeys(attributes) {
    var result = {};
    Object.keys(attributes).forEach(function (key) {
      result[camelCase(key)] = attributes[key];
    });
    return result;
  }; // set SERVER-CONTROL hold back based upon targetDuration and partTargetDuration
  // we need this helper because defaults are based upon targetDuration and
  // partTargetDuration being set, but they may not be if SERVER-CONTROL appears before
  // target durations are set.


  var setHoldBack = function setHoldBack(manifest) {
    var serverControl = manifest.serverControl,
        targetDuration = manifest.targetDuration,
        partTargetDuration = manifest.partTargetDuration;

    if (!serverControl) {
      return;
    }

    var tag = '#EXT-X-SERVER-CONTROL';
    var hb = 'holdBack';
    var phb = 'partHoldBack';
    var minTargetDuration = targetDuration && targetDuration * 3;
    var minPartDuration = partTargetDuration && partTargetDuration * 2;

    if (targetDuration && !serverControl.hasOwnProperty(hb)) {
      serverControl[hb] = minTargetDuration;
      this.trigger('info', {
        message: tag + " defaulting HOLD-BACK to targetDuration * 3 (" + minTargetDuration + ")."
      });
    }

    if (minTargetDuration && serverControl[hb] < minTargetDuration) {
      this.trigger('warn', {
        message: tag + " clamping HOLD-BACK (" + serverControl[hb] + ") to targetDuration * 3 (" + minTargetDuration + ")"
      });
      serverControl[hb] = minTargetDuration;
    } // default no part hold back to part target duration * 3


    if (partTargetDuration && !serverControl.hasOwnProperty(phb)) {
      serverControl[phb] = partTargetDuration * 3;
      this.trigger('info', {
        message: tag + " defaulting PART-HOLD-BACK to partTargetDuration * 3 (" + serverControl[phb] + ")."
      });
    } // if part hold back is too small default it to part target duration * 2


    if (partTargetDuration && serverControl[phb] < minPartDuration) {
      this.trigger('warn', {
        message: tag + " clamping PART-HOLD-BACK (" + serverControl[phb] + ") to partTargetDuration * 2 (" + minPartDuration + ")."
      });
      serverControl[phb] = minPartDuration;
    }
  };
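
  // Worked example (illustrative, not part of the original bundle): with
  // targetDuration = 6 and partTargetDuration = 1, a SERVER-CONTROL tag that
  // omits HOLD-BACK and PART-HOLD-BACK is defaulted to holdBack = 18 (6 * 3)
  // and partHoldBack = 3 (1 * 3); values below 6 * 3 or 1 * 2 respectively
  // are clamped up and a 'warn' event is triggered.
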
1326 /**
1327 * A parser for M3U8 files. The current interpretation of the input is
1328 * exposed as a property `manifest` on parser objects. It's just two lines to
1329 * create and parse a manifest once you have the contents available as a string:
1330 *
1331 * ```js
1332 * var parser = new m3u8.Parser();
1333 * parser.push(xhr.responseText);
1334 * ```
1335 *
1336 * New input can later be applied to update the manifest object by calling
1337 * `push` again.
1338 *
1339 * The parser attempts to create a usable manifest object even if the
1340 * underlying input is somewhat nonsensical. It emits `info` and `warning`
1341 * events during the parse if it encounters input that seems invalid or
1342 * requires some property of the manifest object to be defaulted.
1343 *
1344 * @class Parser
1345 * @extends Stream
1346 */
1347
1348
1349 var Parser = /*#__PURE__*/function (_Stream) {
1350 inheritsLoose(Parser, _Stream);
1351
1352 function Parser() {
1353 var _this;
1354
1355 _this = _Stream.call(this) || this;
1356 _this.lineStream = new LineStream();
1357 _this.parseStream = new ParseStream();
1358
1359 _this.lineStream.pipe(_this.parseStream);
1360 /* eslint-disable consistent-this */
1361
1362
1363 var self = assertThisInitialized(_this);
1364 /* eslint-enable consistent-this */
1365
1366
1367 var uris = [];
1368 var currentUri = {}; // if specified, the active EXT-X-MAP definition
1369
1370 var currentMap; // if specified, the active decryption key
1371
1372 var _key;
1373
1374 var hasParts = false;
1375
1376 var noop = function noop() {};
1377
1378 var defaultMediaGroups = {
1379 'AUDIO': {},
1380 'VIDEO': {},
1381 'CLOSED-CAPTIONS': {},
1382 'SUBTITLES': {}
1383 }; // This is the Widevine UUID from DASH IF IOP. The same exact string is
1384 // used in MPDs with Widevine encrypted streams.
1385
1386 var widevineUuid = 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'; // group segments into numbered timelines delineated by discontinuities
1387
1388 var currentTimeline = 0; // the manifest is empty until the parse stream begins delivering data
1389
1390 _this.manifest = {
1391 allowCache: true,
1392 discontinuityStarts: [],
1393 segments: []
1394 }; // keep track of the last seen segment's byte range end, as segments are not required
1395 // to provide the offset, in which case it defaults to the next byte after the
1396 // previous segment
1397
1398 var lastByterangeEnd = 0; // keep track of the last seen part's byte range end.
1399
1400 var lastPartByterangeEnd = 0;
1401
1402 _this.on('end', function () {
1403 // only add preloadSegment if we don't yet have a uri for it.
1404 // and we actually have parts/preloadHints
1405 if (currentUri.uri || !currentUri.parts && !currentUri.preloadHints) {
1406 return;
1407 }
1408
1409 if (!currentUri.map && currentMap) {
1410 currentUri.map = currentMap;
1411 }
1412
1413 if (!currentUri.key && _key) {
1414 currentUri.key = _key;
1415 }
1416
1417 if (!currentUri.timeline && typeof currentTimeline === 'number') {
1418 currentUri.timeline = currentTimeline;
1419 }
1420
1421 _this.manifest.preloadSegment = currentUri;
1422 }); // update the manifest with the m3u8 entry from the parse stream
1423
1424
1425 _this.parseStream.on('data', function (entry) {
1426 var mediaGroup;
1427 var rendition;
1428 ({
1429 tag: function tag() {
1430 // switch based on the tag type
1431 (({
1432 version: function version() {
1433 if (entry.version) {
1434 this.manifest.version = entry.version;
1435 }
1436 },
1437 'allow-cache': function allowCache() {
1438 this.manifest.allowCache = entry.allowed;
1439
1440 if (!('allowed' in entry)) {
1441 this.trigger('info', {
1442 message: 'defaulting allowCache to YES'
1443 });
1444 this.manifest.allowCache = true;
1445 }
1446 },
1447 byterange: function byterange() {
1448 var byterange = {};
1449
1450 if ('length' in entry) {
1451 currentUri.byterange = byterange;
1452 byterange.length = entry.length;
1453
1454 if (!('offset' in entry)) {
1455 /*
1456 * From the latest spec (as of this writing):
1457 * https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.2
1458 *
1459 * Same text since EXT-X-BYTERANGE's introduction in draft 7:
1460 * https://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.1)
1461 *
1462 * "If o [offset] is not present, the sub-range begins at the next byte
1463 * following the sub-range of the previous media segment."
1464 */
1465 entry.offset = lastByterangeEnd;
1466 }
1467 }
1468
1469 if ('offset' in entry) {
1470 currentUri.byterange = byterange;
1471 byterange.offset = entry.offset;
1472 }
1473
1474 lastByterangeEnd = byterange.offset + byterange.length;
1475 },
1476 endlist: function endlist() {
1477 this.manifest.endList = true;
1478 },
1479 inf: function inf() {
1480 if (!('mediaSequence' in this.manifest)) {
1481 this.manifest.mediaSequence = 0;
1482 this.trigger('info', {
1483 message: 'defaulting media sequence to zero'
1484 });
1485 }
1486
1487 if (!('discontinuitySequence' in this.manifest)) {
1488 this.manifest.discontinuitySequence = 0;
1489 this.trigger('info', {
1490 message: 'defaulting discontinuity sequence to zero'
1491 });
1492 }
1493
1494 if (entry.duration > 0) {
1495 currentUri.duration = entry.duration;
1496 }
1497
1498 if (entry.duration === 0) {
1499 currentUri.duration = 0.01;
1500 this.trigger('info', {
1501 message: 'updating zero segment duration to a small value'
1502 });
1503 }
1504
1505 this.manifest.segments = uris;
1506 },
1507 key: function key() {
1508 if (!entry.attributes) {
1509 this.trigger('warn', {
1510 message: 'ignoring key declaration without attribute list'
1511 });
1512 return;
1513 } // clear the active encryption key
1514
1515
1516 if (entry.attributes.METHOD === 'NONE') {
1517 _key = null;
1518 return;
1519 }
1520
1521 if (!entry.attributes.URI) {
1522 this.trigger('warn', {
1523 message: 'ignoring key declaration without URI'
1524 });
1525 return;
1526 }
1527
1528 if (entry.attributes.KEYFORMAT === 'com.apple.streamingkeydelivery') {
1529 this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.
1530
1531 this.manifest.contentProtection['com.apple.fps.1_0'] = {
1532 attributes: entry.attributes
1533 };
1534 return;
1535 } // check if the content is encrypted for Widevine
1536 // Widevine/HLS spec: https://storage.googleapis.com/wvdocs/Widevine_DRM_HLS.pdf
1537
1538
1539 if (entry.attributes.KEYFORMAT === widevineUuid) {
1540 var VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR', 'SAMPLE-AES-CENC'];
1541
1542 if (VALID_METHODS.indexOf(entry.attributes.METHOD) === -1) {
1543 this.trigger('warn', {
1544 message: 'invalid key method provided for Widevine'
1545 });
1546 return;
1547 }
1548
1549 if (entry.attributes.METHOD === 'SAMPLE-AES-CENC') {
1550 this.trigger('warn', {
1551 message: 'SAMPLE-AES-CENC is deprecated, please use SAMPLE-AES-CTR instead'
1552 });
1553 }
1554
1555 if (entry.attributes.URI.substring(0, 23) !== 'data:text/plain;base64,') {
1556 this.trigger('warn', {
1557 message: 'invalid key URI provided for Widevine'
1558 });
1559 return;
1560 }
1561
1562 if (!(entry.attributes.KEYID && entry.attributes.KEYID.substring(0, 2) === '0x')) {
1563 this.trigger('warn', {
1564 message: 'invalid key ID provided for Widevine'
1565 });
1566 return;
1567 } // if Widevine key attributes are valid, store them as `contentProtection`
1568 // on the manifest to emulate Widevine tag structure in a DASH mpd
1569
1570
1571 this.manifest.contentProtection = this.manifest.contentProtection || {};
1572 this.manifest.contentProtection['com.widevine.alpha'] = {
1573 attributes: {
1574 schemeIdUri: entry.attributes.KEYFORMAT,
1575 // remove '0x' from the key id string
1576 keyId: entry.attributes.KEYID.substring(2)
1577 },
1578 // decode the base64-encoded PSSH box
1579 pssh: decodeB64ToUint8Array(entry.attributes.URI.split(',')[1])
1580 };
1581 return;
1582 }
1583
1584 if (!entry.attributes.METHOD) {
1585 this.trigger('warn', {
1586 message: 'defaulting key method to AES-128'
1587 });
1588 } // setup an encryption key for upcoming segments
1589
1590
1591 _key = {
1592 method: entry.attributes.METHOD || 'AES-128',
1593 uri: entry.attributes.URI
1594 };
1595
1596 if (typeof entry.attributes.IV !== 'undefined') {
1597 _key.iv = entry.attributes.IV;
1598 }
1599 },
1600 'media-sequence': function mediaSequence() {
1601 if (!isFinite(entry.number)) {
1602 this.trigger('warn', {
1603 message: 'ignoring invalid media sequence: ' + entry.number
1604 });
1605 return;
1606 }
1607
1608 this.manifest.mediaSequence = entry.number;
1609 },
1610 'discontinuity-sequence': function discontinuitySequence() {
1611 if (!isFinite(entry.number)) {
1612 this.trigger('warn', {
1613 message: 'ignoring invalid discontinuity sequence: ' + entry.number
1614 });
1615 return;
1616 }
1617
1618 this.manifest.discontinuitySequence = entry.number;
1619 currentTimeline = entry.number;
1620 },
1621 'playlist-type': function playlistType() {
1622 if (!/VOD|EVENT/.test(entry.playlistType)) {
1623 this.trigger('warn', {
1624 message: 'ignoring unknown playlist type: ' + entry.playlist
1625 });
1626 return;
1627 }
1628
1629 this.manifest.playlistType = entry.playlistType;
1630 },
1631 map: function map() {
1632 currentMap = {};
1633
1634 if (entry.uri) {
1635 currentMap.uri = entry.uri;
1636 }
1637
1638 if (entry.byterange) {
1639 currentMap.byterange = entry.byterange;
1640 }
1641
1642 if (_key) {
1643 currentMap.key = _key;
1644 }
1645 },
1646 'stream-inf': function streamInf() {
1647 this.manifest.playlists = uris;
1648 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
1649
1650 if (!entry.attributes) {
1651 this.trigger('warn', {
1652 message: 'ignoring empty stream-inf attributes'
1653 });
1654 return;
1655 }
1656
1657 if (!currentUri.attributes) {
1658 currentUri.attributes = {};
1659 }
1660
1661 _extends_1(currentUri.attributes, entry.attributes);
1662 },
1663 media: function media() {
1664 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
1665
1666 if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
1667 this.trigger('warn', {
1668 message: 'ignoring incomplete or missing media group'
1669 });
1670 return;
1671 } // find the media group, creating defaults as necessary
1672
1673
1674 var mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
1675 mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
1676 mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']]; // collect the rendition metadata
1677
1678 rendition = {
1679 default: /yes/i.test(entry.attributes.DEFAULT)
1680 };
1681
1682 if (rendition.default) {
1683 rendition.autoselect = true;
1684 } else {
1685 rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
1686 }
1687
1688 if (entry.attributes.LANGUAGE) {
1689 rendition.language = entry.attributes.LANGUAGE;
1690 }
1691
1692 if (entry.attributes.URI) {
1693 rendition.uri = entry.attributes.URI;
1694 }
1695
1696 if (entry.attributes['INSTREAM-ID']) {
1697 rendition.instreamId = entry.attributes['INSTREAM-ID'];
1698 }
1699
1700 if (entry.attributes.CHARACTERISTICS) {
1701 rendition.characteristics = entry.attributes.CHARACTERISTICS;
1702 }
1703
1704 if (entry.attributes.FORCED) {
1705 rendition.forced = /yes/i.test(entry.attributes.FORCED);
1706 } // insert the new rendition
1707
1708
1709 mediaGroup[entry.attributes.NAME] = rendition;
1710 },
1711 discontinuity: function discontinuity() {
1712 currentTimeline += 1;
1713 currentUri.discontinuity = true;
1714 this.manifest.discontinuityStarts.push(uris.length);
1715 },
1716 'program-date-time': function programDateTime() {
1717 if (typeof this.manifest.dateTimeString === 'undefined') {
1718 // PROGRAM-DATE-TIME is a media-segment tag, but for backwards
1719 // compatibility, we add the first occurence of the PROGRAM-DATE-TIME tag
1720 // to the manifest object
1721 // TODO: Consider removing this in future major version
1722 this.manifest.dateTimeString = entry.dateTimeString;
1723 this.manifest.dateTimeObject = entry.dateTimeObject;
1724 }
1725
1726 currentUri.dateTimeString = entry.dateTimeString;
1727 currentUri.dateTimeObject = entry.dateTimeObject;
1728 },
1729 targetduration: function targetduration() {
1730 if (!isFinite(entry.duration) || entry.duration < 0) {
1731 this.trigger('warn', {
1732 message: 'ignoring invalid target duration: ' + entry.duration
1733 });
1734 return;
1735 }
1736
1737 this.manifest.targetDuration = entry.duration;
1738 setHoldBack.call(this, this.manifest);
1739 },
1740 start: function start() {
1741 if (!entry.attributes || isNaN(entry.attributes['TIME-OFFSET'])) {
1742 this.trigger('warn', {
1743 message: 'ignoring start declaration without appropriate attribute list'
1744 });
1745 return;
1746 }
1747
1748 this.manifest.start = {
1749 timeOffset: entry.attributes['TIME-OFFSET'],
1750 precise: entry.attributes.PRECISE
1751 };
1752 },
1753 'cue-out': function cueOut() {
1754 currentUri.cueOut = entry.data;
1755 },
1756 'cue-out-cont': function cueOutCont() {
1757 currentUri.cueOutCont = entry.data;
1758 },
1759 'cue-in': function cueIn() {
1760 currentUri.cueIn = entry.data;
1761 },
1762 'skip': function skip() {
1763 this.manifest.skip = camelCaseKeys(entry.attributes);
1764 this.warnOnMissingAttributes_('#EXT-X-SKIP', entry.attributes, ['SKIPPED-SEGMENTS']);
1765 },
1766 'part': function part() {
1767 var _this2 = this;
1768
1769 hasParts = true; // parts are always specifed before a segment
1770
1771 var segmentIndex = this.manifest.segments.length;
1772 var part = camelCaseKeys(entry.attributes);
1773 currentUri.parts = currentUri.parts || [];
1774 currentUri.parts.push(part);
1775
1776 if (part.byterange) {
1777 if (!part.byterange.hasOwnProperty('offset')) {
1778 part.byterange.offset = lastPartByterangeEnd;
1779 }
1780
1781 lastPartByterangeEnd = part.byterange.offset + part.byterange.length;
1782 }
1783
1784 var partIndex = currentUri.parts.length - 1;
1785 this.warnOnMissingAttributes_("#EXT-X-PART #" + partIndex + " for segment #" + segmentIndex, entry.attributes, ['URI', 'DURATION']);
1786
1787 if (this.manifest.renditionReports) {
1788 this.manifest.renditionReports.forEach(function (r, i) {
1789 if (!r.hasOwnProperty('lastPart')) {
1790 _this2.trigger('warn', {
1791 message: "#EXT-X-RENDITION-REPORT #" + i + " lacks required attribute(s): LAST-PART"
1792 });
1793 }
1794 });
1795 }
1796 },
1797 'server-control': function serverControl() {
1798 var attrs = this.manifest.serverControl = camelCaseKeys(entry.attributes);
1799
1800 if (!attrs.hasOwnProperty('canBlockReload')) {
1801 attrs.canBlockReload = false;
1802 this.trigger('info', {
1803 message: '#EXT-X-SERVER-CONTROL defaulting CAN-BLOCK-RELOAD to false'
1804 });
1805 }
1806
1807 setHoldBack.call(this, this.manifest);
1808
1809 if (attrs.canSkipDateranges && !attrs.hasOwnProperty('canSkipUntil')) {
1810 this.trigger('warn', {
1811 message: '#EXT-X-SERVER-CONTROL lacks required attribute CAN-SKIP-UNTIL which is required when CAN-SKIP-DATERANGES is set'
1812 });
1813 }
1814 },
1815 'preload-hint': function preloadHint() {
1816 // parts are always specifed before a segment
1817 var segmentIndex = this.manifest.segments.length;
1818 var hint = camelCaseKeys(entry.attributes);
1819 var isPart = hint.type && hint.type === 'PART';
1820 currentUri.preloadHints = currentUri.preloadHints || [];
1821 currentUri.preloadHints.push(hint);
1822
1823 if (hint.byterange) {
1824 if (!hint.byterange.hasOwnProperty('offset')) {
1825 // use last part byterange end or zero if not a part.
1826 hint.byterange.offset = isPart ? lastPartByterangeEnd : 0;
1827
1828 if (isPart) {
1829 lastPartByterangeEnd = hint.byterange.offset + hint.byterange.length;
1830 }
1831 }
1832 }
1833
1834 var index = currentUri.preloadHints.length - 1;
1835 this.warnOnMissingAttributes_("#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex, entry.attributes, ['TYPE', 'URI']);
1836
1837 if (!hint.type) {
1838 return;
1839 } // search through all preload hints except for the current one for
1840 // a duplicate type.
1841
1842
1843 for (var i = 0; i < currentUri.preloadHints.length - 1; i++) {
1844 var otherHint = currentUri.preloadHints[i];
1845
1846 if (!otherHint.type) {
1847 continue;
1848 }
1849
1850 if (otherHint.type === hint.type) {
1851 this.trigger('warn', {
1852 message: "#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex + " has the same TYPE " + hint.type + " as preload hint #" + i
1853 });
1854 }
1855 }
1856 },
1857 'rendition-report': function renditionReport() {
1858 var report = camelCaseKeys(entry.attributes);
1859 this.manifest.renditionReports = this.manifest.renditionReports || [];
1860 this.manifest.renditionReports.push(report);
1861 var index = this.manifest.renditionReports.length - 1;
1862 var required = ['LAST-MSN', 'URI'];
1863
1864 if (hasParts) {
1865 required.push('LAST-PART');
1866 }
1867
1868 this.warnOnMissingAttributes_("#EXT-X-RENDITION-REPORT #" + index, entry.attributes, required);
1869 },
1870 'part-inf': function partInf() {
1871 this.manifest.partInf = camelCaseKeys(entry.attributes);
1872 this.warnOnMissingAttributes_('#EXT-X-PART-INF', entry.attributes, ['PART-TARGET']);
1873
1874 if (this.manifest.partInf.partTarget) {
1875 this.manifest.partTargetDuration = this.manifest.partInf.partTarget;
1876 }
1877
1878 setHoldBack.call(this, this.manifest);
1879 }
1880 })[entry.tagType] || noop).call(self);
1881 },
1882 uri: function uri() {
1883 currentUri.uri = entry.uri;
1884 uris.push(currentUri); // if no explicit duration was declared, use the target duration
1885
1886 if (this.manifest.targetDuration && !('duration' in currentUri)) {
1887 this.trigger('warn', {
1888 message: 'defaulting segment duration to the target duration'
1889 });
1890 currentUri.duration = this.manifest.targetDuration;
1891 } // annotate with encryption information, if necessary
1892
1893
1894 if (_key) {
1895 currentUri.key = _key;
1896 }
1897
1898 currentUri.timeline = currentTimeline; // annotate with initialization segment information, if necessary
1899
1900 if (currentMap) {
1901 currentUri.map = currentMap;
1902 } // reset the last byterange end as it needs to be 0 between parts
1903
1904
1905 lastPartByterangeEnd = 0; // prepare for the next URI
1906
1907 currentUri = {};
1908 },
1909 comment: function comment() {// comments are not important for playback
1910 },
1911 custom: function custom() {
1912 // if this is segment-level data attach the output to the segment
1913 if (entry.segment) {
1914 currentUri.custom = currentUri.custom || {};
1915 currentUri.custom[entry.customType] = entry.data; // if this is manifest-level data attach to the top level manifest object
1916 } else {
1917 this.manifest.custom = this.manifest.custom || {};
1918 this.manifest.custom[entry.customType] = entry.data;
1919 }
1920 }
1921 })[entry.type].call(self);
1922 });
1923
1924 return _this;
1925 }
1926
1927 var _proto = Parser.prototype;
1928
1929 _proto.warnOnMissingAttributes_ = function warnOnMissingAttributes_(identifier, attributes, required) {
1930 var missing = [];
1931 required.forEach(function (key) {
1932 if (!attributes.hasOwnProperty(key)) {
1933 missing.push(key);
1934 }
1935 });
1936
1937 if (missing.length) {
1938 this.trigger('warn', {
1939 message: identifier + " lacks required attribute(s): " + missing.join(', ')
1940 });
1941 }
1942 }
1943 /**
1944 * Parse the input string and update the manifest object.
1945 *
1946 * @param {string} chunk a potentially incomplete portion of the manifest
1947 */
1948 ;
1949
1950 _proto.push = function push(chunk) {
1951 this.lineStream.push(chunk);
1952 }
1953 /**
1954 * Flush any remaining input. This can be handy if the last line of an M3U8
1955 * manifest did not contain a trailing newline but the file has been
1956 * completely received.
1957 */
1958 ;
1959
1960 _proto.end = function end() {
1961 // flush any buffered input
1962 this.lineStream.push('\n');
1963 this.trigger('end');
1964 }
1965 /**
1966 * Add an additional parser for non-standard tags
1967 *
1968 * @param {Object} options a map of options for the added parser
1969 * @param {RegExp} options.expression a regular expression to match the custom header
1970 * @param {string} options.type the type to register to the output
1971 * @param {Function} [options.dataParser] function to parse the line into an object
1972 * @param {boolean} [options.segment] should tag data be attached to the segment object
1973 */
1974 ;
1975
1976 _proto.addParser = function addParser(options) {
1977 this.parseStream.addParser(options);
1978 }
1979 /**
1980 * Add a custom header mapper
1981 *
1982 * @param {Object} options
1983 * @param {RegExp} options.expression a regular expression to match the custom header
1984 * @param {Function} options.map function to translate tag into a different tag
1985 */
1986 ;
1987
1988 _proto.addTagMapper = function addTagMapper(options) {
1989 this.parseStream.addTagMapper(options);
1990 };
1991
1992 return Parser;
1993 }(Stream);
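  /*
   * Usage sketch (illustrative manifest text): feed manifest chunks to a
   * Parser instance, flush it, and read the result from `parser.manifest`.
   *
   *   var parser = new Parser();
   *
   *   parser.push('#EXTM3U\n#EXT-X-TARGETDURATION:10\n#EXTINF:10,\nsegment0.ts\n#EXT-X-ENDLIST\n');
   *   parser.end();
   *   parser.manifest.targetDuration; // => 10
   *   parser.manifest.segments.length; // => 1
   */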
1994
1995 var regexs = {
1996 // to determine mime types
1997 mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
1998 webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
1999 ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
2000 // to determine if a codec is audio or video
2001 video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
2002 audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
2003 text: /^(stpp.ttml.im1t)/,
2004 // mux.js support regex
2005 muxerVideo: /^(avc0?1)/,
2006 muxerAudio: /^(mp4a)/,
2007 // match nothing as muxer does not support text right now.
2008 // there can never be a character before the start of a string
2009 // so this matches nothing.
2010 muxerText: /a^/
2011 };
2012 var mediaTypes = ['video', 'audio', 'text'];
2013 var upperMediaTypes = ['Video', 'Audio', 'Text'];
2014 /**
2015 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
2016 * `avc1.<hhhhhh>`
2017 *
2018 * @param {string} codec
2019 * Codec string to translate
2020 * @return {string}
2021 * The translated codec string
2022 */
2023
2024 var translateLegacyCodec = function translateLegacyCodec(codec) {
2025 if (!codec) {
2026 return codec;
2027 }
2028
2029 return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
2030 var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
2031 var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
2032 return 'avc1.' + profileHex + '00' + avcLevelHex;
2033 });
2034 };
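  /*
   * Example (illustrative values): 66 decimal is 0x42 and 30 is 0x1e, so the
   * legacy Apple pair collapses into the standard six-hex-digit suffix.
   *
   *   translateLegacyCodec('avc1.66.30'); // => 'avc1.42001e'
   *   translateLegacyCodec('mp4a.40.2'); // => 'mp4a.40.2' (no legacy pattern, unchanged)
   */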
2035 /**
2036 * @typedef {Object} ParsedCodecInfo
2037 * @property {string} type
2038 * The base codec (e.g. avc1 or mp4a)
2039 * @property {string} details
2040 * The codec details that follow the base codec (e.g. .42001e)
2041 * @property {string} mediaType
2042 * The detected media type: 'video', 'audio', 'text', or
2043 * 'unknown' when no media-type regex matches
2044 *
2045 */
2046
2047 /**
2048 * Parses a codec string into an array of codec info objects, one for
2049 * each comma-separated codec in the string.
2050 *
2051 * @param {string} [codecString]
2052 * The codec string to parse
2053 * @return {ParsedCodecInfo[]}
2054 * Array of parsed codec info
2055 */
2056
2057 var parseCodecs = function parseCodecs(codecString) {
2058 if (codecString === void 0) {
2059 codecString = '';
2060 }
2061
2062 var codecs = codecString.split(',');
2063 var result = [];
2064 codecs.forEach(function (codec) {
2065 codec = codec.trim();
2066 var codecType;
2067 mediaTypes.forEach(function (name) {
2068 var match = regexs[name].exec(codec.toLowerCase());
2069
2070 if (!match || match.length <= 1) {
2071 return;
2072 }
2073
2074 codecType = name; // maintain codec case
2075
2076 var type = codec.substring(0, match[1].length);
2077 var details = codec.replace(type, '');
2078 result.push({
2079 type: type,
2080 details: details,
2081 mediaType: name
2082 });
2083 });
2084
2085 if (!codecType) {
2086 result.push({
2087 type: codec,
2088 details: '',
2089 mediaType: 'unknown'
2090 });
2091 }
2092 });
2093 return result;
2094 };
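  /*
   * Usage sketch (assumed codec string): each comma-separated codec is split
   * into its base codec, trailing details, and detected media type.
   *
   *   parseCodecs('avc1.42001e, mp4a.40.2');
   *   // => [
   *   //   { type: 'avc1', details: '.42001e', mediaType: 'video' },
   *   //   { type: 'mp4a', details: '.40.2', mediaType: 'audio' }
   *   // ]
   */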
2095 /**
2096 * Returns parsed codec info for the default alternate audio playlist if there is
2097 * a default alternate audio playlist for the provided audio group.
2098 *
2099 * @param {Object} master
2100 * The master playlist
2101 * @param {string} audioGroupId
2102 * ID of the audio group for which to find the default codec info
2103 * @return {ParsedCodecInfo[]|null}
2104 * Parsed codec info, or null if no default playlist is found
2105 */
2106
2107 var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {
2108 if (!master.mediaGroups.AUDIO || !audioGroupId) {
2109 return null;
2110 }
2111
2112 var audioGroup = master.mediaGroups.AUDIO[audioGroupId];
2113
2114 if (!audioGroup) {
2115 return null;
2116 }
2117
2118 for (var name in audioGroup) {
2119 var audioType = audioGroup[name];
2120
2121 if (audioType.default && audioType.playlists) {
2122 // codec should be the same for all playlists within the audio type
2123 return parseCodecs(audioType.playlists[0].attributes.CODECS);
2124 }
2125 }
2126
2127 return null;
2128 };
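  /*
   * Sketch (hypothetical master object): given an AUDIO group 'aud1' whose
   * DEFAULT rendition's first playlist advertises CODECS="mp4a.40.2", the
   * default codec info is parsed from that playlist.
   *
   *   codecsFromDefault(master, 'aud1');
   *   // => [{ type: 'mp4a', details: '.40.2', mediaType: 'audio' }]
   *   codecsFromDefault(master, 'missing-group'); // => null
   */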
2129 var isAudioCodec = function isAudioCodec(codec) {
2130 if (codec === void 0) {
2131 codec = '';
2132 }
2133
2134 return regexs.audio.test(codec.trim().toLowerCase());
2135 };
2136 var isTextCodec = function isTextCodec(codec) {
2137 if (codec === void 0) {
2138 codec = '';
2139 }
2140
2141 return regexs.text.test(codec.trim().toLowerCase());
2142 };
2143 var getMimeForCodec = function getMimeForCodec(codecString) {
2144 if (!codecString || typeof codecString !== 'string') {
2145 return;
2146 }
2147
2148 var codecs = codecString.toLowerCase().split(',').map(function (c) {
2149 return translateLegacyCodec(c.trim());
2150 }); // default to video type
2151
2152 var type = 'video'; // only change to audio type if the only codec we have is
2153 // audio
2154
2155 if (codecs.length === 1 && isAudioCodec(codecs[0])) {
2156 type = 'audio';
2157 } else if (codecs.length === 1 && isTextCodec(codecs[0])) {
2158 // text uses application/<container> for now
2159 type = 'application';
2160 } // default the container to mp4
2161
2162
2163 var container = 'mp4'; // every codec must be able to go into the container
2164 // for that container to be the correct one
2165
2166 if (codecs.every(function (c) {
2167 return regexs.mp4.test(c);
2168 })) {
2169 container = 'mp4';
2170 } else if (codecs.every(function (c) {
2171 return regexs.webm.test(c);
2172 })) {
2173 container = 'webm';
2174 } else if (codecs.every(function (c) {
2175 return regexs.ogg.test(c);
2176 })) {
2177 container = 'ogg';
2178 }
2179
2180 return type + "/" + container + ";codecs=\"" + codecString + "\"";
2181 };
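  /*
   * Examples (illustrative codec strings): the type falls back to video
   * unless the sole codec is audio or text, and the container is the first
   * one that every codec fits into.
   *
   *   getMimeForCodec('avc1.4d400d,mp4a.40.2'); // => 'video/mp4;codecs="avc1.4d400d,mp4a.40.2"'
   *   getMimeForCodec('mp4a.40.2'); // => 'audio/mp4;codecs="mp4a.40.2"'
   *   getMimeForCodec('vp8,vorbis'); // => 'video/webm;codecs="vp8,vorbis"'
   */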
2182 var browserSupportsCodec = function browserSupportsCodec(codecString) {
2183 if (codecString === void 0) {
2184 codecString = '';
2185 }
2186
2187 return window.MediaSource && window.MediaSource.isTypeSupported && window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
2188 };
2189 var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
2190 if (codecString === void 0) {
2191 codecString = '';
2192 }
2193
2194 return codecString.toLowerCase().split(',').every(function (codec) {
2195 codec = codec.trim(); // any match is supported.
2196
2197 for (var i = 0; i < upperMediaTypes.length; i++) {
2198 var type = upperMediaTypes[i];
2199
2200 if (regexs["muxer" + type].test(codec)) {
2201 return true;
2202 }
2203 }
2204
2205 return false;
2206 });
2207 };
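  /*
   * Examples (illustrative codec strings): the transmuxer only handles avc1
   * video and mp4a audio, so any other codec fails the check.
   *
   *   muxerSupportsCodec('avc1.4d400d,mp4a.40.2'); // => true
   *   muxerSupportsCodec('hvc1.1.6.L93.90'); // => false
   */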
2208 var DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
2209 var DEFAULT_VIDEO_CODEC = 'avc1.4d400d';
2210
2211 /**
2212 * ranges
2213 *
2214 * Utilities for working with TimeRanges.
2215 *
2216 */
2217
2218 var TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
2219 // can be misleading because of precision differences or when the current media has poorly
2220 // aligned audio and video, which can cause values to be slightly off from what you would
2221 // expect. This value is what we consider to be safe to use in such comparisons to account
2222 // for these scenarios.
2223
2224 var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
2225
2226 var filterRanges = function filterRanges(timeRanges, predicate) {
2227 var results = [];
2228 var i;
2229
2230 if (timeRanges && timeRanges.length) {
2231 // Search for ranges that match the predicate
2232 for (i = 0; i < timeRanges.length; i++) {
2233 if (predicate(timeRanges.start(i), timeRanges.end(i))) {
2234 results.push([timeRanges.start(i), timeRanges.end(i)]);
2235 }
2236 }
2237 }
2238
2239 return videojs__default["default"].createTimeRanges(results);
2240 };
2241 /**
2242 * Attempts to find the buffered TimeRange that contains the specified
2243 * time.
2244 *
2245 * @param {TimeRanges} buffered - the TimeRanges object to query
2246 * @param {number} time - the time to filter on.
2247 * @return {TimeRanges} a new TimeRanges object
2248 */
2249
2250
2251 var findRange = function findRange(buffered, time) {
2252 return filterRanges(buffered, function (start, end) {
2253 return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
2254 });
2255 };
2256 /**
2257 * Returns the TimeRanges that begin later than the specified time.
2258 *
2259 * @param {TimeRanges} timeRanges - the TimeRanges object to query
2260 * @param {number} time - the time to filter on.
2261 * @return {TimeRanges} a new TimeRanges object.
2262 */
2263
2264 var findNextRange = function findNextRange(timeRanges, time) {
2265 return filterRanges(timeRanges, function (start) {
2266 return start - TIME_FUDGE_FACTOR >= time;
2267 });
2268 };
2269 /**
2270 * Returns gaps within a list of TimeRanges
2271 *
2272 * @param {TimeRanges} buffered - the TimeRanges object
2273 * @return {TimeRanges} a TimeRanges object of gaps
2274 */
2275
2276 var findGaps = function findGaps(buffered) {
2277 if (buffered.length < 2) {
2278 return videojs__default["default"].createTimeRanges();
2279 }
2280
2281 var ranges = [];
2282
2283 for (var i = 1; i < buffered.length; i++) {
2284 var start = buffered.end(i - 1);
2285 var end = buffered.start(i);
2286 ranges.push([start, end]);
2287 }
2288
2289 return videojs__default["default"].createTimeRanges(ranges);
2290 };
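  /*
   * Example (assumed buffered ranges, built with the videojs helper):
   *
   *   var buffered = videojs.createTimeRanges([[0, 10], [15, 20]]);
   *   findGaps(buffered); // => TimeRanges equivalent to [[10, 15]]
   */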
2291 /**
2292 * Calculate the intersection of two TimeRanges
2293 *
2294 * @param {TimeRanges} bufferA
2295 * @param {TimeRanges} bufferB
2296 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
2297 */
2298
2299 var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
2300 var start = null;
2301 var end = null;
2302 var arity = 0;
2303 var extents = [];
2304 var ranges = [];
2305
2306 if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
2307 return videojs__default["default"].createTimeRange();
2308 } // Handle the case where we have both buffers and create an
2309 // intersection of the two
2310
2311
2312 var count = bufferA.length; // A) Gather up all start and end times
2313
2314 while (count--) {
2315 extents.push({
2316 time: bufferA.start(count),
2317 type: 'start'
2318 });
2319 extents.push({
2320 time: bufferA.end(count),
2321 type: 'end'
2322 });
2323 }
2324
2325 count = bufferB.length;
2326
2327 while (count--) {
2328 extents.push({
2329 time: bufferB.start(count),
2330 type: 'start'
2331 });
2332 extents.push({
2333 time: bufferB.end(count),
2334 type: 'end'
2335 });
2336 } // B) Sort them by time
2337
2338
2339 extents.sort(function (a, b) {
2340 return a.time - b.time;
2341 }); // C) Go along one by one incrementing arity for start and decrementing
2342 // arity for ends
2343
2344 for (count = 0; count < extents.length; count++) {
2345 if (extents[count].type === 'start') {
2346 arity++; // D) If arity is ever incremented to 2 we are entering an
2347 // overlapping range
2348
2349 if (arity === 2) {
2350 start = extents[count].time;
2351 }
2352 } else if (extents[count].type === 'end') {
2353 arity--; // E) If arity is ever decremented to 1 we are leaving an
2354 // overlapping range
2355
2356 if (arity === 1) {
2357 end = extents[count].time;
2358 }
2359 } // F) Record overlapping ranges
2360
2361
2362 if (start !== null && end !== null) {
2363 ranges.push([start, end]);
2364 start = null;
2365 end = null;
2366 }
2367 }
2368
2369 return videojs__default["default"].createTimeRanges(ranges);
2370 };
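  /*
   * Example (assumed inputs): the sweep over the sorted start/end extents
   * keeps only the spans where both buffers overlap.
   *
   *   var a = videojs.createTimeRanges([[0, 10], [20, 30]]);
   *   var b = videojs.createTimeRanges([[5, 25]]);
   *   bufferIntersection(a, b); // => TimeRanges equivalent to [[5, 10], [20, 25]]
   */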
2371 /**
2372 * Gets a human readable string for a TimeRange
2373 *
2374 * @param {TimeRange} range
2375 * @return {string} a human readable string
2376 */
2377
2378 var printableRange = function printableRange(range) {
2379 var strArr = [];
2380
2381 if (!range || !range.length) {
2382 return '';
2383 }
2384
2385 for (var i = 0; i < range.length; i++) {
2386 strArr.push(range.start(i) + ' => ' + range.end(i));
2387 }
2388
2389 return strArr.join(', ');
2390 };
2391 /**
2392 * Calculates the amount of time left in seconds until the player hits the end of the
2393 * buffer and causes a rebuffer
2394 *
2395 * @param {TimeRange} buffered
2396 * The state of the buffer
2397 * @param {number} currentTime
2398 * The current time of the player
2399 * @param {number} playbackRate
2400 * The current playback rate of the player. Defaults to 1.
2401 * @return {number}
2402 * Time until the player has to start rebuffering in seconds.
2403 * @function timeUntilRebuffer
2404 */
2405
2406 var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
2407 if (playbackRate === void 0) {
2408 playbackRate = 1;
2409 }
2410
2411 var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
2412 return (bufferedEnd - currentTime) / playbackRate;
2413 };
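  /*
   * Example (assumed values): 8 seconds of buffer remain at 1x, but the same
   * buffer drains twice as fast at 2x.
   *
   *   var buffered = videojs.createTimeRanges([[0, 30]]);
   *   timeUntilRebuffer(buffered, 22); // => 8
   *   timeUntilRebuffer(buffered, 22, 2); // => 4
   */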
2414 /**
2415 * Converts a TimeRanges object into an array representation
2416 *
2417 * @param {TimeRanges} timeRanges
2418 * @return {Array}
2419 */
2420
2421 var timeRangesToArray = function timeRangesToArray(timeRanges) {
2422 var timeRangesList = [];
2423
2424 for (var i = 0; i < timeRanges.length; i++) {
2425 timeRangesList.push({
2426 start: timeRanges.start(i),
2427 end: timeRanges.end(i)
2428 });
2429 }
2430
2431 return timeRangesList;
2432 };
2433 /**
2434 * Determines if two time range objects are different.
2435 *
2436 * @param {TimeRange} a
2437 * the first time range object to check
2438 *
2439 * @param {TimeRange} b
2440 * the second time range object to check
2441 *
2442 * @return {Boolean}
2443 * Whether the time range objects differ
2444 */
2445
2446 var isRangeDifferent = function isRangeDifferent(a, b) {
2447 // same object
2448 if (a === b) {
2449 return false;
2450 } // one or the other is undefined
2451
2452
2453 if (!a && b || !b && a) {
2454 return true;
2455 } // length is different
2456
2457
2458 if (a.length !== b.length) {
2459 return true;
2460 } // see if any start/end pair is different
2461
2462
2463 for (var i = 0; i < a.length; i++) {
2464 if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
2465 return true;
2466 }
2467 } // if the length and every pair is the same
2468 // this is the same time range
2469
2470
2471 return false;
2472 };
2473 var lastBufferedEnd = function lastBufferedEnd(a) {
2474 if (!a || !a.length || !a.end) {
2475 return;
2476 }
2477
2478 return a.end(a.length - 1);
2479 };
2480 /**
2481 * A utility function to add up the amount of time in a timeRange
2482 * after a specified startTime.
2483 * e.g. [[0, 10], [20, 40], [50, 60]] with a startTime of 0
2484 * would return 40, as there are 40 seconds after 0 in the timeRange
2485 *
2486 * @param {TimeRange} range
2487 * The range to check against
2488 * @param {number} startTime
2489 * The time in the time range that you should start counting from
2490 *
2491 * @return {number}
2492 * The number of seconds in the buffer past the specified time.
2493 */
2494
2495 var timeAheadOf = function timeAheadOf(range, startTime) {
2496 var time = 0;
2497
2498 if (!range || !range.length) {
2499 return time;
2500 }
2501
2502 for (var i = 0; i < range.length; i++) {
2503 var start = range.start(i);
2504 var end = range.end(i); // startTime is after this range entirely
2505
2506 if (startTime > end) {
2507 continue;
2508 } // startTime is within this range
2509
2510
2511 if (startTime > start && startTime <= end) {
2512 time += end - startTime;
2513 continue;
2514 } // startTime is before this range.
2515
2516
2517 time += end - start;
2518 }
2519
2520 return time;
2521 };
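  /*
   * Example (assumed ranges, matching the doc comment above):
   *
   *   var range = videojs.createTimeRanges([[0, 10], [20, 40], [50, 60]]);
   *   timeAheadOf(range, 0); // => 40 (10 + 20 + 10)
   *   timeAheadOf(range, 25); // => 25 (15 left in [20, 40] plus 10 in [50, 60])
   */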
2522
2523 /**
2524 * @file playlist.js
2525 *
2526 * Playlist related utilities.
2527 */
2528 var createTimeRange = videojs__default["default"].createTimeRange;
2529 /**
2530 * A function to get a combined list of parts and segments with durations
2531 * and indexes.
2532 *
2533 * @param {Playlist} playlist the playlist to get the list for.
2534 *
2535 * @return {Array} The part/segment list.
2536 */
2537
2538 var getPartsAndSegments = function getPartsAndSegments(playlist) {
2539 return (playlist.segments || []).reduce(function (acc, segment, si) {
2540 if (segment.parts) {
2541 segment.parts.forEach(function (part, pi) {
2542 acc.push({
2543 duration: part.duration,
2544 segmentIndex: si,
2545 partIndex: pi,
2546 part: part,
2547 segment: segment
2548 });
2549 });
2550 } else {
2551 acc.push({
2552 duration: segment.duration,
2553 segmentIndex: si,
2554 partIndex: null,
2555 segment: segment,
2556 part: null
2557 });
2558 }
2559
2560 return acc;
2561 }, []);
2562 };
2563 var getLastParts = function getLastParts(media) {
2564 var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
2565 return lastSegment && lastSegment.parts || [];
2566 };
2567 var getKnownPartCount = function getKnownPartCount(_ref) {
2568 var preloadSegment = _ref.preloadSegment;
2569
2570 if (!preloadSegment) {
2571 return;
2572 }
2573
2574 var parts = preloadSegment.parts,
2575 preloadHints = preloadSegment.preloadHints;
2576 var partCount = (preloadHints || []).reduce(function (count, hint) {
2577 return count + (hint.type === 'PART' ? 1 : 0);
2578 }, 0);
2579 partCount += parts && parts.length ? parts.length : 0;
2580 return partCount;
2581 };
2582 /**
2583 * Get the number of seconds to delay from the end of a
2584 * live playlist.
2585 *
2586 * @param {Playlist} master the master playlist
2587 * @param {Playlist} media the media playlist
2588 * @return {number} the hold back in seconds.
2589 */
2590
2591 var liveEdgeDelay = function liveEdgeDelay(master, media) {
2592 if (media.endList) {
2593 return 0;
2594 } // dash suggestedPresentationDelay trumps everything
2595
2596
2597 if (master && master.suggestedPresentationDelay) {
2598 return master.suggestedPresentationDelay;
2599 }
2600
2601 var hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls first
2602
2603 if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
2604 return media.serverControl.partHoldBack;
2605 } else if (hasParts && media.partTargetDuration) {
2606 return media.partTargetDuration * 3; // finally look for full segment delays
2607 } else if (media.serverControl && media.serverControl.holdBack) {
2608 return media.serverControl.holdBack;
2609 } else if (media.targetDuration) {
2610 return media.targetDuration * 3;
2611 }
2612
2613 return 0;
2614 };
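  /*
   * Sketch of the precedence (hypothetical playlist objects):
   *
   *   liveEdgeDelay(master, { endList: true }); // => 0, VOD has no live edge
   *   liveEdgeDelay({ suggestedPresentationDelay: 12 }, { endList: false }); // => 12 (DASH wins)
   *   liveEdgeDelay(null, { serverControl: { holdBack: 9 } }); // => 9
   *   liveEdgeDelay(null, { targetDuration: 6 }); // => 18, i.e. 6 * 3
   */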
2615 /**
2616 * walk backward until we find a duration we can use
2617 * or return a failure
2618 *
2619 * @param {Playlist} playlist the playlist to walk through
2620 * @param {Number} endSequence the mediaSequence to stop walking on
2621 */
2622
2623 var backwardDuration = function backwardDuration(playlist, endSequence) {
2624 var result = 0;
2625 var i = endSequence - playlist.mediaSequence; // if a start time is available for the segment immediately following
2626 // the interval, use it
2627
2628 var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
2629 // information that is earlier than endSequence
2630
2631 if (segment) {
2632 if (typeof segment.start !== 'undefined') {
2633 return {
2634 result: segment.start,
2635 precise: true
2636 };
2637 }
2638
2639 if (typeof segment.end !== 'undefined') {
2640 return {
2641 result: segment.end - segment.duration,
2642 precise: true
2643 };
2644 }
2645 }
2646
2647 while (i--) {
2648 segment = playlist.segments[i];
2649
2650 if (typeof segment.end !== 'undefined') {
2651 return {
2652 result: result + segment.end,
2653 precise: true
2654 };
2655 }
2656
2657 result += segment.duration;
2658
2659 if (typeof segment.start !== 'undefined') {
2660 return {
2661 result: result + segment.start,
2662 precise: true
2663 };
2664 }
2665 }
2666
2667 return {
2668 result: result,
2669 precise: false
2670 };
2671 };
2672 /**
2673 * walk forward until we find a duration we can use
2674 * or return a failure
2675 *
2676 * @param {Playlist} playlist the playlist to walk through
2677 * @param {number} endSequence the mediaSequence to stop walking on
2678 */
2679
2680
2681 var forwardDuration = function forwardDuration(playlist, endSequence) {
2682 var result = 0;
2683 var segment;
2684 var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
2685 // information
2686
2687 for (; i < playlist.segments.length; i++) {
2688 segment = playlist.segments[i];
2689
2690 if (typeof segment.start !== 'undefined') {
2691 return {
2692 result: segment.start - result,
2693 precise: true
2694 };
2695 }
2696
2697 result += segment.duration;
2698
2699 if (typeof segment.end !== 'undefined') {
2700 return {
2701 result: segment.end - result,
2702 precise: true
2703 };
2704 }
2705 } // indicate we didn't find a useful duration estimate
2706
2707
2708 return {
2709 result: -1,
2710 precise: false
2711 };
2712 };
2713 /**
2714 * Calculate the media duration from the segments associated with a
2715 * playlist. The duration of a subinterval of the available segments
2716 * may be calculated by specifying an end index.
2717 *
2718 * @param {Object} playlist a media playlist object
2719 * @param {number=} endSequence an exclusive upper boundary
2720 * for the playlist. Defaults to playlist length.
2721 * @param {number} expired the amount of time that has dropped
2722 * off the front of the playlist in a live scenario
2723 * @return {number} the duration between the first available segment
2724 * and end index.
2725 */
2726
2727
2728 var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
2729 if (typeof endSequence === 'undefined') {
2730 endSequence = playlist.mediaSequence + playlist.segments.length;
2731 }
2732
2733 if (endSequence < playlist.mediaSequence) {
2734 return 0;
2735 } // do a backward walk to estimate the duration
2736
2737
2738 var backward = backwardDuration(playlist, endSequence);
2739
2740 if (backward.precise) {
2741 // if we were able to base our duration estimate on timing
2742 // information provided directly from the Media Source, return
2743 // it
2744 return backward.result;
2745 } // walk forward to see if a precise duration estimate can be made
2746 // that way
2747
2748
2749 var forward = forwardDuration(playlist, endSequence);
2750
2751 if (forward.precise) {
2752 // we found a segment that has been buffered and so its
2753 // position is known precisely
2754 return forward.result;
2755 } // return the less-precise, playlist-based duration estimate
2756
2757
2758 return backward.result + expired;
2759 };
2760 /**
2761 * Calculates the duration of a playlist. If a start and end index
2762 * are specified, the duration will be for the subset of the media
2763 * timeline between those two indices. The total duration for live
2764 * playlists is always Infinity.
2765 *
2766 * @param {Object} playlist a media playlist object
2767 * @param {number=} endSequence an exclusive upper
2768 * boundary for the playlist. Defaults to the playlist media
2769 * sequence number plus its length.
2770 * @param {number=} expired the amount of time that has
2771 * dropped off the front of the playlist in a live scenario
2772 * @return {number} the duration between the start index and end
2773 * index.
2774 */
2775
2776
2777 var duration = function duration(playlist, endSequence, expired) {
2778 if (!playlist) {
2779 return 0;
2780 }
2781
2782 if (typeof expired !== 'number') {
2783 expired = 0;
2784 } // if a slice of the total duration is not requested, use
2785 // playlist-level duration indicators when they're present
2786
2787
2788 if (typeof endSequence === 'undefined') {
2789 // if present, use the duration specified in the playlist
2790 if (playlist.totalDuration) {
2791 return playlist.totalDuration;
2792 } // duration should be Infinity for live playlists
2793
2794
2795 if (!playlist.endList) {
2796 return window.Infinity;
2797 }
2798 } // calculate the total duration based on the segment durations
2799
2800
2801 return intervalDuration(playlist, endSequence, expired);
2802 };
2803 /**
2804 * Calculate the time between two indexes in the current playlist.
2805 * Neither the start index nor the end index needs to be within the current
2806 * playlist, in which case the targetDuration of the playlist is used
2807 * to approximate the durations of the segments
2808 *
2809 * @param {Array} options.durationList list to iterate over for durations.
2810 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
2811 * @param {number} options.startIndex partsAndSegments index to start
2812 * @param {number} options.endIndex partsAndSegments index to end.
2813 * @return {number} the number of seconds between startIndex and endIndex
2814 */
2815
2816 var sumDurations = function sumDurations(_ref2) {
2817 var defaultDuration = _ref2.defaultDuration,
2818 durationList = _ref2.durationList,
2819 startIndex = _ref2.startIndex,
2820 endIndex = _ref2.endIndex;
2821 var durations = 0;
2822
2823 if (startIndex > endIndex) {
2824 var _ref3 = [endIndex, startIndex];
2825 startIndex = _ref3[0];
2826 endIndex = _ref3[1];
2827 }
2828
2829 if (startIndex < 0) {
2830 for (var i = startIndex; i < Math.min(0, endIndex); i++) {
2831 durations += defaultDuration;
2832 }
2833
2834 startIndex = 0;
2835 }
2836
2837 for (var _i = startIndex; _i < endIndex; _i++) {
2838 durations += durationList[_i].duration;
2839 }
2840
2841 return durations;
2842 };
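  /*
   * Example (assumed values): indexes below zero fall back to the default
   * duration, then real durations are summed up to (but excluding) endIndex.
   *
   *   sumDurations({
   *     defaultDuration: 10,
   *     durationList: [{ duration: 4 }, { duration: 4 }, { duration: 4 }],
   *     startIndex: -2,
   *     endIndex: 2
   *   }); // => 10 + 10 + 4 + 4 = 28
   */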
2843 /**
2844 * Calculates the playlist end time
2845 *
2846 * @param {Object} playlist a media playlist object
2847 * @param {number=} expired the amount of time that has
2848 * dropped off the front of the playlist in a live scenario
2849 * @param {boolean} useSafeLiveEnd a boolean value indicating whether or not the
2850 * playlist end calculation should consider the safe live end
2851 * (truncate the playlist end by three segments). This is normally
2852 * used for calculating the end of the playlist's seekable range.
2853 * This takes into account the value of liveEdgePadding.
2854 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
2855 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
2856 * If this is provided, it is used in the safe live end calculation.
2857 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
2858 * Corresponds to suggestedPresentationDelay in DASH manifests.
2859 * @return {number} the end time of playlist
2860 * @function playlistEnd
2861 */
2862
2863 var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
2864 if (!playlist || !playlist.segments) {
2865 return null;
2866 }
2867
2868 if (playlist.endList) {
2869 return duration(playlist);
2870 }
2871
2872 if (expired === null) {
2873 return null;
2874 }
2875
2876 expired = expired || 0;
2877 var lastSegmentTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);
2878
2879 if (useSafeLiveEnd) {
2880 liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
2881 lastSegmentTime -= liveEdgePadding;
2882 } // don't return a time less than zero
2883
2884
2885 return Math.max(0, lastSegmentTime);
2886 };
2887 /**
2888 * Calculates the interval of time that is currently seekable in a
2889 * playlist. The returned time ranges are relative to the earliest
2890 * moment in the specified playlist that is still available. A full
2891 * seekable implementation for live streams would need to offset
2892 * these values by the duration of content that has expired from the
2893 * stream.
2894 *
2895 * @param {Object} playlist a media playlist object
2897 * @param {number=} expired the amount of time that has
2898 * dropped off the front of the playlist in a live scenario
2899 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
2900 * Corresponds to suggestedPresentationDelay in DASH manifests.
2901 * @return {TimeRanges} the periods of time that are valid targets
2902 * for seeking
2903 */
2904
2905 var seekable = function seekable(playlist, expired, liveEdgePadding) {
2906 var useSafeLiveEnd = true;
2907 var seekableStart = expired || 0;
2908 var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
2909
2910 if (seekableEnd === null) {
2911 return createTimeRange();
2912 }
2913
2914 return createTimeRange(seekableStart, seekableEnd);
2915 };
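  /*
   * Sketch (hypothetical live playlist): six 10-second segments with no
   * precise timing info yield a 60s window, and the default hold back from
   * liveEdgeDelay (3 * targetDuration = 30) trims the end.
   *
   *   seekable(playlist, 0); // => TimeRanges equivalent to [[0, 30]]
   */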
2916 /**
2917 * Determine the index and estimated starting time of the segment that
2918 * contains a specified playback position in a media playlist.
2919 *
2920 * @param {Object} options.playlist the media playlist to query
2921 * @param {number} options.currentTime The number of seconds since the earliest
2922 * possible position to determine the containing segment for
2923 * @param {number} options.startTime the time when the segment/part starts
2924 * @param {number} options.startingSegmentIndex the segment index to start looking at.
2925 * @param {number} [options.startingPartIndex] the part index to look at within the segment.
2926 *
2927 * @return {Object} an object with partIndex, segmentIndex, and startTime.
2928 */
2929
2930 var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
2931 var playlist = _ref4.playlist,
2932 currentTime = _ref4.currentTime,
2933 startingSegmentIndex = _ref4.startingSegmentIndex,
2934 startingPartIndex = _ref4.startingPartIndex,
2935 startTime = _ref4.startTime,
2936 experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
2937 var time = currentTime - startTime;
2938 var partsAndSegments = getPartsAndSegments(playlist);
2939 var startIndex = 0;
2940
2941 for (var i = 0; i < partsAndSegments.length; i++) {
2942 var partAndSegment = partsAndSegments[i];
2943
2944 if (startingSegmentIndex !== partAndSegment.segmentIndex) {
2945 continue;
2946 } // skip this if part index does not match.
2947
2948
2949 if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
2950 continue;
2951 }
2952
2953 startIndex = i;
2954 break;
2955 }
2956
2957 if (time < 0) {
2958 // Walk backward from startIndex in the playlist, adding durations
2959 // until we find a segment that contains `time` and return it
2960 if (startIndex > 0) {
2961 for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
2962 var _partAndSegment = partsAndSegments[_i2];
2963 time += _partAndSegment.duration;
2964
2965 if (experimentalExactManifestTimings) {
2966 if (time < 0) {
2967 continue;
2968 }
2969 } else if (time + TIME_FUDGE_FACTOR <= 0) {
2970 continue;
2971 }
2972
2973 return {
2974 partIndex: _partAndSegment.partIndex,
2975 segmentIndex: _partAndSegment.segmentIndex,
2976 startTime: startTime - sumDurations({
2977 defaultDuration: playlist.targetDuration,
2978 durationList: partsAndSegments,
2979 startIndex: startIndex,
2980 endIndex: _i2
2981 })
2982 };
2983 }
2984 } // We were unable to find a good segment within the playlist
2985 // so select the first segment
2986
2987
2988 return {
2989 partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
2990 segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
2991 startTime: currentTime
2992 };
2993 } // When startIndex is negative, we first walk forward to the first segment,
2994 // adding target durations. If we "run out of time" before getting to
2995 // the first segment, return the first segment
2996
2997
2998 if (startIndex < 0) {
2999 for (var _i3 = startIndex; _i3 < 0; _i3++) {
3000 time -= playlist.targetDuration;
3001
3002 if (time < 0) {
3003 return {
3004 partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
3005 segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
3006 startTime: currentTime
3007 };
3008 }
3009 }
3010
3011 startIndex = 0;
3012 } // Walk forward from startIndex in the playlist, subtracting durations
3013 // until we find a segment that contains `time` and return it
3014
3015
3016 for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
3017 var _partAndSegment2 = partsAndSegments[_i4];
3018 time -= _partAndSegment2.duration;
3019
3020 if (experimentalExactManifestTimings) {
3021 if (time > 0) {
3022 continue;
3023 }
3024 } else if (time - TIME_FUDGE_FACTOR >= 0) {
3025 continue;
3026 }
3027
3028 return {
3029 partIndex: _partAndSegment2.partIndex,
3030 segmentIndex: _partAndSegment2.segmentIndex,
3031 startTime: startTime + sumDurations({
3032 defaultDuration: playlist.targetDuration,
3033 durationList: partsAndSegments,
3034 startIndex: startIndex,
3035 endIndex: _i4
3036 })
3037 };
3038 } // We are out of possible candidates so load the last one...
3039
3040
3041 return {
3042 segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
3043 partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
3044 startTime: currentTime
3045 };
3046 };
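  /*
   * Sketch (hypothetical playlist of at least three 10-second segments, no
   * parts): starting from a sync point at segment 0 / time 0, a currentTime
   * of 25 walks forward into the third segment.
   *
   *   getMediaInfoForTime({ playlist: playlist, currentTime: 25, startingSegmentIndex: 0, startTime: 0 });
   *   // => { partIndex: null, segmentIndex: 2, startTime: 20 }
   */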
3047 /**
3048 * Check whether the playlist is blacklisted or not.
3049 *
3050 * @param {Object} playlist the media playlist object
3051 * @return {boolean} whether the playlist is blacklisted or not
3052 * @function isBlacklisted
3053 */
3054
3055 var isBlacklisted = function isBlacklisted(playlist) {
3056 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
3057 };
3058 /**
3059 * Check whether the playlist is compatible with current playback configuration or has
3060 * been blacklisted permanently for being incompatible.
3061 *
3062 * @param {Object} playlist the media playlist object
3063 * @return {boolean} whether the playlist is incompatible or not
3064 * @function isIncompatible
3065 */
3066
3067 var isIncompatible = function isIncompatible(playlist) {
3068 return playlist.excludeUntil && playlist.excludeUntil === Infinity;
3069 };
3070 /**
3071 * Check whether the playlist is enabled or not.
3072 *
3073 * @param {Object} playlist the media playlist object
3074 * @return {boolean} whether the playlist is enabled or not
3075 * @function isEnabled
3076 */
3077
3078 var isEnabled = function isEnabled(playlist) {
3079 var blacklisted = isBlacklisted(playlist);
3080 return !playlist.disabled && !blacklisted;
3081 };
3082 /**
3083 * Check whether the playlist has been manually disabled through the representations api.
3084 *
3085 * @param {Object} playlist the media playlist object
3086 * @return {boolean} whether the playlist is disabled manually or not
3087 * @function isDisabled
3088 */
3089
3090 var isDisabled = function isDisabled(playlist) {
3091 return playlist.disabled;
3092 };
3093 /**
3094 * Returns whether the current playlist is an AES encrypted HLS stream
3095 *
3096 * @return {boolean} true if it's an AES encrypted HLS stream
3097 */
3098
3099 var isAes = function isAes(media) {
3100 for (var i = 0; i < media.segments.length; i++) {
3101 if (media.segments[i].key) {
3102 return true;
3103 }
3104 }
3105
3106 return false;
3107 };
3108 /**
3109 * Checks if the playlist has a value for the specified attribute
3110 *
3111 * @param {string} attr
3112 * Attribute to check for
3113 * @param {Object} playlist
3114 * The media playlist object
3115 * @return {boolean}
3116 * Whether the playlist contains a value for the attribute or not
3117 * @function hasAttribute
3118 */
3119
3120 var hasAttribute = function hasAttribute(attr, playlist) {
3121 return playlist.attributes && playlist.attributes[attr];
3122 };
3123 /**
3124 * Estimates the time required to complete a segment download from the specified playlist
3125 *
3126 * @param {number} segmentDuration
3127 * Duration of requested segment
3128 * @param {number} bandwidth
3129 * Current measured bandwidth of the player
3130 * @param {Object} playlist
3131 * The media playlist object
3132 * @param {number=} bytesReceived
3133 * Number of bytes already received for the request. Defaults to 0
3134 * @return {number|NaN}
3135 * The estimated time to request the segment. NaN if bandwidth information for
3136 * the given playlist is unavailable
3137 * @function estimateSegmentRequestTime
3138 */
3139
3140 var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
3141 if (bytesReceived === void 0) {
3142 bytesReceived = 0;
3143 }
3144
3145 if (!hasAttribute('BANDWIDTH', playlist)) {
3146 return NaN;
3147 }
3148
3149 var size = segmentDuration * playlist.attributes.BANDWIDTH;
3150 return (size - bytesReceived * 8) / bandwidth;
3151 };
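  /*
   * Example (assumed numbers): a 4s segment from a playlist advertising
   * BANDWIDTH=2000000 is roughly 8,000,000 bits, so over a measured
   * 4,000,000 bits/s connection with nothing received yet:
   *
   *   estimateSegmentRequestTime(4, 4e6, playlist); // => (4 * 2e6 - 0) / 4e6 = 2 seconds
   */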
3152 /*
3153 * Returns whether the current playlist is the lowest rendition
3154 *
3155 * @return {Boolean} true if on lowest rendition
3156 */
3157
3158 var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
3159 if (master.playlists.length === 1) {
3160 return true;
3161 }
3162
3163 var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
3164 return master.playlists.filter(function (playlist) {
3165 if (!isEnabled(playlist)) {
3166 return false;
3167 }
3168
3169 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
3170 }).length === 0;
3171 };
3172 var playlistMatch = function playlistMatch(a, b) {
3173 // both playlists are null
3174 // or only one playlist is non-null
3175 // no match
3176 if (!a && !b || !a && b || a && !b) {
3177 return false;
3178 } // playlist objects are the same, match
3179
3180
3181 if (a === b) {
3182 return true;
3183 } // first try to use id as it should be the most
3184 // accurate
3185
3186
3187 if (a.id && b.id && a.id === b.id) {
3188 return true;
3189 } // next try to use resolvedUri as it should be the
3190 // second most accurate.
3191
3192
3193 if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
3194 return true;
3195 } // finally try to use uri as it should be accurate
3196 // but might miss a few cases for relative uris
3197
3198
3199 if (a.uri && b.uri && a.uri === b.uri) {
3200 return true;
3201 }
3202
3203 return false;
3204 };
3205
3206 var someAudioVariant = function someAudioVariant(master, callback) {
3207 var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
3208 var found = false;
3209
3210 for (var groupName in AUDIO) {
3211 for (var label in AUDIO[groupName]) {
3212 found = callback(AUDIO[groupName][label]);
3213
3214 if (found) {
3215 break;
3216 }
3217 }
3218
3219 if (found) {
3220 break;
3221 }
3222 }
3223
3224 return !!found;
3225 };
3226
3227 var isAudioOnly = function isAudioOnly(master) {
3228 // we are audio only if we have no main playlists but do
3229 // have media group playlists.
3230 if (!master || !master.playlists || !master.playlists.length) {
3231 // without audio variants or playlists this
3232 // is not an audio only master.
3233 var found = someAudioVariant(master, function (variant) {
3234 return variant.playlists && variant.playlists.length || variant.uri;
3235 });
3236 return found;
3237 } // if every playlist has only an audio codec it is audio only
3238
3239
3240 var _loop = function _loop(i) {
3241 var playlist = master.playlists[i];
3242 var CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.
3243
3244 if (CODECS && CODECS.split(',').every(function (c) {
3245 return isAudioCodec(c);
3246 })) {
3247 return "continue";
3248 } // if the playlist is in an audio group, it is audio only
3249
3250
3251 var found = someAudioVariant(master, function (variant) {
3252 return playlistMatch(playlist, variant);
3253 });
3254
3255 if (found) {
3256 return "continue";
3257 } // if we make it here this playlist isn't audio and we
3258 // are not audio only
3259
3260
3261 return {
3262 v: false
3263 };
3264 };
3265
3266 for (var i = 0; i < master.playlists.length; i++) {
3267 var _ret = _loop(i);
3268
3269 if (_ret === "continue") continue;
3270 if (typeof _ret === "object") return _ret.v;
3271 } // if we make it past every playlist without returning, then
3272 // this is an audio only playlist.
3273
3274
3275 return true;
3276 }; // exports
3277
3278 var Playlist = {
3279 liveEdgeDelay: liveEdgeDelay,
3280 duration: duration,
3281 seekable: seekable,
3282 getMediaInfoForTime: getMediaInfoForTime,
3283 isEnabled: isEnabled,
3284 isDisabled: isDisabled,
3285 isBlacklisted: isBlacklisted,
3286 isIncompatible: isIncompatible,
3287 playlistEnd: playlistEnd,
3288 isAes: isAes,
3289 hasAttribute: hasAttribute,
3290 estimateSegmentRequestTime: estimateSegmentRequestTime,
3291 isLowestEnabledRendition: isLowestEnabledRendition,
3292 isAudioOnly: isAudioOnly,
3293 playlistMatch: playlistMatch
3294 };
3295
3296 var log = videojs__default["default"].log;
3297 var createPlaylistID = function createPlaylistID(index, uri) {
3298 return index + "-" + uri;
3299 };
3300 /**
3301 * Parses a given m3u8 playlist
3302 *
3303 * @param {Function} [onwarn]
3304 * a function to call when the parser triggers a warning event.
3305 * @param {Function} [oninfo]
3306 * a function to call when the parser triggers an info event.
3307 * @param {string} manifestString
3308 * The downloaded manifest string
3309 * @param {Object[]} [customTagParsers]
3310 * An array of custom tag parsers for the m3u8-parser instance
3311 * @param {Object[]} [customTagMappers]
3312 * An array of custom tag mappers for the m3u8-parser instance
3313 * @param {boolean} [experimentalLLHLS=false]
3314 * Whether to keep ll-hls features in the manifest after parsing.
3315 * @return {Object}
3316 * The manifest object
3317 */
3318
3319 var parseManifest = function parseManifest(_ref) {
3320 var onwarn = _ref.onwarn,
3321 oninfo = _ref.oninfo,
3322 manifestString = _ref.manifestString,
3323 _ref$customTagParsers = _ref.customTagParsers,
3324 customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
3325 _ref$customTagMappers = _ref.customTagMappers,
3326 customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
3327 experimentalLLHLS = _ref.experimentalLLHLS;
3328 var parser = new Parser();
3329
3330 if (onwarn) {
3331 parser.on('warn', onwarn);
3332 }
3333
3334 if (oninfo) {
3335 parser.on('info', oninfo);
3336 }
3337
3338 customTagParsers.forEach(function (customParser) {
3339 return parser.addParser(customParser);
3340 });
3341 customTagMappers.forEach(function (mapper) {
3342 return parser.addTagMapper(mapper);
3343 });
3344 parser.push(manifestString);
3345 parser.end();
3346 var manifest = parser.manifest; // remove llhls features from the parsed manifest
3347 // if we don't want llhls support.
3348
3349 if (!experimentalLLHLS) {
3350 ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
3351 if (manifest.hasOwnProperty(k)) {
3352 delete manifest[k];
3353 }
3354 });
3355
3356 if (manifest.segments) {
3357 manifest.segments.forEach(function (segment) {
3358 ['parts', 'preloadHints'].forEach(function (k) {
3359 if (segment.hasOwnProperty(k)) {
3360 delete segment[k];
3361 }
3362 });
3363 });
3364 }
3365 }
3366
3367 if (!manifest.targetDuration) {
3368 var targetDuration = 10;
3369
3370 if (manifest.segments && manifest.segments.length) {
3371 targetDuration = manifest.segments.reduce(function (acc, s) {
3372 return Math.max(acc, s.duration);
3373 }, 0);
3374 }
3375
3376 if (onwarn) {
3377 onwarn("manifest has no targetDuration defaulting to " + targetDuration);
3378 }
3379
3380 manifest.targetDuration = targetDuration;
3381 }
3382
3383 var parts = getLastParts(manifest);
3384
3385 if (parts.length && !manifest.partTargetDuration) {
3386 var partTargetDuration = parts.reduce(function (acc, p) {
3387 return Math.max(acc, p.duration);
3388 }, 0);
3389
3390 if (onwarn) {
3391 onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
3392 log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
3393 }
3394
3395 manifest.partTargetDuration = partTargetDuration;
3396 }
3397
3398 return manifest;
3399 };
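  /*
   * Usage sketch (illustrative manifest text): with no #EXT-X-TARGETDURATION
   * tag, targetDuration is defaulted to the longest segment duration and the
   * onwarn callback is notified.
   *
   *   parseManifest({
   *     manifestString: '#EXTM3U\n#EXTINF:10,\nseg0.ts\n#EXT-X-ENDLIST\n',
   *     onwarn: function (msg) { console.log(msg); }
   *   });
   *   // => manifest with one segment and targetDuration defaulted to 10
   */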
3400 /**
3401 * Loops through all supported media groups in master and calls the provided
3402 * callback for each group
3403 *
3404 * @param {Object} master
3405 * The parsed master manifest object
3406 * @param {Function} callback
3407 * Callback to call for each media group
3408 */
3409
3410 var forEachMediaGroup = function forEachMediaGroup(master, callback) {
3411 if (!master.mediaGroups) {
3412 return;
3413 }
3414
3415 ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
3416 if (!master.mediaGroups[mediaType]) {
3417 return;
3418 }
3419
3420 for (var groupKey in master.mediaGroups[mediaType]) {
3421 for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
3422 var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
3423 callback(mediaProperties, mediaType, groupKey, labelKey);
3424 }
3425 }
3426 });
3427 };
3428 /**
3429 * Adds properties and attributes to the playlist to keep consistent functionality for
3430 * playlists throughout VHS.
3431 *
3432 * @param {Object} config
3433 * Arguments object
3434 * @param {Object} config.playlist
3435 * The media playlist
3436 * @param {string} [config.uri]
3437 * The uri to the media playlist (if media playlist is not from within a master
3438 * playlist)
3439 * @param {string} id
3440 * ID to use for the playlist
3441 */
3442
3443 var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
3444 var playlist = _ref2.playlist,
3445 uri = _ref2.uri,
3446 id = _ref2.id;
3447 playlist.id = id;
3448 playlist.playlistErrors_ = 0;
3449
3450 if (uri) {
3451 // For media playlists, m3u8-parser does not have access to a URI, as HLS media
3452 // playlists do not contain their own source URI, but one is needed for consistency in
3453 // VHS.
3454 playlist.uri = uri;
3455 } // For HLS master playlists, even though certain attributes MUST be defined, the
3456 // stream may still be played without them.
3457 // For HLS media playlists, m3u8-parser does not attach an attributes object to the
3458 // manifest.
3459 //
3460 // To avoid undefined reference errors through the project, and make the code easier
3461 // to write/read, add an empty attributes object for these cases.
3462
3463
3464 playlist.attributes = playlist.attributes || {};
3465 };
3466 /**
3467 * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
3468 * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
3469 * playlist references to the playlists array.
3470 *
3471 * @param {Object} master
3472 * The master playlist
3473 */
3474
3475 var setupMediaPlaylists = function setupMediaPlaylists(master) {
3476 var i = master.playlists.length;
3477
3478 while (i--) {
3479 var playlist = master.playlists[i];
3480 setupMediaPlaylist({
3481 playlist: playlist,
3482 id: createPlaylistID(i, playlist.uri)
3483 });
3484 playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
3485 master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility
3486
3487 master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
3488 // the stream can be played without it. Although an attributes property may have been
3489 // added to the playlist to prevent undefined references, issue a warning to fix the
3490 // manifest.
3491
3492 if (!playlist.attributes.BANDWIDTH) {
3493 log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
3494 }
3495 }
3496 };
3497 /**
3498 * Adds resolvedUri properties to each media group.
3499 *
3500 * @param {Object} master
3501 * The master playlist
3502 */
3503
3504 var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
3505 forEachMediaGroup(master, function (properties) {
3506 if (properties.uri) {
3507 properties.resolvedUri = resolveUrl(master.uri, properties.uri);
3508 }
3509 });
3510 };
3511 /**
3512 * Creates a master playlist wrapper to insert a sole media playlist into.
3513 *
3514 * @param {Object} media
3515 * Media playlist
3516 * @param {string} uri
3517 * The media URI
3518 *
3519 * @return {Object}
3520 * Master playlist
3521 */
3522
3523 var masterForMedia = function masterForMedia(media, uri) {
3524 var id = createPlaylistID(0, uri);
3525 var master = {
3526 mediaGroups: {
3527 'AUDIO': {},
3528 'VIDEO': {},
3529 'CLOSED-CAPTIONS': {},
3530 'SUBTITLES': {}
3531 },
3532 uri: window.location.href,
3533 resolvedUri: window.location.href,
3534 playlists: [{
3535 uri: uri,
3536 id: id,
3537 resolvedUri: uri,
3538 // m3u8-parser does not attach an attributes property to media playlists so make
3539 // sure that the property is attached to avoid undefined reference errors
3540 attributes: {}
3541 }]
3542 }; // set up ID reference
3543
3544 master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility
3545
3546 master.playlists[uri] = master.playlists[0];
3547 return master;
3548 };
3549 /**
3550 * Does an in-place update of the master manifest to add updated playlist URI references
3551 * as well as other properties needed by VHS that aren't included by the parser.
3552 *
3553 * @param {Object} master
3554 * Master manifest object
3555 * @param {string} uri
3556 * The source URI
3557 */
3558
3559 var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
3560 master.uri = uri;
3561
3562 for (var i = 0; i < master.playlists.length; i++) {
3563 if (!master.playlists[i].uri) {
3564 // Set up phony URIs for the playlists since playlists are referenced by their URIs
3565 // throughout VHS, but some formats (e.g., DASH) don't have external URIs
3566 // TODO: consider adding dummy URIs in mpd-parser
3567 var phonyUri = "placeholder-uri-" + i;
3568 master.playlists[i].uri = phonyUri;
3569 }
3570 }
3571
3572 var audioOnlyMaster = isAudioOnly(master);
3573 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
3574 var groupId = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey; // add a playlist array under properties
3575
3576 if (!properties.playlists || !properties.playlists.length) {
3577 // If the manifest is audio only and this media group does not have a uri, check
3578 // if the media group is located in the main list of playlists. If it is, don't add
3579 // placeholder properties as it shouldn't be considered an alternate audio track.
3580 if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
3581 for (var _i = 0; _i < master.playlists.length; _i++) {
3582 var p = master.playlists[_i];
3583
3584 if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
3585 return;
3586 }
3587 }
3588 }
3589
3590 properties.playlists = [_extends_1({}, properties)];
3591 }
3592
3593 properties.playlists.forEach(function (p, i) {
3594 var id = createPlaylistID(i, groupId);
3595
3596 if (p.uri) {
3597 p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
3598 } else {
3599 // DEPRECATED, this has been added to prevent a breaking change.
3600 // previously we only ever had a single media group playlist, so
3601 // we mark the first playlist uri without prepending the index as we used to
3602 // ideally we would do all of the playlists the same way.
3603 p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
3604 // the placeholder again
3605
3606 p.resolvedUri = p.uri;
3607 }
3608
3609 p.id = p.id || id; // add an empty attributes object, all playlists are
3610 // expected to have this.
3611
3612 p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)
3613
3614 master.playlists[p.id] = p;
3615 master.playlists[p.uri] = p;
3616 });
3617 });
3618 setupMediaPlaylists(master);
3619 resolveMediaGroupUris(master);
3620 };
3621
3622 var mergeOptions$2 = videojs__default["default"].mergeOptions,
3623 EventTarget$1 = videojs__default["default"].EventTarget;
3624
3625 var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
3626 if (media.endList || !media.serverControl) {
3627 return uri;
3628 }
3629
3630 var parameters = {};
3631
3632 if (media.serverControl.canBlockReload) {
3633 var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.
3634
3635 var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
3636 // that we are going to request a part of that preload segment.
3637 // the logic below is used to determine that.
3638
3639 if (preloadSegment) {
3640 var parts = preloadSegment.parts || []; // _HLS_part is a zero based index
3641
3642 var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to the index of
3643 // the last existing part, then we know we had part preload hints
3644 // and we need to add the _HLS_part= query
3645
3646 if (nextPart > -1 && nextPart !== parts.length - 1) {
3647 // add existing parts to our preload hints
3648 // eslint-disable-next-line
3649 parameters._HLS_part = nextPart;
3650 } // this if statement makes sure that we request the msn
3651 // of the preload segment if:
3652 // 1. the preload segment had parts (and was not yet a full segment)
3653 // but was added to our segments array
3654 // 2. the preload segment had preload hints for parts that are not in
3655 // the manifest yet.
3656 // in all other cases we want the segment after the preload segment
3657 // which will be given by using media.segments.length because it is 1 based
3658 // rather than 0 based.
3659
3660
3661 if (nextPart > -1 || parts.length) {
3662 nextMSN--;
3663 }
3664 } // add _HLS_msn= in front of any _HLS_part query
3665 // eslint-disable-next-line
3666
3667
3668 parameters._HLS_msn = nextMSN;
3669 }
3670
3671 if (media.serverControl && media.serverControl.canSkipUntil) {
3672 // add _HLS_skip= in front of all other queries.
3673 // eslint-disable-next-line
3674 parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
3675 }
3676
3677 if (Object.keys(parameters).length) {
3678 var parsedUri = new window.URL(uri);
3679 ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
3680 if (!parameters.hasOwnProperty(name)) {
3681 return;
3682 }
3683
3684 parsedUri.searchParams.set(name, parameters[name]);
3685 });
3686 uri = parsedUri.toString();
3687 }
3688
3689 return uri;
3690 };
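  /*
   * Sketch (hypothetical live media playlist): with canBlockReload, a
   * mediaSequence of 100 and six full segments (no preload segment), the
   * next blocking reload asks for MSN 100 + 6:
   *
   *   addLLHLSQueryDirectives('https://example.com/media.m3u8', media);
   *   // => 'https://example.com/media.m3u8?_HLS_msn=106'
   */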
3691 /**
3692 * Returns a new segment object with properties and
3693 * the parts array merged.
3694 *
3695 * @param {Object} a the old segment
3696 * @param {Object} b the new segment
3697 *
3698 * @return {Object} the merged segment
3699 */
3700
3701
3702 var updateSegment = function updateSegment(a, b) {
3703 if (!a) {
3704 return b;
3705 }
3706
3707 var result = mergeOptions$2(a, b); // if only the old segment has preload hints
3708 // and the new one does not, remove preload hints.
3709
3710 if (a.preloadHints && !b.preloadHints) {
3711 delete result.preloadHints;
3712 } // if only the old segment has parts
3713 // then the parts are no longer valid
3714
3715
3716 if (a.parts && !b.parts) {
3717 delete result.parts; // if both segments have parts
3718 // copy part properties from the old segment
3719 // to the new one.
3720 } else if (a.parts && b.parts) {
3721 for (var i = 0; i < b.parts.length; i++) {
3722 if (a.parts && a.parts[i]) {
3723 result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
3724 }
3725 }
3726 } // set skipped to false for segments that have
3727 // had information merged from the old segment.
3728
3729
3730 if (!a.skipped && b.skipped) {
3731 result.skipped = false;
3732 } // set preload to false for segments that have
3733 // had information added in the new segment.
3734
3735
3736 if (a.preload && !b.preload) {
3737 result.preload = false;
3738 }
3739
3740 return result;
3741 };
3742 /**
3743 * Returns a new array of segments that is the result of merging
3744 * properties from an older list of segments onto an updated
3745 * list. No properties on the updated playlist will be overwritten.
3746 *
3747 * @param {Array} original the outdated list of segments
3748 * @param {Array} update the updated list of segments
3749 * @param {number=} offset the index of the first update
3750 * segment in the original segment list. For non-live playlists,
3751 * this should always be zero and does not need to be
3752 * specified. For live playlists, it should be the difference
3753 * between the media sequence numbers in the original and updated
3754 * playlists.
3755 * @return {Array} a list of merged segment objects
3756 */
3757
3758 var updateSegments = function updateSegments(original, update, offset) {
3759 var oldSegments = original.slice();
3760 var newSegments = update.slice();
3761 offset = offset || 0;
3762 var result = [];
3763 var currentMap;
3764
3765 for (var newIndex = 0; newIndex < newSegments.length; newIndex++) {
3766 var oldSegment = oldSegments[newIndex + offset];
3767 var newSegment = newSegments[newIndex];
3768
3769 if (oldSegment) {
3770 currentMap = oldSegment.map || currentMap;
3771 result.push(updateSegment(oldSegment, newSegment));
3772 } else {
3773 // carry over map to new segment if it is missing
3774 if (currentMap && !newSegment.map) {
3775 newSegment.map = currentMap;
3776 }
3777
3778 result.push(newSegment);
3779 }
3780 }
3781
3782 return result;
3783 };
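// Usage sketch (hypothetical segments): on a live refresh where the media
// sequence number advanced by one, the offset lines the two lists up:
//
//   updateSegments(
//     [{ uri: 's0.ts' }, { uri: 's1.ts' }], // old playlist, mediaSequence 0
//     [{ uri: 's1.ts' }, { uri: 's2.ts' }], // new playlist, mediaSequence 1
//     1
//   );
//   // => ['s1.ts' with old and new properties merged, 's2.ts' as-is]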
3784 var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
3785 // preloadSegment will not have a uri at all
3786 // as the segment isn't actually in the manifest yet, only parts
3787 if (!segment.resolvedUri && segment.uri) {
3788 segment.resolvedUri = resolveUrl(baseUri, segment.uri);
3789 }
3790
3791 if (segment.key && !segment.key.resolvedUri) {
3792 segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
3793 }
3794
3795 if (segment.map && !segment.map.resolvedUri) {
3796 segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
3797 }
3798
3799 if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
3800 segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
3801 }
3802
3803 if (segment.parts && segment.parts.length) {
3804 segment.parts.forEach(function (p) {
3805 if (p.resolvedUri) {
3806 return;
3807 }
3808
3809 p.resolvedUri = resolveUrl(baseUri, p.uri);
3810 });
3811 }
3812
3813 if (segment.preloadHints && segment.preloadHints.length) {
3814 segment.preloadHints.forEach(function (p) {
3815 if (p.resolvedUri) {
3816 return;
3817 }
3818
3819 p.resolvedUri = resolveUrl(baseUri, p.uri);
3820 });
3821 }
3822 };
3823
3824 var getAllSegments = function getAllSegments(media) {
3825 var segments = media.segments || [];
3826 var preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently
3827 // a usable segment; only include a preloadSegment that has
3828 // parts.
3829
3830 if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
3831 // if preloadHints has a MAP that means that the
3832 // init segment is going to change. We cannot use any of the parts
3833 // from this preload segment.
3834 if (preloadSegment.preloadHints) {
3835 for (var i = 0; i < preloadSegment.preloadHints.length; i++) {
3836 if (preloadSegment.preloadHints[i].type === 'MAP') {
3837 return segments;
3838 }
3839 }
3840 } // set the duration for our preload segment to target duration.
3841
3842
3843 preloadSegment.duration = media.targetDuration;
3844 preloadSegment.preload = true;
3845 segments.push(preloadSegment);
3846 }
3847
3848 return segments;
3849 }; // consider the playlist unchanged if the playlist object is the same or
3850 // the number of segments is equal, the media sequence number is unchanged,
3851 // and the endList flag is unchanged
3852
3853
3854 var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
3855 return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence;
3856 };
3857 /**
3858 * Returns a new master playlist that is the result of merging an
3859 * updated media playlist into the original version. If the
3860 * updated media playlist does not match any of the playlist
3861 * entries in the original master playlist, null is returned.
3862 *
3863 * @param {Object} master a parsed master M3U8 object
3864 * @param {Object} media a parsed media M3U8 object
3865 * @return {Object} a new object that represents the original
3866 * master playlist with the updated media playlist merged in, or
3867 * null if the merge produced no change.
3868 */
3869
3870 var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
3871 if (unchangedCheck === void 0) {
3872 unchangedCheck = isPlaylistUnchanged;
3873 }
3874
3875 var result = mergeOptions$2(master, {});
3876 var oldMedia = result.playlists[newMedia.id];
3877
3878 if (!oldMedia) {
3879 return null;
3880 }
3881
3882 if (unchangedCheck(oldMedia, newMedia)) {
3883 return null;
3884 }
3885
3886 newMedia.segments = getAllSegments(newMedia);
3887 var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment
3888
3889 if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
3890 delete mergedPlaylist.preloadSegment;
3891 } // if the update could overlap existing segment information, merge the two segment lists
3892
3893
3894 if (oldMedia.segments) {
3895 if (newMedia.skip) {
3896 newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
3897 // old properties into the new segments
3898
3899 for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
3900 newMedia.segments.unshift({
3901 skipped: true
3902 });
3903 }
3904 }
3905
3906 mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
3907 } // resolve any segment URIs to prevent us from having to do it later
3908
3909
3910 mergedPlaylist.segments.forEach(function (segment) {
3911 resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
3912 }); // TODO Right now in the playlists array there are two references to each playlist, one
3913 // that is referenced by index, and one by URI. The index reference may no longer be
3914 // necessary.
3915
3916 for (var _i = 0; _i < result.playlists.length; _i++) {
3917 if (result.playlists[_i].id === newMedia.id) {
3918 result.playlists[_i] = mergedPlaylist;
3919 }
3920 }
3921
3922 result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility
3923
3924 result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.
3925
3926 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
3927 if (!properties.playlists) {
3928 return;
3929 }
3930
3931 for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
3932 if (newMedia.id === properties.playlists[_i2].id) {
3933 properties.playlists[_i2] = newMedia;
3934 }
3935 }
3936 });
3937 return result;
3938 };
3939 /**
3940 * Calculates the time to wait before refreshing a live playlist
3941 *
3942 * @param {Object} media
3943 * The current media
3944 * @param {boolean} update
3945 * True if there were any updates from the last refresh, false otherwise
3946 * @return {number}
3947 * The time in ms to wait before refreshing the live playlist
3948 */
3949
3950 var refreshDelay = function refreshDelay(media, update) {
3951 var segments = media.segments || [];
3952 var lastSegment = segments[segments.length - 1];
3953 var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
3954 var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
3955
3956 if (update && lastDuration) {
3957 return lastDuration * 1000;
3958 } // if the playlist is unchanged since the last reload or last segment duration
3959 // cannot be determined, try again after half the target duration
3960
3961
3962 return (media.partTargetDuration || media.targetDuration || 10) * 500;
3963 };
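// Usage sketch: when the last refresh produced an update, wait one last
// (part) segment duration; otherwise retry after half the target duration.
//
//   refreshDelay({ segments: [{ duration: 6 }], targetDuration: 6 }, true);  // => 6000
//   refreshDelay({ segments: [{ duration: 6 }], targetDuration: 6 }, false); // => 3000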
3964 /**
3965 * Load a playlist from a remote location
3966 *
3967 * @class PlaylistLoader
3968 * @extends Stream
3969 * @param {string|Object} src url or object of manifest
3970 * @param {boolean} withCredentials the withCredentials xhr option
3971 * @class
3972 */
3973
3974 var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
3975 inheritsLoose(PlaylistLoader, _EventTarget);
3976
3977 function PlaylistLoader(src, vhs, options) {
3978 var _this;
3979
3980 if (options === void 0) {
3981 options = {};
3982 }
3983
3984 _this = _EventTarget.call(this) || this;
3985
3986 if (!src) {
3987 throw new Error('A non-empty playlist URL or object is required');
3988 }
3989
3990 _this.logger_ = logger('PlaylistLoader');
3991 var _options = options,
3992 _options$withCredenti = _options.withCredentials,
3993 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
3994 _options$handleManife = _options.handleManifestRedirects,
3995 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
3996 _this.src = src;
3997 _this.vhs_ = vhs;
3998 _this.withCredentials = withCredentials;
3999 _this.handleManifestRedirects = handleManifestRedirects;
4000 var vhsOptions = vhs.options_;
4001 _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
4002 _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
4003 _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // force experimentalLLHLS for IE 11
4004
4005 if (videojs__default["default"].browser.IE_VERSION) {
4006 _this.experimentalLLHLS = false;
4007 } // initialize the loader state
4008
4009
4010 _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout
4011
4012 _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(assertThisInitialized(_this));
4013
4014 _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);
4015
4016 return _this;
4017 }
4018
4019 var _proto = PlaylistLoader.prototype;
4020
4021 _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
4022 var _this2 = this;
4023
4024 if (this.state !== 'HAVE_METADATA') {
4025 // only refresh the media playlist if no other activity is going on
4026 return;
4027 }
4028
4029 var media = this.media();
4030 var uri = resolveUrl(this.master.uri, media.uri);
4031
4032 if (this.experimentalLLHLS) {
4033 uri = addLLHLSQueryDirectives(uri, media);
4034 }
4035
4036 this.state = 'HAVE_CURRENT_METADATA';
4037 this.request = this.vhs_.xhr({
4038 uri: uri,
4039 withCredentials: this.withCredentials
4040 }, function (error, req) {
4041 // disposed
4042 if (!_this2.request) {
4043 return;
4044 }
4045
4046 if (error) {
4047 return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
4048 }
4049
4050 _this2.haveMetadata({
4051 playlistString: _this2.request.responseText,
4052 url: _this2.media().uri,
4053 id: _this2.media().id
4054 });
4055 });
4056 };
4057
4058 _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
4059 var uri = playlist.uri,
4060 id = playlist.id; // any in-flight request is now finished
4061
4062 this.request = null;
4063
4064 if (startingState) {
4065 this.state = startingState;
4066 }
4067
4068 this.error = {
4069 playlist: this.master.playlists[id],
4070 status: xhr.status,
4071 message: "HLS playlist request error at URL: " + uri + ".",
4072 responseText: xhr.responseText,
4073 code: xhr.status >= 500 ? 4 : 2
4074 };
4075 this.trigger('error');
4076 };
4077
4078 _proto.parseManifest_ = function parseManifest_(_ref) {
4079 var _this3 = this;
4080
4081 var url = _ref.url,
4082 manifestString = _ref.manifestString;
4083 return parseManifest({
4084 onwarn: function onwarn(_ref2) {
4085 var message = _ref2.message;
4086 return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
4087 },
4088 oninfo: function oninfo(_ref3) {
4089 var message = _ref3.message;
4090 return _this3.logger_("m3u8-parser info for " + url + ": " + message);
4091 },
4092 manifestString: manifestString,
4093 customTagParsers: this.customTagParsers,
4094 customTagMappers: this.customTagMappers,
4095 experimentalLLHLS: this.experimentalLLHLS
4096 });
4097 }
4098 /**
4099 * Update the playlist loader's state in response to a new or updated playlist.
4100 *
4101 * @param {string} [playlistString]
4102 * Playlist string (if playlistObject is not provided)
4103 * @param {Object} [playlistObject]
4104 * Playlist object (if playlistString is not provided)
4105 * @param {string} url
4106 * URL of playlist
4107 * @param {string} id
4108 * ID to use for playlist
4109 */
4110 ;
4111
4112 _proto.haveMetadata = function haveMetadata(_ref4) {
4113 var playlistString = _ref4.playlistString,
4114 playlistObject = _ref4.playlistObject,
4115 url = _ref4.url,
4116 id = _ref4.id;
4117 // any in-flight request is now finished
4118 this.request = null;
4119 this.state = 'HAVE_METADATA';
4120 var playlist = playlistObject || this.parseManifest_({
4121 url: url,
4122 manifestString: playlistString
4123 });
4124 playlist.lastRequest = Date.now();
4125 setupMediaPlaylist({
4126 playlist: playlist,
4127 uri: url,
4128 id: id
4129 }); // merge this playlist into the master
4130
4131 var update = updateMaster$1(this.master, playlist);
4132 this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
4133
4134 if (update) {
4135 this.master = update;
4136 this.media_ = this.master.playlists[id];
4137 } else {
4138 this.trigger('playlistunchanged');
4139 }
4140
4141 this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
4142 this.trigger('loadedplaylist');
4143 }
4144 /**
4145 * Abort any outstanding work and clean up.
4146 */
4147 ;
4148
4149 _proto.dispose = function dispose() {
4150 this.trigger('dispose');
4151 this.stopRequest();
4152 window.clearTimeout(this.mediaUpdateTimeout);
4153 window.clearTimeout(this.finalRenditionTimeout);
4154 this.off();
4155 };
4156
4157 _proto.stopRequest = function stopRequest() {
4158 if (this.request) {
4159 var oldRequest = this.request;
4160 this.request = null;
4161 oldRequest.onreadystatechange = null;
4162 oldRequest.abort();
4163 }
4164 }
4165 /**
4166 * When called without any arguments, returns the currently
4167 * active media playlist. When called with a single argument,
4168 * triggers the playlist loader to asynchronously switch to the
4169 * specified media playlist. Calling this method while the
4170 * loader is in the HAVE_NOTHING state causes an error to be emitted
4171 * but otherwise has no effect.
4172 *
4173 * @param {Object=} playlist the parsed media playlist
4174 * object to switch to
4175 * @param {boolean=} shouldDelay whether we should delay the request by half target duration
4176 *
4177 * @return {Playlist} the current loaded media
4178 */
4179 ;
4180
4181 _proto.media = function media(playlist, shouldDelay) {
4182 var _this4 = this;
4183
4184 // getter
4185 if (!playlist) {
4186 return this.media_;
4187 } // setter
4188
4189
4190 if (this.state === 'HAVE_NOTHING') {
4191 throw new Error('Cannot switch media playlist from ' + this.state);
4192 } // find the playlist object if the target playlist has been
4193 // specified by URI
4194
4195
4196 if (typeof playlist === 'string') {
4197 if (!this.master.playlists[playlist]) {
4198 throw new Error('Unknown playlist URI: ' + playlist);
4199 }
4200
4201 playlist = this.master.playlists[playlist];
4202 }
4203
4204 window.clearTimeout(this.finalRenditionTimeout);
4205
4206 if (shouldDelay) {
4207 var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
4208 this.finalRenditionTimeout = window.setTimeout(this.media.bind(this, playlist, false), delay);
4209 return;
4210 }
4211
4212 var startingState = this.state;
4213 var mediaChange = !this.media_ || playlist.id !== this.media_.id;
4214 var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately
4215
4216 if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
4217 // media playlist or, for the case of demuxed audio, a resolved audio media group)
4218 playlist.endList && playlist.segments.length) {
4219 // abort outstanding playlist requests
4220 if (this.request) {
4221 this.request.onreadystatechange = null;
4222 this.request.abort();
4223 this.request = null;
4224 }
4225
4226 this.state = 'HAVE_METADATA';
4227 this.media_ = playlist; // trigger media change if the active media has been updated
4228
4229 if (mediaChange) {
4230 this.trigger('mediachanging');
4231
4232 if (startingState === 'HAVE_MASTER') {
4233 // The initial playlist was a master manifest, and the first media selected was
4234 // also provided (in the form of a resolved playlist object) as part of the
4235 // source object (rather than just a URL). Therefore, since the media playlist
4236 // doesn't need to be requested, loadedmetadata won't trigger as part of the
4237 // normal flow, and needs an explicit trigger here.
4238 this.trigger('loadedmetadata');
4239 } else {
4240 this.trigger('mediachange');
4241 }
4242 }
4243
4244 return;
4245 } // We update/set the timeout here so that live playlists
4246 // that are not a media change will "start" the loader as expected.
4247 // We expect that this function will start the media update timeout
4248 // cycle again. This also prevents a playlist switch failure from
4249 // causing us to stall during live.
4250
4251
4252 this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op
4253
4254 if (!mediaChange) {
4255 return;
4256 }
4257
4258 this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request
4259
4260 if (this.request) {
4261 if (playlist.resolvedUri === this.request.url) {
4262 // requesting to switch to the same playlist multiple times
4263 // has no effect after the first
4264 return;
4265 }
4266
4267 this.request.onreadystatechange = null;
4268 this.request.abort();
4269 this.request = null;
4270 } // request the new playlist
4271
4272
4273 if (this.media_) {
4274 this.trigger('mediachanging');
4275 }
4276
4277 this.request = this.vhs_.xhr({
4278 uri: playlist.resolvedUri,
4279 withCredentials: this.withCredentials
4280 }, function (error, req) {
4281 // disposed
4282 if (!_this4.request) {
4283 return;
4284 }
4285
4286 playlist.lastRequest = Date.now();
4287 playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);
4288
4289 if (error) {
4290 return _this4.playlistRequestError(_this4.request, playlist, startingState);
4291 }
4292
4293 _this4.haveMetadata({
4294 playlistString: req.responseText,
4295 url: playlist.uri,
4296 id: playlist.id
4297 }); // fire loadedmetadata the first time a media playlist is loaded
4298
4299
4300 if (startingState === 'HAVE_MASTER') {
4301 _this4.trigger('loadedmetadata');
4302 } else {
4303 _this4.trigger('mediachange');
4304 }
4305 });
4306 }
4307 /**
4308 * pause loading of the playlist
4309 */
4310 ;
4311
4312 _proto.pause = function pause() {
4313 if (this.mediaUpdateTimeout) {
4314 window.clearTimeout(this.mediaUpdateTimeout);
4315 this.mediaUpdateTimeout = null;
4316 }
4317
4318 this.stopRequest();
4319
4320 if (this.state === 'HAVE_NOTHING') {
4321 // If we pause the loader before any data has been retrieved, it's as if we never
4322 // started, so reset to an unstarted state.
4323 this.started = false;
4324 } // Need to restore state now that no activity is happening
4325
4326
4327 if (this.state === 'SWITCHING_MEDIA') {
4328 // if the loader was in the process of switching media, it should either return to
4329 // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
4330 // playlist yet. This is determined by the existence of loader.media_
4331 if (this.media_) {
4332 this.state = 'HAVE_METADATA';
4333 } else {
4334 this.state = 'HAVE_MASTER';
4335 }
4336 } else if (this.state === 'HAVE_CURRENT_METADATA') {
4337 this.state = 'HAVE_METADATA';
4338 }
4339 }
4340 /**
4341 * start loading of the playlist
4342 */
4343 ;
4344
4345 _proto.load = function load(shouldDelay) {
4346 var _this5 = this;
4347
4348 if (this.mediaUpdateTimeout) {
4349 window.clearTimeout(this.mediaUpdateTimeout);
4350 this.mediaUpdateTimeout = null;
4351 }
4352
4353 var media = this.media();
4354
4355 if (shouldDelay) {
4356 var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
4357 this.mediaUpdateTimeout = window.setTimeout(function () {
4358 _this5.mediaUpdateTimeout = null;
4359
4360 _this5.load();
4361 }, delay);
4362 return;
4363 }
4364
4365 if (!this.started) {
4366 this.start();
4367 return;
4368 }
4369
4370 if (media && !media.endList) {
4371 this.trigger('mediaupdatetimeout');
4372 } else {
4373 this.trigger('loadedplaylist');
4374 }
4375 };
4376
4377 _proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
4378 var _this6 = this;
4379
4380 if (this.mediaUpdateTimeout) {
4381 window.clearTimeout(this.mediaUpdateTimeout);
4382 this.mediaUpdateTimeout = null;
4383 } // we only use mediaupdatetimeout for live playlists.
4384
4385
4386 if (!this.media() || this.media().endList) {
4387 return;
4388 }
4389
4390 this.mediaUpdateTimeout = window.setTimeout(function () {
4391 _this6.mediaUpdateTimeout = null;
4392
4393 _this6.trigger('mediaupdatetimeout');
4394
4395 _this6.updateMediaUpdateTimeout_(delay);
4396 }, delay);
4397 }
4398 /**
4399 * start loading of the playlist
4400 */
4401 ;
4402
4403 _proto.start = function start() {
4404 var _this7 = this;
4405
4406 this.started = true;
4407
4408 if (typeof this.src === 'object') {
4409 // in the case of an entirely constructed manifest object (meaning there's no actual
4410 // manifest on a server), default the uri to the page's href
4411 if (!this.src.uri) {
4412 this.src.uri = window.location.href;
4413 } // resolvedUri is added on internally after the initial request. Since there's no
4414 // request for pre-resolved manifests, add on resolvedUri here.
4415
4416
4417 this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
4418 // request can be skipped (since the top level of the manifest, at a minimum, is
4419 // already available as a parsed manifest object). However, if the manifest object
4420 // represents a master playlist, some media playlists may need to be resolved before
4421 // the starting segment list is available. Therefore, go directly to setup of the
4422 // initial playlist, and let the normal flow continue from there.
4423 //
4424 // Note that the call to setup is asynchronous, as other sections of VHS may assume
4425 // that the first request is asynchronous.
4426
4427 setTimeout(function () {
4428 _this7.setupInitialPlaylist(_this7.src);
4429 }, 0);
4430 return;
4431 } // request the specified URL
4432
4433
4434 this.request = this.vhs_.xhr({
4435 uri: this.src,
4436 withCredentials: this.withCredentials
4437 }, function (error, req) {
4438 // disposed
4439 if (!_this7.request) {
4440 return;
4441 } // clear the loader's request reference
4442
4443
4444 _this7.request = null;
4445
4446 if (error) {
4447 _this7.error = {
4448 status: req.status,
4449 message: "HLS playlist request error at URL: " + _this7.src + ".",
4450 responseText: req.responseText,
4451 // MEDIA_ERR_NETWORK
4452 code: 2
4453 };
4454
4455 if (_this7.state === 'HAVE_NOTHING') {
4456 _this7.started = false;
4457 }
4458
4459 return _this7.trigger('error');
4460 }
4461
4462 _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);
4463
4464 var manifest = _this7.parseManifest_({
4465 manifestString: req.responseText,
4466 url: _this7.src
4467 });
4468
4469 _this7.setupInitialPlaylist(manifest);
4470 });
4471 };
4472
4473 _proto.srcUri = function srcUri() {
4474 return typeof this.src === 'string' ? this.src : this.src.uri;
4475 }
4476 /**
4477 * Given a manifest object that's either a master or media playlist, trigger the proper
4478 * events and set the state of the playlist loader.
4479 *
4480 * If the manifest object represents a master playlist, `loadedplaylist` will be
4481 * triggered to allow listeners to select a playlist. If none is selected, the loader
4482 * will default to the first one in the playlists array.
4483 *
4484 * If the manifest object represents a media playlist, `loadedplaylist` will be
4485 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
4486 *
4487 * In the case of a media playlist, a master playlist object wrapper with one playlist
4488 * will be created so that all logic can handle playlists in the same fashion (as an
4489 * assumed manifest object schema).
4490 *
4491 * @param {Object} manifest
4492 * The parsed manifest object
4493 */
4494 ;
4495
4496 _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
4497 this.state = 'HAVE_MASTER';
4498
4499 if (manifest.playlists) {
4500 this.master = manifest;
4501 addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
4502 // then resolve segment URIs in advance, since that normally happens after a
4503 // playlist request, which may never occur for an already-resolved playlist.
4504
4505 manifest.playlists.forEach(function (playlist) {
4506 playlist.segments = getAllSegments(playlist);
4507 playlist.segments.forEach(function (segment) {
4508 resolveSegmentUris(segment, playlist.resolvedUri);
4509 });
4510 });
4511 this.trigger('loadedplaylist');
4512
4513 if (!this.request) {
4514 // no media playlist was specifically selected so start
4515 // from the first listed one
4516 this.media(this.master.playlists[0]);
4517 }
4518
4519 return;
4520 } // In order to support media playlists passed in as vhs-json, the case where the uri
4521 // is not provided as part of the manifest should be considered, and an appropriate
4522 // default used.
4523
4524
4525 var uri = this.srcUri() || window.location.href;
4526 this.master = masterForMedia(manifest, uri);
4527 this.haveMetadata({
4528 playlistObject: manifest,
4529 url: uri,
4530 id: this.master.playlists[0].id
4531 });
4532 this.trigger('loadedmetadata');
4533 };
4534
4535 return PlaylistLoader;
4536 }(EventTarget$1);
4537
4538 /**
4539 * @file xhr.js
4540 */
4541 var videojsXHR = videojs__default["default"].xhr,
4542 mergeOptions$1 = videojs__default["default"].mergeOptions;
4543
4544 var callbackWrapper = function callbackWrapper(request, error, response, callback) {
4545 var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
4546
4547 if (!error && reqResponse) {
4548 request.responseTime = Date.now();
4549 request.roundTripTime = request.responseTime - request.requestTime;
4550 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
4551
4552 if (!request.bandwidth) {
4553 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
4554 }
4555 }
4556
4557 if (response.headers) {
4558 request.responseHeaders = response.headers;
4559 } // videojs.xhr now uses a specific code on the error
4560 // object to signal that a request has timed out instead
4561 // of setting a boolean on the request object
4562
4563
4564 if (error && error.code === 'ETIMEDOUT') {
4565 request.timedout = true;
4566 } // videojs.xhr no longer considers status codes outside of 200 and 0
4567 // (for file uris) to be errors, but the old XHR did, so emulate that
4568 // behavior. Status 206 may be used in response to byterange requests.
4569
4570
4571 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
4572 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
4573 }
4574
4575 callback(error, request);
4576 };
4577
4578 var xhrFactory = function xhrFactory() {
4579 var xhr = function XhrFunction(options, callback) {
4580 // Add a default timeout
4581 options = mergeOptions$1({
4582 timeout: 45e3
4583 }, options); // Allow an optional user-specified function to modify the option
4584 // object before we construct the xhr request
4585
4586 var beforeRequest = XhrFunction.beforeRequest || videojs__default["default"].Vhs.xhr.beforeRequest;
4587
4588 if (beforeRequest && typeof beforeRequest === 'function') {
4589 var newOptions = beforeRequest(options);
4590
4591 if (newOptions) {
4592 options = newOptions;
4593 }
4594 } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
4595 // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
4596
4597
4598 var xhrMethod = videojs__default["default"].Vhs.xhr.original === true ? videojsXHR : videojs__default["default"].Vhs.xhr;
4599 var request = xhrMethod(options, function (error, response) {
4600 return callbackWrapper(request, error, response, callback);
4601 });
4602 var originalAbort = request.abort;
4603
4604 request.abort = function () {
4605 request.aborted = true;
4606 return originalAbort.apply(request, arguments);
4607 };
4608
4609 request.uri = options.uri;
4610 request.requestTime = Date.now();
4611 return request;
4612 };
4613
4614 xhr.original = true;
4615 return xhr;
4616 };
4617 /**
4618 * Turns segment byterange into a string suitable for use in
4619 * HTTP Range requests
4620 *
4621 * @param {Object} byterange - an object with two values defining the start and end
4622 * of a byte-range
4623 */
4624
4625
4626 var byterangeStr = function byterangeStr(byterange) {
4627 // `byterangeEnd` is one less than `offset + length` because the HTTP range
4628 // header uses inclusive ranges
4629 var byterangeEnd = byterange.offset + byterange.length - 1;
4630 var byterangeStart = byterange.offset;
4631 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
4632 };
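// Usage sketch: the end of the range is inclusive, so offset 500 with
// length 100 covers bytes 500 through 599:
//
//   byterangeStr({ offset: 500, length: 100 }); // => 'bytes=500-599'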
4633 /**
4634 * Defines headers for use in the xhr request for a particular segment.
4635 *
4636 * @param {Object} segment - a simplified copy of the segmentInfo object
4637 * from SegmentLoader
4638 */
4639
4640
4641 var segmentXhrHeaders = function segmentXhrHeaders(segment) {
4642 var headers = {};
4643
4644 if (segment.byterange) {
4645 headers.Range = byterangeStr(segment.byterange);
4646 }
4647
4648 return headers;
4649 };
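// Usage sketch (hypothetical segment): only byterange segments get a
// Range header; all other segments yield an empty headers object.
//
//   segmentXhrHeaders({ byterange: { offset: 500, length: 100 } });
//   // => { Range: 'bytes=500-599' }
//   segmentXhrHeaders({}); // => {}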
4650
4651 var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
4652 var DASH_REGEX = /^application\/dash\+xml/i;
4653 /**
4654 * Returns a string that describes the type of source based on a video source object's
4655 * media type.
4656 *
4657 * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
4658 *
4659 * @param {string} type
4660 * Video source object media type
4661 * @return {('hls'|'dash'|'vhs-json'|null)}
4662 * VHS source type string
4663 */
4664
4665 var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
4666 if (MPEGURL_REGEX.test(type)) {
4667 return 'hls';
4668 }
4669
4670 if (DASH_REGEX.test(type)) {
4671 return 'dash';
4672 } // Denotes the special case of a manifest object passed to http-streaming instead of a
4673 // source URL.
4674 //
4675 // See https://en.wikipedia.org/wiki/Media_type for details on specifying media types.
4676 //
4677 // In this case, vnd stands for vendor, video.js for the organization, VHS for this
4678 // project, and the +json suffix identifies the structure of the media type.
4679
4680
4681 if (type === 'application/vnd.videojs.vhs+json') {
4682 return 'vhs-json';
4683 }
4684
4685 return null;
4686 };
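// Usage sketch:
//
//   simpleTypeFromSourceType('application/x-mpegURL');            // => 'hls'
//   simpleTypeFromSourceType('application/dash+xml');             // => 'dash'
//   simpleTypeFromSourceType('application/vnd.videojs.vhs+json'); // => 'vhs-json'
//   simpleTypeFromSourceType('video/mp4');                        // => null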
4687
4688 /**
4689 * @file bin-utils.js
4690 */
4691
4692 /**
4693 * convert a TimeRange to text
4694 *
4695 * @param {TimeRange} range the timerange to use for conversion
4696 * @param {number} i the iterator on the range to convert
4697 * @return {string} the range in string format
4698 */
4699 var textRange = function textRange(range, i) {
4700 return range.start(i) + '-' + range.end(i);
4701 };
4702 /**
4703 * format a number as hex string
4704 *
4705 * @param {number} e The number
4706 * @param {number} i the iterator
4707 * @return {string} the hex formatted number as a string
4708 */
4709
4710
4711 var formatHexString = function formatHexString(e, i) {
4712 var value = e.toString(16);
4713 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
4714 };
4715
4716 var formatAsciiString = function formatAsciiString(e) {
4717 if (e >= 0x20 && e < 0x7e) {
4718 return String.fromCharCode(e);
4719 }
4720
4721 return '.';
4722 };
4723 /**
4724 * Creates an object for sending to a web worker modifying properties that are TypedArrays
4725 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
4726 *
4727 * @param {Object} message
4728 * Object of properties and values to send to the web worker
4729 * @return {Object}
4730 * Modified message with TypedArray values expanded
4731 * @function createTransferableMessage
4732 */
4733
4734
4735 var createTransferableMessage = function createTransferableMessage(message) {
4736 var transferable = {};
4737 Object.keys(message).forEach(function (key) {
4738 var value = message[key];
4739
4740 if (ArrayBuffer.isView(value)) {
4741 transferable[key] = {
4742 bytes: value.buffer,
4743 byteOffset: value.byteOffset,
4744 byteLength: value.byteLength
4745 };
4746 } else {
4747 transferable[key] = value;
4748 }
4749 });
4750 return transferable;
4751 };
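// Usage sketch: TypedArray values are expanded so the underlying
// ArrayBuffer can be listed as a transferable in postMessage:
//
//   createTransferableMessage({ action: 'push', data: new Uint8Array([1, 2, 3]) });
//   // => { action: 'push',
//   //      data: { bytes: <ArrayBuffer>, byteOffset: 0, byteLength: 3 } }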
4752 /**
4753 * Returns a unique string identifier for a media initialization
4754 * segment.
4755 *
4756 * @param {Object} initSegment
4757 * the init segment object.
4758 *
4759 * @return {string} the generated init segment id
4760 */
4761
4762 var initSegmentId = function initSegmentId(initSegment) {
4763 var byterange = initSegment.byterange || {
4764 length: Infinity,
4765 offset: 0
4766 };
4767 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
4768 };
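// Usage sketch (hypothetical URI):
//
//   initSegmentId({ byterange: { length: 700, offset: 0 }, resolvedUri: 'https://example.com/init.mp4' });
//   // => '700,0,https://example.com/init.mp4'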
4769 /**
4770 * Returns a unique string identifier for a media segment key.
4771 *
4772 * @param {Object} key the encryption key
4773 * @return {string} the unique id for the media segment key.
4774 */
4775
4776 var segmentKeyId = function segmentKeyId(key) {
4777 return key.resolvedUri;
4778 };
4779 /**
4780 * utils to help dump binary data to the console
4781 *
4782 * @param {Array|TypedArray} data
4783 * data to dump to a string
4784 *
4785 * @return {string} the data as a hex string.
4786 */
4787
4788 var hexDump = function hexDump(data) {
4789 var bytes = Array.prototype.slice.call(data);
4790 var step = 16;
4791 var result = '';
4792 var hex;
4793 var ascii;
4794
4795 for (var j = 0; j < bytes.length / step; j++) {
4796 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
4797 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
4798 result += hex + ' ' + ascii + '\n';
4799 }
4800
4801 return result;
4802 };
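// Usage sketch: each 16-byte row is rendered as hex followed by its
// printable-ASCII form ('.' for non-printable bytes):
//
//   hexDump(new Uint8Array([0x47, 0x40, 0x11, 0x10]));
//   // => '4740 1110  G@..\n'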
4803 var tagDump = function tagDump(_ref) {
4804 var bytes = _ref.bytes;
4805 return hexDump(bytes);
4806 };
4807 var textRanges = function textRanges(ranges) {
4808 var result = '';
4809 var i;
4810
4811 for (i = 0; i < ranges.length; i++) {
4812 result += textRange(ranges, i) + ' ';
4813 }
4814
4815 return result;
4816 };
4817
4818 var utils = /*#__PURE__*/Object.freeze({
4819 __proto__: null,
4820 createTransferableMessage: createTransferableMessage,
4821 initSegmentId: initSegmentId,
4822 segmentKeyId: segmentKeyId,
4823 hexDump: hexDump,
4824 tagDump: tagDump,
4825 textRanges: textRanges
4826 });
4827
4828 // TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
4829 // 25% was arbitrarily chosen, and may need to be refined over time.
4830
4831 var SEGMENT_END_FUDGE_PERCENT = 0.25;
4832 /**
4833 * Converts a player time (any time that can be gotten/set from player.currentTime(),
4834 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
4835 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
4836 *
4837 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
4838 * point" (a point where we have a mapping from program time to player time, with player
4839 * time being the post transmux start of the segment).
4840 *
4841 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
4842 *
4843 * @param {number} playerTime the player time
4844 * @param {Object} segment the segment which contains the player time
4845 * @return {Date} program time
4846 */
4847
4848 var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
4849 if (!segment.dateTimeObject) {
4850 // Can't convert without an "anchor point" for the program time (i.e., a time that can
4851 // be used to map the start of a segment with a real world time).
4852 return null;
4853 }
4854
4855 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
4856 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
4857
4858 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
4859 var offsetFromSegmentStart = playerTime - startOfSegment;
4860 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
4861 };
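// Worked sketch (hypothetical timing values): with a dateTimeObject of
// 2021-01-01T00:00:00.000Z, transmuxedPresentationStart 10 and
// transmuxerPrependedSeconds 0.5, a playerTime of 12.5 lands 2 seconds
// past the anchor point:
//
//   startOfSegment = 10 + 0.5;             // 10.5
//   offsetFromSegmentStart = 12.5 - 10.5;  // 2
//   // => new Date('2021-01-01T00:00:02.000Z')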
4862 var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
4863 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
4864 };
4865 /**
4866 * Finds a segment that contains the time requested given as an ISO-8601 string. The
4867 * returned segment might be an estimate or an accurate match.
4868 *
4869 * @param {string} programTime The ISO-8601 programTime to find a match for
4870 * @param {Object} playlist A playlist object to search within
4871 */
4872
4873 var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
4874 // Assumptions:
4875 // - verifyProgramDateTimeTags has already been run
4876 // - live streams have been started
4877 var dateTimeObject;
4878
4879 try {
4880 dateTimeObject = new Date(programTime);
4881 } catch (e) {
4882 return null;
4883 }
4884
4885 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
4886 return null;
4887 }
4888
4889 var segment = playlist.segments[0];
4890
4891 if (dateTimeObject < segment.dateTimeObject) {
4892 // Requested time is before stream start.
4893 return null;
4894 }
4895
4896 for (var i = 0; i < playlist.segments.length - 1; i++) {
4897 segment = playlist.segments[i];
4898 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
4899
4900 if (dateTimeObject < nextSegmentStart) {
4901 break;
4902 }
4903 }
4904
4905 var lastSegment = playlist.segments[playlist.segments.length - 1];
4906 var lastSegmentStart = lastSegment.dateTimeObject;
4907 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
4908 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
4909
4910 if (dateTimeObject > lastSegmentEnd) {
4911 // Beyond the end of the stream, or our best guess of the end of the stream.
4912 return null;
4913 }
4914
4915 if (dateTimeObject > lastSegmentStart) {
4916 segment = lastSegment;
4917 }
4918
4919 return {
4920 segment: segment,
4921 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
4922 // Although, given that all segments have accurate date time objects, the segment
4923 // selected should be accurate, unless the video has been transmuxed at some point
4924 // (determined by the presence of the videoTimingInfo object), the segment's "player
4925 // time" (the start time in the player) can't be considered accurate.
4926 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
4927 };
4928 };
4929 /**
4930 * Finds a segment that contains the given player time (in seconds).
4931 *
4932 * @param {number} time The player time to find a match for
4933 * @param {Object} playlist A playlist object to search within
4934 */
4935
4936 var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
4937 // Assumptions:
4938 // - there will always be a segment.duration
4939 // - we can start from zero
4940 // - segments are in time order
4941 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
4942 return null;
4943 }
4944
4945 var segmentEnd = 0;
4946 var segment;
4947
4948 for (var i = 0; i < playlist.segments.length; i++) {
4949 segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
4950 // should contain the most accurate values we have for the segment's player times.
4951 //
4952 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
4953 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
4954 // calculate an end value.
4955
4956 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
4957
4958 if (time <= segmentEnd) {
4959 break;
4960 }
4961 }
4962
4963 var lastSegment = playlist.segments[playlist.segments.length - 1];
4964
4965 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
4966 // The time requested is beyond the stream end.
4967 return null;
4968 }
4969
4970 if (time > segmentEnd) {
4971 // The time is within or beyond the last segment.
4972 //
4973 // Check to see if the time is beyond a reasonable guess of the end of the stream.
4974 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
4975 // Technically, because the duration value is only an estimate, the time may still
4976 // exist in the last segment, however, there isn't enough information to make even
4977 // a reasonable estimate.
4978 return null;
4979 }
4980
4981 segment = lastSegment;
4982 }
4983
4984 return {
4985 segment: segment,
4986 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
4987 // Because videoTimingInfo is only set after transmux, it is the only way to get
4988 // accurate timing values.
4989 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
4990 };
4991 };
4992 /**
4993 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
4994 * If the offset returned is positive, the programTime occurs after the
4995 * comparisonTimestamp.
4996 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
4997 *
4998 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
4999 * @param {string} programTime The programTime as an ISO-8601 string
5000 * @return {number} offset
5001 */
5002
5003 var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
5004 var segmentDateTime;
5005 var programDateTime;
5006
5007 try {
5008 segmentDateTime = new Date(comparisonTimeStamp);
5009 programDateTime = new Date(programTime);
5010 } catch (e) {// TODO handle error
5011 }
5012
5013 var segmentTimeEpoch = segmentDateTime.getTime();
5014 var programTimeEpoch = programDateTime.getTime();
5015 return (programTimeEpoch - segmentTimeEpoch) / 1000;
5016 };
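// Usage sketch: a positive offset means the programTime occurs after the
// comparison timestamp.
//
//   getOffsetFromTimestamp('2021-01-01T00:00:00.000Z', '2021-01-01T00:00:30.000Z');
//   // => 30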
5017 /**
5018 * Checks that all segments in this playlist have programDateTime tags.
5019 *
5020 * @param {Object} playlist A playlist object
5021 */
5022
5023 var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
5024 if (!playlist.segments || playlist.segments.length === 0) {
5025 return false;
5026 }
5027
5028 for (var i = 0; i < playlist.segments.length; i++) {
5029 var segment = playlist.segments[i];
5030
5031 if (!segment.dateTimeObject) {
5032 return false;
5033 }
5034 }
5035
5036 return true;
5037 };
5038 /**
5039 * Returns the programTime of the media given a playlist and a playerTime.
5040 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
5041 * If the segments containing the time requested have not been buffered yet, an estimate
5042 * may be returned to the callback.
5043 *
5044 * @param {Object} args
5045 * @param {Object} args.playlist A playlist object to search within
5046 * @param {number} time A playerTime in seconds
5047 * @param {Function} callback(err, programTime)
5048 * @return {string} err.message A detailed error message
5049 * @return {Object} programTime
5050 * @return {number} programTime.mediaSeconds The streamTime in seconds
5051 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
5052 */
5053
5054 var getProgramTime = function getProgramTime(_ref) {
5055 var playlist = _ref.playlist,
5056 _ref$time = _ref.time,
5057 time = _ref$time === void 0 ? undefined : _ref$time,
5058 callback = _ref.callback;
5059
5060 if (!callback) {
5061 throw new Error('getProgramTime: callback must be provided');
5062 }
5063
5064 if (!playlist || time === undefined) {
5065 return callback({
5066 message: 'getProgramTime: playlist and time must be provided'
5067 });
5068 }
5069
5070 var matchedSegment = findSegmentForPlayerTime(time, playlist);
5071
5072 if (!matchedSegment) {
5073 return callback({
5074 message: 'valid programTime was not found'
5075 });
5076 }
5077
5078 if (matchedSegment.type === 'estimate') {
5079 return callback({
5080 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
5081 seekTime: matchedSegment.estimatedStart
5082 });
5083 }
5084
5085 var programTimeObject = {
5086 mediaSeconds: time
5087 };
5088 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
5089
5090 if (programTime) {
5091 programTimeObject.programDateTime = programTime.toISOString();
5092 }
5093
5094 return callback(null, programTimeObject);
5095 };
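// Usage sketch (the playlist object is assumed to be a parsed media
// playlist whose segments carry dateTimeObject values):
//
//   getProgramTime({
//     playlist: playlist,
//     time: 30,
//     callback: function (err, programTime) {
//       if (err) {
//         return; // err.seekTime is set when only an estimate was possible
//       }
//       // programTime.mediaSeconds === 30
//       // programTime.programDateTime is an ISO-8601 string
//     }
//   });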
5096 /**
5097 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
5098 *
5099 * @param {Object} args
5100 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
5101 * @param {Object} args.playlist A playlist to look within
5102 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
5103 * @param {Function} args.seekTo A method to perform a seek
5104 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
5105 * @param {Object} args.tech The tech to seek on
5106 * @param {Function} args.callback(err, newTime) A callback to return the new time to
5107 * @return {string} err.message A detailed error message
5108 * @return {number} newTime The exact time that was seeked to in seconds
5109 */
5110
5111 var seekToProgramTime = function seekToProgramTime(_ref2) {
5112 var programTime = _ref2.programTime,
5113 playlist = _ref2.playlist,
5114 _ref2$retryCount = _ref2.retryCount,
5115 retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
5116 seekTo = _ref2.seekTo,
5117 _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
5118 pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
5119 tech = _ref2.tech,
5120 callback = _ref2.callback;
5121
5122 if (!callback) {
5123 throw new Error('seekToProgramTime: callback must be provided');
5124 }
5125
5126 if (typeof programTime === 'undefined' || !playlist || !seekTo) {
5127 return callback({
5128 message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
5129 });
5130 }
5131
5132 if (!playlist.endList && !tech.hasStarted_) {
5133 return callback({
5134 message: 'player must be playing a live stream to start buffering'
5135 });
5136 }
5137
5138 if (!verifyProgramDateTimeTags(playlist)) {
5139 return callback({
5140 message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
5141 });
5142 }
5143
5144 var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
5145
5146 if (!matchedSegment) {
5147 return callback({
5148 message: programTime + " was not found in the stream"
5149 });
5150 }
5151
5152 var segment = matchedSegment.segment;
5153 var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
5154
5155 if (matchedSegment.type === 'estimate') {
5156 // we've run out of retries
5157 if (retryCount === 0) {
5158 return callback({
5159 message: programTime + " is not buffered yet. Try again"
5160 });
5161 }
5162
5163 seekTo(matchedSegment.estimatedStart + mediaOffset);
5164 tech.one('seeked', function () {
5165 seekToProgramTime({
5166 programTime: programTime,
5167 playlist: playlist,
5168 retryCount: retryCount - 1,
5169 seekTo: seekTo,
5170 pauseAfterSeek: pauseAfterSeek,
5171 tech: tech,
5172 callback: callback
5173 });
5174 });
5175 return;
5176 } // Since the segment.start value is determined from the buffered end or ending time
5177 // of the prior segment, the seekToTime doesn't need to account for any transmuxer
5178 // modifications.
5179
5180
5181 var seekToTime = segment.start + mediaOffset;
5182
5183 var seekedCallback = function seekedCallback() {
5184 return callback(null, tech.currentTime());
5185 }; // listen for seeked event
5186
5187
5188 tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
5189
5190 if (pauseAfterSeek) {
5191 tech.pause();
5192 }
5193
5194 seekTo(seekToTime);
5195 };
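// Usage sketch (player and playlist are hypothetical; seekTo is any
// function that performs the seek on the player):
//
//   seekToProgramTime({
//     programTime: '2021-01-01T00:00:30.000Z',
//     playlist: playlist,
//     seekTo: function (time) { player.currentTime(time); },
//     tech: player.tech(true),
//     callback: function (err, newTime) {
//       // newTime is the exact time that was seeked to, in seconds
//     }
//   });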
5196
5197 /*! @name mpd-parser @version 0.19.2 @license Apache-2.0 */
5198
5199 var isObject = function isObject(obj) {
5200 return !!obj && typeof obj === 'object';
5201 };
5202
5203 var merge = function merge() {
5204 for (var _len = arguments.length, objects = new Array(_len), _key = 0; _key < _len; _key++) {
5205 objects[_key] = arguments[_key];
5206 }
5207
5208 return objects.reduce(function (result, source) {
5209 if (typeof source !== 'object') {
5210 return result;
5211 }
5212
5213 Object.keys(source).forEach(function (key) {
5214 if (Array.isArray(result[key]) && Array.isArray(source[key])) {
5215 result[key] = result[key].concat(source[key]);
5216 } else if (isObject(result[key]) && isObject(source[key])) {
5217 result[key] = merge(result[key], source[key]);
5218 } else {
5219 result[key] = source[key];
5220 }
5221 });
5222 return result;
5223 }, {});
5224 };
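// Usage sketch: nested objects merge recursively and arrays concatenate:
//
//   merge({ a: 1, nested: { x: 1 }, list: [1] },
//         { b: 2, nested: { y: 2 }, list: [2] });
//   // => { a: 1, b: 2, nested: { x: 1, y: 2 }, list: [1, 2] }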
5225
5226 var values = function values(o) {
5227 return Object.keys(o).map(function (k) {
5228 return o[k];
5229 });
5230 };
5231
5232 var range = function range(start, end) {
5233 var result = [];
5234
5235 for (var i = start; i < end; i++) {
5236 result.push(i);
5237 }
5238
5239 return result;
5240 };
5241
5242 var flatten = function flatten(lists) {
5243 return lists.reduce(function (x, y) {
5244 return x.concat(y);
5245 }, []);
5246 };
5247
5248 var from = function from(list) {
5249 if (!list.length) {
5250 return [];
5251 }
5252
5253 var result = [];
5254
5255 for (var i = 0; i < list.length; i++) {
5256 result.push(list[i]);
5257 }
5258
5259 return result;
5260 };
5261
5262 var findIndexes = function findIndexes(l, key) {
5263 return l.reduce(function (a, e, i) {
5264 if (e[key]) {
5265 a.push(i);
5266 }
5267
5268 return a;
5269 }, []);
5270 };
5271
5272 var errors = {
5273 INVALID_NUMBER_OF_PERIOD: 'INVALID_NUMBER_OF_PERIOD',
5274 DASH_EMPTY_MANIFEST: 'DASH_EMPTY_MANIFEST',
5275 DASH_INVALID_XML: 'DASH_INVALID_XML',
5276 NO_BASE_URL: 'NO_BASE_URL',
5277 MISSING_SEGMENT_INFORMATION: 'MISSING_SEGMENT_INFORMATION',
5278 SEGMENT_TIME_UNSPECIFIED: 'SEGMENT_TIME_UNSPECIFIED',
5279 UNSUPPORTED_UTC_TIMING_SCHEME: 'UNSUPPORTED_UTC_TIMING_SCHEME'
5280 };
5281 /**
5282 * @typedef {Object} SingleUri
5283 * @property {string} uri - relative location of segment
5284 * @property {string} resolvedUri - resolved location of segment
5285 * @property {Object} byterange - Object containing information on how to make byte range
5286 * requests following byte-range-spec per RFC2616.
5287 * @property {number} byterange.length - length of range request
5288 * @property {number} byterange.offset - byte offset of range request
5289 *
5290 * @see https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1
5291 */
5292
5293 /**
5294 * Converts a URLType node (5.3.9.2.3 Table 13) to a segment object
5295 * that conforms to how m3u8-parser is structured
5296 *
5297 * @see https://github.com/videojs/m3u8-parser
5298 *
5299 * @param {string} baseUrl - baseUrl provided by <BaseUrl> nodes
5300 * @param {string} source - source url for segment
5301 * @param {string} range - optional range used for range calls,
5302 * follows RFC 2616, Clause 14.35.1
5303 * @return {SingleUri} full segment information transformed into a format similar
5304 * to m3u8-parser
5305 */
5306
5307 var urlTypeToSegment = function urlTypeToSegment(_ref) {
5308 var _ref$baseUrl = _ref.baseUrl,
5309 baseUrl = _ref$baseUrl === void 0 ? '' : _ref$baseUrl,
5310 _ref$source = _ref.source,
5311 source = _ref$source === void 0 ? '' : _ref$source,
5312 _ref$range = _ref.range,
5313 range = _ref$range === void 0 ? '' : _ref$range,
5314 _ref$indexRange = _ref.indexRange,
5315 indexRange = _ref$indexRange === void 0 ? '' : _ref$indexRange;
5316 var segment = {
5317 uri: source,
5318 resolvedUri: resolveUrl$1(baseUrl || '', source)
5319 };
5320
5321 if (range || indexRange) {
5322 var rangeStr = range ? range : indexRange;
5323 var ranges = rangeStr.split('-');
5324 var startRange = parseInt(ranges[0], 10);
5325 var endRange = parseInt(ranges[1], 10); // byterange should be inclusive according to
5326 // RFC 2616, Clause 14.35.1
5327
5328 segment.byterange = {
5329 length: endRange - startRange + 1,
5330 offset: startRange
5331 };
5332 }
5333
5334 return segment;
5335 };
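// Usage sketch (hypothetical URLs): the '0-599' range string becomes an
// inclusive byterange of 600 bytes starting at offset 0:
//
//   urlTypeToSegment({ baseUrl: 'https://example.com/', source: 'init.mp4', range: '0-599' });
//   // => { uri: 'init.mp4',
//   //      resolvedUri: 'https://example.com/init.mp4',
//   //      byterange: { length: 600, offset: 0 } }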
5336
5337 var byteRangeToString = function byteRangeToString(byterange) {
5338 // `endRange` is one less than `offset + length` because the HTTP range
5339 // header uses inclusive ranges
5340 var endRange = byterange.offset + byterange.length - 1;
5341 return byterange.offset + "-" + endRange;
5342 };
5343 /**
5344 * parse the end number attribute that can be a string,
5345 * number, or undefined.
5346 *
5347 * @param {string|number|undefined} endNumber
5348 * The end number attribute.
5349 *
5350 * @return {number|null}
5351 * The result of parsing the end number.
5352 */
5353
5354
5355 var parseEndNumber = function parseEndNumber(endNumber) {
5356 if (endNumber && typeof endNumber !== 'number') {
5357 endNumber = parseInt(endNumber, 10);
5358 }
5359
5360 if (isNaN(endNumber)) {
5361 return null;
5362 }
5363
5364 return endNumber;
5365 };
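// Usage sketch:
//
//   parseEndNumber('100');     // => 100
//   parseEndNumber(100);       // => 100
//   parseEndNumber('foo');     // => null
//   parseEndNumber(undefined); // => null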
5366 /**
5367 * Functions for calculating the range of available segments in static and dynamic
5368 * manifests.
5369 */
5370
5371
5372 var segmentRange = {
5373 /**
5374 * Returns the entire range of available segments for a static MPD
5375 *
5376 * @param {Object} attributes
5377 * Inherited MPD attributes
5378 * @return {{ start: number, end: number }}
5379 * The start and end numbers for available segments
5380 */
5381 static: function _static(attributes) {
5382 var duration = attributes.duration,
5383 _attributes$timescale = attributes.timescale,
5384 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
5385 sourceDuration = attributes.sourceDuration,
5386 periodDuration = attributes.periodDuration;
5387 var endNumber = parseEndNumber(attributes.endNumber);
5388 var segmentDuration = duration / timescale;
5389
5390 if (typeof endNumber === 'number') {
5391 return {
5392 start: 0,
5393 end: endNumber
5394 };
5395 }
5396
5397 if (typeof periodDuration === 'number') {
5398 return {
5399 start: 0,
5400 end: periodDuration / segmentDuration
5401 };
5402 }
5403
5404 return {
5405 start: 0,
5406 end: sourceDuration / segmentDuration
5407 };
5408 },
5409
5410 /**
5411 * Returns the current live window range of available segments for a dynamic MPD
5412 *
5413 * @param {Object} attributes
5414 * Inherited MPD attributes
5415 * @return {{ start: number, end: number }}
5416 * The start and end numbers for available segments
5417 */
5418 dynamic: function dynamic(attributes) {
5419 var NOW = attributes.NOW,
5420 clientOffset = attributes.clientOffset,
5421 availabilityStartTime = attributes.availabilityStartTime,
5422 _attributes$timescale2 = attributes.timescale,
5423 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
5424 duration = attributes.duration,
5425 _attributes$start = attributes.start,
5426 start = _attributes$start === void 0 ? 0 : _attributes$start,
5427 _attributes$minimumUp = attributes.minimumUpdatePeriod,
5428 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp,
5429 _attributes$timeShift = attributes.timeShiftBufferDepth,
5430 timeShiftBufferDepth = _attributes$timeShift === void 0 ? Infinity : _attributes$timeShift;
5431 var endNumber = parseEndNumber(attributes.endNumber);
5432 var now = (NOW + clientOffset) / 1000;
5433 var periodStartWC = availabilityStartTime + start;
5434 var periodEndWC = now + minimumUpdatePeriod;
5435 var periodDuration = periodEndWC - periodStartWC;
5436 var segmentCount = Math.ceil(periodDuration * timescale / duration);
5437 var availableStart = Math.floor((now - periodStartWC - timeShiftBufferDepth) * timescale / duration);
5438 var availableEnd = Math.floor((now - periodStartWC) * timescale / duration);
5439 return {
5440 start: Math.max(0, availableStart),
5441 end: typeof endNumber === 'number' ? endNumber : Math.min(segmentCount, availableEnd)
5442 };
5443 }
5444 };
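// Illustrative example for the static case: with a 2 second segment duration
// (duration / timescale) and a 60 second source, thirty segments are available:
//   segmentRange.static({ duration: 2, timescale: 1, sourceDuration: 60 });
//   // => { start: 0, end: 30 }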
5445 /**
5446 * Maps a range of numbers to objects with information needed to build the corresponding
5447 * segment list
5448 *
5449 * @name toSegmentsCallback
5450 * @function
5451 * @param {number} number
5452 * Number of the segment
5453 * @param {number} index
5454 * Index of the number in the range list
5455 * @return {{ number: Number, duration: Number, timeline: Number, time: Number }}
5456 * Object with segment timing and duration info
5457 */
5458
5459 /**
5460 * Returns a callback for Array.prototype.map for mapping a range of numbers to
5461 * information needed to build the segment list.
5462 *
5463 * @param {Object} attributes
5464 * Inherited MPD attributes
5465 * @return {toSegmentsCallback}
5466 * Callback map function
5467 */
5468
5469 var toSegments = function toSegments(attributes) {
5470 return function (number, index) {
5471 var duration = attributes.duration,
5472 _attributes$timescale3 = attributes.timescale,
5473 timescale = _attributes$timescale3 === void 0 ? 1 : _attributes$timescale3,
5474 periodIndex = attributes.periodIndex,
5475 _attributes$startNumb = attributes.startNumber,
5476 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb;
5477 return {
5478 number: startNumber + number,
5479 duration: duration / timescale,
5480 timeline: periodIndex,
5481 time: index * duration
5482 };
5483 };
5484 };
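// Illustrative example: the returned callback converts a (number, index) pair
// into segment timing info, with `time` kept in timescale units:
//   [0, 1, 2].map(toSegments({ duration: 4, timescale: 2, periodIndex: 0, startNumber: 1 }));
//   // => [{ number: 1, duration: 2, timeline: 0, time: 0 },
//   //     { number: 2, duration: 2, timeline: 0, time: 4 },
//   //     { number: 3, duration: 2, timeline: 0, time: 8 }]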
5485 /**
5486 * Returns a list of objects containing segment timing and duration info used for
5487 * building the list of segments. This uses the @duration attribute specified
5488 * in the MPD manifest to derive the range of segments.
5489 *
5490 * @param {Object} attributes
5491 * Inherited MPD attributes
5492 * @return {{number: number, duration: number, time: number, timeline: number}[]}
5493 * List of Objects with segment timing and duration info
5494 */
5495
5496
5497 var parseByDuration = function parseByDuration(attributes) {
5498 var type = attributes.type,
5499 duration = attributes.duration,
5500 _attributes$timescale4 = attributes.timescale,
5501 timescale = _attributes$timescale4 === void 0 ? 1 : _attributes$timescale4,
5502 periodDuration = attributes.periodDuration,
5503 sourceDuration = attributes.sourceDuration;
5504
5505 var _segmentRange$type = segmentRange[type](attributes),
5506 start = _segmentRange$type.start,
5507 end = _segmentRange$type.end;
5508
5509 var segments = range(start, end).map(toSegments(attributes));
5510
5511 if (type === 'static') {
5512 var index = segments.length - 1; // section is either a period or the full source
5513
5514 var sectionDuration = typeof periodDuration === 'number' ? periodDuration : sourceDuration; // final segment may be less than full segment duration
5515
5516 segments[index].duration = sectionDuration - duration / timescale * index;
5517 }
5518
5519 return segments;
5520 };
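// Illustrative example (assumes the range helper defined earlier in this
// bundle, which yields [0, 1, 2] for range(0, 2.5)): a 5 second static source
// cut into 2 second segments leaves a short final segment:
//   parseByDuration({ type: 'static', duration: 2, timescale: 1, sourceDuration: 5, periodIndex: 0 });
//   // => durations [2, 2, 1] with times [0, 2, 4] and numbers [1, 2, 3]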
5521 /**
5522 * Translates SegmentBase into a set of segments.
 5523 * A SegmentBase (DASH spec section 5.3.9.2) describes a single segment,
 5524 * addressed by the BaseURL and an optional index range.
5525 *
5526 * @param {Object} attributes
5527 * Object containing all inherited attributes from parent elements with attribute
5528 * names as keys
5529 * @return {Object.<Array>} list of segments
5530 */
5531
5532
5533 var segmentsFromBase = function segmentsFromBase(attributes) {
5534 var baseUrl = attributes.baseUrl,
5535 _attributes$initializ = attributes.initialization,
5536 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ,
5537 sourceDuration = attributes.sourceDuration,
5538 _attributes$indexRang = attributes.indexRange,
5539 indexRange = _attributes$indexRang === void 0 ? '' : _attributes$indexRang,
5540 duration = attributes.duration; // base url is required for SegmentBase to work, per spec (Section 5.3.9.2.1)
5541
5542 if (!baseUrl) {
5543 throw new Error(errors.NO_BASE_URL);
5544 }
5545
5546 var initSegment = urlTypeToSegment({
5547 baseUrl: baseUrl,
5548 source: initialization.sourceURL,
5549 range: initialization.range
5550 });
5551 var segment = urlTypeToSegment({
5552 baseUrl: baseUrl,
5553 source: baseUrl,
5554 indexRange: indexRange
5555 });
5556 segment.map = initSegment; // If there is a duration, use it, otherwise use the given duration of the source
5557 // (since SegmentBase is only for one total segment)
5558
5559 if (duration) {
5560 var segmentTimeInfo = parseByDuration(attributes);
5561
5562 if (segmentTimeInfo.length) {
5563 segment.duration = segmentTimeInfo[0].duration;
5564 segment.timeline = segmentTimeInfo[0].timeline;
5565 }
5566 } else if (sourceDuration) {
5567 segment.duration = sourceDuration;
5568 segment.timeline = 0;
5569 } // This is used for mediaSequence
5570
5571
5572 segment.number = 0;
5573 return [segment];
5574 };
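// Illustrative sketch: SegmentBase yields a single segment whose uri is the
// BaseURL itself, with `map` pointing at the initialization segment and, when
// an indexRange is given, a byterange covering the segment index. The caller
// (generateSegments below) moves that lone segment into `sidx` when
// indexRange is present.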
5575 /**
5576 * Given a playlist, a sidx box, and a baseUrl, update the segment list of the playlist
5577 * according to the sidx information given.
5578 *
 5579 * playlist.sidx has metadata about the sidx, whereas the sidx param
5580 * is the parsed sidx box itself.
5581 *
5582 * @param {Object} playlist the playlist to update the sidx information for
5583 * @param {Object} sidx the parsed sidx box
5584 * @return {Object} the playlist object with the updated sidx information
5585 */
5586
5587
5588 var addSidxSegmentsToPlaylist = function addSidxSegmentsToPlaylist(playlist, sidx, baseUrl) {
5589 // Retain init segment information
5590 var initSegment = playlist.sidx.map ? playlist.sidx.map : null; // Retain source duration from initial main manifest parsing
5591
5592 var sourceDuration = playlist.sidx.duration; // Retain source timeline
5593
5594 var timeline = playlist.timeline || 0;
5595 var sidxByteRange = playlist.sidx.byterange;
5596 var sidxEnd = sidxByteRange.offset + sidxByteRange.length; // Retain timescale of the parsed sidx
5597
5598 var timescale = sidx.timescale; // referenceType 1 refers to other sidx boxes
5599
5600 var mediaReferences = sidx.references.filter(function (r) {
5601 return r.referenceType !== 1;
5602 });
5603 var segments = [];
5604 var type = playlist.endList ? 'static' : 'dynamic'; // firstOffset is the offset from the end of the sidx box
5605
5606 var startIndex = sidxEnd + sidx.firstOffset;
5607
5608 for (var i = 0; i < mediaReferences.length; i++) {
 5609 var reference = mediaReferences[i]; // size of the referenced (sub)segment
5610
5611 var size = reference.referencedSize; // duration of the referenced (sub)segment, in the timescale
5612 // this will be converted to seconds when generating segments
5613
5614 var duration = reference.subsegmentDuration; // should be an inclusive range
5615
5616 var endIndex = startIndex + size - 1;
5617 var indexRange = startIndex + "-" + endIndex;
5618 var attributes = {
5619 baseUrl: baseUrl,
5620 timescale: timescale,
5621 timeline: timeline,
5622 // this is used in parseByDuration
5623 periodIndex: timeline,
5624 duration: duration,
5625 sourceDuration: sourceDuration,
5626 indexRange: indexRange,
5627 type: type
5628 };
5629 var segment = segmentsFromBase(attributes)[0];
5630
5631 if (initSegment) {
5632 segment.map = initSegment;
5633 }
5634
5635 segments.push(segment);
5636 startIndex += size;
5637 }
5638
5639 playlist.segments = segments;
5640 return playlist;
5641 };
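// Illustrative byte-offset arithmetic: for a sidx at byterange
// { offset: 200, length: 100 }, sidxEnd is 300; with firstOffset 0 the first
// referenced (sub)segment starts at byte 300, so a referencedSize of 1000
// produces indexRange '300-1299', and the next reference starts at 1300.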
5642
5643 var generateSidxKey = function generateSidxKey(sidx) {
5644 return sidx && sidx.uri + '-' + byteRangeToString(sidx.byterange);
5645 };
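// Illustrative example:
//   generateSidxKey({ uri: 'video.mp4', byterange: { offset: 0, length: 200 } });
//   // => 'video.mp4-0-199'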
5646
5647 var mergeDiscontiguousPlaylists = function mergeDiscontiguousPlaylists(playlists) {
5648 var mergedPlaylists = values(playlists.reduce(function (acc, playlist) {
5649 // assuming playlist IDs are the same across periods
5650 // TODO: handle multiperiod where representation sets are not the same
5651 // across periods
5652 var name = playlist.attributes.id + (playlist.attributes.lang || ''); // Periods after first
5653
5654 if (acc[name]) {
5655 var _acc$name$segments; // first segment of subsequent periods signal a discontinuity
5656
5657
5658 if (playlist.segments[0]) {
5659 playlist.segments[0].discontinuity = true;
5660 }
5661
5662 (_acc$name$segments = acc[name].segments).push.apply(_acc$name$segments, playlist.segments); // bubble up contentProtection, this assumes all DRM content
5663 // has the same contentProtection
5664
5665
5666 if (playlist.attributes.contentProtection) {
5667 acc[name].attributes.contentProtection = playlist.attributes.contentProtection;
5668 }
5669 } else {
5670 // first Period
5671 acc[name] = playlist;
5672 }
5673
5674 return acc;
5675 }, {}));
5676 return mergedPlaylists.map(function (playlist) {
5677 playlist.discontinuityStarts = findIndexes(playlist.segments, 'discontinuity');
5678 return playlist;
5679 });
5680 };
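// Illustrative behavior: playlists from consecutive Periods that share an
// attributes.id (plus lang) are folded into one playlist; the first segment
// contributed by each later Period is flagged with discontinuity = true, and
// those flags become the playlist's discontinuityStarts indexes.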
5681
5682 var addSidxSegmentsToPlaylist$1 = function addSidxSegmentsToPlaylist$1(playlist, sidxMapping) {
5683 var sidxKey = generateSidxKey(playlist.sidx);
5684 var sidxMatch = sidxKey && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx;
5685
5686 if (sidxMatch) {
5687 addSidxSegmentsToPlaylist(playlist, sidxMatch, playlist.sidx.resolvedUri);
5688 }
5689
5690 return playlist;
5691 };
5692
5693 var addSidxSegmentsToPlaylists = function addSidxSegmentsToPlaylists(playlists, sidxMapping) {
5694 if (sidxMapping === void 0) {
5695 sidxMapping = {};
5696 }
5697
5698 if (!Object.keys(sidxMapping).length) {
5699 return playlists;
5700 }
5701
5702 for (var i in playlists) {
5703 playlists[i] = addSidxSegmentsToPlaylist$1(playlists[i], sidxMapping);
5704 }
5705
5706 return playlists;
5707 };
5708
5709 var formatAudioPlaylist = function formatAudioPlaylist(_ref, isAudioOnly) {
5710 var _attributes;
5711
5712 var attributes = _ref.attributes,
5713 segments = _ref.segments,
5714 sidx = _ref.sidx;
5715 var playlist = {
5716 attributes: (_attributes = {
5717 NAME: attributes.id,
5718 BANDWIDTH: attributes.bandwidth,
5719 CODECS: attributes.codecs
5720 }, _attributes['PROGRAM-ID'] = 1, _attributes),
5721 uri: '',
5722 endList: attributes.type === 'static',
5723 timeline: attributes.periodIndex,
5724 resolvedUri: '',
5725 targetDuration: attributes.duration,
5726 segments: segments,
5727 mediaSequence: segments.length ? segments[0].number : 1
5728 };
5729
5730 if (attributes.contentProtection) {
5731 playlist.contentProtection = attributes.contentProtection;
5732 }
5733
5734 if (sidx) {
5735 playlist.sidx = sidx;
5736 }
5737
5738 if (isAudioOnly) {
5739 playlist.attributes.AUDIO = 'audio';
5740 playlist.attributes.SUBTITLES = 'subs';
5741 }
5742
5743 return playlist;
5744 };
5745
5746 var formatVttPlaylist = function formatVttPlaylist(_ref2) {
5747 var _m3u8Attributes;
5748
5749 var attributes = _ref2.attributes,
5750 segments = _ref2.segments;
5751
5752 if (typeof segments === 'undefined') {
5753 // vtt tracks may use single file in BaseURL
5754 segments = [{
5755 uri: attributes.baseUrl,
5756 timeline: attributes.periodIndex,
5757 resolvedUri: attributes.baseUrl || '',
5758 duration: attributes.sourceDuration,
5759 number: 0
5760 }]; // targetDuration should be the same duration as the only segment
5761
5762 attributes.duration = attributes.sourceDuration;
5763 }
5764
5765 var m3u8Attributes = (_m3u8Attributes = {
5766 NAME: attributes.id,
5767 BANDWIDTH: attributes.bandwidth
5768 }, _m3u8Attributes['PROGRAM-ID'] = 1, _m3u8Attributes);
5769
5770 if (attributes.codecs) {
5771 m3u8Attributes.CODECS = attributes.codecs;
5772 }
5773
5774 return {
5775 attributes: m3u8Attributes,
5776 uri: '',
5777 endList: attributes.type === 'static',
5778 timeline: attributes.periodIndex,
5779 resolvedUri: attributes.baseUrl || '',
5780 targetDuration: attributes.duration,
5781 segments: segments,
5782 mediaSequence: segments.length ? segments[0].number : 1
5783 };
5784 };
5785
5786 var organizeAudioPlaylists = function organizeAudioPlaylists(playlists, sidxMapping, isAudioOnly) {
5787 if (sidxMapping === void 0) {
5788 sidxMapping = {};
5789 }
5790
5791 if (isAudioOnly === void 0) {
5792 isAudioOnly = false;
5793 }
5794
5795 var mainPlaylist;
5796 var formattedPlaylists = playlists.reduce(function (a, playlist) {
5797 var role = playlist.attributes.role && playlist.attributes.role.value || '';
5798 var language = playlist.attributes.lang || '';
5799 var label = playlist.attributes.label || 'main';
5800
5801 if (language && !playlist.attributes.label) {
5802 var roleLabel = role ? " (" + role + ")" : '';
5803 label = "" + playlist.attributes.lang + roleLabel;
5804 }
5805
5806 if (!a[label]) {
5807 a[label] = {
5808 language: language,
5809 autoselect: true,
5810 default: role === 'main',
5811 playlists: [],
5812 uri: ''
5813 };
5814 }
5815
5816 var formatted = addSidxSegmentsToPlaylist$1(formatAudioPlaylist(playlist, isAudioOnly), sidxMapping);
5817 a[label].playlists.push(formatted);
5818
5819 if (typeof mainPlaylist === 'undefined' && role === 'main') {
5820 mainPlaylist = playlist;
5821 mainPlaylist.default = true;
5822 }
5823
5824 return a;
5825 }, {}); // if no playlists have role "main", mark the first as main
5826
5827 if (!mainPlaylist) {
5828 var firstLabel = Object.keys(formattedPlaylists)[0];
5829 formattedPlaylists[firstLabel].default = true;
5830 }
5831
5832 return formattedPlaylists;
5833 };
5834
5835 var organizeVttPlaylists = function organizeVttPlaylists(playlists, sidxMapping) {
5836 if (sidxMapping === void 0) {
5837 sidxMapping = {};
5838 }
5839
5840 return playlists.reduce(function (a, playlist) {
5841 var label = playlist.attributes.lang || 'text';
5842
5843 if (!a[label]) {
5844 a[label] = {
5845 language: label,
5846 default: false,
5847 autoselect: false,
5848 playlists: [],
5849 uri: ''
5850 };
5851 }
5852
5853 a[label].playlists.push(addSidxSegmentsToPlaylist$1(formatVttPlaylist(playlist), sidxMapping));
5854 return a;
5855 }, {});
5856 };
5857
5858 var organizeCaptionServices = function organizeCaptionServices(captionServices) {
5859 return captionServices.reduce(function (svcObj, svc) {
5860 if (!svc) {
5861 return svcObj;
5862 }
5863
5864 svc.forEach(function (service) {
5865 var channel = service.channel,
5866 language = service.language;
5867 svcObj[language] = {
5868 autoselect: false,
5869 default: false,
5870 instreamId: channel,
5871 language: language
5872 };
5873
5874 if (service.hasOwnProperty('aspectRatio')) {
5875 svcObj[language].aspectRatio = service.aspectRatio;
5876 }
5877
5878 if (service.hasOwnProperty('easyReader')) {
5879 svcObj[language].easyReader = service.easyReader;
5880 }
5881
5882 if (service.hasOwnProperty('3D')) {
5883 svcObj[language]['3D'] = service['3D'];
5884 }
5885 });
5886 return svcObj;
5887 }, {});
5888 };
5889
5890 var formatVideoPlaylist = function formatVideoPlaylist(_ref3) {
5891 var _attributes2;
5892
5893 var attributes = _ref3.attributes,
5894 segments = _ref3.segments,
5895 sidx = _ref3.sidx;
5896 var playlist = {
5897 attributes: (_attributes2 = {
5898 NAME: attributes.id,
5899 AUDIO: 'audio',
5900 SUBTITLES: 'subs',
5901 RESOLUTION: {
5902 width: attributes.width,
5903 height: attributes.height
5904 },
5905 CODECS: attributes.codecs,
5906 BANDWIDTH: attributes.bandwidth
5907 }, _attributes2['PROGRAM-ID'] = 1, _attributes2),
5908 uri: '',
5909 endList: attributes.type === 'static',
5910 timeline: attributes.periodIndex,
5911 resolvedUri: '',
5912 targetDuration: attributes.duration,
5913 segments: segments,
5914 mediaSequence: segments.length ? segments[0].number : 1
5915 };
5916
5917 if (attributes.contentProtection) {
5918 playlist.contentProtection = attributes.contentProtection;
5919 }
5920
5921 if (sidx) {
5922 playlist.sidx = sidx;
5923 }
5924
5925 return playlist;
5926 };
5927
5928 var videoOnly = function videoOnly(_ref4) {
5929 var attributes = _ref4.attributes;
5930 return attributes.mimeType === 'video/mp4' || attributes.mimeType === 'video/webm' || attributes.contentType === 'video';
5931 };
5932
5933 var audioOnly = function audioOnly(_ref5) {
5934 var attributes = _ref5.attributes;
5935 return attributes.mimeType === 'audio/mp4' || attributes.mimeType === 'audio/webm' || attributes.contentType === 'audio';
5936 };
5937
5938 var vttOnly = function vttOnly(_ref6) {
5939 var attributes = _ref6.attributes;
5940 return attributes.mimeType === 'text/vtt' || attributes.contentType === 'text';
5941 };
5942
5943 var toM3u8 = function toM3u8(dashPlaylists, locations, sidxMapping) {
5944 var _mediaGroups;
5945
5946 if (sidxMapping === void 0) {
5947 sidxMapping = {};
5948 }
5949
5950 if (!dashPlaylists.length) {
5951 return {};
5952 } // grab all main manifest attributes
5953
5954
5955 var _dashPlaylists$0$attr = dashPlaylists[0].attributes,
5956 duration = _dashPlaylists$0$attr.sourceDuration,
5957 type = _dashPlaylists$0$attr.type,
5958 suggestedPresentationDelay = _dashPlaylists$0$attr.suggestedPresentationDelay,
5959 minimumUpdatePeriod = _dashPlaylists$0$attr.minimumUpdatePeriod;
5960 var videoPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(videoOnly)).map(formatVideoPlaylist);
5961 var audioPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(audioOnly));
5962 var vttPlaylists = dashPlaylists.filter(vttOnly);
5963 var captions = dashPlaylists.map(function (playlist) {
5964 return playlist.attributes.captionServices;
5965 }).filter(Boolean);
5966 var manifest = {
5967 allowCache: true,
5968 discontinuityStarts: [],
5969 segments: [],
5970 endList: true,
5971 mediaGroups: (_mediaGroups = {
5972 AUDIO: {},
5973 VIDEO: {}
5974 }, _mediaGroups['CLOSED-CAPTIONS'] = {}, _mediaGroups.SUBTITLES = {}, _mediaGroups),
5975 uri: '',
5976 duration: duration,
5977 playlists: addSidxSegmentsToPlaylists(videoPlaylists, sidxMapping)
5978 };
5979
5980 if (minimumUpdatePeriod >= 0) {
5981 manifest.minimumUpdatePeriod = minimumUpdatePeriod * 1000;
5982 }
5983
5984 if (locations) {
5985 manifest.locations = locations;
5986 }
5987
5988 if (type === 'dynamic') {
5989 manifest.suggestedPresentationDelay = suggestedPresentationDelay;
5990 }
5991
5992 var isAudioOnly = manifest.playlists.length === 0;
5993
5994 if (audioPlaylists.length) {
5995 manifest.mediaGroups.AUDIO.audio = organizeAudioPlaylists(audioPlaylists, sidxMapping, isAudioOnly);
5996 }
5997
5998 if (vttPlaylists.length) {
5999 manifest.mediaGroups.SUBTITLES.subs = organizeVttPlaylists(vttPlaylists, sidxMapping);
6000 }
6001
6002 if (captions.length) {
6003 manifest.mediaGroups['CLOSED-CAPTIONS'].cc = organizeCaptionServices(captions);
6004 }
6005
6006 return manifest;
6007 };
6008 /**
6009 * Calculates the R (repetition) value for a live stream (for the final segment
 6010 * in a manifest where the @r value is -1)
6011 *
6012 * @param {Object} attributes
6013 * Object containing all inherited attributes from parent elements with attribute
6014 * names as keys
6015 * @param {number} time
6016 * current time (typically the total time up until the final segment)
6017 * @param {number} duration
6018 * duration property for the given <S />
6019 *
6020 * @return {number}
6021 * R value to reach the end of the given period
6022 */
6023
6024
6025 var getLiveRValue = function getLiveRValue(attributes, time, duration) {
6026 var NOW = attributes.NOW,
6027 clientOffset = attributes.clientOffset,
6028 availabilityStartTime = attributes.availabilityStartTime,
6029 _attributes$timescale = attributes.timescale,
6030 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
6031 _attributes$start = attributes.start,
6032 start = _attributes$start === void 0 ? 0 : _attributes$start,
6033 _attributes$minimumUp = attributes.minimumUpdatePeriod,
6034 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp;
6035 var now = (NOW + clientOffset) / 1000;
6036 var periodStartWC = availabilityStartTime + start;
6037 var periodEndWC = now + minimumUpdatePeriod;
6038 var periodDuration = periodEndWC - periodStartWC;
6039 return Math.ceil((periodDuration * timescale - time) / duration);
6040 };
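// Illustrative arithmetic (time and duration are in timescale units): with
// NOW = 120000 ms, clientOffset = 0, availabilityStartTime = 0, start = 0,
// minimumUpdatePeriod = 0, timescale = 1, time = 100 and duration = 4:
//   now = 120, periodDuration = 120, so R = ceil((120 * 1 - 100) / 4) = 5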
6041 /**
6042 * Uses information provided by SegmentTemplate.SegmentTimeline to determine segment
6043 * timing and duration
6044 *
6045 * @param {Object} attributes
6046 * Object containing all inherited attributes from parent elements with attribute
6047 * names as keys
6048 * @param {Object[]} segmentTimeline
6049 * List of objects representing the attributes of each S element contained within
6050 *
6051 * @return {{number: number, duration: number, time: number, timeline: number}[]}
6052 * List of Objects with segment timing and duration info
6053 */
6054
6055
6056 var parseByTimeline = function parseByTimeline(attributes, segmentTimeline) {
6057 var type = attributes.type,
6058 _attributes$minimumUp2 = attributes.minimumUpdatePeriod,
6059 minimumUpdatePeriod = _attributes$minimumUp2 === void 0 ? 0 : _attributes$minimumUp2,
6060 _attributes$media = attributes.media,
6061 media = _attributes$media === void 0 ? '' : _attributes$media,
6062 sourceDuration = attributes.sourceDuration,
6063 _attributes$timescale2 = attributes.timescale,
6064 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
6065 _attributes$startNumb = attributes.startNumber,
6066 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb,
6067 timeline = attributes.periodIndex;
6068 var segments = [];
6069 var time = -1;
6070
6071 for (var sIndex = 0; sIndex < segmentTimeline.length; sIndex++) {
6072 var S = segmentTimeline[sIndex];
6073 var duration = S.d;
6074 var repeat = S.r || 0;
6075 var segmentTime = S.t || 0;
6076
6077 if (time < 0) {
6078 // first segment
6079 time = segmentTime;
6080 }
6081
6082 if (segmentTime && segmentTime > time) {
6083 // discontinuity
6084 // TODO: How to handle this type of discontinuity
 6085 // timeline++ here would treat it like an HLS discontinuity and content would
 6086 // get appended without a gap
6087 // E.G.
6088 // <S t="0" d="1" />
6089 // <S d="1" />
6090 // <S d="1" />
6091 // <S t="5" d="1" />
6092 // would have $Time$ values of [0, 1, 2, 5]
 6093 // should this be appended at time positions [0, 1, 2, 3] (#EXT-X-DISCONTINUITY)
6094 // or [0, 1, 2, gap, gap, 5]? (#EXT-X-GAP)
6095 // does the value of sourceDuration consider this when calculating arbitrary
6096 // negative @r repeat value?
6097 // E.G. Same elements as above with this added at the end
6098 // <S d="1" r="-1" />
6099 // with a sourceDuration of 10
6100 // Would the 2 gaps be included in the time duration calculations resulting in
6101 // 8 segments with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9] or 10 segments
6102 // with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9, 10, 11] ?
6103 time = segmentTime;
6104 }
6105
6106 var count = void 0;
6107
6108 if (repeat < 0) {
6109 var nextS = sIndex + 1;
6110
6111 if (nextS === segmentTimeline.length) {
6112 // last segment
6113 if (type === 'dynamic' && minimumUpdatePeriod > 0 && media.indexOf('$Number$') > 0) {
6114 count = getLiveRValue(attributes, time, duration);
6115 } else {
6116 // TODO: This may be incorrect depending on conclusion of TODO above
6117 count = (sourceDuration * timescale - time) / duration;
6118 }
6119 } else {
6120 count = (segmentTimeline[nextS].t - time) / duration;
6121 }
6122 } else {
6123 count = repeat + 1;
6124 }
6125
6126 var end = startNumber + segments.length + count;
6127 var number = startNumber + segments.length;
6128
6129 while (number < end) {
6130 segments.push({
6131 number: number,
6132 duration: duration / timescale,
6133 time: time,
6134 timeline: timeline
6135 });
6136 time += duration;
6137 number++;
6138 }
6139 }
6140
6141 return segments;
6142 };
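// Illustrative example: with timescale 1 and startNumber 1, the timeline
//   [{ t: 0, d: 2, r: 1 }, { d: 2 }]
// expands to three 2 second segments at times 0, 2 and 4 (numbers 1-3), since
// r: 1 means "repeat once more" and the second S continues from the running time.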
6143
6144 var identifierPattern = /\$([A-z]*)(?:(%0)([0-9]+)d)?\$/g;
6145 /**
6146 * Replaces template identifiers with corresponding values. To be used as the callback
6147 * for String.prototype.replace
6148 *
6149 * @name replaceCallback
6150 * @function
6151 * @param {string} match
6152 * Entire match of identifier
6153 * @param {string} identifier
6154 * Name of matched identifier
6155 * @param {string} format
6156 * Format tag string. Its presence indicates that padding is expected
6157 * @param {string} width
 6158 * Desired length of the replaced value. Values shorter than this width shall be
 6159 * left-padded with zeros
6160 * @return {string}
6161 * Replacement for the matched identifier
6162 */
6163
6164 /**
6165 * Returns a function to be used as a callback for String.prototype.replace to replace
6166 * template identifiers
6167 *
 6168 * @param {Object} values
6169 * Object containing values that shall be used to replace known identifiers
6170 * @param {number} values.RepresentationID
6171 * Value of the Representation@id attribute
6172 * @param {number} values.Number
6173 * Number of the corresponding segment
6174 * @param {number} values.Bandwidth
6175 * Value of the Representation@bandwidth attribute.
6176 * @param {number} values.Time
6177 * Timestamp value of the corresponding segment
6178 * @return {replaceCallback}
6179 * Callback to be used with String.prototype.replace to replace identifiers
6180 */
6181
6182 var identifierReplacement = function identifierReplacement(values) {
6183 return function (match, identifier, format, width) {
6184 if (match === '$$') {
6185 // escape sequence
6186 return '$';
6187 }
6188
6189 if (typeof values[identifier] === 'undefined') {
6190 return match;
6191 }
6192
6193 var value = '' + values[identifier];
6194
6195 if (identifier === 'RepresentationID') {
6196 // Format tag shall not be present with RepresentationID
6197 return value;
6198 }
6199
6200 if (!format) {
6201 width = 1;
6202 } else {
6203 width = parseInt(width, 10);
6204 }
6205
6206 if (value.length >= width) {
6207 return value;
6208 }
6209
6210 return "" + new Array(width - value.length + 1).join('0') + value;
6211 };
6212 };
6213 /**
6214 * Constructs a segment url from a template string
6215 *
6216 * @param {string} url
6217 * Template string to construct url from
 6218 * @param {Object} values
6219 * Object containing values that shall be used to replace known identifiers
6220 * @param {number} values.RepresentationID
6221 * Value of the Representation@id attribute
6222 * @param {number} values.Number
6223 * Number of the corresponding segment
6224 * @param {number} values.Bandwidth
6225 * Value of the Representation@bandwidth attribute.
6226 * @param {number} values.Time
6227 * Timestamp value of the corresponding segment
6228 * @return {string}
6229 * Segment url with identifiers replaced
6230 */
6231
6232
6233 var constructTemplateUrl = function constructTemplateUrl(url, values) {
6234 return url.replace(identifierPattern, identifierReplacement(values));
6235 };
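// Illustrative examples:
//   constructTemplateUrl('$RepresentationID$/segment-$Number%03d$.mp4',
//     { RepresentationID: 'v1', Number: 7, Bandwidth: 500000, Time: 0 });
//   // => 'v1/segment-007.mp4'
//   constructTemplateUrl('costs-$$5.mp4', {}); // => 'costs-$5.mp4' ($$ escapes $)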
6236 /**
6237 * Generates a list of objects containing timing and duration information about each
6238 * segment needed to generate segment uris and the complete segment object
6239 *
6240 * @param {Object} attributes
6241 * Object containing all inherited attributes from parent elements with attribute
6242 * names as keys
6243 * @param {Object[]|undefined} segmentTimeline
6244 * List of objects representing the attributes of each S element contained within
6245 * the SegmentTimeline element
6246 * @return {{number: number, duration: number, time: number, timeline: number}[]}
6247 * List of Objects with segment timing and duration info
6248 */
6249
6250
6251 var parseTemplateInfo = function parseTemplateInfo(attributes, segmentTimeline) {
6252 if (!attributes.duration && !segmentTimeline) {
6253 // if neither @duration or SegmentTimeline are present, then there shall be exactly
6254 // one media segment
6255 return [{
6256 number: attributes.startNumber || 1,
6257 duration: attributes.sourceDuration,
6258 time: 0,
6259 timeline: attributes.periodIndex
6260 }];
6261 }
6262
6263 if (attributes.duration) {
6264 return parseByDuration(attributes);
6265 }
6266
6267 return parseByTimeline(attributes, segmentTimeline);
6268 };
6269 /**
6270 * Generates a list of segments using information provided by the SegmentTemplate element
6271 *
6272 * @param {Object} attributes
6273 * Object containing all inherited attributes from parent elements with attribute
6274 * names as keys
6275 * @param {Object[]|undefined} segmentTimeline
6276 * List of objects representing the attributes of each S element contained within
6277 * the SegmentTimeline element
6278 * @return {Object[]}
6279 * List of segment objects
6280 */
6281
6282
6283 var segmentsFromTemplate = function segmentsFromTemplate(attributes, segmentTimeline) {
6284 var templateValues = {
6285 RepresentationID: attributes.id,
6286 Bandwidth: attributes.bandwidth || 0
6287 };
6288 var _attributes$initializ = attributes.initialization,
6289 initialization = _attributes$initializ === void 0 ? {
6290 sourceURL: '',
6291 range: ''
6292 } : _attributes$initializ;
6293 var mapSegment = urlTypeToSegment({
6294 baseUrl: attributes.baseUrl,
6295 source: constructTemplateUrl(initialization.sourceURL, templateValues),
6296 range: initialization.range
6297 });
6298 var segments = parseTemplateInfo(attributes, segmentTimeline);
6299 return segments.map(function (segment) {
6300 templateValues.Number = segment.number;
6301 templateValues.Time = segment.time;
6302 var uri = constructTemplateUrl(attributes.media || '', templateValues); // See DASH spec section 5.3.9.2.2
6303 // - if timescale isn't present on any level, default to 1.
6304
6305 var timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0
6306
6307 var presentationTimeOffset = attributes.presentationTimeOffset || 0;
6308 var presentationTime = // Even if the @t attribute is not specified for the segment, segment.time is
6309 // calculated in mpd-parser prior to this, so it's assumed to be available.
6310 attributes.periodStart + (segment.time - presentationTimeOffset) / timescale;
6311 var map = {
6312 uri: uri,
6313 timeline: segment.timeline,
6314 duration: segment.duration,
6315 resolvedUri: resolveUrl$1(attributes.baseUrl || '', uri),
6316 map: mapSegment,
6317 number: segment.number,
6318 presentationTime: presentationTime
6319 };
6320 return map;
6321 });
6322 };
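// Illustrative end-to-end example: a static 4 second source with @duration 2,
// timescale 1 and media '$RepresentationID$-$Number$.mp4' for Representation
// id 'v1' yields two segments with uris 'v1-1.mp4' and 'v1-2.mp4' (numbers 1
// and 2) and presentationTimes periodStart + 0 and periodStart + 2.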
6323 /**
6324 * Converts a <SegmentUrl> (of type URLType from the DASH spec 5.3.9.2 Table 14)
6325 * to an object that matches the output of a segment in videojs/mpd-parser
6326 *
6327 * @param {Object} attributes
6328 * Object containing all inherited attributes from parent elements with attribute
6329 * names as keys
6330 * @param {Object} segmentUrl
6331 * <SegmentURL> node to translate into a segment object
6332 * @return {Object} translated segment object
6333 */
6334
6335
6336 var SegmentURLToSegmentObject = function SegmentURLToSegmentObject(attributes, segmentUrl) {
6337 var baseUrl = attributes.baseUrl,
6338 _attributes$initializ = attributes.initialization,
6339 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ;
6340 var initSegment = urlTypeToSegment({
6341 baseUrl: baseUrl,
6342 source: initialization.sourceURL,
6343 range: initialization.range
6344 });
6345 var segment = urlTypeToSegment({
6346 baseUrl: baseUrl,
6347 source: segmentUrl.media,
6348 range: segmentUrl.mediaRange
6349 });
6350 segment.map = initSegment;
6351 return segment;
6352 };
6353 /**
 6354 * Generates a list of segments using information provided by the SegmentList element.
 6355 * SegmentList (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
 6356 * node should be translated into a segment.
6357 *
6358 * @param {Object} attributes
6359 * Object containing all inherited attributes from parent elements with attribute
6360 * names as keys
6361 * @param {Object[]|undefined} segmentTimeline
6362 * List of objects representing the attributes of each S element contained within
6363 * the SegmentTimeline element
6364 * @return {Object.<Array>} list of segments
6365 */
6366
6367
6368 var segmentsFromList = function segmentsFromList(attributes, segmentTimeline) {
6369 var duration = attributes.duration,
6370 _attributes$segmentUr = attributes.segmentUrls,
6371 segmentUrls = _attributes$segmentUr === void 0 ? [] : _attributes$segmentUr,
 6372 periodStart = attributes.periodStart; // Per spec (5.3.9.2.1), with neither @duration nor SegmentTimeline there is
 6373 // no way to determine segment duration, and defining both is outside of spec.
6374
6375 if (!duration && !segmentTimeline || duration && segmentTimeline) {
6376 throw new Error(errors.SEGMENT_TIME_UNSPECIFIED);
6377 }
6378
6379 var segmentUrlMap = segmentUrls.map(function (segmentUrlObject) {
6380 return SegmentURLToSegmentObject(attributes, segmentUrlObject);
6381 });
6382 var segmentTimeInfo;
6383
6384 if (duration) {
6385 segmentTimeInfo = parseByDuration(attributes);
6386 }
6387
6388 if (segmentTimeline) {
6389 segmentTimeInfo = parseByTimeline(attributes, segmentTimeline);
6390 }
6391
6392 var segments = segmentTimeInfo.map(function (segmentTime, index) {
6393 if (segmentUrlMap[index]) {
6394 var segment = segmentUrlMap[index]; // See DASH spec section 5.3.9.2.2
6395 // - if timescale isn't present on any level, default to 1.
6396
6397 var timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0
6398
6399 var presentationTimeOffset = attributes.presentationTimeOffset || 0;
6400 segment.timeline = segmentTime.timeline;
6401 segment.duration = segmentTime.duration;
6402 segment.number = segmentTime.number;
6403 segment.presentationTime = periodStart + (segmentTime.time - presentationTimeOffset) / timescale;
6404 return segment;
 6405 } // Since we're mapping we should get rid of any blank segments (in case
 6406 // the given SegmentTimeline accounts for more elements than we have
 6407 // SegmentURLs for).
6408
6409 }).filter(function (segment) {
6410 return segment;
6411 });
6412 return segments;
6413 };
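// Illustrative failure modes for the timing guard in segmentsFromList above
// (urls and timeline are assumed placeholders; both calls throw
// errors.SEGMENT_TIME_UNSPECIFIED):
//   segmentsFromList({ segmentUrls: urls });                       // neither @duration nor SegmentTimeline
//   segmentsFromList({ duration: 2, segmentUrls: urls }, timeline); // both defined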
6414
6415 var generateSegments = function generateSegments(_ref) {
6416 var attributes = _ref.attributes,
6417 segmentInfo = _ref.segmentInfo;
6418 var segmentAttributes;
6419 var segmentsFn;
6420
6421 if (segmentInfo.template) {
6422 segmentsFn = segmentsFromTemplate;
6423 segmentAttributes = merge(attributes, segmentInfo.template);
6424 } else if (segmentInfo.base) {
6425 segmentsFn = segmentsFromBase;
6426 segmentAttributes = merge(attributes, segmentInfo.base);
6427 } else if (segmentInfo.list) {
6428 segmentsFn = segmentsFromList;
6429 segmentAttributes = merge(attributes, segmentInfo.list);
6430 }
6431
6432 var segmentsInfo = {
6433 attributes: attributes
6434 };
6435
6436 if (!segmentsFn) {
6437 return segmentsInfo;
6438 }
6439
 6440 var segments = segmentsFn(segmentAttributes, segmentInfo.segmentTimeline); // The @duration attribute will be used to determine the playlist's targetDuration, which
6441 // must be in seconds. Since we've generated the segment list, we no longer need
6442 // @duration to be in @timescale units, so we can convert it here.
6443
6444 if (segmentAttributes.duration) {
6445 var _segmentAttributes = segmentAttributes,
6446 duration = _segmentAttributes.duration,
6447 _segmentAttributes$ti = _segmentAttributes.timescale,
6448 timescale = _segmentAttributes$ti === void 0 ? 1 : _segmentAttributes$ti;
6449 segmentAttributes.duration = duration / timescale;
6450 } else if (segments.length) {
 6451 // if there is no @duration attribute, use the largest segment duration as
 6452 // the target duration
6453 segmentAttributes.duration = segments.reduce(function (max, segment) {
6454 return Math.max(max, Math.ceil(segment.duration));
6455 }, 0);
6456 } else {
6457 segmentAttributes.duration = 0;
6458 }
6459
6460 segmentsInfo.attributes = segmentAttributes;
6461 segmentsInfo.segments = segments; // This is a sidx box without actual segment information
6462
6463 if (segmentInfo.base && segmentAttributes.indexRange) {
6464 segmentsInfo.sidx = segments[0];
6465 segmentsInfo.segments = [];
6466 }
6467
6468 return segmentsInfo;
6469 };
6470
6471 var toPlaylists = function toPlaylists(representations) {
6472 return representations.map(generateSegments);
6473 };
6474
6475 var findChildren = function findChildren(element, name) {
6476 return from(element.childNodes).filter(function (_ref) {
6477 var tagName = _ref.tagName;
6478 return tagName === name;
6479 });
6480 };
6481
6482 var getContent = function getContent(element) {
6483 return element.textContent.trim();
6484 };
6485
6486 var parseDuration = function parseDuration(str) {
6487 var SECONDS_IN_YEAR = 365 * 24 * 60 * 60;
6488 var SECONDS_IN_MONTH = 30 * 24 * 60 * 60;
6489 var SECONDS_IN_DAY = 24 * 60 * 60;
6490 var SECONDS_IN_HOUR = 60 * 60;
6491 var SECONDS_IN_MIN = 60; // P10Y10M10DT10H10M10.1S
6492
6493 var durationRegex = /P(?:(\d*)Y)?(?:(\d*)M)?(?:(\d*)D)?(?:T(?:(\d*)H)?(?:(\d*)M)?(?:([\d.]*)S)?)?/;
6494 var match = durationRegex.exec(str);
6495
6496 if (!match) {
6497 return 0;
6498 }
6499
6500 var _match$slice = match.slice(1),
6501 year = _match$slice[0],
6502 month = _match$slice[1],
6503 day = _match$slice[2],
6504 hour = _match$slice[3],
6505 minute = _match$slice[4],
6506 second = _match$slice[5];
6507
6508 return parseFloat(year || 0) * SECONDS_IN_YEAR + parseFloat(month || 0) * SECONDS_IN_MONTH + parseFloat(day || 0) * SECONDS_IN_DAY + parseFloat(hour || 0) * SECONDS_IN_HOUR + parseFloat(minute || 0) * SECONDS_IN_MIN + parseFloat(second || 0);
6509 };
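// Illustrative examples (ISO 8601 durations):
//   parseDuration('PT1H30M');  // => 5400
//   parseDuration('P1DT0.5S'); // => 86400.5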
6510
6511 var parseDate = function parseDate(str) {
6512 // Date format without timezone according to ISO 8601
 6513 // YYYY-MM-DDThh:mm:ss.ssssss
 6514 var dateRegex = /^\d+-\d+-\d+T\d+:\d+:\d+(\.\d+)?$/; // If the date string does not specify a timezone, we must specify UTC. This is
6515 // expressed by ending with 'Z'
6516
6517 if (dateRegex.test(str)) {
6518 str += 'Z';
6519 }
6520
6521 return Date.parse(str);
6522 };
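// Illustrative example: a zoneless ISO 8601 date is treated as UTC:
//   parseDate('2000-01-01T00:00:00'); // => 946684800000 (same as with a trailing 'Z')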
6523
6524 var parsers = {
6525 /**
6526 * Specifies the duration of the entire Media Presentation. Format is a duration string
6527 * as specified in ISO 8601
6528 *
6529 * @param {string} value
6530 * value of attribute as a string
6531 * @return {number}
6532 * The duration in seconds
6533 */
6534 mediaPresentationDuration: function mediaPresentationDuration(value) {
6535 return parseDuration(value);
6536 },
6537
6538 /**
6539 * Specifies the Segment availability start time for all Segments referred to in this
6540 * MPD. For a dynamic manifest, it specifies the anchor for the earliest availability
6541 * time. Format is a date string as specified in ISO 8601
6542 *
6543 * @param {string} value
6544 * value of attribute as a string
6545 * @return {number}
6546 * The date as seconds from unix epoch
6547 */
6548 availabilityStartTime: function availabilityStartTime(value) {
6549 return parseDate(value) / 1000;
6550 },
6551
6552 /**
6553 * Specifies the smallest period between potential changes to the MPD. Format is a
6554 * duration string as specified in ISO 8601
6555 *
6556 * @param {string} value
6557 * value of attribute as a string
6558 * @return {number}
6559 * The duration in seconds
6560 */
6561 minimumUpdatePeriod: function minimumUpdatePeriod(value) {
6562 return parseDuration(value);
6563 },
6564
6565 /**
6566 * Specifies the suggested presentation delay. Format is a
6567 * duration string as specified in ISO 8601
6568 *
6569 * @param {string} value
6570 * value of attribute as a string
6571 * @return {number}
6572 * The duration in seconds
6573 */
6574 suggestedPresentationDelay: function suggestedPresentationDelay(value) {
6575 return parseDuration(value);
6576 },
6577
6578 /**
 6579 * Specifies the type of MPD. Can be either "static" or "dynamic"
6580 *
6581 * @param {string} value
6582 * value of attribute as a string
6583 *
6584 * @return {string}
6585 * The type as a string
6586 */
6587 type: function type(value) {
6588 return value;
6589 },
6590
6591 /**
6592 * Specifies the duration of the smallest time shifting buffer for any Representation
6593 * in the MPD. Format is a duration string as specified in ISO 8601
6594 *
6595 * @param {string} value
6596 * value of attribute as a string
6597 * @return {number}
6598 * The duration in seconds
6599 */
6600 timeShiftBufferDepth: function timeShiftBufferDepth(value) {
6601 return parseDuration(value);
6602 },
6603
6604 /**
 6605 * Specifies the PeriodStart time of the Period relative to the availabilityStartTime.
6606 * Format is a duration string as specified in ISO 8601
6607 *
6608 * @param {string} value
6609 * value of attribute as a string
6610 * @return {number}
6611 * The duration in seconds
6612 */
6613 start: function start(value) {
6614 return parseDuration(value);
6615 },
6616
6617 /**
6618 * Specifies the width of the visual presentation
6619 *
6620 * @param {string} value
6621 * value of attribute as a string
6622 * @return {number}
6623 * The parsed width
6624 */
6625 width: function width(value) {
6626 return parseInt(value, 10);
6627 },
6628
6629 /**
6630 * Specifies the height of the visual presentation
6631 *
6632 * @param {string} value
6633 * value of attribute as a string
6634 * @return {number}
6635 * The parsed height
6636 */
6637 height: function height(value) {
6638 return parseInt(value, 10);
6639 },
6640
6641 /**
6642 * Specifies the bitrate of the representation
6643 *
6644 * @param {string} value
6645 * value of attribute as a string
6646 * @return {number}
6647 * The parsed bandwidth
6648 */
6649 bandwidth: function bandwidth(value) {
6650 return parseInt(value, 10);
6651 },
6652
6653 /**
6654 * Specifies the number of the first Media Segment in this Representation in the Period
6655 *
6656 * @param {string} value
6657 * value of attribute as a string
6658 * @return {number}
6659 * The parsed number
6660 */
6661 startNumber: function startNumber(value) {
6662 return parseInt(value, 10);
6663 },
6664
6665 /**
 6666 * Specifies the timescale in units per second
6667 *
6668 * @param {string} value
6669 * value of attribute as a string
6670 * @return {number}
6671 * The parsed timescale
6672 */
6673 timescale: function timescale(value) {
6674 return parseInt(value, 10);
6675 },
6676
6677 /**
6678 * Specifies the presentationTimeOffset.
6679 *
6680 * @param {string} value
6681 * value of the attribute as a string
6682 *
6683 * @return {number}
6684 * The parsed presentationTimeOffset
6685 */
6686 presentationTimeOffset: function presentationTimeOffset(value) {
6687 return parseInt(value, 10);
6688 },
6689
6690 /**
6691 * Specifies the constant approximate Segment duration
6692 * NOTE: The <Period> element also contains an @duration attribute. This duration
6693 * specifies the duration of the Period. This attribute is currently not
6694 * supported by the rest of the parser, however we still check for it to prevent
6695 * errors.
6696 *
6697 * @param {string} value
6698 * value of attribute as a string
6699 * @return {number}
6700 * The parsed duration
6701 */
6702 duration: function duration(value) {
6703 var parsedValue = parseInt(value, 10);
6704
6705 if (isNaN(parsedValue)) {
6706 return parseDuration(value);
6707 }
6708
6709 return parsedValue;
6710 },
6711
6712 /**
6713 * Specifies the Segment duration, in units of the value of the @timescale.
6714 *
6715 * @param {string} value
6716 * value of attribute as a string
6717 * @return {number}
6718 * The parsed duration
6719 */
6720 d: function d(value) {
6721 return parseInt(value, 10);
6722 },
6723
6724 /**
 6725 * Specifies the MPD start time, in @timescale units, at which the first Segment in the
 6726 * series starts relative to the beginning of the Period
6727 *
6728 * @param {string} value
6729 * value of attribute as a string
6730 * @return {number}
6731 * The parsed time
6732 */
6733 t: function t(value) {
6734 return parseInt(value, 10);
6735 },
6736
6737 /**
6738 * Specifies the repeat count of the number of following contiguous Segments with the
6739 * same duration expressed by the value of @d
6740 *
6741 * @param {string} value
6742 * value of attribute as a string
6743 * @return {number}
6744 * The parsed number
6745 */
6746 r: function r(value) {
6747 return parseInt(value, 10);
6748 },
6749
6750 /**
6751 * Default parser for all other attributes. Acts as a no-op and just returns the value
6752 * as a string
6753 *
6754 * @param {string} value
6755 * value of attribute as a string
6756 * @return {string}
6757 * Unparsed value
6758 */
6759 DEFAULT: function DEFAULT(value) {
6760 return value;
6761 }
6762 };
6763 /**
6764 * Gets all the attributes and values of the provided node, parses attributes with known
6765 * types, and returns an object with attribute names mapped to values.
6766 *
6767 * @param {Node} el
6768 * The node to parse attributes from
6769 * @return {Object}
6770 * Object with all attributes of el parsed
6771 */
6772
6773 var parseAttributes = function parseAttributes(el) {
6774 if (!(el && el.attributes)) {
6775 return {};
6776 }
6777
6778 return from(el.attributes).reduce(function (a, e) {
6779 var parseFn = parsers[e.name] || parsers.DEFAULT;
6780 a[e.name] = parseFn(e.value);
6781 return a;
6782 }, {});
6783 };
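// Illustrative example: for a node like
//   <MPD type="dynamic" minimumUpdatePeriod="PT2S" availabilityStartTime="1970-01-01T00:00:00Z">
// parseAttributes returns
//   { type: 'dynamic', minimumUpdatePeriod: 2, availabilityStartTime: 0 }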
6784
6785 var keySystemsMap = {
6786 'urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b': 'org.w3.clearkey',
6787 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed': 'com.widevine.alpha',
6788 'urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95': 'com.microsoft.playready',
6789 'urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb': 'com.adobe.primetime'
6790 };
6791 /**
6792 * Builds a list of urls that is the product of the reference urls and BaseURL values
6793 *
6794 * @param {string[]} referenceUrls
6795 * List of reference urls to resolve to
6796 * @param {Node[]} baseUrlElements
6797 * List of BaseURL nodes from the mpd
6798 * @return {string[]}
6799 * List of resolved urls
6800 */
6801
6802 var buildBaseUrls = function buildBaseUrls(referenceUrls, baseUrlElements) {
6803 if (!baseUrlElements.length) {
6804 return referenceUrls;
6805 }
6806
6807 return flatten(referenceUrls.map(function (reference) {
6808 return baseUrlElements.map(function (baseUrlElement) {
6809 return resolveUrl$1(reference, getContent(baseUrlElement));
6810 });
6811 }));
6812 };
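// Illustrative example (resolution follows standard relative URL rules): given
// reference urls ['https://example.com/dash/'] and BaseURL nodes whose text
// content is 'media/' and 'https://cdn.example.com/', buildBaseUrls returns
//   ['https://example.com/dash/media/', 'https://cdn.example.com/']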
6813 /**
6814 * Contains all Segment information for its containing AdaptationSet
6815 *
6816 * @typedef {Object} SegmentInformation
6817 * @property {Object|undefined} template
6818 * Contains the attributes for the SegmentTemplate node
6819 * @property {Object[]|undefined} segmentTimeline
 6820 * Contains a list of attributes for each S node within the SegmentTimeline node
6821 * @property {Object|undefined} list
6822 * Contains the attributes for the SegmentList node
6823 * @property {Object|undefined} base
6824 * Contains the attributes for the SegmentBase node
6825 */
6826
6827 /**
6828 * Returns all available Segment information contained within the AdaptationSet node
6829 *
6830 * @param {Node} adaptationSet
6831 * The AdaptationSet node to get Segment information from
6832 * @return {SegmentInformation}
6833 * The Segment information contained within the provided AdaptationSet
6834 */
6835
6836
6837 var getSegmentInformation = function getSegmentInformation(adaptationSet) {
6838 var segmentTemplate = findChildren(adaptationSet, 'SegmentTemplate')[0];
6839 var segmentList = findChildren(adaptationSet, 'SegmentList')[0];
6840 var segmentUrls = segmentList && findChildren(segmentList, 'SegmentURL').map(function (s) {
6841 return merge({
6842 tag: 'SegmentURL'
6843 }, parseAttributes(s));
6844 });
6845 var segmentBase = findChildren(adaptationSet, 'SegmentBase')[0];
6846 var segmentTimelineParentNode = segmentList || segmentTemplate;
6847 var segmentTimeline = segmentTimelineParentNode && findChildren(segmentTimelineParentNode, 'SegmentTimeline')[0];
6848 var segmentInitializationParentNode = segmentList || segmentBase || segmentTemplate;
6849 var segmentInitialization = segmentInitializationParentNode && findChildren(segmentInitializationParentNode, 'Initialization')[0]; // SegmentTemplate is handled slightly differently, since it can have both
6850 // @initialization and an <Initialization> node. @initialization can be templated,
6851 // while the node can have a url and range specified. If the <SegmentTemplate> has
6852 // both @initialization and an <Initialization> subelement we opt to override with
6853 // the node, as this interaction is not defined in the spec.
6854
6855 var template = segmentTemplate && parseAttributes(segmentTemplate);
6856
6857 if (template && segmentInitialization) {
6858 template.initialization = segmentInitialization && parseAttributes(segmentInitialization);
6859 } else if (template && template.initialization) {
6860 // If it is @initialization we convert it to an object since this is the format that
6861 // later functions will rely on for the initialization segment. This is only valid
6862 // for <SegmentTemplate>
6863 template.initialization = {
6864 sourceURL: template.initialization
6865 };
6866 }
6867
6868 var segmentInfo = {
6869 template: template,
6870 segmentTimeline: segmentTimeline && findChildren(segmentTimeline, 'S').map(function (s) {
6871 return parseAttributes(s);
6872 }),
6873 list: segmentList && merge(parseAttributes(segmentList), {
6874 segmentUrls: segmentUrls,
6875 initialization: parseAttributes(segmentInitialization)
6876 }),
6877 base: segmentBase && merge(parseAttributes(segmentBase), {
6878 initialization: parseAttributes(segmentInitialization)
6879 })
6880 };
6881 Object.keys(segmentInfo).forEach(function (key) {
6882 if (!segmentInfo[key]) {
6883 delete segmentInfo[key];
6884 }
6885 });
6886 return segmentInfo;
6887 };
6888 /**
6889 * Contains Segment information and attributes needed to construct a Playlist object
6890 * from a Representation
6891 *
6892 * @typedef {Object} RepresentationInformation
6893 * @property {SegmentInformation} segmentInfo
6894 * Segment information for this Representation
6895 * @property {Object} attributes
6896 * Inherited attributes for this Representation
6897 */
6898
6899 /**
6900 * Maps a Representation node to an object containing Segment information and attributes
6901 *
6902 * @name inheritBaseUrlsCallback
6903 * @function
6904 * @param {Node} representation
6905 * Representation node from the mpd
6906 * @return {RepresentationInformation}
6907 * Representation information needed to construct a Playlist object
6908 */
6909
6910 /**
6911 * Returns a callback for Array.prototype.map for mapping Representation nodes to
6912 * Segment information and attributes using inherited BaseURL nodes.
6913 *
6914 * @param {Object} adaptationSetAttributes
6915 * Contains attributes inherited by the AdaptationSet
6916 * @param {string[]} adaptationSetBaseUrls
6917 * Contains list of resolved base urls inherited by the AdaptationSet
6918 * @param {SegmentInformation} adaptationSetSegmentInfo
6919 * Contains Segment information for the AdaptationSet
6920 * @return {inheritBaseUrlsCallback}
6921 * Callback map function
6922 */
6923
6924
6925 var inheritBaseUrls = function inheritBaseUrls(adaptationSetAttributes, adaptationSetBaseUrls, adaptationSetSegmentInfo) {
6926 return function (representation) {
6927 var repBaseUrlElements = findChildren(representation, 'BaseURL');
6928 var repBaseUrls = buildBaseUrls(adaptationSetBaseUrls, repBaseUrlElements);
6929 var attributes = merge(adaptationSetAttributes, parseAttributes(representation));
6930 var representationSegmentInfo = getSegmentInformation(representation);
6931 return repBaseUrls.map(function (baseUrl) {
6932 return {
6933 segmentInfo: merge(adaptationSetSegmentInfo, representationSegmentInfo),
6934 attributes: merge(attributes, {
6935 baseUrl: baseUrl
6936 })
6937 };
6938 });
6939 };
6940 };
6941 /**
 6942 * Transforms a series of content protection nodes to
6943 * an object containing pssh data by key system
6944 *
6945 * @param {Node[]} contentProtectionNodes
6946 * Content protection nodes
6947 * @return {Object}
6948 * Object containing pssh data by key system
6949 */
6950
6951
6952 var generateKeySystemInformation = function generateKeySystemInformation(contentProtectionNodes) {
6953 return contentProtectionNodes.reduce(function (acc, node) {
6954 var attributes = parseAttributes(node);
6955 var keySystem = keySystemsMap[attributes.schemeIdUri];
6956
6957 if (keySystem) {
6958 acc[keySystem] = {
6959 attributes: attributes
6960 };
6961 var psshNode = findChildren(node, 'cenc:pssh')[0];
6962
6963 if (psshNode) {
6964 var pssh = getContent(psshNode);
6965 var psshBuffer = pssh && decodeB64ToUint8Array(pssh);
6966 acc[keySystem].pssh = psshBuffer;
6967 }
6968 }
6969
6970 return acc;
6971 }, {});
6972 }; // defined in ANSI_SCTE 214-1 2016
6973
6974
6975 var parseCaptionServiceMetadata = function parseCaptionServiceMetadata(service) {
6976 // 608 captions
6977 if (service.schemeIdUri === 'urn:scte:dash:cc:cea-608:2015') {
6978 var values = typeof service.value !== 'string' ? [] : service.value.split(';');
6979 return values.map(function (value) {
6980 var channel;
6981 var language; // default language to value
6982
6983 language = value;
6984
6985 if (/^CC\d=/.test(value)) {
6986 var _value$split = value.split('=');
6987
6988 channel = _value$split[0];
6989 language = _value$split[1];
6990 } else if (/^CC\d$/.test(value)) {
6991 channel = value;
6992 }
6993
6994 return {
6995 channel: channel,
6996 language: language
6997 };
6998 });
6999 } else if (service.schemeIdUri === 'urn:scte:dash:cc:cea-708:2015') {
7000 var _values = typeof service.value !== 'string' ? [] : service.value.split(';');
7001
7002 return _values.map(function (value) {
7003 var flags = {
7004 // service or channel number 1-63
7005 'channel': undefined,
7006 // language is a 3ALPHA per ISO 639.2/B
7007 // field is required
7008 'language': undefined,
7009 // BIT 1/0 or ?
7010 // default value is 1, meaning 16:9 aspect ratio, 0 is 4:3, ? is unknown
7011 'aspectRatio': 1,
7012 // BIT 1/0
 7013 // easy reader flag indicates the text is tailored to the needs of beginning readers
7014 // default 0, or off
7015 'easyReader': 0,
7016 // BIT 1/0
7017 // If 3d metadata is present (CEA-708.1) then 1
7018 // default 0
7019 '3D': 0
7020 };
7021
7022 if (/=/.test(value)) {
7023 var _value$split2 = value.split('='),
7024 channel = _value$split2[0],
7025 _value$split2$ = _value$split2[1],
7026 opts = _value$split2$ === void 0 ? '' : _value$split2$;
7027
7028 flags.channel = channel;
7029 flags.language = value;
7030 opts.split(',').forEach(function (opt) {
7031 var _opt$split = opt.split(':'),
7032 name = _opt$split[0],
7033 val = _opt$split[1];
7034
7035 if (name === 'lang') {
 7036 flags.language = val; // er for easyReader
7037 } else if (name === 'er') {
7038 flags.easyReader = Number(val); // war for wide aspect ratio
7039 } else if (name === 'war') {
7040 flags.aspectRatio = Number(val);
7041 } else if (name === '3D') {
7042 flags['3D'] = Number(val);
7043 }
7044 });
7045 } else {
7046 flags.language = value;
7047 }
7048
7049 if (flags.channel) {
7050 flags.channel = 'SERVICE' + flags.channel;
7051 }
7052
7053 return flags;
7054 });
7055 }
7056 };
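// Illustrative examples:
//   608: { schemeIdUri: 'urn:scte:dash:cc:cea-608:2015', value: 'CC1=eng;CC3=swe' }
//     => [{ channel: 'CC1', language: 'eng' }, { channel: 'CC3', language: 'swe' }]
//   708: { schemeIdUri: 'urn:scte:dash:cc:cea-708:2015', value: '1=lang:eng;2=lang:deu,war:0' }
//     => [{ channel: 'SERVICE1', language: 'eng', aspectRatio: 1, easyReader: 0, '3D': 0 },
//         { channel: 'SERVICE2', language: 'deu', aspectRatio: 0, easyReader: 0, '3D': 0 }]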
7057 /**
7058 * Maps an AdaptationSet node to a list of Representation information objects
7059 *
7060 * @name toRepresentationsCallback
7061 * @function
7062 * @param {Node} adaptationSet
7063 * AdaptationSet node from the mpd
7064 * @return {RepresentationInformation[]}
 7065 * List of objects containing Representation information
7066 */
7067
7068 /**
7069 * Returns a callback for Array.prototype.map for mapping AdaptationSet nodes to a list of
7070 * Representation information objects
7071 *
7072 * @param {Object} periodAttributes
7073 * Contains attributes inherited by the Period
7074 * @param {string[]} periodBaseUrls
7075 * Contains list of resolved base urls inherited by the Period
7076 * @param {string[]} periodSegmentInfo
7077 * Contains Segment Information at the period level
7078 * @return {toRepresentationsCallback}
7079 * Callback map function
7080 */
7081
7082
7083 var toRepresentations = function toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo) {
7084 return function (adaptationSet) {
7085 var adaptationSetAttributes = parseAttributes(adaptationSet);
7086 var adaptationSetBaseUrls = buildBaseUrls(periodBaseUrls, findChildren(adaptationSet, 'BaseURL'));
7087 var role = findChildren(adaptationSet, 'Role')[0];
7088 var roleAttributes = {
7089 role: parseAttributes(role)
7090 };
7091 var attrs = merge(periodAttributes, adaptationSetAttributes, roleAttributes);
7092 var accessibility = findChildren(adaptationSet, 'Accessibility')[0];
7093 var captionServices = parseCaptionServiceMetadata(parseAttributes(accessibility));
7094
7095 if (captionServices) {
7096 attrs = merge(attrs, {
7097 captionServices: captionServices
7098 });
7099 }
7100
7101 var label = findChildren(adaptationSet, 'Label')[0];
7102
7103 if (label && label.childNodes.length) {
7104 var labelVal = label.childNodes[0].nodeValue.trim();
7105 attrs = merge(attrs, {
7106 label: labelVal
7107 });
7108 }
7109
7110 var contentProtection = generateKeySystemInformation(findChildren(adaptationSet, 'ContentProtection'));
7111
7112 if (Object.keys(contentProtection).length) {
7113 attrs = merge(attrs, {
7114 contentProtection: contentProtection
7115 });
7116 }
7117
7118 var segmentInfo = getSegmentInformation(adaptationSet);
7119 var representations = findChildren(adaptationSet, 'Representation');
7120 var adaptationSetSegmentInfo = merge(periodSegmentInfo, segmentInfo);
7121 return flatten(representations.map(inheritBaseUrls(attrs, adaptationSetBaseUrls, adaptationSetSegmentInfo)));
7122 };
7123 };
7124 /**
7125 * Contains all period information for mapping nodes onto adaptation sets.
7126 *
7127 * @typedef {Object} PeriodInformation
7128 * @property {Node} period.node
7129 * Period node from the mpd
7130 * @property {Object} period.attributes
7131 * Parsed period attributes from node plus any added
7132 */
7133
7134 /**
7135 * Maps a PeriodInformation object to a list of Representation information objects for all
7136 * AdaptationSet nodes contained within the Period.
7137 *
7138 * @name toAdaptationSetsCallback
7139 * @function
7140 * @param {PeriodInformation} period
7141 * Period object containing necessary period information
7142 * @param {number} periodIndex
7143 * Index of the Period within the mpd
7144 * @return {RepresentationInformation[]}
7145 * List of objects containing Representation information
7146 */
7147
7148 /**
7149 * Returns a callback for Array.prototype.map for mapping Period nodes to a list of
7150 * Representation information objects
7151 *
7152 * @param {Object} mpdAttributes
7153 * Contains attributes inherited by the mpd
7154 * @param {string[]} mpdBaseUrls
7155 * Contains list of resolved base urls inherited by the mpd
7156 * @return {toAdaptationSetsCallback}
7157 * Callback map function
7158 */
7159
7160
7161 var toAdaptationSets = function toAdaptationSets(mpdAttributes, mpdBaseUrls) {
7162 return function (period, index) {
7163 var periodBaseUrls = buildBaseUrls(mpdBaseUrls, findChildren(period.node, 'BaseURL'));
7164 var parsedPeriodId = parseInt(period.attributes.id, 10); // fallback to mapping index if Period@id is not a number
7165
7166 var periodIndex = window.isNaN(parsedPeriodId) ? index : parsedPeriodId;
7167 var periodAttributes = merge(mpdAttributes, {
7168 periodIndex: periodIndex,
7169 periodStart: period.attributes.start
7170 });
7171
7172 if (typeof period.attributes.duration === 'number') {
7173 periodAttributes.periodDuration = period.attributes.duration;
7174 }
7175
7176 var adaptationSets = findChildren(period.node, 'AdaptationSet');
7177 var periodSegmentInfo = getSegmentInformation(period.node);
7178 return flatten(adaptationSets.map(toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo)));
7179 };
7180 };
7181 /**
7182 * Gets Period@start property for a given period.
7183 *
7184 * @param {Object} options
7185 * Options object
7186 * @param {Object} options.attributes
7187 * Period attributes
7188 * @param {Object} [options.priorPeriodAttributes]
7189 * Prior period attributes (if prior period is available)
7190 * @param {string} options.mpdType
7191 * The MPD@type these periods came from
7192 * @return {number|null}
7193 * The period start, or null if it's an early available period or error
7194 */
7195
7196
7197 var getPeriodStart = function getPeriodStart(_ref) {
7198 var attributes = _ref.attributes,
7199 priorPeriodAttributes = _ref.priorPeriodAttributes,
7200 mpdType = _ref.mpdType; // Summary of period start time calculation from DASH spec section 5.3.2.1
7201 //
7202 // A period's start is the first period's start + time elapsed after playing all
7203 // prior periods to this one. Periods continue one after the other in time (without
7204 // gaps) until the end of the presentation.
7205 //
7206 // The value of Period@start should be:
7207 // 1. if Period@start is present: value of Period@start
7208 // 2. if previous period exists and it has @duration: previous Period@start +
7209 // previous Period@duration
7210 // 3. if this is first period and MPD@type is 'static': 0
7211 // 4. in all other cases, consider the period an "early available period" (note: not
7212 // currently supported)
7213 // (1)
7214
7215 if (typeof attributes.start === 'number') {
7216 return attributes.start;
7217 } // (2)
7218
7219
7220 if (priorPeriodAttributes && typeof priorPeriodAttributes.start === 'number' && typeof priorPeriodAttributes.duration === 'number') {
7221 return priorPeriodAttributes.start + priorPeriodAttributes.duration;
7222 } // (3)
7223
7224
7225 if (!priorPeriodAttributes && mpdType === 'static') {
7226 return 0;
7227 } // (4)
7228 // There is currently no logic for calculating the Period@start value if there is
7229 // no Period@start or prior Period@start and Period@duration available. This is not made
7230 // explicit by the DASH interop guidelines or the DASH spec, however, since there's
7231 // nothing about any other resolution strategies, it's implied. Thus, this case should
7232 // be considered an early available period, or error, and null should suffice for both
7233 // of those cases.
7234
7235
7236 return null;
7237 };
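// Illustrative sketch, not part of the original source: with no Period@start,
// case (2) above derives the start from the prior period.
//
//   getPeriodStart({
//     attributes: {},
//     priorPeriodAttributes: {start: 0, duration: 30},
//     mpdType: 'dynamic'
//   }); // => 30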
7238 /**
7239 * Traverses the mpd xml tree to generate a list of Representation information objects
7240 * that have inherited attributes from parent nodes
7241 *
7242 * @param {Node} mpd
7243 * The root node of the mpd
7244 * @param {Object} options
7245 * Available options for inheritAttributes
7246 * @param {string} options.manifestUri
7247 * The uri source of the mpd
7248 * @param {number} options.NOW
7249 * Current time per DASH IOP. Default is current time in ms since epoch
7250 * @param {number} options.clientOffset
7251 * Client time difference from NOW (in milliseconds)
7252 * @return {RepresentationInformation[]}
7253 * List of objects containing Representation information
7254 */
7255
7256
7257 var inheritAttributes = function inheritAttributes(mpd, options) {
7258 if (options === void 0) {
7259 options = {};
7260 }
7261
7262 var _options = options,
7263 _options$manifestUri = _options.manifestUri,
7264 manifestUri = _options$manifestUri === void 0 ? '' : _options$manifestUri,
7265 _options$NOW = _options.NOW,
7266 NOW = _options$NOW === void 0 ? Date.now() : _options$NOW,
7267 _options$clientOffset = _options.clientOffset,
7268 clientOffset = _options$clientOffset === void 0 ? 0 : _options$clientOffset;
7269 var periodNodes = findChildren(mpd, 'Period');
7270
7271 if (!periodNodes.length) {
7272 throw new Error(errors.INVALID_NUMBER_OF_PERIOD);
7273 }
7274
7275 var locations = findChildren(mpd, 'Location');
7276 var mpdAttributes = parseAttributes(mpd);
7277 var mpdBaseUrls = buildBaseUrls([manifestUri], findChildren(mpd, 'BaseURL')); // See DASH spec section 5.3.1.2, Semantics of MPD element. Default type to 'static'.
7278
7279 mpdAttributes.type = mpdAttributes.type || 'static';
7280 mpdAttributes.sourceDuration = mpdAttributes.mediaPresentationDuration || 0;
7281 mpdAttributes.NOW = NOW;
7282 mpdAttributes.clientOffset = clientOffset;
7283
7284 if (locations.length) {
7285 mpdAttributes.locations = locations.map(getContent);
7286 }
7287
7288 var periods = []; // Since toAdaptationSets acts on individual periods right now, the simplest approach to
7289 // adding properties that require looking at prior periods is to parse attributes and add
7290 // missing ones before toAdaptationSets is called. If more such properties are added, it
7291 // may be better to refactor toAdaptationSets.
7292
7293 periodNodes.forEach(function (node, index) {
7294 var attributes = parseAttributes(node); // Use the last modified prior period, as it may contain added information necessary
7295 // for this period.
7296
7297 var priorPeriod = periods[index - 1];
7298 attributes.start = getPeriodStart({
7299 attributes: attributes,
7300 priorPeriodAttributes: priorPeriod ? priorPeriod.attributes : null,
7301 mpdType: mpdAttributes.type
7302 });
7303 periods.push({
7304 node: node,
7305 attributes: attributes
7306 });
7307 });
7308 return {
7309 locations: mpdAttributes.locations,
7310 representationInfo: flatten(periods.map(toAdaptationSets(mpdAttributes, mpdBaseUrls)))
7311 };
7312 };
7313
7314 var stringToMpdXml = function stringToMpdXml(manifestString) {
7315 if (manifestString === '') {
7316 throw new Error(errors.DASH_EMPTY_MANIFEST);
7317 }
7318
7319 var parser = new xmldom.DOMParser();
7320 var xml;
7321 var mpd;
7322
7323 try {
7324 xml = parser.parseFromString(manifestString, 'application/xml');
7325 mpd = xml && xml.documentElement.tagName === 'MPD' ? xml.documentElement : null;
7326 } catch (e) {// ie 11 throws on invalid xml
7327 }
7328
7329 if (!mpd || mpd && mpd.getElementsByTagName('parsererror').length > 0) {
7330 throw new Error(errors.DASH_INVALID_XML);
7331 }
7332
7333 return mpd;
7334 };
7335 /**
7336 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
7337 *
7338 * @param {string} mpd
7339 * XML string of the MPD manifest
7340 * @return {Object|null}
7341 * Attributes of UTCTiming node specified in the manifest. Null if none found
7342 */
7343
7344
7345 var parseUTCTimingScheme = function parseUTCTimingScheme(mpd) {
7346 var UTCTimingNode = findChildren(mpd, 'UTCTiming')[0];
7347
7348 if (!UTCTimingNode) {
7349 return null;
7350 }
7351
7352 var attributes = parseAttributes(UTCTimingNode);
7353
7354 switch (attributes.schemeIdUri) {
7355 case 'urn:mpeg:dash:utc:http-head:2014':
7356 case 'urn:mpeg:dash:utc:http-head:2012':
7357 attributes.method = 'HEAD';
7358 break;
7359
7360 case 'urn:mpeg:dash:utc:http-xsdate:2014':
7361 case 'urn:mpeg:dash:utc:http-iso:2014':
7362 case 'urn:mpeg:dash:utc:http-xsdate:2012':
7363 case 'urn:mpeg:dash:utc:http-iso:2012':
7364 attributes.method = 'GET';
7365 break;
7366
7367 case 'urn:mpeg:dash:utc:direct:2014':
7368 case 'urn:mpeg:dash:utc:direct:2012':
7369 attributes.method = 'DIRECT';
7370 attributes.value = Date.parse(attributes.value);
7371 break;
7372
7373 case 'urn:mpeg:dash:utc:http-ntp:2014':
7374 case 'urn:mpeg:dash:utc:ntp:2014':
7375 case 'urn:mpeg:dash:utc:sntp:2014':
7376 default:
7377 throw new Error(errors.UNSUPPORTED_UTC_TIMING_SCHEME);
7378 }
7379
7380 return attributes;
7381 };
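// Illustrative sketch, not part of the original source: a direct-scheme node
// such as <UTCTiming schemeIdUri="urn:mpeg:dash:utc:direct:2014"
// value="2020-01-01T00:00:00Z"/> parses to:
//
//   {schemeIdUri: 'urn:mpeg:dash:utc:direct:2014', method: 'DIRECT', value: 1577836800000}
//
// where value has been run through Date.parse.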
7382
7383 var parse = function parse(manifestString, options) {
7384 if (options === void 0) {
7385 options = {};
7386 }
7387
7388 var parsedManifestInfo = inheritAttributes(stringToMpdXml(manifestString), options);
7389 var playlists = toPlaylists(parsedManifestInfo.representationInfo);
7390 return toM3u8(playlists, parsedManifestInfo.locations, options.sidxMapping);
7391 };
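// Illustrative sketch, not part of the original source; the URL is a
// hypothetical example:
//
//   var manifest = parse(mpdXmlString, {manifestUri: 'https://example.com/dash.mpd'});
//   manifest.playlists; // one playlist per Representation, with inherited attributes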
7392 /**
7393 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
7394 *
7395 * @param {string} manifestString
7396 * XML string of the MPD manifest
7397 * @return {Object|null}
7398 * Attributes of UTCTiming node specified in the manifest. Null if none found
7399 */
7400
7401
7402 var parseUTCTiming = function parseUTCTiming(manifestString) {
7403 return parseUTCTimingScheme(stringToMpdXml(manifestString));
7404 };
7405
7406 var MAX_UINT32 = Math.pow(2, 32);
7407
7408 var parseSidx = function parseSidx(data) {
7409 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
7410 result = {
7411 version: data[0],
7412 flags: new Uint8Array(data.subarray(1, 4)),
7413 references: [],
7414 referenceId: view.getUint32(4),
7415 timescale: view.getUint32(8)
7416 },
7417 i = 12;
7418
7419 if (result.version === 0) {
7420 result.earliestPresentationTime = view.getUint32(i);
7421 result.firstOffset = view.getUint32(i + 4);
7422 i += 8;
7423 } else {
7424 // read 64 bits
7425 result.earliestPresentationTime = view.getUint32(i) * MAX_UINT32 + view.getUint32(i + 4);
7426 result.firstOffset = view.getUint32(i + 8) * MAX_UINT32 + view.getUint32(i + 12);
7427 i += 16;
7428 }
7429
7430 i += 2; // reserved
7431
7432 var referenceCount = view.getUint16(i);
7433 i += 2; // start of references
7434
7435 for (; referenceCount > 0; i += 12, referenceCount--) {
7436 result.references.push({
7437 referenceType: (data[i] & 0x80) >>> 7,
7438 referencedSize: view.getUint32(i) & 0x7FFFFFFF,
7439 subsegmentDuration: view.getUint32(i + 4),
7440 startsWithSap: !!(data[i + 8] & 0x80),
7441 sapType: (data[i + 8] & 0x70) >>> 4,
7442 sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
7443 });
7444 }
7445
7446 return result;
7447 };
7448
7449 var parseSidx_1 = parseSidx;
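// Illustrative sketch, not part of the original source: sidxPayloadBytes is a
// hypothetical Uint8Array of a 'sidx' box payload (the box minus its 8-byte
// header, as sliced in addSidxSegments_ further below):
//
//   var sidx = parseSidx(sidxPayloadBytes);
//   sidx.timescale;                    // e.g. 90000
//   sidx.references[0].referencedSize; // byte length of the first subsegment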
7450
7451 // const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
7452 // we used to do this with log2 but BigInt does not support builtin math
7453 // Math.ceil(log2(x));
7454
7455
7456 var countBits = function countBits(x) {
7457 return x.toString(2).length;
7458 }; // count the number of whole bytes it would take to represent a number
7459
7460 var countBytes = function countBytes(x) {
7461 return Math.ceil(countBits(x) / 8);
7462 };
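// For example (illustrative, not part of the original source):
//
//   countBits(0x100);  // => 9, since (0x100).toString(2) is '100000000'
//   countBytes(0x100); // => 2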
7463 var isTypedArray = function isTypedArray(obj) {
7464 return ArrayBuffer.isView(obj);
7465 };
7466 var toUint8 = function toUint8(bytes) {
7467 if (bytes instanceof Uint8Array) {
7468 return bytes;
7469 }
7470
7471 if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
7472 // any non-number or NaN leads to empty uint8array
7473 // eslint-disable-next-line
7474 if (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {
7475 bytes = 0;
7476 } else {
7477 bytes = [bytes];
7478 }
7479 }
7480
7481 return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);
7482 };
7483 var BigInt = window.BigInt || Number;
7484 var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
7485 (function () {
7486 var a = new Uint16Array([0xFFCC]);
7487 var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
7488
7489 if (b[0] === 0xFF) {
7490 return 'big';
7491 }
7492
7493 if (b[0] === 0xCC) {
7494 return 'little';
7495 }
7496
7497 return 'unknown';
7498 })();
7499 var bytesToNumber = function bytesToNumber(bytes, _temp) {
7500 var _ref = _temp === void 0 ? {} : _temp,
7501 _ref$signed = _ref.signed,
7502 signed = _ref$signed === void 0 ? false : _ref$signed,
7503 _ref$le = _ref.le,
7504 le = _ref$le === void 0 ? false : _ref$le;
7505
7506 bytes = toUint8(bytes);
7507 var fn = le ? 'reduce' : 'reduceRight';
7508 var obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
7509 var number = obj.call(bytes, function (total, byte, i) {
7510 var exponent = le ? i : Math.abs(i + 1 - bytes.length);
7511 return total + BigInt(byte) * BYTE_TABLE[exponent];
7512 }, BigInt(0));
7513
7514 if (signed) {
7515 var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
7516 number = BigInt(number);
7517
7518 if (number > max) {
7519 number -= max;
7520 number -= max;
7521 number -= BigInt(2);
7522 }
7523 }
7524
7525 return Number(number);
7526 };
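// For example (illustrative, not part of the original source):
//
//   bytesToNumber([0x01, 0x00]);             // => 256 (big endian by default)
//   bytesToNumber([0x01, 0x00], {le: true}); // => 1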
7527 var numberToBytes = function numberToBytes(number, _temp2) {
7528 var _ref2 = _temp2 === void 0 ? {} : _temp2,
7529 _ref2$le = _ref2.le,
7530 le = _ref2$le === void 0 ? false : _ref2$le; // eslint-disable-next-line
7531
7532
7533 if (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {
7534 number = 0;
7535 }
7536
7537 number = BigInt(number);
7538 var byteCount = countBytes(number);
7539 var bytes = new Uint8Array(new ArrayBuffer(byteCount));
7540
7541 for (var i = 0; i < byteCount; i++) {
7542 var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
7543 bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));
7544
7545 if (number < 0) {
7546 bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
7547 bytes[byteIndex] -= i === 0 ? 1 : 2;
7548 }
7549 }
7550
7551 return bytes;
7552 };
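// For example (illustrative, not part of the original source):
//
//   numberToBytes(256);             // => Uint8Array [0x01, 0x00]
//   numberToBytes(256, {le: true}); // => Uint8Array [0x00, 0x01]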
7553 var stringToBytes = function stringToBytes(string, stringIsBytes) {
7554 if (typeof string !== 'string' && string && typeof string.toString === 'function') {
7555 string = string.toString();
7556 }
7557
7558 if (typeof string !== 'string') {
7559 return new Uint8Array();
7560 } // If the string already is bytes, we don't have to do this
7561 // otherwise we do this so that we split multi length characters
7562 // into individual bytes
7563
7564
7565 if (!stringIsBytes) {
7566 string = unescape(encodeURIComponent(string));
7567 }
7568
7569 var view = new Uint8Array(string.length);
7570
7571 for (var i = 0; i < string.length; i++) {
7572 view[i] = string.charCodeAt(i);
7573 }
7574
7575 return view;
7576 };
7577 var concatTypedArrays = function concatTypedArrays() {
7578 for (var _len = arguments.length, buffers = new Array(_len), _key = 0; _key < _len; _key++) {
7579 buffers[_key] = arguments[_key];
7580 }
7581
7582 buffers = buffers.filter(function (b) {
7583 return b && (b.byteLength || b.length) && typeof b !== 'string';
7584 });
7585
7586 if (buffers.length <= 1) {
7587 // for 0 length we will return empty uint8
7588 // for 1 length we return the first uint8
7589 return toUint8(buffers[0]);
7590 }
7591
7592 var totalLen = buffers.reduce(function (total, buf, i) {
7593 return total + (buf.byteLength || buf.length);
7594 }, 0);
7595 var tempBuffer = new Uint8Array(totalLen);
7596 var offset = 0;
7597 buffers.forEach(function (buf) {
7598 buf = toUint8(buf);
7599 tempBuffer.set(buf, offset);
7600 offset += buf.byteLength;
7601 });
7602 return tempBuffer;
7603 };
7604 /**
7605 * Check if the bytes "b" are contained within bytes "a".
7606 *
7607 * @param {Uint8Array|Array} a
7608 * Bytes to check in
7609 *
7610 * @param {Uint8Array|Array} b
7611 * Bytes to check for
7612 *
7613 * @param {Object} options
7614 * options
7615 *
7616 * @param {number} [options.offset=0]
7617 * offset to use when looking at bytes in a
7618 *
7619 * @param {Array|Uint8Array} [options.mask=[]]
7620 * mask to use on bytes before comparison.
7621 *
7622 * @return {boolean}
7623 * If all bytes in b are inside of a, taking into account
7624 * bit masks.
7625 */
7626
7627 var bytesMatch = function bytesMatch(a, b, _temp3) {
7628 var _ref3 = _temp3 === void 0 ? {} : _temp3,
7629 _ref3$offset = _ref3.offset,
7630 offset = _ref3$offset === void 0 ? 0 : _ref3$offset,
7631 _ref3$mask = _ref3.mask,
7632 mask = _ref3$mask === void 0 ? [] : _ref3$mask;
7633
7634 a = toUint8(a);
7635 b = toUint8(b); // ie 11 does not support uint8 every
7636
7637 var fn = b.every ? b.every : Array.prototype.every;
7638 return b.length && a.length - offset >= b.length && // ie 11 doesn't support every on uint8
7639 fn.call(b, function (bByte, i) {
7640 var aByte = mask[i] ? mask[i] & a[offset + i] : a[offset + i];
7641 return bByte === aByte;
7642 });
7643 };
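// For example (illustrative, not part of the original source), a mask lets
// only the bits we care about take part in the comparison, as in the aac
// detection further below:
//
//   bytesMatch([0xFF, 0xF1], [0xFF, 0x10], {mask: [0xFF, 0x16]}); // => true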
7644
7645 var ID3 = toUint8([0x49, 0x44, 0x33]);
7646 var getId3Size = function getId3Size(bytes, offset) {
7647 if (offset === void 0) {
7648 offset = 0;
7649 }
7650
7651 bytes = toUint8(bytes);
7652 var flags = bytes[offset + 5];
7653 var returnSize = bytes[offset + 6] << 21 | bytes[offset + 7] << 14 | bytes[offset + 8] << 7 | bytes[offset + 9];
7654 var footerPresent = (flags & 16) >> 4;
7655
7656 if (footerPresent) {
7657 return returnSize + 20;
7658 }
7659
7660 return returnSize + 10;
7661 };
7662 var getId3Offset = function getId3Offset(bytes, offset) {
7663 if (offset === void 0) {
7664 offset = 0;
7665 }
7666
7667 bytes = toUint8(bytes);
7668
7669 if (bytes.length - offset < 10 || !bytesMatch(bytes, ID3, {
7670 offset: offset
7671 })) {
7672 return offset;
7673 }
7674
7675 offset += getId3Size(bytes, offset); // recursive check for id3 tags as some files
7676 // have multiple ID3 tag sections even though
7677 // they should not.
7678
7679 return getId3Offset(bytes, offset);
7680 };
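// For example (illustrative, not part of the original source), a minimal
// 10-byte ID3 header with a zero-length payload moves the offset to 10:
//
//   getId3Offset(toUint8([0x49, 0x44, 0x33, 4, 0, 0, 0, 0, 0, 0])); // => 10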
7681
7682 var normalizePath$1 = function normalizePath(path) {
7683 if (typeof path === 'string') {
7684 return stringToBytes(path);
7685 }
7686
7687 if (typeof path === 'number') {
7688 return path;
7689 }
7690
7691 return path;
7692 };
7693
7694 var normalizePaths$1 = function normalizePaths(paths) {
7695 if (!Array.isArray(paths)) {
7696 return [normalizePath$1(paths)];
7697 }
7698
7699 return paths.map(function (p) {
7700 return normalizePath$1(p);
7701 });
7702 };
7703 /**
7704 * find any number of boxes by name given a path to it in an iso bmff
7705 * such as mp4.
7706 *
7707 * @param {TypedArray} bytes
7708 * bytes for the iso bmff to search for boxes in
7709 *
7710 * @param {Uint8Array[]|string[]|string|Uint8Array} paths
7711 * An array of paths or a single path representing the name
7712 * of boxes to search through in bytes. Paths may be
7713 * uint8 (character codes) or strings.
7714 *
7715 * @param {boolean} [complete=false]
7716 * Should we search only for complete boxes on the final path.
7717 * This is very useful when you do not want to get back partial boxes
7718 * in the case of streaming files.
7719 *
7720 * @return {Uint8Array[]}
7721 * An array with the data of each box found at the end of the path.
7722 */
7723
7724 var findBox = function findBox(bytes, paths, complete) {
7725 if (complete === void 0) {
7726 complete = false;
7727 }
7728
7729 paths = normalizePaths$1(paths);
7730 bytes = toUint8(bytes);
7731 var results = [];
7732
7733 if (!paths.length) {
7734 // short-circuit the search for empty paths
7735 return results;
7736 }
7737
7738 var i = 0;
7739
7740 while (i < bytes.length) {
7741 var size = (bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]) >>> 0;
7742 var type = bytes.subarray(i + 4, i + 8); // invalid box format.
7743
7744 if (size === 0) {
7745 break;
7746 }
7747
7748 var end = i + size;
7749
7750 if (end > bytes.length) {
7751 // this box is bigger than the number of bytes we have
7752 // and complete is set, we cannot find any more boxes.
7753 if (complete) {
7754 break;
7755 }
7756
7757 end = bytes.length;
7758 }
7759
7760 var data = bytes.subarray(i + 8, end);
7761
7762 if (bytesMatch(type, paths[0])) {
7763 if (paths.length === 1) {
7764 // this is the end of the path and we've found the box we were
7765 // looking for
7766 results.push(data);
7767 } else {
7768 // recursively search for the next box along the path
7769 results.push.apply(results, findBox(data, paths.slice(1), complete));
7770 }
7771 }
7772
7773 i = end;
7774 } // we've finished searching all of bytes
7775
7776
7777 return results;
7778 };
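// Illustrative sketch, not part of the original source: initBytes is a
// hypothetical Uint8Array of an mp4 initialization segment:
//
//   var traks = findBox(initBytes, ['moov', 'trak']);
//   traks.length; // number of tracks found in the file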
7779
7780 // https://matroska-org.github.io/libebml/specs.html
7781 // https://www.matroska.org/technical/elements.html
7782 // https://www.webmproject.org/docs/container/
7783
7784 var EBML_TAGS = {
7785 EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),
7786 DocType: toUint8([0x42, 0x82]),
7787 Segment: toUint8([0x18, 0x53, 0x80, 0x67]),
7788 SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),
7789 Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),
7790 Track: toUint8([0xAE]),
7791 TrackNumber: toUint8([0xd7]),
7792 DefaultDuration: toUint8([0x23, 0xe3, 0x83]),
7793 TrackEntry: toUint8([0xAE]),
7794 TrackType: toUint8([0x83]),
7795 FlagDefault: toUint8([0x88]),
7796 CodecID: toUint8([0x86]),
7797 CodecPrivate: toUint8([0x63, 0xA2]),
7798 VideoTrack: toUint8([0xe0]),
7799 AudioTrack: toUint8([0xe1]),
7800 // Not used yet, but will be used for live webm/mkv
7801 // see https://www.matroska.org/technical/basics.html#block-structure
7802 // see https://www.matroska.org/technical/basics.html#simpleblock-structure
7803 Cluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),
7804 Timestamp: toUint8([0xE7]),
7805 TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),
7806 BlockGroup: toUint8([0xA0]),
7807 BlockDuration: toUint8([0x9B]),
7808 Block: toUint8([0xA1]),
7809 SimpleBlock: toUint8([0xA3])
7810 };
7811 /**
7812 * This is a simple table to determine the length
7813 * of things in ebml. The length is one based (starts at 1,
7814 * rather than zero) and for every zero bit before a one bit
7815 * we add one to length. We also need this table because in some
7816 * cases we have to xor all the length bits from another value.
7817 */
7818
7819 var LENGTH_TABLE = [128, 64, 32, 16, 8, 4, 2, 1];
7820
7821 var getLength = function getLength(byte) {
7822 var len = 1;
7823
7824 for (var i = 0; i < LENGTH_TABLE.length; i++) {
7825 if (byte & LENGTH_TABLE[i]) {
7826 break;
7827 }
7828
7829 len++;
7830 }
7831
7832 return len;
7833 }; // length in ebml is stored in the first 4 to 8 bits
7834 // of the first byte. 4 for the id length and 8 for the
7835 // data size length. Length is measured by converting the number to binary
7836 // then 1 + the number of zeros before a 1 is encountered starting
7837 // from the left.
7838
7839
7840 var getvint = function getvint(bytes, offset, removeLength, signed) {
7841 if (removeLength === void 0) {
7842 removeLength = true;
7843 }
7844
7845 if (signed === void 0) {
7846 signed = false;
7847 }
7848
7849 var length = getLength(bytes[offset]);
7850 var valueBytes = bytes.subarray(offset, offset + length); // NOTE: in the removeLength branch below we **copy** the bytes rather than
7851 // using subarray, because they are modified to remove the dataSizeLen bits
7852 // and we do not want to mutate the original data. normally we could just
7853 // call slice on uint8array but ie 11 does not support that...
7854
7855 if (removeLength) {
7856 valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
7857 valueBytes[0] ^= LENGTH_TABLE[length - 1];
7858 }
7859
7860 return {
7861 length: length,
7862 value: bytesToNumber(valueBytes, {
7863 signed: signed
7864 }),
7865 bytes: valueBytes
7866 };
7867 };
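// For example (illustrative, not part of the original source), the byte 0x81
// has no leading zeros, so its length is 1, and removing the length bit
// leaves the value 1:
//
//   getvint(toUint8([0x81]), 0); // => {length: 1, value: 1, bytes: [0x01]}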
7868
7869 var normalizePath = function normalizePath(path) {
7870 if (typeof path === 'string') {
7871 return path.match(/.{1,2}/g).map(function (p) {
7872 return normalizePath(p);
7873 });
7874 }
7875
7876 if (typeof path === 'number') {
7877 return numberToBytes(path);
7878 }
7879
7880 return path;
7881 };
7882
7883 var normalizePaths = function normalizePaths(paths) {
7884 if (!Array.isArray(paths)) {
7885 return [normalizePath(paths)];
7886 }
7887
7888 return paths.map(function (p) {
7889 return normalizePath(p);
7890 });
7891 };
7892
7893 var getInfinityDataSize = function getInfinityDataSize(id, bytes, offset) {
7894 if (offset >= bytes.length) {
7895 return bytes.length;
7896 }
7897
7898 var innerid = getvint(bytes, offset, false);
7899
7900 if (bytesMatch(id.bytes, innerid.bytes)) {
7901 return offset;
7902 }
7903
7904 var dataHeader = getvint(bytes, offset + innerid.length);
7905 return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
7906 };
7907 /**
7908 * Notes on the EBML format.
7909 *
7910 * EBML uses "vint" tags. Every vint tag contains
7911 * two parts
7912 *
7913 * 1. The length from the first byte. You get this by
7914 * converting the byte to binary and counting the zeros
7915 * before a 1. Then you add 1 to that. Examples
7916 * 00011111 = length 4 because there are 3 zeros before a 1.
7917 * 00100000 = length 3 because there are 2 zeros before a 1.
7918 * 00000011 = length 7 because there are 6 zeros before a 1.
7919 *
7920 * 2. The bits used for length are removed from the first byte
7921 * Then all the bytes are merged into a value. NOTE: this
7922 * is not the case for id ebml tags, as their ids include
7923 * length bits.
7924 *
7925 */
7926
7927
7928 var findEbml = function findEbml(bytes, paths) {
7929 paths = normalizePaths(paths);
7930 bytes = toUint8(bytes);
7931 var results = [];
7932
7933 if (!paths.length) {
7934 return results;
7935 }
7936
7937 var i = 0;
7938
7939 while (i < bytes.length) {
7940 var id = getvint(bytes, i, false);
7941 var dataHeader = getvint(bytes, i + id.length);
7942 var dataStart = i + id.length + dataHeader.length; // dataSize is unknown or this is a live stream
7943
7944 if (dataHeader.value === 0x7f) {
7945 dataHeader.value = getInfinityDataSize(id, bytes, dataStart);
7946
7947 if (dataHeader.value !== bytes.length) {
7948 dataHeader.value -= dataStart;
7949 }
7950 }
7951
7952 var dataEnd = dataStart + dataHeader.value > bytes.length ? bytes.length : dataStart + dataHeader.value;
7953 var data = bytes.subarray(dataStart, dataEnd);
7954
7955 if (bytesMatch(paths[0], id.bytes)) {
7956 if (paths.length === 1) {
7957 // this is the end of the paths and we've found the tag we were
7958 // looking for
7959 results.push(data);
7960 } else {
7961 // recursively search for the next tag inside of the data
7962 // of this one
7963 results = results.concat(findEbml(data, paths.slice(1)));
7964 }
7965 }
7966
7967 var totalLength = id.length + dataHeader.length + data.length; // move past this tag entirely, we are not looking for it
7968
7969 i += totalLength;
7970 }
7971
7972 return results;
7973 }; // see https://www.matroska.org/technical/basics.html#block-structure
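// Illustrative sketch, not part of the original source: webmBytes is a
// hypothetical Uint8Array of a webm/mkv file; this lists each track's codec id:
//
//   findEbml(webmBytes, [EBML_TAGS.Segment, EBML_TAGS.Tracks, EBML_TAGS.TrackEntry, EBML_TAGS.CodecID]);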
7974
7975 var NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);
7976 var NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);
7977 var EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);
7978 /**
7979 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
7980 * Sequence Payload"
7981 *
7982 * @param data {Uint8Array} the bytes of a RBSP from a NAL
7983 * unit
7984 * @return {Uint8Array} the RBSP without any Emulation
7985 * Prevention Bytes
7986 */
7987
7988 var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(bytes) {
7989 var positions = [];
7990 var i = 1; // Find all `Emulation Prevention Bytes`
7991
7992 while (i < bytes.length - 2) {
7993 if (bytesMatch(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {
7994 positions.push(i + 2);
7995 i++;
7996 }
7997
7998 i++;
7999 } // If no Emulation Prevention Bytes were found just return the original
8000 // array
8001
8002
8003 if (positions.length === 0) {
8004 return bytes;
8005 } // Create a new array to hold the NAL unit data
8006
8007
8008 var newLength = bytes.length - positions.length;
8009 var newData = new Uint8Array(newLength);
8010 var sourceIndex = 0;
8011
8012 for (i = 0; i < newLength; sourceIndex++, i++) {
8013 if (sourceIndex === positions[0]) {
8014 // Skip this byte
8015 sourceIndex++; // Remove this position index
8016
8017 positions.shift();
8018 }
8019
8020 newData[i] = bytes[sourceIndex];
8021 }
8022
8023 return newData;
8024 };
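// For example (illustrative, not part of the original source), the emulation
// prevention byte (the 0x03) is dropped from the payload:
//
//   discardEmulationPreventionBytes(toUint8([0x67, 0x00, 0x00, 0x03, 0x01]));
//   // => [0x67, 0x00, 0x00, 0x01]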
8025 var findNal = function findNal(bytes, dataType, types, nalLimit) {
8026 if (nalLimit === void 0) {
8027 nalLimit = Infinity;
8028 }
8029
8030 bytes = toUint8(bytes);
8031 types = [].concat(types);
8032 var i = 0;
8033 var nalStart;
8034 var nalsFound = 0; // keep searching until:
8035 // we reach the end of bytes
8036 // we reach the maximum number of nals they want to search
8037 // NOTE: we disregard nalLimit once we have found the start
8038 // of the nal we want, so that we can find its end.
8039
8040 while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
8041 var nalOffset = void 0;
8042
8043 if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {
8044 nalOffset = 4;
8045 } else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {
8046 nalOffset = 3;
8047 } // we are unsynced,
8048 // find the next nal unit
8049
8050
8051 if (!nalOffset) {
8052 i++;
8053 continue;
8054 }
8055
8056 nalsFound++;
8057
8058 if (nalStart) {
8059 return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
8060 }
8061
8062 var nalType = void 0;
8063
8064 if (dataType === 'h264') {
8065 nalType = bytes[i + nalOffset] & 0x1f;
8066 } else if (dataType === 'h265') {
8067 nalType = bytes[i + nalOffset] >> 1 & 0x3f;
8068 }
8069
8070 if (types.indexOf(nalType) !== -1) {
8071 nalStart = i + nalOffset;
8072 } // the nal header is 1 byte long for h264, and 2 bytes for h265
8073
8074
8075 i += nalOffset + (dataType === 'h264' ? 1 : 2);
8076 }
8077
8078 return bytes.subarray(0, 0);
8079 };
8080 var findH264Nal = function findH264Nal(bytes, type, nalLimit) {
8081 return findNal(bytes, 'h264', type, nalLimit);
8082 };
8083 var findH265Nal = function findH265Nal(bytes, type, nalLimit) {
8084 return findNal(bytes, 'h265', type, nalLimit);
8085 };
8086
8087 var CONSTANTS = {
8088 // "webm" string literal in hex
8089 'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),
8090 // "matroska" string literal in hex
8091 'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
8092 // "fLaC" string literal in hex
8093 'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),
8094 // "OggS" string literal in hex
8095 'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),
8096 // ac-3 sync byte, also works for ec-3 as that is simply a codec
8097 // of ac-3
8098 'ac3': toUint8([0x0b, 0x77]),
8099 // "RIFF" string literal in hex used for wav and avi
8100 'riff': toUint8([0x52, 0x49, 0x46, 0x46]),
8101 // "AVI" string literal in hex
8102 'avi': toUint8([0x41, 0x56, 0x49]),
8103 // "WAVE" string literal in hex
8104 'wav': toUint8([0x57, 0x41, 0x56, 0x45]),
8105 // "ftyp3g" string literal in hex
8106 '3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
8107 // "ftyp" string literal in hex
8108 'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),
8109 // "styp" string literal in hex
8110 'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),
8111 // "ftypqt" string literal in hex
8112 'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74]),
8113 // moov string literal in hex
8114 'moov': toUint8([0x6D, 0x6F, 0x6F, 0x76]),
8115 // moof string literal in hex
8116 'moof': toUint8([0x6D, 0x6F, 0x6F, 0x66])
8117 };
8118 var _isLikely = {
8119 aac: function aac(bytes) {
8120 var offset = getId3Offset(bytes);
8121 return bytesMatch(bytes, [0xFF, 0x10], {
8122 offset: offset,
8123 mask: [0xFF, 0x16]
8124 });
8125 },
8126 mp3: function mp3(bytes) {
8127 var offset = getId3Offset(bytes);
8128 return bytesMatch(bytes, [0xFF, 0x02], {
8129 offset: offset,
8130 mask: [0xFF, 0x06]
8131 });
8132 },
8133 webm: function webm(bytes) {
8134 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is webm
8135
8136 return bytesMatch(docType, CONSTANTS.webm);
8137 },
8138 mkv: function mkv(bytes) {
8139 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is matroska
8140
8141 return bytesMatch(docType, CONSTANTS.matroska);
8142 },
8143 mp4: function mp4(bytes) {
8144 // if this file is another base media file format, it is not mp4
8145 if (_isLikely['3gp'](bytes) || _isLikely.mov(bytes)) {
8146 return false;
8147 } // if this file starts with a ftyp or styp box it's mp4
8148
8149
8150 if (bytesMatch(bytes, CONSTANTS.mp4, {
8151 offset: 4
8152 }) || bytesMatch(bytes, CONSTANTS.fmp4, {
8153 offset: 4
8154 })) {
8155 return true;
8156 } // if this file starts with a moof/moov box it's mp4
8157
8158
8159 if (bytesMatch(bytes, CONSTANTS.moof, {
8160 offset: 4
8161 }) || bytesMatch(bytes, CONSTANTS.moov, {
8162 offset: 4
8163 })) {
8164 return true;
8165 }
8166 },
8167 mov: function mov(bytes) {
8168 return bytesMatch(bytes, CONSTANTS.mov, {
8169 offset: 4
8170 });
8171 },
8172 '3gp': function gp(bytes) {
8173 return bytesMatch(bytes, CONSTANTS['3gp'], {
8174 offset: 4
8175 });
8176 },
8177 ac3: function ac3(bytes) {
8178 var offset = getId3Offset(bytes);
8179 return bytesMatch(bytes, CONSTANTS.ac3, {
8180 offset: offset
8181 });
8182 },
8183 ts: function ts(bytes) {
8184 if (bytes.length < 189 && bytes.length >= 1) {
8185 return bytes[0] === 0x47;
8186 }
8187
8188 var i = 0; // check the first 376 bytes for two matching sync bytes
8189
8190 while (i + 188 < bytes.length && i < 188) {
8191 if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
8192 return true;
8193 }
8194
8195 i += 1;
8196 }
8197
8198 return false;
8199 },
8200 flac: function flac(bytes) {
8201 var offset = getId3Offset(bytes);
8202 return bytesMatch(bytes, CONSTANTS.flac, {
8203 offset: offset
8204 });
8205 },
8206 ogg: function ogg(bytes) {
8207 return bytesMatch(bytes, CONSTANTS.ogg);
8208 },
8209 avi: function avi(bytes) {
8210 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.avi, {
8211 offset: 8
8212 });
8213 },
8214 wav: function wav(bytes) {
8215 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.wav, {
8216 offset: 8
8217 });
8218 },
8219 'h264': function h264(bytes) {
8220 // find seq_parameter_set_rbsp
8221 return findH264Nal(bytes, 7, 3).length;
8222 },
8223 'h265': function h265(bytes) {
8224 // find video_parameter_set_rbsp or seq_parameter_set_rbsp
8225 return findH265Nal(bytes, [32, 33], 3).length;
8226 }
8227 }; // get all the isLikely functions
8228 // but make sure 'ts' is above h264 and h265
8229 // and below everything else, as those checks are the least specific
8230
8231 var isLikelyTypes = Object.keys(_isLikely) // remove ts, h264, h265
8232 .filter(function (t) {
8233 return t !== 'ts' && t !== 'h264' && t !== 'h265';
8234 }) // add it back to the bottom
8235 .concat(['ts', 'h264', 'h265']); // make sure we are dealing with uint8 data.
8236
8237 isLikelyTypes.forEach(function (type) {
8238 var isLikelyFn = _isLikely[type];
8239
8240 _isLikely[type] = function (bytes) {
8241 return isLikelyFn(toUint8(bytes));
8242 };
8243 }); // export after wrapping
8244
8245 var isLikely = _isLikely; // A useful list of file signatures can be found here
8246 // https://en.wikipedia.org/wiki/List_of_file_signatures
8247
8248 var detectContainerForBytes = function detectContainerForBytes(bytes) {
8249 bytes = toUint8(bytes);
8250
8251 for (var i = 0; i < isLikelyTypes.length; i++) {
8252 var type = isLikelyTypes[i];
8253
8254 if (isLikely[type](bytes)) {
8255 return type;
8256 }
8257 }
8258
8259 return '';
8260 }; // fmp4 is not a container
8261
8262 var isLikelyFmp4MediaSegment = function isLikelyFmp4MediaSegment(bytes) {
8263 return findBox(bytes, ['moof']).length > 0;
8264 };
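// For example (illustrative, not part of the original source; segmentBytes is
// a hypothetical Uint8Array of a media segment):
//
//   detectContainerForBytes(toUint8([0x47])); // => 'ts' (lone mpeg2-ts sync byte)
//   isLikelyFmp4MediaSegment(segmentBytes);   // true when a top-level 'moof' box is found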
8265
8266 // runs the callback only when readyState is 4, which will only happen if the request is complete.
8267
8268 var callbackOnCompleted = function callbackOnCompleted(request, cb) {
8269 if (request.readyState === 4) {
8270 return cb();
8271 }
8272
8273 return;
8274 };
8275
8276 var containerRequest = function containerRequest(uri, xhr, cb) {
8277 var bytes = [];
8278 var id3Offset;
8279 var finished = false;
8280
8281 var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
8282 req.abort();
8283 finished = true;
8284 return cb(err, req, type, _bytes);
8285 };
8286
8287 var progressListener = function progressListener(error, request) {
8288 if (finished) {
8289 return;
8290 }
8291
8292 if (error) {
8293 return endRequestAndCallback(error, request, '', bytes);
8294 } // grab the new part of content that was just downloaded
8295
8296
8297 var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
8298
8299 bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
8300 id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type
8301 // or we need at least two bytes after an id3Offset
8302
8303 if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
8304 return callbackOnCompleted(request, function () {
8305 return endRequestAndCallback(error, request, '', bytes);
8306 });
8307 }
8308
8309 var type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
8310 // to see the second sync byte, wait until we have enough data
8311 // before declaring it ts
8312
8313 if (type === 'ts' && bytes.length < 188) {
8314 return callbackOnCompleted(request, function () {
8315 return endRequestAndCallback(error, request, '', bytes);
8316 });
8317 } // this may be an unsynced ts segment
8318 // wait for 376 bytes before detecting no container
8319
8320
8321 if (!type && bytes.length < 376) {
8322 return callbackOnCompleted(request, function () {
8323 return endRequestAndCallback(error, request, '', bytes);
8324 });
8325 }
8326
8327 return endRequestAndCallback(null, request, type, bytes);
8328 };
8329
8330 var options = {
8331 uri: uri,
8332 beforeSend: function beforeSend(request) {
8333 // this forces the browser to pass the bytes to us unprocessed
8334 request.overrideMimeType('text/plain; charset=x-user-defined');
8335 request.addEventListener('progress', function (_ref) {
8336 _ref.total;
8337 _ref.loaded;
8338 return callbackWrapper(request, null, {
8339 statusCode: request.status
8340 }, progressListener);
8341 });
8342 }
8343 };
8344 var request = xhr(options, function (error, response) {
8345 return callbackWrapper(request, error, response, progressListener);
8346 });
8347 return request;
8348 };
8349
8350 var EventTarget = videojs__default["default"].EventTarget,
8351 mergeOptions = videojs__default["default"].mergeOptions;
8352
8353 var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
8354 if (!isPlaylistUnchanged(a, b)) {
8355 return false;
8356 } // for dash the above check will often return true even when the playlist
8357 // has actually changed, because mediaSequence isn't a dash concept and we
8358 // often set it to 1, so playlists with the same number of segments compare
8359 // as unchanged. For dash we therefore have to verify that the underlying
8360 // segments are the same.
8361 // if the sidx changed then the playlists are different.
8362
8363
8364 if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
8365 return false;
8366 } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
8367 return false;
8368 } // one or the other does not have segments
8369 // there was a change.
8370
8371
8372 if (a.segments && !b.segments || !a.segments && b.segments) {
8373 return false;
8374 } // neither has segments nothing changed
8375
8376
8377 if (!a.segments && !b.segments) {
8378 return true;
8379 } // check segments themselves
8380
8381
8382 for (var i = 0; i < a.segments.length; i++) {
8383 var aSegment = a.segments[i];
8384 var bSegment = b.segments[i]; // if uris are different between segments there was a change
8385
8386 if (aSegment.uri !== bSegment.uri) {
8387 return false;
8388 } // neither segment has a byterange, there will be no byterange change.
8389
8390
8391 if (!aSegment.byterange && !bSegment.byterange) {
8392 continue;
8393 }
8394
8395 var aByterange = aSegment.byterange;
8396 var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
8397
8398 if (aByterange && !bByterange || !aByterange && bByterange) {
8399 return false;
8400 } // if both segments have byterange with different offsets, there was a change.
8401
8402
8403 if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
8404 return false;
8405 }
8406 } // if everything was the same with segments, this is the same playlist.
8407
8408
8409 return true;
8410 };
8411 /**
8412 * Parses the master XML string and updates playlist URI references.
8413 *
8414 * @param {Object} config
8415 * Object of arguments
8416 * @param {string} config.masterXml
8417 * The mpd XML
8418 * @param {string} config.srcUrl
8419 * The mpd URL
8420 * @param {number} config.clientOffset
8421 * A time difference between server and client
8422 * @param {Object} config.sidxMapping
8423 * SIDX mappings for moof/mdat URIs and byte ranges
8424 * @return {Object}
8425 * The parsed mpd manifest object
8426 */
8427
8428
8429 var parseMasterXml = function parseMasterXml(_ref) {
8430 var masterXml = _ref.masterXml,
8431 srcUrl = _ref.srcUrl,
8432 clientOffset = _ref.clientOffset,
8433 sidxMapping = _ref.sidxMapping;
8434 var master = parse(masterXml, {
8435 manifestUri: srcUrl,
8436 clientOffset: clientOffset,
8437 sidxMapping: sidxMapping
8438 });
8439 addPropertiesToMaster(master, srcUrl);
8440 return master;
8441 };
8442 /**
8443 * Returns a new master manifest that is the result of merging an updated master manifest
8444 * into the original version.
8445 *
8446 * @param {Object} oldMaster
8447 * The old parsed mpd object
8448 * @param {Object} newMaster
8449 * The updated parsed mpd object
8450 * @return {Object}
8451 * A new object representing the original master manifest with the updated media
8452 * playlists merged in
8453 */
8454
8455 var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
8456 var noChanges = true;
8457 var update = mergeOptions(oldMaster, {
8458 // These are top level properties that can be updated
8459 duration: newMaster.duration,
8460 minimumUpdatePeriod: newMaster.minimumUpdatePeriod
8461 }); // First update the playlists in playlist list
8462
8463 for (var i = 0; i < newMaster.playlists.length; i++) {
8464 var playlist = newMaster.playlists[i];
8465
8466 if (playlist.sidx) {
8467 var sidxKey = generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already
8468
8469 if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
8470 addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
8471 }
8472 }
8473
8474 var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);
8475
8476 if (playlistUpdate) {
8477 update = playlistUpdate;
8478 noChanges = false;
8479 }
8480 } // Then update media group playlists
8481
8482
8483 forEachMediaGroup(newMaster, function (properties, type, group, label) {
8484 if (properties.playlists && properties.playlists.length) {
8485 var id = properties.playlists[0].id;
8486
8487 var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);
8488
8489 if (_playlistUpdate) {
8490 update = _playlistUpdate; // update the playlist reference within media groups
8491
8492 update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
8493 noChanges = false;
8494 }
8495 }
8496 });
8497
8498 if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
8499 noChanges = false;
8500 }
8501
8502 if (noChanges) {
8503 return null;
8504 }
8505
8506 return update;
8507 }; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
8508 // If the SIDXs have maps, the two maps should match,
8509 // both `a` and `b` missing SIDXs is considered matching.
8510 // If `a` or `b` but not both have a map, they aren't matching.
8511
8512 var equivalentSidx = function equivalentSidx(a, b) {
8513 var neitherMap = Boolean(!a.map && !b.map);
8514 var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
8515 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
8516 }; // exported for testing
8517
8518
8519 var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
8520 var newSidxMapping = {};
8521
8522 for (var id in playlists) {
8523 var playlist = playlists[id];
8524 var currentSidxInfo = playlist.sidx;
8525
8526 if (currentSidxInfo) {
8527 var key = generateSidxKey(currentSidxInfo);
8528
8529 if (!oldSidxMapping[key]) {
8530 break;
8531 }
8532
8533 var savedSidxInfo = oldSidxMapping[key].sidxInfo;
8534
8535 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
8536 newSidxMapping[key] = oldSidxMapping[key];
8537 }
8538 }
8539 }
8540
8541 return newSidxMapping;
8542 };
8543 /**
8544 * A function that filters out changed items as they need to be requested separately.
8545 *
8546 * The method is exported for testing
8547 *
8548 * @param {Object} master the parsed mpd XML returned via mpd-parser
8549 * @param {Object} oldSidxMapping the SIDX to compare against
8550 */
8551
8552 var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
8553 var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
8554 var mediaGroupSidx = videoSidx;
8555 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
8556 if (properties.playlists && properties.playlists.length) {
8557 var playlists = properties.playlists;
8558 mediaGroupSidx = mergeOptions(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
8559 }
8560 });
8561 return mediaGroupSidx;
8562 };
8563
8564 var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
8565 inheritsLoose(DashPlaylistLoader, _EventTarget);
8566
8567 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
8568 // playlist loader setups from media groups will expect to be able to pass a playlist
8569 // (since there aren't external URLs to media playlists with DASH)
8570 function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
8571 var _this;
8572
8573 if (options === void 0) {
8574 options = {};
8575 }
8576
8577 _this = _EventTarget.call(this) || this;
8578 _this.masterPlaylistLoader_ = masterPlaylistLoader || assertThisInitialized(_this);
8579
8580 if (!masterPlaylistLoader) {
8581 _this.isMaster_ = true;
8582 }
8583
8584 var _options = options,
8585 _options$withCredenti = _options.withCredentials,
8586 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
8587 _options$handleManife = _options.handleManifestRedirects,
8588 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
8589 _this.vhs_ = vhs;
8590 _this.withCredentials = withCredentials;
8591 _this.handleManifestRedirects = handleManifestRedirects;
8592
8593 if (!srcUrlOrPlaylist) {
8594 throw new Error('A non-empty playlist URL or object is required');
8595 } // event naming?
8596
8597
8598 _this.on('minimumUpdatePeriod', function () {
8599 _this.refreshXml_();
8600 }); // live playlist staleness timeout
8601
8602
8603 _this.on('mediaupdatetimeout', function () {
8604 _this.refreshMedia_(_this.media().id);
8605 });
8606
8607 _this.state = 'HAVE_NOTHING';
8608 _this.loadedPlaylists_ = {};
8609 _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
8610 // The masterPlaylistLoader will be created with a string
8611
8612 if (_this.isMaster_) {
8613 _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
8614 // once multi-period is refactored
8615
8616 _this.masterPlaylistLoader_.sidxMapping_ = {};
8617 } else {
8618 _this.childPlaylist_ = srcUrlOrPlaylist;
8619 }
8620
8621 return _this;
8622 }
8623
8624 var _proto = DashPlaylistLoader.prototype;
8625
8626 _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
8627 // disposed
8628 if (!this.request) {
8629 return true;
8630 } // pending request is cleared
8631
8632
8633 this.request = null;
8634
8635 if (err) {
8636 // use the provided error object or create one
8637 // based on the request/response
8638 this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
8639 status: request.status,
8640 message: 'DASH request error at URL: ' + request.uri,
8641 response: request.response,
8642 // MEDIA_ERR_NETWORK
8643 code: 2
8644 };
8645
8646 if (startingState) {
8647 this.state = startingState;
8648 }
8649
8650 this.trigger('error');
8651 return true;
8652 }
8653 }
8654 /**
8655 * Verify that the container of the sidx segment can be parsed
8656 * and if it can, get and parse that segment.
8657 */
8658 ;
8659
8660 _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
8661 var _this2 = this;
8662
8663 var sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
8664
8665 if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
8666 // keep this function async
8667 this.mediaRequest_ = window.setTimeout(function () {
8668 return cb(false);
8669 }, 0);
8670 return;
8671 } // resolve the segment URL relative to the playlist
8672
8673
8674 var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
8675
8676 var fin = function fin(err, request) {
8677 if (_this2.requestErrored_(err, request, startingState)) {
8678 return;
8679 }
8680
8681 var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
8682 var sidx;
8683
8684 try {
8685 sidx = parseSidx_1(toUint8(request.response).subarray(8));
8686 } catch (e) {
8687 // sidx parsing failed.
8688 _this2.requestErrored_(e, request, startingState);
8689
8690 return;
8691 }
8692
8693 sidxMapping[sidxKey] = {
8694 sidxInfo: playlist.sidx,
8695 sidx: sidx
8696 };
8697 addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
8698 return cb(true);
8699 };
8700
8701 this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
8702 if (err) {
8703 return fin(err, request);
8704 }
8705
8706 if (!container || container !== 'mp4') {
8707 return fin({
8708 status: request.status,
8709 message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
8710 // response is just bytes in this case
8711 // but we really don't want to return that.
8712 response: '',
8713 playlist: playlist,
8714 internal: true,
8715 blacklistDuration: Infinity,
8716 // MEDIA_ERR_NETWORK
8717 code: 2
8718 }, request);
8719 } // if we already downloaded the sidx bytes in the container request, use them
8720
8721
8722 var _playlist$sidx$bytera = playlist.sidx.byterange,
8723 offset = _playlist$sidx$bytera.offset,
8724 length = _playlist$sidx$bytera.length;
8725
8726 if (bytes.length >= length + offset) {
8727 return fin(err, {
8728 response: bytes.subarray(offset, offset + length),
8729 status: request.status,
8730 uri: request.uri
8731 });
8732 } // otherwise request sidx bytes
8733
8734
8735 _this2.request = _this2.vhs_.xhr({
8736 uri: uri,
8737 responseType: 'arraybuffer',
8738 headers: segmentXhrHeaders({
8739 byterange: playlist.sidx.byterange
8740 })
8741 }, fin);
8742 });
8743 };
8744
8745 _proto.dispose = function dispose() {
8746 this.trigger('dispose');
8747 this.stopRequest();
8748 this.loadedPlaylists_ = {};
8749 window.clearTimeout(this.minimumUpdatePeriodTimeout_);
8750 window.clearTimeout(this.mediaRequest_);
8751 window.clearTimeout(this.mediaUpdateTimeout);
8752 this.mediaUpdateTimeout = null;
8753 this.mediaRequest_ = null;
8754 this.minimumUpdatePeriodTimeout_ = null;
8755
8756 if (this.masterPlaylistLoader_.createMupOnMedia_) {
8757 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
8758 this.masterPlaylistLoader_.createMupOnMedia_ = null;
8759 }
8760
8761 this.off();
8762 };
8763
8764 _proto.hasPendingRequest = function hasPendingRequest() {
8765 return this.request || this.mediaRequest_;
8766 };
8767
8768 _proto.stopRequest = function stopRequest() {
8769 if (this.request) {
8770 var oldRequest = this.request;
8771 this.request = null;
8772 oldRequest.onreadystatechange = null;
8773 oldRequest.abort();
8774 }
8775 };
8776
8777 _proto.media = function media(playlist) {
8778 var _this3 = this;
8779
8780 // getter
8781 if (!playlist) {
8782 return this.media_;
8783 } // setter
8784
8785
8786 if (this.state === 'HAVE_NOTHING') {
8787 throw new Error('Cannot switch media playlist from ' + this.state);
8788 }
8789
8790 var startingState = this.state; // find the playlist object if the target playlist has been specified by URI
8791
8792 if (typeof playlist === 'string') {
8793 if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
8794 throw new Error('Unknown playlist URI: ' + playlist);
8795 }
8796
8797 playlist = this.masterPlaylistLoader_.master.playlists[playlist];
8798 }
8799
8800 var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
8801
8802 if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
8803 this.state = 'HAVE_METADATA';
8804 this.media_ = playlist; // trigger media change if the active media has been updated
8805
8806 if (mediaChange) {
8807 this.trigger('mediachanging');
8808 this.trigger('mediachange');
8809 }
8810
8811 return;
8812 } // switching to the active playlist is a no-op
8813
8814
8815 if (!mediaChange) {
8816 return;
8817 } // switching from an already loaded playlist
8818
8819
8820 if (this.media_) {
8821 this.trigger('mediachanging');
8822 }
8823
8824 this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
8825 // everything is ready just continue to haveMetadata
8826 _this3.haveMetadata({
8827 startingState: startingState,
8828 playlist: playlist
8829 });
8830 });
8831 };
8832
8833 _proto.haveMetadata = function haveMetadata(_ref2) {
8834 var startingState = _ref2.startingState,
8835 playlist = _ref2.playlist;
8836 this.state = 'HAVE_METADATA';
8837 this.loadedPlaylists_[playlist.id] = playlist;
8838 this.mediaRequest_ = null; // This will trigger loadedplaylist
8839
8840 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
8841 // to resolve setup of media groups
8842
8843 if (startingState === 'HAVE_MASTER') {
8844 this.trigger('loadedmetadata');
8845 } else {
8846 // trigger media change if the active media has been updated
8847 this.trigger('mediachange');
8848 }
8849 };
8850
8851 _proto.pause = function pause() {
8852 if (this.masterPlaylistLoader_.createMupOnMedia_) {
8853 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
8854 this.masterPlaylistLoader_.createMupOnMedia_ = null;
8855 }
8856
8857 this.stopRequest();
8858 window.clearTimeout(this.mediaUpdateTimeout);
8859 this.mediaUpdateTimeout = null;
8860
8861 if (this.isMaster_) {
8862 window.clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
8863 this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
8864 }
8865
8866 if (this.state === 'HAVE_NOTHING') {
8867 // If we pause the loader before any data has been retrieved, it's as if we never
8868 // started, so reset to an unstarted state.
8869 this.started = false;
8870 }
8871 };
8872
8873 _proto.load = function load(isFinalRendition) {
8874 var _this4 = this;
8875
8876 window.clearTimeout(this.mediaUpdateTimeout);
8877 this.mediaUpdateTimeout = null;
8878 var media = this.media();
8879
8880 if (isFinalRendition) {
8881 var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
8882 this.mediaUpdateTimeout = window.setTimeout(function () {
8883 return _this4.load();
8884 }, delay);
8885 return;
8886 } // because the playlists are internal to the manifest, load should either load the
8887 // main manifest, or do nothing but trigger an event
8888
8889
8890 if (!this.started) {
8891 this.start();
8892 return;
8893 }
8894
8895 if (media && !media.endList) {
8896 // Check to see if this is the master loader and the MUP was cleared (this happens
8897 // when the loader was paused). `media` should be set at this point since one is always
8898 // set during `start()`.
8899 if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
8900 // Trigger minimumUpdatePeriod to refresh the master manifest
8901 this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout, it should be recreated
8902
8903 this.updateMinimumUpdatePeriodTimeout_();
8904 }
8905
8906 this.trigger('mediaupdatetimeout');
8907 } else {
8908 this.trigger('loadedplaylist');
8909 }
8910 };
8911
8912 _proto.start = function start() {
8913 var _this5 = this;
8914
8915 this.started = true; // We don't need to request the master manifest again
8916 // Call this asynchronously to match the xhr request behavior below
8917
8918 if (!this.isMaster_) {
8919 this.mediaRequest_ = window.setTimeout(function () {
8920 return _this5.haveMaster_();
8921 }, 0);
8922 return;
8923 }
8924
8925 this.requestMaster_(function (req, masterChanged) {
8926 _this5.haveMaster_();
8927
8928 if (!_this5.hasPendingRequest() && !_this5.media_) {
8929 _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
8930 }
8931 });
8932 };
8933
8934 _proto.requestMaster_ = function requestMaster_(cb) {
8935 var _this6 = this;
8936
8937 this.request = this.vhs_.xhr({
8938 uri: this.masterPlaylistLoader_.srcUrl,
8939 withCredentials: this.withCredentials
8940 }, function (error, req) {
8941 if (_this6.requestErrored_(error, req)) {
8942 if (_this6.state === 'HAVE_NOTHING') {
8943 _this6.started = false;
8944 }
8945
8946 return;
8947 }
8948
8949 var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
8950 _this6.masterPlaylistLoader_.masterXml_ = req.responseText;
8951
8952 if (req.responseHeaders && req.responseHeaders.date) {
8953 _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
8954 } else {
8955 _this6.masterLoaded_ = Date.now();
8956 }
8957
8958 _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);
8959
8960 if (masterChanged) {
8961 _this6.handleMaster_();
8962
8963 _this6.syncClientServerClock_(function () {
8964 return cb(req, masterChanged);
8965 });
8966
8967 return;
8968 }
8969
8970 return cb(req, masterChanged);
8971 });
8972 }
8973 /**
8974 * Parses the master xml for UTCTiming node to sync the client clock to the server
8975 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
8976 *
8977 * @param {Function} done
8978 * Function to call when clock sync has completed
8979 */
8980 ;
8981
8982 _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
8983 var _this7 = this;
8984
8985 var utcTiming = parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
8986 // server clock
8987
8988 if (utcTiming === null) {
8989 this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
8990 return done();
8991 }
8992
8993 if (utcTiming.method === 'DIRECT') {
8994 this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
8995 return done();
8996 }
8997
8998 this.request = this.vhs_.xhr({
8999 uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
9000 method: utcTiming.method,
9001 withCredentials: this.withCredentials
9002 }, function (error, req) {
9003 // disposed
9004 if (!_this7.request) {
9005 return;
9006 }
9007
9008 if (error) {
9009 // sync request failed, fall back to using date header from mpd
9010 // TODO: log warning
9011 _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
9012 return done();
9013 }
9014
9015 var serverTime;
9016
9017 if (utcTiming.method === 'HEAD') {
9018 if (!req.responseHeaders || !req.responseHeaders.date) {
9019 // expected date header not present, fall back to using date header from mpd
9020 // TODO: log warning
9021 serverTime = _this7.masterLoaded_;
9022 } else {
9023 serverTime = Date.parse(req.responseHeaders.date);
9024 }
9025 } else {
9026 serverTime = Date.parse(req.responseText);
9027 }
9028
9029 _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
9030 done();
9031 });
9032 };
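/**
 * For reference, the three UTCTiming shapes handled above look roughly like
 * the following in an MPD (illustrative values only):
 *
 * @example
 * <UTCTiming schemeIdUri="urn:mpeg:dash:utc:direct:2014"
 *            value="2021-01-01T00:00:00Z"/>          <!-- DIRECT: no extra request -->
 * <UTCTiming schemeIdUri="urn:mpeg:dash:utc:http-head:2014"
 *            value="https://time.example.com/"/>     <!-- HEAD: Date response header -->
 * <UTCTiming schemeIdUri="urn:mpeg:dash:utc:http-iso:2014"
 *            value="https://time.example.com/iso"/>  <!-- GET: Date.parse of the body -->
 */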
9033
9034 _proto.haveMaster_ = function haveMaster_() {
9035 this.state = 'HAVE_MASTER';
9036
9037 if (this.isMaster_) {
9038 // We have the master playlist at this point, so
9039 // trigger this to allow MasterPlaylistController
9040 // to make an initial playlist selection
9041 this.trigger('loadedplaylist');
9042 } else if (!this.media_) {
9043 // no media playlist was specifically selected so select
9044 // the one the child playlist loader was created with
9045 this.media(this.childPlaylist_);
9046 }
9047 };
9048
9049 _proto.handleMaster_ = function handleMaster_() {
9050 // clear media request
9051 this.mediaRequest_ = null;
9052 var newMaster = parseMasterXml({
9053 masterXml: this.masterPlaylistLoader_.masterXml_,
9054 srcUrl: this.masterPlaylistLoader_.srcUrl,
9055 clientOffset: this.masterPlaylistLoader_.clientOffset_,
9056 sidxMapping: this.masterPlaylistLoader_.sidxMapping_
9057 });
9058 var oldMaster = this.masterPlaylistLoader_.master; // if we have an old master to compare the new master against
9059
9060 if (oldMaster) {
9061 newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
9062 } // only update master if we have a new master
9063
9064
9065 this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
9066 var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
9067
9068 if (location && location !== this.masterPlaylistLoader_.srcUrl) {
9069 this.masterPlaylistLoader_.srcUrl = location;
9070 }
9071
9072 if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
9073 this.updateMinimumUpdatePeriodTimeout_();
9074 }
9075
9076 return Boolean(newMaster);
9077 };
9078
9079 _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
9080 var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
9081 // a new one will be added if needed.
9082
9083 if (mpl.createMupOnMedia_) {
9084 mpl.off('loadedmetadata', mpl.createMupOnMedia_);
9085 mpl.createMupOnMedia_ = null;
9086 } // clear any pending timeouts
9087
9088
9089 if (mpl.minimumUpdatePeriodTimeout_) {
9090 window.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
9091 mpl.minimumUpdatePeriodTimeout_ = null;
9092 }
9093
9094 var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
9095 // MPD has no future validity, so a new one will need to be acquired when new
9096 // media segments are to be made available. Thus, we use the target duration
9097 // in this case
9098
9099 if (mup === 0) {
9100 if (mpl.media()) {
9101 mup = mpl.media().targetDuration * 1000;
9102 } else {
9103 mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
9104 mpl.one('loadedmetadata', mpl.createMupOnMedia_);
9105 }
9106 } // if minimumUpdatePeriod is invalid or <= zero, which
9107 // can happen when a live video becomes VOD, skip timeout
9108 // creation.
9109
9110
9111 if (typeof mup !== 'number' || mup <= 0) {
9112 if (mup < 0) {
9113 this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
9114 }
9115
9116 return;
9117 }
9118
9119 this.createMUPTimeout_(mup);
9120 };
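/**
 * A worked sketch of the minimumUpdatePeriod handling above (values are
 * hypothetical):
 *
 * @example
 * // MPD@minimumUpdatePeriod="PT30S" -> mup = 30000, refresh every 30 seconds
 * // MPD@minimumUpdatePeriod="PT0S"  -> mup = 0, fall back to
 * //   media().targetDuration * 1000, or wait for 'loadedmetadata' if no
 * //   media playlist has been selected yet
 * // missing or negative mup         -> no refresh timeout is created
 */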
9121
9122 _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
9123 var mpl = this.masterPlaylistLoader_;
9124 mpl.minimumUpdatePeriodTimeout_ = window.setTimeout(function () {
9125 mpl.minimumUpdatePeriodTimeout_ = null;
9126 mpl.trigger('minimumUpdatePeriod');
9127 mpl.createMUPTimeout_(mup);
9128 }, mup);
9129 }
9130 /**
9131 * Sends request to refresh the master xml and updates the parsed master manifest
9132 */
9133 ;
9134
9135 _proto.refreshXml_ = function refreshXml_() {
9136 var _this8 = this;
9137
9138 this.requestMaster_(function (req, masterChanged) {
9139 if (!masterChanged) {
9140 return;
9141 }
9142
9143 if (_this8.media_) {
9144 _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
9145 } // This will filter out updated sidx info from the mapping
9146
9147
9148 _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);
9149
9150 _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
9151 // TODO: do we need to reload the current playlist?
9152 _this8.refreshMedia_(_this8.media().id);
9153 });
9154 });
9155 }
9156 /**
9157 * Refreshes the media playlist by re-parsing the master xml and updating playlist
9158 * references. If this is an alternate loader, the updated parsed manifest is retrieved
9159 * from the master loader.
9160 */
9161 ;
9162
9163 _proto.refreshMedia_ = function refreshMedia_(mediaID) {
9164 var _this9 = this;
9165
9166 if (!mediaID) {
9167 throw new Error('refreshMedia_ must take a media id');
9168 } // for master we have to reparse the master xml
9169 // to re-create segments based on current timing values
9170 // which may change media. We only skip updating master
9171 // if this is the first time this.media_ is being set,
9172 // as master was just parsed in that case.
9173
9174
9175 if (this.media_ && this.isMaster_) {
9176 this.handleMaster_();
9177 }
9178
9179 var playlists = this.masterPlaylistLoader_.master.playlists;
9180 var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
9181
9182 if (mediaChanged) {
9183 this.media_ = playlists[mediaID];
9184 } else {
9185 this.trigger('playlistunchanged');
9186 }
9187
9188 if (!this.mediaUpdateTimeout) {
9189 var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
9190 if (_this9.media().endList) {
9191 return;
9192 }
9193
9194 _this9.mediaUpdateTimeout = window.setTimeout(function () {
9195 _this9.trigger('mediaupdatetimeout');
9196
9197 createMediaUpdateTimeout();
9198 }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
9199 };
9200
9201 createMediaUpdateTimeout();
9202 }
9203
9204 this.trigger('loadedplaylist');
9205 };
9206
9207 return DashPlaylistLoader;
9208 }(EventTarget);
9209
9210 var Config = {
9211 GOAL_BUFFER_LENGTH: 30,
9212 MAX_GOAL_BUFFER_LENGTH: 60,
9213 BACK_BUFFER_LENGTH: 30,
9214 GOAL_BUFFER_LENGTH_RATE: 1,
9215 // 0.5 MB/s
9216 INITIAL_BANDWIDTH: 4194304,
9217 // A fudge factor to apply to advertised playlist bitrates to account for
9218 // temporary fluctuations in client bandwidth
9219 BANDWIDTH_VARIANCE: 1.2,
9220 // How much of the buffer must be filled before we consider upswitching
9221 BUFFER_LOW_WATER_LINE: 0,
9222 MAX_BUFFER_LOW_WATER_LINE: 30,
9223 // TODO: Remove this when experimentalBufferBasedABR is removed
9224 EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
9225 BUFFER_LOW_WATER_LINE_RATE: 1,
9226 // If the buffer is greater than the high water line, we won't switch down
9227 BUFFER_HIGH_WATER_LINE: 30
9228 };
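/**
 * These defaults drive the buffering and rendition-switching heuristics below.
 * They are typically surfaced on the Vhs export (see the project README), so
 * they can be tuned globally at runtime; treat this as a sketch rather than a
 * guarantee for every build:
 *
 * @example
 * // buffer further ahead, at the cost of wasted bytes on quality switches
 * videojs.Vhs.GOAL_BUFFER_LENGTH = 60;
 * videojs.Vhs.MAX_GOAL_BUFFER_LENGTH = 120;
 * // require 10 seconds of buffer before considering an up-switch
 * videojs.Vhs.BUFFER_LOW_WATER_LINE = 10;
 */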
9229
9230 var stringToArrayBuffer = function stringToArrayBuffer(string) {
9231 var view = new Uint8Array(new ArrayBuffer(string.length));
9232
9233 for (var i = 0; i < string.length; i++) {
9234 view[i] = string.charCodeAt(i);
9235 }
9236
9237 return view.buffer;
9238 };
9239
9240 var MockWorker = /*#__PURE__*/function () {
9241 function MockWorker() {
9242 this.listeners_ = [];
9243 this.onmessage = null;
9244 this.remote_ = null;
9245 }
9246
9247 var _proto = MockWorker.prototype;
9248
9249 _proto.addEventListener = function addEventListener(type, fn) {
9250 if (type !== 'message') {
9251 return;
9252 }
9253
9254 this.listeners_.push(fn);
9255 };
9256
9257 _proto.removeEventListener = function removeEventListener(type, fn) {
9258 if (type !== 'message') {
9259 return;
9260 }
9261
9262 var i = this.listeners_.indexOf(fn);
9263
9264 if (i === -1) {
9265 return;
9266 }
9267
9268 this.listeners_.splice(i, 1);
9269 };
9270
9271 _proto.dispatchEvent = function dispatchEvent(event) {
9272 if (!event || event.type !== 'message') {
9273 return;
9274 }
9275
9276 if (this.onmessage) {
9277 this.onmessage(event);
9278 }
9279
9280 this.listeners_.forEach(function (fn) {
9281 fn(event);
9282 });
9283 };
9284
9285 _proto.postMessage = function postMessage(data) {
9286 if (this.remote_) {
9287 this.remote_.recv_(data);
9288 }
9289 };
9290
9291 _proto.recv_ = function recv_(data) {
9292 // the browser puts the actual message under a `data` property on the event
9293 var message = {
9294 data: data
9295 };
9296
9297 if (this.onmessage) {
9298 this.onmessage(message);
9299 }
9300
9301 this.listeners_.forEach(function (fn) {
9302 fn(message);
9303 });
9304 };
9305
9306 _proto.terminate = function terminate() {
9307 if (this.remote_) {
9308 this.remote_.remote_ = null;
9309 this.remote_.terminate();
9310 this.remote_ = null;
9311 }
9312
9313 this.onmessage = null;
9314 this.listeners_.length = 0;
9315 };
9316
9317 return MockWorker;
9318 }();
9319
9320 MockWorker.prototype.on = MockWorker.prototype.addEventListener;
9321 MockWorker.prototype.off = MockWorker.prototype.removeEventListener;
9322 var factory = function factory(fn) {
9323 return function () {
9324 var client = new MockWorker();
9325 var worker = new MockWorker();
9326 client.type_ = 'window api';
9327 client.remote_ = worker;
9328 worker.remote_ = client;
9329 worker.type_ = 'web worker';
9330 fn(worker);
9331 return client;
9332 };
9333 };
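/**
 * The factory above wires two MockWorker instances back-to-back so that
 * worker-style code can run on the main thread: postMessage on one side
 * surfaces as an onmessage/'message' event on the other. A minimal sketch:
 *
 * @example
 * var createEchoWorker = factory(function (self) {
 *   // "web worker" side: echo every message back
 *   self.onmessage = function (event) {
 *     self.postMessage(event.data);
 *   };
 * });
 * var worker = createEchoWorker(); // "window api" side
 * worker.onmessage = function (event) {
 *   console.log('echoed:', event.data);
 * };
 * worker.postMessage({hello: 'world'});
 */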
9334 var transform = function transform(fn) {
9335 // eslint-disable-next-line
9336 return fn;
9337 };
9338
9339 /* rollup-plugin-worker-factory start for worker!/Users/bcasey/Projects/videojs-http-streaming/src/transmuxer-worker.js */
9340 var workerCode$1 = transform(function (self) {
9341 /**
9342 * mux.js
9343 *
9344 * Copyright (c) Brightcove
9345 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9346 *
9347 * A lightweight readable stream implementation that handles event dispatching.
9348 * Objects that inherit from streams should call init in their constructors.
9349 */
9350
9351 var Stream = function Stream() {
9352 this.init = function () {
9353 var listeners = {};
9354 /**
9355 * Add a listener for a specified event type.
9356 * @param type {string} the event name
9357 * @param listener {function} the callback to be invoked when an event of
9358 * the specified type occurs
9359 */
9360
9361 this.on = function (type, listener) {
9362 if (!listeners[type]) {
9363 listeners[type] = [];
9364 }
9365
9366 listeners[type] = listeners[type].concat(listener);
9367 };
9368 /**
9369 * Remove a listener for a specified event type.
9370 * @param type {string} the event name
9371 * @param listener {function} a function previously registered for this
9372 * type of event through `on`
9373 */
9374
9375
9376 this.off = function (type, listener) {
9377 var index;
9378
9379 if (!listeners[type]) {
9380 return false;
9381 }
9382
9383 index = listeners[type].indexOf(listener);
9384 listeners[type] = listeners[type].slice();
9385 listeners[type].splice(index, 1);
9386 return index > -1;
9387 };
9388 /**
9389 * Trigger an event of the specified type on this stream. Any additional
9390 * arguments to this function are passed as parameters to event listeners.
9391 * @param type {string} the event name
9392 */
9393
9394
9395 this.trigger = function (type) {
9396 var callbacks, i, length, args;
9397 callbacks = listeners[type];
9398
9399 if (!callbacks) {
9400 return;
9401 } // Slicing the arguments on every invocation of this method
9402 // can add a significant amount of overhead. Avoid the
9403 // intermediate object creation for the common case of a
9404 // single callback argument
9405
9406
9407 if (arguments.length === 2) {
9408 length = callbacks.length;
9409
9410 for (i = 0; i < length; ++i) {
9411 callbacks[i].call(this, arguments[1]);
9412 }
9413 } else {
9414 args = [];
9416
9417 for (i = 1; i < arguments.length; ++i) {
9418 args.push(arguments[i]);
9419 }
9420
9421 length = callbacks.length;
9422
9423 for (i = 0; i < length; ++i) {
9424 callbacks[i].apply(this, args);
9425 }
9426 }
9427 };
9428 /**
9429 * Destroys the stream and cleans up.
9430 */
9431
9432
9433 this.dispose = function () {
9434 listeners = {};
9435 };
9436 };
9437 };
9438 /**
9439 * Forwards all `data` events on this stream to the destination stream. The
9440 * destination stream should provide a method `push` to receive the data
9441 * events as they arrive.
9442 * @param destination {stream} the stream that will receive all `data` events
9445 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
9446 */
9447
9448
9449 Stream.prototype.pipe = function (destination) {
9450 this.on('data', function (data) {
9451 destination.push(data);
9452 });
9453 this.on('done', function (flushSource) {
9454 destination.flush(flushSource);
9455 });
9456 this.on('partialdone', function (flushSource) {
9457 destination.partialFlush(flushSource);
9458 });
9459 this.on('endedtimeline', function (flushSource) {
9460 destination.endTimeline(flushSource);
9461 });
9462 this.on('reset', function (flushSource) {
9463 destination.reset(flushSource);
9464 });
9465 return destination;
9466 }; // Default stream functions that are expected to be overridden to perform
9467 // actual work. These are provided by the prototype as a sort of no-op
9468 // implementation so that we don't have to check for their existence in the
9469 // `pipe` function above.
9470
9471
9472 Stream.prototype.push = function (data) {
9473 this.trigger('data', data);
9474 };
9475
9476 Stream.prototype.flush = function (flushSource) {
9477 this.trigger('done', flushSource);
9478 };
9479
9480 Stream.prototype.partialFlush = function (flushSource) {
9481 this.trigger('partialdone', flushSource);
9482 };
9483
9484 Stream.prototype.endTimeline = function (flushSource) {
9485 this.trigger('endedtimeline', flushSource);
9486 };
9487
9488 Stream.prototype.reset = function (flushSource) {
9489 this.trigger('reset', flushSource);
9490 };
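/**
 * A minimal sketch of how these Stream primitives compose; both streams here
 * are hypothetical:
 *
 * @example
 * var source = new Stream();
 * source.init();
 * var sink = new Stream();
 * sink.init();
 * sink.push = function (data) {
 *   console.log('received', data);
 * };
 * source.pipe(sink);
 * source.push(1); // -> 'received 1'
 * source.flush(); // -> sink.flush is invoked via the 'done' event
 */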
9491
9492 var stream = Stream;
9493 /**
9494 * mux.js
9495 *
9496 * Copyright (c) Brightcove
9497 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9498 *
9499 * Functions that generate fragmented MP4s suitable for use with Media
9500 * Source Extensions.
9501 */
9502
9503 var UINT32_MAX = Math.pow(2, 32) - 1;
9504 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
9505
9506 (function () {
9507 var i;
9508 types = {
9509 avc1: [],
9510 // codingname
9511 avcC: [],
9512 btrt: [],
9513 dinf: [],
9514 dref: [],
9515 esds: [],
9516 ftyp: [],
9517 hdlr: [],
9518 mdat: [],
9519 mdhd: [],
9520 mdia: [],
9521 mfhd: [],
9522 minf: [],
9523 moof: [],
9524 moov: [],
9525 mp4a: [],
9526 // codingname
9527 mvex: [],
9528 mvhd: [],
9529 pasp: [],
9530 sdtp: [],
9531 smhd: [],
9532 stbl: [],
9533 stco: [],
9534 stsc: [],
9535 stsd: [],
9536 stsz: [],
9537 stts: [],
9538 styp: [],
9539 tfdt: [],
9540 tfhd: [],
9541 traf: [],
9542 trak: [],
9543 trun: [],
9544 trex: [],
9545 tkhd: [],
9546 vmhd: []
9547 }; // In environments where Uint8Array is undefined (e.g., IE8), skip setup so that we
9548 // don't throw an error
9549
9550 if (typeof Uint8Array === 'undefined') {
9551 return;
9552 }
9553
9554 for (i in types) {
9555 if (types.hasOwnProperty(i)) {
9556 types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
9557 }
9558 }
9559
9560 MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
9561 AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
9562 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
9563 VIDEO_HDLR = new Uint8Array([0x00, // version 0
9564 0x00, 0x00, 0x00, // flags
9565 0x00, 0x00, 0x00, 0x00, // pre_defined
9566 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
9567 0x00, 0x00, 0x00, 0x00, // reserved
9568 0x00, 0x00, 0x00, 0x00, // reserved
9569 0x00, 0x00, 0x00, 0x00, // reserved
9570 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
9571 ]);
9572 AUDIO_HDLR = new Uint8Array([0x00, // version 0
9573 0x00, 0x00, 0x00, // flags
9574 0x00, 0x00, 0x00, 0x00, // pre_defined
9575 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
9576 0x00, 0x00, 0x00, 0x00, // reserved
9577 0x00, 0x00, 0x00, 0x00, // reserved
9578 0x00, 0x00, 0x00, 0x00, // reserved
9579 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
9580 ]);
9581 HDLR_TYPES = {
9582 video: VIDEO_HDLR,
9583 audio: AUDIO_HDLR
9584 };
9585 DREF = new Uint8Array([0x00, // version 0
9586 0x00, 0x00, 0x00, // flags
9587 0x00, 0x00, 0x00, 0x01, // entry_count
9588 0x00, 0x00, 0x00, 0x0c, // entry_size
9589 0x75, 0x72, 0x6c, 0x20, // 'url' type
9590 0x00, // version 0
9591 0x00, 0x00, 0x01 // entry_flags
9592 ]);
9593 SMHD = new Uint8Array([0x00, // version
9594 0x00, 0x00, 0x00, // flags
9595 0x00, 0x00, // balance, 0 means centered
9596 0x00, 0x00 // reserved
9597 ]);
9598 STCO = new Uint8Array([0x00, // version
9599 0x00, 0x00, 0x00, // flags
9600 0x00, 0x00, 0x00, 0x00 // entry_count
9601 ]);
9602 STSC = STCO;
9603 STSZ = new Uint8Array([0x00, // version
9604 0x00, 0x00, 0x00, // flags
9605 0x00, 0x00, 0x00, 0x00, // sample_size
9606 0x00, 0x00, 0x00, 0x00 // sample_count
9607 ]);
9608 STTS = STCO;
9609 VMHD = new Uint8Array([0x00, // version
9610 0x00, 0x00, 0x01, // flags
9611 0x00, 0x00, // graphicsmode
9612 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
9613 ]);
9614 })();
9615
9616 box = function box(type) {
9617 var payload = [],
9618 size = 0,
9619 i,
9620 result,
9621 view;
9622
9623 for (i = 1; i < arguments.length; i++) {
9624 payload.push(arguments[i]);
9625 }
9626
9627 i = payload.length; // calculate the total size we need to allocate
9628
9629 while (i--) {
9630 size += payload[i].byteLength;
9631 }
9632
9633 result = new Uint8Array(size + 8);
9634 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
9635 view.setUint32(0, result.byteLength);
9636 result.set(type, 4); // copy the payload into the result
9637
9638 for (i = 0, size = 8; i < payload.length; i++) {
9639 result.set(payload[i], size);
9640 size += payload[i].byteLength;
9641 }
9642
9643 return result;
9644 };
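/**
 * box() produces a length-prefixed ISO BMFF box: a 32-bit size covering the
 * header and payload, the 4-byte type, then the payload bytes. A worked
 * sketch:
 *
 * @example
 * var b = box(types.mdat, new Uint8Array([1, 2, 3]));
 * // b.byteLength === 11         (8-byte header + 3-byte payload)
 * // b[0..3]  === 0x0000000b     (size, big endian)
 * // b[4..7]  === 'mdat'         (0x6d, 0x64, 0x61, 0x74)
 * // b[8..10] === 1, 2, 3        (payload)
 */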
9645
9646 dinf = function dinf() {
9647 return box(types.dinf, box(types.dref, DREF));
9648 };
9649
9650 esds = function esds(track) {
9651 return box(types.esds, new Uint8Array([0x00, // version
9652 0x00, 0x00, 0x00, // flags
9653 // ES_Descriptor
9654 0x03, // tag, ES_DescrTag
9655 0x19, // length
9656 0x00, 0x00, // ES_ID
9657 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
9658 // DecoderConfigDescriptor
9659 0x04, // tag, DecoderConfigDescrTag
9660 0x11, // length
9661 0x40, // object type
9662 0x15, // streamType
9663 0x00, 0x06, 0x00, // bufferSizeDB
9664 0x00, 0x00, 0xda, 0xc0, // maxBitrate
9665 0x00, 0x00, 0xda, 0xc0, // avgBitrate
9666 // DecoderSpecificInfo
9667 0x05, // tag, DecoderSpecificInfoTag
9668 0x02, // length
9669 // ISO/IEC 14496-3, AudioSpecificConfig
9670 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
9671 track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
9672 ]));
9673 };
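/**
 * The final payload bytes above pack the two-byte AudioSpecificConfig from
 * the track's ADTS-derived fields. A worked example with hypothetical values
 * (AAC-LC, 44.1kHz, stereo):
 *
 * @example
 * // audioobjecttype = 2 (AAC-LC), samplingfrequencyindex = 4 (44100),
 * // channelcount = 2
 * // byte 1: 2 << 3 | 4 >>> 1 -> 0x12
 * // byte 2: 4 << 7 | 2 << 3  -> 0x10 (the low bit of the frequency index
 * //         lands in the top bit, the channel config in bits 3-6)
 */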
9674
9675 ftyp = function ftyp() {
9676 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
9677 };
9678
9679 hdlr = function hdlr(type) {
9680 return box(types.hdlr, HDLR_TYPES[type]);
9681 };
9682
9683 mdat = function mdat(data) {
9684 return box(types.mdat, data);
9685 };
9686
9687 mdhd = function mdhd(track) {
9688 var result = new Uint8Array([0x00, // version 0
9689 0x00, 0x00, 0x00, // flags
9690 0x00, 0x00, 0x00, 0x02, // creation_time
9691 0x00, 0x00, 0x00, 0x03, // modification_time
9692 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
9693 track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
9694 0x55, 0xc4, // 'und' language (undetermined)
9695 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
9696 // defined. The sample rate can be parsed out of an ADTS header, for
9697 // instance.
9698
9699 if (track.samplerate) {
9700 result[12] = track.samplerate >>> 24 & 0xFF;
9701 result[13] = track.samplerate >>> 16 & 0xFF;
9702 result[14] = track.samplerate >>> 8 & 0xFF;
9703 result[15] = track.samplerate & 0xFF;
9704 }
9705
9706 return box(types.mdhd, result);
9707 };
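/**
 * The timescale bytes above (0x00015f90) are 90,000, the MPEG-TS clock rate;
 * for audio they are overwritten with the track's sample rate. A sketch with
 * a hypothetical track:
 *
 * @example
 * // track.samplerate = 44100 (0x0000ac44)
 * // result[12..15] become 0x00, 0x00, 0xac, 0x44, so the mdhd timescale
 * // matches the audio sample rate instead of the 90kHz default
 */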
9708
9709 mdia = function mdia(track) {
9710 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
9711 };
9712
9713 mfhd = function mfhd(sequenceNumber) {
9714 return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
9715 (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
9716 ]));
9717 };
9718
9719 minf = function minf(track) {
9720 return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
9721 };
9722
9723 moof = function moof(sequenceNumber, tracks) {
9724 var trackFragments = [],
9725 i = tracks.length; // build traf boxes for each track fragment
9726
9727 while (i--) {
9728 trackFragments[i] = traf(tracks[i]);
9729 }
9730
9731 return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
9732 };
9733 /**
9734 * Returns a movie box.
9735 * @param tracks {array} the tracks associated with this movie
9736 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
9737 */
9738
9739
9740 moov = function moov(tracks) {
9741 var i = tracks.length,
9742 boxes = [];
9743
9744 while (i--) {
9745 boxes[i] = trak(tracks[i]);
9746 }
9747
9748 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
9749 };
9750
9751 mvex = function mvex(tracks) {
9752 var i = tracks.length,
9753 boxes = [];
9754
9755 while (i--) {
9756 boxes[i] = trex(tracks[i]);
9757 }
9758
9759 return box.apply(null, [types.mvex].concat(boxes));
9760 };
9761
9762 mvhd = function mvhd(duration) {
9763 var bytes = new Uint8Array([0x00, // version 0
9764 0x00, 0x00, 0x00, // flags
9765 0x00, 0x00, 0x00, 0x01, // creation_time
9766 0x00, 0x00, 0x00, 0x02, // modification_time
9767 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
9768 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
9769 0x00, 0x01, 0x00, 0x00, // 1.0 rate
9770 0x01, 0x00, // 1.0 volume
9771 0x00, 0x00, // reserved
9772 0x00, 0x00, 0x00, 0x00, // reserved
9773 0x00, 0x00, 0x00, 0x00, // reserved
9774 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
9775 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
9776 0xff, 0xff, 0xff, 0xff // next_track_ID
9777 ]);
9778 return box(types.mvhd, bytes);
9779 };
9780
9781 sdtp = function sdtp(track) {
9782 var samples = track.samples || [],
9783 bytes = new Uint8Array(4 + samples.length),
9784 flags,
9785 i; // leave the full box header (4 bytes) all zero
9786 // write the sample table
9787
9788 for (i = 0; i < samples.length; i++) {
9789 flags = samples[i].flags;
9790 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
9791 }
9792
9793 return box(types.sdtp, bytes);
9794 };
9795
9796 stbl = function stbl(track) {
9797 return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
9798 };
9799
9800 (function () {
9801 var videoSample, audioSample;
9802
9803 stsd = function stsd(track) {
9804 return box(types.stsd, new Uint8Array([0x00, // version 0
9805 0x00, 0x00, 0x00, // flags
9806 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
9807 };
9808
9809 videoSample = function videoSample(track) {
9810 var sps = track.sps || [],
9811 pps = track.pps || [],
9812 sequenceParameterSets = [],
9813 pictureParameterSets = [],
9814 i,
9815 avc1Box; // assemble the SPSs
9816
9817 for (i = 0; i < sps.length; i++) {
9818 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
9819 sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
9820
9821 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
9822 } // assemble the PPSs
9823
9824
9825 for (i = 0; i < pps.length; i++) {
9826 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
9827 pictureParameterSets.push(pps[i].byteLength & 0xFF);
9828 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
9829 }
9830
9831 avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
9832 0x00, 0x01, // data_reference_index
9833 0x00, 0x00, // pre_defined
9834 0x00, 0x00, // reserved
9835 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
9836 (track.width & 0xff00) >> 8, track.width & 0xff, // width
9837 (track.height & 0xff00) >> 8, track.height & 0xff, // height
9838 0x00, 0x48, 0x00, 0x00, // horizresolution
9839 0x00, 0x48, 0x00, 0x00, // vertresolution
9840 0x00, 0x00, 0x00, 0x00, // reserved
9841 0x00, 0x01, // frame_count
9842 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
9843 0x00, 0x18, // depth = 24
9844 0x11, 0x11 // pre_defined = -1
9845 ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
9846 track.profileIdc, // AVCProfileIndication
9847 track.profileCompatibility, // profile_compatibility
9848 track.levelIdc, // AVCLevelIndication
9849 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
9850 ].concat([sps.length], // numOfSequenceParameterSets
9851 sequenceParameterSets, // "SPS"
9852 [pps.length], // numOfPictureParameterSets
9853 pictureParameterSets // "PPS"
9854 ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
9855 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
9856 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
9857 ]))];
9858
9859 if (track.sarRatio) {
9860 var hSpacing = track.sarRatio[0],
9861 vSpacing = track.sarRatio[1];
9862 avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
9863 }
9864
9865 return box.apply(null, avc1Box);
9866 };
9867
9868 audioSample = function audioSample(track) {
9869 return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
9870 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
9871 0x00, 0x01, // data_reference_index
9872 // AudioSampleEntry, ISO/IEC 14496-12
9873 0x00, 0x00, 0x00, 0x00, // reserved
9874 0x00, 0x00, 0x00, 0x00, // reserved
9875 (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
9876 (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
9877 0x00, 0x00, // pre_defined
9878 0x00, 0x00, // reserved
9879 (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
9880 // MP4AudioSampleEntry, ISO/IEC 14496-14
9881 ]), esds(track));
9882 };
9883 })();
9884
9885 tkhd = function tkhd(track) {
9886 var result = new Uint8Array([0x00, // version 0
9887 0x00, 0x00, 0x07, // flags
9888 0x00, 0x00, 0x00, 0x00, // creation_time
9889 0x00, 0x00, 0x00, 0x00, // modification_time
9890 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
9891 0x00, 0x00, 0x00, 0x00, // reserved
9892 (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
9893 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
9894 0x00, 0x00, // layer
9895 0x00, 0x00, // alternate_group
9896 0x01, 0x00, // non-audio track volume
9897 0x00, 0x00, // reserved
9898 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
9899 (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
9900 (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
9901 ]);
9902 return box(types.tkhd, result);
9903 };
9904 /**
9905 * Generate a track fragment (traf) box. A traf box collects metadata
9906 * about tracks in a movie fragment (moof) box.
9907 */
9908
9909
9910 traf = function traf(track) {
9911 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
9912 trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
9913 0x00, 0x00, 0x3a, // flags
9914 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
9915 0x00, 0x00, 0x00, 0x01, // sample_description_index
9916 0x00, 0x00, 0x00, 0x00, // default_sample_duration
9917 0x00, 0x00, 0x00, 0x00, // default_sample_size
9918 0x00, 0x00, 0x00, 0x00 // default_sample_flags
9919 ]));
9920 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
9921 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));
9922 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
9923 0x00, 0x00, 0x00, // flags
9924 // baseMediaDecodeTime
9925 upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
9926 // the containing moof to the first payload byte of the associated
9927 // mdat
9928
9929 dataOffset = 32 + // tfhd
9930 20 + // tfdt
9931 8 + // traf header
9932 16 + // mfhd
9933 8 + // moof header
9934 8; // mdat header
9935 // audio tracks require less metadata
9936
9937 if (track.type === 'audio') {
9938 trackFragmentRun = trun$1(track, dataOffset);
9939 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
9940 } // video tracks should contain an independent and disposable samples
9941 // box (sdtp)
9942 // generate one and adjust offsets to match
9943
9944
9945 sampleDependencyTable = sdtp(track);
9946 trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
9947 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
9948 };
9949 /**
9950 * Generate a track box.
9951 * @param track {object} a track definition
9952 * @return {Uint8Array} the track box
9953 */
9954
9955
9956 trak = function trak(track) {
9957 track.duration = track.duration || 0xffffffff;
9958 return box(types.trak, tkhd(track), mdia(track));
9959 };
9960
9961 trex = function trex(track) {
9962 var result = new Uint8Array([0x00, // version 0
9963 0x00, 0x00, 0x00, // flags
9964 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
9965 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
9966 0x00, 0x00, 0x00, 0x00, // default_sample_duration
9967 0x00, 0x00, 0x00, 0x00, // default_sample_size
9968 0x00, 0x01, 0x00, 0x01 // default_sample_flags
9969 ]); // the last two bytes of default_sample_flags is the sample
9970 // degradation priority, a hint about the importance of this sample
9971 // relative to others. Lower the degradation priority for all sample
9972 // types other than video.
9973
9974 if (track.type !== 'video') {
9975 result[result.length - 1] = 0x00;
9976 }
9977
9978 return box(types.trex, result);
9979 };
9980
9981 (function () {
9982 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
9983 // duration is present for the first sample, it will be present for
9984 // all subsequent samples.
9985 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
9986
9987 trunHeader = function trunHeader(samples, offset) {
9988 var durationPresent = 0,
9989 sizePresent = 0,
9990 flagsPresent = 0,
9991 compositionTimeOffset = 0; // trun flag constants
9992
9993 if (samples.length) {
9994 if (samples[0].duration !== undefined) {
9995 durationPresent = 0x1;
9996 }
9997
9998 if (samples[0].size !== undefined) {
9999 sizePresent = 0x2;
10000 }
10001
10002 if (samples[0].flags !== undefined) {
10003 flagsPresent = 0x4;
10004 }
10005
10006 if (samples[0].compositionTimeOffset !== undefined) {
10007 compositionTimeOffset = 0x8;
10008 }
10009 }
10010
10011 return [0x00, // version 0
10012 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
10013 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
10014 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
10015 ];
10016 };
10017
10018 videoTrun = function videoTrun(track, offset) {
10019 var bytesOffset, bytes, header, samples, sample, i;
10020 samples = track.samples || [];
10021 offset += 8 + 12 + 16 * samples.length;
10022 header = trunHeader(samples, offset);
10023 bytes = new Uint8Array(header.length + samples.length * 16);
10024 bytes.set(header);
10025 bytesOffset = header.length;
10026
10027 for (i = 0; i < samples.length; i++) {
10028 sample = samples[i];
10029 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
10030 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
10031 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
10032 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
10033
10034 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
10035 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
10036 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
10037 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
10038
10039 bytes[bytesOffset++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
10040 bytes[bytesOffset++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
10041 bytes[bytesOffset++] = (sample.flags.degradationPriority & 0xFF00) >>> 8;
10042 bytes[bytesOffset++] = sample.flags.degradationPriority & 0xFF; // sample_flags
10043
10044 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
10045 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
10046 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
10047 bytes[bytesOffset++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
10048 }
10049
10050 return box(types.trun, bytes);
10051 };
10052
10053 audioTrun = function audioTrun(track, offset) {
10054 var bytes, bytesOffset, header, samples, sample, i;
10055 samples = track.samples || [];
10056 offset += 8 + 12 + 8 * samples.length;
10057 header = trunHeader(samples, offset);
10058 bytes = new Uint8Array(header.length + samples.length * 8);
10059 bytes.set(header);
10060 bytesOffset = header.length;
10061
10062 for (i = 0; i < samples.length; i++) {
10063 sample = samples[i];
10064 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
10065 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
10066 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
10067 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
10068
10069 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
10070 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
10071 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
10072 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
10073 }
10074
10075 return box(types.trun, bytes);
10076 };
10077
10078 trun$1 = function trun(track, offset) {
10079 if (track.type === 'audio') {
10080 return audioTrun(track, offset);
10081 }
10082
10083 return videoTrun(track, offset);
10084 };
10085 })();
10086
10087 var mp4Generator = {
10088 ftyp: ftyp,
10089 mdat: mdat,
10090 moof: moof,
10091 moov: moov,
10092 initSegment: function initSegment(tracks) {
10093 var fileType = ftyp(),
10094 movie = moov(tracks),
10095 result;
10096 result = new Uint8Array(fileType.byteLength + movie.byteLength);
10097 result.set(fileType);
10098 result.set(movie, fileType.byteLength);
10099 return result;
10100 }
10101 };
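/**
 * A minimal sketch of driving the generator directly; the track object here
 * is hypothetical and pared down to the fields the boxes above actually read:
 *
 * @example
 * var initSegment = mp4Generator.initSegment([{
 *   id: 1,
 *   type: 'audio',
 *   duration: 0,            // defaulted to 0xffffffff by trak()
 *   channelcount: 2,
 *   samplerate: 44100,
 *   samplesize: 16,
 *   audioobjecttype: 2,     // AAC-LC
 *   samplingfrequencyindex: 4
 * }]);
 * // initSegment is a Uint8Array: ftyp followed by moov, suitable for
 * // appending to a SourceBuffer before any media (moof/mdat) segments
 */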
10102 /**
10103 * mux.js
10104 *
10105 * Copyright (c) Brightcove
10106 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10107 */
10108 // Convert an array of nal units into an array of frames with each frame being
10109 // composed of the nal units that make up that frame
10110 // Also keep track of cumulative data about the frame from the nal units such
10111 // as the frame duration, starting pts, etc.
10112
10113 var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
10114 var i,
10115 currentNal,
10116 currentFrame = [],
10117 frames = []; // TODO added for LHLS, make sure this is OK
10118
10119 frames.byteLength = 0;
10120 frames.nalCount = 0;
10121 frames.duration = 0;
10122 currentFrame.byteLength = 0;
10123
10124 for (i = 0; i < nalUnits.length; i++) {
10125 currentNal = nalUnits[i]; // Split on 'aud'-type nal units
10126
10127 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
10128 // Since the very first nal unit is expected to be an AUD
10129 // only push to the frames array when currentFrame is not empty
10130 if (currentFrame.length) {
10131 currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
10132
10133 frames.byteLength += currentFrame.byteLength;
10134 frames.nalCount += currentFrame.length;
10135 frames.duration += currentFrame.duration;
10136 frames.push(currentFrame);
10137 }
10138
10139 currentFrame = [currentNal];
10140 currentFrame.byteLength = currentNal.data.byteLength;
10141 currentFrame.pts = currentNal.pts;
10142 currentFrame.dts = currentNal.dts;
10143 } else {
10144 // Specifically flag key frames for ease of use later
10145 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
10146 currentFrame.keyFrame = true;
10147 }
10148
10149 currentFrame.duration = currentNal.dts - currentFrame.dts;
10150 currentFrame.byteLength += currentNal.data.byteLength;
10151 currentFrame.push(currentNal);
10152 }
10153 } // For the last frame, use the duration of the previous frame if we
10154 // have nothing better to go on
10155
10156
10157 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
10158 currentFrame.duration = frames[frames.length - 1].duration;
10159 } // Push the final frame
10160 // TODO added for LHLS, make sure this is OK
10161
10162
10163 frames.byteLength += currentFrame.byteLength;
10164 frames.nalCount += currentFrame.length;
10165 frames.duration += currentFrame.duration;
10166 frames.push(currentFrame);
10167 return frames;
10168 }; // Convert an array of frames into an array of Gop with each Gop being composed
10169 // of the frames that make up that Gop
10170 // Also keep track of cumulative data about the Gop from the frames such as the
10171 // Gop duration, starting pts, etc.
10172
10173
10174 var groupFramesIntoGops = function groupFramesIntoGops(frames) {
10175 var i,
10176 currentFrame,
10177 currentGop = [],
10178 gops = []; // We must pre-set some of the values on the Gop since we
10179 // keep running totals of these values
10180
10181 currentGop.byteLength = 0;
10182 currentGop.nalCount = 0;
10183 currentGop.duration = 0;
10184 currentGop.pts = frames[0].pts;
10185 currentGop.dts = frames[0].dts; // store some metadata about all the Gops
10186
10187 gops.byteLength = 0;
10188 gops.nalCount = 0;
10189 gops.duration = 0;
10190 gops.pts = frames[0].pts;
10191 gops.dts = frames[0].dts;
10192
10193 for (i = 0; i < frames.length; i++) {
10194 currentFrame = frames[i];
10195
10196 if (currentFrame.keyFrame) {
10197 // Since the very first frame is expected to be a keyframe
10198 // only push to the gops array when currentGop is not empty
10199 if (currentGop.length) {
10200 gops.push(currentGop);
10201 gops.byteLength += currentGop.byteLength;
10202 gops.nalCount += currentGop.nalCount;
10203 gops.duration += currentGop.duration;
10204 }
10205
10206 currentGop = [currentFrame];
10207 currentGop.nalCount = currentFrame.length;
10208 currentGop.byteLength = currentFrame.byteLength;
10209 currentGop.pts = currentFrame.pts;
10210 currentGop.dts = currentFrame.dts;
10211 currentGop.duration = currentFrame.duration;
10212 } else {
10213 currentGop.duration += currentFrame.duration;
10214 currentGop.nalCount += currentFrame.length;
10215 currentGop.byteLength += currentFrame.byteLength;
10216 currentGop.push(currentFrame);
10217 }
10218 }
10219
10220 if (gops.length && currentGop.duration <= 0) {
10221 currentGop.duration = gops[gops.length - 1].duration;
10222 }
10223
10224 gops.byteLength += currentGop.byteLength;
10225 gops.nalCount += currentGop.nalCount;
10226 gops.duration += currentGop.duration; // push the final Gop
10227
10228 gops.push(currentGop);
10229 return gops;
10230 };
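/**
 * Together, the two grouping helpers above turn a flat list of parsed NAL
 * units into the GOP structure the rest of the pipeline expects. A rough
 * sketch of the flow (nalUnits is hypothetical parser output):
 *
 * @example
 * var frames = groupNalsIntoFrames(nalUnits); // split on AUD nal units
 * var gops = groupFramesIntoGops(frames);     // split on keyframes
 * // gops, like frames, is an array that also carries running totals:
 * // gops.byteLength, gops.nalCount, gops.duration, gops.pts, gops.dts
 */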
10231 /*
10232 * Search for the first keyframe in the GOPs and throw away all frames
10233 * until that keyframe. Then extend the duration of the pulled keyframe
10234 * and pull the PTS and DTS of the keyframe so that it covers the time
10235 * range of the frames that were disposed.
10236 *
10237 * @param {Array} gops video GOPs
10238 * @returns {Array} modified video GOPs
10239 */
10240
10241
10242 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
10243 var currentGop;
10244
10245 if (!gops[0][0].keyFrame && gops.length > 1) {
10246 // Remove the first GOP
10247 currentGop = gops.shift();
10248 gops.byteLength -= currentGop.byteLength;
10249 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
10250 // first gop to cover the time period of the
10251 // frames we just removed
10252
10253 gops[0][0].dts = currentGop.dts;
10254 gops[0][0].pts = currentGop.pts;
10255 gops[0][0].duration += currentGop.duration;
10256 }
10257
10258 return gops;
10259 };
10260 /**
10261 * Default sample object
10262 * see ISO/IEC 14496-12:2012, section 8.6.4.3
10263 */
10264
10265
10266 var createDefaultSample = function createDefaultSample() {
10267 return {
10268 size: 0,
10269 flags: {
10270 isLeading: 0,
10271 dependsOn: 1,
10272 isDependedOn: 0,
10273 hasRedundancy: 0,
10274 degradationPriority: 0,
10275 isNonSyncSample: 1
10276 }
10277 };
10278 };
10279 /*
10280 * Collates information from a video frame into an object for eventual
10281 * entry into an MP4 sample table.
10282 *
10283 * @param {Object} frame the video frame
10284 * @param {Number} dataOffset the byte offset to position the sample
10285 * @return {Object} object containing sample table info for a frame
10286 */
10287
10288
10289 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
10290 var sample = createDefaultSample();
10291 sample.dataOffset = dataOffset;
10292 sample.compositionTimeOffset = frame.pts - frame.dts;
10293 sample.duration = frame.duration;
10294 sample.size = 4 * frame.length; // Space for nal unit size
10295
10296 sample.size += frame.byteLength;
10297
10298 if (frame.keyFrame) {
10299 sample.flags.dependsOn = 2;
10300 sample.flags.isNonSyncSample = 0;
10301 }
10302
10303 return sample;
10304 }; // generate the track's sample table from an array of gops
10305
10306
10307 var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
10308 var h,
10309 i,
10310 sample,
10311 currentGop,
10312 currentFrame,
10313 dataOffset = baseDataOffset || 0,
10314 samples = [];
10315
10316 for (h = 0; h < gops.length; h++) {
10317 currentGop = gops[h];
10318
10319 for (i = 0; i < currentGop.length; i++) {
10320 currentFrame = currentGop[i];
10321 sample = sampleForFrame(currentFrame, dataOffset);
10322 dataOffset += sample.size;
10323 samples.push(sample);
10324 }
10325 }
10326
10327 return samples;
10328 }; // generate the track's raw mdat data from an array of gops
10329
10330
10331 var concatenateNalData = function concatenateNalData(gops) {
10332 var h,
10333 i,
10334 j,
10335 currentGop,
10336 currentFrame,
10337 currentNal,
10338 dataOffset = 0,
10339 nalsByteLength = gops.byteLength,
10340 numberOfNals = gops.nalCount,
10341 totalByteLength = nalsByteLength + 4 * numberOfNals,
10342 data = new Uint8Array(totalByteLength),
10343 view = new DataView(data.buffer); // For each Gop..
10344
10345 for (h = 0; h < gops.length; h++) {
10346 currentGop = gops[h]; // For each Frame..
10347
10348 for (i = 0; i < currentGop.length; i++) {
10349 currentFrame = currentGop[i]; // For each NAL..
10350
10351 for (j = 0; j < currentFrame.length; j++) {
10352 currentNal = currentFrame[j];
10353 view.setUint32(dataOffset, currentNal.data.byteLength);
10354 dataOffset += 4;
10355 data.set(currentNal.data, dataOffset);
10356 dataOffset += currentNal.data.byteLength;
10357 }
10358 }
10359 }
10360
10361 return data;
10362 }; // generate the track's sample table from a frame
10363
10364
10365 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
10366 var sample,
10367 dataOffset = baseDataOffset || 0,
10368 samples = [];
10369 sample = sampleForFrame(frame, dataOffset);
10370 samples.push(sample);
10371 return samples;
10372 }; // generate the track's raw mdat data from a frame
10373
10374
10375 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
10376 var i,
10377 currentNal,
10378 dataOffset = 0,
10379 nalsByteLength = frame.byteLength,
10380 numberOfNals = frame.length,
10381 totalByteLength = nalsByteLength + 4 * numberOfNals,
10382 data = new Uint8Array(totalByteLength),
10383 view = new DataView(data.buffer); // For each NAL..
10384
10385 for (i = 0; i < frame.length; i++) {
10386 currentNal = frame[i];
10387 view.setUint32(dataOffset, currentNal.data.byteLength);
10388 dataOffset += 4;
10389 data.set(currentNal.data, dataOffset);
10390 dataOffset += currentNal.data.byteLength;
10391 }
10392
10393 return data;
10394 };
10395
10396 var frameUtils = {
10397 groupNalsIntoFrames: groupNalsIntoFrames,
10398 groupFramesIntoGops: groupFramesIntoGops,
10399 extendFirstKeyFrame: extendFirstKeyFrame,
10400 generateSampleTable: generateSampleTable$1,
10401 concatenateNalData: concatenateNalData,
10402 generateSampleTableForFrame: generateSampleTableForFrame,
10403 concatenateNalDataForFrame: concatenateNalDataForFrame
10404 };
10405 /**
10406 * mux.js
10407 *
10408 * Copyright (c) Brightcove
10409 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10410 */
10411
10412 var highPrefix = [33, 16, 5, 32, 164, 27];
10413 var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
10414
10415 var zeroFill = function zeroFill(count) {
10416 var a = [];
10417
10418 while (count--) {
10419 a.push(0);
10420 }
10421
10422 return a;
10423 };
10424
10425 var makeTable = function makeTable(metaTable) {
10426 return Object.keys(metaTable).reduce(function (obj, key) {
10427 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
10428 return arr.concat(part);
10429 }, []));
10430 return obj;
10431 }, {});
10432 };
10433
10434 var silence;
10435
10436 var silence_1 = function silence_1() {
10437 if (!silence) {
10438 // Frames-of-silence to use for filling in missing AAC frames
10439 var coneOfSilence = {
10440 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
10441 88200: [highPrefix, [231], zeroFill(170), [56]],
10442 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
10443 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
10444 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
10445 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
10446 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
10447 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
10448 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
10449 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
10450 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
10451 };
10452 silence = makeTable(coneOfSilence);
10453 }
10454
10455 return silence;
10456 };
10457 /**
10458 * mux.js
10459 *
10460 * Copyright (c) Brightcove
10461 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10462 */
10463
10464
10465 var ONE_SECOND_IN_TS$4 = 90000,
10466 // 90kHz clock
10467 secondsToVideoTs,
10468 secondsToAudioTs,
10469 videoTsToSeconds,
10470 audioTsToSeconds,
10471 audioTsToVideoTs,
10472 videoTsToAudioTs,
10473 metadataTsToSeconds;
10474
10475 secondsToVideoTs = function secondsToVideoTs(seconds) {
10476 return seconds * ONE_SECOND_IN_TS$4;
10477 };
10478
10479 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
10480 return seconds * sampleRate;
10481 };
10482
10483 videoTsToSeconds = function videoTsToSeconds(timestamp) {
10484 return timestamp / ONE_SECOND_IN_TS$4;
10485 };
10486
10487 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
10488 return timestamp / sampleRate;
10489 };
10490
10491 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
10492 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
10493 };
10494
10495 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
10496 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
10497 };
10498 /**
10499 * Adjust ID3 tag or caption timing information by the timeline pts values
10500 * (if keepOriginalTimestamps is false) and convert to seconds
10501 */
10502
10503
10504 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
10505 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
10506 };
10507
10508 var clock = {
10509 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
10510 secondsToVideoTs: secondsToVideoTs,
10511 secondsToAudioTs: secondsToAudioTs,
10512 videoTsToSeconds: videoTsToSeconds,
10513 audioTsToSeconds: audioTsToSeconds,
10514 audioTsToVideoTs: audioTsToVideoTs,
10515 videoTsToAudioTs: videoTsToAudioTs,
10516 metadataTsToSeconds: metadataTsToSeconds
10517 };
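
  // Illustrative sketch (not part of the library): video timestamps tick on the
  // 90kHz MPEG-TS clock while audio timestamps tick at the track's sample rate.
  var exampleClockConversions = function () {
    var videoTs = clock.secondsToVideoTs(2); // 180000 ticks on the 90kHz clock
    var seconds = clock.videoTsToSeconds(45000); // 0.5 seconds
    var audioTs = clock.videoTsToAudioTs(90000, 44100); // 44100 sample ticks = 1 second
    return [videoTs, seconds, audioTs];
  };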
10518 /**
10519 * mux.js
10520 *
10521 * Copyright (c) Brightcove
10522 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10523 */
10524
10525 /**
10526 * Sum the `byteLength` properties of the data in each AAC frame
10527 */
10528
10529 var sumFrameByteLengths = function sumFrameByteLengths(array) {
10530 var i,
10531 currentObj,
10532 sum = 0; // sum the byteLengths of each nal unit in the frame
10533
10534 for (i = 0; i < array.length; i++) {
10535 currentObj = array[i];
10536 sum += currentObj.data.byteLength;
10537 }
10538
10539 return sum;
10540 }; // Possibly pad (prefix) the audio track with silence if appending this track
10541 // would lead to the introduction of a gap in the audio buffer
10542
10543
10544 var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
10545 var baseMediaDecodeTimeTs,
10546 frameDuration = 0,
10547 audioGapDuration = 0,
10548 audioFillFrameCount = 0,
10549 audioFillDuration = 0,
10550 silentFrame,
10551 i,
10552 firstFrame;
10553
10554 if (!frames.length) {
10555 return;
10556 }
10557
10558 baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
10559
10560 frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));
10561
10562 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
10563 // insert the shortest possible amount (audio gap or audio to video gap)
10564 audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
10565
10566 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
10567 audioFillDuration = audioFillFrameCount * frameDuration;
10568 } // don't attempt to fill gaps smaller than a single frame or larger
10569 // than a half second
10570
10571
10572 if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
10573 return;
10574 }
10575
10576 silentFrame = silence_1()[track.samplerate];
10577
10578 if (!silentFrame) {
10579 // we don't have a silent frame pregenerated for the sample rate, so use a frame
10580 // from the content instead
10581 silentFrame = frames[0].data;
10582 }
10583
10584 for (i = 0; i < audioFillFrameCount; i++) {
10585 firstFrame = frames[0];
10586 frames.splice(0, 0, {
10587 data: silentFrame,
10588 dts: firstFrame.dts - frameDuration,
10589 pts: firstFrame.pts - frameDuration
10590 });
10591 }
10592
10593 track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
10594 return audioFillDuration;
10595 }; // If the audio segment extends before the earliest allowed DTS
10596 // value, remove AAC frames until the segment starts at or after the
10597 // earliest allowed DTS so that we don't end up with a negative
10598 // baseMediaDecodeTime for the audio track
10599
10600
10601 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
10602 if (track.minSegmentDts >= earliestAllowedDts) {
10603 return adtsFrames;
10604 } // We will need to recalculate the earliest segment DTS
10605
10606
10607 track.minSegmentDts = Infinity;
10608 return adtsFrames.filter(function (currentFrame) {
10609 // If this is an allowed frame, keep it and record its DTS
10610 if (currentFrame.dts >= earliestAllowedDts) {
10611 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
10612 track.minSegmentPts = track.minSegmentDts;
10613 return true;
10614 } // Otherwise, discard it
10615
10616
10617 return false;
10618 });
10619 }; // generate the track's sample table from an array of frames
10620
10621
10622 var generateSampleTable = function generateSampleTable(frames) {
10623 var i,
10624 currentFrame,
10625 samples = [];
10626
10627 for (i = 0; i < frames.length; i++) {
10628 currentFrame = frames[i];
10629 samples.push({
10630 size: currentFrame.data.byteLength,
10631 duration: 1024 // every AAC frame decodes to 1024 audio samples
10632
10633 });
10634 }
10635
10636 return samples;
10637 }; // concatenate the data of an array of AAC frames into a single Uint8Array
10638
10639
10640 var concatenateFrameData = function concatenateFrameData(frames) {
10641 var i,
10642 currentFrame,
10643 dataOffset = 0,
10644 data = new Uint8Array(sumFrameByteLengths(frames));
10645
10646 for (i = 0; i < frames.length; i++) {
10647 currentFrame = frames[i];
10648 data.set(currentFrame.data, dataOffset);
10649 dataOffset += currentFrame.data.byteLength;
10650 }
10651
10652 return data;
10653 };
10654
10655 var audioFrameUtils = {
10656 prefixWithSilence: prefixWithSilence,
10657 trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
10658 generateSampleTable: generateSampleTable,
10659 concatenateFrameData: concatenateFrameData
10660 };
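
  // Illustrative sketch (not part of the library): for a 44.1kHz AAC track the
  // silence-fill math in prefixWithSilence works out to
  //   frameDuration = Math.ceil(90000 / (44100 / 1024)) = 2090 ticks (~23.2ms),
  // and gaps are only filled when they fit at least one whole frame and no more
  // than half a second. The sample-table helpers are simpler:
  var exampleAudioFrameUtils = function () {
    var frames = [
      { data: new Uint8Array(100), dts: 0, pts: 0 },
      { data: new Uint8Array(120), dts: 2090, pts: 2090 }
    ];
    var samples = audioFrameUtils.generateSampleTable(frames); // [{ size: 100, duration: 1024 }, { size: 120, duration: 1024 }]
    var mdatData = audioFrameUtils.concatenateFrameData(frames); // Uint8Array(220)
    return { samples: samples, byteLength: mdatData.byteLength };
  };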
10661 /**
10662 * mux.js
10663 *
10664 * Copyright (c) Brightcove
10665 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10666 */
10667
10668 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
10669 /**
10670 * Store information about the start and end of the track and the
10671 * duration for each frame/sample we process in order to calculate
10672 * the baseMediaDecodeTime
10673 */
10674
10675 var collectDtsInfo = function collectDtsInfo(track, data) {
10676 if (typeof data.pts === 'number') {
10677 if (track.timelineStartInfo.pts === undefined) {
10678 track.timelineStartInfo.pts = data.pts;
10679 }
10680
10681 if (track.minSegmentPts === undefined) {
10682 track.minSegmentPts = data.pts;
10683 } else {
10684 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
10685 }
10686
10687 if (track.maxSegmentPts === undefined) {
10688 track.maxSegmentPts = data.pts;
10689 } else {
10690 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
10691 }
10692 }
10693
10694 if (typeof data.dts === 'number') {
10695 if (track.timelineStartInfo.dts === undefined) {
10696 track.timelineStartInfo.dts = data.dts;
10697 }
10698
10699 if (track.minSegmentDts === undefined) {
10700 track.minSegmentDts = data.dts;
10701 } else {
10702 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
10703 }
10704
10705 if (track.maxSegmentDts === undefined) {
10706 track.maxSegmentDts = data.dts;
10707 } else {
10708 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
10709 }
10710 }
10711 };
10712 /**
10713 * Clear values used to calculate the baseMediaDecodeTime between
10714 * tracks
10715 */
10716
10717
10718 var clearDtsInfo = function clearDtsInfo(track) {
10719 delete track.minSegmentDts;
10720 delete track.maxSegmentDts;
10721 delete track.minSegmentPts;
10722 delete track.maxSegmentPts;
10723 };
10724 /**
10725 * Calculate the track's baseMediaDecodeTime based on the earliest
10726 * DTS the transmuxer has ever seen and the minimum DTS for the
10727 * current track
10728 * @param track {object} track metadata configuration
10729 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
10730 * in the source; false to adjust the first segment to start at 0.
10731 */
10732
10733
10734 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
10735 var baseMediaDecodeTime,
10736 scale,
10737 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
10738
10739 if (!keepOriginalTimestamps) {
10740 minSegmentDts -= track.timelineStartInfo.dts;
10741 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
10742 // we want the start of the first segment to be placed
10743
10744
10745 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
10746
10747 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
10748
10749 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
10750
10751 if (track.type === 'audio') {
10752 // Audio has a different clock equal to the sampling_rate so we need to
10753 // scale the PTS values into the clock rate of the track
10754 scale = track.samplerate / ONE_SECOND_IN_TS$3;
10755 baseMediaDecodeTime *= scale;
10756 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
10757 }
10758
10759 return baseMediaDecodeTime;
10760 };
10761
10762 var trackDecodeInfo = {
10763 clearDtsInfo: clearDtsInfo,
10764 calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
10765 collectDtsInfo: collectDtsInfo
10766 };
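
  // Illustrative sketch (not part of the library): a worked example of the
  // rescaling above. A segment starting 1 second after the timeline start on a
  // 44.1kHz audio track yields a baseMediaDecodeTime in sample ticks:
  var exampleBaseMediaDecodeTime = function () {
    var track = {
      type: 'audio',
      samplerate: 44100,
      minSegmentDts: 180000, // 2s on the 90kHz clock
      timelineStartInfo: { dts: 90000, baseMediaDecodeTime: 0 } // timeline starts at 1s
    };
    // (180000 - 90000) * (44100 / 90000) = 44100
    return trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, false);
  };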
10767 /**
10768 * mux.js
10769 *
10770 * Copyright (c) Brightcove
10771 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10772 *
10773 * Reads in-band caption information from a video elementary
10774 * stream. Captions must follow the CEA-708 standard for injection
10775 * into an MPEG-2 transport stream.
10776 * @see https://en.wikipedia.org/wiki/CEA-708
10777 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
10778 */
10779 // SEI NAL units have a payload type field to indicate how they are to be
10780 // interpreted. CEA-708 caption content is always transmitted with
10781 // payload type 0x04.
10782
10783 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
10784 RBSP_TRAILING_BITS = 128;
10785 /**
10786 * Parse a supplemental enhancement information (SEI) NAL unit.
10787 * Stops parsing once a message of type ITU T T35 has been found.
10788 *
10789 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
10790 * @return {object} the parsed SEI payload
10791 * @see Rec. ITU-T H.264, 7.3.2.3.1
10792 */
10793
10794 var parseSei = function parseSei(bytes) {
10795 var i = 0,
10796 result = {
10797 payloadType: -1,
10798 payloadSize: 0
10799 },
10800 payloadType = 0,
10801 payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
10802
10803 while (i < bytes.byteLength) {
10804 // stop once we have hit the end of the sei_rbsp
10805 if (bytes[i] === RBSP_TRAILING_BITS) {
10806 break;
10807 } // Parse payload type
10808
10809
10810 while (bytes[i] === 0xFF) {
10811 payloadType += 255;
10812 i++;
10813 }
10814
10815 payloadType += bytes[i++]; // Parse payload size
10816
10817 while (bytes[i] === 0xFF) {
10818 payloadSize += 255;
10819 i++;
10820 }
10821
10822 payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
10823 // there can only ever be one caption message in a frame's sei
10824
10825 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
10826 var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
10827
10828 if (userIdentifier === 'GA94') {
10829 result.payloadType = payloadType;
10830 result.payloadSize = payloadSize;
10831 result.payload = bytes.subarray(i, i + payloadSize);
10832 break;
10833 } else {
10834 result.payload = void 0;
10835 }
10836 } // skip the payload and parse the next message
10837
10838
10839 i += payloadSize;
10840 payloadType = 0;
10841 payloadSize = 0;
10842 }
10843
10844 return result;
10845 }; // see ANSI/SCTE 128-1 (2013), section 8.1
10846
10847
10848 var parseUserData = function parseUserData(sei) {
10849 // itu_t_t35_country_code must be 181 (United States) for
10850 // captions
10851 if (sei.payload[0] !== 181) {
10852 return null;
10853 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
10854
10855
10856 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
10857 return null;
10858 } // the user_identifier should be "GA94" to indicate ATSC1 data
10859
10860
10861 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
10862 return null;
10863 } // finally, user_data_type_code should be 0x03 for caption data
10864
10865
10866 if (sei.payload[7] !== 0x03) {
10867 return null;
10868 } // return the user_data_type_structure and strip the trailing
10869 // marker bits
10870
10871
10872 return sei.payload.subarray(8, sei.payload.length - 1);
10873 }; // see CEA-708-D, section 4.4
10874
10875
10876 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
10877 var results = [],
10878 i,
10879 count,
10880 offset,
10881 data; // if this is just filler, return immediately
10882
10883 if (!(userData[0] & 0x40)) {
10884 return results;
10885 } // parse out the cc_data_1 and cc_data_2 fields
10886
10887
10888 count = userData[0] & 0x1f;
10889
10890 for (i = 0; i < count; i++) {
10891 offset = i * 3;
10892 data = {
10893 type: userData[offset + 2] & 0x03,
10894 pts: pts
10895 }; // capture cc data when cc_valid is 1
10896
10897 if (userData[offset + 2] & 0x04) {
10898 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
10899 results.push(data);
10900 }
10901 }
10902
10903 return results;
10904 };
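
  // Illustrative sketch (not part of the library): the three parsers above
  // chain together. parseSei finds the ITU-T T.35 message, parseUserData
  // validates and unwraps the ATSC envelope, and parseCaptionPackets pulls out
  // the individual caption byte-pairs.
  var exampleCaptionParsing = function () {
    var seiBytes = new Uint8Array([
      0x04, // payload type: user_data_registered_itu_t_t35
      0x0e, // payload size: 14 bytes
      0xb5, // itu_t_t35_country_code: United States
      0x00, 0x31, // itu_t_t35_provider_code: ATSC
      0x47, 0x41, 0x39, 0x34, // user_identifier: 'GA94'
      0x03, // user_data_type_code: caption data
      0x41, // process_cc_data_flag set, cc_count = 1
      0xff, // em_data
      0xfc, // marker bits + cc_valid + cc_type 0 (CEA-608, field 1)
      0x14, 0x20, // one caption byte-pair (an RCL control code)
      0xff, // marker_bits
      0x80 // rbsp trailing bits
    ]);
    var sei = parseSei(seiBytes); // { payloadType: 4, payloadSize: 14, payload: Uint8Array(14) }
    var userData = parseUserData(sei); // [0x41, 0xff, 0xfc, 0x14, 0x20]
    return parseCaptionPackets(90000, userData); // [{ type: 0, pts: 90000, ccData: 0x1420 }]
  };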
10905
10906 var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
10907 var length = data.byteLength,
10908 emulationPreventionBytesPositions = [],
10909 i = 1,
10910 newLength,
10911 newData; // Find all `Emulation Prevention Bytes`
10912
10913 while (i < length - 2) {
10914 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
10915 emulationPreventionBytesPositions.push(i + 2);
10916 i += 2;
10917 } else {
10918 i++;
10919 }
10920 } // If no Emulation Prevention Bytes were found just return the original
10921 // array
10922
10923
10924 if (emulationPreventionBytesPositions.length === 0) {
10925 return data;
10926 } // Create a new array to hold the NAL unit data
10927
10928
10929 newLength = length - emulationPreventionBytesPositions.length;
10930 newData = new Uint8Array(newLength);
10931 var sourceIndex = 0;
10932
10933 for (i = 0; i < newLength; sourceIndex++, i++) {
10934 if (sourceIndex === emulationPreventionBytesPositions[0]) {
10935 // Skip this byte
10936 sourceIndex++; // Remove this position index
10937
10938 emulationPreventionBytesPositions.shift();
10939 }
10940
10941 newData[i] = data[sourceIndex];
10942 }
10943
10944 return newData;
10945 };
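
  // Illustrative sketch (not part of the library): emulation prevention bytes
  // (a 0x03 inserted after two zero bytes) are stripped so the raw RBSP can be
  // parsed. The scan starts at index 1; in practice index 0 is the NAL header
  // byte and can never begin a zero run.
  var exampleDiscardEmulationPrevention = function () {
    var escaped = new Uint8Array([0x67, 0x00, 0x00, 0x03, 0x01, 0x42]);
    return discardEmulationPreventionBytes$1(escaped); // Uint8Array [0x67, 0x00, 0x00, 0x01, 0x42]
  };
  // exports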
10946
10947
10948 var captionPacketParser = {
10949 parseSei: parseSei,
10950 parseUserData: parseUserData,
10951 parseCaptionPackets: parseCaptionPackets,
10952 discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
10953 USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
10954 }; // Link To Transport
10955 // -----------------
10956
10957 var CaptionStream$1 = function CaptionStream(options) {
10958 options = options || {};
10959 CaptionStream.prototype.init.call(this); // parse708captions flag, defaults to true
10960
10961 this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
10962 this.captionPackets_ = [];
10963 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
10964 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
10965 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
10966 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
10967 ];
10968
10969 if (this.parse708captions_) {
10970 this.cc708Stream_ = new Cea708Stream({
10971 captionServices: options.captionServices
10972 }); // eslint-disable-line no-use-before-define
10973 }
10974
10975 this.reset(); // forward data and done events from CCs to this CaptionStream
10976
10977 this.ccStreams_.forEach(function (cc) {
10978 cc.on('data', this.trigger.bind(this, 'data'));
10979 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
10980 cc.on('done', this.trigger.bind(this, 'done'));
10981 }, this);
10982
10983 if (this.parse708captions_) {
10984 this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
10985 this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
10986 this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
10987 }
10988 };
10989
10990 CaptionStream$1.prototype = new stream();
10991
10992 CaptionStream$1.prototype.push = function (event) {
10993 var sei, userData, newCaptionPackets; // only examine SEI NALs
10994
10995 if (event.nalUnitType !== 'sei_rbsp') {
10996 return;
10997 } // parse the sei
10998
10999
11000 sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip
11001
11002 if (!sei.payload) {
11003 return;
11004 } // ignore everything but user_data_registered_itu_t_t35
11005
11006
11007 if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
11008 return;
11009 } // parse out the user data payload
11010
11011
11012 userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData
11013
11014 if (!userData) {
11015 return;
11016 } // Sometimes, the same segment # will be downloaded twice. To stop the
11017 // caption data from being processed twice, we track the latest dts we've
11018 // received and ignore everything with a dts before that. However, since
11019 // data for a specific dts can be split across packets on either side of
11020 // a segment boundary, we need to make sure we *don't* ignore the packets
11021 // from the *next* segment that have dts === this.latestDts_. By constantly
11022 // tracking the number of packets received with dts === this.latestDts_, we
11023 // know how many should be ignored once we start receiving duplicates.
11024
11025
11026 if (event.dts < this.latestDts_) {
11027 // We've started getting older data, so set the flag.
11028 this.ignoreNextEqualDts_ = true;
11029 return;
11030 } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
11031 this.numSameDts_--;
11032
11033 if (!this.numSameDts_) {
11034 // We've received the last duplicate packet, time to start processing again
11035 this.ignoreNextEqualDts_ = false;
11036 }
11037
11038 return;
11039 } // parse out CC data packets and save them for later
11040
11041
11042 newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
11043 this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
11044
11045 if (this.latestDts_ !== event.dts) {
11046 this.numSameDts_ = 0;
11047 }
11048
11049 this.numSameDts_++;
11050 this.latestDts_ = event.dts;
11051 };
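
  // Illustrative sketch (not part of the library): a CaptionStream consumes
  // parsed H.264 NAL unit events and emits caption cues. The empty SEI below
  // simply exercises the early-return guards in push.
  var exampleCaptionStreamUsage = function () {
    var captionStream = new CaptionStream$1({ parse708captions: true });
    captionStream.on('data', function (cue) {
      // cue: { startPts, endPts, text, stream: 'CC1'..'CC4' or 'cc708_N' }
    });
    captionStream.push({ nalUnitType: 'sei_rbsp', escapedRBSP: new Uint8Array(0), pts: 0, dts: 0 });
    captionStream.flush();
  };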
11052
11053 CaptionStream$1.prototype.flushCCStreams = function (flushType) {
11054 this.ccStreams_.forEach(function (cc) {
11055 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
11056 }, this);
11057 };
11058
11059 CaptionStream$1.prototype.flushStream = function (flushType) {
11060 // make sure we actually parsed captions before proceeding
11061 if (!this.captionPackets_.length) {
11062 this.flushCCStreams(flushType);
11063 return;
11064 } // In Chrome, the Array#sort function is not stable so add a
11065 // presortIndex that we can use to ensure we get a stable sort
11066
11067
11068 this.captionPackets_.forEach(function (elem, idx) {
11069 elem.presortIndex = idx;
11070 }); // sort caption byte-pairs based on their PTS values
11071
11072 this.captionPackets_.sort(function (a, b) {
11073 if (a.pts === b.pts) {
11074 return a.presortIndex - b.presortIndex;
11075 }
11076
11077 return a.pts - b.pts;
11078 });
11079 this.captionPackets_.forEach(function (packet) {
11080 if (packet.type < 2) {
11081 // Dispatch packet to the right Cea608Stream
11082 this.dispatchCea608Packet(packet);
11083 } else {
11084 // Dispatch packet to the Cea708Stream
11085 this.dispatchCea708Packet(packet);
11086 }
11087 }, this);
11088 this.captionPackets_.length = 0;
11089 this.flushCCStreams(flushType);
11090 };
11091
11092 CaptionStream$1.prototype.flush = function () {
11093 return this.flushStream('flush');
11094 }; // Only called if handling partial data
11095
11096
11097 CaptionStream$1.prototype.partialFlush = function () {
11098 return this.flushStream('partialFlush');
11099 };
11100
11101 CaptionStream$1.prototype.reset = function () {
11102 this.latestDts_ = null;
11103 this.ignoreNextEqualDts_ = false;
11104 this.numSameDts_ = 0;
11105 this.activeCea608Channel_ = [null, null];
11106 this.ccStreams_.forEach(function (ccStream) {
11107 ccStream.reset();
11108 });
11109 }; // From the CEA-608 spec:
11110
11111 /*
11112 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
11113 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
11114 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
11115 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
11116 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
11117 * to switch to captioning or Text.
11118 */
11119 // With that in mind, we ignore any data between an XDS control code and a
11120 // subsequent closed-captioning control code.
11121
11122
11123 CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
11124 // NOTE: packet.type is the CEA608 field
11125 if (this.setsTextOrXDSActive(packet)) {
11126 this.activeCea608Channel_[packet.type] = null;
11127 } else if (this.setsChannel1Active(packet)) {
11128 this.activeCea608Channel_[packet.type] = 0;
11129 } else if (this.setsChannel2Active(packet)) {
11130 this.activeCea608Channel_[packet.type] = 1;
11131 }
11132
11133 if (this.activeCea608Channel_[packet.type] === null) {
11134 // If we haven't received anything to set the active channel, or the
11135 // packets are Text/XDS data, discard the data; we don't want jumbled
11136 // captions
11137 return;
11138 }
11139
11140 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
11141 };
11142
11143 CaptionStream$1.prototype.setsChannel1Active = function (packet) {
11144 return (packet.ccData & 0x7800) === 0x1000;
11145 };
11146
11147 CaptionStream$1.prototype.setsChannel2Active = function (packet) {
11148 return (packet.ccData & 0x7800) === 0x1800;
11149 };
11150
11151 CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
11152 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
11153 };
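
  // Illustrative sketch (not part of the library): with the parity bits already
  // removed, the masks above classify control pairs. For example, RCL (0x14,
  // 0x20) arrives as 0x1420 on data channel 1 and 0x1c20 on data channel 2:
  var exampleCea608ChannelMasks = function () {
    return [
      (0x1420 & 0x7800) === 0x1000, // true: sets channel 1 active
      (0x1c20 & 0x7800) === 0x1800 // true: sets channel 2 active
    ];
  };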
11154
11155 CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
11156 if (this.parse708captions_) {
11157 this.cc708Stream_.push(packet);
11158 }
11159 }; // ----------------------
11160 // Session to Application
11161 // ----------------------
11162 // This hash maps special and extended character codes to their
11163 // proper Unicode equivalent. The first one-byte key is just a
11164 // non-standard character code. The two-byte keys that follow are
11165 // the extended CEA708 character codes, along with the preceding
11166 // 0x10 extended character byte to distinguish these codes from
11167 // non-extended character codes. Every CEA708 character code that
11168 // is not in this object maps directly to a standard unicode
11169 // character code.
11170 // The transparent space and non-breaking transparent space are
11171 // technically not fully supported since there is no code to
11172 // make them transparent, so they have normal non-transparent
11173 // stand-ins.
11174 // The special closed caption (CC) character isn't a standard
11175 // unicode character, so a fairly similar unicode character was
11176 // chosen in its place.
11177
11178
11179 var CHARACTER_TRANSLATION_708 = {
11180 0x7f: 0x266a,
11181 // ♪
11182 0x1020: 0x20,
11183 // Transparent Space
11184 0x1021: 0xa0,
11185 // Non-breaking Transparent Space
11186 0x1025: 0x2026,
11187 // …
11188 0x102a: 0x0160,
11189 // Š
11190 0x102c: 0x0152,
11191 // Œ
11192 0x1030: 0x2588,
11193 // █
11194 0x1031: 0x2018,
11195 // ‘
11196 0x1032: 0x2019,
11197 // ’
11198 0x1033: 0x201c,
11199 // “
11200 0x1034: 0x201d,
11201 // ”
11202 0x1035: 0x2022,
11203 // •
11204 0x1039: 0x2122,
11205 // ™
11206 0x103a: 0x0161,
11207 // š
11208 0x103c: 0x0153,
11209 // œ
11210 0x103d: 0x2120,
11211 // ℠
11212 0x103f: 0x0178,
11213 // Ÿ
11214 0x1076: 0x215b,
11215 // ⅛
11216 0x1077: 0x215c,
11217 // ⅜
11218 0x1078: 0x215d,
11219 // ⅝
11220 0x1079: 0x215e,
11221 // ⅞
11222 0x107a: 0x23d0,
11223 // ⏐
11224 0x107b: 0x23a4,
11225 // ⎤
11226 0x107c: 0x23a3,
11227 // ⎣
11228 0x107d: 0x23af,
11229 // ⎯
11230 0x107e: 0x23a6,
11231 // ⎦
11232 0x107f: 0x23a1,
11233 // ⎡
11234 0x10a0: 0x3138 // ㄸ (CC char)
11235
11236 };
11237
11238 var get708CharFromCode = function get708CharFromCode(code) {
11239 var newCode = CHARACTER_TRANSLATION_708[code] || code;
11240
11241 if (code & 0x1000 && code === newCode) {
11242 // Invalid extended code
11243 return '';
11244 }
11245
11246 return String.fromCharCode(newCode);
11247 };
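
  // Illustrative sketch (not part of the library): a few lookups against the
  // translation table above.
  var example708CharLookup = function () {
    return [
      get708CharFromCode(0x7f), // '♪' (translated one-byte code)
      get708CharFromCode(0x1025), // '…' (extended code, 0x10 prefix)
      get708CharFromCode(0x1001) // '' (extended code with no mapping is dropped)
    ];
  };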
11248
11249 var within708TextBlock = function within708TextBlock(b) {
11250 return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
11251 };
11252
11253 var Cea708Window = function Cea708Window(windowNum) {
11254 this.windowNum = windowNum;
11255 this.reset();
11256 };
11257
11258 Cea708Window.prototype.reset = function () {
11259 this.clearText();
11260 this.pendingNewLine = false;
11261 this.winAttr = {};
11262 this.penAttr = {};
11263 this.penLoc = {};
11264 this.penColor = {}; // These default values are arbitrary,
11265 // defineWindow will usually override them
11266
11267 this.visible = 0;
11268 this.rowLock = 0;
11269 this.columnLock = 0;
11270 this.priority = 0;
11271 this.relativePositioning = 0;
11272 this.anchorVertical = 0;
11273 this.anchorHorizontal = 0;
11274 this.anchorPoint = 0;
11275 this.rowCount = 1;
11276 this.virtualRowCount = this.rowCount + 1;
11277 this.columnCount = 41;
11278 this.windowStyle = 0;
11279 this.penStyle = 0;
11280 };
11281
11282 Cea708Window.prototype.getText = function () {
11283 return this.rows.join('\n');
11284 };
11285
11286 Cea708Window.prototype.clearText = function () {
11287 this.rows = [''];
11288 this.rowIdx = 0;
11289 };
11290
11291 Cea708Window.prototype.newLine = function (pts) {
11292 if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
11293 this.beforeRowOverflow(pts);
11294 }
11295
11296 if (this.rows.length > 0) {
11297 this.rows.push('');
11298 this.rowIdx++;
11299 } // Show all virtual rows since there's no visible scrolling
11300
11301
11302 while (this.rows.length > this.virtualRowCount) {
11303 this.rows.shift();
11304 this.rowIdx--;
11305 }
11306 };
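
  // Illustrative sketch (not part of the library): rows beyond virtualRowCount
  // scroll out the top of the window as new lines arrive.
  var exampleCea708WindowRows = function () {
    var win = new Cea708Window(0); // virtualRowCount defaults to rowCount + 1 = 2
    win.addText('first');
    win.newLine(0);
    win.addText('second');
    win.newLine(0); // 'first' is shifted out to stay within 2 virtual rows
    win.addText('third');
    return win.getText(); // 'second\nthird'
  };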
11307
11308 Cea708Window.prototype.isEmpty = function () {
11309 if (this.rows.length === 0) {
11310 return true;
11311 } else if (this.rows.length === 1) {
11312 return this.rows[0] === '';
11313 }
11314
11315 return false;
11316 };
11317
11318 Cea708Window.prototype.addText = function (text) {
11319 this.rows[this.rowIdx] += text;
11320 };
11321
11322 Cea708Window.prototype.backspace = function () {
11323 if (!this.isEmpty()) {
11324 var row = this.rows[this.rowIdx];
11325 this.rows[this.rowIdx] = row.substr(0, row.length - 1);
11326 }
11327 };
11328
11329 var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
11330 this.serviceNum = serviceNum;
11331 this.text = '';
11332 this.currentWindow = new Cea708Window(-1);
11333 this.windows = [];
11334 this.stream = stream; // Try to setup a TextDecoder if an `encoding` value was provided
11335
11336 if (typeof encoding === 'string') {
11337 this.createTextDecoder(encoding);
11338 }
11339 };
11340 /**
11341 * Initialize service windows
11342 * Must be run before service use
11343 *
11344 * @param {Integer} pts PTS value
11345 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
11346 */
11347
11348
11349 Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
11350 this.startPts = pts;
11351
11352 for (var win = 0; win < 8; win++) {
11353 this.windows[win] = new Cea708Window(win);
11354
11355 if (typeof beforeRowOverflow === 'function') {
11356 this.windows[win].beforeRowOverflow = beforeRowOverflow;
11357 }
11358 }
11359 };
11360 /**
11361 * Set current window of service to be affected by commands
11362 *
11363 * @param {Integer} windowNum Window number
11364 */
11365
11366
11367 Cea708Service.prototype.setCurrentWindow = function (windowNum) {
11368 this.currentWindow = this.windows[windowNum];
11369 };
11370 /**
11371 * Try to create a TextDecoder if it is natively supported
11372 */
11373
11374
11375 Cea708Service.prototype.createTextDecoder = function (encoding) {
11376 if (typeof TextDecoder === 'undefined') {
11377 this.stream.trigger('log', {
11378 level: 'warn',
11379 message: 'The `encoding` option is unsupported without TextDecoder support'
11380 });
11381 } else {
11382 try {
11383 this.textDecoder_ = new TextDecoder(encoding);
11384 } catch (error) {
11385 this.stream.trigger('log', {
11386 level: 'warn',
11387 message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
11388 });
11389 }
11390 }
11391 };
11392
11393 var Cea708Stream = function Cea708Stream(options) {
11394 options = options || {};
11395 Cea708Stream.prototype.init.call(this);
11396 var self = this;
11397 var captionServices = options.captionServices || {};
11398 var captionServiceEncodings = {};
11399 var serviceProps; // Get service encodings from captionServices option block
11400
11401 Object.keys(captionServices).forEach(function (serviceName) {
11402 serviceProps = captionServices[serviceName];
11403
11404 if (/^SERVICE/.test(serviceName)) {
11405 captionServiceEncodings[serviceName] = serviceProps.encoding;
11406 }
11407 });
11408 this.serviceEncodings = captionServiceEncodings;
11409 this.current708Packet = null;
11410 this.services = {};
11411
11412 this.push = function (packet) {
11413 if (packet.type === 3) {
11414 // 708 packet start
11415 self.new708Packet();
11416 self.add708Bytes(packet);
11417 } else {
11418 if (self.current708Packet === null) {
11419 // This should only happen at the start of a file if there's no packet start.
11420 self.new708Packet();
11421 }
11422
11423 self.add708Bytes(packet);
11424 }
11425 };
11426 };
11427
11428 Cea708Stream.prototype = new stream();
11429 /**
11430 * Push current 708 packet, create new 708 packet.
11431 */
11432
11433 Cea708Stream.prototype.new708Packet = function () {
11434 if (this.current708Packet !== null) {
11435 this.push708Packet();
11436 }
11437
11438 this.current708Packet = {
11439 data: [],
11440 ptsVals: []
11441 };
11442 };
11443 /**
11444 * Add pts and both bytes from packet into current 708 packet.
11445 */
11446
11447
11448 Cea708Stream.prototype.add708Bytes = function (packet) {
11449 var data = packet.ccData;
11450 var byte0 = data >>> 8;
11451 var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
11452 // that service blocks will always line up with byte pairs.
11453
11454 this.current708Packet.ptsVals.push(packet.pts);
11455 this.current708Packet.data.push(byte0);
11456 this.current708Packet.data.push(byte1);
11457 };
11458 /**
11459 * Parse completed 708 packet into service blocks and push each service block.
11460 */
11461
11462
11463 Cea708Stream.prototype.push708Packet = function () {
11464 var packet708 = this.current708Packet;
11465 var packetData = packet708.data;
11466 var serviceNum = null;
11467 var blockSize = null;
11468 var i = 0;
11469 var b = packetData[i++];
11470 packet708.seq = b >> 6;
11471 packet708.sizeCode = b & 0x3f; // 0b00111111;
11472
11473 for (; i < packetData.length; i++) {
11474 b = packetData[i++];
11475 serviceNum = b >> 5;
11476 blockSize = b & 0x1f; // 0b00011111
11477
11478 if (serviceNum === 7 && blockSize > 0) {
11479 // Extended service num
11480 b = packetData[i++];
11481 serviceNum = b;
11482 }
11483
11484 this.pushServiceBlock(serviceNum, i, blockSize);
11485
11486 if (blockSize > 0) {
11487 i += blockSize - 1;
11488 }
11489 }
11490 };
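
  // Illustrative sketch (not part of the library): decoding one service block
  // header byte the way push708Packet does.
  var example708ServiceBlockHeader = function () {
    var headerByte = 0x42; // 0b010_00010
    return {
      serviceNum: headerByte >> 5, // 2
      blockSize: headerByte & 0x1f // 2 bytes of service data follow
    };
  };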
11491 /**
11492 * Parse service block, execute commands, read text.
11493 *
11494 * Note: While many of these commands serve important purposes,
11495 * many others just parse out the parameters or attributes, but
11496 * nothing is done with them because this is not a full and complete
11497 * implementation of the entire 708 spec.
11498 *
11499 * @param {Integer} serviceNum Service number
11500 * @param {Integer} start Start index of the 708 packet data
11501 * @param {Integer} size Block size
11502 */
11503
11504
11505 Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
11506 var b;
11507 var i = start;
11508 var packetData = this.current708Packet.data;
11509 var service = this.services[serviceNum];
11510
11511 if (!service) {
11512 service = this.initService(serviceNum, i);
11513 }
11514
11515 for (; i < start + size && i < packetData.length; i++) {
11516 b = packetData[i];
11517
11518 if (within708TextBlock(b)) {
11519 i = this.handleText(i, service);
11520 } else if (b === 0x18) {
11521 i = this.multiByteCharacter(i, service);
11522 } else if (b === 0x10) {
11523 i = this.extendedCommands(i, service);
11524 } else if (0x80 <= b && b <= 0x87) {
11525 i = this.setCurrentWindow(i, service);
11526 } else if (0x98 <= b && b <= 0x9f) {
11527 i = this.defineWindow(i, service);
11528 } else if (b === 0x88) {
11529 i = this.clearWindows(i, service);
11530 } else if (b === 0x8c) {
11531 i = this.deleteWindows(i, service);
11532 } else if (b === 0x89) {
11533 i = this.displayWindows(i, service);
11534 } else if (b === 0x8a) {
11535 i = this.hideWindows(i, service);
11536 } else if (b === 0x8b) {
11537 i = this.toggleWindows(i, service);
11538 } else if (b === 0x97) {
11539 i = this.setWindowAttributes(i, service);
11540 } else if (b === 0x90) {
11541 i = this.setPenAttributes(i, service);
11542 } else if (b === 0x91) {
11543 i = this.setPenColor(i, service);
11544 } else if (b === 0x92) {
11545 i = this.setPenLocation(i, service);
11546 } else if (b === 0x8f) {
11547 service = this.reset(i, service);
11548 } else if (b === 0x08) {
11549 // BS: Backspace
11550 service.currentWindow.backspace();
11551 } else if (b === 0x0c) {
11552 // FF: Form feed
11553 service.currentWindow.clearText();
11554 } else if (b === 0x0d) {
11555 // CR: Carriage return
11556 service.currentWindow.pendingNewLine = true;
11557 } else if (b === 0x0e) {
11558 // HCR: Horizontal carriage return
11559 service.currentWindow.clearText();
11560 } else if (b === 0x8d) {
11561 // DLY: Delay, nothing to do
11562 i++;
11563 } else ; // unrecognized control code: ignore it and continue
11564 }
11565 };
11566 /**
11567 * Execute an extended command
11568 *
11569 * @param {Integer} i Current index in the 708 packet
11570 * @param {Service} service The service object to be affected
11571 * @return {Integer} New index after parsing
11572 */
11573
11574
11575 Cea708Stream.prototype.extendedCommands = function (i, service) {
11576 var packetData = this.current708Packet.data;
11577 var b = packetData[++i];
11578
11579 if (within708TextBlock(b)) {
11580 i = this.handleText(i, service, {
11581 isExtended: true
11582 });
11583 }
11584
11585 return i;
11586 };
11587 /**
11588 * Get PTS value of a given byte index
11589 *
11590 * @param {Integer} byteIndex Index of the byte
11591 * @return {Integer} PTS
11592 */
11593
11594
11595 Cea708Stream.prototype.getPts = function (byteIndex) {
11596 // There's 1 pts value per 2 bytes
11597 return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
11598 };
11599 /**
11600 * Initializes a service
11601 *
11602 * @param {Integer} serviceNum Service number
11603 * @return {Service} Initialized service object
11604 */
11605
11606
11607 Cea708Stream.prototype.initService = function (serviceNum, i) {
11608 var serviceName = 'SERVICE' + serviceNum;
11609 var self = this;
11611 var encoding;
11612
11613 if (serviceName in this.serviceEncodings) {
11614 encoding = this.serviceEncodings[serviceName];
11615 }
11616
11617 this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
11618 this.services[serviceNum].init(this.getPts(i), function (pts) {
11619 self.flushDisplayed(pts, self.services[serviceNum]);
11620 });
11621 return this.services[serviceNum];
11622 };
11623 /**
11624 * Execute text writing to current window
11625 *
11626 * @param {Integer} i Current index in the 708 packet
11627 * @param {Service} service The service object to be affected
11628 * @return {Integer} New index after parsing
11629 */
11630
11631
11632 Cea708Stream.prototype.handleText = function (i, service, options) {
11633 var isExtended = options && options.isExtended;
11634 var isMultiByte = options && options.isMultiByte;
11635 var packetData = this.current708Packet.data;
11636 var extended = isExtended ? 0x1000 : 0x0000;
11637 var currentByte = packetData[i];
11638 var nextByte = packetData[i + 1];
11639 var win = service.currentWindow;
11640 var char;
11641 var charCodeArray; // Use the TextDecoder if one was created for this service
11642
11643 if (service.textDecoder_ && !isExtended) {
11644 if (isMultiByte) {
11645 charCodeArray = [currentByte, nextByte];
11646 i++;
11647 } else {
11648 charCodeArray = [currentByte];
11649 }
11650
11651 char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
11652 } else {
11653 char = get708CharFromCode(extended | currentByte);
11654 }
11655
11656 if (win.pendingNewLine && !win.isEmpty()) {
11657 win.newLine(this.getPts(i));
11658 }
11659
11660 win.pendingNewLine = false;
11661 win.addText(char);
11662 return i;
11663 };
11664 /**
11665 * Handle decoding of multibyte character
11666 *
11667 * @param {Integer} i Current index in the 708 packet
11668 * @param {Service} service The service object to be affected
11669 * @return {Integer} New index after parsing
11670 */
11671
11672
11673 Cea708Stream.prototype.multiByteCharacter = function (i, service) {
11674 var packetData = this.current708Packet.data;
11675 var firstByte = packetData[i + 1];
11676 var secondByte = packetData[i + 2];
11677
11678 if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
11679 i = this.handleText(++i, service, {
11680 isMultiByte: true
11681 });
11682 }
11683
11684 return i;
11685 };
11686 /**
11687 * Parse and execute the CW# command.
11688 *
11689 * Set the current window.
11690 *
11691 * @param {Integer} i Current index in the 708 packet
11692 * @param {Service} service The service object to be affected
11693 * @return {Integer} New index after parsing
11694 */
11695
11696
11697 Cea708Stream.prototype.setCurrentWindow = function (i, service) {
11698 var packetData = this.current708Packet.data;
11699 var b = packetData[i];
11700 var windowNum = b & 0x07;
11701 service.setCurrentWindow(windowNum);
11702 return i;
11703 };
11704 /**
11705 * Parse and execute the DF# command.
11706 *
11707 * Define a window and set it as the current window.
11708 *
11709 * @param {Integer} i Current index in the 708 packet
11710 * @param {Service} service The service object to be affected
11711 * @return {Integer} New index after parsing
11712 */
11713
11714
11715 Cea708Stream.prototype.defineWindow = function (i, service) {
11716 var packetData = this.current708Packet.data;
11717 var b = packetData[i];
11718 var windowNum = b & 0x07;
11719 service.setCurrentWindow(windowNum);
11720 var win = service.currentWindow;
11721 b = packetData[++i];
11722 win.visible = (b & 0x20) >> 5; // v
11723
11724 win.rowLock = (b & 0x10) >> 4; // rl
11725
11726 win.columnLock = (b & 0x08) >> 3; // cl
11727
11728 win.priority = b & 0x07; // p
11729
11730 b = packetData[++i];
11731 win.relativePositioning = (b & 0x80) >> 7; // rp
11732
11733 win.anchorVertical = b & 0x7f; // av
11734
11735 b = packetData[++i];
11736 win.anchorHorizontal = b; // ah
11737
11738 b = packetData[++i];
11739 win.anchorPoint = (b & 0xf0) >> 4; // ap
11740
11741 win.rowCount = b & 0x0f; // rc
11742
11743 b = packetData[++i];
11744 win.columnCount = b & 0x3f; // cc
11745
11746 b = packetData[++i];
11747 win.windowStyle = (b & 0x38) >> 3; // ws
11748
11749 win.penStyle = b & 0x07; // ps
11750 // The spec says there are (rowCount+1) "virtual rows"
11751
11752 win.virtualRowCount = win.rowCount + 1;
11753 return i;
11754 };
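
  // Illustrative sketch (not part of the library): unpacking the first DF#
  // attribute byte the way defineWindow does.
  var exampleDefineWindowFlags = function () {
    var b = 0x35; // 0b00110101
    return {
      visible: (b & 0x20) >> 5, // 1
      rowLock: (b & 0x10) >> 4, // 1
      columnLock: (b & 0x08) >> 3, // 0
      priority: b & 0x07 // 5
    };
  };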
11755 /**
11756 * Parse and execute the SWA command.
11757 *
11758 * Set attributes of the current window.
11759 *
11760 * @param {Integer} i Current index in the 708 packet
11761 * @param {Service} service The service object to be affected
11762 * @return {Integer} New index after parsing
11763 */
11764
11765
11766 Cea708Stream.prototype.setWindowAttributes = function (i, service) {
11767 var packetData = this.current708Packet.data;
11768 var b = packetData[i];
11769 var winAttr = service.currentWindow.winAttr;
11770 b = packetData[++i];
11771 winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
11772
11773 winAttr.fillRed = (b & 0x30) >> 4; // fr
11774
11775 winAttr.fillGreen = (b & 0x0c) >> 2; // fg
11776
11777 winAttr.fillBlue = b & 0x03; // fb
11778
11779 b = packetData[++i];
11780 winAttr.borderType = (b & 0xc0) >> 6; // bt
11781
11782 winAttr.borderRed = (b & 0x30) >> 4; // br
11783
11784 winAttr.borderGreen = (b & 0x0c) >> 2; // bg
11785
11786 winAttr.borderBlue = b & 0x03; // bb
11787
11788 b = packetData[++i];
11789 winAttr.borderType += (b & 0x80) >> 5; // bt
11790
11791 winAttr.wordWrap = (b & 0x40) >> 6; // ww
11792
11793 winAttr.printDirection = (b & 0x30) >> 4; // pd
11794
11795 winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
11796
11797 winAttr.justify = b & 0x03; // j
11798
11799 b = packetData[++i];
11800 winAttr.effectSpeed = (b & 0xf0) >> 4; // es
11801
11802 winAttr.effectDirection = (b & 0x0c) >> 2; // ed
11803
11804 winAttr.displayEffect = b & 0x03; // de
11805
11806 return i;
11807 };
11808 /**
11809 * Gather text from all displayed windows and push a caption to output.
11810 *
11811 * @param {Integer} pts PTS value at the time of the flush
11812 * @param {Service} service The service object to be affected
11813 */
11814
11815
11816 Cea708Stream.prototype.flushDisplayed = function (pts, service) {
11817 var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
11818 // display text in the correct order, but sample files so far have not shown any issue.
11819
11820 for (var winId = 0; winId < 8; winId++) {
11821 if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
11822 displayedText.push(service.windows[winId].getText());
11823 }
11824 }
11825
11826 service.endPts = pts;
11827 service.text = displayedText.join('\n\n');
11828 this.pushCaption(service);
11829 service.startPts = pts;
11830 };
11831 /**
11832 * Push a caption to output if the caption contains text.
11833 *
11834 * @param {Service} service The service object to be affected
11835 */
11836
11837
11838 Cea708Stream.prototype.pushCaption = function (service) {
11839 if (service.text !== '') {
11840 this.trigger('data', {
11841 startPts: service.startPts,
11842 endPts: service.endPts,
11843 text: service.text,
11844 stream: 'cc708_' + service.serviceNum
11845 });
11846 service.text = '';
11847 service.startPts = service.endPts;
11848 }
11849 };
11850 /**
11851 * Parse and execute the DSW command.
11852 *
11853 * Set visible property of windows based on the parsed bitmask.
11854 *
11855 * @param {Integer} i Current index in the 708 packet
11856 * @param {Service} service The service object to be affected
11857 * @return {Integer} New index after parsing
11858 */
11859
11860
11861 Cea708Stream.prototype.displayWindows = function (i, service) {
11862 var packetData = this.current708Packet.data;
11863 var b = packetData[++i];
11864 var pts = this.getPts(i);
11865 this.flushDisplayed(pts, service);
11866
11867 for (var winId = 0; winId < 8; winId++) {
11868 if (b & 0x01 << winId) {
11869 service.windows[winId].visible = 1;
11870 }
11871 }
11872
11873 return i;
11874 };
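
  // Illustrative sketch (not part of the library): DSW, HDW, TGW, CLW and DLW
  // all take the same one-byte window bitmask, with bit N selecting window N.
  var exampleWindowBitmask = function () {
    var b = 0x05; // windows 0 and 2 selected
    var selected = [];
    for (var winId = 0; winId < 8; winId++) {
      if (b & 0x01 << winId) {
        selected.push(winId);
      }
    }
    return selected; // [0, 2]
  };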
11875 /**
11876 * Parse and execute the HDW command.
11877 *
11878 * Set visible property of windows based on the parsed bitmask.
11879 *
11880 * @param {Integer} i Current index in the 708 packet
11881 * @param {Service} service The service object to be affected
11882 * @return {Integer} New index after parsing
11883 */
11884
11885
11886 Cea708Stream.prototype.hideWindows = function (i, service) {
11887 var packetData = this.current708Packet.data;
11888 var b = packetData[++i];
11889 var pts = this.getPts(i);
11890 this.flushDisplayed(pts, service);
11891
11892 for (var winId = 0; winId < 8; winId++) {
11893 if (b & 0x01 << winId) {
11894 service.windows[winId].visible = 0;
11895 }
11896 }
11897
11898 return i;
11899 };
11900 /**
11901 * Parse and execute the TGW command.
11902 *
11903 * Set visible property of windows based on the parsed bitmask.
11904 *
11905 * @param {Integer} i Current index in the 708 packet
11906 * @param {Service} service The service object to be affected
11907 * @return {Integer} New index after parsing
11908 */
11909
11910
11911 Cea708Stream.prototype.toggleWindows = function (i, service) {
11912 var packetData = this.current708Packet.data;
11913 var b = packetData[++i];
11914 var pts = this.getPts(i);
11915 this.flushDisplayed(pts, service);
11916
11917 for (var winId = 0; winId < 8; winId++) {
11918 if (b & 0x01 << winId) {
11919 service.windows[winId].visible ^= 1;
11920 }
11921 }
11922
11923 return i;
11924 };
11925 /**
11926 * Parse and execute the CLW command.
11927 *
11928 * Clear text of windows based on the parsed bitmask.
11929 *
11930 * @param {Integer} i Current index in the 708 packet
11931 * @param {Service} service The service object to be affected
11932 * @return {Integer} New index after parsing
11933 */
11934
11935
11936 Cea708Stream.prototype.clearWindows = function (i, service) {
11937 var packetData = this.current708Packet.data;
11938 var b = packetData[++i];
11939 var pts = this.getPts(i);
11940 this.flushDisplayed(pts, service);
11941
11942 for (var winId = 0; winId < 8; winId++) {
11943 if (b & 0x01 << winId) {
11944 service.windows[winId].clearText();
11945 }
11946 }
11947
11948 return i;
11949 };
11950 /**
11951 * Parse and execute the DLW command.
11952 *
11953 * Re-initialize windows based on the parsed bitmask.
11954 *
11955 * @param {Integer} i Current index in the 708 packet
11956 * @param {Service} service The service object to be affected
11957 * @return {Integer} New index after parsing
11958 */
11959
11960
11961 Cea708Stream.prototype.deleteWindows = function (i, service) {
11962 var packetData = this.current708Packet.data;
11963 var b = packetData[++i];
11964 var pts = this.getPts(i);
11965 this.flushDisplayed(pts, service);
11966
11967 for (var winId = 0; winId < 8; winId++) {
11968 if (b & 0x01 << winId) {
11969 service.windows[winId].reset();
11970 }
11971 }
11972
11973 return i;
11974 };
11975 /**
11976 * Parse and execute the SPA command.
11977 *
11978 * Set pen attributes of the current window.
11979 *
11980 * @param {Integer} i Current index in the 708 packet
11981 * @param {Service} service The service object to be affected
11982 * @return {Integer} New index after parsing
11983 */
11984
11985
11986 Cea708Stream.prototype.setPenAttributes = function (i, service) {
11987 var packetData = this.current708Packet.data;
11988 var b = packetData[i];
11989 var penAttr = service.currentWindow.penAttr;
11990 b = packetData[++i];
11991 penAttr.textTag = (b & 0xf0) >> 4; // tt
11992
11993 penAttr.offset = (b & 0x0c) >> 2; // o
11994
11995 penAttr.penSize = b & 0x03; // s
11996
11997 b = packetData[++i];
11998 penAttr.italics = (b & 0x80) >> 7; // i
11999
12000 penAttr.underline = (b & 0x40) >> 6; // u
12001
12002 penAttr.edgeType = (b & 0x38) >> 3; // et
12003
12004 penAttr.fontStyle = b & 0x07; // fs
12005
12006 return i;
12007 };
12008 /**
12009 * Parse and execute the SPC command.
12010 *
12011 * Set pen color of the current window.
12012 *
12013 * @param {Integer} i Current index in the 708 packet
12014 * @param {Service} service The service object to be affected
12015 * @return {Integer} New index after parsing
12016 */
12017
12018
12019 Cea708Stream.prototype.setPenColor = function (i, service) {
12020 var packetData = this.current708Packet.data;
12021 var b = packetData[i];
12022 var penColor = service.currentWindow.penColor;
12023 b = packetData[++i];
12024 penColor.fgOpacity = (b & 0xc0) >> 6; // fo
12025
12026 penColor.fgRed = (b & 0x30) >> 4; // fr
12027
12028 penColor.fgGreen = (b & 0x0c) >> 2; // fg
12029
12030 penColor.fgBlue = b & 0x03; // fb
12031
12032 b = packetData[++i];
12033 penColor.bgOpacity = (b & 0xc0) >> 6; // bo
12034
12035 penColor.bgRed = (b & 0x30) >> 4; // br
12036
12037 penColor.bgGreen = (b & 0x0c) >> 2; // bg
12038
12039 penColor.bgBlue = b & 0x03; // bb
12040
12041 b = packetData[++i];
12042 penColor.edgeRed = (b & 0x30) >> 4; // er
12043
12044 penColor.edgeGreen = (b & 0x0c) >> 2; // eg
12045
12046 penColor.edgeBlue = b & 0x03; // eb
12047
12048 return i;
12049 };
12050 /**
12051 * Parse and execute the SPL command.
12052 *
12053 * Set pen location of the current window.
12054 *
12055 * @param {Integer} i Current index in the 708 packet
12056 * @param {Service} service The service object to be affected
12057 * @return {Integer} New index after parsing
12058 */
12059
12060
12061 Cea708Stream.prototype.setPenLocation = function (i, service) {
12062 var packetData = this.current708Packet.data;
12063 var b = packetData[i];
12064 var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
12065
12066 service.currentWindow.pendingNewLine = true;
12067 b = packetData[++i];
12068 penLoc.row = b & 0x0f; // r
12069
12070 b = packetData[++i];
12071 penLoc.column = b & 0x3f; // c
12072
12073 return i;
12074 };
12075 /**
12076 * Execute the RST command.
12077 *
12078 * Reset service to a clean slate. Re-initialize.
12079 *
12080 * @param {Integer} i Current index in the 708 packet
12081 * @param {Service} service The service object to be affected
12082 * @return {Service} Re-initialized service
12083 */
12084
12085
12086 Cea708Stream.prototype.reset = function (i, service) {
12087 var pts = this.getPts(i);
12088 this.flushDisplayed(pts, service);
12089 return this.initService(service.serviceNum, i);
12090 }; // This hash maps non-ASCII, special, and extended character codes to their
12091 // proper Unicode equivalent. The first keys that are only a single byte
12092 // are the non-standard ASCII characters, which simply map the CEA608 byte
12093 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
12094 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
12095 // can be performed regardless of the field and data channel on which the
12096 // character code was received.
12097
12098
12099 var CHARACTER_TRANSLATION = {
12100 0x2a: 0xe1,
12101 // á
12102 0x5c: 0xe9,
12103 // é
12104 0x5e: 0xed,
12105 // í
12106 0x5f: 0xf3,
12107 // ó
12108 0x60: 0xfa,
12109 // ú
12110 0x7b: 0xe7,
12111 // ç
12112 0x7c: 0xf7,
12113 // ÷
12114 0x7d: 0xd1,
12115 // Ñ
12116 0x7e: 0xf1,
12117 // ñ
12118 0x7f: 0x2588,
12119 // █
12120 0x0130: 0xae,
12121 // ®
12122 0x0131: 0xb0,
12123 // °
12124 0x0132: 0xbd,
12125 // ½
12126 0x0133: 0xbf,
12127 // ¿
12128 0x0134: 0x2122,
12129 // ™
12130 0x0135: 0xa2,
12131 // ¢
12132 0x0136: 0xa3,
12133 // £
12134 0x0137: 0x266a,
12135 // ♪
12136 0x0138: 0xe0,
12137 // à
12138 0x0139: 0xa0,
12139 // (non-breaking space)
12140 0x013a: 0xe8,
12141 // è
12142 0x013b: 0xe2,
12143 // â
12144 0x013c: 0xea,
12145 // ê
12146 0x013d: 0xee,
12147 // î
12148 0x013e: 0xf4,
12149 // ô
12150 0x013f: 0xfb,
12151 // û
12152 0x0220: 0xc1,
12153 // Á
12154 0x0221: 0xc9,
12155 // É
12156 0x0222: 0xd3,
12157 // Ó
12158 0x0223: 0xda,
12159 // Ú
12160 0x0224: 0xdc,
12161 // Ü
12162 0x0225: 0xfc,
12163 // ü
12164 0x0226: 0x2018,
12165 // ‘
12166 0x0227: 0xa1,
12167 // ¡
12168 0x0228: 0x2a,
12169 // *
12170 0x0229: 0x27,
12171 // '
12172 0x022a: 0x2014,
12173 // —
12174 0x022b: 0xa9,
12175 // ©
12176 0x022c: 0x2120,
12177 // ℠
12178 0x022d: 0x2022,
12179 // •
12180 0x022e: 0x201c,
12181 // “
12182 0x022f: 0x201d,
12183 // ”
12184 0x0230: 0xc0,
12185 // À
12186 0x0231: 0xc2,
12187 // Â
12188 0x0232: 0xc7,
12189 // Ç
12190 0x0233: 0xc8,
12191 // È
12192 0x0234: 0xca,
12193 // Ê
12194 0x0235: 0xcb,
12195 // Ë
12196 0x0236: 0xeb,
12197 // ë
12198 0x0237: 0xce,
12199 // Î
12200 0x0238: 0xcf,
12201 // Ï
12202 0x0239: 0xef,
12203 // ï
12204 0x023a: 0xd4,
12205 // Ô
12206 0x023b: 0xd9,
12207 // Ù
12208 0x023c: 0xf9,
12209 // ù
12210 0x023d: 0xdb,
12211 // Û
12212 0x023e: 0xab,
12213 // «
12214 0x023f: 0xbb,
12215 // »
12216 0x0320: 0xc3,
12217 // Ã
12218 0x0321: 0xe3,
12219 // ã
12220 0x0322: 0xcd,
12221 // Í
12222 0x0323: 0xcc,
12223 // Ì
12224 0x0324: 0xec,
12225 // ì
12226 0x0325: 0xd2,
12227 // Ò
12228 0x0326: 0xf2,
12229 // ò
12230 0x0327: 0xd5,
12231 // Õ
12232 0x0328: 0xf5,
12233 // õ
12234 0x0329: 0x7b,
12235 // {
12236 0x032a: 0x7d,
12237 // }
12238 0x032b: 0x5c,
12239 // \
12240 0x032c: 0x5e,
12241 // ^
12242 0x032d: 0x5f,
12243 // _
12244 0x032e: 0x7c,
12245 // |
12246 0x032f: 0x7e,
12247 // ~
12248 0x0330: 0xc4,
12249 // Ä
12250 0x0331: 0xe4,
12251 // ä
12252 0x0332: 0xd6,
12253 // Ö
12254 0x0333: 0xf6,
12255 // ö
12256 0x0334: 0xdf,
12257 // ß
12258 0x0335: 0xa5,
12259 // ¥
12260 0x0336: 0xa4,
12261 // ¤
12262 0x0337: 0x2502,
12263 // │
12264 0x0338: 0xc5,
12265 // Å
12266 0x0339: 0xe5,
12267 // å
12268 0x033a: 0xd8,
12269 // Ø
12270 0x033b: 0xf8,
12271 // ø
12272 0x033c: 0x250c,
12273 // ┌
12274 0x033d: 0x2510,
12275 // ┐
12276 0x033e: 0x2514,
12277 // └
12278 0x033f: 0x2518 // ┘
12279
12280 };
12281
12282 var getCharFromCode = function getCharFromCode(code) {
12283 if (code === null) {
12284 return '';
12285 }
12286
12287 code = CHARACTER_TRANSLATION[code] || code;
12288 return String.fromCharCode(code);
12289 }; // the index of the last row in a CEA-608 display buffer
12290
12291
12292 var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
12293 // getting it through bit logic.
12294
12295 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 32x15 matrix of character
12296 // cells. The "bottom" row is the last element in the outer array.
12297
12298 var createDisplayBuffer = function createDisplayBuffer() {
12299 var result = [],
12300 i = BOTTOM_ROW + 1;
12301
12302 while (i--) {
12303 result.push('');
12304 }
12305
12306 return result;
12307 };
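
  // Illustrative sketch (not part of the library): looking up the row for a
  // Preamble Address Code (PAC) once its channel and attribute bits have been
  // masked down to the canonical form stored in ROWS.
  var examplePacRowLookup = function () {
    return ROWS.indexOf(0x1500); // row 4
  };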
12308
12309 var Cea608Stream = function Cea608Stream(field, dataChannel) {
12310 Cea608Stream.prototype.init.call(this);
12311 this.field_ = field || 0;
12312 this.dataChannel_ = dataChannel || 0;
12313 this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
12314 this.setConstants();
12315 this.reset();
12316
12317 this.push = function (packet) {
12318 var data, swap, char0, char1, text; // remove the parity bits
12319
12320 data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
12321
12322 if (data === this.lastControlCode_) {
12323 this.lastControlCode_ = null;
12324 return;
12325 } // Store control codes
12326
12327
12328 if ((data & 0xf000) === 0x1000) {
12329 this.lastControlCode_ = data;
12330 } else if (data !== this.PADDING_) {
12331 this.lastControlCode_ = null;
12332 }
12333
12334 char0 = data >>> 8;
12335 char1 = data & 0xff;
12336
12337 if (data === this.PADDING_) {
12338 return;
12339 } else if (data === this.RESUME_CAPTION_LOADING_) {
12340 this.mode_ = 'popOn';
12341 } else if (data === this.END_OF_CAPTION_) {
12342 // If an EOC is received while in paint-on mode, the displayed caption
12343 // text should be swapped to non-displayed memory as if it was a pop-on
12344 // caption. Because of that, we should explicitly switch back to pop-on
12345 // mode
12346 this.mode_ = 'popOn';
12347 this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
12348
12349 this.flushDisplayed(packet.pts); // flip memory
12350
12351 swap = this.displayed_;
12352 this.displayed_ = this.nonDisplayed_;
12353 this.nonDisplayed_ = swap; // start measuring the time to display the caption
12354
12355 this.startPts_ = packet.pts;
12356 } else if (data === this.ROLL_UP_2_ROWS_) {
12357 this.rollUpRows_ = 2;
12358 this.setRollUp(packet.pts);
12359 } else if (data === this.ROLL_UP_3_ROWS_) {
12360 this.rollUpRows_ = 3;
12361 this.setRollUp(packet.pts);
12362 } else if (data === this.ROLL_UP_4_ROWS_) {
12363 this.rollUpRows_ = 4;
12364 this.setRollUp(packet.pts);
12365 } else if (data === this.CARRIAGE_RETURN_) {
12366 this.clearFormatting(packet.pts);
12367 this.flushDisplayed(packet.pts);
12368 this.shiftRowsUp_();
12369 this.startPts_ = packet.pts;
12370 } else if (data === this.BACKSPACE_) {
12371 if (this.mode_ === 'popOn') {
12372 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
12373 } else {
12374 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
12375 }
12376 } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
12377 this.flushDisplayed(packet.pts);
12378 this.displayed_ = createDisplayBuffer();
12379 } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
12380 this.nonDisplayed_ = createDisplayBuffer();
12381 } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
12382 if (this.mode_ !== 'paintOn') {
12383 // NOTE: This should be removed when proper caption positioning is
12384 // implemented
12385 this.flushDisplayed(packet.pts);
12386 this.displayed_ = createDisplayBuffer();
12387 }
12388
12389 this.mode_ = 'paintOn';
12390 this.startPts_ = packet.pts; // Append special characters to caption text
12391 } else if (this.isSpecialCharacter(char0, char1)) {
12392 // Bitmask char0 so that we can apply character transformations
12393 // regardless of field and data channel.
12394 // Then byte-shift to the left and OR with char1 so we can pass the
12395 // entire character code to `getCharFromCode`.
12396 char0 = (char0 & 0x03) << 8;
12397 text = getCharFromCode(char0 | char1);
12398 this[this.mode_](packet.pts, text);
12399 this.column_++; // Append extended characters to caption text
12400 } else if (this.isExtCharacter(char0, char1)) {
12401 // Extended characters always follow their "non-extended" equivalents.
12402 // I.e. if an "è" is desired, you'll always receive "eè"; non-compliant
12403 // decoders are supposed to drop the "è", while compliant decoders
12404 // backspace the "e" and insert "è".
12405 // Delete the previous character
12406 if (this.mode_ === 'popOn') {
12407 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
12408 } else {
12409 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
12410 } // Bitmask char0 so that we can apply character transformations
12411 // regardless of field and data channel.
12412 // Then byte-shift to the left and OR with char1 so we can pass the
12413 // entire character code to `getCharFromCode`.
12414
12415
12416 char0 = (char0 & 0x03) << 8;
12417 text = getCharFromCode(char0 | char1);
12418 this[this.mode_](packet.pts, text);
12419 this.column_++; // Process mid-row codes
12420 } else if (this.isMidRowCode(char0, char1)) {
12421 // Attributes are not additive, so clear all formatting
12422 this.clearFormatting(packet.pts); // According to the standard, mid-row codes
12423 // should be replaced with spaces, so add one now
12424
12425 this[this.mode_](packet.pts, ' ');
12426 this.column_++;
12427
12428 if ((char1 & 0xe) === 0xe) {
12429 this.addFormatting(packet.pts, ['i']);
12430 }
12431
12432 if ((char1 & 0x1) === 0x1) {
12433 this.addFormatting(packet.pts, ['u']);
12434 } // Detect offset control codes and adjust cursor
12435
12436 } else if (this.isOffsetControlCode(char0, char1)) {
12437 // Cursor position is set by indent PAC (see below) in 4-column
12438 // increments, with an additional offset code of 1-3 to reach any
12439 // of the 32 columns specified by CEA-608. So all we need to do
12440 // here is increment the column cursor by the given offset.
12441 this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
12442 } else if (this.isPAC(char0, char1)) {
12443 // There's no logic for PAC -> row mapping, so we have to just
12444 // find the row code in an array and use its index :(
12445 var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
12446
12447 if (this.mode_ === 'rollUp') {
12448 // This implies that the base row is incorrectly set.
12449 // As per the recommendation in CEA-608 (Base Row Implementation), defer to the number
12450 // of roll-up rows set.
12451 if (row - this.rollUpRows_ + 1 < 0) {
12452 row = this.rollUpRows_ - 1;
12453 }
12454
12455 this.setRollUp(packet.pts, row);
12456 }
12457
12458 if (row !== this.row_) {
12459 // formatting is only persistent for current row
12460 this.clearFormatting(packet.pts);
12461 this.row_ = row;
12462 } // All PACs can apply underline, so detect and apply
12463 // (All odd-numbered second bytes set underline)
12464
12465
12466 if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
12467 this.addFormatting(packet.pts, ['u']);
12468 }
12469
12470 if ((data & 0x10) === 0x10) {
12471 // We've got an indent level code. Each successive even number
12472 // increments the column cursor by 4, so we can get the desired
12473 // column position by bit-shifting to the right (to get n/2)
12474 // and multiplying by 4.
12475 this.column_ = ((data & 0xe) >> 1) * 4;
12476 }
12477
12478 if (this.isColorPAC(char1)) {
12479 // it's a color code, though we only support white, which
12480 // can be either normal or italicized. white italics can be
12481 // either 0x4e or 0x6e depending on the row, so we just
12482 // bitwise-and with 0xe to see if italics should be turned on
12483 if ((char1 & 0xe) === 0xe) {
12484 this.addFormatting(packet.pts, ['i']);
12485 }
12486 } // We have a normal character in char0, and possibly one in char1
12487
12488 } else if (this.isNormalChar(char0)) {
12489 if (char1 === 0x00) {
12490 char1 = null;
12491 }
12492
12493 text = getCharFromCode(char0);
12494 text += getCharFromCode(char1);
12495 this[this.mode_](packet.pts, text);
12496 this.column_ += text.length;
12497 } // finish data processing
12498
12499 };
12500 };
12501
12502 Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
12503 // display buffer
12504
12505 Cea608Stream.prototype.flushDisplayed = function (pts) {
12506 var content = this.displayed_ // remove spaces from the start and end of the string
12507 .map(function (row, index) {
12508 try {
12509 return row.trim();
12510 } catch (e) {
12511 // Ordinarily, this shouldn't happen. However, caption
12512 // parsing errors should not throw exceptions and
12513 // break playback.
12514 this.trigger('log', {
12515 level: 'warn',
12516 message: 'Skipping a malformed 608 caption at index ' + index + '.'
12517 });
12518 return '';
12519 }
12520 }, this) // combine all text rows to display in one cue
12521 .join('\n') // and remove blank rows from the start and end, but not the middle
12522 .replace(/^\n+|\n+$/g, '');
12523
12524 if (content.length) {
12525 this.trigger('data', {
12526 startPts: this.startPts_,
12527 endPts: pts,
12528 text: content,
12529 stream: this.name_
12530 });
12531 }
12532 };
12533 /**
12534 * Zero out the data, used for startup and on seek
12535 */
12536
12537
12538 Cea608Stream.prototype.reset = function () {
12539 this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
12540 // actually display captions. If a caption is shifted to a row
12541 // with a lower index than this, it is cleared from the display
12542 // buffer
12543
12544 this.topRow_ = 0;
12545 this.startPts_ = 0;
12546 this.displayed_ = createDisplayBuffer();
12547 this.nonDisplayed_ = createDisplayBuffer();
12548 this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
12549
12550 this.column_ = 0;
12551 this.row_ = BOTTOM_ROW;
12552 this.rollUpRows_ = 2; // This variable holds currently-applied formatting
12553
12554 this.formatting_ = [];
12555 };
12556 /**
12557 * Sets up control code and related constants for this instance
12558 */
12559
12560
12561 Cea608Stream.prototype.setConstants = function () {
12562 // The following attributes have these uses:
12563 // ext_ : char0 for mid-row codes, and the base for extended
12564 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
12565 // extended codes)
12566 // control_: char0 for control codes, except byte-shifted to the
12567 // left so that we can do this.control_ | CONTROL_CODE
12568 // offset_: char0 for tab offset codes
12569 //
12570 // It's also worth noting that control codes, and _only_ control codes,
12571 // differ between field 1 and field 2. Field 2 control codes are always
12572 // their field 1 value plus 1. That's why there's the "| field" on the
12573 // control value.
12574 if (this.dataChannel_ === 0) {
12575 this.BASE_ = 0x10;
12576 this.EXT_ = 0x11;
12577 this.CONTROL_ = (0x14 | this.field_) << 8;
12578 this.OFFSET_ = 0x17;
12579 } else if (this.dataChannel_ === 1) {
12580 this.BASE_ = 0x18;
12581 this.EXT_ = 0x19;
12582 this.CONTROL_ = (0x1c | this.field_) << 8;
12583 this.OFFSET_ = 0x1f;
12584 } // Constants for the LSByte command codes recognized by Cea608Stream. This
12585 // list is not exhaustive. For a more comprehensive listing and semantics see
12586 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
12587 // Padding
12588
12589
12590 this.PADDING_ = 0x0000; // Pop-on Mode
12591
12592 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
12593 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
12594
12595 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
12596 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
12597 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
12598 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
12599
12600 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
12601
12602 this.BACKSPACE_ = this.CONTROL_ | 0x21;
12603 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
12604 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
12605 };
12606 /**
12607 * Detects if the 2-byte packet data is a special character
12608 *
12609 * Special characters have a second byte in the range 0x30 to 0x3f,
12610 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
12611 * data channel 2).
12612 *
12613 * @param {Integer} char0 The first byte
12614 * @param {Integer} char1 The second byte
12615 * @return {Boolean} Whether the 2 bytes are a special character
12616 */
12617
12618
12619 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
12620 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
12621 };
12622 /**
12623 * Detects if the 2-byte packet data is an extended character
12624 *
12625 * Extended characters have a second byte in the range 0x20 to 0x3f,
12626 * with the first byte being 0x12 or 0x13 (for data channel 1) or
12627 * 0x1a or 0x1b (for data channel 2).
12628 *
12629 * @param {Integer} char0 The first byte
12630 * @param {Integer} char1 The second byte
12631 * @return {Boolean} Whether the 2 bytes are an extended character
12632 */
12633
12634
12635 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
12636 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
12637 };
12638 /**
12639 * Detects if the 2-byte packet is a mid-row code
12640 *
12641 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
12642 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
12643 * channel 2).
12644 *
12645 * @param {Integer} char0 The first byte
12646 * @param {Integer} char1 The second byte
12647 * @return {Boolean} Whether the 2 bytes are a mid-row code
12648 */
12649
12650
12651 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
12652 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
12653 };
12654 /**
12655 * Detects if the 2-byte packet is an offset control code
12656 *
12657 * Offset control codes have a second byte in the range 0x21 to 0x23,
12658 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
12659 * data channel 2).
12660 *
12661 * @param {Integer} char0 The first byte
12662 * @param {Integer} char1 The second byte
12663 * @return {Boolean} Whether the 2 bytes are an offset control code
12664 */
12665
12666
12667 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
12668 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
12669 };
12670 /**
12671 * Detects if the 2-byte packet is a Preamble Address Code
12672 *
12673 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
12674 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
12675 * range 0x40 to 0x7f.
12676 *
12677 * @param {Integer} char0 The first byte
12678 * @param {Integer} char1 The second byte
12679 * @return {Boolean} Whether the 2 bytes are a PAC
12680 */
12681
12682
12683 Cea608Stream.prototype.isPAC = function (char0, char1) {
12684 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
12685 };
12686 /**
12687 * Detects if a packet's second byte is in the range of a PAC color code
12688 *
12689 * PAC color codes have a second byte in the range 0x40 to 0x4f, or
12690 * 0x60 to 0x7f.
12691 *
12692 * @param {Integer} char1 The second byte
12693 * @return {Boolean} Whether the byte is a color PAC
12694 */
12695
12696
12697 Cea608Stream.prototype.isColorPAC = function (char1) {
12698 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
12699 };
12700 /**
12701 * Detects if a single byte is in the range of a normal character
12702 *
12703 * Normal text bytes are in the range 0x20 to 0x7f.
12704 *
12705 * @param {Integer} char The byte
12706 * @return {Boolean} Whether the byte is a normal character
12707 */
12708
12709
12710 Cea608Stream.prototype.isNormalChar = function (char) {
12711 return char >= 0x20 && char <= 0x7f;
12712 };
12713 /**
12714 * Configures roll-up
12715 *
12716 * @param {Integer} pts Current PTS
12717 * @param {Integer} newBaseRow Used by PACs to slide the current window to
12718 * a new position
12719 */
12720
12721
12722 Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
12723 // Reset the base row to the bottom row when switching modes
12724 if (this.mode_ !== 'rollUp') {
12725 this.row_ = BOTTOM_ROW;
12726 this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
12727
12728 this.flushDisplayed(pts);
12729 this.nonDisplayed_ = createDisplayBuffer();
12730 this.displayed_ = createDisplayBuffer();
12731 }
12732
12733 if (newBaseRow !== undefined && newBaseRow !== this.row_) {
12734 // move currently displayed captions (up or down) to the new base row
12735 for (var i = 0; i < this.rollUpRows_; i++) {
12736 this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
12737 this.displayed_[this.row_ - i] = '';
12738 }
12739 }
12740
12741 if (newBaseRow === undefined) {
12742 newBaseRow = this.row_;
12743 }
12744
12745 this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
12746 }; // Adds the opening HTML tag for the passed character to the caption text,
12747 // and keeps track of it for later closing
12748
12749
12750 Cea608Stream.prototype.addFormatting = function (pts, format) {
12751 this.formatting_ = this.formatting_.concat(format);
12752 var text = format.reduce(function (text, format) {
12753 return text + '<' + format + '>';
12754 }, '');
12755 this[this.mode_](pts, text);
12756 }; // Adds HTML closing tags for current formatting to caption text and
12757 // clears remembered formatting
12758
12759
12760 Cea608Stream.prototype.clearFormatting = function (pts) {
12761 if (!this.formatting_.length) {
12762 return;
12763 }
12764
12765 var text = this.formatting_.reverse().reduce(function (text, format) {
12766 return text + '</' + format + '>';
12767 }, '');
12768 this.formatting_ = [];
12769 this[this.mode_](pts, text);
12770 }; // Mode Implementations
12771
12772
12773 Cea608Stream.prototype.popOn = function (pts, text) {
12774 var baseRow = this.nonDisplayed_[this.row_]; // buffer characters
12775
12776 baseRow += text;
12777 this.nonDisplayed_[this.row_] = baseRow;
12778 };
12779
12780 Cea608Stream.prototype.rollUp = function (pts, text) {
12781 var baseRow = this.displayed_[this.row_];
12782 baseRow += text;
12783 this.displayed_[this.row_] = baseRow;
12784 };
12785
12786 Cea608Stream.prototype.shiftRowsUp_ = function () {
12787 var i; // clear out inactive rows
12788
12789 for (i = 0; i < this.topRow_; i++) {
12790 this.displayed_[i] = '';
12791 }
12792
12793 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
12794 this.displayed_[i] = '';
12795 } // shift displayed rows up
12796
12797
12798 for (i = this.topRow_; i < this.row_; i++) {
12799 this.displayed_[i] = this.displayed_[i + 1];
12800 } // clear out the bottom row
12801
12802
12803 this.displayed_[this.row_] = '';
12804 };
12805
12806 Cea608Stream.prototype.paintOn = function (pts, text) {
12807 var baseRow = this.displayed_[this.row_];
12808 baseRow += text;
12809 this.displayed_[this.row_] = baseRow;
12810 }; // exports
12811
12812
12813 var captionStream = {
12814 CaptionStream: CaptionStream$1,
12815 Cea608Stream: Cea608Stream,
12816 Cea708Stream: Cea708Stream
12817 };
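// A minimal pop-on caption sketch, assuming 90kHz pts values; the ccData
// values are shown with parity bits already stripped (push masks them
// anyway):
//
//   var cc1 = new Cea608Stream(0, 0); // field 1, data channel 1 => 'CC1'
//   cc1.on('data', function (cue) {
//     // { startPts: 0, endPts: 90000, text: 'Hi', stream: 'CC1' }
//   });
//   cc1.push({ pts: 0,     ccData: 0x1420 }); // RESUME_CAPTION_LOADING (pop-on)
//   cc1.push({ pts: 0,     ccData: 0x4869 }); // 'H','i' into non-displayed memory
//   cc1.push({ pts: 0,     ccData: 0x142f }); // END_OF_CAPTION: swap memories
//   cc1.push({ pts: 90000, ccData: 0x142c }); // ERASE_DISPLAYED_MEMORY: emits the cue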
12818 /**
12819 * mux.js
12820 *
12821 * Copyright (c) Brightcove
12822 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
12823 */
12824
12825 var streamTypes = {
12826 H264_STREAM_TYPE: 0x1B,
12827 ADTS_STREAM_TYPE: 0x0F,
12828 METADATA_STREAM_TYPE: 0x15
12829 };
12830 var MAX_TS = 8589934592;
12831 var RO_THRESH = 4294967296;
12832 var TYPE_SHARED = 'shared';
12833
12834 var handleRollover$1 = function handleRollover(value, reference) {
12835 var direction = 1;
12836
12837 if (value > reference) {
12838 // If the current timestamp value is greater than our reference timestamp and we detect a
12839 // timestamp rollover, this means the roll over is happening in the opposite direction.
12840 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
12841 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
12842 // rollover point. In loading this segment, the timestamp values will be very large,
12843 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
12844 // the time stamp to be `value - 2^33`.
12845 direction = -1;
12846 } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
12847 // cause an incorrect adjustment.
12848
12849
12850 while (Math.abs(reference - value) > RO_THRESH) {
12851 value += direction * MAX_TS;
12852 }
12853
12854 return value;
12855 };
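// A worked example, assuming 90kHz values around the 2^33 wrap: a
// timestamp read just before the rollover, interpreted against a
// reference just after it, is shifted down one full period so ordering
// is preserved:
//
//   handleRollover$1(8589930000, 1000); // => -4592 (8589930000 - 2^33)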
12856
12857 var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
12858 var lastDTS, referenceDTS;
12859 TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
12860 // video and audio. We could use `undefined` here, but having a string
12861 // makes debugging a little clearer.
12862
12863 this.type_ = type || TYPE_SHARED;
12864
12865 this.push = function (data) {
12866 // Any "shared" rollover streams will accept _all_ data. Otherwise,
12867 // streams will only accept data that matches their type.
12868 if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
12869 return;
12870 }
12871
12872 if (referenceDTS === undefined) {
12873 referenceDTS = data.dts;
12874 }
12875
12876 data.dts = handleRollover$1(data.dts, referenceDTS);
12877 data.pts = handleRollover$1(data.pts, referenceDTS);
12878 lastDTS = data.dts;
12879 this.trigger('data', data);
12880 };
12881
12882 this.flush = function () {
12883 referenceDTS = lastDTS;
12884 this.trigger('done');
12885 };
12886
12887 this.endTimeline = function () {
12888 this.flush();
12889 this.trigger('endedtimeline');
12890 };
12891
12892 this.discontinuity = function () {
12893 referenceDTS = void 0;
12894 lastDTS = void 0;
12895 };
12896
12897 this.reset = function () {
12898 this.discontinuity();
12899 this.trigger('reset');
12900 };
12901 };
12902
12903 TimestampRolloverStream$1.prototype = new stream();
12904 var timestampRolloverStream = {
12905 TimestampRolloverStream: TimestampRolloverStream$1,
12906 handleRollover: handleRollover$1
12907 };
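// A minimal sketch of per-type rollover handling, assuming the first
// pushed dts establishes the reference point:
//
//   var video = new TimestampRolloverStream$1('video');
//   video.push({ type: 'video', dts: 8589934000, pts: 8589934000 }); // reference
//   video.push({ type: 'video', dts: 100, pts: 100 });
//   // the second packet's dts/pts are rewritten to 2^33 + 100 = 8589934692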
12908
12909 var percentEncode$1 = function percentEncode(bytes, start, end) {
12910 var i,
12911 result = '';
12912
12913 for (i = start; i < end; i++) {
12914 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
12915 }
12916
12917 return result;
12918 },
12919 // return the string representation of the specified byte range,
12920 // interpreted as UTF-8.
12921 parseUtf8 = function parseUtf8(bytes, start, end) {
12922 return decodeURIComponent(percentEncode$1(bytes, start, end));
12923 },
12924 // return the string representation of the specified byte range,
12925 // interpreted as ISO-8859-1.
12926 parseIso88591$1 = function parseIso88591(bytes, start, end) {
12927 return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
12928 },
12929 parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
12930 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
12931 },
12932 tagParsers = {
12933 TXXX: function TXXX(tag) {
12934 var i;
12935
12936 if (tag.data[0] !== 3) {
12937 // ignore frames with unrecognized character encodings
12938 return;
12939 }
12940
12941 for (i = 1; i < tag.data.length; i++) {
12942 if (tag.data[i] === 0) {
12943 // parse the text fields
12944 tag.description = parseUtf8(tag.data, 1, i); // do not include the null terminator in the tag value
12945
12946 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
12947 break;
12948 }
12949 }
12950
12951 tag.data = tag.value;
12952 },
12953 WXXX: function WXXX(tag) {
12954 var i;
12955
12956 if (tag.data[0] !== 3) {
12957 // ignore frames with unrecognized character encodings
12958 return;
12959 }
12960
12961 for (i = 1; i < tag.data.length; i++) {
12962 if (tag.data[i] === 0) {
12963 // parse the description and URL fields
12964 tag.description = parseUtf8(tag.data, 1, i);
12965 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
12966 break;
12967 }
12968 }
12969 },
12970 PRIV: function PRIV(tag) {
12971 var i;
12972
12973 for (i = 0; i < tag.data.length; i++) {
12974 if (tag.data[i] === 0) {
12975 // parse the description and URL fields
12976 tag.owner = parseIso88591$1(tag.data, 0, i);
12977 break;
12978 }
12979 }
12980
12981 tag.privateData = tag.data.subarray(i + 1);
12982 tag.data = tag.privateData;
12983 }
12984 },
12985 _MetadataStream;
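// Quick illustrations of the helpers above, using plain byte arrays:
//
//   percentEncode$1(new Uint8Array([0x48, 0x69]), 0, 2); // => '%48%69'
//   parseUtf8(new Uint8Array([0x48, 0x69]), 0, 2);       // => 'Hi'
//   parseSyncSafeInteger$1([0x00, 0x00, 0x02, 0x01]);    // => 257 (2 << 7 | 1)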
12986
12987 _MetadataStream = function MetadataStream(options) {
12988 var settings = {
12989 // the bytes of the program-level descriptor field in MP2T
12990 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
12991 // program element descriptors"
12992 descriptor: options && options.descriptor
12993 },
12994 // the total size in bytes of the ID3 tag being parsed
12995 tagSize = 0,
12996 // tag data that is not complete enough to be parsed
12997 buffer = [],
12998 // the total number of bytes currently in the buffer
12999 bufferSize = 0,
13000 i;
13001
13002 _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
13003 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
13004
13005
13006 this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
13007
13008 if (settings.descriptor) {
13009 for (i = 0; i < settings.descriptor.length; i++) {
13010 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
13011 }
13012 }
13013
13014 this.push = function (chunk) {
13015 var tag, frameStart, frameSize, frame, i, frameHeader;
13016
13017 if (chunk.type !== 'timed-metadata') {
13018 return;
13019 } // if data_alignment_indicator is set in the PES header,
13020 // we must have the start of a new ID3 tag. Assume anything
13021 // remaining in the buffer was malformed and throw it out
13022
13023
13024 if (chunk.dataAlignmentIndicator) {
13025 bufferSize = 0;
13026 buffer.length = 0;
13027 } // ignore events that don't look like ID3 data
13028
13029
13030 if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
13031 this.trigger('log', {
13032 level: 'warn',
13033 message: 'Skipping unrecognized metadata packet'
13034 });
13035 return;
13036 } // add this chunk to the data we've collected so far
13037
13038
13039 buffer.push(chunk);
13040 bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header
13041
13042 if (buffer.length === 1) {
13043 // the frame size is transmitted as a 28-bit integer in the
13044 // last four bytes of the ID3 header.
13045 // The most significant bit of each byte is dropped and the
13046 // results concatenated to recover the actual value.
13047 tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
13048 // convenient for our comparisons to include it
13049
13050 tagSize += 10;
13051 } // if the entire frame has not arrived, wait for more data
13052
13053
13054 if (bufferSize < tagSize) {
13055 return;
13056 } // collect the entire frame so it can be parsed
13057
13058
13059 tag = {
13060 data: new Uint8Array(tagSize),
13061 frames: [],
13062 pts: buffer[0].pts,
13063 dts: buffer[0].dts
13064 };
13065
13066 for (i = 0; i < tagSize;) {
13067 tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
13068 i += buffer[0].data.byteLength;
13069 bufferSize -= buffer[0].data.byteLength;
13070 buffer.shift();
13071 } // find the start of the first frame and the end of the tag
13072
13073
13074 frameStart = 10;
13075
13076 if (tag.data[5] & 0x40) {
13077 // advance the frame start past the extended header
13078 frameStart += 4; // header size field
13079
13080 frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end
13081
13082 tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
13083 } // parse one or more ID3 frames
13084 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
13085
13086
13087 do {
13088 // determine the number of bytes in this frame
13089 frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));
13090
13091 if (frameSize < 1) {
13092 this.trigger('log', {
13093 level: 'warn',
13094 message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
13095 });
13096 return;
13097 }
13098
13099 frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
13100 frame = {
13101 id: frameHeader,
13102 data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
13103 };
13104 frame.key = frame.id;
13105
13106 if (tagParsers[frame.id]) {
13107 tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
13108 // time for raw AAC data
13109
13110 if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
13111 var d = frame.data,
13112 size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
13113 size *= 4;
13114 size += d[7] & 0x03;
13115 frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
13116 // on the value of this frame
13117 // we couldn't have known the appropriate pts and dts before
13118 // parsing this ID3 tag so set those values now
13119
13120 if (tag.pts === undefined && tag.dts === undefined) {
13121 tag.pts = frame.timeStamp;
13122 tag.dts = frame.timeStamp;
13123 }
13124
13125 this.trigger('timestamp', frame);
13126 }
13127 }
13128
13129 tag.frames.push(frame);
13130 frameStart += 10; // advance past the frame header
13131
13132 frameStart += frameSize; // advance past the frame body
13133 } while (frameStart < tagSize);
13134
13135 this.trigger('data', tag);
13136 };
13137 };
13138
13139 _MetadataStream.prototype = new stream();
13140 var metadataStream = _MetadataStream;
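// A small sketch of the dispatch type computation above: the metadata
// stream_type byte (0x15) is followed by the descriptor bytes, all as
// zero-padded hex pairs. The descriptor value here is illustrative only:
//
//   new _MetadataStream({ descriptor: new Uint8Array([0x49, 0x44, 0x33, 0x20]) })
//     .dispatchType; // => '1549443320'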
13141 var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types
13142
13143 var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants
13144
13145
13146 var MP2T_PACKET_LENGTH$1 = 188,
13147 // bytes
13148 SYNC_BYTE$1 = 0x47;
13149 /**
13150 * Splits an incoming stream of binary data into MPEG-2 Transport
13151 * Stream packets.
13152 */
13153
13154 _TransportPacketStream = function TransportPacketStream() {
13155 var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),
13156 bytesInBuffer = 0;
13157
13158 _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
13159
13160 /**
13161 * Split a stream of data into M2TS packets
13162 **/
13163
13164
13165 this.push = function (bytes) {
13166 var startIndex = 0,
13167 endIndex = MP2T_PACKET_LENGTH$1,
13168 everything; // If there are bytes remaining from the last segment, prepend them to the
13169 // bytes that were pushed in
13170
13171 if (bytesInBuffer) {
13172 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
13173 everything.set(buffer.subarray(0, bytesInBuffer));
13174 everything.set(bytes, bytesInBuffer);
13175 bytesInBuffer = 0;
13176 } else {
13177 everything = bytes;
13178 } // While we have enough data for a packet
13179
13180
13181 while (endIndex < everything.byteLength) {
13182 // Look for a pair of start and end sync bytes in the data.
13183 if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {
13184 // We found a packet so emit it and jump one whole packet forward in
13185 // the stream
13186 this.trigger('data', everything.subarray(startIndex, endIndex));
13187 startIndex += MP2T_PACKET_LENGTH$1;
13188 endIndex += MP2T_PACKET_LENGTH$1;
13189 continue;
13190 } // If we get here, we have somehow become de-synchronized and we need to step
13191 // forward one byte at a time until we find a pair of sync bytes that denote
13192 // a packet
13193
13194
13195 startIndex++;
13196 endIndex++;
13197 } // If there was some data left over at the end of the segment that couldn't
13198 // possibly be a whole packet, keep it because it might be the start of a packet
13199 // that continues in the next segment
13200
13201
13202 if (startIndex < everything.byteLength) {
13203 buffer.set(everything.subarray(startIndex), 0);
13204 bytesInBuffer = everything.byteLength - startIndex;
13205 }
13206 };
13207 /**
13208 * Passes identified M2TS packets to the TransportParseStream to be parsed
13209 **/
13210
13211
13212 this.flush = function () {
13213 // If the buffer contains a whole packet when we are being flushed, emit it
13214 // and empty the buffer. Otherwise hold onto the data because it may be
13215 // important for decoding the next segment
13216 if (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {
13217 this.trigger('data', buffer);
13218 bytesInBuffer = 0;
13219 }
13220
13221 this.trigger('done');
13222 };
13223
13224 this.endTimeline = function () {
13225 this.flush();
13226 this.trigger('endedtimeline');
13227 };
13228
13229 this.reset = function () {
13230 bytesInBuffer = 0;
13231 this.trigger('reset');
13232 };
13233 };
13234
13235 _TransportPacketStream.prototype = new stream();
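// A minimal usage sketch; `tsBytes` stands in for any Uint8Array of
// transport-stream data, split at arbitrary boundaries:
//
//   var packetizer = new _TransportPacketStream();
//   packetizer.on('data', function (packet) {
//     // packet is a 188-byte Uint8Array starting with the 0x47 sync byte
//   });
//   packetizer.push(tsBytes);
//   packetizer.flush(); // emits a final buffered packet, if one is complete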
13236 /**
13237 * Accepts an MP2T TransportPacketStream and emits data events with parsed
13238 * forms of the individual transport stream packets.
13239 */
13240
13241 _TransportParseStream = function TransportParseStream() {
13242 var parsePsi, parsePat, parsePmt, self;
13243
13244 _TransportParseStream.prototype.init.call(this);
13245
13246 self = this;
13247 this.packetsWaitingForPmt = [];
13248 this.programMapTable = undefined;
13249
13250 parsePsi = function parsePsi(payload, psi) {
13251 var offset = 0; // PSI packets may be split into multiple sections and those
13252 // sections may be split into multiple packets. If a PSI
13253 // section starts in this packet, the payload_unit_start_indicator
13254 // will be true and the first byte of the payload will indicate
13255 // the offset from the current position to the start of the
13256 // section.
13257
13258 if (psi.payloadUnitStartIndicator) {
13259 offset += payload[offset] + 1;
13260 }
13261
13262 if (psi.type === 'pat') {
13263 parsePat(payload.subarray(offset), psi);
13264 } else {
13265 parsePmt(payload.subarray(offset), psi);
13266 }
13267 };
13268
13269 parsePat = function parsePat(payload, pat) {
13270 pat.section_number = payload[7]; // eslint-disable-line camelcase
13271
13272 pat.last_section_number = payload[8]; // eslint-disable-line camelcase
13273 // skip the PSI header and parse the first PMT entry
13274
13275 self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
13276 pat.pmtPid = self.pmtPid;
13277 };
13278 /**
13279 * Parse out the relevant fields of a Program Map Table (PMT).
13280 * @param payload {Uint8Array} the PMT-specific portion of an MP2T
13281 * packet. The first byte in this array should be the table_id
13282 * field.
13283 * @param pmt {object} the object that should be decorated with
13284 * fields parsed from the PMT.
13285 */
13286
13287
13288 parsePmt = function parsePmt(payload, pmt) {
13289 var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
13290 // take effect. We don't believe this should ever be the case
13291 // for HLS but we'll ignore "forward" PMT declarations if we see
13292 // them. Future PMT declarations have the current_next_indicator
13293 // set to zero.
13294
13295 if (!(payload[5] & 0x01)) {
13296 return;
13297 } // overwrite any existing program map table
13298
13299
13300 self.programMapTable = {
13301 video: null,
13302 audio: null,
13303 'timed-metadata': {}
13304 }; // the mapping table ends at the end of the current section
13305
13306 sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
13307 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
13308 // long the program info descriptors are
13309
13310 programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
13311
13312 offset = 12 + programInfoLength;
13313
13314 while (offset < tableEnd) {
13315 var streamType = payload[offset];
13316 var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
13317 // TODO: should this be done for metadata too? for now maintain behavior of
13318 // multiple metadata streams
13319
13320 if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
13321 self.programMapTable.video = pid;
13322 } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
13323 self.programMapTable.audio = pid;
13324 } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
13325 // map pid to stream type for metadata streams
13326 self.programMapTable['timed-metadata'][pid] = streamType;
13327 } // move to the next table entry
13328 // skip past the elementary stream descriptors, if present
13329
13330
13331 offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
13332 } // record the map on the packet as well
13333
13334
13335 pmt.programMapTable = self.programMapTable;
13336 };
13337 /**
13338 * Deliver a new MP2T packet to the next stream in the pipeline.
13339 */
13340
13341
13342 this.push = function (packet) {
13343 var result = {},
13344 offset = 4;
13345 result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
13346
13347 result.pid = packet[1] & 0x1f;
13348 result.pid <<= 8;
13349 result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
13350 // fifth byte of the TS packet header. The adaptation field is
13351 // used to add stuffing to PES packets that don't fill a complete
13352 // TS packet, and to specify some forms of timing and control data
13353 // that we do not currently use.
13354
13355 if ((packet[3] & 0x30) >>> 4 > 0x01) {
13356 offset += packet[offset] + 1;
13357 } // parse the rest of the packet based on the type
13358
13359
13360 if (result.pid === 0) {
13361 result.type = 'pat';
13362 parsePsi(packet.subarray(offset), result);
13363 this.trigger('data', result);
13364 } else if (result.pid === this.pmtPid) {
13365 result.type = 'pmt';
13366 parsePsi(packet.subarray(offset), result);
13367 this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
13368
13369 while (this.packetsWaitingForPmt.length) {
13370 this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
13371 }
13372 } else if (this.programMapTable === undefined) {
13373 // When we have not seen a PMT yet, defer further processing of
13374 // PES packets until one has been parsed
13375 this.packetsWaitingForPmt.push([packet, offset, result]);
13376 } else {
13377 this.processPes_(packet, offset, result);
13378 }
13379 };
13380
13381 this.processPes_ = function (packet, offset, result) {
13382 // set the appropriate stream type
13383 if (result.pid === this.programMapTable.video) {
13384 result.streamType = streamTypes.H264_STREAM_TYPE;
13385 } else if (result.pid === this.programMapTable.audio) {
13386 result.streamType = streamTypes.ADTS_STREAM_TYPE;
13387 } else {
13388 // if not video or audio, it is timed-metadata or unknown
13389 // if unknown, streamType will be undefined
13390 result.streamType = this.programMapTable['timed-metadata'][result.pid];
13391 }
13392
13393 result.type = 'pes';
13394 result.data = packet.subarray(offset);
13395 this.trigger('data', result);
13396 };
13397 };
13398
13399 _TransportParseStream.prototype = new stream();
13400 _TransportParseStream.STREAM_TYPES = {
13401 h264: 0x1b,
13402 adts: 0x0f
13403 };
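// A worked example of the header parsing in push() above: for a packet
// beginning 0x47 0x41 0x00 0x10, payloadUnitStartIndicator is
// !!(0x41 & 0x40) === true and the 13-bit pid is
// ((0x41 & 0x1f) << 8) | 0x00 === 0x100.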
13404 /**
13405 * Reconstitutes program elementary stream (PES) packets from parsed
13406 * transport stream packets. That is, if you pipe an
13407 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
13408 * events will be events which capture the bytes for individual PES
13409 * packets plus relevant metadata that has been extracted from the
13410 * container.
13411 */
13412
13413 _ElementaryStream = function ElementaryStream() {
13414 var self = this,
13415 segmentHadPmt = false,
13416 // PES packet fragments
13417 video = {
13418 data: [],
13419 size: 0
13420 },
13421 audio = {
13422 data: [],
13423 size: 0
13424 },
13425 timedMetadata = {
13426 data: [],
13427 size: 0
13428 },
13429 programMapTable,
13430 parsePes = function parsePes(payload, pes) {
13431 var ptsDtsFlags;
13432 var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array
13433
13434 pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has TS packets
13435 // containing frame data that continues from the previous fragment. This
13436 // check ensures the PES data is the start of a new PES payload
13437
13438 if (startPrefix !== 1) {
13439 return;
13440 } // get the packet length, this will be 0 for video
13441
13442
13443 pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
13444
13445 pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
13446 // and a DTS value. Determine what combination of values is
13447 // available to work with.
13448
13449 ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
13450 // performs all bitwise operations on 32-bit integers, but it
13451 // supports a much greater range (53 bits) of exact integers using standard
13452 // mathematical operations.
13453 // We construct a 31-bit value using bitwise operators over the 31
13454 // most significant bits and then multiply by 4 (equal to a left-shift
13455 // of 2) before we add the final 2 least significant bits of the
13456 // timestamp (equal to an OR.)
13457
13458 if (ptsDtsFlags & 0xC0) {
13459 // the PTS and DTS are not written out directly. For information
13460 // on how they are encoded, see
13461 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
13462 pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
13463 pes.pts *= 4; // Left shift by 2
13464
13465 pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
13466
13467 pes.dts = pes.pts;
13468
13469 if (ptsDtsFlags & 0x40) {
13470 pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
13471 pes.dts *= 4; // Left shift by 2
13472
13473 pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
13474 }
13475 } // the data section starts immediately after the PES header.
13476 // pes_header_data_length specifies the number of header bytes
13477 // that follow the last byte of the field.
13478
13479
13480 pes.data = payload.subarray(9 + payload[8]);
13481 },
13482
13483 /**
13484 * Pass completely parsed PES packets to the next stream in the pipeline
13485 **/
13486 flushStream = function flushStream(stream, type, forceFlush) {
13487 var packetData = new Uint8Array(stream.size),
13488 event = {
13489 type: type
13490 },
13491 i = 0,
13492 offset = 0,
13493 packetFlushable = false,
13494 fragment; // do nothing if there is not enough buffered data for a complete
13495 // PES header
13496
13497 if (!stream.data.length || stream.size < 9) {
13498 return;
13499 }
13500
13501 event.trackId = stream.data[0].pid; // reassemble the packet
13502
13503 for (i = 0; i < stream.data.length; i++) {
13504 fragment = stream.data[i];
13505 packetData.set(fragment.data, offset);
13506 offset += fragment.data.byteLength;
13507 } // parse assembled packet's PES header
13508
13509
13510 parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
13511 // check that there is enough stream data to fill the packet
13512
13513 packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
13514
13515 if (forceFlush || packetFlushable) {
13516 stream.size = 0;
13517 stream.data.length = 0;
13518 } // only emit packets that are complete. this is to avoid assembling
13519 // incomplete PES packets due to poor segmentation
13520
13521
13522 if (packetFlushable) {
13523 self.trigger('data', event);
13524 }
13525 };
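// To illustrate the 33-bit arithmetic above: a PTS of 2^32 cannot be
// produced by 32-bit shifts alone, but building the top 31 bits
// (0x40000000) and computing 0x40000000 * 4 + 0 yields 4294967296,
// which JavaScript numbers represent exactly.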
13526
13527 _ElementaryStream.prototype.init.call(this);
13528 /**
13529 * Identifies M2TS packet types and parses PES packets using metadata
13530 * parsed from the PMT
13531 **/
13532
13533
13534 this.push = function (data) {
13535 ({
13536 pat: function pat() {// we have to wait for the PMT to arrive as well before we
13537 // have any meaningful metadata
13538 },
13539 pes: function pes() {
13540 var stream, streamType;
13541
13542 switch (data.streamType) {
13543 case streamTypes.H264_STREAM_TYPE:
13544 stream = video;
13545 streamType = 'video';
13546 break;
13547
13548 case streamTypes.ADTS_STREAM_TYPE:
13549 stream = audio;
13550 streamType = 'audio';
13551 break;
13552
13553 case streamTypes.METADATA_STREAM_TYPE:
13554 stream = timedMetadata;
13555 streamType = 'timed-metadata';
13556 break;
13557
13558 default:
13559 // ignore unknown stream types
13560 return;
13561 } // if a new packet is starting, we can flush the completed
13562 // packet
13563
13564
13565 if (data.payloadUnitStartIndicator) {
13566 flushStream(stream, streamType, true);
13567 } // buffer this fragment until we are sure we've received the
13568 // complete payload
13569
13570
13571 stream.data.push(data);
13572 stream.size += data.data.byteLength;
13573 },
13574 pmt: function pmt() {
13575 var event = {
13576 type: 'metadata',
13577 tracks: []
13578 };
13579 programMapTable = data.programMapTable; // translate audio and video streams to tracks
13580
13581 if (programMapTable.video !== null) {
13582 event.tracks.push({
13583 timelineStartInfo: {
13584 baseMediaDecodeTime: 0
13585 },
13586 id: +programMapTable.video,
13587 codec: 'avc',
13588 type: 'video'
13589 });
13590 }
13591
13592 if (programMapTable.audio !== null) {
13593 event.tracks.push({
13594 timelineStartInfo: {
13595 baseMediaDecodeTime: 0
13596 },
13597 id: +programMapTable.audio,
13598 codec: 'adts',
13599 type: 'audio'
13600 });
13601 }
13602
13603 segmentHadPmt = true;
13604 self.trigger('data', event);
13605 }
13606 })[data.type]();
13607 };
13608
13609 this.reset = function () {
13610 video.size = 0;
13611 video.data.length = 0;
13612 audio.size = 0;
13613 audio.data.length = 0;
13614 this.trigger('reset');
13615 };
13616 /**
13617 * Flush any remaining input. Video PES packets may be of variable
13618 * length. Normally, the start of a new video packet can trigger the
13619 * finalization of the previous packet. That is not possible if no
13620 * more video is forthcoming, however. In that case, some other
13621 * mechanism (like the end of the file) has to be employed. When it is
13622 * clear that no additional data is forthcoming, calling this method
13623 * will flush the buffered packets.
13624 */
13625
13626
13627 this.flushStreams_ = function () {
13628 // !!THIS ORDER IS IMPORTANT!!
13629 // video first then audio
13630 flushStream(video, 'video');
13631 flushStream(audio, 'audio');
13632 flushStream(timedMetadata, 'timed-metadata');
13633 };
13634
13635 this.flush = function () {
13636 // if on flush we haven't had a pmt emitted
13637 // and we have a pmt to emit. emit the pmt
13638 // so that we trigger a trackinfo downstream.
13639 if (!segmentHadPmt && programMapTable) {
13640 var pmt = {
13641 type: 'metadata',
13642 tracks: []
13643 }; // translate audio and video streams to tracks
13644
13645 if (programMapTable.video !== null) {
13646 pmt.tracks.push({
13647 timelineStartInfo: {
13648 baseMediaDecodeTime: 0
13649 },
13650 id: +programMapTable.video,
13651 codec: 'avc',
13652 type: 'video'
13653 });
13654 }
13655
13656 if (programMapTable.audio !== null) {
13657 pmt.tracks.push({
13658 timelineStartInfo: {
13659 baseMediaDecodeTime: 0
13660 },
13661 id: +programMapTable.audio,
13662 codec: 'adts',
13663 type: 'audio'
13664 });
13665 }
13666
13667 self.trigger('data', pmt);
13668 }
13669
13670 segmentHadPmt = false;
13671 this.flushStreams_();
13672 this.trigger('done');
13673 };
13674 };
13675
13676 _ElementaryStream.prototype = new stream();
13677 var m2ts = {
13678 PAT_PID: 0x0000,
13679 MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
13680 TransportPacketStream: _TransportPacketStream,
13681 TransportParseStream: _TransportParseStream,
13682 ElementaryStream: _ElementaryStream,
13683 TimestampRolloverStream: TimestampRolloverStream,
13684 CaptionStream: captionStream.CaptionStream,
13685 Cea608Stream: captionStream.Cea608Stream,
13686 Cea708Stream: captionStream.Cea708Stream,
13687 MetadataStream: metadataStream
13688 };
13689
13690 for (var type in streamTypes) {
13691 if (streamTypes.hasOwnProperty(type)) {
13692 m2ts[type] = streamTypes[type];
13693 }
13694 }
13695
13696 var m2ts_1 = m2ts;
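// A rough pipeline sketch, assuming the Stream#pipe helper wires 'data'
// events from one stage into the next stage's push():
//
//   var packetizer = new m2ts.TransportPacketStream();
//   var parser = new m2ts.TransportParseStream();
//   var elementary = new m2ts.ElementaryStream();
//   packetizer.pipe(parser).pipe(elementary);
//   elementary.on('data', function (evt) {
//     // evt.type is 'metadata' for track info, or 'video' / 'audio' /
//     // 'timed-metadata' for reassembled PES packets
//   });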
13697 var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;
13698
13699 var _AdtsStream;
13700
13701 var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
13702 /*
13703 * Accepts a ElementaryStream and emits data events with parsed
13704 * AAC Audio Frames of the individual packets. Input audio in ADTS
13705 * format is unpacked and re-emitted as AAC frames.
13706 *
13707 * @see http://wiki.multimedia.cx/index.php?title=ADTS
13708 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
13709 */
13710
13711 _AdtsStream = function AdtsStream(handlePartialSegments) {
13712 var buffer,
13713 frameNum = 0;
13714
13715 _AdtsStream.prototype.init.call(this);
13716
13717 this.skipWarn_ = function (start, end) {
13718 this.trigger('log', {
13719 level: 'warn',
13720 message: "adts skiping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
13721 });
13722 };
13723
13724 this.push = function (packet) {
13725 var i = 0,
13726 frameLength,
13727 protectionSkipBytes,
13728 oldBuffer,
13729 sampleCount,
13730 adtsFrameDuration;
13731
13732 if (!handlePartialSegments) {
13733 frameNum = 0;
13734 }
13735
13736 if (packet.type !== 'audio') {
13737 // ignore non-audio data
13738 return;
13739 } // Prepend any data in the buffer to the input data so that we can parse
13740 // AAC frames that cross a PES packet boundary
13741
13742
13743 if (buffer && buffer.length) {
13744 oldBuffer = buffer;
13745 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
13746 buffer.set(oldBuffer);
13747 buffer.set(packet.data, oldBuffer.byteLength);
13748 } else {
13749 buffer = packet.data;
13750 } // unpack any ADTS frames which have been fully received
13751 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
13752
13753
13754 var skip; // We use i + 7 here because we want to be able to parse the entire header.
13755 // If we don't have enough bytes to do that, then we definitely won't have a full frame.
13756
13757 while (i + 7 < buffer.length) {
13758 // Look for the start of an ADTS header.
13759 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
13760 if (typeof skip !== 'number') {
13761 skip = i;
13762 } // If a valid header was not found, jump one forward and attempt to
13763 // find a valid ADTS header starting at the next byte
13764
13765
13766 i++;
13767 continue;
13768 }
13769
13770 if (typeof skip === 'number') {
13771 this.skipWarn_(skip, i);
13772 skip = null;
13773 } // The protection skip bit tells us if we have 2 bytes of CRC data at the
13774 // end of the ADTS header
13775
13776
13777 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
13778 // end of the sync sequence
13779 // NOTE: frame length includes the size of the header
13780
13781 frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
13782 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
13783 adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
13784 // then we have to wait for more data
13785
13786 if (buffer.byteLength - i < frameLength) {
13787 break;
13788 } // Otherwise, deliver the complete AAC frame
13789
13790
13791 this.trigger('data', {
13792 pts: packet.pts + frameNum * adtsFrameDuration,
13793 dts: packet.dts + frameNum * adtsFrameDuration,
13794 sampleCount: sampleCount,
13795 audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
13796 channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
13797 samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
13798 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
13799 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
13800 samplesize: 16,
13801 // data is the frame without its header
13802 data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
13803 });
13804 frameNum++;
13805 i += frameLength;
13806 }
13807
13808 if (typeof skip === 'number') {
13809 this.skipWarn_(skip, i);
13810 skip = null;
13811 } // remove processed bytes from the buffer.
13812
13813
13814 buffer = buffer.subarray(i);
13815 };
13816
13817 this.flush = function () {
13818 frameNum = 0;
13819 this.trigger('done');
13820 };
13821
13822 this.reset = function () {
13823 buffer = void 0;
13824 this.trigger('reset');
13825 };
13826
13827 this.endTimeline = function () {
13828 buffer = void 0;
13829 this.trigger('endedtimeline');
13830 };
13831 };
13832
13833 _AdtsStream.prototype = new stream();
13834 var adts = _AdtsStream;
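// A worked decode of the header fields above, assuming the 7-byte ADTS
// header ff f1 50 80 2e 7f fc:
//
//   samplingfrequencyindex = (0x50 & 0x3c) >>> 2             // => 4 (44100 Hz)
//   audioobjecttype = ((0x50 >>> 6) & 0x03) + 1              // => 2 (AAC LC)
//   channelcount = ((0x50 & 1) << 2) | ((0x80 & 0xc0) >>> 6) // => 2
//   frameLength = ((0x80 & 0x03) << 11) | (0x2e << 3) | ((0x7f & 0xe0) >> 5) // => 371
//   protectionSkipBytes = (~0xf1 & 0x01) * 2                 // => 0 (no CRC present)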
13835 /**
13836 * mux.js
13837 *
13838 * Copyright (c) Brightcove
13839 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
13840 */
13841
13842 var ExpGolomb;
13843 /**
13844 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
13845 * scheme used by h264.
13846 */
13847
13848 ExpGolomb = function ExpGolomb(workingData) {
13849 var // the number of bytes left to examine in workingData
13850 workingBytesAvailable = workingData.byteLength,
13851 // the current word being examined
13852 workingWord = 0,
13853 // :uint
13854 // the number of bits left to examine in the current word
13855 workingBitsAvailable = 0; // :uint;
13856 // ():uint
13857
13858 this.length = function () {
13859 return 8 * workingBytesAvailable;
13860 }; // ():uint
13861
13862
13863 this.bitsAvailable = function () {
13864 return 8 * workingBytesAvailable + workingBitsAvailable;
13865 }; // ():void
13866
13867
13868 this.loadWord = function () {
13869 var position = workingData.byteLength - workingBytesAvailable,
13870 workingBytes = new Uint8Array(4),
13871 availableBytes = Math.min(4, workingBytesAvailable);
13872
13873 if (availableBytes === 0) {
13874 throw new Error('no bytes available');
13875 }
13876
13877 workingBytes.set(workingData.subarray(position, position + availableBytes));
13878 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
13879
13880 workingBitsAvailable = availableBytes * 8;
13881 workingBytesAvailable -= availableBytes;
13882 }; // (count:int):void
13883
13884
13885 this.skipBits = function (count) {
13886 var skipBytes; // :int
13887
13888 if (workingBitsAvailable > count) {
13889 workingWord <<= count;
13890 workingBitsAvailable -= count;
13891 } else {
13892 count -= workingBitsAvailable;
13893 skipBytes = Math.floor(count / 8);
13894 count -= skipBytes * 8;
13895 workingBytesAvailable -= skipBytes;
13896 this.loadWord();
13897 workingWord <<= count;
13898 workingBitsAvailable -= count;
13899 }
13900 }; // (size:int):uint
13901
13902
13903 this.readBits = function (size) {
13904 var bits = Math.min(workingBitsAvailable, size),
13905 // :uint
13906 valu = workingWord >>> 32 - bits; // :uint
13907 // if size > 31, handle error
13908
13909 workingBitsAvailable -= bits;
13910
13911 if (workingBitsAvailable > 0) {
13912 workingWord <<= bits;
13913 } else if (workingBytesAvailable > 0) {
13914 this.loadWord();
13915 }
13916
13917 bits = size - bits;
13918
13919 if (bits > 0) {
13920 return valu << bits | this.readBits(bits);
13921 }
13922
13923 return valu;
13924 }; // ():uint
13925
13926
13927 this.skipLeadingZeros = function () {
13928 var leadingZeroCount; // :uint
13929
13930 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
13931 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
13932 // the first bit of working word is 1
13933 workingWord <<= leadingZeroCount;
13934 workingBitsAvailable -= leadingZeroCount;
13935 return leadingZeroCount;
13936 }
13937 } // we exhausted workingWord and still have not found a 1
13938
13939
13940 this.loadWord();
13941 return leadingZeroCount + this.skipLeadingZeros();
13942 }; // ():void
13943
13944
13945 this.skipUnsignedExpGolomb = function () {
13946 this.skipBits(1 + this.skipLeadingZeros());
13947 }; // ():void
13948
13949
13950 this.skipExpGolomb = function () {
13951 this.skipBits(1 + this.skipLeadingZeros());
13952 }; // ():uint
13953
13954
13955 this.readUnsignedExpGolomb = function () {
13956 var clz = this.skipLeadingZeros(); // :uint
13957
13958 return this.readBits(clz + 1) - 1;
13959 }; // ():int
13960
13961
13962 this.readExpGolomb = function () {
13963 var valu = this.readUnsignedExpGolomb(); // :int
13964
13965 if (0x01 & valu) {
13966 // the number is odd if the low order bit is set
13967 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
13968 }
13969
13970 return -1 * (valu >>> 1); // divide by two then make it negative
13971 }; // Some convenience functions
13972 // :Boolean
13973
13974
13975 this.readBoolean = function () {
13976 return this.readBits(1) === 1;
13977 }; // ():int
13978
13979
13980 this.readUnsignedByte = function () {
13981 return this.readBits(8);
13982 };
13983
13984 this.loadWord();
13985 };
13986
13987 var expGolomb = ExpGolomb;
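// A small check of the decoder above: the byte 0x28 is 0b00101000 (two
// leading zeros, then '101'), which Exp-Golomb decodes as codeNum 4:
//
//   new ExpGolomb(new Uint8Array([0x28])).readUnsignedExpGolomb(); // => 4
//   new ExpGolomb(new Uint8Array([0x28])).readExpGolomb();         // => -2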
13988
13989 var _H264Stream, _NalByteStream;
13990
13991 var PROFILES_WITH_OPTIONAL_SPS_DATA;
13992 /**
13993 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
13994 */
13995
13996 _NalByteStream = function NalByteStream() {
13997 var syncPoint = 0,
13998 i,
13999 buffer;
14000
14001 _NalByteStream.prototype.init.call(this);
14002 /*
14003 * Scans a byte stream and triggers a data event with the NAL units found.
14004 * @param {Object} data Event received from H264Stream
14005 * @param {Uint8Array} data.data The h264 byte stream to be scanned
14006 *
14007 * @see H264Stream.push
14008 */
14009
14010
14011 this.push = function (data) {
14012 var swapBuffer;
14013
14014 if (!buffer) {
14015 buffer = data.data;
14016 } else {
14017 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
14018 swapBuffer.set(buffer);
14019 swapBuffer.set(data.data, buffer.byteLength);
14020 buffer = swapBuffer;
14021 }
14022
14023 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
14024 // scan for NAL unit boundaries
14025 // a match looks like this:
14026 // 0 0 1 .. NAL .. 0 0 1
14027 // ^ sync point ^ i
14028 // or this:
14029 // 0 0 1 .. NAL .. 0 0 0
14030 // ^ sync point ^ i
14031 // advance the sync point to a NAL start, if necessary
14032
14033 for (; syncPoint < len - 3; syncPoint++) {
14034 if (buffer[syncPoint + 2] === 1) {
14035 // the sync point is properly aligned
14036 i = syncPoint + 5;
14037 break;
14038 }
14039 }
14040
14041 while (i < len) {
14042 // look at the current byte to determine if we've hit the end of
14043 // a NAL unit boundary
14044 switch (buffer[i]) {
14045 case 0:
14046 // skip past non-sync sequences
14047 if (buffer[i - 1] !== 0) {
14048 i += 2;
14049 break;
14050 } else if (buffer[i - 2] !== 0) {
14051 i++;
14052 break;
14053 } // deliver the NAL unit if it isn't empty
14054
14055
14056 if (syncPoint + 3 !== i - 2) {
14057 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
14058 } // drop trailing zeroes
14059
14060
14061 do {
14062 i++;
14063 } while (i < len && buffer[i] !== 1); // check the bound before reading the byte
14064
14065 syncPoint = i - 2;
14066 i += 3;
14067 break;
14068
14069 case 1:
14070 // skip past non-sync sequences
14071 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
14072 i += 3;
14073 break;
14074 } // deliver the NAL unit
14075
14076
14077 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
14078 syncPoint = i - 2;
14079 i += 3;
14080 break;
14081
14082 default:
14083 // the current byte isn't a one or zero, so it cannot be part
14084 // of a sync sequence
14085 i += 3;
14086 break;
14087 }
14088 } // filter out the NAL units that were delivered
14089
14090
14091 buffer = buffer.subarray(syncPoint);
14092 i -= syncPoint;
14093 syncPoint = 0;
14094 };
14095
14096 this.reset = function () {
14097 buffer = null;
14098 syncPoint = 0;
14099 this.trigger('reset');
14100 };
14101
14102 this.flush = function () {
14103 // deliver the last buffered NAL unit
14104 if (buffer && buffer.byteLength > 3) {
14105 this.trigger('data', buffer.subarray(syncPoint + 3));
14106 } // reset the stream state
14107
14108
14109 buffer = null;
14110 syncPoint = 0;
14111 this.trigger('done');
14112 };
14113
14114 this.endTimeline = function () {
14115 this.flush();
14116 this.trigger('endedtimeline');
14117 };
14118 };
14119
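  // An illustrative, never-invoked sketch of the Annex B scan implemented by
  // the stream above: pushing bytes that contain two 0x000001 start codes
  // emits one 'data' event per NAL payload (0x65, then 0x41 on flush). The
  // helper name is the editor's; everything else is defined in this file.
  function exampleNalByteStreamSplit() {
    var nals = [];
    var nalStream = new _NalByteStream();
    nalStream.on('data', function (nal) {
      nals.push(nal);
    });
    nalStream.push({
      data: new Uint8Array([0x00, 0x00, 0x01, 0x65, 0x00, 0x00, 0x01, 0x41])
    });
    nalStream.flush();
    return nals; // [Uint8Array([0x65]), Uint8Array([0x41])]
  }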
14120 _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
14121 // see Recommendation ITU-T H.264 (4/2013),
14122 // 7.3.2.1.1 Sequence parameter set data syntax
14123
14124 PROFILES_WITH_OPTIONAL_SPS_DATA = {
14125 100: true,
14126 110: true,
14127 122: true,
14128 244: true,
14129 44: true,
14130 83: true,
14131 86: true,
14132 118: true,
14133 128: true,
14134 // TODO: the three profiles below don't
14135 // appear to have SPS data in the specification anymore?
14136 138: true,
14137 139: true,
14138 134: true
14139 };
14140 /**
14141 * Accepts input from an ElementaryStream and produces H.264 NAL unit data
14142 * events.
14143 */
14144
14145 _H264Stream = function H264Stream() {
14146 var nalByteStream = new _NalByteStream(),
14147 self,
14148 trackId,
14149 currentPts,
14150 currentDts,
14151 discardEmulationPreventionBytes,
14152 readSequenceParameterSet,
14153 skipScalingList;
14154
14155 _H264Stream.prototype.init.call(this);
14156
14157 self = this;
14158 /*
14159 * Pushes a packet from a stream onto the NalByteStream
14160 *
14161 * @param {Object} packet - A packet received from a stream
14162 * @param {Uint8Array} packet.data - The raw bytes of the packet
14163 * @param {Number} packet.dts - Decode timestamp of the packet
14164 * @param {Number} packet.pts - Presentation timestamp of the packet
14165 * @param {Number} packet.trackId - The id of the h264 track this packet came from
14166 * @param {('video'|'audio')} packet.type - The type of packet
14167 *
14168 */
14169
14170 this.push = function (packet) {
14171 if (packet.type !== 'video') {
14172 return;
14173 }
14174
14175 trackId = packet.trackId;
14176 currentPts = packet.pts;
14177 currentDts = packet.dts;
14178 nalByteStream.push(packet);
14179 };
14180 /*
14181 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
14182 * for the NALUs to the next stream component.
14183 * Also, preprocess caption and sequence parameter NALUs.
14184 *
14185 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
14186 * @see NalByteStream.push
14187 */
14188
14189
14190 nalByteStream.on('data', function (data) {
14191 var event = {
14192 trackId: trackId,
14193 pts: currentPts,
14194 dts: currentDts,
14195 data: data,
14196 nalUnitTypeCode: data[0] & 0x1f
14197 };
14198
14199 switch (event.nalUnitTypeCode) {
14200 case 0x05:
14201 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
14202 break;
14203
14204 case 0x06:
14205 event.nalUnitType = 'sei_rbsp';
14206 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
14207 break;
14208
14209 case 0x07:
14210 event.nalUnitType = 'seq_parameter_set_rbsp';
14211 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
14212 event.config = readSequenceParameterSet(event.escapedRBSP);
14213 break;
14214
14215 case 0x08:
14216 event.nalUnitType = 'pic_parameter_set_rbsp';
14217 break;
14218
14219 case 0x09:
14220 event.nalUnitType = 'access_unit_delimiter_rbsp';
14221 break;
14222 } // This triggers data on the H264Stream
14223
14224
14225 self.trigger('data', event);
14226 });
14227 nalByteStream.on('done', function () {
14228 self.trigger('done');
14229 });
14230 nalByteStream.on('partialdone', function () {
14231 self.trigger('partialdone');
14232 });
14233 nalByteStream.on('reset', function () {
14234 self.trigger('reset');
14235 });
14236 nalByteStream.on('endedtimeline', function () {
14237 self.trigger('endedtimeline');
14238 });
14239
14240 this.flush = function () {
14241 nalByteStream.flush();
14242 };
14243
14244 this.partialFlush = function () {
14245 nalByteStream.partialFlush();
14246 };
14247
14248 this.reset = function () {
14249 nalByteStream.reset();
14250 };
14251
14252 this.endTimeline = function () {
14253 nalByteStream.endTimeline();
14254 };
14255 /**
14256 * Advance the ExpGolomb decoder past a scaling list. The scaling
14257 * list is optionally transmitted as part of a sequence parameter
14258 * set and is not relevant to transmuxing.
14259 * @param count {number} the number of entries in this scaling list
14260 * @param expGolombDecoder {object} an ExpGolomb pointed to the
14261 * start of a scaling list
14262 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
14263 */
14264
14265
14266 skipScalingList = function skipScalingList(count, expGolombDecoder) {
14267 var lastScale = 8,
14268 nextScale = 8,
14269 j,
14270 deltaScale;
14271
14272 for (j = 0; j < count; j++) {
14273 if (nextScale !== 0) {
14274 deltaScale = expGolombDecoder.readExpGolomb();
14275 nextScale = (lastScale + deltaScale + 256) % 256;
14276 }
14277
14278 lastScale = nextScale === 0 ? lastScale : nextScale;
14279 }
14280 };
14281 /**
14282 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
14283 * Sequence Payload"
14284 * @param data {Uint8Array} the bytes of a RBSP from a NAL
14285 * unit
14286 * @return {Uint8Array} the RBSP without any Emulation
14287 * Prevention Bytes
14288 */
14289
14290
14291 discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
14292 var length = data.byteLength,
14293 emulationPreventionBytesPositions = [],
14294 i = 1,
14295 newLength,
14296 newData; // Find all `Emulation Prevention Bytes`
14297
14298 while (i < length - 2) {
14299 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
14300 emulationPreventionBytesPositions.push(i + 2);
14301 i += 2;
14302 } else {
14303 i++;
14304 }
14305 } // If no Emulation Prevention Bytes were found just return the original
14306 // array
14307
14308
14309 if (emulationPreventionBytesPositions.length === 0) {
14310 return data;
14311 } // Create a new array to hold the NAL unit data
14312
14313
14314 newLength = length - emulationPreventionBytesPositions.length;
14315 newData = new Uint8Array(newLength);
14316 var sourceIndex = 0;
14317
14318 for (i = 0; i < newLength; sourceIndex++, i++) {
14319 if (sourceIndex === emulationPreventionBytesPositions[0]) {
14320 // Skip this byte
14321 sourceIndex++; // Remove this position index
14322
14323 emulationPreventionBytesPositions.shift();
14324 }
14325
14326 newData[i] = data[sourceIndex];
14327 }
14328
14329 return newData;
14330 };
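    // An illustrative, never-invoked sketch of the escaping handled above:
    // encoders insert an emulation prevention byte (0x03) after any 00 00
    // inside an RBSP so payload bytes cannot mimic a start code, and
    // discarding it restores the raw payload. The helper name is the editor's.
    function exampleDiscardEpb() {
      // [0x01, 0x00, 0x00, 0x03, 0x01] -> [0x01, 0x00, 0x00, 0x01]
      return discardEmulationPreventionBytes(new Uint8Array([0x01, 0x00, 0x00, 0x03, 0x01]));
    }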
14331 /**
14332 * Read a sequence parameter set and return some interesting video
14333 * properties. A sequence parameter set is the H264 metadata that
14334 * describes the properties of upcoming video frames.
14335 * @param data {Uint8Array} the bytes of a sequence parameter set
14336 * @return {object} an object with configuration parsed from the
14337 * sequence parameter set, including the dimensions of the
14338 * associated video frames.
14339 */
14340
14341
14342 readSequenceParameterSet = function readSequenceParameterSet(data) {
14343 var frameCropLeftOffset = 0,
14344 frameCropRightOffset = 0,
14345 frameCropTopOffset = 0,
14346 frameCropBottomOffset = 0,
14347 expGolombDecoder,
14348 profileIdc,
14349 levelIdc,
14350 profileCompatibility,
14351 chromaFormatIdc,
14352 picOrderCntType,
14353 numRefFramesInPicOrderCntCycle,
14354 picWidthInMbsMinus1,
14355 picHeightInMapUnitsMinus1,
14356 frameMbsOnlyFlag,
14357 scalingListCount,
14358 sarRatio = [1, 1],
14359 aspectRatioIdc,
14360 i;
14361 expGolombDecoder = new expGolomb(data);
14362 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
14363
14364 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
14365
14366 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
14367
14368 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
14369 // some profiles have more optional data we don't need
14370
14371 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
14372 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
14373
14374 if (chromaFormatIdc === 3) {
14375 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
14376 }
14377
14378 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
14379
14380 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
14381
14382 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
14383
14384 if (expGolombDecoder.readBoolean()) {
14385 // seq_scaling_matrix_present_flag
14386 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
14387
14388 for (i = 0; i < scalingListCount; i++) {
14389 if (expGolombDecoder.readBoolean()) {
14390 // seq_scaling_list_present_flag[ i ]
14391 if (i < 6) {
14392 skipScalingList(16, expGolombDecoder);
14393 } else {
14394 skipScalingList(64, expGolombDecoder);
14395 }
14396 }
14397 }
14398 }
14399 }
14400
14401 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
14402
14403 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
14404
14405 if (picOrderCntType === 0) {
14406 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
14407 } else if (picOrderCntType === 1) {
14408 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
14409
14410 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
14411
14412 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
14413
14414 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
14415
14416 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
14417 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
14418 }
14419 }
14420
14421 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
14422
14423 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
14424
14425 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
14426 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
14427 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
14428
14429 if (frameMbsOnlyFlag === 0) {
14430 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
14431 }
14432
14433 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
14434
14435 if (expGolombDecoder.readBoolean()) {
14436 // frame_cropping_flag
14437 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
14438 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
14439 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
14440 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
14441 }
14442
14443 if (expGolombDecoder.readBoolean()) {
14444 // vui_parameters_present_flag
14445 if (expGolombDecoder.readBoolean()) {
14446 // aspect_ratio_info_present_flag
14447 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
14448
14449 switch (aspectRatioIdc) {
14450 case 1:
14451 sarRatio = [1, 1];
14452 break;
14453
14454 case 2:
14455 sarRatio = [12, 11];
14456 break;
14457
14458 case 3:
14459 sarRatio = [10, 11];
14460 break;
14461
14462 case 4:
14463 sarRatio = [16, 11];
14464 break;
14465
14466 case 5:
14467 sarRatio = [40, 33];
14468 break;
14469
14470 case 6:
14471 sarRatio = [24, 11];
14472 break;
14473
14474 case 7:
14475 sarRatio = [20, 11];
14476 break;
14477
14478 case 8:
14479 sarRatio = [32, 11];
14480 break;
14481
14482 case 9:
14483 sarRatio = [80, 33];
14484 break;
14485
14486 case 10:
14487 sarRatio = [18, 11];
14488 break;
14489
14490 case 11:
14491 sarRatio = [15, 11];
14492 break;
14493
14494 case 12:
14495 sarRatio = [64, 33];
14496 break;
14497
14498 case 13:
14499 sarRatio = [160, 99];
14500 break;
14501
14502 case 14:
14503 sarRatio = [4, 3];
14504 break;
14505
14506 case 15:
14507 sarRatio = [3, 2];
14508 break;
14509
14510 case 16:
14511 sarRatio = [2, 1];
14512 break;
14513
14514 case 255:
14515 {
14516 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
14517 break;
14518 }
14519 }
14520
14521 // sarRatio is returned below; its scale factor is sarRatio[0] / sarRatio[1]
14524 }
14525 }
14526
14527 return {
14528 profileIdc: profileIdc,
14529 levelIdc: levelIdc,
14530 profileCompatibility: profileCompatibility,
14531 width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
14532 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
14533 // sar is sample aspect ratio
14534 sarRatio: sarRatio
14535 };
14536 };
14537 };
14538
14539 _H264Stream.prototype = new stream();
14540 var h264 = {
14541 H264Stream: _H264Stream,
14542 NalByteStream: _NalByteStream
14543 };
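  // An illustrative, never-invoked sketch of the dimension math inside
  // readSequenceParameterSet above. A 1280x720 progressive stream carries
  // pic_width_in_mbs_minus1 = 79, pic_height_in_map_units_minus1 = 44 and
  // frame_mbs_only_flag = 1, with no cropping; the values below are the
  // editor's example, not taken from a real SPS.
  function exampleSpsDimensions() {
    var picWidthInMbsMinus1 = 79;
    var picHeightInMapUnitsMinus1 = 44;
    var frameMbsOnlyFlag = 1;
    return {
      width: (picWidthInMbsMinus1 + 1) * 16, // (79 + 1) * 16 = 1280
      height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 // 1 * 45 * 16 = 720
    };
  }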
14544 /**
14545 * mux.js
14546 *
14547 * Copyright (c) Brightcove
14548 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
14549 *
14550 * Utilities to detect basic properties and metadata about AAC data.
14551 */
14552
14553 var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
14554
14555 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
14556 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
14557 flags = header[byteIndex + 5],
14558 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
14559
14560 returnSize = returnSize >= 0 ? returnSize : 0;
14561
14562 if (footerPresent) {
14563 return returnSize + 20;
14564 }
14565
14566 return returnSize + 10;
14567 };
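  // An illustrative, never-invoked sketch of the syncsafe size math above:
  // ID3v2 sizes use 7 useful bits per byte, and the 10-byte header (plus a
  // 10-byte footer when bit 4 of the flags byte is set) is added on top. The
  // header bytes below are the editor's example.
  function exampleId3TagSize() {
    // 'I' 'D' '3', version 4.0, flags 0x00 (no footer), size 0x00 0x00 0x02 0x01
    var header = new Uint8Array([0x49, 0x44, 0x33, 0x04, 0x00, 0x00, 0x00, 0x00, 0x02, 0x01]);
    return parseId3TagSize(header, 0); // (2 << 7 | 1) + 10 = 267
  }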
14568
14569 var getId3Offset = function getId3Offset(data, offset) {
14570 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
14571 return offset;
14572 }
14573
14574 offset += parseId3TagSize(data, offset);
14575 return getId3Offset(data, offset);
14576 }; // TODO: use vhs-utils
14577
14578
14579 var isLikelyAacData$1 = function isLikelyAacData(data) {
14580 var offset = getId3Offset(data, 0);
14581 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, aka this
14582 // is not mp3 data but aac data.
14583 (data[offset + 1] & 0x16) === 0x10;
14584 };
14585
14586 var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
14587 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
14588 }; // return a percent-encoded representation of the specified byte range
14589 // @see http://en.wikipedia.org/wiki/Percent-encoding
14590
14591
14592 var percentEncode = function percentEncode(bytes, start, end) {
14593 var i,
14594 result = '';
14595
14596 for (i = start; i < end; i++) {
14597 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
14598 }
14599
14600 return result;
14601 }; // return the string representation of the specified byte range,
14602 // interpreted as ISO-8859-1.
14603
14604
14605 var parseIso88591 = function parseIso88591(bytes, start, end) {
14606 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
14607 };
14608
14609 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
14610 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
14611 middle = header[byteIndex + 4] << 3,
14612 highTwo = (header[byteIndex + 3] & 0x3) << 11; // parenthesized: << binds tighter than &
14613 return highTwo | middle | lowThree;
14614 };
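  // An illustrative, never-invoked sketch of the 13-bit aac_frame_length
  // reconstruction above: the two low bits of byte 3, all of byte 4, and the
  // top three bits of byte 5. The header below (the editor's example) encodes
  // a 417-byte frame as 0x80 / 0x34 / 0x20:
  function exampleAdtsSize() {
    var header = new Uint8Array([0xFF, 0xF1, 0x50, 0x80, 0x34, 0x20, 0xFC]);
    return parseAdtsSize(header, 0); // ((0x80 & 0x3) << 11) | (0x34 << 3) | ((0x20 & 0xE0) >> 5) = 417
  }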
14615
14616 var parseType$2 = function parseType(header, byteIndex) {
14617 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
14618 return 'timed-metadata';
14619 } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) { // parenthesized: === binds tighter than &
14620 return 'audio';
14621 }
14622
14623 return null;
14624 };
14625
14626 var parseSampleRate = function parseSampleRate(packet) {
14627 var i = 0;
14628
14629 while (i + 5 < packet.length) {
14630 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
14631 // If a valid header was not found, jump one forward and attempt to
14632 // find a valid ADTS header starting at the next byte
14633 i++;
14634 continue;
14635 }
14636
14637 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
14638 }
14639
14640 return null;
14641 };
14642
14643 var parseAacTimestamp = function parseAacTimestamp(packet) {
14644 var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
14645
14646 frameStart = 10;
14647
14648 if (packet[5] & 0x40) {
14649 // advance the frame start past the extended header
14650 frameStart += 4; // header size field
14651
14652 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
14653 } // parse one or more ID3 frames
14654 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
14655
14656
14657 do {
14658 // determine the number of bytes in this frame
14659 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
14660
14661 if (frameSize < 1) {
14662 return null;
14663 }
14664
14665 frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
14666
14667 if (frameHeader === 'PRIV') {
14668 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
14669
14670 for (var i = 0; i < frame.byteLength; i++) {
14671 if (frame[i] === 0) {
14672 var owner = parseIso88591(frame, 0, i);
14673
14674 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
14675 var d = frame.subarray(i + 1);
14676 var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
14677 size *= 4;
14678 size += d[7] & 0x03;
14679 return size;
14680 }
14681
14682 break;
14683 }
14684 }
14685 }
14686
14687 frameStart += 10; // advance past the frame header
14688
14689 frameStart += frameSize; // advance past the frame body
14690 } while (frameStart < packet.byteLength);
14691
14692 return null;
14693 };
14694
14695 var utils = {
14696 isLikelyAacData: isLikelyAacData$1,
14697 parseId3TagSize: parseId3TagSize,
14698 parseAdtsSize: parseAdtsSize,
14699 parseType: parseType$2,
14700 parseSampleRate: parseSampleRate,
14701 parseAacTimestamp: parseAacTimestamp
14702 };
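  // An illustrative, never-invoked sketch of the timestamp reconstruction in
  // parseAacTimestamp above. Apple's com.apple.streaming.transportStreamTimestamp
  // PRIV frame carries a 33-bit 90kHz PTS in 8 bytes; the top bit lives in
  // d[3] and the low two bits in d[7], so the value is rebuilt in two steps
  // to stay within the 32-bit range of JavaScript's bitwise operators.
  function exampleTransportStreamTimestamp(d) {
    var pts = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
    return pts * 4 + (d[7] & 0x03); // e.g. [0, 0, 0, 0, 0, 0, 0, 0x07] -> 7
  }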
14703
14704 var _AacStream;
14705 /**
14706 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
14707 */
14708
14709
14710 _AacStream = function AacStream() {
14711 var everything = new Uint8Array(),
14712 timeStamp = 0;
14713
14714 _AacStream.prototype.init.call(this);
14715
14716 this.setTimestamp = function (timestamp) {
14717 timeStamp = timestamp;
14718 };
14719
14720 this.push = function (bytes) {
14721 var frameSize = 0,
14722 byteIndex = 0,
14723 bytesLeft,
14724 chunk,
14725 packet,
14726 tempLength; // If there are bytes remaining from the last segment, prepend them to the
14727 // bytes that were pushed in
14728
14729 if (everything.length) {
14730 tempLength = everything.length;
14731 var swapBuffer = new Uint8Array(bytes.byteLength + tempLength);
14732 swapBuffer.set(everything.subarray(0, tempLength)); // copy the leftover bytes before rebinding `everything`, otherwise they are lost
14733 swapBuffer.set(bytes, tempLength); everything = swapBuffer;
14734 } else {
14735 everything = bytes;
14736 }
14737
14738 while (everything.length - byteIndex >= 3) {
14739 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
14740 // Exit early because we don't have enough to parse
14741 // the ID3 tag header
14742 if (everything.length - byteIndex < 10) {
14743 break;
14744 } // check framesize
14745
14746
14747 frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
14748 // to emit a full packet
14749 // Add to byteIndex to support multiple ID3 tags in sequence
14750
14751 if (byteIndex + frameSize > everything.length) {
14752 break;
14753 }
14754
14755 chunk = {
14756 type: 'timed-metadata',
14757 data: everything.subarray(byteIndex, byteIndex + frameSize)
14758 };
14759 this.trigger('data', chunk);
14760 byteIndex += frameSize;
14761 continue;
14762 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
14763 // Exit early because we don't have enough to parse
14764 // the ADTS frame header
14765 if (everything.length - byteIndex < 7) {
14766 break;
14767 }
14768
14769 frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
14770 // to emit a full packet
14771
14772 if (byteIndex + frameSize > everything.length) {
14773 break;
14774 }
14775
14776 packet = {
14777 type: 'audio',
14778 data: everything.subarray(byteIndex, byteIndex + frameSize),
14779 pts: timeStamp,
14780 dts: timeStamp
14781 };
14782 this.trigger('data', packet);
14783 byteIndex += frameSize;
14784 continue;
14785 }
14786
14787 byteIndex++;
14788 }
14789
14790 bytesLeft = everything.length - byteIndex;
14791
14792 if (bytesLeft > 0) {
14793 everything = everything.subarray(byteIndex);
14794 } else {
14795 everything = new Uint8Array();
14796 }
14797 };
14798
14799 this.reset = function () {
14800 everything = new Uint8Array();
14801 this.trigger('reset');
14802 };
14803
14804 this.endTimeline = function () {
14805 everything = new Uint8Array();
14806 this.trigger('endedtimeline');
14807 };
14808 };
14809
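  // An illustrative, never-invoked usage sketch for the stream above: feed it
  // raw AAC (ADTS frames, optionally preceded by ID3 tags) and it emits one
  // 'data' event per complete frame or tag, buffering partial input until
  // more bytes arrive. The helper name and `adtsBytes` are the editor's.
  function exampleAacStreamUsage(adtsBytes) {
    var aacStream = new _AacStream();
    aacStream.on('data', function (chunk) {
      // chunk.type is 'audio' for ADTS frames, 'timed-metadata' for ID3 tags
    });
    aacStream.push(adtsBytes);
    return aacStream;
  }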
14810 _AacStream.prototype = new stream();
14811 var aac = _AacStream; // constants
14812
14813 var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
14814 var audioProperties = AUDIO_PROPERTIES;
14815 var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
14816 var videoProperties = VIDEO_PROPERTIES;
14817 var H264Stream = h264.H264Stream;
14818 var isLikelyAacData = utils.isLikelyAacData;
14819 var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types
14820
14821 var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
14822
14823 var retriggerForStream = function retriggerForStream(key, event) {
14824 event.stream = key;
14825 this.trigger('log', event);
14826 };
14827
14828 var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
14829 var keys = Object.keys(pipeline);
14830
14831 for (var i = 0; i < keys.length; i++) {
14832 var key = keys[i]; // skip non-stream keys and headOfPipeline
14833 // which is just a duplicate
14834
14835 if (key === 'headOfPipeline' || !pipeline[key].on) {
14836 continue;
14837 }
14838
14839 pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
14840 }
14841 };
14842 /**
14843 * Compare two arrays (even typed) for same-ness
14844 */
14845
14846
14847 var arrayEquals = function arrayEquals(a, b) {
14848 var i;
14849
14850 if (a.length !== b.length) {
14851 return false;
14852 } // compare the value of each element in the array
14853
14854
14855 for (i = 0; i < a.length; i++) {
14856 if (a[i] !== b[i]) {
14857 return false;
14858 }
14859 }
14860
14861 return true;
14862 };
14863
14864 var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
14865 var ptsOffsetFromDts = startPts - startDts,
14866 decodeDuration = endDts - startDts,
14867 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
14868 // however, the player time values will reflect a start from the baseMediaDecodeTime.
14869 // In order to provide relevant values for the player times, base timing info on the
14870 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
14871
14872 return {
14873 start: {
14874 dts: baseMediaDecodeTime,
14875 pts: baseMediaDecodeTime + ptsOffsetFromDts
14876 },
14877 end: {
14878 dts: baseMediaDecodeTime + decodeDuration,
14879 pts: baseMediaDecodeTime + presentationDuration
14880 },
14881 prependedContentDuration: prependedContentDuration,
14882 baseMediaDecodeTime: baseMediaDecodeTime
14883 };
14884 };
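  // An illustrative, never-invoked sketch of the mapping above. With a
  // baseMediaDecodeTime of 90000 (one second on the 90kHz clock) and stream
  // times running dts 180000-270000 / pts 183000-273000, the durations are
  // rebased onto the player timeline (the numbers are the editor's example):
  function exampleSegmentTimingInfo() {
    return generateSegmentTimingInfo(90000, 180000, 183000, 270000, 273000, 0);
    // => { start: { dts: 90000, pts: 93000 },
    //      end: { dts: 180000, pts: 180000 },
    //      prependedContentDuration: 0,
    //      baseMediaDecodeTime: 90000 }
  }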
14885 /**
14886 * Constructs a single-track, ISO BMFF media segment from AAC data
14887 * events. The output of this stream can be fed to a SourceBuffer
14888 * configured with a suitable initialization segment.
14889 * @param track {object} track metadata configuration
14890 * @param options {object} transmuxer options object
14891 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
14892 * in the source; false to adjust the first segment to start at 0.
14893 */
14894
14895
14896 _AudioSegmentStream = function AudioSegmentStream(track, options) {
14897 var adtsFrames = [],
14898 sequenceNumber,
14899 earliestAllowedDts = 0,
14900 audioAppendStartTs = 0,
14901 videoBaseMediaDecodeTime = Infinity;
14902 options = options || {};
14903 sequenceNumber = options.firstSequenceNumber || 0;
14904
14905 _AudioSegmentStream.prototype.init.call(this);
14906
14907 this.push = function (data) {
14908 trackDecodeInfo.collectDtsInfo(track, data);
14909
14910 if (track) {
14911 audioProperties.forEach(function (prop) {
14912 track[prop] = data[prop];
14913 });
14914 } // buffer audio data until end() is called
14915
14916
14917 adtsFrames.push(data);
14918 };
14919
14920 this.setEarliestDts = function (earliestDts) {
14921 earliestAllowedDts = earliestDts;
14922 };
14923
14924 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
14925 videoBaseMediaDecodeTime = baseMediaDecodeTime;
14926 };
14927
14928 this.setAudioAppendStart = function (timestamp) {
14929 audioAppendStartTs = timestamp;
14930 };
14931
14932 this.flush = function () {
14933 var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
14934
14935 if (adtsFrames.length === 0) {
14936 this.trigger('done', 'AudioSegmentStream');
14937 return;
14938 }
14939
14940 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
14941 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // the amount of audio prefixed as silence, measured in the video clock rather than the audio clock
14942
14943 videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
14944 // samples (that is, adts frames) in the audio data
14945
14946 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
14947
14948 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
14949 adtsFrames = [];
14950 moof = mp4Generator.moof(sequenceNumber, [track]);
14951 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
14952
14953 sequenceNumber++;
14954 boxes.set(moof);
14955 boxes.set(mdat, moof.byteLength);
14956 trackDecodeInfo.clearDtsInfo(track);
14957 frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
14958 // tests) on adding the timingInfo event. However, it seems unlikely that there's a
14959 // valid use-case where an init segment/data should be triggered without associated
14960 // frames. Leaving for now, but should be looked into.
14961
14962 if (frames.length) {
14963 segmentDuration = frames.length * frameDuration;
14964 this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
14965 // frame info is in video clock cycles. Convert to match expectation of
14966 // listeners (that all timestamps will be based on video clock cycles).
14967 clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
14968 frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
14969 this.trigger('timingInfo', {
14970 start: frames[0].pts,
14971 end: frames[0].pts + segmentDuration
14972 });
14973 }
14974
14975 this.trigger('data', {
14976 track: track,
14977 boxes: boxes
14978 });
14979 this.trigger('done', 'AudioSegmentStream');
14980 };
14981
14982 this.reset = function () {
14983 trackDecodeInfo.clearDtsInfo(track);
14984 adtsFrames = [];
14985 this.trigger('reset');
14986 };
14987 };
14988
14989 _AudioSegmentStream.prototype = new stream();
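  // An illustrative, never-invoked sketch of the frame duration math used by
  // the flush above: an AAC frame always holds 1024 samples, so at 48000Hz a
  // frame spans 90000 * 1024 / 48000 = 1920 ticks of the 90kHz MPEG-TS clock.
  // The helper name is the editor's.
  function exampleAacFrameDuration(samplerate) {
    return Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / samplerate); // 1920 for 48000
  }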
14990 /**
14991 * Constructs a single-track, ISO BMFF media segment from H264 data
14992 * events. The output of this stream can be fed to a SourceBuffer
14993 * configured with a suitable initialization segment.
14994 * @param track {object} track metadata configuration
14995 * @param options {object} transmuxer options object
14996 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
14997 * gopsToAlignWith list when attempting to align gop pts
14998 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
14999 * in the source; false to adjust the first segment to start at 0.
15000 */
15001
15002 _VideoSegmentStream = function VideoSegmentStream(track, options) {
15003 var sequenceNumber,
15004 nalUnits = [],
15005 gopsToAlignWith = [],
15006 config,
15007 pps;
15008 options = options || {};
15009 sequenceNumber = options.firstSequenceNumber || 0;
15010
15011 _VideoSegmentStream.prototype.init.call(this);
15012
15013 delete track.minPTS;
15014 this.gopCache_ = [];
15015 /**
15016 * Constructs a ISO BMFF segment given H264 nalUnits
15017 * @param {Object} nalUnit A data event representing a nalUnit
15018 * @param {String} nalUnit.nalUnitType
15019 * @param {Object} nalUnit.config Properties for a mp4 track
15020 * @param {Uint8Array} nalUnit.data The nalUnit bytes
15021 * @see lib/codecs/h264.js
15022 **/
15023
15024 this.push = function (nalUnit) {
15025 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
15026
15027 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
15028 config = nalUnit.config;
15029 track.sps = [nalUnit.data];
15030 videoProperties.forEach(function (prop) {
15031 track[prop] = config[prop];
15032 }, this);
15033 }
15034
15035 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
15036 pps = nalUnit.data;
15037 track.pps = [nalUnit.data];
15038 } // buffer video until flush() is called
15039
15040
15041 nalUnits.push(nalUnit);
15042 };
15043 /**
15044 * Pass constructed ISO BMFF track and boxes on to the
15045 * next stream in the pipeline
15046 **/
15047
15048
15049 this.flush = function () {
15050 var frames,
15051 gopForFusion,
15052 gops,
15053 moof,
15054 mdat,
15055 boxes,
15056 prependedContentDuration = 0,
15057 firstGop,
15058 lastGop; // Throw away nalUnits at the start of the byte stream until
15059 // we find the first AUD
15060
15061 while (nalUnits.length) {
15062 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
15063 break;
15064 }
15065
15066 nalUnits.shift();
15067 } // Return early if no video data has been observed
15068
15069
15070 if (nalUnits.length === 0) {
15071 this.resetStream_();
15072 this.trigger('done', 'VideoSegmentStream');
15073 return;
15074 } // Organize the raw nal-units into arrays that represent
15075 // higher-level constructs such as frames and gops
15076 // (group-of-pictures)
15077
15078
15079 frames = frameUtils.groupNalsIntoFrames(nalUnits);
15080 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
15081 // a problem since MSE (on Chrome) requires a leading keyframe.
15082 //
15083 // We have two approaches to repairing this situation:
15084 // 1) GOP-FUSION:
15085 // This is where we keep track of the GOPS (group-of-pictures)
15086 // from previous fragments and attempt to find one that we can
15087 // prepend to the current fragment in order to create a valid
15088 // fragment.
15089 // 2) KEYFRAME-PULLING:
15090 // Here we search for the first keyframe in the fragment and
15091 // throw away all the frames between the start of the fragment
15092 // and that keyframe. We then extend the duration and pull the
15093 // PTS of the keyframe forward so that it covers the time range
15094 // of the frames that were disposed of.
15095 //
15096 // #1 is far preferable to #2 (which can cause "stuttering") but
15097 // requires more things to be just right.
15098
15099 if (!gops[0][0].keyFrame) {
15100 // Search for a gop for fusion from our gopCache
15101 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
15102
15103 if (gopForFusion) {
15104 // in order to provide more accurate timing information about the segment, save
15105 // the number of seconds prepended to the original segment due to GOP fusion
15106 prependedContentDuration = gopForFusion.duration;
15107 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
15108 // new gop at the beginning
15109
15110 gops.byteLength += gopForFusion.byteLength;
15111 gops.nalCount += gopForFusion.nalCount;
15112 gops.pts = gopForFusion.pts;
15113 gops.dts = gopForFusion.dts;
15114 gops.duration += gopForFusion.duration;
15115 } else {
15116 // If we didn't find a candidate gop fall back to keyframe-pulling
15117 gops = frameUtils.extendFirstKeyFrame(gops);
15118 }
15119 } // Trim gops to align with gopsToAlignWith
15120
15121
15122 if (gopsToAlignWith.length) {
15123 var alignedGops;
15124
15125 if (options.alignGopsAtEnd) {
15126 alignedGops = this.alignGopsAtEnd_(gops);
15127 } else {
15128 alignedGops = this.alignGopsAtStart_(gops);
15129 }
15130
15131 if (!alignedGops) {
15132 // save all the nals in the last GOP into the gop cache
15133 this.gopCache_.unshift({
15134 gop: gops.pop(),
15135 pps: track.pps,
15136 sps: track.sps
15137 }); // Keep a maximum of 6 GOPs in the cache
15138
15139 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
15140
15141 nalUnits = []; // return early; no gops can be aligned with the desired gopsToAlignWith
15142
15143 this.resetStream_();
15144 this.trigger('done', 'VideoSegmentStream');
15145 return;
15146 } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
15147 // when recalculated before sending off to CoalesceStream
15148
15149
15150 trackDecodeInfo.clearDtsInfo(track);
15151 gops = alignedGops;
15152 }
15153
15154 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
15155 // samples (that is, frames) in the video data
15156
15157 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
15158
15159 mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
15160 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
15161 this.trigger('processedGopsInfo', gops.map(function (gop) {
15162 return {
15163 pts: gop.pts,
15164 dts: gop.dts,
15165 byteLength: gop.byteLength
15166 };
15167 }));
15168 firstGop = gops[0];
15169 lastGop = gops[gops.length - 1];
15170 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
15171 this.trigger('timingInfo', {
15172 start: gops[0].pts,
15173 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
15174 }); // save all the nals in the last GOP into the gop cache
15175
15176 this.gopCache_.unshift({
15177 gop: gops.pop(),
15178 pps: track.pps,
15179 sps: track.sps
15180 }); // Keep a maximum of 6 GOPs in the cache
15181
15182 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
15183
15184 nalUnits = [];
15185 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
15186 this.trigger('timelineStartInfo', track.timelineStartInfo);
15187 moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
15188 // throwing away hundreds of media segment fragments
15189
15190 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
15191
15192 sequenceNumber++;
15193 boxes.set(moof);
15194 boxes.set(mdat, moof.byteLength);
15195 this.trigger('data', {
15196 track: track,
15197 boxes: boxes
15198 });
15199 this.resetStream_(); // Continue with the flush process now
15200
15201 this.trigger('done', 'VideoSegmentStream');
15202 };
15203
15204 this.reset = function () {
15205 this.resetStream_();
15206 nalUnits = [];
15207 this.gopCache_.length = 0;
15208 gopsToAlignWith.length = 0;
15209 this.trigger('reset');
15210 };
15211
15212 this.resetStream_ = function () {
15213 trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
15214 // for instance, when we are rendition switching
15215
15216 config = undefined;
15217 pps = undefined;
15218 }; // Search for a candidate Gop for gop-fusion from the gop cache and
15219 // return it or return null if no good candidate was found
15220
15221
15222 this.getGopForFusion_ = function (nalUnit) {
15223 var halfSecond = 45000,
15224 // half a second on the 90kHz clock
15225 allowableOverlap = 10000,
15226 // About 3 frames @ 30fps
15227 nearestDistance = Infinity,
15228 dtsDistance,
15229 nearestGopObj,
15230 currentGop,
15231 currentGopObj,
15232 i; // Search for the GOP nearest to the beginning of this nal unit
15233
15234 for (i = 0; i < this.gopCache_.length; i++) {
15235 currentGopObj = this.gopCache_[i];
15236 currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
15237
15238 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
15239 continue;
15240 } // Reject Gops that would require a negative baseMediaDecodeTime
15241
15242
15243 if (currentGop.dts < track.timelineStartInfo.dts) {
15244 continue;
15245 } // The distance between the end of the gop and the start of the nalUnit
15246
15247
15248 dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
15249 // a half-second of the nal unit
15250
15251 if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
15252 // Always use the closest GOP we found if there is more than
15253 // one candidate
15254 if (!nearestGopObj || nearestDistance > dtsDistance) {
15255 nearestGopObj = currentGopObj;
15256 nearestDistance = dtsDistance;
15257 }
15258 }
15259 }
15260
15261 if (nearestGopObj) {
15262 return nearestGopObj.gop;
15263 }
15264
15265 return null;
15266 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
15267 // of gopsToAlignWith starting from the START of the list
15268
15269
15270 this.alignGopsAtStart_ = function (gops) {
15271 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
15272 byteLength = gops.byteLength;
15273 nalCount = gops.nalCount;
15274 duration = gops.duration;
15275 alignIndex = gopIndex = 0;
15276
15277 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
15278 align = gopsToAlignWith[alignIndex];
15279 gop = gops[gopIndex];
15280
15281 if (align.pts === gop.pts) {
15282 break;
15283 }
15284
15285 if (gop.pts > align.pts) {
15286 // this current gop starts after the current gop we want to align on, so increment
15287 // align index
15288 alignIndex++;
15289 continue;
15290 } // current gop starts before the current gop we want to align on. so increment gop
15291 // index
15292
15293
15294 gopIndex++;
15295 byteLength -= gop.byteLength;
15296 nalCount -= gop.nalCount;
15297 duration -= gop.duration;
15298 }
15299
15300 if (gopIndex === 0) {
15301 // no gops to trim
15302 return gops;
15303 }
15304
15305 if (gopIndex === gops.length) {
15306 // all gops trimmed, skip appending all gops
15307 return null;
15308 }
15309
15310 alignedGops = gops.slice(gopIndex);
15311 alignedGops.byteLength = byteLength;
15312 alignedGops.duration = duration;
15313 alignedGops.nalCount = nalCount;
15314 alignedGops.pts = alignedGops[0].pts;
15315 alignedGops.dts = alignedGops[0].dts;
15316 return alignedGops;
15317 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
15318 // of gopsToAlignWith starting from the END of the list
15319
15320
15321 this.alignGopsAtEnd_ = function (gops) {
15322 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
15323 alignIndex = gopsToAlignWith.length - 1;
15324 gopIndex = gops.length - 1;
15325 alignEndIndex = null;
15326 matchFound = false;
15327
15328 while (alignIndex >= 0 && gopIndex >= 0) {
15329 align = gopsToAlignWith[alignIndex];
15330 gop = gops[gopIndex];
15331
15332 if (align.pts === gop.pts) {
15333 matchFound = true;
15334 break;
15335 }
15336
15337 if (align.pts > gop.pts) {
15338 alignIndex--;
15339 continue;
15340 }
15341
15342 if (alignIndex === gopsToAlignWith.length - 1) {
15343 // gop.pts is greater than the last alignment candidate. If no match is found
15344 // by the end of this loop, we still want to append gops that come after this
15345 // point
15346 alignEndIndex = gopIndex;
15347 }
15348
15349 gopIndex--;
15350 }
15351
15352 if (!matchFound && alignEndIndex === null) {
15353 return null;
15354 }
15355
15356 var trimIndex;
15357
15358 if (matchFound) {
15359 trimIndex = gopIndex;
15360 } else {
15361 trimIndex = alignEndIndex;
15362 }
15363
15364 if (trimIndex === 0) {
15365 return gops;
15366 }
15367
15368 var alignedGops = gops.slice(trimIndex);
15369 var metadata = alignedGops.reduce(function (total, gop) {
15370 total.byteLength += gop.byteLength;
15371 total.duration += gop.duration;
15372 total.nalCount += gop.nalCount;
15373 return total;
15374 }, {
15375 byteLength: 0,
15376 duration: 0,
15377 nalCount: 0
15378 });
15379 alignedGops.byteLength = metadata.byteLength;
15380 alignedGops.duration = metadata.duration;
15381 alignedGops.nalCount = metadata.nalCount;
15382 alignedGops.pts = alignedGops[0].pts;
15383 alignedGops.dts = alignedGops[0].dts;
15384 return alignedGops;
15385 };
15386
15387 this.alignGopsWith = function (newGopsToAlignWith) {
15388 gopsToAlignWith = newGopsToAlignWith;
15389 };
15390 };
15391
15392 _VideoSegmentStream.prototype = new stream();
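  // An illustrative, never-invoked sketch of the alignment rule implemented by
  // alignGopsAtStart_ above: gops are trimmed from the front of the segment
  // until one starts at the same pts as a gop from the previous timeline. The
  // pts values are the editor's example.
  function exampleGopAlignment() {
    var previousTimeline = [{ pts: 6000 }, { pts: 9000 }]; // gopsToAlignWith
    var gops = [{ pts: 3000 }, { pts: 6000 }, { pts: 9000 }];
    // the gop at pts 3000 is dropped; alignGopsAtStart_ would return gops.slice(1)
    return { gopsToAlignWith: previousTimeline, aligned: gops.slice(1) };
  }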
15393 /**
15394 * A Stream that can combine multiple streams (i.e. audio & video)
15395 * into a single output segment for MSE. Also supports audio-only
15396 * and video-only streams.
15397 * @param options {object} transmuxer options object
15398 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
15399 * in the source; false to adjust the first segment to start at media timeline start.
15400 */
15401
15402 _CoalesceStream = function CoalesceStream(options, metadataStream) {
15403 // Number of Tracks per output segment
15404 // If greater than 1, we combine multiple
15405 // tracks into a single segment
15406 this.numberOfTracks = 0;
15407 this.metadataStream = metadataStream;
15408 options = options || {};
15409
15410 if (typeof options.remux !== 'undefined') {
15411 this.remuxTracks = !!options.remux;
15412 } else {
15413 this.remuxTracks = true;
15414 }
15415
15416 if (typeof options.keepOriginalTimestamps === 'boolean') {
15417 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
15418 } else {
15419 this.keepOriginalTimestamps = false;
15420 }
15421
15422 this.pendingTracks = [];
15423 this.videoTrack = null;
15424 this.pendingBoxes = [];
15425 this.pendingCaptions = [];
15426 this.pendingMetadata = [];
15427 this.pendingBytes = 0;
15428 this.emittedTracks = 0;
15429
15430 _CoalesceStream.prototype.init.call(this); // Take output from multiple
15431
15432
15433 this.push = function (output) {
15434 // buffer incoming captions until the associated video segment
15435 // finishes
15436 if (output.text) {
15437 return this.pendingCaptions.push(output);
15438 } // buffer incoming id3 tags until the final flush
15439
15440
15441 if (output.frames) {
15442 return this.pendingMetadata.push(output);
15443 } // Add this track to the list of pending tracks and store
15444 // important information required for the construction of
15445 // the final segment
15446
15447
15448 this.pendingTracks.push(output.track);
15449 this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
15450 // We unshift audio and push video because
15451 // as of Chrome 75 when switching from
15452 // one init segment to another if the video
15453 // mdat does not appear after the audio mdat
15454 // only audio will play for the duration of our transmux.
15455
15456 if (output.track.type === 'video') {
15457 this.videoTrack = output.track;
15458 this.pendingBoxes.push(output.boxes);
15459 }
15460
15461 if (output.track.type === 'audio') {
15462 this.audioTrack = output.track;
15463 this.pendingBoxes.unshift(output.boxes);
15464 }
15465 };
15466 };
15467
15468 _CoalesceStream.prototype = new stream();
15469
15470 _CoalesceStream.prototype.flush = function (flushSource) {
15471 var offset = 0,
15472 event = {
15473 captions: [],
15474 captionStreams: {},
15475 metadata: [],
15476 info: {}
15477 },
15478 caption,
15479 id3,
15480 initSegment,
15481 timelineStartPts = 0,
15482 i;
15483
15484 if (this.pendingTracks.length < this.numberOfTracks) {
15485 if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
15486 // Return because we haven't received a flush from a data-generating
15487 // portion of the segment (meaning that we have only received meta-data
15488 // or captions.)
15489 return;
15490 } else if (this.remuxTracks) {
15491 // Return until we have enough tracks from the pipeline to remux (if we
15492 // are remuxing audio and video into a single MP4)
15493 return;
15494 } else if (this.pendingTracks.length === 0) {
15495 // In the case where we receive a flush without any data having been
15496 // received we consider it an emitted track for the purposes of coalescing
15497 // `done` events.
15498 // We do this for the case where there is an audio and video track in the
15499 // segment but no audio data. (seen in several playlists with alternate
15500 // audio tracks and no audio present in the main TS segments.)
15501 this.emittedTracks++;
15502
15503 if (this.emittedTracks >= this.numberOfTracks) {
15504 this.trigger('done');
15505 this.emittedTracks = 0;
15506 }
15507
15508 return;
15509 }
15510 }
15511
15512 if (this.videoTrack) {
15513 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
15514 videoProperties.forEach(function (prop) {
15515 event.info[prop] = this.videoTrack[prop];
15516 }, this);
15517 } else if (this.audioTrack) {
15518 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
15519 audioProperties.forEach(function (prop) {
15520 event.info[prop] = this.audioTrack[prop];
15521 }, this);
15522 }
15523
15524 if (this.videoTrack || this.audioTrack) {
15525 if (this.pendingTracks.length === 1) {
15526 event.type = this.pendingTracks[0].type;
15527 } else {
15528 event.type = 'combined';
15529 }
15530
15531 this.emittedTracks += this.pendingTracks.length;
15532 initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
15533
15534 event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
15535 // and track definitions
15536
15537 event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
15538
15539 event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
15540
15541 for (i = 0; i < this.pendingBoxes.length; i++) {
15542 event.data.set(this.pendingBoxes[i], offset);
15543 offset += this.pendingBoxes[i].byteLength;
15544 } // Translate caption PTS times into second offsets to match the
15545 // video timeline for the segment, and add track info
15546
15547
15548 for (i = 0; i < this.pendingCaptions.length; i++) {
15549 caption = this.pendingCaptions[i];
15550 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
15551 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
15552 event.captionStreams[caption.stream] = true;
15553 event.captions.push(caption);
15554 } // Translate ID3 frame PTS times into second offsets to match the
15555 // video timeline for the segment
15556
15557
15558 for (i = 0; i < this.pendingMetadata.length; i++) {
15559 id3 = this.pendingMetadata[i];
15560 id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
15561 event.metadata.push(id3);
15562 } // We add this to every single emitted segment even though we only need
15563 // it for the first
15564
15565
15566 event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
15567
15568 this.pendingTracks.length = 0;
15569 this.videoTrack = null;
15570 this.pendingBoxes.length = 0;
15571 this.pendingCaptions.length = 0;
15572 this.pendingBytes = 0;
15573 this.pendingMetadata.length = 0; // Emit the built segment
15574 // We include captions and ID3 tags for backwards compatibility,
15575 // ideally we should send only video and audio in the data event
15576
15577 this.trigger('data', event); // Emit each caption to the outside world
15578 // Ideally, this would happen immediately on parsing captions,
15579 // but we need to ensure that video data is sent back first
15580 // so that caption timing can be adjusted to match video timing
15581
15582 for (i = 0; i < event.captions.length; i++) {
15583 caption = event.captions[i];
15584 this.trigger('caption', caption);
15585 } // Emit each id3 tag to the outside world
15586 // Ideally, this would happen immediately on parsing the tag,
15587 // but we need to ensure that video data is sent back first
15588 // so that ID3 frame timing can be adjusted to match video timing
15589
15590
15591 for (i = 0; i < event.metadata.length; i++) {
15592 id3 = event.metadata[i];
15593 this.trigger('id3Frame', id3);
15594 }
15595 } // Only emit `done` if all tracks have been flushed and emitted
15596
15597
15598 if (this.emittedTracks >= this.numberOfTracks) {
15599 this.trigger('done');
15600 this.emittedTracks = 0;
15601 }
15602 };
15603
15604 _CoalesceStream.prototype.setRemux = function (val) {
15605 this.remuxTracks = val;
15606 };
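  // An illustrative, never-invoked usage sketch for the Transmuxer defined
  // below: push MP2T (or ADTS/AAC) bytes, flush, and append the resulting ISO
  // BMFF bytes to an MSE SourceBuffer. `sourceBuffer` and the helper name are
  // the editor's assumptions, not part of this file.
  function exampleTransmuxerUsage(transportStreamBytes, sourceBuffer) {
    var transmuxer = new _Transmuxer({ remux: true });
    transmuxer.on('data', function (segment) {
      // segment.initSegment holds the moov; segment.data holds the moof + mdat
      var muxed = new Uint8Array(segment.initSegment.byteLength + segment.data.byteLength);
      muxed.set(segment.initSegment);
      muxed.set(segment.data, segment.initSegment.byteLength);
      sourceBuffer.appendBuffer(muxed);
    });
    transmuxer.push(transportStreamBytes);
    transmuxer.flush();
  }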
15607 /**
15608 * A Stream that expects MP2T binary data as input and produces
15609 * corresponding media segments, suitable for use with Media Source
15610 * Extension (MSE) implementations that support the ISO BMFF byte
15611 * stream format, like Chrome.
15612 */
15613
15614
15615 _Transmuxer = function Transmuxer(options) {
15616 var self = this,
15617 hasFlushed = true,
15618 videoTrack,
15619 audioTrack;
15620
15621 _Transmuxer.prototype.init.call(this);
15622
15623 options = options || {};
15624 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
15625 this.transmuxPipeline_ = {};
15626
15627 this.setupAacPipeline = function () {
15628 var pipeline = {};
15629 this.transmuxPipeline_ = pipeline;
15630 pipeline.type = 'aac';
15631 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
15632
15633 pipeline.aacStream = new aac();
15634 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
15635 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
15636 pipeline.adtsStream = new adts();
15637 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
15638 pipeline.headOfPipeline = pipeline.aacStream;
15639 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
15640 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
15641 pipeline.metadataStream.on('timestamp', function (frame) {
15642 pipeline.aacStream.setTimestamp(frame.timeStamp);
15643 });
15644 pipeline.aacStream.on('data', function (data) {
15645 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
15646 return;
15647 }
15648
15649 audioTrack = audioTrack || {
15650 timelineStartInfo: {
15651 baseMediaDecodeTime: self.baseMediaDecodeTime
15652 },
15653 codec: 'adts',
15654 type: 'audio'
15655 }; // hook up the audio segment stream to the first track with aac data
15656
15657 pipeline.coalesceStream.numberOfTracks++;
15658 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
15659 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
15660 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
15661
15662 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
15663
15664 self.trigger('trackinfo', {
15665 hasAudio: !!audioTrack,
15666 hasVideo: !!videoTrack
15667 });
15668 }); // Re-emit any data coming from the coalesce stream to the outside world
15669
15670 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
15671
15672 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
15673 addPipelineLogRetriggers(this, pipeline);
15674 };
15675
15676 this.setupTsPipeline = function () {
15677 var pipeline = {};
15678 this.transmuxPipeline_ = pipeline;
15679 pipeline.type = 'ts';
15680 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
15681
15682 pipeline.packetStream = new m2ts_1.TransportPacketStream();
15683 pipeline.parseStream = new m2ts_1.TransportParseStream();
15684 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
15685 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
15686 pipeline.adtsStream = new adts();
15687 pipeline.h264Stream = new H264Stream();
15688 pipeline.captionStream = new m2ts_1.CaptionStream(options);
15689 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
15690 pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
15691
15692 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
15693 // demux the streams
15694
15695 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
15696 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
15697 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
15698
15699 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
15700 pipeline.elementaryStream.on('data', function (data) {
15701 var i;
15702
15703 if (data.type === 'metadata') {
15704 i = data.tracks.length; // scan the tracks listed in the metadata
15705
15706 while (i--) {
15707 if (!videoTrack && data.tracks[i].type === 'video') {
15708 videoTrack = data.tracks[i];
15709 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
15710 } else if (!audioTrack && data.tracks[i].type === 'audio') {
15711 audioTrack = data.tracks[i];
15712 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
15713 }
15714 } // hook up the video segment stream to the first track with h264 data
15715
15716
15717 if (videoTrack && !pipeline.videoSegmentStream) {
15718 pipeline.coalesceStream.numberOfTracks++;
15719 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
15720 pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
15721 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
15722 // When video emits timelineStartInfo data after a flush, we forward that
15723 // info to the AudioSegmentStream, if it exists, because video timeline
15724 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
15725 // because this is a particularly subtle form of timestamp alteration.
15726 if (audioTrack && !options.keepOriginalTimestamps) {
15727 audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
15728 // very earliest DTS we have seen in video because Chrome will
15729 // interpret any video track with a baseMediaDecodeTime that is
15730 // non-zero as a gap.
15731
15732 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
15733 }
15734 });
15735 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
15736 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
15737 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
15738 if (audioTrack) {
15739 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
15740 }
15741 });
15742 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
15743
15744 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
15745 }
15746
15747 if (audioTrack && !pipeline.audioSegmentStream) {
15748 // hook up the audio segment stream to the first track with aac data
15749 pipeline.coalesceStream.numberOfTracks++;
15750 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
15751 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
15752 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
15753 pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
15754
15755 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
15756 } // emit pmt info
15757
15758
15759 self.trigger('trackinfo', {
15760 hasAudio: !!audioTrack,
15761 hasVideo: !!videoTrack
15762 });
15763 }
15764 }); // Re-emit any data coming from the coalesce stream to the outside world
15765
15766 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
15767 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
15768 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
15769 self.trigger('id3Frame', id3Frame);
15770 });
15771 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
15772
15773 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
15774 addPipelineLogRetriggers(this, pipeline);
15775 }; // hook up the segment streams once track metadata is delivered
15776
15777
15778 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
15779 var pipeline = this.transmuxPipeline_;
15780
15781 if (!options.keepOriginalTimestamps) {
15782 this.baseMediaDecodeTime = baseMediaDecodeTime;
15783 }
15784
15785 if (audioTrack) {
15786 audioTrack.timelineStartInfo.dts = undefined;
15787 audioTrack.timelineStartInfo.pts = undefined;
15788 trackDecodeInfo.clearDtsInfo(audioTrack);
15789
15790 if (pipeline.audioTimestampRolloverStream) {
15791 pipeline.audioTimestampRolloverStream.discontinuity();
15792 }
15793 }
15794
15795 if (videoTrack) {
15796 if (pipeline.videoSegmentStream) {
15797 pipeline.videoSegmentStream.gopCache_ = [];
15798 }
15799
15800 videoTrack.timelineStartInfo.dts = undefined;
15801 videoTrack.timelineStartInfo.pts = undefined;
15802 trackDecodeInfo.clearDtsInfo(videoTrack);
15803 pipeline.captionStream.reset();
15804 }
15805
15806 if (pipeline.timestampRolloverStream) {
15807 pipeline.timestampRolloverStream.discontinuity();
15808 }
15809 };
15810
15811 this.setAudioAppendStart = function (timestamp) {
15812 if (audioTrack) {
15813 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
15814 }
15815 };
15816
15817 this.setRemux = function (val) {
15818 var pipeline = this.transmuxPipeline_;
15819 options.remux = val;
15820
15821 if (pipeline && pipeline.coalesceStream) {
15822 pipeline.coalesceStream.setRemux(val);
15823 }
15824 };
15825
15826 this.alignGopsWith = function (gopsToAlignWith) {
15827 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
15828 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
15829 }
15830 };
15831
15832 this.getLogTrigger_ = function (key) {
15833 var self = this;
15834 return function (event) {
15835 event.stream = key;
15836 self.trigger('log', event);
15837 };
15838 }; // feed incoming data to the front of the parsing pipeline
15839
15840
15841 this.push = function (data) {
15842 if (hasFlushed) {
15843 var isAac = isLikelyAacData(data);
15844
15845 if (isAac && this.transmuxPipeline_.type !== 'aac') {
15846 this.setupAacPipeline();
15847 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
15848 this.setupTsPipeline();
15849 }
15850
15851 hasFlushed = false;
15852 }
15853
15854 this.transmuxPipeline_.headOfPipeline.push(data);
15855 }; // flush any buffered data
15856
15857
15858 this.flush = function () {
15859 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
15860
15861 this.transmuxPipeline_.headOfPipeline.flush();
15862 };
15863
15864 this.endTimeline = function () {
15865 this.transmuxPipeline_.headOfPipeline.endTimeline();
15866 };
15867
15868 this.reset = function () {
15869 if (this.transmuxPipeline_.headOfPipeline) {
15870 this.transmuxPipeline_.headOfPipeline.reset();
15871 }
15872 }; // Caption data has to be reset when seeking outside buffered range
15873
15874
15875 this.resetCaptions = function () {
15876 if (this.transmuxPipeline_.captionStream) {
15877 this.transmuxPipeline_.captionStream.reset();
15878 }
15879 };
15880 };
15881
15882 _Transmuxer.prototype = new stream();
15883 var transmuxer = {
15884 Transmuxer: _Transmuxer,
15885 VideoSegmentStream: _VideoSegmentStream,
15886 AudioSegmentStream: _AudioSegmentStream,
15887 AUDIO_PROPERTIES: audioProperties,
15888 VIDEO_PROPERTIES: videoProperties,
15889 // exported for testing
15890 generateSegmentTimingInfo: generateSegmentTimingInfo
15891 };
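// Added annotation (illustrative sketch, not part of the original bundle):
// the expected push/flush lifecycle of the Transmuxer above, wrapped in a
// hypothetical helper that is defined but never invoked here. `bytes` is
// assumed to be a Uint8Array of MP2T (or ADTS/AAC) segment data.
var exampleTransmux = function (bytes, onSegment) {
  var t = new transmuxer.Transmuxer({ baseMediaDecodeTime: 0 });
  // each 'data' event carries ISO BMFF output (initSegment and data) for MSE
  t.on('data', onSegment);
  t.push(bytes);
  t.flush();
};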
15892 /**
15893 * mux.js
15894 *
15895 * Copyright (c) Brightcove
15896 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
15897 */
15898
15899 var toUnsigned$3 = function toUnsigned(value) {
15900 return value >>> 0;
15901 };
15902
15903 var toHexString$1 = function toHexString(value) {
15904 return ('00' + value.toString(16)).slice(-2);
15905 };
15906
15907 var bin = {
15908 toUnsigned: toUnsigned$3,
15909 toHexString: toHexString$1
15910 };
15911
15912 var parseType$1 = function parseType(buffer) {
15913 var result = '';
15914 result += String.fromCharCode(buffer[0]);
15915 result += String.fromCharCode(buffer[1]);
15916 result += String.fromCharCode(buffer[2]);
15917 result += String.fromCharCode(buffer[3]);
15918 return result;
15919 };
15920
15921 var parseType_1 = parseType$1;
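// Added annotation (illustrative, not part of the original bundle): box types
// are four ASCII bytes, so for example:
//
//   parseType_1(new Uint8Array([0x6d, 0x6f, 0x6f, 0x76])); // => 'moov'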
15922 var toUnsigned$2 = bin.toUnsigned;
15923
15924 var findBox = function findBox(data, path) {
15925 var results = [],
15926 i,
15927 size,
15928 type,
15929 end,
15930 subresults;
15931
15932 if (!path.length) {
15933 // short-circuit the search for empty paths
15934 return null;
15935 }
15936
15937 for (i = 0; i < data.byteLength;) {
15938 size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
15939 type = parseType_1(data.subarray(i + 4, i + 8));
15940 end = size > 1 ? i + size : data.byteLength;
15941
15942 if (type === path[0]) {
15943 if (path.length === 1) {
15944 // this is the end of the path and we've found the box we were
15945 // looking for
15946 results.push(data.subarray(i + 8, end));
15947 } else {
15948 // recursively search for the next box along the path
15949 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
15950
15951 if (subresults.length) {
15952 results = results.concat(subresults);
15953 }
15954 }
15955 }
15956
15957 i = end;
15958 } // we've finished searching all of data
15959
15960
15961 return results;
15962 };
15963
15964 var findBox_1 = findBox;
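// Added annotation (illustrative sketch, not part of the original bundle):
// findBox_1 returns the payloads (the bytes after each 8-byte size/type
// header) of every box matching the path. A hypothetical helper, defined but
// never invoked here, that collects the trackId of every track fragment in a
// segment:
var exampleTrackIdsInSegment = function (segmentBytes) {
  return findBox_1(segmentBytes, ['moof', 'traf', 'tfhd']).map(function (tfhd) {
    // the 32-bit trackId sits 4 bytes into the tfhd payload, after the
    // version and flags
    return toUnsigned$2(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]);
  });
};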
15965 var toUnsigned$1 = bin.toUnsigned;
15966
15967 var tfdt = function tfdt(data) {
15968 var result = {
15969 version: data[0],
15970 flags: new Uint8Array(data.subarray(1, 4)),
15971 baseMediaDecodeTime: toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7])
15972 };
15973
15974 if (result.version === 1) {
15975 result.baseMediaDecodeTime *= Math.pow(2, 32);
15976 result.baseMediaDecodeTime += toUnsigned$1(data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11]);
15977 }
15978
15979 return result;
15980 };
15981
15982 var parseTfdt = tfdt;
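// Added annotation (illustrative, not part of the original bundle): a worked
// example of parseTfdt on a version-0 payload. 0x00015f90 === 90000, i.e. one
// second on a 90kHz clock.
//
//   parseTfdt(new Uint8Array([
//     0x00,                   // version 0
//     0x00, 0x00, 0x00,       // flags
//     0x00, 0x01, 0x5f, 0x90  // baseMediaDecodeTime
//   ])).baseMediaDecodeTime;  // => 90000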
15983
15984 var parseSampleFlags = function parseSampleFlags(flags) {
15985 return {
15986 isLeading: (flags[0] & 0x0c) >>> 2,
15987 dependsOn: flags[0] & 0x03,
15988 isDependedOn: (flags[1] & 0xc0) >>> 6,
15989 hasRedundancy: (flags[1] & 0x30) >>> 4,
15990 paddingValue: (flags[1] & 0x0e) >>> 1,
15991 isNonSyncSample: flags[1] & 0x01,
15992 degradationPriority: flags[2] << 8 | flags[3]
15993 };
15994 };
15995
15996 var parseSampleFlags_1 = parseSampleFlags;
15997
15998 var trun = function trun(data) {
15999 var result = {
16000 version: data[0],
16001 flags: new Uint8Array(data.subarray(1, 4)),
16002 samples: []
16003 },
16004 view = new DataView(data.buffer, data.byteOffset, data.byteLength),
16005 // Flag interpretation
16006 dataOffsetPresent = result.flags[2] & 0x01,
16007 // trun flag 0x000001, low flag byte
16008 firstSampleFlagsPresent = result.flags[2] & 0x04,
16009 // trun flag 0x000004, low flag byte
16010 sampleDurationPresent = result.flags[1] & 0x01,
16011 // trun flag 0x000100, middle flag byte
16012 sampleSizePresent = result.flags[1] & 0x02,
16013 // trun flag 0x000200, middle flag byte
16014 sampleFlagsPresent = result.flags[1] & 0x04,
16015 // trun flag 0x000400, middle flag byte
16016 sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
16017 // trun flag 0x000800, middle flag byte
16018 sampleCount = view.getUint32(4),
16019 offset = 8,
16020 sample;
16021
16022 if (dataOffsetPresent) {
16023 // 32 bit signed integer
16024 result.dataOffset = view.getInt32(offset);
16025 offset += 4;
16026 } // Overrides the flags for the first sample only. The order of
16027 // optional values will be: duration, size, compositionTimeOffset
16028
16029
16030 if (firstSampleFlagsPresent && sampleCount) {
16031 sample = {
16032 flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
16033 };
16034 offset += 4;
16035
16036 if (sampleDurationPresent) {
16037 sample.duration = view.getUint32(offset);
16038 offset += 4;
16039 }
16040
16041 if (sampleSizePresent) {
16042 sample.size = view.getUint32(offset);
16043 offset += 4;
16044 }
16045
16046 if (sampleCompositionTimeOffsetPresent) {
16047 if (result.version === 1) {
16048 sample.compositionTimeOffset = view.getInt32(offset);
16049 } else {
16050 sample.compositionTimeOffset = view.getUint32(offset);
16051 }
16052
16053 offset += 4;
16054 }
16055
16056 result.samples.push(sample);
16057 sampleCount--;
16058 }
16059
16060 while (sampleCount--) {
16061 sample = {};
16062
16063 if (sampleDurationPresent) {
16064 sample.duration = view.getUint32(offset);
16065 offset += 4;
16066 }
16067
16068 if (sampleSizePresent) {
16069 sample.size = view.getUint32(offset);
16070 offset += 4;
16071 }
16072
16073 if (sampleFlagsPresent) {
16074 sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
16075 offset += 4;
16076 }
16077
16078 if (sampleCompositionTimeOffsetPresent) {
16079 if (result.version === 1) {
16080 sample.compositionTimeOffset = view.getInt32(offset);
16081 } else {
16082 sample.compositionTimeOffset = view.getUint32(offset);
16083 }
16084
16085 offset += 4;
16086 }
16087
16088 result.samples.push(sample);
16089 }
16090
16091 return result;
16092 };
16093
16094 var parseTrun = trun;
16095
16096 var tfhd = function tfhd(data) {
16097 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
16098 result = {
16099 version: data[0],
16100 flags: new Uint8Array(data.subarray(1, 4)),
16101 trackId: view.getUint32(4)
16102 },
16103 baseDataOffsetPresent = result.flags[2] & 0x01,
16104 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
16105 defaultSampleDurationPresent = result.flags[2] & 0x08,
16106 defaultSampleSizePresent = result.flags[2] & 0x10,
16107 defaultSampleFlagsPresent = result.flags[2] & 0x20,
16108 durationIsEmpty = result.flags[0] & 0x01, // tfhd flag 0x010000 is bit 0x01 of the high flag byte
16109 defaultBaseIsMoof = result.flags[0] & 0x02, // tfhd flag 0x020000 is bit 0x02 of the high flag byte
16110 i;
16111 i = 8;
16112
16113 if (baseDataOffsetPresent) {
16114 i += 4; // truncate top 4 bytes
16115 // FIXME: should we read the full 64 bits?
16116
16117 result.baseDataOffset = view.getUint32(12);
16118 i += 4;
16119 }
16120
16121 if (sampleDescriptionIndexPresent) {
16122 result.sampleDescriptionIndex = view.getUint32(i);
16123 i += 4;
16124 }
16125
16126 if (defaultSampleDurationPresent) {
16127 result.defaultSampleDuration = view.getUint32(i);
16128 i += 4;
16129 }
16130
16131 if (defaultSampleSizePresent) {
16132 result.defaultSampleSize = view.getUint32(i);
16133 i += 4;
16134 }
16135
16136 if (defaultSampleFlagsPresent) {
16137 result.defaultSampleFlags = view.getUint32(i);
16138 }
16139
16140 if (durationIsEmpty) {
16141 result.durationIsEmpty = true;
16142 }
16143
16144 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
16145 result.baseDataOffsetIsMoof = true;
16146 }
16147
16148 return result;
16149 };
16150
16151 var parseTfhd = tfhd;
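// Added annotation (illustrative sketch, not part of the original bundle):
// the defaults parsed from a tfhd fill in whatever fields the trun samples
// omit (see parseSamples below). A hypothetical helper, defined but never
// invoked here, that reads those defaults from a traf payload:
var exampleTfhdDefaults = function (traf) {
  var tfhdBoxes = findBox_1(traf, ['tfhd']); // exactly one tfhd per traf
  var parsed = tfhdBoxes.length ? parseTfhd(tfhdBoxes[0]) : null;
  return parsed ? {
    trackId: parsed.trackId,
    duration: parsed.defaultSampleDuration,
    size: parsed.defaultSampleSize
  } : null;
};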
16152 var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
16153 var CaptionStream = captionStream.CaptionStream;
16154 /**
16155 * Maps an offset in the mdat to a sample based on the size of the samples.
16156 * Assumes that `parseSamples` has been called first.
16157 *
16158 * @param {Number} offset - The offset into the mdat
16159 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
16160 * @return {?Object} The matching sample, or null if no match was found.
16161 *
16162 * @see ISO-BMFF-12/2015, Section 8.8.8
16163 **/
16164
16165 var mapToSample = function mapToSample(offset, samples) {
16166 var approximateOffset = offset;
16167
16168 for (var i = 0; i < samples.length; i++) {
16169 var sample = samples[i];
16170
16171 if (approximateOffset < sample.size) {
16172 return sample;
16173 }
16174
16175 approximateOffset -= sample.size;
16176 }
16177
16178 return null;
16179 };
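// Added annotation (illustrative, not part of the original bundle): a worked
// example of mapToSample. With sample sizes [100, 200], an mdat offset of 150
// falls 50 bytes into the second sample:
//
//   mapToSample(150, [{ size: 100 }, { size: 200 }]); // => { size: 200 }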
16180 /**
16181 * Finds SEI nal units contained in a Media Data Box.
16182 * Assumes that `parseSamples` has been called first.
16183 *
16184 * @param {Uint8Array} avcStream - The bytes of the mdat
16185 * @param {Object[]} samples - The samples parsed out by `parseSamples`
16186 * @param {Number} trackId - The trackId of this video track
16187 * @return {Object[]} seiNals - the parsed SEI NALUs found.
16188 * The contents of the seiNal should match what is expected by
16189 * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
16190 *
16191 * @see ISO-BMFF-12/2015, Section 8.1.1
16192 * @see Rec. ITU-T H.264, 7.3.2.3.1
16193 **/
16194
16195
16196 var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
16197 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
16198 result = {
16199 logs: [],
16200 seiNals: []
16201 },
16202 seiNal,
16203 i,
16204 length,
16205 lastMatchedSample;
16206
16207 for (i = 0; i + 4 < avcStream.length; i += length) {
16208 length = avcView.getUint32(i);
16209 i += 4; // Bail if this doesn't appear to be an H264 stream
16210
16211 if (length <= 0) {
16212 continue;
16213 }
16214
16215 switch (avcStream[i] & 0x1F) {
16216 case 0x06:
16217 var data = avcStream.subarray(i + 1, i + 1 + length);
16218 var matchingSample = mapToSample(i, samples);
16219 seiNal = {
16220 nalUnitType: 'sei_rbsp',
16221 size: length,
16222 data: data,
16223 escapedRBSP: discardEmulationPreventionBytes(data),
16224 trackId: trackId
16225 };
16226
16227 if (matchingSample) {
16228 seiNal.pts = matchingSample.pts;
16229 seiNal.dts = matchingSample.dts;
16230 lastMatchedSample = matchingSample;
16231 } else if (lastMatchedSample) {
16232 // If a matching sample cannot be found, use the last
16233 // sample's values as they should be as close as possible
16234 seiNal.pts = lastMatchedSample.pts;
16235 seiNal.dts = lastMatchedSample.dts;
16236 } else {
16237 result.logs.push({
16238 level: 'warn',
16239 message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
16240 });
16241 break;
16242 }
16243
16244 result.seiNals.push(seiNal);
16245 break;
16246 }
16247 }
16248
16249 return result;
16250 };
16251 /**
16252 * Parses sample information out of Track Run Boxes and calculates
16253 * the absolute presentation and decode timestamps of each sample.
16254 *
16255 * @param {Array<Uint8Array>} truns - The Track Run (trun) boxes to be parsed
16256 * @param {Number} baseMediaDecodeTime - base media decode time from tfdt
16257 * @see ISO-BMFF-12/2015, Section 8.8.12
16258 * @param {Object} tfhd - The parsed Track Fragment Header
16259 * @see inspect.parseTfhd
16260 * @return {Object[]} the parsed samples
16261 *
16262 * @see ISO-BMFF-12/2015, Section 8.8.8
16263 **/
16264
16265
16266 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
16267 var currentDts = baseMediaDecodeTime;
16268 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
16269 var defaultSampleSize = tfhd.defaultSampleSize || 0;
16270 var trackId = tfhd.trackId;
16271 var allSamples = [];
16272 truns.forEach(function (trun) {
16273 // Note: We currently do not parse the sample table as well
16274 // as the trun. It's possible some sources will require this.
16275 // moov > trak > mdia > minf > stbl
16276 var trackRun = parseTrun(trun);
16277 var samples = trackRun.samples;
16278 samples.forEach(function (sample) {
16279 if (sample.duration === undefined) {
16280 sample.duration = defaultSampleDuration;
16281 }
16282
16283 if (sample.size === undefined) {
16284 sample.size = defaultSampleSize;
16285 }
16286
16287 sample.trackId = trackId;
16288 sample.dts = currentDts;
16289
16290 if (sample.compositionTimeOffset === undefined) {
16291 sample.compositionTimeOffset = 0;
16292 }
16293
16294 sample.pts = currentDts + sample.compositionTimeOffset;
16295 currentDts += sample.duration;
16296 });
16297 allSamples = allSamples.concat(samples);
16298 });
16299 return allSamples;
16300 };
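// Added annotation (illustrative, not part of the original bundle): the
// timing arithmetic above, worked by hand. With a baseMediaDecodeTime of
// 90000 and two samples of duration 3000 whose compositionTimeOffsets are 0
// and 1500, parseSamples yields:
//
//   sample 0: dts = 90000, pts = 90000 + 0    = 90000
//   sample 1: dts = 93000, pts = 93000 + 1500 = 94500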
16301 /**
16302 * Parses out caption nals from an FMP4 segment's video tracks.
16303 *
16304 * @param {Uint8Array} segment - The bytes of a single segment
16305 * @param {Number} videoTrackId - The trackId of a video track in the segment
16306 * @return {Object.<Number, Object[]>} A mapping of video trackId to
16307 * a list of seiNals found in that track
16308 **/
16309
16310
16311 var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
16312 // To get the samples
16313 var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
16314
16315 var mdats = findBox_1(segment, ['mdat']);
16316 var captionNals = {};
16317 var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
16318
16319 mdats.forEach(function (mdat, index) {
16320 var matchingTraf = trafs[index];
16321 mdatTrafPairs.push({
16322 mdat: mdat,
16323 traf: matchingTraf
16324 });
16325 });
16326 mdatTrafPairs.forEach(function (pair) {
16327 var mdat = pair.mdat;
16328 var traf = pair.traf;
16329 var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
16330
16331 var headerInfo = parseTfhd(tfhd[0]);
16332 var trackId = headerInfo.trackId;
16333 var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
16334
16335 var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
16336 var truns = findBox_1(traf, ['trun']);
16337 var samples;
16338 var result; // Only parse video data for the chosen video track
16339
16340 if (videoTrackId === trackId && truns.length > 0) {
16341 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
16342 result = findSeiNals(mdat, samples, trackId);
16343
16344 if (!captionNals[trackId]) {
16345 captionNals[trackId] = {
16346 seiNals: [],
16347 logs: []
16348 };
16349 }
16350
16351 captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
16352 captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
16353 }
16354 });
16355 return captionNals;
16356 };
16357 /**
16358 * Parses out inband captions from an MP4 container and returns
16359 * caption objects that can be used by WebVTT and the TextTrack API.
16360 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
16361 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
16362 * Assumes that `probe.videoTrackIds` and `probe.timescale` have been called first
16363 *
16364 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
16365 * @param {Number} trackId - The id of the video track to parse
16366 * @param {Number} timescale - The timescale for the video track from the init segment
16367 *
16368 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
16369 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
16370 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
16371 * @return {String} parsedCaptions[].text - The visible content of the caption
16372 **/
16373
16374
16375 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
16376 var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
16377
16378 if (trackId === null) {
16379 return null;
16380 }
16381
16382 captionNals = parseCaptionNals(segment, trackId);
16383 var trackNals = captionNals[trackId] || {};
16384 return {
16385 seiNals: trackNals.seiNals,
16386 logs: trackNals.logs,
16387 timescale: timescale
16388 };
16389 };
16390 /**
16391 * Converts SEI NALUs into captions that can be used by video.js
16392 **/
16393
16394
16395 var CaptionParser = function CaptionParser() {
16396 var isInitialized = false;
16397 var captionStream; // Stores segments seen before trackId and timescale are set
16398
16399 var segmentCache; // Stores video track ID of the track being parsed
16400
16401 var trackId; // Stores the timescale of the track being parsed
16402
16403 var timescale; // Stores captions parsed so far
16404
16405 var parsedCaptions; // Stores whether we are receiving partial data or not
16406
16407 var parsingPartial;
16408 /**
16409 * A method to indicate whether a CaptionParser has been initialized
16410 * @returns {Boolean}
16411 **/
16412
16413 this.isInitialized = function () {
16414 return isInitialized;
16415 };
16416 /**
16417 * Initializes the underlying CaptionStream, SEI NAL parsing
16418 * and management, and caption collection
16419 **/
16420
16421
16422 this.init = function (options) {
16423 captionStream = new CaptionStream();
16424 isInitialized = true;
16425 parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
16426
16427 captionStream.on('data', function (event) {
16428 // Convert to seconds in the source's timescale
16429 event.startTime = event.startPts / timescale;
16430 event.endTime = event.endPts / timescale;
16431 parsedCaptions.captions.push(event);
16432 parsedCaptions.captionStreams[event.stream] = true;
16433 });
16434 captionStream.on('log', function (log) {
16435 parsedCaptions.logs.push(log);
16436 });
16437 };
16438 /**
16439 * Determines if a new video track will be selected
16440 * or if the timescale changed
16441 * @return {Boolean}
16442 **/
16443
16444
16445 this.isNewInit = function (videoTrackIds, timescales) {
16446 if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
16447 return false;
16448 }
16449
16450 return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
16451 };
16452 /**
16453 * Parses out SEI captions and interacts with underlying
16454 * CaptionStream to return dispatched captions
16455 *
16456 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
16457 * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
16458 * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
16459 * @see parseEmbeddedCaptions
16460 * @see m2ts/caption-stream.js
16461 **/
16462
16463
16464 this.parse = function (segment, videoTrackIds, timescales) {
16465 var parsedData;
16466
16467 if (!this.isInitialized()) {
16468 return null; // This is not likely to be a video segment
16469 } else if (!videoTrackIds || !timescales) {
16470 return null;
16471 } else if (this.isNewInit(videoTrackIds, timescales)) {
16472 // Use the first video track only as there is no
16473 // mechanism to switch to other video tracks
16474 trackId = videoTrackIds[0];
16475 timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
16476 // data until we have one.
16477 // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
16478 } else if (trackId === null || !timescale) {
16479 segmentCache.push(segment);
16480 return null;
16481 } // Now that a timescale and trackId is set, parse cached segments
16482
16483
16484 while (segmentCache.length > 0) {
16485 var cachedSegment = segmentCache.shift();
16486 this.parse(cachedSegment, videoTrackIds, timescales);
16487 }
16488
16489 parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
16490
16491 if (parsedData && parsedData.logs) {
16492 parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
16493 }
16494
16495 if (parsedData === null || !parsedData.seiNals) {
16496 if (parsedCaptions.logs.length) {
16497 return {
16498 logs: parsedCaptions.logs,
16499 captions: [],
16500 captionStreams: []
16501 };
16502 }
16503
16504 return null;
16505 }
16506
16507 this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
16508
16509 this.flushStream();
16510 return parsedCaptions;
16511 };
16512 /**
16513 * Pushes SEI NALUs onto CaptionStream
16514 * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
16515 * Assumes that `parseCaptionNals` has been called first
16516 * @see m2ts/caption-stream.js
16517 **/
16518
16519
16520 this.pushNals = function (nals) {
16521 if (!this.isInitialized() || !nals || nals.length === 0) {
16522 return null;
16523 }
16524
16525 nals.forEach(function (nal) {
16526 captionStream.push(nal);
16527 });
16528 };
16529 /**
16530 * Flushes underlying CaptionStream to dispatch processed, displayable captions
16531 * @see m2ts/caption-stream.js
16532 **/
16533
16534
16535 this.flushStream = function () {
16536 if (!this.isInitialized()) {
16537 return null;
16538 }
16539
16540 if (!parsingPartial) {
16541 captionStream.flush();
16542 } else {
16543 captionStream.partialFlush();
16544 }
16545 };
16546 /**
16547 * Reset caption buckets for new data
16548 **/
16549
16550
16551 this.clearParsedCaptions = function () {
16552 parsedCaptions.captions = [];
16553 parsedCaptions.captionStreams = {};
16554 parsedCaptions.logs = [];
16555 };
16556 /**
16557 * Resets underlying CaptionStream
16558 * @see m2ts/caption-stream.js
16559 **/
16560
16561
16562 this.resetCaptionStream = function () {
16563 if (!this.isInitialized()) {
16564 return null;
16565 }
16566
16567 captionStream.reset();
16568 };
16569 /**
16570 * Convenience method to clear all captions flushed from the
16571 * CaptionStream and still being parsed
16572 * @see m2ts/caption-stream.js
16573 **/
16574
16575
16576 this.clearAllCaptions = function () {
16577 this.clearParsedCaptions();
16578 this.resetCaptionStream();
16579 };
16580 /**
16581 * Reset caption parser
16582 **/
16583
16584
16585 this.reset = function () {
16586 segmentCache = [];
16587 trackId = null;
16588 timescale = null;
16589
16590 if (!parsedCaptions) {
16591 parsedCaptions = {
16592 captions: [],
16593 // CC1, CC2, CC3, CC4
16594 captionStreams: {},
16595 logs: []
16596 };
16597 } else {
16598 this.clearParsedCaptions();
16599 }
16600
16601 this.resetCaptionStream();
16602 };
16603
16604 this.reset();
16605 };
16606
16607 var captionParser = CaptionParser;
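// Added annotation (illustrative sketch, not part of the original bundle):
// the expected calling sequence for CaptionParser, wrapped in a hypothetical
// helper that is defined but never invoked here. `videoTrackIds` and
// `timescales` are assumed to have been probed from the init segment;
// `mediaSegment` is a Uint8Array of one fMP4 media segment.
var exampleParseCaptions = function (mediaSegment, videoTrackIds, timescales) {
  var parser = new captionParser();
  parser.init();
  // returns { captions, captionStreams, logs } or null
  return parser.parse(mediaSegment, videoTrackIds, timescales);
};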
16608 var toUnsigned = bin.toUnsigned;
16609 var toHexString = bin.toHexString;
16610 var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
16611 /**
16612 * Parses an MP4 initialization segment and extracts the timescale
16613 * values for any declared tracks. Timescale values indicate the
16614 * number of clock ticks per second to assume for time-based values
16615 * elsewhere in the MP4.
16616 *
16617 * To determine the start time of an MP4, you need two pieces of
16618 * information: the timescale unit and the earliest base media decode
16619 * time. Multiple timescales can be specified within an MP4 but the
16620 * base media decode time is always expressed in the timescale from
16621 * the media header box for the track:
16622 * ```
16623 * moov > trak > mdia > mdhd.timescale
16624 * ```
16625 * @param init {Uint8Array} the bytes of the init segment
16626 * @return {object} a hash of track ids to timescale values or null if
16627 * the init segment is malformed.
16628 */
16629
16630 timescale = function timescale(init) {
16631 var result = {},
16632 traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale
16633
16634 return traks.reduce(function (result, trak) {
16635 var tkhd, version, index, id, mdhd;
16636 tkhd = findBox_1(trak, ['tkhd'])[0];
16637
16638 if (!tkhd) {
16639 return null;
16640 }
16641
16642 version = tkhd[0];
16643 index = version === 0 ? 12 : 20;
16644 id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
16645 mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
16646
16647 if (!mdhd) {
16648 return null;
16649 }
16650
16651 version = mdhd[0];
16652 index = version === 0 ? 12 : 20;
16653 result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
16654 return result;
16655 }, result);
16656 };
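// Added annotation (illustrative, not part of the original bundle): the shape
// of the value produced above. For an init segment declaring a 90kHz video
// track with id 1 and a 48kHz audio track with id 2:
//
//   timescale(initSegmentBytes); // => { 1: 90000, 2: 48000 }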
16657 /**
16658 * Determine the base media decode start time, in seconds, for an MP4
16659 * fragment. If multiple fragments are specified, the earliest time is
16660 * returned.
16661 *
16662 * The base media decode time can be parsed from track fragment
16663 * metadata:
16664 * ```
16665 * moof > traf > tfdt.baseMediaDecodeTime
16666 * ```
16667 * It requires the timescale value from the mdhd to interpret.
16668 *
16669 * @param timescale {object} a hash of track ids to timescale values.
* @param fragment {Uint8Array} the bytes of a media fragment
16670 * @return {number} the earliest base media decode start time for the
16671 * fragment, in seconds
16672 */
16673
16674
16675 startTime = function startTime(timescale, fragment) {
16676 var trafs, baseTimes, result; // we need info from two children of each track fragment box
16677
16678 trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track
16679
16680 baseTimes = [].concat.apply([], trafs.map(function (traf) {
16681 return findBox_1(traf, ['tfhd']).map(function (tfhd) {
16682 var id, scale, baseTime; // get the track id from the tfhd
16683
16684 id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified
16685
16686 scale = timescale[id] || 90e3; // get the base media decode time from the tfdt
16687
16688 baseTime = findBox_1(traf, ['tfdt']).map(function (tfdt) {
16689 var version, result;
16690 version = tfdt[0];
16691 result = toUnsigned(tfdt[4] << 24 | tfdt[5] << 16 | tfdt[6] << 8 | tfdt[7]);
16692
16693 if (version === 1) {
16694 result *= Math.pow(2, 32);
16695 result += toUnsigned(tfdt[8] << 24 | tfdt[9] << 16 | tfdt[10] << 8 | tfdt[11]);
16696 }
16697
16698 return result;
16699 })[0];
16700 baseTime = typeof baseTime === 'number' && !isNaN(baseTime) ? baseTime : Infinity; // convert base time to seconds
16701
16702 return baseTime / scale;
16703 });
16704 })); // return the minimum
16705
16706 result = Math.min.apply(null, baseTimes);
16707 return isFinite(result) ? result : 0;
16708 };
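// Added annotation (illustrative, not part of the original bundle): the
// conversion above, worked by hand. A track fragment whose tfdt reads 180000
// on a 90000 ticks-per-second clock starts at 180000 / 90000 = 2 seconds;
// the minimum across all track fragments is returned.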
16709 /**
16710 * Determine the composition start, in seconds, for an MP4
16711 * fragment.
16712 *
16713 * The composition start time of a fragment can be calculated using the base
16714 * media decode time, composition time offset, and timescale, as follows:
16715 *
16716 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
16717 *
16718 * All of the aforementioned information is contained within a media fragment's
16719 * `traf` box, except for timescale info, which comes from the initialization
16720 * segment, so a track id (also contained within a `traf`) is necessary to
16721 * associate it with a timescale
16722 *
16723 *
16724 * @param timescales {object} - a hash of track ids to timescale values.
16725 * @param fragment {Uint8Array} - the bytes of a media segment
16726 * @return {number} the composition start time for the fragment, in seconds
16727 **/
16728
16729
16730 compositionStartTime = function compositionStartTime(timescales, fragment) {
16731 var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
16732 var baseMediaDecodeTime = 0;
16733 var compositionTimeOffset = 0;
16734 var trackId;
16735
16736 if (trafBoxes && trafBoxes.length) {
16737 // The spec states that track run samples contained within a `traf` box are contiguous, but
16738 // it does not explicitly state whether the `traf` boxes themselves are contiguous.
16739 // We will assume that they are, so we only need the first to calculate start time.
16740 var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
16741 var trun = findBox_1(trafBoxes[0], ['trun'])[0];
16742 var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];
16743
16744 if (tfhd) {
16745 var parsedTfhd = parseTfhd(tfhd);
16746 trackId = parsedTfhd.trackId;
16747 }
16748
16749 if (tfdt) {
16750 var parsedTfdt = parseTfdt(tfdt);
16751 baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
16752 }
16753
16754 if (trun) {
16755 var parsedTrun = parseTrun(trun);
16756
16757 if (parsedTrun.samples && parsedTrun.samples.length) {
16758 compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
16759 }
16760 }
16761 } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
16762 // specified.
16763
16764
16765 var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds
16766
16767 return (baseMediaDecodeTime + compositionTimeOffset) / timescale;
16768 };
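// Added annotation (illustrative, not part of the original bundle): the
// formula above, worked by hand. With baseMediaDecodeTime = 90000, a first
// sample compositionTimeOffset = 3000, and a 90kHz timescale:
//
//   compositionStartTime = (90000 + 3000) / 90000 = 1.0333... seconds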
16769 /**
16770 * Find the trackIds of the video tracks in this source.
16771 * Found by parsing the Handler Reference and Track Header Boxes:
16772 * moov > trak > mdia > hdlr
16773 * moov > trak > tkhd
16774 *
16775 * @param {Uint8Array} init - The bytes of the init segment for this source
16776 * @return {Number[]} A list of trackIds
16777 *
16778 * @see ISO-BMFF-12/2015, Section 8.4.3
16779 **/
16780
16781
16782 getVideoTrackIds = function getVideoTrackIds(init) {
16783 var traks = findBox_1(init, ['moov', 'trak']);
16784 var videoTrackIds = [];
16785 traks.forEach(function (trak) {
16786 var hdlrs = findBox_1(trak, ['mdia', 'hdlr']);
16787 var tkhds = findBox_1(trak, ['tkhd']);
16788 hdlrs.forEach(function (hdlr, index) {
16789 var handlerType = parseType_1(hdlr.subarray(8, 12));
16790 var tkhd = tkhds[index];
16791 var view;
16792 var version;
16793 var trackId;
16794
16795 if (handlerType === 'vide') {
16796 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
16797 version = view.getUint8(0);
16798 trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
16799 videoTrackIds.push(trackId);
16800 }
16801 });
16802 });
16803 return videoTrackIds;
16804 };
16805
16806 getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
16807 // mdhd is a FullBox, meaning it will have its own version as the first byte
16808 var version = mdhd[0];
16809 var index = version === 0 ? 12 : 20;
16810 return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
16811 };
16812 /**
16813 * Get all the video, audio, and hint tracks from a non-fragmented
16814 * MP4 segment
16815 */
16816
16817
16818 getTracks = function getTracks(init) {
16819 var traks = findBox_1(init, ['moov', 'trak']);
16820 var tracks = [];
16821 traks.forEach(function (trak) {
16822 var track = {};
16823 var tkhd = findBox_1(trak, ['tkhd'])[0];
16824 var view, tkhdVersion; // id
16825
16826 if (tkhd) {
16827 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
16828 tkhdVersion = view.getUint8(0);
16829 track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
16830 }
16831
16832 var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type
16833
16834 if (hdlr) {
16835 var type = parseType_1(hdlr.subarray(8, 12));
16836
16837 if (type === 'vide') {
16838 track.type = 'video';
16839 } else if (type === 'soun') {
16840 track.type = 'audio';
16841 } else {
16842 track.type = type;
16843 }
16844 } // codec
16845
16846
16847 var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
16848
16849 if (stsd) {
16850 var sampleDescriptions = stsd.subarray(8); // gives the codec type string
16851
16852 track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
16853 var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
16854 var codecConfig, codecConfigType;
16855
16856 if (codecBox) {
16857 // https://tools.ietf.org/html/rfc6381#section-3.3
16858 if (/^[asm]vc[1-9]$/i.test(track.codec)) {
16859 // we don't need anything but the "config" parameter of the
16860 // avc1 codecBox
16861 codecConfig = codecBox.subarray(78);
16862 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
16863
16864 if (codecConfigType === 'avcC' && codecConfig.length > 11) {
16865 track.codec += '.'; // left padded with zeroes for single digit hex
16866 // profile idc
16867
16868 track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags
16869
16870 track.codec += toHexString(codecConfig[10]); // level idc
16871
16872 track.codec += toHexString(codecConfig[11]);
16873 } else {
16874 // TODO: show a warning that we couldn't parse the codec
16875 // and are using the default
16876 track.codec = 'avc1.4d400d';
16877 }
16878 } else if (/^mp4[a,v]$/i.test(track.codec)) {
16879 // we do not need anything but the streamDescriptor of the mp4a codecBox
16880 codecConfig = codecBox.subarray(28);
16881 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
16882
16883 if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
16884 track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit
16885
16886 track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
16887 } else {
16888 // TODO: show a warning that we couldn't parse the codec
16889 // and are using the default
16890 track.codec = 'mp4a.40.2';
16891 }
16892 } else {
16893 // flac, opus, etc
16894 track.codec = track.codec.toLowerCase();
16895 }
16896 }
16897 }
16898
16899 var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
16900
16901 if (mdhd) {
16902 track.timescale = getTimescaleFromMediaHeader(mdhd);
16903 }
16904
16905 tracks.push(track);
16906 });
16907 return tracks;
16908 };
16909
16910 var probe$2 = {
16911 // export mp4 inspector's findBox and parseType for backwards compatibility
16912 findBox: findBox_1,
16913 parseType: parseType_1,
16914 timescale: timescale,
16915 startTime: startTime,
16916 compositionStartTime: compositionStartTime,
16917 videoTrackIds: getVideoTrackIds,
16918 tracks: getTracks,
16919 getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
16920 };
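// Added annotation (illustrative sketch, not part of the original bundle):
// typical use of the probe, wrapped in a hypothetical helper that is defined
// but never invoked here: read per-track timescales from the init segment,
// then compute a media fragment's start time in seconds.
var exampleFragmentStartTime = function (initBytes, fragmentBytes) {
  var timescales = probe$2.timescale(initBytes);
  return probe$2.startTime(timescales, fragmentBytes);
};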
16921
16922 var parsePid = function parsePid(packet) {
16923 var pid = packet[1] & 0x1f;
16924 pid <<= 8;
16925 pid |= packet[2];
16926 return pid;
16927 };
16928
16929 var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
16930 return !!(packet[1] & 0x40);
16931 };
16932
16933 var parseAdaptionField = function parseAdaptionField(packet) {
16934 var offset = 0; // if an adaptation field is present, its length is specified by the
16935 // fifth byte of the TS packet header. The adaptation field is
16936 // used to add stuffing to PES packets that don't fill a complete
16937 // TS packet, and to specify some forms of timing and control data
16938 // that we do not currently use.
16939
16940 if ((packet[3] & 0x30) >>> 4 > 0x01) {
16941 offset += packet[4] + 1;
16942 }
16943
16944 return offset;
16945 };
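// Added annotation (illustrative, not part of the original bundle): worked
// examples of the header helpers above, for a TS packet whose second and
// third header bytes are 0x41 and 0x00:
//
//   pid  = (0x41 & 0x1f) << 8 | 0x00; // => 256
//   pusi = !!(0x41 & 0x40);           // => true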
16946
16947 var parseType = function parseType(packet, pmtPid) {
16948 var pid = parsePid(packet);
16949
16950 if (pid === 0) {
16951 return 'pat';
16952 } else if (pid === pmtPid) {
16953 return 'pmt';
16954 } else if (pmtPid) {
16955 return 'pes';
16956 }
16957
16958 return null;
16959 };
16960
16961 var parsePat = function parsePat(packet) {
16962 var pusi = parsePayloadUnitStartIndicator(packet);
16963 var offset = 4 + parseAdaptionField(packet);
16964
16965 if (pusi) {
16966 offset += packet[offset] + 1;
16967 }
16968
16969 return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
16970 };
16971
16972 var parsePmt = function parsePmt(packet) {
16973 var programMapTable = {};
16974 var pusi = parsePayloadUnitStartIndicator(packet);
16975 var payloadOffset = 4 + parseAdaptionField(packet);
16976
16977 if (pusi) {
16978 payloadOffset += packet[payloadOffset] + 1;
16979 } // PMTs can be sent ahead of the time when they should actually
16980 // take effect. We don't believe this should ever be the case
16981 // for HLS but we'll ignore "forward" PMT declarations if we see
16982 // them. Future PMT declarations have the current_next_indicator
16983 // set to zero.
16984
16985
16986 if (!(packet[payloadOffset + 5] & 0x01)) {
16987 return;
16988 }
16989
16990 var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section
16991
16992 sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
16993 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
16994 // long the program info descriptors are
16995
16996 programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table
16997
16998 var offset = 12 + programInfoLength;
16999
17000 while (offset < tableEnd) {
17001 var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type
17002
17003 programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
17004 // skip past the elementary stream descriptors, if present
17005
17006 offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
17007 }
17008
17009 return programMapTable;
17010 };
17011
17012 var parsePesType = function parsePesType(packet, programMapTable) {
17013 var pid = parsePid(packet);
17014 var type = programMapTable[pid];
17015
17016 switch (type) {
17017 case streamTypes.H264_STREAM_TYPE:
17018 return 'video';
17019
17020 case streamTypes.ADTS_STREAM_TYPE:
17021 return 'audio';
17022
17023 case streamTypes.METADATA_STREAM_TYPE:
17024 return 'timed-metadata';
17025
17026 default:
17027 return null;
17028 }
17029 };
17030
17031 var parsePesTime = function parsePesTime(packet) {
17032 var pusi = parsePayloadUnitStartIndicator(packet);
17033
17034 if (!pusi) {
17035 return null;
17036 }
17037
17038 var offset = 4 + parseAdaptionField(packet);
17039
17040 if (offset >= packet.byteLength) {
17041 // From the H.222.0 MPEG-TS spec
17042 // "For transport stream packets carrying PES packets, stuffing is needed when there
17043 // is insufficient PES packet data to completely fill the transport stream packet
17044 // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
17045 // the sum of the lengths of the data elements in it, so that the payload bytes
17046 // remaining after the adaptation field exactly accommodates the available PES packet
17047 // data."
17048 //
17049 // If the offset is >= the length of the packet, then the packet contains no data
17050 // and instead is just adaptation field stuffing bytes
17051 return null;
17052 }
17053
17054 var pes = null;
17055 var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
17056 // and a DTS value. Determine what combination of values is
17057 // available to work with.
17058
17059 ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
17060 // performs all bitwise operations on 32-bit integers, but supports a much
17061 // greater range of integers (up to 2^53 - 1) using standard mathematical
17062 // operations.
17063 // We construct a 31-bit value using bitwise operators over the 31
17064 // most significant bits and then multiply by 4 (equal to a left-shift
17065 // of 2) before we add the final 2 least significant bits of the
17066 // timestamp (equal to an OR.)
17067
17068 if (ptsDtsFlags & 0xC0) {
17069 pes = {}; // the PTS and DTS are not written out directly. For information
17070 // on how they are encoded, see
17071 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
17072
17073 pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
17074 pes.pts *= 4; // Left shift by 2
17075
17076 pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
17077
17078 pes.dts = pes.pts;
17079
17080 if (ptsDtsFlags & 0x40) {
17081 pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
17082 pes.dts *= 4; // Left shift by 2
17083
17084 pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
17085 }
17086 }
17087
17088 return pes;
17089 };
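// Added annotation (illustrative, not part of the original bundle): the
// 33-bit reconstruction above, worked by hand. If the 31 most significant
// bits assemble to 0x40000000 and the final two bits are 01, then:
//
//   pts = 0x40000000 * 4 + 1; // => 4294967297, which overflows 32-bit
//                             //    bitwise math but not a JS Number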
17090
17091 var parseNalUnitType = function parseNalUnitType(type) {
17092 switch (type) {
17093 case 0x05:
17094 return 'slice_layer_without_partitioning_rbsp_idr';
17095
17096 case 0x06:
17097 return 'sei_rbsp';
17098
17099 case 0x07:
17100 return 'seq_parameter_set_rbsp';
17101
17102 case 0x08:
17103 return 'pic_parameter_set_rbsp';
17104
17105 case 0x09:
17106 return 'access_unit_delimiter_rbsp';
17107
17108 default:
17109 return null;
17110 }
17111 };
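// Added annotation (illustrative, not part of the original bundle): a NAL
// unit header carries its type in the low five bits, so a header byte of
// 0x65 maps to an IDR slice:
//
//   parseNalUnitType(0x65 & 0x1f); // => 'slice_layer_without_partitioning_rbsp_idr'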
17112
17113 var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
17114 var offset = 4 + parseAdaptionField(packet);
17115 var frameBuffer = packet.subarray(offset);
17116 var frameI = 0;
17117 var frameSyncPoint = 0;
17118 var foundKeyFrame = false;
17119 var nalType; // advance the sync point to a NAL start, if necessary
17120
17121 for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
17122 if (frameBuffer[frameSyncPoint + 2] === 1) {
17123 // the sync point is properly aligned
17124 frameI = frameSyncPoint + 5;
17125 break;
17126 }
17127 }
17128
17129 while (frameI < frameBuffer.byteLength) {
17130 // look at the current byte to determine if we've hit the end of
17131 // a NAL unit boundary
17132 switch (frameBuffer[frameI]) {
17133 case 0:
17134 // skip past non-sync sequences
17135 if (frameBuffer[frameI - 1] !== 0) {
17136 frameI += 2;
17137 break;
17138 } else if (frameBuffer[frameI - 2] !== 0) {
17139 frameI++;
17140 break;
17141 }
17142
17143 if (frameSyncPoint + 3 !== frameI - 2) {
17144 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17145
17146 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17147 foundKeyFrame = true;
17148 }
17149 } // drop trailing zeroes
17150
17151
17152 do {
17153 frameI++;
17154 } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
17155
17156 frameSyncPoint = frameI - 2;
17157 frameI += 3;
17158 break;
17159
17160 case 1:
17161 // skip past non-sync sequences
17162 if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
17163 frameI += 3;
17164 break;
17165 }
17166
17167 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17168
17169 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17170 foundKeyFrame = true;
17171 }
17172
17173 frameSyncPoint = frameI - 2;
17174 frameI += 3;
17175 break;
17176
17177 default:
17178 // the current byte isn't a one or zero, so it cannot be part
17179 // of a sync sequence
17180 frameI += 3;
17181 break;
17182 }
17183 }
17184
17185 frameBuffer = frameBuffer.subarray(frameSyncPoint);
17186 frameI -= frameSyncPoint;
17187 frameSyncPoint = 0; // parse the final nal
17188
17189 if (frameBuffer && frameBuffer.byteLength > 3) {
17190 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17191
17192 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17193 foundKeyFrame = true;
17194 }
17195 }
17196
17197 return foundKeyFrame;
17198 };
17199
17200 var probe$1 = {
17201 parseType: parseType,
17202 parsePat: parsePat,
17203 parsePmt: parsePmt,
17204 parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
17205 parsePesType: parsePesType,
17206 parsePesTime: parsePesTime,
17207 videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
17208 };
17209 var handleRollover = timestampRolloverStream.handleRollover;
17210 var probe = {};
17211 probe.ts = probe$1;
17212 probe.aac = utils;
17213 var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
17214 var MP2T_PACKET_LENGTH = 188,
17215 // bytes
17216 SYNC_BYTE = 0x47;
17217 /**
17218 * walks through segment data looking for pat and pmt packets to parse out
17219 * program map table information
17220 */
17221
17222 var parsePsi_ = function parsePsi_(bytes, pmt) {
17223 var startIndex = 0,
17224 endIndex = MP2T_PACKET_LENGTH,
17225 packet,
17226 type;
17227
17228 while (endIndex < bytes.byteLength) {
17229 // Look for a pair of start and end sync bytes in the data.
17230 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
17231 // We found a packet
17232 packet = bytes.subarray(startIndex, endIndex);
17233 type = probe.ts.parseType(packet, pmt.pid);
17234
17235 switch (type) {
17236 case 'pat':
17237 pmt.pid = probe.ts.parsePat(packet);
17238 break;
17239
17240 case 'pmt':
17241 var table = probe.ts.parsePmt(packet);
17242 pmt.table = pmt.table || {};
17243 Object.keys(table).forEach(function (key) {
17244 pmt.table[key] = table[key];
17245 });
17246 break;
17247 }
17248
17249 startIndex += MP2T_PACKET_LENGTH;
17250 endIndex += MP2T_PACKET_LENGTH;
17251 continue;
17252 } // If we get here, we have somehow become de-synchronized and we need to step
17253 // forward one byte at a time until we find a pair of sync bytes that denote
17254 // a packet
17255
17256
17257 startIndex++;
17258 endIndex++;
17259 }
17260 };
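// Added annotation (illustrative sketch, not part of the original bundle):
// parsePsi_ fills in the `pmt` object it is handed. A hypothetical helper,
// defined but never invoked here, that walks a segment and returns the
// resulting pid-to-stream-type table (e.g. 0x1b for H264, 0x0f for ADTS):
var examplePmtTable = function (segmentBytes) {
  var pmt = {};
  parsePsi_(segmentBytes, pmt);
  return pmt.table;
};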
17261 /**
17262 * walks through the segment data from the start and end to get timing information
17263 * for the first and last audio pes packets
17264 */
17265
17266
17267 var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
17268 var startIndex = 0,
17269 endIndex = MP2T_PACKET_LENGTH,
17270 packet,
17271 type,
17272 pesType,
17273 pusi,
17274 parsed;
17275 var endLoop = false; // Start walking from start of segment to get first audio packet
17276
17277 while (endIndex <= bytes.byteLength) {
17278 // Look for a pair of start and end sync bytes in the data.
17279 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
17280 // We found a packet
17281 packet = bytes.subarray(startIndex, endIndex);
17282 type = probe.ts.parseType(packet, pmt.pid);
17283
17284 switch (type) {
17285 case 'pes':
17286 pesType = probe.ts.parsePesType(packet, pmt.table);
17287 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
17288
17289 if (pesType === 'audio' && pusi) {
17290 parsed = probe.ts.parsePesTime(packet);
17291
17292 if (parsed) {
17293 parsed.type = 'audio';
17294 result.audio.push(parsed);
17295 endLoop = true;
17296 }
17297 }
17298
17299 break;
17300 }
17301
17302 if (endLoop) {
17303 break;
17304 }
17305
17306 startIndex += MP2T_PACKET_LENGTH;
17307 endIndex += MP2T_PACKET_LENGTH;
17308 continue;
17309 } // If we get here, we have somehow become de-synchronized and we need to step
17310 // forward one byte at a time until we find a pair of sync bytes that denote
17311 // a packet
17312
17313
17314 startIndex++;
17315 endIndex++;
17316 } // Start walking from end of segment to get last audio packet
17317
17318
17319 endIndex = bytes.byteLength;
17320 startIndex = endIndex - MP2T_PACKET_LENGTH;
17321 endLoop = false;
17322
17323 while (startIndex >= 0) {
17324 // Look for a pair of start and end sync bytes in the data.
17325 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
17326 // We found a packet
17327 packet = bytes.subarray(startIndex, endIndex);
17328 type = probe.ts.parseType(packet, pmt.pid);
17329
17330 switch (type) {
17331 case 'pes':
17332 pesType = probe.ts.parsePesType(packet, pmt.table);
17333 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
17334
17335 if (pesType === 'audio' && pusi) {
17336 parsed = probe.ts.parsePesTime(packet);
17337
17338 if (parsed) {
17339 parsed.type = 'audio';
17340 result.audio.push(parsed);
17341 endLoop = true;
17342 }
17343 }
17344
17345 break;
17346 }
17347
17348 if (endLoop) {
17349 break;
17350 }
17351
17352 startIndex -= MP2T_PACKET_LENGTH;
17353 endIndex -= MP2T_PACKET_LENGTH;
17354 continue;
17355 } // If we get here, we have somehow become de-synchronized and we need to step
17356 // backward one byte at a time until we find a pair of sync bytes that denote
17357 // a packet
17358
17359
17360 startIndex--;
17361 endIndex--;
17362 }
17363 };
17364 /**
17365 * walks through the segment data from the start and end to get timing information
17366 * for the first and last video pes packets as well as timing information for the first
17367 * key frame.
17368 */
17369
17370
17371 var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
17372 var startIndex = 0,
17373 endIndex = MP2T_PACKET_LENGTH,
17374 packet,
17375 type,
17376 pesType,
17377 pusi,
17378 parsed,
17379 frame,
17380 i,
17381 pes;
17382 var endLoop = false;
17383 var currentFrame = {
17384 data: [],
17385 size: 0
17386 }; // Start walking from start of segment to get first video packet
17387
17388 while (endIndex < bytes.byteLength) {
17389 // Look for a pair of start and end sync bytes in the data.
17390 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
17391 // We found a packet
17392 packet = bytes.subarray(startIndex, endIndex);
17393 type = probe.ts.parseType(packet, pmt.pid);
17394
17395 switch (type) {
17396 case 'pes':
17397 pesType = probe.ts.parsePesType(packet, pmt.table);
17398 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
17399
17400 if (pesType === 'video') {
17401 if (pusi && !endLoop) {
17402 parsed = probe.ts.parsePesTime(packet);
17403
17404 if (parsed) {
17405 parsed.type = 'video';
17406 result.video.push(parsed);
17407 endLoop = true;
17408 }
17409 }
17410
17411 if (!result.firstKeyFrame) {
17412 if (pusi) {
17413 if (currentFrame.size !== 0) {
17414 frame = new Uint8Array(currentFrame.size);
17415 i = 0;
17416
17417 while (currentFrame.data.length) {
17418 pes = currentFrame.data.shift();
17419 frame.set(pes, i);
17420 i += pes.byteLength;
17421 }
17422
17423 if (probe.ts.videoPacketContainsKeyFrame(frame)) {
17424 var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
17425 // the keyframe seems to work fine with HLS playback
17426 // and definitely preferable to a crash with TypeError...
17427
17428 if (firstKeyFrame) {
17429 result.firstKeyFrame = firstKeyFrame;
17430 result.firstKeyFrame.type = 'video';
17431 } else {
17432 // eslint-disable-next-line
17433 console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
17434 }
17435 }
17436
17437 currentFrame.size = 0;
17438 }
17439 }
17440
17441 currentFrame.data.push(packet);
17442 currentFrame.size += packet.byteLength;
17443 }
17444 }
17445
17446 break;
17447 }
17448
17449 if (endLoop && result.firstKeyFrame) {
17450 break;
17451 }
17452
17453 startIndex += MP2T_PACKET_LENGTH;
17454 endIndex += MP2T_PACKET_LENGTH;
17455 continue;
17456 } // If we get here, we have somehow become de-synchronized and we need to step
17457 // forward one byte at a time until we find a pair of sync bytes that denote
17458 // a packet
17459
17460
17461 startIndex++;
17462 endIndex++;
17463 } // Start walking from end of segment to get last video packet
17464
17465
17466 endIndex = bytes.byteLength;
17467 startIndex = endIndex - MP2T_PACKET_LENGTH;
17468 endLoop = false;
17469
17470 while (startIndex >= 0) {
17471 // Look for a pair of start and end sync bytes in the data.
17472 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
17473 // We found a packet
17474 packet = bytes.subarray(startIndex, endIndex);
17475 type = probe.ts.parseType(packet, pmt.pid);
17476
17477 switch (type) {
17478 case 'pes':
17479 pesType = probe.ts.parsePesType(packet, pmt.table);
17480 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
17481
17482 if (pesType === 'video' && pusi) {
17483 parsed = probe.ts.parsePesTime(packet);
17484
17485 if (parsed) {
17486 parsed.type = 'video';
17487 result.video.push(parsed);
17488 endLoop = true;
17489 }
17490 }
17491
17492 break;
17493 }
17494
17495 if (endLoop) {
17496 break;
17497 }
17498
17499 startIndex -= MP2T_PACKET_LENGTH;
17500 endIndex -= MP2T_PACKET_LENGTH;
17501 continue;
17502 } // If we get here, we have somehow become de-synchronized and we need to step
17503 // backward one byte at a time until we find a pair of sync bytes that denote
17504 // a packet
17505
17506
17507 startIndex--;
17508 endIndex--;
17509 }
17510 };
17511 /**
17512 * Adjusts the timestamp information for the segment to account for
17513 * rollover and convert to seconds based on pes packet timescale (90khz clock)
17514 */
17515
17516
17517 var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
17518 if (segmentInfo.audio && segmentInfo.audio.length) {
17519 var audioBaseTimestamp = baseTimestamp;
17520
17521 if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
17522 audioBaseTimestamp = segmentInfo.audio[0].dts;
17523 }
17524
17525 segmentInfo.audio.forEach(function (info) {
17526 info.dts = handleRollover(info.dts, audioBaseTimestamp);
17527 info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
17528
17529 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
17530 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
17531 });
17532 }
17533
17534 if (segmentInfo.video && segmentInfo.video.length) {
17535 var videoBaseTimestamp = baseTimestamp;
17536
17537 if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
17538 videoBaseTimestamp = segmentInfo.video[0].dts;
17539 }
17540
17541 segmentInfo.video.forEach(function (info) {
17542 info.dts = handleRollover(info.dts, videoBaseTimestamp);
17543 info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
17544
17545 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
17546 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
17547 });
17548
17549 if (segmentInfo.firstKeyFrame) {
17550 var frame = segmentInfo.firstKeyFrame;
17551 frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
17552 frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
17553
17554 frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
17555 frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
17556 }
17557 }
17558 };
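// Illustrative only: PTS/DTS values in a transport stream are 33-bit counters
// on a 90kHz clock, so they wrap roughly every 26.5 hours; handleRollover
// compensates using a nearby reference timestamp. A minimal sketch of the
// per-entry conversion performed above (ONE_SECOND_IN_TS is 90000):
var exampleTicksToSeconds = function (ticks, referenceTicks) {
  return handleRollover(ticks, referenceTicks) / ONE_SECOND_IN_TS;
};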
17559 /**
17560 * inspects the aac data stream for start and end time information
17561 */
17562
17563
17564 var inspectAac_ = function inspectAac_(bytes) {
17565 var endLoop = false,
17566 audioCount = 0,
17567 sampleRate = null,
17568 timestamp = null,
17569 frameSize = 0,
17570 byteIndex = 0,
17571 packet;
17572
17573 while (bytes.length - byteIndex >= 3) {
17574 var type = probe.aac.parseType(bytes, byteIndex);
17575
17576 switch (type) {
17577 case 'timed-metadata':
17578 // Exit early because we don't have enough to parse
17579 // the ID3 tag header
17580 if (bytes.length - byteIndex < 10) {
17581 endLoop = true;
17582 break;
17583 }
17584
17585 frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
17586 // to emit a full packet
17587
17588 if (frameSize > bytes.length) {
17589 endLoop = true;
17590 break;
17591 }
17592
17593 if (timestamp === null) {
17594 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
17595 timestamp = probe.aac.parseAacTimestamp(packet);
17596 }
17597
17598 byteIndex += frameSize;
17599 break;
17600
17601 case 'audio':
17602 // Exit early because we don't have enough to parse
17603 // the ADTS frame header
17604 if (bytes.length - byteIndex < 7) {
17605 endLoop = true;
17606 break;
17607 }
17608
17609 frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
17610 // to emit a full packet
17611
17612 if (frameSize > bytes.length) {
17613 endLoop = true;
17614 break;
17615 }
17616
17617 if (sampleRate === null) {
17618 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
17619 sampleRate = probe.aac.parseSampleRate(packet);
17620 }
17621
17622 audioCount++;
17623 byteIndex += frameSize;
17624 break;
17625
17626 default:
17627 byteIndex++;
17628 break;
17629 }
17630
17631 if (endLoop) {
17632 return null;
17633 }
17634 }
17635
17636 if (sampleRate === null || timestamp === null) {
17637 return null;
17638 }
17639
17640 var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
17641 var result = {
17642 audio: [{
17643 type: 'audio',
17644 dts: timestamp,
17645 pts: timestamp
17646 }, {
17647 type: 'audio',
17648 dts: timestamp + audioCount * 1024 * audioTimescale,
17649 pts: timestamp + audioCount * 1024 * audioTimescale
17650 }]
17651 };
17652 return result;
17653 };
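// Illustrative only: each ADTS/AAC frame decodes to 1024 PCM samples, which is
// why the end timestamp above advances by audioCount * 1024 * audioTimescale
// ticks. The same duration in seconds, as a hedged sketch:
var exampleAacDurationSeconds = function (frameCount, sampleRate) {
  // 1024 samples per frame divided by samples-per-second gives seconds
  return frameCount * 1024 / sampleRate;
};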
17654 /**
17655 * inspects the transport stream segment data for start and end time information
17656 * of the audio and video tracks (when present) as well as the first key frame's
17657 * start time.
17658 */
17659
17660
17661 var inspectTs_ = function inspectTs_(bytes) {
17662 var pmt = {
17663 pid: null,
17664 table: null
17665 };
17666 var result = {};
17667 parsePsi_(bytes, pmt);
17668
17669 for (var pid in pmt.table) {
17670 if (pmt.table.hasOwnProperty(pid)) {
17671 var type = pmt.table[pid];
17672
17673 switch (type) {
17674 case streamTypes.H264_STREAM_TYPE:
17675 result.video = [];
17676 parseVideoPes_(bytes, pmt, result);
17677
17678 if (result.video.length === 0) {
17679 delete result.video;
17680 }
17681
17682 break;
17683
17684 case streamTypes.ADTS_STREAM_TYPE:
17685 result.audio = [];
17686 parseAudioPes_(bytes, pmt, result);
17687
17688 if (result.audio.length === 0) {
17689 delete result.audio;
17690 }
17691
17692 break;
17693 }
17694 }
17695 }
17696
17697 return result;
17698 };
17699 /**
17700 * Inspects segment byte data and returns an object with start and end timing information
17701 *
17702 * @param {Uint8Array} bytes The segment byte data
17703 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
17704 * timestamps for rollover. This value must be in 90khz clock.
17705 * @return {Object} Object containing start and end frame timing info of segment.
17706 */
17707
17708
17709 var inspect = function inspect(bytes, baseTimestamp) {
17710 var isAacData = probe.aac.isLikelyAacData(bytes);
17711 var result;
17712
17713 if (isAacData) {
17714 result = inspectAac_(bytes);
17715 } else {
17716 result = inspectTs_(bytes);
17717 }
17718
17719 if (!result || !result.audio && !result.video) {
17720 return null;
17721 }
17722
17723 adjustTimestamp_(result, baseTimestamp);
17724 return result;
17725 };
17726
17727 var tsInspector = {
17728 inspect: inspect,
17729 parseAudioPes_: parseAudioPes_
17730 };
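// Illustrative only: how a caller might use the inspector above. segmentBytes
// is assumed to be a Uint8Array of MPEG2-TS (or ADTS/AAC) data; the optional
// second argument to inspect is the rollover reference in 90kHz ticks.
var exampleProbeSegmentStart = function (segmentBytes) {
  var info = tsInspector.inspect(segmentBytes);
  if (!info || !info.video) {
    return null;
  }
  // the first entry carries the earliest video timing info, in seconds
  return info.video[0].ptsTime;
};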
17731 /* global self */
17732
17733 /**
17734 * Re-emits transmuxer events by converting them into messages to the
17735 * world outside the worker.
17736 *
17737 * @param {Object} transmuxer the transmuxer to wire events on
17738 * @private
17739 */
17740
17741 var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
17742 transmuxer.on('data', function (segment) {
17743 // transfer ownership of the underlying ArrayBuffer
17744 // instead of doing a copy to save memory
17745 // ArrayBuffers are transferable but generic TypedArrays are not
17746 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
17747 var initArray = segment.initSegment;
17748 segment.initSegment = {
17749 data: initArray.buffer,
17750 byteOffset: initArray.byteOffset,
17751 byteLength: initArray.byteLength
17752 };
17753 var typedArray = segment.data;
17754 segment.data = typedArray.buffer;
17755 self.postMessage({
17756 action: 'data',
17757 segment: segment,
17758 byteOffset: typedArray.byteOffset,
17759 byteLength: typedArray.byteLength
17760 }, [segment.data]);
17761 });
17762 transmuxer.on('done', function (data) {
17763 self.postMessage({
17764 action: 'done'
17765 });
17766 });
17767 transmuxer.on('gopInfo', function (gopInfo) {
17768 self.postMessage({
17769 action: 'gopInfo',
17770 gopInfo: gopInfo
17771 });
17772 });
17773 transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
17774 var videoSegmentTimingInfo = {
17775 start: {
17776 decode: clock.videoTsToSeconds(timingInfo.start.dts),
17777 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
17778 },
17779 end: {
17780 decode: clock.videoTsToSeconds(timingInfo.end.dts),
17781 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
17782 },
17783 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
17784 };
17785
17786 if (timingInfo.prependedContentDuration) {
17787 videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
17788 }
17789
17790 self.postMessage({
17791 action: 'videoSegmentTimingInfo',
17792 videoSegmentTimingInfo: videoSegmentTimingInfo
17793 });
17794 });
17795 transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
17796 // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
17797 var audioSegmentTimingInfo = {
17798 start: {
17799 decode: clock.videoTsToSeconds(timingInfo.start.dts),
17800 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
17801 },
17802 end: {
17803 decode: clock.videoTsToSeconds(timingInfo.end.dts),
17804 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
17805 },
17806 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
17807 };
17808
17809 if (timingInfo.prependedContentDuration) {
17810 audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
17811 }
17812
17813 self.postMessage({
17814 action: 'audioSegmentTimingInfo',
17815 audioSegmentTimingInfo: audioSegmentTimingInfo
17816 });
17817 });
17818 transmuxer.on('id3Frame', function (id3Frame) {
17819 self.postMessage({
17820 action: 'id3Frame',
17821 id3Frame: id3Frame
17822 });
17823 });
17824 transmuxer.on('caption', function (caption) {
17825 self.postMessage({
17826 action: 'caption',
17827 caption: caption
17828 });
17829 });
17830 transmuxer.on('trackinfo', function (trackInfo) {
17831 self.postMessage({
17832 action: 'trackinfo',
17833 trackInfo: trackInfo
17834 });
17835 });
17836 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
17837 // convert to video TS since we prioritize video time over audio
17838 self.postMessage({
17839 action: 'audioTimingInfo',
17840 audioTimingInfo: {
17841 start: clock.videoTsToSeconds(audioTimingInfo.start),
17842 end: clock.videoTsToSeconds(audioTimingInfo.end)
17843 }
17844 });
17845 });
17846 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
17847 self.postMessage({
17848 action: 'videoTimingInfo',
17849 videoTimingInfo: {
17850 start: clock.videoTsToSeconds(videoTimingInfo.start),
17851 end: clock.videoTsToSeconds(videoTimingInfo.end)
17852 }
17853 });
17854 });
17855 transmuxer.on('log', function (log) {
17856 self.postMessage({
17857 action: 'log',
17858 log: log
17859 });
17860 });
17861 };
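// Illustrative sketch of the transfer-list pattern used above: passing an
// ArrayBuffer in postMessage's second argument moves ownership to the receiver
// instead of copying it, detaching the sender's copy. The accompanying
// byteOffset/byteLength metadata lets the receiver rebuild the original view:
var examplePostTransferable = function (target, typedArray) {
  target.postMessage({
    data: typedArray.buffer,
    byteOffset: typedArray.byteOffset,
    byteLength: typedArray.byteLength
  }, [typedArray.buffer]);
};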
17862 /**
17863 * All incoming messages route through this hash. If no function exists
17864 * to handle an incoming message, then we ignore the message.
17865 *
17866 * @class MessageHandlers
17867 * @param {Object} options the options to initialize with
17868 */
17869
17870
17871 var MessageHandlers = /*#__PURE__*/function () {
17872 function MessageHandlers(self, options) {
17873 this.options = options || {};
17874 this.self = self;
17875 this.init();
17876 }
17877 /**
17878 * initialize our web worker and wire all the events.
17879 */
17880
17881
17882 var _proto = MessageHandlers.prototype;
17883
17884 _proto.init = function init() {
17885 if (this.transmuxer) {
17886 this.transmuxer.dispose();
17887 }
17888
17889 this.transmuxer = new transmuxer.Transmuxer(this.options);
17890 wireTransmuxerEvents(this.self, this.transmuxer);
17891 };
17892
17893 _proto.pushMp4Captions = function pushMp4Captions(data) {
17894 if (!this.captionParser) {
17895 this.captionParser = new captionParser();
17896 this.captionParser.init();
17897 }
17898
17899 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
17900 var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
17901 this.self.postMessage({
17902 action: 'mp4Captions',
17903 captions: parsed && parsed.captions || [],
17904 logs: parsed && parsed.logs || [],
17905 data: segment.buffer
17906 }, [segment.buffer]);
17907 };
17908
17909 _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
17910 var timescales = _ref.timescales,
17911 data = _ref.data;
17912 var startTime = probe$2.startTime(timescales, data);
17913 this.self.postMessage({
17914 action: 'probeMp4StartTime',
17915 startTime: startTime,
17916 data: data
17917 }, [data.buffer]);
17918 };
17919
17920 _proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
17921 var data = _ref2.data;
17922 var tracks = probe$2.tracks(data);
17923 this.self.postMessage({
17924 action: 'probeMp4Tracks',
17925 tracks: tracks,
17926 data: data
17927 }, [data.buffer]);
17928 }
17929 /**
17930 * Probe an mpeg2-ts segment to determine the start time of the segment in its
17931 * internal "media time," as well as whether it contains video and/or audio.
17932 *
17933 * @private
17934 * @param {Uint8Array} bytes - segment bytes
17935 * @param {number} baseStartTime
17936 * Relative reference timestamp used when adjusting frame timestamps for rollover.
17937 * This value should be in seconds, as it's converted to a 90khz clock within the
17938 * function body.
17939 * @return {Object} The start time of the current segment in "media time" as well as
17940 * whether it contains video and/or audio
17941 */
17942 ;
17943
17944 _proto.probeTs = function probeTs(_ref3) {
17945 var data = _ref3.data,
17946 baseStartTime = _ref3.baseStartTime;
17947 var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
17948 var timeInfo = tsInspector.inspect(data, tsStartTime);
17949 var result = null;
17950
17951 if (timeInfo) {
17952 result = {
17953 // each type's time info comes back as an array of 2 times, start and end
17954 hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
17955 hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
17956 };
17957
17958 if (result.hasVideo) {
17959 result.videoStart = timeInfo.video[0].ptsTime;
17960 }
17961
17962 if (result.hasAudio) {
17963 result.audioStart = timeInfo.audio[0].ptsTime;
17964 }
17965 }
17966
17967 this.self.postMessage({
17968 action: 'probeTs',
17969 result: result,
17970 data: data
17971 }, [data.buffer]);
17972 };
17973
17974 _proto.clearAllMp4Captions = function clearAllMp4Captions() {
17975 if (this.captionParser) {
17976 this.captionParser.clearAllCaptions();
17977 }
17978 };
17979
17980 _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
17981 if (this.captionParser) {
17982 this.captionParser.clearParsedCaptions();
17983 }
17984 }
17985 /**
17986 * Adds data (a ts segment) to the start of the transmuxer pipeline for
17987 * processing.
17988 *
17989 * @param {ArrayBuffer} data data to push into the muxer
17990 */
17991 ;
17992
17993 _proto.push = function push(data) {
17994 // Cast array buffer to correct type for transmuxer
17995 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
17996 this.transmuxer.push(segment);
17997 }
17998 /**
17999 * Recreate the transmuxer so that the next segment added via `push`
18000 * starts with a fresh transmuxer.
18001 */
18002 ;
18003
18004 _proto.reset = function reset() {
18005 this.transmuxer.reset();
18006 }
18007 /**
18008 * Set the value that will be used as the `baseMediaDecodeTime` for the
18009 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
18010 * set relative to the first based on the PTS values.
18011 *
18012 * @param {Object} data used to set the timestamp offset in the muxer
18013 */
18014 ;
18015
18016 _proto.setTimestampOffset = function setTimestampOffset(data) {
18017 var timestampOffset = data.timestampOffset || 0;
18018 this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
18019 };
18020
18021 _proto.setAudioAppendStart = function setAudioAppendStart(data) {
18022 this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
18023 };
18024
18025 _proto.setRemux = function setRemux(data) {
18026 this.transmuxer.setRemux(data.remux);
18027 }
18028 /**
18029 * Forces the pipeline to finish processing the last segment and emit its
18030 * results.
18031 *
18032 * @param {Object} data event data, not really used
18033 */
18034 ;
18035
18036 _proto.flush = function flush(data) {
18037 this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
18038
18039 self.postMessage({
18040 action: 'done',
18041 type: 'transmuxed'
18042 });
18043 };
18044
18045 _proto.endTimeline = function endTimeline() {
18046 this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
18047 // timelines
18048
18049 self.postMessage({
18050 action: 'endedtimeline',
18051 type: 'transmuxed'
18052 });
18053 };
18054
18055 _proto.alignGopsWith = function alignGopsWith(data) {
18056 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
18057 };
18058
18059 return MessageHandlers;
18060 }();
18061 /**
18062 * Our web worker interface so that things can talk to mux.js
18063 * that will be running in a web worker. the scope is passed to this by
18064 * webworkify.
18065 *
18066 * @param {Object} self the scope for the web worker
18067 */
18068
18069
18070 self.onmessage = function (event) {
18071 if (event.data.action === 'init' && event.data.options) {
18072 this.messageHandlers = new MessageHandlers(self, event.data.options);
18073 return;
18074 }
18075
18076 if (!this.messageHandlers) {
18077 this.messageHandlers = new MessageHandlers(self);
18078 }
18079
18080 if (event.data && event.data.action && event.data.action !== 'init') {
18081 if (this.messageHandlers[event.data.action]) {
18082 this.messageHandlers[event.data.action](event.data);
18083 }
18084 }
18085 };
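// Illustrative only: the minimal message sequence a host page would send to
// this worker, with `worker` and `buffer` as hypothetical names. The worker
// answers 'push'/'flush' with 'data' messages followed by
// { action: 'done', type: 'transmuxed' }:
//
//   worker.postMessage({ action: 'init', options: {} });
//   worker.postMessage({
//     action: 'push',
//     data: buffer, // ArrayBuffer, sent as a transferable
//     byteOffset: 0,
//     byteLength: buffer.byteLength
//   }, [buffer]);
//   worker.postMessage({ action: 'flush' });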
18086 });
18087 var TransmuxWorker = factory(workerCode$1);
18088 /* rollup-plugin-worker-factory end for worker!/Users/bcasey/Projects/videojs-http-streaming/src/transmuxer-worker.js */
18089
18090 var handleData_ = function handleData_(event, transmuxedData, callback) {
18091 var _event$data$segment = event.data.segment,
18092 type = _event$data$segment.type,
18093 initSegment = _event$data$segment.initSegment,
18094 captions = _event$data$segment.captions,
18095 captionStreams = _event$data$segment.captionStreams,
18096 metadata = _event$data$segment.metadata,
18097 videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
18098 videoFramePtsTime = _event$data$segment.videoFramePtsTime;
18099 transmuxedData.buffer.push({
18100 captions: captions,
18101 captionStreams: captionStreams,
18102 metadata: metadata
18103 });
18104 var boxes = event.data.segment.boxes || {
18105 data: event.data.segment.data
18106 };
18107 var result = {
18108 type: type,
18109 // cast ArrayBuffer to TypedArray
18110 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
18111 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
18112 };
18113
18114 if (typeof videoFrameDtsTime !== 'undefined') {
18115 result.videoFrameDtsTime = videoFrameDtsTime;
18116 }
18117
18118 if (typeof videoFramePtsTime !== 'undefined') {
18119 result.videoFramePtsTime = videoFramePtsTime;
18120 }
18121
18122 callback(result);
18123 };
18124 var handleDone_ = function handleDone_(_ref) {
18125 var transmuxedData = _ref.transmuxedData,
18126 callback = _ref.callback;
18127 // Previously we only returned data on data events,
18128 // not on done events. Clear out the buffer to keep that consistent.
18129 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
18130 // have received
18131
18132 callback(transmuxedData);
18133 };
18134 var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
18135 transmuxedData.gopInfo = event.data.gopInfo;
18136 };
18137 var processTransmux = function processTransmux(options) {
18138 var transmuxer = options.transmuxer,
18139 bytes = options.bytes,
18140 audioAppendStart = options.audioAppendStart,
18141 gopsToAlignWith = options.gopsToAlignWith,
18142 remux = options.remux,
18143 onData = options.onData,
18144 onTrackInfo = options.onTrackInfo,
18145 onAudioTimingInfo = options.onAudioTimingInfo,
18146 onVideoTimingInfo = options.onVideoTimingInfo,
18147 onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
18148 onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
18149 onId3 = options.onId3,
18150 onCaptions = options.onCaptions,
18151 onDone = options.onDone,
18152 onEndedTimeline = options.onEndedTimeline,
18153 onTransmuxerLog = options.onTransmuxerLog,
18154 isEndOfTimeline = options.isEndOfTimeline;
18155 var transmuxedData = {
18156 buffer: []
18157 };
18158 var waitForEndedTimelineEvent = isEndOfTimeline;
18159
18160 var handleMessage = function handleMessage(event) {
18161 if (transmuxer.currentTransmux !== options) {
18162 // disposed
18163 return;
18164 }
18165
18166 if (event.data.action === 'data') {
18167 handleData_(event, transmuxedData, onData);
18168 }
18169
18170 if (event.data.action === 'trackinfo') {
18171 onTrackInfo(event.data.trackInfo);
18172 }
18173
18174 if (event.data.action === 'gopInfo') {
18175 handleGopInfo_(event, transmuxedData);
18176 }
18177
18178 if (event.data.action === 'audioTimingInfo') {
18179 onAudioTimingInfo(event.data.audioTimingInfo);
18180 }
18181
18182 if (event.data.action === 'videoTimingInfo') {
18183 onVideoTimingInfo(event.data.videoTimingInfo);
18184 }
18185
18186 if (event.data.action === 'videoSegmentTimingInfo') {
18187 onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
18188 }
18189
18190 if (event.data.action === 'audioSegmentTimingInfo') {
18191 onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
18192 }
18193
18194 if (event.data.action === 'id3Frame') {
18195 onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
18196 }
18197
18198 if (event.data.action === 'caption') {
18199 onCaptions(event.data.caption);
18200 }
18201
18202 if (event.data.action === 'endedtimeline') {
18203 waitForEndedTimelineEvent = false;
18204 onEndedTimeline();
18205 }
18206
18207 if (event.data.action === 'log') {
18208 onTransmuxerLog(event.data.log);
18209 } // wait for the transmuxed event since we may have audio and video
18210
18211
18212 if (event.data.type !== 'transmuxed') {
18213 return;
18214 } // If the "endedtimeline" event has not yet fired, and this segment represents the end
18215 // of a timeline, that means there may still be data events before the segment
18216 // processing can be considered complete. In that case, the final event should be
18217 // an "endedtimeline" event with the type "transmuxed."
18218
18219
18220 if (waitForEndedTimelineEvent) {
18221 return;
18222 }
18223
18224 transmuxer.onmessage = null;
18225 handleDone_({
18226 transmuxedData: transmuxedData,
18227 callback: onDone
18228 });
18229 /* eslint-disable no-use-before-define */
18230
18231 dequeue(transmuxer);
18232 /* eslint-enable */
18233 };
18234
18235 transmuxer.onmessage = handleMessage;
18236
18237 if (audioAppendStart) {
18238 transmuxer.postMessage({
18239 action: 'setAudioAppendStart',
18240 appendStart: audioAppendStart
18241 });
18242 } // allow empty arrays to be passed to clear out GOPs
18243
18244
18245 if (Array.isArray(gopsToAlignWith)) {
18246 transmuxer.postMessage({
18247 action: 'alignGopsWith',
18248 gopsToAlignWith: gopsToAlignWith
18249 });
18250 }
18251
18252 if (typeof remux !== 'undefined') {
18253 transmuxer.postMessage({
18254 action: 'setRemux',
18255 remux: remux
18256 });
18257 }
18258
18259 if (bytes.byteLength) {
18260 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
18261 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
18262 transmuxer.postMessage({
18263 action: 'push',
18264 // Send the typed-array of data as an ArrayBuffer so that
18265 // it can be sent as a "Transferable" and avoid the costly
18266 // memory copy
18267 data: buffer,
18268 // To recreate the original typed-array, we need information
18269 // about what portion of the ArrayBuffer it was a view into
18270 byteOffset: byteOffset,
18271 byteLength: bytes.byteLength
18272 }, [buffer]);
18273 }
18274
18275 if (isEndOfTimeline) {
18276 transmuxer.postMessage({
18277 action: 'endTimeline'
18278 });
18279 } // even if we didn't push any bytes, we have to make sure we flush in case we reached
18280 // the end of the segment
18281
18282
18283 transmuxer.postMessage({
18284 action: 'flush'
18285 });
18286 };
18287 var dequeue = function dequeue(transmuxer) {
18288 transmuxer.currentTransmux = null;
18289
18290 if (transmuxer.transmuxQueue.length) {
18291 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
18292
18293 if (typeof transmuxer.currentTransmux === 'function') {
18294 transmuxer.currentTransmux();
18295 } else {
18296 processTransmux(transmuxer.currentTransmux);
18297 }
18298 }
18299 };
18300 var processAction = function processAction(transmuxer, action) {
18301 transmuxer.postMessage({
18302 action: action
18303 });
18304 dequeue(transmuxer);
18305 };
18306 var enqueueAction = function enqueueAction(action, transmuxer) {
18307 if (!transmuxer.currentTransmux) {
18308 transmuxer.currentTransmux = action;
18309 processAction(transmuxer, action);
18310 return;
18311 }
18312
18313 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
18314 };
18315 var reset = function reset(transmuxer) {
18316 enqueueAction('reset', transmuxer);
18317 };
18318 var endTimeline = function endTimeline(transmuxer) {
18319 enqueueAction('endTimeline', transmuxer);
18320 };
18321 var transmux = function transmux(options) {
18322 if (!options.transmuxer.currentTransmux) {
18323 options.transmuxer.currentTransmux = options;
18324 processTransmux(options);
18325 return;
18326 }
18327
18328 options.transmuxer.transmuxQueue.push(options);
18329 };
18330 var createTransmuxer = function createTransmuxer(options) {
18331 var transmuxer = new TransmuxWorker();
18332 transmuxer.currentTransmux = null;
18333 transmuxer.transmuxQueue = [];
18334 var term = transmuxer.terminate;
18335
18336 transmuxer.terminate = function () {
18337 transmuxer.currentTransmux = null;
18338 transmuxer.transmuxQueue.length = 0;
18339 return term.call(transmuxer);
18340 };
18341
18342 transmuxer.postMessage({
18343 action: 'init',
18344 options: options
18345 });
18346 return transmuxer;
18347 };
18348 var segmentTransmuxer = {
18349 reset: reset,
18350 endTimeline: endTimeline,
18351 transmux: transmux,
18352 createTransmuxer: createTransmuxer
18353 };
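// Illustrative only: a typical round trip through the queue above, with
// hypothetical names. The callbacks mirror the options that processTransmux
// destructures; unused handlers are stubbed out for brevity:
//
//   var worker = segmentTransmuxer.createTransmuxer({ remux: true });
//   segmentTransmuxer.transmux({
//     transmuxer: worker,
//     bytes: tsBytes, // Uint8Array of MPEG2-TS data
//     isEndOfTimeline: false,
//     onData: function (result) { /* fmp4 bytes in result.data */ },
//     onDone: function (transmuxedData) { worker.terminate(); },
//     onTrackInfo: function () {}, onAudioTimingInfo: function () {},
//     onVideoTimingInfo: function () {}, onVideoSegmentTimingInfo: function () {},
//     onAudioSegmentTimingInfo: function () {}, onId3: function () {},
//     onCaptions: function () {}, onEndedTimeline: function () {},
//     onTransmuxerLog: function () {}
//   });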
18354
18355 var workerCallback = function workerCallback(options) {
18356 var transmuxer = options.transmuxer;
18357 var endAction = options.endAction || options.action;
18358 var callback = options.callback;
18359
18360 var message = _extends_1({}, options, {
18361 endAction: null,
18362 transmuxer: null,
18363 callback: null
18364 });
18365
18366 var listenForEndEvent = function listenForEndEvent(event) {
18367 if (event.data.action !== endAction) {
18368 return;
18369 }
18370
18371 transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
18372
18373 if (event.data.data) {
18374 event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
18375
18376 if (options.data) {
18377 options.data = event.data.data;
18378 }
18379 }
18380
18381 callback(event.data);
18382 };
18383
18384 transmuxer.addEventListener('message', listenForEndEvent);
18385
18386 if (options.data) {
18387 var isArrayBuffer = options.data instanceof ArrayBuffer;
18388 message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
18389 message.byteLength = options.data.byteLength;
18390 var transfers = [isArrayBuffer ? options.data : options.data.buffer];
18391 transmuxer.postMessage(message, transfers);
18392 } else {
18393 transmuxer.postMessage(message);
18394 }
18395 };
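// Illustrative only: the helper above resolves when the worker replies with
// endAction (which defaults to action), and hands transferred bytes back to
// the caller. For example, caption parsing posts 'pushMp4Captions' but
// completes on the 'mp4Captions' reply; names here are hypothetical:
//
//   workerCallback({
//     action: 'pushMp4Captions',
//     endAction: 'mp4Captions',
//     transmuxer: transmuxerWorker,
//     data: segmentBytes, // Uint8Array; transferred out, then re-adopted
//     timescales: timescalesById,
//     trackIds: [videoTrackId],
//     callback: function (reply) {
//       segmentBytes = reply.data; // ownership transferred back to us
//     }
//   });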
18396
18397 var REQUEST_ERRORS = {
18398 FAILURE: 2,
18399 TIMEOUT: -101,
18400 ABORTED: -102
18401 };
18402 /**
18403 * Abort all requests
18404 *
18405 * @param {Object} activeXhrs - an object that tracks all XHR requests
18406 */
18407
18408 var abortAll = function abortAll(activeXhrs) {
18409 activeXhrs.forEach(function (xhr) {
18410 xhr.abort();
18411 });
18412 };
18413 /**
18414 * Gather important bandwidth stats once a request has completed
18415 *
18416 * @param {Object} request - the XHR request from which to gather stats
18417 */
18418
18419
18420 var getRequestStats = function getRequestStats(request) {
18421 return {
18422 bandwidth: request.bandwidth,
18423 bytesReceived: request.bytesReceived || 0,
18424 roundTripTime: request.roundTripTime || 0
18425 };
18426 };
18427 /**
18428 * If possible gather bandwidth stats as a request is in
18429 * progress
18430 *
18431 * @param {Event} progressEvent - an event object from an XHR's progress event
18432 */
18433
18434
18435 var getProgressStats = function getProgressStats(progressEvent) {
18436 var request = progressEvent.target;
18437 var roundTripTime = Date.now() - request.requestTime;
18438 var stats = {
18439 bandwidth: Infinity,
18440 bytesReceived: 0,
18441 roundTripTime: roundTripTime || 0
18442 };
18443 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
18444 // because we should only use bandwidth stats on progress to determine when
18445 // to abort a request early due to insufficient bandwidth
18446
18447 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
18448 return stats;
18449 };
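// Illustrative sketch of the bandwidth arithmetic above: bytes are converted
// to bits (times 8) and milliseconds to seconds (times 1000), yielding bits
// per second:
var exampleBitsPerSecond = function (bytesReceived, roundTripTimeMs) {
  return Math.floor(bytesReceived / roundTripTimeMs * 8 * 1000);
};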
18450 /**
18451 * Handle all error conditions in one place and return an object
18452 * with all the information
18453 *
18454 * @param {Error|null} error - if non-null signals an error occurred with the XHR
18455 * @param {Object} request - the XHR request that possibly generated the error
18456 */
18457
18458
18459 var handleErrors = function handleErrors(error, request) {
18460 if (request.timedout) {
18461 return {
18462 status: request.status,
18463 message: 'HLS request timed-out at URL: ' + request.uri,
18464 code: REQUEST_ERRORS.TIMEOUT,
18465 xhr: request
18466 };
18467 }
18468
18469 if (request.aborted) {
18470 return {
18471 status: request.status,
18472 message: 'HLS request aborted at URL: ' + request.uri,
18473 code: REQUEST_ERRORS.ABORTED,
18474 xhr: request
18475 };
18476 }
18477
18478 if (error) {
18479 return {
18480 status: request.status,
18481 message: 'HLS request errored at URL: ' + request.uri,
18482 code: REQUEST_ERRORS.FAILURE,
18483 xhr: request
18484 };
18485 }
18486
18487 if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
18488 return {
18489 status: request.status,
18490 message: 'Empty HLS response at URL: ' + request.uri,
18491 code: REQUEST_ERRORS.FAILURE,
18492 xhr: request
18493 };
18494 }
18495
18496 return null;
18497 };
18498 /**
18499 * Handle responses for key data and convert the key data to the correct format
18500 * for the decryption step later
18501 *
18502 * @param {Object} segment - a simplified copy of the segmentInfo object
18503 * from SegmentLoader
18504 * @param {Array} objects - objects to add the key bytes to.
18505 * @param {Function} finishProcessingFn - a callback to execute to continue processing
18506 * this request
18507 */
18508
18509
18510 var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
18511 return function (error, request) {
18512 var response = request.response;
18513 var errorObj = handleErrors(error, request);
18514
18515 if (errorObj) {
18516 return finishProcessingFn(errorObj, segment);
18517 }
18518
18519 if (response.byteLength !== 16) {
18520 return finishProcessingFn({
18521 status: request.status,
18522 message: 'Invalid HLS key at URL: ' + request.uri,
18523 code: REQUEST_ERRORS.FAILURE,
18524 xhr: request
18525 }, segment);
18526 }
18527
18528 var view = new DataView(response);
18529 var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
18530
18531 for (var i = 0; i < objects.length; i++) {
18532 objects[i].bytes = bytes;
18533 }
18534
18535 return finishProcessingFn(null, segment);
18536 };
18537 };
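// Illustrative only: an HLS AES-128 key is exactly 16 bytes, which the handler
// above repackages as four 32-bit big-endian words (DataView.getUint32 reads
// big-endian by default) for the decryption routines:
var exampleKeyToWords = function (keyArrayBuffer) {
  var view = new DataView(keyArrayBuffer);
  return new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
};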
18538
18539 var parseInitSegment = function parseInitSegment(segment, _callback) {
18540 var type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
18541 // only know how to parse mp4 init segments at the moment
18542
18543 if (type !== 'mp4') {
18544 var uri = segment.map.resolvedUri || segment.map.uri;
18545 return _callback({
18546 internal: true,
18547 message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
18548 code: REQUEST_ERRORS.FAILURE
18549 });
18550 }
18551
18552 workerCallback({
18553 action: 'probeMp4Tracks',
18554 data: segment.map.bytes,
18555 transmuxer: segment.transmuxer,
18556 callback: function callback(_ref) {
18557 var tracks = _ref.tracks,
18558 data = _ref.data;
18559 // transfer bytes back to us
18560 segment.map.bytes = data;
18561 tracks.forEach(function (track) {
18562 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
18563
18564 if (segment.map.tracks[track.type]) {
18565 return;
18566 }
18567
18568 segment.map.tracks[track.type] = track;
18569
18570 if (typeof track.id === 'number' && track.timescale) {
18571 segment.map.timescales = segment.map.timescales || {};
18572 segment.map.timescales[track.id] = track.timescale;
18573 }
18574 });
18575 return _callback(null);
18576 }
18577 });
18578 };
18579 /**
18580 * Handle init-segment responses
18581 *
18582 * @param {Object} segment - a simplified copy of the segmentInfo object
18583 * from SegmentLoader
18584 * @param {Function} finishProcessingFn - a callback to execute to continue processing
18585 * this request
18586 */
18587
18588
18589 var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
18590 var segment = _ref2.segment,
18591 finishProcessingFn = _ref2.finishProcessingFn;
18592 return function (error, request) {
18593 var errorObj = handleErrors(error, request);
18594
18595 if (errorObj) {
18596 return finishProcessingFn(errorObj, segment);
18597 }
18598
18599 var bytes = new Uint8Array(request.response); // init segment is encrypted, we will have to wait
18600 // until the key request is done to decrypt.
18601
18602 if (segment.map.key) {
18603 segment.map.encryptedBytes = bytes;
18604 return finishProcessingFn(null, segment);
18605 }
18606
18607 segment.map.bytes = bytes;
18608 parseInitSegment(segment, function (parseError) {
18609 if (parseError) {
18610 parseError.xhr = request;
18611 parseError.status = request.status;
18612 return finishProcessingFn(parseError, segment);
18613 }
18614
18615 finishProcessingFn(null, segment);
18616 });
18617 };
18618 };
18619 /**
18620 * Response handler for segment-requests being sure to set the correct
18621 * property depending on whether the segment is encrypted or not
18622 * Also records and keeps track of stats that are used for ABR purposes
18623 *
18624 * @param {Object} segment - a simplified copy of the segmentInfo object
18625 * from SegmentLoader
18626 * @param {Function} finishProcessingFn - a callback to execute to continue processing
18627 * this request
18628 */
18629
18630
18631 var handleSegmentResponse = function handleSegmentResponse(_ref3) {
18632 var segment = _ref3.segment,
18633 finishProcessingFn = _ref3.finishProcessingFn,
18634 responseType = _ref3.responseType;
18635 return function (error, request) {
18636 var errorObj = handleErrors(error, request);
18637
18638 if (errorObj) {
18639 return finishProcessingFn(errorObj, segment);
18640 }
18641
18642 var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
18643 // thrown for two primary cases:
18644 // 1. the mime type override stops working, or is not implemented for a specific
18645 // browser
18646 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
18647 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
18648 segment.stats = getRequestStats(request);
18649
18650 if (segment.key) {
18651 segment.encryptedBytes = new Uint8Array(newBytes);
18652 } else {
18653 segment.bytes = new Uint8Array(newBytes);
18654 }
18655
18656 return finishProcessingFn(null, segment);
18657 };
18658 };
18659
18660 var transmuxAndNotify = function transmuxAndNotify(_ref4) {
18661 var segment = _ref4.segment,
18662 bytes = _ref4.bytes,
18663 trackInfoFn = _ref4.trackInfoFn,
18664 timingInfoFn = _ref4.timingInfoFn,
18665 videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
18666 audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
18667 id3Fn = _ref4.id3Fn,
18668 captionsFn = _ref4.captionsFn,
18669 isEndOfTimeline = _ref4.isEndOfTimeline,
18670 endedTimelineFn = _ref4.endedTimelineFn,
18671 dataFn = _ref4.dataFn,
18672 doneFn = _ref4.doneFn,
18673 onTransmuxerLog = _ref4.onTransmuxerLog;
18674 var fmp4Tracks = segment.map && segment.map.tracks || {};
18675 var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
18676 // One reason for this is that in the case of full segments, we want to trust start
18677 // times from the probe, rather than the transmuxer.
18678
18679 var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
18680 var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
18681 var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
18682 var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');
18683
18684 var finish = function finish() {
18685 return transmux({
18686 bytes: bytes,
18687 transmuxer: segment.transmuxer,
18688 audioAppendStart: segment.audioAppendStart,
18689 gopsToAlignWith: segment.gopsToAlignWith,
18690 remux: isMuxed,
18691 onData: function onData(result) {
18692 result.type = result.type === 'combined' ? 'video' : result.type;
18693 dataFn(segment, result);
18694 },
18695 onTrackInfo: function onTrackInfo(trackInfo) {
18696 if (trackInfoFn) {
18697 if (isMuxed) {
18698 trackInfo.isMuxed = true;
18699 }
18700
18701 trackInfoFn(segment, trackInfo);
18702 }
18703 },
18704 onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
18705 // we only want the first start value we encounter
18706 if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
18707 audioStartFn(audioTimingInfo.start);
18708 audioStartFn = null;
18709 } // we want to continually update the end time
18710
18711
18712 if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
18713 audioEndFn(audioTimingInfo.end);
18714 }
18715 },
18716 onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
18717 // we only want the first start value we encounter
18718 if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
18719 videoStartFn(videoTimingInfo.start);
18720 videoStartFn = null;
18721 } // we want to continually update the end time
18722
18723
18724 if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
18725 videoEndFn(videoTimingInfo.end);
18726 }
18727 },
18728 onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
18729 videoSegmentTimingInfoFn(videoSegmentTimingInfo);
18730 },
18731 onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
18732 audioSegmentTimingInfoFn(audioSegmentTimingInfo);
18733 },
18734 onId3: function onId3(id3Frames, dispatchType) {
18735 id3Fn(segment, id3Frames, dispatchType);
18736 },
18737 onCaptions: function onCaptions(captions) {
18738 captionsFn(segment, [captions]);
18739 },
18740 isEndOfTimeline: isEndOfTimeline,
18741 onEndedTimeline: function onEndedTimeline() {
18742 endedTimelineFn();
18743 },
18744 onTransmuxerLog: onTransmuxerLog,
18745 onDone: function onDone(result) {
18746 if (!doneFn) {
18747 return;
18748 }
18749
18750 result.type = result.type === 'combined' ? 'video' : result.type;
18751 doneFn(null, segment, result);
18752 }
18753 });
18754 }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
18755 // This means cached frame data may corrupt our notion of where this segment
18756 // really starts. To get around this, probe for the info needed.
18757
18758
18759 workerCallback({
18760 action: 'probeTs',
18761 transmuxer: segment.transmuxer,
18762 data: bytes,
18763 baseStartTime: segment.baseStartTime,
18764 callback: function callback(data) {
18765 segment.bytes = bytes = data.data;
18766 var probeResult = data.result;
18767
18768 if (probeResult) {
18769 trackInfoFn(segment, {
18770 hasAudio: probeResult.hasAudio,
18771 hasVideo: probeResult.hasVideo,
18772 isMuxed: isMuxed
18773 });
18774 trackInfoFn = null;
18775
18776 if (probeResult.hasAudio && !isMuxed) {
18777 audioStartFn(probeResult.audioStart);
18778 }
18779
18780 if (probeResult.hasVideo) {
18781 videoStartFn(probeResult.videoStart);
18782 }
18783
18784 audioStartFn = null;
18785 videoStartFn = null;
18786 }
18787
18788 finish();
18789 }
18790 });
18791 };
18792
18793 var handleSegmentBytes = function handleSegmentBytes(_ref5) {
18794 var segment = _ref5.segment,
18795 bytes = _ref5.bytes,
18796 trackInfoFn = _ref5.trackInfoFn,
18797 timingInfoFn = _ref5.timingInfoFn,
18798 videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
18799 audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
18800 id3Fn = _ref5.id3Fn,
18801 captionsFn = _ref5.captionsFn,
18802 isEndOfTimeline = _ref5.isEndOfTimeline,
18803 endedTimelineFn = _ref5.endedTimelineFn,
18804 dataFn = _ref5.dataFn,
18805 doneFn = _ref5.doneFn,
18806 onTransmuxerLog = _ref5.onTransmuxerLog;
18807 var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
18808 // We should have a handler that fetches the number of bytes required
18809 // to check if something is fmp4. This will allow us to save bandwidth
18810 // because we can only blacklist a playlist and abort requests
18811 // by codec after trackinfo triggers.
18812
18813 if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
18814 segment.isFmp4 = true;
18815 var tracks = segment.map.tracks;
18816 var trackInfo = {
18817 isFmp4: true,
18818 hasVideo: !!tracks.video,
18819 hasAudio: !!tracks.audio
18820 }; // if we have an audio track, with a codec that is not set to
18821 // encrypted audio
18822
18823 if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
18824 trackInfo.audioCodec = tracks.audio.codec;
18825 } // if we have a video track, with a codec that is not set to
18826 // encrypted video
18827
18828
18829 if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
18830 trackInfo.videoCodec = tracks.video.codec;
18831 }
18832
18833 if (tracks.video && tracks.audio) {
18834 trackInfo.isMuxed = true;
18835 } // since we don't support appending fmp4 data on progress, we know we have the full
18836 // segment here
18837
18838
18839 trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
18840 // time. The end time can be roughly calculated by the receiver using the duration.
18841 //
18842 // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
18843 // that is the true start of the segment (where the playback engine should begin
18844 // decoding).
18845
18846 var finishLoading = function finishLoading(captions) {
18847 // if the track still has audio at this point it is only possible
18848 // for it to be audio only. See `tracks.video && tracks.audio` if statement
18849 // above.
18850 // we make sure segment.bytes and bytesAsUint8Array stay in sync as the worker transfers ownership back
18851 dataFn(segment, {
18852 data: bytesAsUint8Array,
18853 type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
18854 });
18855
18856 if (captions && captions.length) {
18857 captionsFn(segment, captions);
18858 }
18859
18860 doneFn(null, segment, {});
18861 };
18862
18863 workerCallback({
18864 action: 'probeMp4StartTime',
18865 timescales: segment.map.timescales,
18866 data: bytesAsUint8Array,
18867 transmuxer: segment.transmuxer,
18868 callback: function callback(_ref6) {
18869 var data = _ref6.data,
18870 startTime = _ref6.startTime;
18871 // transfer bytes back to us
18872 bytes = data.buffer;
18873 segment.bytes = bytesAsUint8Array = data;
18874
18875 if (trackInfo.hasAudio && !trackInfo.isMuxed) {
18876 timingInfoFn(segment, 'audio', 'start', startTime);
18877 }
18878
18879 if (trackInfo.hasVideo) {
18880 timingInfoFn(segment, 'video', 'start', startTime);
18881 } // Run through the CaptionParser in case there are captions.
18882 // Initialize CaptionParser if it hasn't been yet
18883
18884
18885 if (!tracks.video || !data.byteLength || !segment.transmuxer) {
18886 finishLoading();
18887 return;
18888 }
18889
18890 workerCallback({
18891 action: 'pushMp4Captions',
18892 endAction: 'mp4Captions',
18893 transmuxer: segment.transmuxer,
18894 data: bytesAsUint8Array,
18895 timescales: segment.map.timescales,
18896 trackIds: [tracks.video.id],
18897 callback: function callback(message) {
18898 // transfer bytes back to us
18899 bytes = message.data.buffer;
18900 segment.bytes = bytesAsUint8Array = message.data;
18901 message.logs.forEach(function (log) {
18902 onTransmuxerLog(videojs__default["default"].mergeOptions(log, {
18903 stream: 'mp4CaptionParser'
18904 }));
18905 });
18906 finishLoading(message.captions);
18907 }
18908 });
18909 }
18910 });
18911 return;
18912 } // VTT or other segments that don't need processing
18913
18914
18915 if (!segment.transmuxer) {
18916 doneFn(null, segment, {});
18917 return;
18918 }
18919
18920 if (typeof segment.container === 'undefined') {
18921 segment.container = detectContainerForBytes(bytesAsUint8Array);
18922 }
18923
18924 if (segment.container !== 'ts' && segment.container !== 'aac') {
18925 trackInfoFn(segment, {
18926 hasAudio: false,
18927 hasVideo: false
18928 });
18929 doneFn(null, segment, {});
18930 return;
18931 } // ts or aac
18932
18933
18934 transmuxAndNotify({
18935 segment: segment,
18936 bytes: bytes,
18937 trackInfoFn: trackInfoFn,
18938 timingInfoFn: timingInfoFn,
18939 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
18940 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
18941 id3Fn: id3Fn,
18942 captionsFn: captionsFn,
18943 isEndOfTimeline: isEndOfTimeline,
18944 endedTimelineFn: endedTimelineFn,
18945 dataFn: dataFn,
18946 doneFn: doneFn,
18947 onTransmuxerLog: onTransmuxerLog
18948 });
18949 };
18950
18951 var decrypt = function decrypt(_ref7, callback) {
18952 var id = _ref7.id,
18953 key = _ref7.key,
18954 encryptedBytes = _ref7.encryptedBytes,
18955 decryptionWorker = _ref7.decryptionWorker;
18956
18957 var decryptionHandler = function decryptionHandler(event) {
18958 if (event.data.source === id) {
18959 decryptionWorker.removeEventListener('message', decryptionHandler);
18960 var decrypted = event.data.decrypted;
18961 callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
18962 }
18963 };
18964
18965 decryptionWorker.addEventListener('message', decryptionHandler);
18966 var keyBytes;
18967
18968 if (key.bytes.slice) {
18969 keyBytes = key.bytes.slice();
18970 } else {
18971 keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
18972 } // incrementally decrypt the bytes
18973
18974
18975 decryptionWorker.postMessage(createTransferableMessage({
18976 source: id,
18977 encrypted: encryptedBytes,
18978 key: keyBytes,
18979 iv: key.iv
18980 }), [encryptedBytes.buffer, keyBytes.buffer]);
18981 };
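// Illustrative only: the request/reply contract with the decryption worker
// used above, with hypothetical values. The reply's `source` must match the
// request's id for decryptionHandler to fire:
//
//   decryptionWorker.postMessage({
//     source: 'segment-1',
//     encrypted: encryptedBytes, // Uint8Array view info, made transferable
//     key: keyBytes,             // Uint32Array of four words
//     iv: ivBytes
//   }, [encryptedBytes.buffer, keyBytes.buffer]);
//   // expected reply: { source: 'segment-1',
//   //                   decrypted: { bytes: ArrayBuffer, byteOffset: 0, byteLength: n } }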
18982 /**
18983 * Decrypt the segment via the decryption web worker
18984 *
18985 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
18986 * routines
18987 * @param {Object} segment - a simplified copy of the segmentInfo object
18988 * from SegmentLoader
18989 * @param {Function} trackInfoFn - a callback that receives track info
18990 * @param {Function} timingInfoFn - a callback that receives timing info
18991 * @param {Function} videoSegmentTimingInfoFn
18992 * a callback that receives video timing info based on media times and
18993 * any adjustments made by the transmuxer
18994 * @param {Function} audioSegmentTimingInfoFn
18995 * a callback that receives audio timing info based on media times and
18996 * any adjustments made by the transmuxer
18997 * @param {boolean} isEndOfTimeline
18998 * true if this segment represents the last segment in a timeline
18999 * @param {Function} endedTimelineFn
19000 * a callback made when a timeline is ended, will only be called if
19001 * isEndOfTimeline is true
19002 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19003 * and ready to use
19004 * @param {Function} doneFn - a callback that is executed after decryption has completed
19005 */
19006
19007
19008 var decryptSegment = function decryptSegment(_ref8) {
19009 var decryptionWorker = _ref8.decryptionWorker,
19010 segment = _ref8.segment,
19011 trackInfoFn = _ref8.trackInfoFn,
19012 timingInfoFn = _ref8.timingInfoFn,
19013 videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
19014 audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
19015 id3Fn = _ref8.id3Fn,
19016 captionsFn = _ref8.captionsFn,
19017 isEndOfTimeline = _ref8.isEndOfTimeline,
19018 endedTimelineFn = _ref8.endedTimelineFn,
19019 dataFn = _ref8.dataFn,
19020 doneFn = _ref8.doneFn,
19021 onTransmuxerLog = _ref8.onTransmuxerLog;
19022 decrypt({
19023 id: segment.requestId,
19024 key: segment.key,
19025 encryptedBytes: segment.encryptedBytes,
19026 decryptionWorker: decryptionWorker
19027 }, function (decryptedBytes) {
19028 segment.bytes = decryptedBytes;
19029 handleSegmentBytes({
19030 segment: segment,
19031 bytes: segment.bytes,
19032 trackInfoFn: trackInfoFn,
19033 timingInfoFn: timingInfoFn,
19034 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19035 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19036 id3Fn: id3Fn,
19037 captionsFn: captionsFn,
19038 isEndOfTimeline: isEndOfTimeline,
19039 endedTimelineFn: endedTimelineFn,
19040 dataFn: dataFn,
19041 doneFn: doneFn,
19042 onTransmuxerLog: onTransmuxerLog
19043 });
19044 });
19045 };
19046 /**
19047 * This function waits for all XHRs to finish (with either success or failure)
19048 * before continuing processing via its callback. The function gathers errors
19049 * from each request into a single errors array so that the error status for
19050 * each request can be examined later.
19051 *
19052 * @param {Object} activeXhrs - an object that tracks all XHR requests
19053 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
19054 * routines
19055 * @param {Function} trackInfoFn - a callback that receives track info
19056 * @param {Function} timingInfoFn - a callback that receives timing info
19057 * @param {Function} videoSegmentTimingInfoFn
19058 * a callback that receives video timing info based on media times and
19059 * any adjustments made by the transmuxer
19060 * @param {Function} audioSegmentTimingInfoFn
19061 * a callback that receives audio timing info based on media times and
19062 * any adjustments made by the transmuxer
19063 * @param {Function} id3Fn - a callback that receives ID3 metadata
19064 * @param {Function} captionsFn - a callback that receives captions
19065 * @param {boolean} isEndOfTimeline
19066 * true if this segment represents the last segment in a timeline
19067 * @param {Function} endedTimelineFn
19068 * a callback made when a timeline is ended, will only be called if
19069 * isEndOfTimeline is true
19070 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19071 * and ready to use
19072 * @param {Function} doneFn - a callback that is executed after all resources have been
19073 * downloaded and any decryption completed
19074 */
19075
19076
19077 var waitForCompletion = function waitForCompletion(_ref9) {
19078 var activeXhrs = _ref9.activeXhrs,
19079 decryptionWorker = _ref9.decryptionWorker,
19080 trackInfoFn = _ref9.trackInfoFn,
19081 timingInfoFn = _ref9.timingInfoFn,
19082 videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
19083 audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
19084 id3Fn = _ref9.id3Fn,
19085 captionsFn = _ref9.captionsFn,
19086 isEndOfTimeline = _ref9.isEndOfTimeline,
19087 endedTimelineFn = _ref9.endedTimelineFn,
19088 dataFn = _ref9.dataFn,
19089 doneFn = _ref9.doneFn,
19090 onTransmuxerLog = _ref9.onTransmuxerLog;
19091 var count = 0;
19092 var didError = false;
19093 return function (error, segment) {
19094 if (didError) {
19095 return;
19096 }
19097
19098 if (error) {
19099 didError = true; // If there are errors, we have to abort any outstanding requests
19100
19101 abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
19102 // handle the aborted events from those requests, there are some cases where we may
19103 // never get an aborted event. For instance, if the network connection is lost and
19104 // there were two requests, the first may have triggered an error immediately, while
19105 // the second request remains unsent. In that case, the aborted algorithm will not
19106 // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
19107 //
19108 // We also can't rely on the ready state of the XHR, since the request that
19109 // triggered the connection error may also show as a ready state of 0 (unsent).
19110 // Therefore, we have to finish this group of requests immediately after the first
19111 // seen error.
19112
19113 return doneFn(error, segment);
19114 }
19115
19116 count += 1;
19117
19118 if (count === activeXhrs.length) {
19119 var segmentFinish = function segmentFinish() {
19120 if (segment.encryptedBytes) {
19121 return decryptSegment({
19122 decryptionWorker: decryptionWorker,
19123 segment: segment,
19124 trackInfoFn: trackInfoFn,
19125 timingInfoFn: timingInfoFn,
19126 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19127 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19128 id3Fn: id3Fn,
19129 captionsFn: captionsFn,
19130 isEndOfTimeline: isEndOfTimeline,
19131 endedTimelineFn: endedTimelineFn,
19132 dataFn: dataFn,
19133 doneFn: doneFn,
19134 onTransmuxerLog: onTransmuxerLog
19135 });
19136 } // Otherwise, everything is ready, just continue
19137
19138
19139 handleSegmentBytes({
19140 segment: segment,
19141 bytes: segment.bytes,
19142 trackInfoFn: trackInfoFn,
19143 timingInfoFn: timingInfoFn,
19144 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19145 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19146 id3Fn: id3Fn,
19147 captionsFn: captionsFn,
19148 isEndOfTimeline: isEndOfTimeline,
19149 endedTimelineFn: endedTimelineFn,
19150 dataFn: dataFn,
19151 doneFn: doneFn,
19152 onTransmuxerLog: onTransmuxerLog
19153 });
19154 }; // Keep track of when *all* of the requests have completed
19155
19156
19157 segment.endOfAllRequests = Date.now();
19158
19159 if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
19160 return decrypt({
19161 decryptionWorker: decryptionWorker,
19162 // add -init to the "id" to differentiate between segment
19163 // and init segment decryption, just in case they happen
19164 // at the same time at some point in the future.
19165 id: segment.requestId + '-init',
19166 encryptedBytes: segment.map.encryptedBytes,
19167 key: segment.map.key
19168 }, function (decryptedBytes) {
19169 segment.map.bytes = decryptedBytes;
19170 parseInitSegment(segment, function (parseError) {
19171 if (parseError) {
19172 abortAll(activeXhrs);
19173 return doneFn(parseError, segment);
19174 }
19175
19176 segmentFinish();
19177 });
19178 });
19179 }
19180
19181 segmentFinish();
19182 }
19183 };
19184 };
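
// Illustrative sketch (not part of the library's API; never invoked): the
// shape of a `waitForCompletion` call. The no-op callbacks below are
// hypothetical stand-ins for the segment loader's real callbacks. The
// returned `finishProcessingFn` is shared by every request in the batch: the
// first error aborts the whole batch and calls `doneFn` exactly once, while
// the success path only continues after `count` reaches `activeXhrs.length`.
var exampleWaitForCompletionWiring = function (decryptionWorker, doneFn) {
  var activeXhrs = []; // would be populated with the batch's in-flight XHRs
  var finishProcessingFn = waitForCompletion({
    activeXhrs: activeXhrs,
    decryptionWorker: decryptionWorker,
    trackInfoFn: function () {},
    timingInfoFn: function () {},
    videoSegmentTimingInfoFn: function () {},
    audioSegmentTimingInfoFn: function () {},
    id3Fn: function () {},
    captionsFn: function () {},
    isEndOfTimeline: false,
    endedTimelineFn: function () {},
    dataFn: function () {},
    doneFn: doneFn,
    onTransmuxerLog: function () {}
  }); // each request's response handler calls finishProcessingFn(error, segment)

  return finishProcessingFn;
};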
19185 /**
19186 * Calls the abort callback if any request within the batch was aborted. Will only call
19187 * the callback once per batch of requests, even if multiple were aborted.
19188 *
19189 * @param {Object} loadendState - state to check to see if the abort function was called
19190 * @param {Function} abortFn - callback to call for abort
19191 */
19192
19193
19194 var handleLoadEnd = function handleLoadEnd(_ref10) {
19195 var loadendState = _ref10.loadendState,
19196 abortFn = _ref10.abortFn;
19197 return function (event) {
19198 var request = event.target;
19199
19200 if (request.aborted && abortFn && !loadendState.calledAbortFn) {
19201 abortFn();
19202 loadendState.calledAbortFn = true;
19203 }
19204 };
19205 };
19206 /**
19207 * Simple progress event callback handler that gathers some stats before
19208 * executing a provided callback with the `segment` object
19209 *
19210 * @param {Object} segment - a simplified copy of the segmentInfo object
19211 * from SegmentLoader
19212 * @param {Function} progressFn - a callback that is executed each time a progress event
19213 * is received
19214 * @param {Function} trackInfoFn - a callback that receives track info
19215 * @param {Function} timingInfoFn - a callback that receives timing info
19216 * @param {Function} videoSegmentTimingInfoFn
19217 * a callback that receives video timing info based on media times and
19218 * any adjustments made by the transmuxer
19219 * @param {Function} audioSegmentTimingInfoFn
19220 * a callback that receives audio timing info based on media times and
19221 * any adjustments made by the transmuxer
19222 * @param {boolean} isEndOfTimeline
19223 * true if this segment represents the last segment in a timeline
19224 * @param {Function} endedTimelineFn
19225 * a callback made when a timeline is ended, will only be called if
19226 * isEndOfTimeline is true
19227 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19228 * and ready to use
19229 * @param {Event} event - the progress event object from XMLHttpRequest
19230 */
19231
19232
19233 var handleProgress = function handleProgress(_ref11) {
19234 var segment = _ref11.segment,
19235 progressFn = _ref11.progressFn;
19236 _ref11.trackInfoFn;
19237 _ref11.timingInfoFn;
19238 _ref11.videoSegmentTimingInfoFn;
19239 _ref11.audioSegmentTimingInfoFn;
19240 _ref11.id3Fn;
19241 _ref11.captionsFn;
19242 _ref11.isEndOfTimeline;
19243 _ref11.endedTimelineFn;
19244 _ref11.dataFn;
19245 return function (event) {
19246 var request = event.target;
19247
19248 if (request.aborted) {
19249 return;
19250 }
19251
19252 segment.stats = videojs__default["default"].mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
19253
19254 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
19255 segment.stats.firstBytesReceivedAt = Date.now();
19256 }
19257
19258 return progressFn(event, segment);
19259 };
19260 };
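
// Illustrative sketch (not part of the library's API; the request and segment
// are hypothetical): `handleProgress` wraps a caller's `progressFn` so that
// per-request stats, including `firstBytesReceivedAt`, are merged onto the
// shared `segment` object before the callback fires.
var exampleProgressWiring = function (segmentXhr, segment) {
  segmentXhr.addEventListener('progress', handleProgress({
    segment: segment,
    progressFn: function (event, seg) {
      // by this point seg.stats.bytesReceived and
      // seg.stats.firstBytesReceivedAt have been populated by the wrapper
    }
  }));
};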
19261 /**
19262 * Loads all resources and does any processing necessary for a media-segment
19263 *
19264 * Features:
19265 * decrypts the media-segment if it has a key uri and an iv
19266 * aborts *all* requests if *any* one request fails
19267 *
19268 * The segment object, at minimum, has the following format:
19269 * {
19270 * resolvedUri: String,
19271 * [transmuxer]: Object,
19272 * [byterange]: {
19273 * offset: Number,
19274 * length: Number
19275 * },
19276 * [key]: {
19277 * resolvedUri: String
19278 * [byterange]: {
19279 * offset: Number,
19280 * length: Number
19281 * },
19282 * iv: {
19283 * bytes: Uint32Array
19284 * }
19285 * },
19286 * [map]: {
19287 * resolvedUri: String,
19288 * [byterange]: {
19289 * offset: Number,
19290 * length: Number
19291 * },
19292 * [bytes]: Uint8Array
19293 * }
19294 * }
19295 * ...where [name] denotes optional properties
19296 *
19297 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
19298 * @param {Object} xhrOptions - the base options to provide to all xhr requests
19299 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
19300 * decryption routines
19301 * @param {Object} segment - a simplified copy of the segmentInfo object
19302 * from SegmentLoader
19303 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
19304 * aborted
19305 * @param {Function} progressFn - a callback that receives progress events from the main
19306 * segment's xhr request
19307 * @param {Function} trackInfoFn - a callback that receives track info
19308 * @param {Function} timingInfoFn - a callback that receives timing info
19309 * @param {Function} videoSegmentTimingInfoFn
19310 * a callback that receives video timing info based on media times and
19311 * any adjustments made by the transmuxer
19312 * @param {Function} audioSegmentTimingInfoFn
19313 * a callback that receives audio timing info based on media times and
19314 * any adjustments made by the transmuxer
19315 * @param {Function} id3Fn - a callback that receives ID3 metadata
19316 * @param {Function} captionsFn - a callback that receives captions
19317 * @param {boolean} isEndOfTimeline
19318 * true if this segment represents the last segment in a timeline
19319 * @param {Function} endedTimelineFn
19320 * a callback made when a timeline is ended, will only be called if
19321 * isEndOfTimeline is true
19322 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
19323 * request, transmuxed if needed
19324 * @param {Function} doneFn - a callback that is executed only once all requests have
19325 * succeeded or failed
19326 * @return {Function} a function that, when invoked, immediately aborts all
19327 * outstanding requests
19328 */
19329
19330
19331 var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
19332 var xhr = _ref12.xhr,
19333 xhrOptions = _ref12.xhrOptions,
19334 decryptionWorker = _ref12.decryptionWorker,
19335 segment = _ref12.segment,
19336 abortFn = _ref12.abortFn,
19337 progressFn = _ref12.progressFn,
19338 trackInfoFn = _ref12.trackInfoFn,
19339 timingInfoFn = _ref12.timingInfoFn,
19340 videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
19341 audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
19342 id3Fn = _ref12.id3Fn,
19343 captionsFn = _ref12.captionsFn,
19344 isEndOfTimeline = _ref12.isEndOfTimeline,
19345 endedTimelineFn = _ref12.endedTimelineFn,
19346 dataFn = _ref12.dataFn,
19347 doneFn = _ref12.doneFn,
19348 onTransmuxerLog = _ref12.onTransmuxerLog;
19349 var activeXhrs = [];
19350 var finishProcessingFn = waitForCompletion({
19351 activeXhrs: activeXhrs,
19352 decryptionWorker: decryptionWorker,
19353 trackInfoFn: trackInfoFn,
19354 timingInfoFn: timingInfoFn,
19355 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19356 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19357 id3Fn: id3Fn,
19358 captionsFn: captionsFn,
19359 isEndOfTimeline: isEndOfTimeline,
19360 endedTimelineFn: endedTimelineFn,
19361 dataFn: dataFn,
19362 doneFn: doneFn,
19363 onTransmuxerLog: onTransmuxerLog
19364 }); // optionally, request the decryption key
19365
19366 if (segment.key && !segment.key.bytes) {
19367 var objects = [segment.key];
19368
19369 if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
19370 objects.push(segment.map.key);
19371 }
19372
19373 var keyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
19374 uri: segment.key.resolvedUri,
19375 responseType: 'arraybuffer'
19376 });
19377 var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
19378 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
19379 activeXhrs.push(keyXhr);
19380 } // optionally, request the associated media init segment
19381
19382
19383 if (segment.map && !segment.map.bytes) {
19384 var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);
19385
19386 if (differentMapKey) {
19387 var mapKeyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
19388 uri: segment.map.key.resolvedUri,
19389 responseType: 'arraybuffer'
19390 });
19391 var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
19392 var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
19393 activeXhrs.push(mapKeyXhr);
19394 }
19395
19396 var initSegmentOptions = videojs__default["default"].mergeOptions(xhrOptions, {
19397 uri: segment.map.resolvedUri,
19398 responseType: 'arraybuffer',
19399 headers: segmentXhrHeaders(segment.map)
19400 });
19401 var initSegmentRequestCallback = handleInitSegmentResponse({
19402 segment: segment,
19403 finishProcessingFn: finishProcessingFn
19404 });
19405 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
19406 activeXhrs.push(initSegmentXhr);
19407 }
19408
19409 var segmentRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
19410 uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
19411 responseType: 'arraybuffer',
19412 headers: segmentXhrHeaders(segment)
19413 });
19414 var segmentRequestCallback = handleSegmentResponse({
19415 segment: segment,
19416 finishProcessingFn: finishProcessingFn,
19417 responseType: segmentRequestOptions.responseType
19418 });
19419 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
19420 segmentXhr.addEventListener('progress', handleProgress({
19421 segment: segment,
19422 progressFn: progressFn,
19423 trackInfoFn: trackInfoFn,
19424 timingInfoFn: timingInfoFn,
19425 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19426 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19427 id3Fn: id3Fn,
19428 captionsFn: captionsFn,
19429 isEndOfTimeline: isEndOfTimeline,
19430 endedTimelineFn: endedTimelineFn,
19431 dataFn: dataFn
19432 }));
19433 activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but callbacks should not
19434 // be made multiple times, provide a shared state object
19435
19436 var loadendState = {};
19437 activeXhrs.forEach(function (activeXhr) {
19438 activeXhr.addEventListener('loadend', handleLoadEnd({
19439 loadendState: loadendState,
19440 abortFn: abortFn
19441 }));
19442 });
19443 return function () {
19444 return abortAll(activeXhrs);
19445 };
19446 };
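
// Illustrative sketch (not part of the library's API; never invoked): a
// minimal `mediaSegmentRequest` call using the segment shape documented
// above. The URI, worker, and callbacks are hypothetical; calling the
// returned function aborts the key, init-segment, and media-segment XHRs
// together.
var exampleMediaSegmentRequest = function (xhrWrapper, decryptionWorker) {
  var abort = mediaSegmentRequest({
    xhr: xhrWrapper,
    xhrOptions: { timeout: 45 * 1000 },
    decryptionWorker: decryptionWorker,
    segment: { resolvedUri: 'https://example.com/segment-0.ts' },
    abortFn: function () {},
    progressFn: function () {},
    trackInfoFn: function () {},
    timingInfoFn: function () {},
    videoSegmentTimingInfoFn: function () {},
    audioSegmentTimingInfoFn: function () {},
    id3Fn: function () {},
    captionsFn: function () {},
    isEndOfTimeline: false,
    endedTimelineFn: function () {},
    dataFn: function () {},
    doneFn: function (error, segment) {},
    onTransmuxerLog: function () {}
  });
  return abort;
};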
19447
19448 /**
19449 * @file - codecs.js - Handles tasks regarding codec strings, such as translating
19450 * playlist attributes to codec strings or parsing codec strings into objects that can be examined.
19451 */
19452 var logFn$1 = logger('CodecUtils');
19453 /**
19454 * Returns a set of codec strings parsed from the playlist, or undefined
19455 * if no codecs were specified in the playlist
19456 *
19457 * @param {Playlist} media the current media playlist
19458 * @return {Object} an object with the video and audio codecs
19459 */
19460
19461 var getCodecs = function getCodecs(media) {
19462 // if the codecs were explicitly specified, use them instead of the
19463 // defaults
19464 var mediaAttributes = media.attributes || {};
19465
19466 if (mediaAttributes.CODECS) {
19467 return parseCodecs(mediaAttributes.CODECS);
19468 }
19469 };
19470
19471 var isMaat = function isMaat(master, media) {
19472 var mediaAttributes = media.attributes || {};
19473 return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
19474 };
19475 var isMuxed = function isMuxed(master, media) {
19476 if (!isMaat(master, media)) {
19477 return true;
19478 }
19479
19480 var mediaAttributes = media.attributes || {};
19481 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
19482
19483 for (var groupId in audioGroup) {
19484 // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
19485 // or there are listed playlists (the case for DASH, as the manifest will have already
19486 // provided all of the details necessary to generate the audio playlist, as opposed to
19487 // HLS' externally requested playlists), then the content is demuxed.
19488 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
19489 return true;
19490 }
19491 }
19492
19493 return false;
19494 };
19495 var unwrapCodecList = function unwrapCodecList(codecList) {
19496 var codecs = {};
19497 codecList.forEach(function (_ref) {
19498 var mediaType = _ref.mediaType,
19499 type = _ref.type,
19500 details = _ref.details;
19501 codecs[mediaType] = codecs[mediaType] || [];
19502 codecs[mediaType].push(translateLegacyCodec("" + type + details));
19503 });
19504 Object.keys(codecs).forEach(function (mediaType) {
19505 if (codecs[mediaType].length > 1) {
19506 logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
19507 codecs[mediaType] = null;
19508 return;
19509 }
19510
19511 codecs[mediaType] = codecs[mediaType][0];
19512 });
19513 return codecs;
19514 };
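
// Illustrative example (not part of the library's API; never invoked): the
// input below uses the parsed-codec shape ({ mediaType, type, details }) that
// parseCodecs produces; the codec values themselves are hypothetical.
var exampleUnwrapCodecList = function () {
  // => { video: 'avc1.64001f', audio: 'mp4a.40.2' }
  // Had two codecs shared a mediaType, that mediaType would instead be set
  // to null so that mux.js segment probing determines the real codec.
  return unwrapCodecList([
    { mediaType: 'video', type: 'avc1', details: '.64001f' },
    { mediaType: 'audio', type: 'mp4a', details: '.40.2' }
  ]);
};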
19515 var codecCount = function codecCount(codecObj) {
19516 var count = 0;
19517
19518 if (codecObj.audio) {
19519 count++;
19520 }
19521
19522 if (codecObj.video) {
19523 count++;
19524 }
19525
19526 return count;
19527 };
19528 /**
19529 * Calculates the codec strings for a working configuration of
19530 * SourceBuffers to play variant streams in a master playlist. If
19531 * there is no possible working configuration, an empty object will be
19532 * returned.
19533 *
19534 * @param {Object} master the m3u8 object for the master playlist
19535 * @param {Object} media the m3u8 object for the variant playlist
19536 * @return {Object} the codec strings.
19537 *
19538 * @private
19539 */
19540
19541 var codecsForPlaylist = function codecsForPlaylist(master, media) {
19542 var mediaAttributes = media.attributes || {};
19543 var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
19544 // Put another way, there is no way to have a video-only multiple-audio HLS!
19545
19546 if (isMaat(master, media) && !codecInfo.audio) {
19547 if (!isMuxed(master, media)) {
19548 // It is possible for codecs to be specified on the audio media group playlist but
19549 // not on the rendition playlist. This is mostly the case for DASH, where audio and
19550 // video are always separate (and separately specified).
19551 var defaultCodecs = unwrapCodecList(codecsFromDefault(master, mediaAttributes.AUDIO) || []);
19552
19553 if (defaultCodecs.audio) {
19554 codecInfo.audio = defaultCodecs.audio;
19555 }
19556 }
19557 }
19558
19559 return codecInfo;
19560 };
19561
19562 var logFn = logger('PlaylistSelector');
19563
19564 var representationToString = function representationToString(representation) {
19565 if (!representation || !representation.playlist) {
19566 return;
19567 }
19568
19569 var playlist = representation.playlist;
19570 return JSON.stringify({
19571 id: playlist.id,
19572 bandwidth: representation.bandwidth,
19573 width: representation.width,
19574 height: representation.height,
19575 codecs: playlist.attributes && playlist.attributes.CODECS || ''
19576 });
19577 }; // Utilities
19578
19579 /**
19580 * Returns the CSS value for the specified property on an element
19581 * using `getComputedStyle`. Firefox has a long-standing issue where
19582 * getComputedStyle() may return null when running in an iframe with
19583 * `display: none`.
19584 *
19585 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
19586 * @param {HTMLElement} el the HTMLElement to work on
19587 * @param {string} property the property to get the style for
19588 */
19589
19590
19591 var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
19592 if (!el) {
19593 return '';
19594 }
19595
19596 var result = window.getComputedStyle(el);
19597
19598 if (!result) {
19599 return '';
19600 }
19601
19602 return result[property];
19603 };
19604 /**
19605 * Reusable stable sort function
19606 *
19607 * @param {Playlists} array
19608 * @param {Function} sortFn Different comparators
19609 * @function stableSort
19610 */
19611
19612
19613 var stableSort = function stableSort(array, sortFn) {
19614 var newArray = array.slice();
19615 array.sort(function (left, right) {
19616 var cmp = sortFn(left, right);
19617
19618 if (cmp === 0) {
19619 return newArray.indexOf(left) - newArray.indexOf(right);
19620 }
19621
19622 return cmp;
19623 });
19624 };
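
// Illustrative example (not part of the library's API; never invoked;
// hypothetical data): `stableSort` sorts in place while preserving the
// relative order of entries the comparator ranks as equal, which plain
// Array#sort does not guarantee on older engines.
var exampleStableSort = function () {
  var reps = [
    { id: 'a', bandwidth: 1e6 },
    { id: 'b', bandwidth: 1e6 },
    { id: 'c', bandwidth: 5e5 }
  ];
  stableSort(reps, function (left, right) {
    return left.bandwidth - right.bandwidth;
  });
  // reps is now c, a, b: 'a' still precedes 'b' because the comparator
  // returned 0 for them and their original indices break the tie
  return reps;
};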
19625 /**
19626 * A comparator function to sort two playlist objects by bandwidth.
19627 *
19628 * @param {Object} left a media playlist object
19629 * @param {Object} right a media playlist object
19630 * @return {number} Greater than zero if the bandwidth attribute of
19631 * left is greater than the corresponding attribute of right. Less
19632 * than zero if the bandwidth of right is greater than left and
19633 * exactly zero if the two are equal.
19634 */
19635
19636
19637 var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
19638 var leftBandwidth;
19639 var rightBandwidth;
19640
19641 if (left.attributes.BANDWIDTH) {
19642 leftBandwidth = left.attributes.BANDWIDTH;
19643 }
19644
19645 leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;
19646
19647 if (right.attributes.BANDWIDTH) {
19648 rightBandwidth = right.attributes.BANDWIDTH;
19649 }
19650
19651 rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;
19652 return leftBandwidth - rightBandwidth;
19653 };
19654 /**
19655 * A comparator function to sort two playlist objects by resolution (width).
19656 *
19657 * @param {Object} left a media playlist object
19658 * @param {Object} right a media playlist object
19659 * @return {number} Greater than zero if the resolution.width attribute of
19660 * left is greater than the corresponding attribute of right. Less
19661 * than zero if the resolution.width of right is greater than left and
19662 * exactly zero if the two are equal.
19663 */
19664
19665 var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
19666 var leftWidth;
19667 var rightWidth;
19668
19669 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
19670 leftWidth = left.attributes.RESOLUTION.width;
19671 }
19672
19673 leftWidth = leftWidth || window.Number.MAX_VALUE;
19674
19675 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
19676 rightWidth = right.attributes.RESOLUTION.width;
19677 }
19678
19679 rightWidth = rightWidth || window.Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
19680 // have the same media dimensions / resolution
19681
19682 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
19683 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
19684 }
19685
19686 return leftWidth - rightWidth;
19687 };
19688 /**
19689 * Chooses the appropriate media playlist based on bandwidth and player size
19690 *
19691 * @param {Object} master
19692 * Object representation of the master manifest
19693 * @param {number} playerBandwidth
19694 * Current calculated bandwidth of the player
19695 * @param {number} playerWidth
19696 * Current width of the player element (should account for the device pixel ratio)
19697 * @param {number} playerHeight
19698 * Current height of the player element (should account for the device pixel ratio)
19699 * @param {boolean} limitRenditionByPlayerDimensions
19700 * True if the player width and height should be used during the selection, false otherwise
19701 * @param {Object} masterPlaylistController
19702 * the current masterPlaylistController object
19703 * @return {Playlist} the highest bitrate playlist less than the
19704 * currently detected bandwidth, accounting for some amount of
19705 * bandwidth variance
19706 */
19707
19708 var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
19709 // If we end up getting called before `master` is available, exit early
19710 if (!master) {
19711 return;
19712 }
19713
19714 var options = {
19715 bandwidth: playerBandwidth,
19716 width: playerWidth,
19717 height: playerHeight,
19718 limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
19719 };
19720 var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.
19721
19722 if (Playlist.isAudioOnly(master)) {
19723 playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
19724 // at the bottom of this function for debugging.
19725
19726 options.audioOnly = true;
19727 } // convert the playlists to an intermediary representation to make comparisons easier
19728
19729
19730 var sortedPlaylistReps = playlists.map(function (playlist) {
19731 var bandwidth;
19732 var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
19733 var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
19734 bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
19735 bandwidth = bandwidth || window.Number.MAX_VALUE;
19736 return {
19737 bandwidth: bandwidth,
19738 width: width,
19739 height: height,
19740 playlist: playlist
19741 };
19742 });
19743 stableSort(sortedPlaylistReps, function (left, right) {
19744 return left.bandwidth - right.bandwidth;
19745 }); // filter out any playlists that have been excluded due to
19746 // incompatible configurations
19747
19748 sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
19749 return !Playlist.isIncompatible(rep.playlist);
19750 }); // filter out any playlists that have been disabled manually through the representations
19751 // api or blacklisted temporarily due to playback errors.
19752
19753 var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
19754 return Playlist.isEnabled(rep.playlist);
19755 });
19756
19757 if (!enabledPlaylistReps.length) {
19758 // if there are no enabled playlists, then they have all been blacklisted or disabled
19759 // by the user through the representations api. In this case, ignore blacklisting and
19760 // fall back to what the user wants by using playlists the user has not disabled.
19761 enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
19762 return !Playlist.isDisabled(rep.playlist);
19763 });
19764 } // filter out any variant that has greater effective bitrate
19765 // than the current estimated bandwidth
19766
19767
19768 var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
19769 return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
19770 });
19771 var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
19772 // and then take the very first element
19773
19774 var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
19775 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
19776 })[0]; // if we're not going to limit renditions by player size, make an early decision.
19777
19778 if (limitRenditionByPlayerDimensions === false) {
19779 var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
19780
19781 if (_chosenRep && _chosenRep.playlist) {
19782 var type = 'sortedPlaylistReps';
19783
19784 if (bandwidthBestRep) {
19785 type = 'bandwidthBestRep';
19786 }
19787
19788 if (enabledPlaylistReps[0]) {
19789 type = 'enabledPlaylistReps';
19790 }
19791
19792 logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
19793 return _chosenRep.playlist;
19794 }
19795
19796 logFn('could not choose a playlist with options', options);
19797 return null;
19798 } // filter out playlists without resolution information
19799
19800
19801 var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
19802 return rep.width && rep.height;
19803 }); // sort variants by resolution
19804
19805 stableSort(haveResolution, function (left, right) {
19806 return left.width - right.width;
19807 }); // if we have the exact resolution as the player, use it
19808
19809 var resolutionBestRepList = haveResolution.filter(function (rep) {
19810 return rep.width === playerWidth && rep.height === playerHeight;
19811 });
19812 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
19813
19814 var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
19815 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
19816 })[0];
19817 var resolutionPlusOneList;
19818 var resolutionPlusOneSmallest;
19819 var resolutionPlusOneRep; // find the smallest variant that is larger than the player
19820 // if there is no match of exact resolution
19821
19822 if (!resolutionBestRep) {
19823 resolutionPlusOneList = haveResolution.filter(function (rep) {
19824 return rep.width > playerWidth || rep.height > playerHeight;
19825 }); // find all the variants that have the same smallest resolution
19826
19827 resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
19828 return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
19829 }); // ensure that we also pick the highest bandwidth variant that
19830 // is just-larger-than the video player
19831
19832 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
19833 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
19834 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
19835 })[0];
19836 }
19837
19838 var leastPixelDiffRep; // If this selector proves to be better than others,
19839 // resolutionPlusOneRep and resolutionBestRep and all
19840 // the code involving them should be removed.
19841
19842 if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
19843 // find the variant that is closest to the player's pixel size
19844 var leastPixelDiffList = haveResolution.map(function (rep) {
19845 rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
19846 return rep;
19847 }); // get the highest bandwidth, closest resolution playlist
19848
19849 stableSort(leastPixelDiffList, function (left, right) {
19850 // sort by highest bandwidth if pixelDiff is the same
19851 if (left.pixelDiff === right.pixelDiff) {
19852 return right.bandwidth - left.bandwidth;
19853 }
19854
19855 return left.pixelDiff - right.pixelDiff;
19856 });
19857 leastPixelDiffRep = leastPixelDiffList[0];
19858 } // fallback chain of variants
19859
19860
19861 var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
19862
19863 if (chosenRep && chosenRep.playlist) {
19864 var _type = 'sortedPlaylistReps';
19865
19866 if (leastPixelDiffRep) {
19867 _type = 'leastPixelDiffRep';
19868 } else if (resolutionPlusOneRep) {
19869 _type = 'resolutionPlusOneRep';
19870 } else if (resolutionBestRep) {
19871 _type = 'resolutionBestRep';
19872 } else if (bandwidthBestRep) {
19873 _type = 'bandwidthBestRep';
19874 } else if (enabledPlaylistReps[0]) {
19875 _type = 'enabledPlaylistReps';
19876 }
19877
19878 logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
19879 return chosenRep.playlist;
19880 }
19881
19882 logFn('could not choose a playlist with options', options);
19883 return null;
19884 };
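
// Illustrative sketch (not part of the library's API; never invoked): how a
// selector might call `simpleSelector`. The numbers are hypothetical; in the
// real selectors below they come from `systemBandwidth` and the tech
// element's computed style, multiplied by the device pixel ratio.
var exampleSimpleSelectorCall = function (master, masterPlaylistController) {
  return simpleSelector(
    master,
    4194304, // current bandwidth estimate, in bits per second
    1280,    // player width (already multiplied by devicePixelRatio)
    720,     // player height (already multiplied by devicePixelRatio)
    true,    // limit rendition choices by player dimensions
    masterPlaylistController
  );
};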
19885
19886 /**
19887 * Chooses the appropriate media playlist based on the most recent
19888 * bandwidth estimate and the player size.
19889 *
19890 * Expects to be called within the context of an instance of VhsHandler
19891 *
19892 * @return {Playlist} the highest bitrate playlist less than the
19893 * currently detected bandwidth, accounting for some amount of
19894 * bandwidth variance
19895 */
19896
19897 var lastBandwidthSelector = function lastBandwidthSelector() {
19898 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
19899 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
19900 };
19901 /**
19902 * Chooses the appropriate media playlist based on an
19903 * exponential-weighted moving average of the bandwidth after
19904 * filtering for player size.
19905 *
19906 * Expects to be called within the context of an instance of VhsHandler
19907 *
19908 * @param {number} decay - a number between 0 and 1. Higher values of
19909 * this parameter will cause previous bandwidth estimates to lose
19910 * significance more quickly.
19911 * @return {Function} a function which can be invoked to create a new
19912 * playlist selector function.
19913 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
19914 */
19915
19916 var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
19917 var average = -1;
19918 var lastSystemBandwidth = -1;
19919
19920 if (decay < 0 || decay > 1) {
19921 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
19922 }
19923
19924 return function () {
19925 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
19926
19927 if (average < 0) {
19928 average = this.systemBandwidth;
19929 lastSystemBandwidth = this.systemBandwidth;
19930 } // stop the average value from decaying every 250ms
19931 // when the systemBandwidth is constant
19932 // and
19933 // stop the average from being set to a very low value when the
19934 // systemBandwidth becomes 0 in the case of chunk cancellation
19935
19936
19937 if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
19938 average = decay * this.systemBandwidth + (1 - decay) * average;
19939 lastSystemBandwidth = this.systemBandwidth;
19940 }
19941
19942 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
19943 };
19944 };
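
// Worked example of the decay arithmetic above (values hypothetical): with
// decay = 0.5, a previous average of 4e6 and a new systemBandwidth sample of
// 2e6, the new average is 0.5 * 2e6 + (1 - 0.5) * 4e6 = 3e6. Higher decay
// values weight the newest sample more heavily. Creating a selector is
// side-effect free; this instance is illustrative only:
var exampleMovingAverageSelector = movingAverageBandwidthSelector(0.5);
// exampleMovingAverageSelector would then be installed as a VhsHandler's
// playlist selector and invoked with `this` bound to that handler.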
19945 /**
19946 * Chooses the appropriate media playlist based on the potential to rebuffer
19947 *
19948 * @param {Object} settings
19949 * Object of information required to use this selector
19950 * @param {Object} settings.master
19951 * Object representation of the master manifest
19952 * @param {number} settings.currentTime
19953 * The current time of the player
19954 * @param {number} settings.bandwidth
19955 * Current measured bandwidth
19956 * @param {number} settings.duration
19957 * Duration of the media
19958 * @param {number} settings.segmentDuration
19959 * Segment duration to be used in round trip time calculations
19960 * @param {number} settings.timeUntilRebuffer
19961 * Time left in seconds until the player has to rebuffer
19962 * @param {number} settings.currentTimeline
19963 * The current timeline segments are being loaded from
19964 * @param {SyncController} settings.syncController
19965 * SyncController for determining if we have a sync point for a given playlist
19966 * @return {Object|null}
19967 * {Object} return.playlist
19968 * The highest bandwidth playlist with the least amount of rebuffering
19969 * {Number} return.rebufferingImpact
19970 * The amount of time in seconds switching to this playlist will rebuffer. A
19971 * negative value means that switching will cause zero rebuffering.
19972 */
19973
19974 var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
19975 var master = settings.master,
19976 currentTime = settings.currentTime,
19977 bandwidth = settings.bandwidth,
19978 duration = settings.duration,
19979 segmentDuration = settings.segmentDuration,
19980 timeUntilRebuffer = settings.timeUntilRebuffer,
19981 currentTimeline = settings.currentTimeline,
19982 syncController = settings.syncController; // filter out any playlists that have been excluded due to
19983 // incompatible configurations
19984
19985 var compatiblePlaylists = master.playlists.filter(function (playlist) {
19986 return !Playlist.isIncompatible(playlist);
19987 }); // filter out any playlists that have been disabled manually through the representations
19988 // api or blacklisted temporarily due to playback errors.
19989
19990 var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
19991
19992 if (!enabledPlaylists.length) {
19993 // if there are no enabled playlists, then they have all been blacklisted or disabled
19994 // by the user through the representations api. In this case, ignore blacklisting and
19995 // fall back to what the user wants by using playlists the user has not disabled.
19996 enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
19997 return !Playlist.isDisabled(playlist);
19998 });
19999 }
20000
20001 var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
20002 var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
20003 var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
20004 // sync request first. This will double the request time
20005
20006 var numRequests = syncPoint ? 1 : 2;
20007 var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
20008 var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
20009 return {
20010 playlist: playlist,
20011 rebufferingImpact: rebufferingImpact
20012 };
20013 });
20014 var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
20015 return estimate.rebufferingImpact <= 0;
20016 }); // Sort by bandwidth DESC
20017
20018 stableSort(noRebufferingPlaylists, function (a, b) {
20019 return comparePlaylistBandwidth(b.playlist, a.playlist);
20020 });
20021
20022 if (noRebufferingPlaylists.length) {
20023 return noRebufferingPlaylists[0];
20024 }
20025
20026 stableSort(rebufferingEstimates, function (a, b) {
20027 return a.rebufferingImpact - b.rebufferingImpact;
20028 });
20029 return rebufferingEstimates[0] || null;
20030 };
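
// Illustrative example (not part of the library's API; never invoked): the
// rebuffering math above, isolated. The values in the comment are
// hypothetical: with a 2s request time estimate, no sync point (so
// numRequests = 2) and 5s of buffer left, the impact is 2 * 2 - 5 = -1,
// i.e. switching is expected to cause no rebuffering.
var exampleRebufferingImpact = function (requestTimeEstimate, syncPoint, timeUntilRebuffer) {
  var numRequests = syncPoint ? 1 : 2; // a missing sync point doubles the requests

  return requestTimeEstimate * numRequests - timeUntilRebuffer;
};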
20031 /**
20032 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
20033 * one with video. If no renditions with video exist, return the lowest audio rendition.
20034 *
20035 * Expects to be called within the context of an instance of VhsHandler
20036 *
20037 * @return {Object|null}
20038 * {Object} return.playlist
20039 * The lowest bitrate playlist that contains a video codec. If no such rendition
20040 * exists pick the lowest audio rendition.
20041 */
20042
20043 var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
20044 var _this = this;
20045
20046 // filter out any playlists that have been excluded due to
20047 // incompatible configurations or playback errors
20048 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
20049
20050 stableSort(playlists, function (a, b) {
20051 return comparePlaylistBandwidth(a, b);
20052 }); // Parse and assume that playlists with no video codec have no video
20053 // (this is not necessarily true, although it is generally true).
20054 //
20055 // If an entire manifest has no valid videos everything will get filtered
20056 // out.
20057
20058 var playlistsWithVideo = playlists.filter(function (playlist) {
20059 return !!codecsForPlaylist(_this.playlists.master, playlist).video;
20060 });
20061 return playlistsWithVideo[0] || null;
20062 };
20063
20064 /**
20065 * Combine all segments into a single Uint8Array
20066 *
20067 * @param {Object} segmentObj
20068 * @return {Uint8Array} concatenated bytes
20069 * @private
20070 */
20071 var concatSegments = function concatSegments(segmentObj) {
20072 var offset = 0;
20073 var tempBuffer;
20074
20075 if (segmentObj.bytes) {
20076 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
20077
20078 segmentObj.segments.forEach(function (segment) {
20079 tempBuffer.set(segment, offset);
20080 offset += segment.byteLength;
20081 });
20082 }
20083
20084 return tempBuffer;
20085 };
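
// Illustrative example (not part of the library's API; never invoked):
// `bytes` is the total byte length and `segments` holds the individual
// Uint8Arrays to combine.
var exampleConcatSegments = function () {
  // => Uint8Array [1, 2, 3, 4, 5]
  return concatSegments({
    bytes: 5,
    segments: [new Uint8Array([1, 2]), new Uint8Array([3, 4, 5])]
  });
};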
20086
20087 /**
20088 * @file text-tracks.js
20089 */
20090 /**
20091 * Create captions text tracks on video.js if they do not exist
20092 *
20093 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20094 * @param {Object} tech the video.js tech
20095 * @param {Object} captionStream the caption stream to create
20096 * @private
20097 */
20098
20099 var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
20100 if (!inbandTextTracks[captionStream]) {
20101 tech.trigger({
20102 type: 'usage',
20103 name: 'vhs-608'
20104 });
20105 tech.trigger({
20106 type: 'usage',
20107 name: 'hls-608'
20108 });
20109 var instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them
20110
20111 if (/^cc708_/.test(captionStream)) {
20112 instreamId = 'SERVICE' + captionStream.split('_')[1];
20113 }
20114
20115 var track = tech.textTracks().getTrackById(instreamId);
20116
20117 if (track) {
20118 // Reuse an existing track with a CC# id because this was
20119 // very likely created by videojs-contrib-hls from information
20120 // in the m3u8 for us to use
20121 inbandTextTracks[captionStream] = track;
20122 } else {
20123 // This section gets called when we have caption services that aren't specified in the manifest.
20124 // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
20125 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
20126 var label = captionStream;
20127 var language = captionStream;
20128 var def = false;
20129 var captionService = captionServices[instreamId];
20130
20131 if (captionService) {
20132 label = captionService.label;
20133 language = captionService.language;
20134 def = captionService.default;
20135 } // Otherwise, create a track with the default `CC#` label and
20136 // without a language
20137
20138
20139 inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
20140 kind: 'captions',
20141 id: instreamId,
20142 // TODO: investigate why this doesn't seem to turn the caption on by default
20143 default: def,
20144 label: label,
20145 language: language
20146 }, false).track;
20147 }
20148 }
20149 };
20150 /**
20151 * Add caption text track data to a source handler given an array of captions
20152 *
20153 * @param {Object}
20154 * @param {Object} inbandTextTracks the inband text tracks
20155 * @param {number} timestampOffset the timestamp offset of the source buffer
20156 * @param {Array} captionArray an array of caption data
20157 * @private
20158 */
20159
20160 var addCaptionData = function addCaptionData(_ref) {
20161 var inbandTextTracks = _ref.inbandTextTracks,
20162 captionArray = _ref.captionArray,
20163 timestampOffset = _ref.timestampOffset;
20164
20165 if (!captionArray) {
20166 return;
20167 }
20168
20169 var Cue = window.WebKitDataCue || window.VTTCue;
20170 captionArray.forEach(function (caption) {
20171 var track = caption.stream;
20172 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
20173 });
20174 };
20175 /**
20176 * Define properties on a cue for backwards compatibility,
20177 * but warn the user that the way that they are using it
20178 * is deprecated and will be removed at a later date.
20179 *
20180 * @param {Cue} cue the cue to add the properties on
20181 * @private
20182 */
20183
20184 var deprecateOldCue = function deprecateOldCue(cue) {
20185 Object.defineProperties(cue.frame, {
20186 id: {
20187 get: function get() {
20188 videojs__default["default"].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
20189 return cue.value.key;
20190 }
20191 },
20192 value: {
20193 get: function get() {
20194 videojs__default["default"].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
20195 return cue.value.data;
20196 }
20197 },
20198 privateData: {
20199 get: function get() {
20200 videojs__default["default"].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
20201 return cue.value.data;
20202 }
20203 }
20204 });
20205 };
20206 /**
20207 * Add metadata text track data to a source handler given an array of metadata
20208 *
20209 * @param {Object}
20210 * @param {Object} inbandTextTracks the inband text tracks
20211 * @param {Array} metadataArray an array of metadata
20212 * @param {number} timestampOffset the timestamp offset of the source buffer
20213 * @param {number} videoDuration the duration of the video
20214 * @private
20215 */
20216
20217
20218 var addMetadata = function addMetadata(_ref2) {
20219 var inbandTextTracks = _ref2.inbandTextTracks,
20220 metadataArray = _ref2.metadataArray,
20221 timestampOffset = _ref2.timestampOffset,
20222 videoDuration = _ref2.videoDuration;
20223
20224 if (!metadataArray) {
20225 return;
20226 }
20227
20228 var Cue = window.WebKitDataCue || window.VTTCue;
20229 var metadataTrack = inbandTextTracks.metadataTrack_;
20230
20231 if (!metadataTrack) {
20232 return;
20233 }
20234
20235 metadataArray.forEach(function (metadata) {
20236 var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
20237 // ignore this bit of metadata.
20238 // This likely occurs when you have a non-timed ID3 tag like TIT2,
20239 // which is the "Title/Songname/Content description" frame
20240
20241 if (typeof time !== 'number' || window.isNaN(time) || time < 0 || !(time < Infinity)) {
20242 return;
20243 }
20244
20245 metadata.frames.forEach(function (frame) {
20246 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
20247 cue.frame = frame;
20248 cue.value = frame;
20249 deprecateOldCue(cue);
20250 metadataTrack.addCue(cue);
20251 });
20252 });
20253
20254 if (!metadataTrack.cues || !metadataTrack.cues.length) {
20255 return;
20256 } // Update the metadata cues so that
20257 // the endTime of each cue is the startTime of the next cue
20258 // the endTime of last cue is the duration of the video
20259
20260
20261 var cues = metadataTrack.cues;
20262 var cuesArray = []; // Create a copy of the TextTrackCueList...
20263 // ...disregarding cues with a falsey value
20264
20265 for (var i = 0; i < cues.length; i++) {
20266 if (cues[i]) {
20267 cuesArray.push(cues[i]);
20268 }
20269 } // Group cues by their startTime value
20270
20271
20272 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
20273 var timeSlot = obj[cue.startTime] || [];
20274 timeSlot.push(cue);
20275 obj[cue.startTime] = timeSlot;
20276 return obj;
20277 }, {}); // Sort startTimes by ascending order
20278
20279 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
20280 return Number(a) - Number(b);
20281 }); // Map each cue group's endTime to the next group's startTime
20282
20283 sortedStartTimes.forEach(function (startTime, idx) {
20284 var cueGroup = cuesGroupedByStartTime[startTime];
20285 var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime to the next group's startTime
20286
20287 cueGroup.forEach(function (cue) {
20288 cue.endTime = nextTime;
20289 });
20290 });
20291 };
20292 /**
20293 * Create metadata text track on video.js if it does not exist
20294 *
20295 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20296 * @param {string} dispatchType the inband metadata track dispatch type
20297 * @param {Object} tech the video.js tech
20298 * @private
20299 */
20300
20301 var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
20302 if (inbandTextTracks.metadataTrack_) {
20303 return;
20304 }
20305
20306 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
20307 kind: 'metadata',
20308 label: 'Timed Metadata'
20309 }, false).track;
20310 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
20311 };
20312 /**
20313 * Remove cues from a track on video.js.
20314 *
20315 * @param {Double} start start of where we should remove the cue
20316 * @param {Double} end end of where we should remove the cue
20317 * @param {Object} track the text track to remove the cues from
20318 * @private
20319 */
20320
20321 var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
20322 var i;
20323 var cue;
20324
20325 if (!track) {
20326 return;
20327 }
20328
20329 if (!track.cues) {
20330 return;
20331 }
20332
20333 i = track.cues.length;
20334
20335 while (i--) {
20336 cue = track.cues[i]; // Remove any cue within the provided start and end time
20337
20338 if (cue.startTime >= start && cue.endTime <= end) {
20339 track.removeCue(cue);
20340 }
20341 }
20342 };
20343 /**
20344 * Remove duplicate cues from a track on video.js (a cue is considered a
20345 * duplicate if it has the same time interval and text as another)
20346 *
20347 * @param {Object} track the text track to remove the duplicate cues from
20348 * @private
20349 */
20350
20351 var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
20352 var cues = track.cues;
20353
20354 if (!cues) {
20355 return;
20356 }
20357
20358 for (var i = 0; i < cues.length; i++) {
20359 var duplicates = [];
20360 var occurrences = 0;
20361
20362 for (var j = 0; j < cues.length; j++) {
20363 if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
20364 occurrences++;
20365
20366 if (occurrences > 1) {
20367 duplicates.push(cues[j]);
20368 }
20369 }
20370 }
20371
20372 if (duplicates.length) {
20373 duplicates.forEach(function (dupe) {
20374 return track.removeCue(dupe);
20375 });
20376 }
20377 }
20378 };
20379
20380 /**
20381 * mux.js
20382 *
20383 * Copyright (c) Brightcove
20384 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
20385 */
20386 var ONE_SECOND_IN_TS = 90000,
20387 // 90kHz clock
20388 secondsToVideoTs,
20389 secondsToAudioTs,
20390 videoTsToSeconds,
20391 audioTsToSeconds,
20392 audioTsToVideoTs,
20393 videoTsToAudioTs,
20394 metadataTsToSeconds;
20395
20396 secondsToVideoTs = function secondsToVideoTs(seconds) {
20397 return seconds * ONE_SECOND_IN_TS;
20398 };
20399
20400 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
20401 return seconds * sampleRate;
20402 };
20403
20404 videoTsToSeconds = function videoTsToSeconds(timestamp) {
20405 return timestamp / ONE_SECOND_IN_TS;
20406 };
20407
20408 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
20409 return timestamp / sampleRate;
20410 };
20411
20412 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
20413 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
20414 };
20415
20416 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
20417 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
20418 };
20419 /**
20420 * Adjust ID3 tag or caption timing information by the timeline pts values
20421 * (if keepOriginalTimestamps is false) and convert to seconds
20422 */
20423
20424
20425 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
20426 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
20427 };
20428
20429 var clock = {
20430 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,
20431 secondsToVideoTs: secondsToVideoTs,
20432 secondsToAudioTs: secondsToAudioTs,
20433 videoTsToSeconds: videoTsToSeconds,
20434 audioTsToSeconds: audioTsToSeconds,
20435 audioTsToVideoTs: audioTsToVideoTs,
20436 videoTsToAudioTs: videoTsToAudioTs,
20437 metadataTsToSeconds: metadataTsToSeconds
20438 };
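
// Illustrative example (not part of the library's API; never invoked):
// worked conversions on the 90kHz clock above.
var exampleClockConversions = function () {
  return {
    twoSecondsAsVideoTs: clock.secondsToVideoTs(2),       // 180000 = 2 * 90000
    halfSecondFromVideoTs: clock.videoTsToSeconds(45000), // 0.5
    oneSecondAsAudioTs: clock.secondsToAudioTs(1, 44100), // 44100 samples at 44.1kHz
    audioToVideoTs: clock.audioTsToVideoTs(44100, 44100)  // 90000, i.e. 1s of audio
  };
};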
20439
20440 /**
20441 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
20442 * front of current time.
20443 *
20444 * @param {Array} buffer
20445 * The current buffer of gop information
20446 * @param {number} currentTime
20447 * The current time
20448 * @param {Double} mapping
20449 * Offset to map display time to stream presentation time
20450 * @return {Array}
20451 * List of gops considered safe to append over
20452 */
20453
20454 var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
20455 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
20456 return [];
20457 } // pts value for current time + 3 seconds to give a bit more wiggle room
20458
20459
20460 var currentTimePts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);
20461 var i;
20462
20463 for (i = 0; i < buffer.length; i++) {
20464 if (buffer[i].pts > currentTimePts) {
20465 break;
20466 }
20467 }
20468
20469 return buffer.slice(i);
20470 };
20471 /**
20472 * Appends gop information (timing and byteLength) received by the transmuxer for the
20473 * gops appended in the last call to appendBuffer
20474 *
20475 * @param {Array} buffer
20476 * The current buffer of gop information
20477 * @param {Array} gops
20478 * List of new gop information
20479 * @param {boolean} replace
20480 * If true, replace the buffer with the new gop information. If false, append the
20481 * new gop information to the buffer at the correct position in time.
20482 * @return {Array}
20483 * Updated list of gop information
20484 */
20485
20486 var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
20487 if (!gops.length) {
20488 return buffer;
20489 }
20490
20491 if (replace) {
20492 // If we are in safe append mode, then completely overwrite the gop buffer
20493 // with the most recently appended data. This will make sure that when appending
20494 // future segments, we only try to align with gops that are both ahead of current
20495 // time and in the last segment appended.
20496 return gops.slice();
20497 }
20498
20499 var start = gops[0].pts;
20500 var i = 0;
20501
20502 for (i; i < buffer.length; i++) {
20503 if (buffer[i].pts >= start) {
20504 break;
20505 }
20506 }
20507
20508 return buffer.slice(0, i).concat(gops);
20509 };
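
// Illustrative example (not part of the library's API; never invoked; gop
// objects reduced to `pts` for brevity): with replace = false, buffered gops
// from the first one whose pts is >= the incoming range's start are dropped
// and the new gops are appended in their place.
var exampleUpdateGopBuffer = function () {
  var buffer = [{ pts: 0 }, { pts: 9000 }, { pts: 18000 }];
  // => [{ pts: 0 }, { pts: 9000 }, { pts: 13500 }, { pts: 22500 }]
  return updateGopBuffer(buffer, [{ pts: 13500 }, { pts: 22500 }], false);
};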
20510 /**
20511 * Removes gop information in buffer that overlaps with provided start and end
20512 *
20513 * @param {Array} buffer
20514 * The current buffer of gop information
20515 * @param {Double} start
20516 * position to start the remove at
20517 * @param {Double} end
20518 * position to end the remove at
20519 * @param {Double} mapping
20520 * Offset to map display time to stream presentation time
20521 */
20522
20523 var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
20524 var startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
20525 var endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
20526 var updatedBuffer = buffer.slice();
20527 var i = buffer.length;
20528
20529 while (i--) {
20530 if (buffer[i].pts <= endPts) {
20531 break;
20532 }
20533 }
20534
20535 if (i === -1) {
20536 // no removal because end of remove range is before start of buffer
20537 return updatedBuffer;
20538 }
20539
20540 var j = i + 1;
20541
20542 while (j--) {
20543 if (buffer[j].pts <= startPts) {
20544 break;
20545 }
20546 } // clamp remove range start to 0 index
20547
20548
20549 j = Math.max(j, 0);
20550 updatedBuffer.splice(j, i - j + 1);
20551 return updatedBuffer;
20552 };
20553
20554 var shallowEqual = function shallowEqual(a, b) {
20555 // if both are undefined
20556 // or one or the other is undefined
20557 // they are not equal
20558 if (!a && !b || !a && b || a && !b) {
20559 return false;
20560 } // they are the same object and thus, equal
20561
20562
20563 if (a === b) {
20564 return true;
20565 } // sort keys so we can make sure they have
20566 // all the same keys later.
20567
20568
20569 var akeys = Object.keys(a).sort();
20570 var bkeys = Object.keys(b).sort(); // different number of keys, not equal
20571
20572 if (akeys.length !== bkeys.length) {
20573 return false;
20574 }
20575
20576 for (var i = 0; i < akeys.length; i++) {
20577 var key = akeys[i]; // different sorted keys, not equal
20578
20579 if (key !== bkeys[i]) {
20580 return false;
20581 } // different values, not equal
20582
20583
20584 if (a[key] !== b[key]) {
20585 return false;
20586 }
20587 }
20588
20589 return true;
20590 };
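
// Illustrative example (not part of the library's API; never invoked):
// `shallowEqual` semantics, including the deliberate choice that two
// undefined values are NOT considered equal.
var exampleShallowEqualResults = function () {
  return [
    shallowEqual({ a: 1, b: 2 }, { b: 2, a: 1 }), // true: same keys and values
    shallowEqual({ a: 1 }, { a: '1' }),           // false: values compared strictly
    shallowEqual(undefined, undefined)            // false: both undefined
  ];
};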
20591
20592 // https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
20593 var QUOTA_EXCEEDED_ERR = 22;
20594
20595 /**
20596 * The segment loader has no recourse except to fetch a segment in the
20597 * current playlist and use the internal timestamps in that segment to
20598 * generate a syncPoint. This function returns a good candidate index
20599 * for that process.
20600 *
 * @param {number} currentTimeline - the timeline the segment loader is currently on
20601 * @param {Array} segments - the segments array from a playlist.
 * @param {number} targetTime - the duration of segments to accumulate within the timeline
20602 * @return {number} An index of a segment from the playlist to load
20603 */
20604
20605 var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
20606 segments = segments || [];
20607 var timelineSegments = [];
20608 var time = 0;
20609
20610 for (var i = 0; i < segments.length; i++) {
20611 var segment = segments[i];
20612
20613 if (currentTimeline === segment.timeline) {
20614 timelineSegments.push(i);
20615 time += segment.duration;
20616
20617 if (time > targetTime) {
20618 return i;
20619 }
20620 }
20621 }
20622
20623 if (timelineSegments.length === 0) {
20624 return 0;
20625 } // default to the last timeline segment
20626
20627
20628 return timelineSegments[timelineSegments.length - 1];
20629 }; // In the event of a quota exceeded error, keep at least one second of back buffer. This
20630 // number was arbitrarily chosen and may be updated in the future, but seemed reasonable
20631 // as a start to prevent any potential issues with removing content too close to the
20632 // playhead.
20633
20634 var MIN_BACK_BUFFER = 1; // in seconds
20635
20636 var CHECK_BUFFER_DELAY = 500; // in ms
20637
20638 var finite = function finite(num) {
20639 return typeof num === 'number' && isFinite(num);
20640 }; // With most content hovering around 30fps, if a segment has a duration less than a half
20641 // frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
20642 // not accurately reflect the rest of the content.
20643
20644
20645 var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
20646 var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
20647 // Although these checks would most likely apply to non-'main' loader types as well,
20648 // for now we narrow the scope of our checks to the main loader.
20649 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
20650 return null;
20651 }
20652
20653 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
20654 return 'Neither audio nor video found in segment.';
20655 }
20656
20657 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
20658 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
20659 }
20660
20661 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
20662 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
20663 }
20664
20665 return null;
20666 };
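 // Editor's note: an illustrative call, not part of the library source; the
 // track info objects are hypothetical. Only the 'main' loader is checked.
 var exampleIllegalMediaSwitch = function exampleIllegalMediaSwitch() {
   illegalMediaSwitch('audio', { hasVideo: true }, { hasAudio: true }); // => null, not 'main'

   return illegalMediaSwitch(
     'main',
     { hasAudio: true, hasVideo: true },
     { hasAudio: true, hasVideo: false }
   ); // => 'Only audio found in segment when we expected video. ...'
 };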
20667 /**
20668 * Calculates a time value that is safe to remove from the back buffer without interrupting
20669 * playback.
20670 *
20671 * @param {TimeRange} seekable
20672 * The current seekable range
20673 * @param {number} currentTime
20674 * The current time of the player
20675 * @param {number} targetDuration
20676 * The target duration of the current playlist
20677 * @return {number}
20678 * Time that is safe to remove from the back buffer without interrupting playback
20679 */
20680
20681 var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
20682 // 30 seconds before the playhead provides a safe default for trimming.
20683 //
20684 // Choosing a reasonable default is particularly important for high bitrate content and
20685 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
20686 // throw an APPEND_BUFFER_ERR.
20687 var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
20688
20689 if (seekable.length) {
20690 // Some live playlists may have a shorter window of content than the full allowed back
20691 // buffer. For these playlists, don't save content that's no longer within the window.
20692 trimTime = Math.max(trimTime, seekable.start(0));
20693 } // Don't remove within target duration of the current time to avoid the possibility of
20694 // removing the GOP currently being played, as removing it can cause playback stalls.
20695
20696
20697 var maxTrimTime = currentTime - targetDuration;
20698 return Math.min(maxTrimTime, trimTime);
20699 };
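 // Editor's note: a worked example, not part of the library source. Assumes
 // Config.BACK_BUFFER_LENGTH of 30 (the default noted in the comment above).
 // For a live stream whose seekable window starts at 100s, with the playhead
 // at 125s and a 10s target duration:
 var exampleSafeBackBufferTrimTime = function exampleSafeBackBufferTrimTime() {
   var seekable = videojs__default["default"].createTimeRanges([[100, 300]]);

   // trimTime    = max(125 - 30, seekable.start(0)) = 100
   // maxTrimTime = 125 - 10 = 115
   // result      = min(115, 100) = 100, so content before 100s may be removed
   return safeBackBufferTrimTime(seekable, 125, 10);
 };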
20700
20701 var segmentInfoString = function segmentInfoString(segmentInfo) {
20702 var startOfSegment = segmentInfo.startOfSegment,
20703 duration = segmentInfo.duration,
20704 segment = segmentInfo.segment,
20705 part = segmentInfo.part,
20706 _segmentInfo$playlist = segmentInfo.playlist,
20707 seq = _segmentInfo$playlist.mediaSequence,
20708 id = _segmentInfo$playlist.id,
20709 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
20710 segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
20711 index = segmentInfo.mediaIndex,
20712 partIndex = segmentInfo.partIndex,
20713 timeline = segmentInfo.timeline;
20714 var segmentLen = segments.length - 1;
20715 var selection = 'mediaIndex/partIndex increment';
20716
20717 if (segmentInfo.getMediaInfoForTime) {
20718 selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
20719 } else if (segmentInfo.isSyncRequest) {
20720 selection = 'getSyncSegmentCandidate (isSyncRequest)';
20721 }
20722
20723 var hasPartIndex = typeof partIndex === 'number';
20724 var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
20725 var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
20726 preloadSegment: segment
20727 }) - 1 : 0;
20728 return name + " [" + (seq + index) + "/" + (seq + segmentLen) + "]" + (hasPartIndex ? " part [" + partIndex + "/" + zeroBasedPartCount + "]" : '') + (" segment start/end [" + segment.start + " => " + segment.end + "]") + (hasPartIndex ? " part start/end [" + part.start + " => " + part.end + "]" : '') + (" startOfSegment [" + startOfSegment + "]") + (" duration [" + duration + "]") + (" timeline [" + timeline + "]") + (" selected by [" + selection + "]") + (" playlist [" + id + "]");
20729 };
20730
20731 var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
20732 return mediaType + "TimingInfo";
20733 };
20734 /**
20735 * Returns the timestamp offset to use for the segment.
20736 *
20737 * @param {number} segmentTimeline
20738 * The timeline of the segment
20739 * @param {number} currentTimeline
20740 * The timeline currently being followed by the loader
20741 * @param {number} startOfSegment
20742 * The estimated segment start
20743 * @param {TimeRange[]} buffered
20744 * The loader's buffer
20745 * @param {boolean} overrideCheck
20746 * If true, no checks are made to see if the timestamp offset value should be set,
20747 * but sets it directly to a value.
20748 *
20749 * @return {number|null}
20750 * Either a number representing a new timestamp offset, or null if the segment is
20751 * part of the same timeline
20752 */
20753
20754
20755 var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
20756 var segmentTimeline = _ref.segmentTimeline,
20757 currentTimeline = _ref.currentTimeline,
20758 startOfSegment = _ref.startOfSegment,
20759 buffered = _ref.buffered,
20760 overrideCheck = _ref.overrideCheck;
20761
20762 // Check to see if we are crossing a discontinuity to see if we need to set the
20763 // timestamp offset on the transmuxer and source buffer.
20764 //
20765 // Previously, we changed the timestampOffset if the start of this segment was less than
20766 // the currently set timestampOffset, but this isn't desirable as it can produce bad
20767 // behavior, especially around long running live streams.
20768 if (!overrideCheck && segmentTimeline === currentTimeline) {
20769 return null;
20770 } // When changing renditions, it's possible to request a segment on an older timeline. For
20771 // instance, given two renditions with the following:
20772 //
20773 // #EXTINF:10
20774 // segment1
20775 // #EXT-X-DISCONTINUITY
20776 // #EXTINF:10
20777 // segment2
20778 // #EXTINF:10
20779 // segment3
20780 //
20781 // And the current player state:
20782 //
20783 // current time: 8
20784 // buffer: 0 => 20
20785 //
20786 // The next segment on the current rendition would be segment3, filling the buffer from
20787 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
20788 // then the next segment to be requested will be segment1 from the new rendition in
20789 // order to fill time 8 and onwards. Using the buffered end would result in repeated
20790 // content (since it would position segment1 of the new rendition starting at 20s). This
20791 // case can be identified when the new segment's timeline is a prior value. Instead of
20792 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
20793 // more accurate to the actual start time of the segment.
20794
20795
20796 if (segmentTimeline < currentTimeline) {
20797 return startOfSegment;
20798 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
20799 // value uses the end of the last segment if it is available. While this value
20800 // should often be correct, it's better to rely on the buffered end, as the new
20801 // content post discontinuity should line up with the buffered end as if it were
20802 // time 0 for the new content.
20803
20804
20805 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
20806 };
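 // Editor's note: a sketch of the three outcomes described above, not part of
 // the library source; the timeline and time values are hypothetical.
 var exampleTimestampOffsetForSegment = function exampleTimestampOffsetForSegment() {
   var buffered = videojs__default["default"].createTimeRanges([[0, 20]]);

   // same timeline and no override: no new timestamp offset is needed
   timestampOffsetForSegment({
     segmentTimeline: 0, currentTimeline: 0, startOfSegment: 20,
     buffered: buffered, overrideCheck: false
   }); // => null

   // rendition switch back to an earlier timeline: use the estimated segment
   // start rather than the buffered end
   timestampOffsetForSegment({
     segmentTimeline: 0, currentTimeline: 1, startOfSegment: 8,
     buffered: buffered, overrideCheck: false
   }); // => 8

   // crossing forward into a new timeline: line up with the buffered end
   return timestampOffsetForSegment({
     segmentTimeline: 1, currentTimeline: 0, startOfSegment: 21,
     buffered: buffered, overrideCheck: false
   }); // => 20
 };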
20807 /**
20808 * Returns whether or not the loader should wait for a timeline change from the timeline
20809 * change controller before processing the segment.
20810 *
20811 * Primary timing in VHS goes by video. This is different from most media players, as
20812 * audio is more often used as the primary timing source. For the foreseeable future, VHS
20813 * will continue to use video as the primary timing source, due to the current logic and
20814 * expectations built around it.
20815
20816 * Since the timing follows video, in order to maintain sync, the video loader is
20817 * responsible for setting both audio and video source buffer timestamp offsets.
20818 *
20819 * Setting different values for audio and video source buffers could lead to
20820 * desyncing. The following examples demonstrate some of the situations where this
20821 * distinction is important. Note that all of these cases involve demuxed content. When
20822 * content is muxed, the audio and video are packaged together, therefore syncing
20823 * separate media playlists is not an issue.
20824 *
20825 * CASE 1: Audio prepares to load a new timeline before video:
20826 *
20827 * Timeline: 0 1
20828 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
20829 * Audio Loader: ^
20830 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
20831 * Video Loader ^
20832 *
20833 * In the above example, the audio loader is preparing to load the 6th segment, the first
20834 * after a discontinuity, while the video loader is still loading the 5th segment, before
20835 * the discontinuity.
20836 *
20837 * If the audio loader goes ahead and loads and appends the 6th segment before the video
20838 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
20839 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
20840 * the audio loader must provide the audioAppendStart value to trim the content in the
20841 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
20842 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
20843 * segment until that value is provided.
20844 *
20845 * CASE 2: Video prepares to load a new timeline before audio:
20846 *
20847 * Timeline: 0 1
20848 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
20849 * Audio Loader: ^
20850 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
20851 * Video Loader ^
20852 *
20853 * In the above example, the video loader is preparing to load the 6th segment, the first
20854 * after a discontinuity, while the audio loader is still loading the 5th segment, before
20855 * the discontinuity.
20856 *
20857 * If the video loader goes ahead and loads and appends the 6th segment, then once the
20858 * segment is loaded and processed, both the video and audio timestamp offsets will be
20859 * set, since video is used as the primary timing source. This is to ensure content lines
20860 * up appropriately, as any modifications to the video timing are reflected by audio when
20861 * the video loader sets the audio and video timestamp offsets to the same value. However,
20862 * setting the timestamp offset for audio before audio has had a chance to change
20863 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
20864 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
20865 *
20866 * CASE 3: When seeking, audio prepares to load a new timeline before video
20867 *
20868 * Timeline: 0 1
20869 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
20870 * Audio Loader: ^
20871 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
20872 * Video Loader ^
20873 *
20874 * In the above example, both audio and video loaders are loading segments from timeline
20875 * 0, but imagine that the seek originated from timeline 1.
20876 *
20877 * When seeking to a new timeline, the timestamp offset will be set based on the expected
20878 * segment start of the loaded video segment. In order to maintain sync, the audio loader
20879 * must wait for the video loader to load its segment and update both the audio and video
20880 * timestamp offsets before it may load and append its own segment. This is the case
20881 * whether the seek results in a mismatched segment request (e.g., the audio loader
20882 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
20883 * loaders choose to load the same segment index from each playlist, as the segments may
20884 * not be aligned perfectly, even for matching segment indexes.
20885 *
20886 * @param {Object} timelinechangeController
20887 * @param {number} currentTimeline
20888 * The timeline currently being followed by the loader
20889 * @param {number} segmentTimeline
20890 * The timeline of the segment being loaded
20891 * @param {('main'|'audio')} loaderType
20892 * The loader type
20893 * @param {boolean} audioDisabled
20894 * Whether the audio is disabled for the loader. This should only be true when the
20895 * loader may have muxed audio in its segment, but should not append it, e.g., for
20896 * the main loader when an alternate audio playlist is active.
20897 *
20898 * @return {boolean}
20899 * Whether the loader should wait for a timeline change from the timeline change
20900 * controller before processing the segment
20901 */
20902
20903 var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
20904 var timelineChangeController = _ref2.timelineChangeController,
20905 currentTimeline = _ref2.currentTimeline,
20906 segmentTimeline = _ref2.segmentTimeline,
20907 loaderType = _ref2.loaderType,
20908 audioDisabled = _ref2.audioDisabled;
20909
20910 if (currentTimeline === segmentTimeline) {
20911 return false;
20912 }
20913
20914 if (loaderType === 'audio') {
20915 var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
20916 type: 'main'
20917 }); // Audio loader should wait if:
20918 //
20919 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
20920 // * main hasn't yet changed to the timeline audio is looking to load
20921
20922 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
20923 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
20924 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
20925 // loader's segments (or the content is audio/video only and handled by the main
20926 // loader).
20927
20928
20929 if (loaderType === 'main' && audioDisabled) {
20930 var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
20931 type: 'audio'
20932 }); // Main loader should wait for the audio loader if audio is not pending a timeline
20933 // change to the current timeline.
20934 //
20935 // Since the main loader is responsible for setting the timestamp offset for both
20936 // audio and video, the main loader must wait for audio to be about to change to its
20937 // timeline before setting the offset, otherwise, if audio is behind in loading,
20938 // segments from the previous timeline would be adjusted by the new timestamp offset.
20939 //
20940 // This requirement means that video will not cross a timeline until the audio is
20941 // about to cross to it, so that way audio and video will always cross the timeline
20942 // together.
20943 //
20944 // In addition to normal timeline changes, these rules also apply to the start of a
20945 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
20946 // that these rules apply to the first timeline change because if they did not, it's
20947 // possible that the main loader will cross two timelines before the audio loader has
20948 // crossed one. Logic may be implemented to handle the startup as a special case, but
20949 // it's easier to simply treat all timeline changes the same.
20950
20951 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
20952 return false;
20953 }
20954
20955 return true;
20956 }
20957
20958 return false;
20959 };
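 // Editor's note: a sketch of CASE 1 above, not part of the library source.
 // The stub controller's shape is inferred from the usage in this function
 // and is only for illustration.
 var exampleShouldWaitForTimelineChange = function exampleShouldWaitForTimelineChange() {
   var stubController = {
     // main's most recent completed change was into timeline 0
     lastTimelineChange: function lastTimelineChange() {
       return { from: -1, to: 0 };
     },
     pendingTimelineChange: function pendingTimelineChange() {
       return null;
     }
   };

   // audio wants to cross into timeline 1, but main has not changed to
   // timeline 1 yet, so the audio loader must wait
   return shouldWaitForTimelineChange({
     timelineChangeController: stubController,
     currentTimeline: 0,
     segmentTimeline: 1,
     loaderType: 'audio',
     audioDisabled: false
   }); // => true
 };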
20960 var mediaDuration = function mediaDuration(audioTimingInfo, videoTimingInfo) {
20961 var audioDuration = audioTimingInfo && typeof audioTimingInfo.start === 'number' && typeof audioTimingInfo.end === 'number' ? audioTimingInfo.end - audioTimingInfo.start : 0;
20962 var videoDuration = videoTimingInfo && typeof videoTimingInfo.start === 'number' && typeof videoTimingInfo.end === 'number' ? videoTimingInfo.end - videoTimingInfo.start : 0;
20963 return Math.max(audioDuration, videoDuration);
20964 };
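 // Editor's note: illustrative values, not part of the library source. The
 // longer of the two track durations wins, and missing or incomplete timing
 // info contributes 0.
 var exampleMediaDuration = function exampleMediaDuration() {
   mediaDuration({ start: 0, end: 10.01 }, { start: 0, end: 10 }); // => 10.01
   return mediaDuration(null, { start: 5, end: 15 }); // => 10
 };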
20965 var segmentTooLong = function segmentTooLong(_ref3) {
20966 var segmentDuration = _ref3.segmentDuration,
20967 maxDuration = _ref3.maxDuration;
20968
20969 // 0 duration segments are most likely due to metadata only segments or a lack of
20970 // information.
20971 if (!segmentDuration) {
20972 return false;
20973 } // For HLS:
20974 //
20975 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
20976 // The EXTINF duration of each Media Segment in the Playlist
20977 // file, when rounded to the nearest integer, MUST be less than or equal
20978 // to the target duration; longer segments can trigger playback stalls
20979 // or other errors.
20980 //
20981 // For DASH, the mpd-parser uses the largest reported segment duration as the target
20982 // duration. Although that reported duration is occasionally approximate (i.e., not
20983 // exact), a strict check may report that a segment is too long more often in DASH.
20984
20985
20986 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
20987 };
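 // Editor's note: illustrative values showing the rounding behavior described
 // above, not part of the library source. TIME_FUDGE_FACTOR is a small
 // tolerance (1/30 of a second in this bundle).
 var exampleSegmentTooLong = function exampleSegmentTooLong() {
   segmentTooLong({ segmentDuration: 10.4, maxDuration: 10 }); // => false, rounds to 10
   segmentTooLong({ segmentDuration: 0, maxDuration: 10 }); // => false, treated as a lack of information
   return segmentTooLong({ segmentDuration: 10.6, maxDuration: 10 }); // => true, rounds to 11
 };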
20988 var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
20989 // Right now we aren't following DASH's timing model exactly, so only perform
20990 // this check for HLS content.
20991 if (sourceType !== 'hls') {
20992 return null;
20993 }
20994
20995 var segmentDuration = mediaDuration(segmentInfo.audioTimingInfo, segmentInfo.videoTimingInfo); // Don't report if we lack information.
20996 //
20997 // If the segment has a duration of 0 it is either a lack of information or a
20998 // metadata only segment and shouldn't be reported here.
20999
21000 if (!segmentDuration) {
21001 return null;
21002 }
21003
21004 var targetDuration = segmentInfo.playlist.targetDuration;
21005 var isSegmentWayTooLong = segmentTooLong({
21006 segmentDuration: segmentDuration,
21007 maxDuration: targetDuration * 2
21008 });
21009 var isSegmentSlightlyTooLong = segmentTooLong({
21010 segmentDuration: segmentDuration,
21011 maxDuration: targetDuration
21012 });
21013 var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
21014
21015 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
21016 return {
21017 severity: isSegmentWayTooLong ? 'warn' : 'info',
21018 message: segmentTooLongMessage
21019 };
21020 }
21021
21022 return null;
21023 };
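 // Editor's note: a sketch with a hypothetical segmentInfo, not part of the
 // library source. A duration more than double the target yields a 'warn'
 // severity, merely exceeding the target yields 'info', and non-HLS sources
 // are skipped entirely.
 var exampleTroublesomeSegmentDuration = function exampleTroublesomeSegmentDuration() {
   return getTroublesomeSegmentDurationMessage({
     mediaIndex: 3,
     duration: 10,
     playlist: { id: 'playlist-0', targetDuration: 10 },
     audioTimingInfo: { start: 0, end: 21 },
     videoTimingInfo: { start: 0, end: 21 }
   }, 'hls'); // => { severity: 'warn', message: 'Segment with index 3 ...' }
 };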
21024 /**
21025 * An object that manages segment loading and appending.
21026 *
21027 * @class SegmentLoader
21028 * @param {Object} options required and optional options
21029 * @extends videojs.EventTarget
21030 */
21031
21032 var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
21033 inheritsLoose(SegmentLoader, _videojs$EventTarget);
21034
21035 function SegmentLoader(settings, options) {
21036 var _this;
21037
21038 _this = _videojs$EventTarget.call(this) || this; // check pre-conditions
21039
21040 if (!settings) {
21041 throw new TypeError('Initialization settings are required');
21042 }
21043
21044 if (typeof settings.currentTime !== 'function') {
21045 throw new TypeError('No currentTime getter specified');
21046 }
21047
21048 if (!settings.mediaSource) {
21049 throw new TypeError('No MediaSource specified');
21050 } // public properties
21051
21052
21053 _this.bandwidth = settings.bandwidth;
21054 _this.throughput = {
21055 rate: 0,
21056 count: 0
21057 };
21058 _this.roundTrip = NaN;
21059
21060 _this.resetStats_();
21061
21062 _this.mediaIndex = null;
21063 _this.partIndex = null; // private settings
21064
21065 _this.hasPlayed_ = settings.hasPlayed;
21066 _this.currentTime_ = settings.currentTime;
21067 _this.seekable_ = settings.seekable;
21068 _this.seeking_ = settings.seeking;
21069 _this.duration_ = settings.duration;
21070 _this.mediaSource_ = settings.mediaSource;
21071 _this.vhs_ = settings.vhs;
21072 _this.loaderType_ = settings.loaderType;
21073 _this.currentMediaInfo_ = void 0;
21074 _this.startingMediaInfo_ = void 0;
21075 _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
21076 _this.goalBufferLength_ = settings.goalBufferLength;
21077 _this.sourceType_ = settings.sourceType;
21078 _this.sourceUpdater_ = settings.sourceUpdater;
21079 _this.inbandTextTracks_ = settings.inbandTextTracks;
21080 _this.state_ = 'INIT';
21081 _this.timelineChangeController_ = settings.timelineChangeController;
21082 _this.shouldSaveSegmentTimingInfo_ = true;
21083 _this.parse708captions_ = settings.parse708captions;
21084 _this.captionServices_ = settings.captionServices;
21085 _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables
21086
21087 _this.checkBufferTimeout_ = null;
21088 _this.error_ = void 0;
21089 _this.currentTimeline_ = -1;
21090 _this.pendingSegment_ = null;
21091 _this.xhrOptions_ = null;
21092 _this.pendingSegments_ = [];
21093 _this.audioDisabled_ = false;
21094 _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
21095
21096 _this.gopBuffer_ = [];
21097 _this.timeMapping_ = 0;
21098 _this.safeAppend_ = videojs__default["default"].browser.IE_VERSION >= 11;
21099 _this.appendInitSegment_ = {
21100 audio: true,
21101 video: true
21102 };
21103 _this.playlistOfLastInitSegment_ = {
21104 audio: null,
21105 video: null
21106 };
21107 _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
21108 // information yet to start the loading process (e.g., if the audio loader wants to
21109 // load a segment from the next timeline but the main loader hasn't yet crossed that
21110 // timeline), then the load call will be added to the queue until it is ready to be
21111 // processed.
21112
21113 _this.loadQueue_ = [];
21114 _this.metadataQueue_ = {
21115 id3: [],
21116 caption: []
21117 };
21118 _this.waitingOnRemove_ = false;
21119 _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
21120
21121 _this.activeInitSegmentId_ = null;
21122 _this.initSegments_ = {}; // HLSe playback
21123
21124 _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
21125 _this.keyCache_ = {};
21126 _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
21127 // between a time in the display time and a segment index within
21128 // a playlist
21129
21130 _this.syncController_ = settings.syncController;
21131 _this.syncPoint_ = {
21132 segmentIndex: 0,
21133 time: 0
21134 };
21135 _this.transmuxer_ = _this.createTransmuxer_();
21136
21137 _this.triggerSyncInfoUpdate_ = function () {
21138 return _this.trigger('syncinfoupdate');
21139 };
21140
21141 _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
21142
21143 _this.mediaSource_.addEventListener('sourceopen', function () {
21144 if (!_this.isEndOfStream_()) {
21145 _this.ended_ = false;
21146 }
21147 }); // ...for determining the fetch location
21148
21149
21150 _this.fetchAtBuffer_ = false;
21151 _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
21152 Object.defineProperty(assertThisInitialized(_this), 'state', {
21153 get: function get() {
21154 return this.state_;
21155 },
21156 set: function set(newState) {
21157 if (newState !== this.state_) {
21158 this.logger_(this.state_ + " -> " + newState);
21159 this.state_ = newState;
21160 this.trigger('statechange');
21161 }
21162 }
21163 });
21164
21165 _this.sourceUpdater_.on('ready', function () {
21166 if (_this.hasEnoughInfoToAppend_()) {
21167 _this.processCallQueue_();
21168 }
21169 }); // Only the main loader needs to listen for pending timeline changes, as the main
21170 // loader should wait for audio to be ready to change its timeline so that both main
21171 // and audio timelines change together. For more details, see the
21172 // shouldWaitForTimelineChange function.
21173
21174
21175 if (_this.loaderType_ === 'main') {
21176 _this.timelineChangeController_.on('pendingtimelinechange', function () {
21177 if (_this.hasEnoughInfoToAppend_()) {
21178 _this.processCallQueue_();
21179 }
21180 });
21181 } // The main loader only listens on pending timeline changes, but the audio loader,
21182 // since its loads follow main, needs to listen on timeline changes. For more details,
21183 // see the shouldWaitForTimelineChange function.
21184
21185
21186 if (_this.loaderType_ === 'audio') {
21187 _this.timelineChangeController_.on('timelinechange', function () {
21188 if (_this.hasEnoughInfoToLoad_()) {
21189 _this.processLoadQueue_();
21190 }
21191
21192 if (_this.hasEnoughInfoToAppend_()) {
21193 _this.processCallQueue_();
21194 }
21195 });
21196 }
21197
21198 return _this;
21199 }
21200
21201 var _proto = SegmentLoader.prototype;
21202
21203 _proto.createTransmuxer_ = function createTransmuxer_() {
21204 return segmentTransmuxer.createTransmuxer({
21205 remux: false,
21206 alignGopsAtEnd: this.safeAppend_,
21207 keepOriginalTimestamps: true,
21208 parse708captions: this.parse708captions_,
21209 captionServices: this.captionServices_
21210 });
21211 }
21212 /**
21213 * reset all of our media stats
21214 *
21215 * @private
21216 */
21217 ;
21218
21219 _proto.resetStats_ = function resetStats_() {
21220 this.mediaBytesTransferred = 0;
21221 this.mediaRequests = 0;
21222 this.mediaRequestsAborted = 0;
21223 this.mediaRequestsTimedout = 0;
21224 this.mediaRequestsErrored = 0;
21225 this.mediaTransferDuration = 0;
21226 this.mediaSecondsLoaded = 0;
21227 this.mediaAppends = 0;
21228 }
21229 /**
21230 * dispose of the SegmentLoader and reset to the default state
21231 */
21232 ;
21233
21234 _proto.dispose = function dispose() {
21235 this.trigger('dispose');
21236 this.state = 'DISPOSED';
21237 this.pause();
21238 this.abort_();
21239
21240 if (this.transmuxer_) {
21241 this.transmuxer_.terminate();
21242 }
21243
21244 this.resetStats_();
21245
21246 if (this.checkBufferTimeout_) {
21247 window.clearTimeout(this.checkBufferTimeout_);
21248 }
21249
21250 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
21251 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
21252 }
21253
21254 this.off();
21255 };
21256
21257 _proto.setAudio = function setAudio(enable) {
21258 this.audioDisabled_ = !enable;
21259
21260 if (enable) {
21261 this.appendInitSegment_.audio = true;
21262 } else {
21263 // remove current track audio if it gets disabled
21264 this.sourceUpdater_.removeAudio(0, this.duration_());
21265 }
21266 }
21267 /**
21268 * abort anything that is currently going on with the SegmentLoader
21269 * and reset to a default state
21270 */
21271 ;
21272
21273 _proto.abort = function abort() {
21274 if (this.state !== 'WAITING') {
21275 if (this.pendingSegment_) {
21276 this.pendingSegment_ = null;
21277 }
21278
21279 return;
21280 }
21281
21282 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
21283 // since we are no longer "waiting" on any requests. XHR callback is not always run
21284 // when the request is aborted. This will prevent the loader from being stuck in the
21285 // WAITING state indefinitely.
21286
21287 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
21288 // next segment
21289
21290 if (!this.paused()) {
21291 this.monitorBuffer_();
21292 }
21293 }
21294 /**
21295 * abort all pending xhr requests and null any pending segments
21296 *
21297 * @private
21298 */
21299 ;
21300
21301 _proto.abort_ = function abort_() {
21302 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
21303 this.pendingSegment_.abortRequests();
21304 } // clear out the segment being processed
21305
21306
21307 this.pendingSegment_ = null;
21308 this.callQueue_ = [];
21309 this.loadQueue_ = [];
21310 this.metadataQueue_.id3 = [];
21311 this.metadataQueue_.caption = [];
21312 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
21313 this.waitingOnRemove_ = false;
21314 window.clearTimeout(this.quotaExceededErrorRetryTimeout_);
21315 this.quotaExceededErrorRetryTimeout_ = null;
21316 };
21317
21318 _proto.checkForAbort_ = function checkForAbort_(requestId) {
21319 // If the state is APPENDING, then aborts will not modify the state, meaning the first
21320 // callback that happens should reset the state to READY so that loading can continue.
21321 if (this.state === 'APPENDING' && !this.pendingSegment_) {
21322 this.state = 'READY';
21323 return true;
21324 }
21325
21326 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
21327 return true;
21328 }
21329
21330 return false;
21331 }
21332 /**
21333 * set an error on the segment loader and null out any pending segments
21334 *
21335 * @param {Error} error the error to set on the SegmentLoader
21336 * @return {Error} the error that was set or that is currently set
21337 */
21338 ;
21339
21340 _proto.error = function error(_error) {
21341 if (typeof _error !== 'undefined') {
21342 this.logger_('error occurred:', _error);
21343 this.error_ = _error;
21344 }
21345
21346 this.pendingSegment_ = null;
21347 return this.error_;
21348 };
21349
21350 _proto.endOfStream = function endOfStream() {
21351 this.ended_ = true;
21352
21353 if (this.transmuxer_) {
21354 // need to clear out any cached data to prepare for the new segment
21355 segmentTransmuxer.reset(this.transmuxer_);
21356 }
21357
21358 this.gopBuffer_.length = 0;
21359 this.pause();
21360 this.trigger('ended');
21361 }
21362 /**
21363 * Indicates which time ranges are buffered
21364 *
21365 * @return {TimeRange}
21366 * TimeRange object representing the current buffered ranges
21367 */
21368 ;
21369
21370 _proto.buffered_ = function buffered_() {
21371 var trackInfo = this.getMediaInfo_();
21372
21373 if (!this.sourceUpdater_ || !trackInfo) {
21374 return videojs__default["default"].createTimeRanges();
21375 }
21376
21377 if (this.loaderType_ === 'main') {
21378 var hasAudio = trackInfo.hasAudio,
21379 hasVideo = trackInfo.hasVideo,
21380 isMuxed = trackInfo.isMuxed;
21381
21382 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
21383 return this.sourceUpdater_.buffered();
21384 }
21385
21386 if (hasVideo) {
21387 return this.sourceUpdater_.videoBuffered();
21388 }
21389 } // One case that can be ignored for now is audio only with alt audio,
21390 // as we don't yet have proper support for that.
21391
21392
21393 return this.sourceUpdater_.audioBuffered();
21394 }
21395 /**
21396 * Gets and sets the init segment for the provided map
21397 *
21398 * @param {Object} map
21399 * The map object representing the init segment to get or set
21400 * @param {boolean=} set
21401 * If true, the init segment for the provided map should be saved
21402 * @return {Object}
21403 * map object for desired init segment
21404 */
21405 ;
21406
21407 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
21408 if (set === void 0) {
21409 set = false;
21410 }
21411
21412 if (!map) {
21413 return null;
21414 }
21415
21416 var id = initSegmentId(map);
21417 var storedMap = this.initSegments_[id];
21418
21419 if (set && !storedMap && map.bytes) {
21420 this.initSegments_[id] = storedMap = {
21421 resolvedUri: map.resolvedUri,
21422 byterange: map.byterange,
21423 bytes: map.bytes,
21424 tracks: map.tracks,
21425 timescales: map.timescales
21426 };
21427 }
21428
21429 return storedMap || map;
21430 }
21431 /**
21432 * Gets and sets the key for the provided key object
21433 *
21434 * @param {Object} key
21435 * The key object representing the key to get or set
21436 * @param {boolean=} set
21437 * If true, the key for the provided key should be saved
21438 * @return {Object}
21439 * Key object for desired key
21440 */
21441 ;
21442
21443 _proto.segmentKey = function segmentKey(key, set) {
21444 if (set === void 0) {
21445 set = false;
21446 }
21447
21448 if (!key) {
21449 return null;
21450 }
21451
21452 var id = segmentKeyId(key);
21453 var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
21454 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
21455
21456 if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
21457 this.keyCache_[id] = storedKey = {
21458 resolvedUri: key.resolvedUri,
21459 bytes: key.bytes
21460 };
21461 }
21462
21463 var result = {
21464 resolvedUri: (storedKey || key).resolvedUri
21465 };
21466
21467 if (storedKey) {
21468 result.bytes = storedKey.bytes;
21469 }
21470
21471 return result;
21472 }
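 // Editor's note (illustrative; the key object and `loader` are hypothetical):
 // bytes are only cached, and later returned, when cacheEncryptionKeys_ is
 // enabled, e.g.
 //
 //   loader.segmentKey({ resolvedUri: keyUri, bytes: keyBytes }, true);
 //   loader.segmentKey({ resolvedUri: keyUri });
 //   // => { resolvedUri: keyUri, bytes: keyBytes } once the key is cached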
21473 /**
21474 * Returns true if all configuration required for loading is present, otherwise false.
21475 *
21476 * @return {boolean} True if all the configuration is ready for loading
21477 * @private
21478 */
21479 ;
21480
21481 _proto.couldBeginLoading_ = function couldBeginLoading_() {
21482 return this.playlist_ && !this.paused();
21483 }
21484 /**
21485 * load a playlist and start to fill the buffer
21486 */
21487 ;
21488
21489 _proto.load = function load() {
21490 // un-pause
21491 this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
21492 // specified
21493
21494 if (!this.playlist_) {
21495 return;
21496 } // if all the configuration is ready, initialize and begin loading
21497
21498
21499 if (this.state === 'INIT' && this.couldBeginLoading_()) {
21500 return this.init_();
21501 } // if we're in the middle of processing a segment already, don't
21502 // kick off an additional segment request
21503
21504
21505 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
21506 return;
21507 }
21508
21509 this.state = 'READY';
21510 }
21511 /**
21512 * Once all the starting parameters have been specified, begin
21513 * operation. This method should only be invoked from the INIT
21514 * state.
21515 *
21516 * @private
21517 */
21518 ;
21519
21520 _proto.init_ = function init_() {
21521 this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
21522 // audio data from the muxed content should be removed
21523
21524 this.resetEverything();
21525 return this.monitorBuffer_();
21526 }
21527 /**
21528 * set a playlist on the segment loader
21529 *
21530 * @param {PlaylistLoader} media the playlist to set on the segment loader
21531 */
21532 ;
21533
21534 _proto.playlist = function playlist(newPlaylist, options) {
21535 if (options === void 0) {
21536 options = {};
21537 }
21538
21539 if (!newPlaylist) {
21540 return;
21541 }
21542
21543 var oldPlaylist = this.playlist_;
21544 var segmentInfo = this.pendingSegment_;
21545 this.playlist_ = newPlaylist;
21546 this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
21547 // is always our zero-time so force a sync update each time the playlist
21548 // is refreshed from the server
21549 //
21550 // Use the INIT state to determine if playback has started, as the playlist sync info
21551 // should be fixed once requests begin (as sync points are generated based on sync
21552 // info), but not before then.
21553
21554 if (this.state === 'INIT') {
21555 newPlaylist.syncInfo = {
21556 mediaSequence: newPlaylist.mediaSequence,
21557 time: 0
21558 }; // Setting the date time mapping means mapping the program date time (if available)
21559 // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
21560 // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
21561 // be updated as the playlist is refreshed before the loader starts loading, the
21562 // program date time mapping needs to be updated as well.
21563 //
21564 // This mapping is only done for the main loader because a program date time should
21565 // map equivalently between playlists.
21566
21567 if (this.loaderType_ === 'main') {
21568 this.syncController_.setDateTimeMappingForStart(newPlaylist);
21569 }
21570 }
21571
21572 var oldId = null;
21573
21574 if (oldPlaylist) {
21575 if (oldPlaylist.id) {
21576 oldId = oldPlaylist.id;
21577 } else if (oldPlaylist.uri) {
21578 oldId = oldPlaylist.uri;
21579 }
21580 }
21581
21582 this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
21583 // in LIVE, we always want to update with new playlists (including refreshes)
21584
21585 this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
21586 // buffering now
21587
21588 if (this.state === 'INIT' && this.couldBeginLoading_()) {
21589 return this.init_();
21590 }
21591
21592 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
21593 if (this.mediaIndex !== null) {
21594 // we must "resync" the segment loader when we switch renditions and
21595 // the segment loader is already synced to the previous rendition
21596 this.resyncLoader();
21597 }
21598
21599 this.currentMediaInfo_ = void 0;
21600 this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
21601
21602 return;
21603 } // we reloaded the same playlist so we are in a live scenario
21604 // and we will likely need to adjust the mediaIndex
21605
21606
21607 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
21608 this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
21609 // this is important because we can abort a request and this value must be
21610 // equal to the last appended mediaIndex
21611
21612 if (this.mediaIndex !== null) {
21613 this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
21614 // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
21615 // new playlist was incremented by 1.
21616
21617 if (this.mediaIndex < 0) {
21618 this.mediaIndex = null;
21619 this.partIndex = null;
21620 } else {
21621 var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
21622 // unless parts fell off of the playlist for this segment.
21623 // In that case we need to reset partIndex and resync
21624
21625 if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
21626 var mediaIndex = this.mediaIndex;
21627 this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
21628 this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
21629 // as the part was dropped from our current playlists segment.
21630 // The mediaIndex will still be valid so keep that around.
21631
21632 this.mediaIndex = mediaIndex;
21633 }
21634 }
21635 } // update the mediaIndex on the SegmentInfo object
21636 // this is important because we will update this.mediaIndex with this value
21637 // in `handleAppendsDone_` after the segment has been successfully appended
21638
21639
21640 if (segmentInfo) {
21641 segmentInfo.mediaIndex -= mediaSequenceDiff;
21642
21643 if (segmentInfo.mediaIndex < 0) {
21644 segmentInfo.mediaIndex = null;
21645 segmentInfo.partIndex = null;
21646 } else {
21647 // we need to update the referenced segment so that timing information is
21648 // saved for the new playlist's segment, however, if the segment fell off the
21649 // playlist, we can leave the old reference and just lose the timing info
21650 if (segmentInfo.mediaIndex >= 0) {
21651 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
21652 }
21653
21654 if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
21655 segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
21656 }
21657 }
21658 }
21659
21660 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
21661 }
21662 /**
21663 * Prevent the loader from fetching additional segments. If there
21664 * is a segment request outstanding, it will finish processing
21665 * before the loader halts. A segment loader can be unpaused by
21666 * calling load().
21667 */
21668 ;
21669
21670 _proto.pause = function pause() {
21671 if (this.checkBufferTimeout_) {
21672 window.clearTimeout(this.checkBufferTimeout_);
21673 this.checkBufferTimeout_ = null;
21674 }
21675 }
21676 /**
21677 * Returns whether the segment loader is fetching additional
21678 * segments when given the opportunity. This property can be
21679 * modified through calls to pause() and load().
21680 */
21681 ;
21682
21683 _proto.paused = function paused() {
21684 return this.checkBufferTimeout_ === null;
21685 }
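 // Editor's note (illustrative; `loader` is hypothetical): the paused state is
 // simply the absence of a scheduled buffer check, e.g.
 //
 //   loader.pause();  // clears checkBufferTimeout_
 //   loader.paused(); // => true
 //   loader.load();   // monitorBuffer_() reschedules the check
 //   loader.paused(); // => false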
21686 /**
21687 * Delete all the buffered data and reset the SegmentLoader
21688 *
21689 * @param {Function} [done] an optional callback to be executed when the remove
21690 * operation is complete
21691 */
21692 ;
21693
21694 _proto.resetEverything = function resetEverything(done) {
21695 this.ended_ = false;
21696 this.appendInitSegment_ = {
21697 audio: true,
21698 video: true
21699 };
21700 this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
21701 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
21702 // we then clamp the value to duration if necessary.
21703
21704 this.remove(0, Infinity, done); // clears fmp4 captions
21705
21706 if (this.transmuxer_) {
21707 this.transmuxer_.postMessage({
21708 action: 'clearAllMp4Captions'
21709 }); // reset the cache in the transmuxer
21710
21711 this.transmuxer_.postMessage({
21712 action: 'reset'
21713 });
21714 }
21715 }
21716 /**
21717 * Force the SegmentLoader to resync and start loading around the currentTime instead
21718 * of starting at the end of the buffer
21719 *
21720 * Useful for fast quality changes
21721 */
21722 ;
21723
21724 _proto.resetLoader = function resetLoader() {
21725 this.fetchAtBuffer_ = false;
21726 this.resyncLoader();
21727 }
21728 /**
21729 * Force the SegmentLoader to restart synchronization and make a conservative guess
21730 * before returning to the simple walk-forward method
21731 */
21732 ;
21733
21734 _proto.resyncLoader = function resyncLoader() {
21735 if (this.transmuxer_) {
21736 // need to clear out any cached data to prepare for the new segment
21737 segmentTransmuxer.reset(this.transmuxer_);
21738 }
21739
21740 this.mediaIndex = null;
21741 this.partIndex = null;
21742 this.syncPoint_ = null;
21743 this.isPendingTimestampOffset_ = false;
21744 this.callQueue_ = [];
21745 this.loadQueue_ = [];
21746 this.metadataQueue_.id3 = [];
21747 this.metadataQueue_.caption = [];
21748 this.abort();
21749
21750 if (this.transmuxer_) {
21751 this.transmuxer_.postMessage({
21752 action: 'clearParsedMp4Captions'
21753 });
21754 }
21755 }
21756 /**
21757 * Remove any data in the source buffer between start and end times
21758 *
21759 * @param {number} start - the start time of the region to remove from the buffer
21760 * @param {number} end - the end time of the region to remove from the buffer
21761 * @param {Function} [done] - an optional callback to be executed when the remove
21762 * operation is complete
21763 * @param {boolean} force - force all remove operations to happen
21764 */
21765 ;
21766
21767 _proto.remove = function remove(start, end, done, force) {
21768 if (done === void 0) {
21769 done = function done() {};
21770 }
21771
21772 if (force === void 0) {
21773 force = false;
21774 }
21775
21776 // clamp end to duration if we need to remove everything.
21777 // This is due to a browser bug that causes issues if we remove to Infinity.
21778 // videojs/videojs-contrib-hls#1225
21779 if (end === Infinity) {
21780 end = this.duration_();
21781 } // skip removes that would throw an error
21782 // commonly happens during a rendition switch at the start of a video
21783 // from start 0 to end 0
21784
21785
21786 if (end <= start) {
21787 this.logger_("skipping remove because end " + end + " is <= start " + start);
21788 return;
21789 }
21790
21791 if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
21792 this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
21793
21794 return;
21795 } // set it to one to complete this function's removes
21796
21797
21798 var removesRemaining = 1;
21799
21800 var removeFinished = function removeFinished() {
21801 removesRemaining--;
21802
21803 if (removesRemaining === 0) {
21804 done();
21805 }
21806 };
21807
21808 if (force || !this.audioDisabled_) {
21809 removesRemaining++;
21810 this.sourceUpdater_.removeAudio(start, end, removeFinished);
21811 } // While it would be better to only remove video if the main loader has video, this
21812 // should be safe with audio only as removeVideo will call back even if there's no
21813 // video buffer.
21814 //
21815 // In theory we can check to see if there's video before calling the remove, but in
21816 // the event that we're switching between renditions and from video to audio only
21817 // (when we add support for that), we may need to clear the video contents despite
21818 // what the new media will contain.
21819
21820
21821 if (force || this.loaderType_ === 'main') {
21822 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
21823 removesRemaining++;
21824 this.sourceUpdater_.removeVideo(start, end, removeFinished);
21825 } // remove any captions and ID3 tags
21826
21827
21828 for (var track in this.inbandTextTracks_) {
21829 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
21830 }
21831
21832 removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
21833
21834 removeFinished();
21835 }
21836 /**
21837 * (re-)schedule monitorBufferTick_ to run as soon as possible
21838 *
21839 * @private
21840 */
21841 ;
21842
21843 _proto.monitorBuffer_ = function monitorBuffer_() {
21844 if (this.checkBufferTimeout_) {
21845 window.clearTimeout(this.checkBufferTimeout_);
21846 }
21847
21848 this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), 1);
21849 }
21850 /**
21851 * As long as the SegmentLoader is in the READY state, periodically
21852 * invoke fillBuffer_().
21853 *
21854 * @private
21855 */
21856 ;
21857
21858 _proto.monitorBufferTick_ = function monitorBufferTick_() {
21859 if (this.state === 'READY') {
21860 this.fillBuffer_();
21861 }
21862
21863 if (this.checkBufferTimeout_) {
21864 window.clearTimeout(this.checkBufferTimeout_);
21865 }
21866
21867 this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
21868 }
21869 /**
21870 * fill the buffer with segments unless the sourceBuffers are
21871 * currently updating
21872 *
21873 * Note: this function should only ever be called by monitorBuffer_
21874 * and never directly
21875 *
21876 * @private
21877 */
21878 ;
21879
21880 _proto.fillBuffer_ = function fillBuffer_() {
21881 // TODO since the source buffer maintains a queue, and we shouldn't call this function
21882 // except when we're ready for the next segment, this check can most likely be removed
21883 if (this.sourceUpdater_.updating()) {
21884 return;
21885 } // see if we need to begin loading immediately
21886
21887
21888 var segmentInfo = this.chooseNextRequest_();
21889
21890 if (!segmentInfo) {
21891 return;
21892 }
21893
21894 if (typeof segmentInfo.timestampOffset === 'number') {
21895 this.isPendingTimestampOffset_ = false;
21896 this.timelineChangeController_.pendingTimelineChange({
21897 type: this.loaderType_,
21898 from: this.currentTimeline_,
21899 to: segmentInfo.timeline
21900 });
21901 }
21902
21903 this.loadSegment_(segmentInfo);
21904 }
21905 /**
21906 * Determines if we should call endOfStream on the media source based
21907 * on the state of the buffer or if the appended segment was the final
21908 * segment in the playlist.
21909 *
21910 * @param {number} [mediaIndex] the media index of segment we last appended
21911 * @param {Object} [playlist] a media playlist object
21912 * @return {boolean} do we need to call endOfStream on the MediaSource
21913 */
21914 ;
21915
21916 _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
21917 if (mediaIndex === void 0) {
21918 mediaIndex = this.mediaIndex;
21919 }
21920
21921 if (playlist === void 0) {
21922 playlist = this.playlist_;
21923 }
21924
21925 if (partIndex === void 0) {
21926 partIndex = this.partIndex;
21927 }
21928
21929 if (!playlist || !this.mediaSource_) {
21930 return false;
21931 }
21932
21933 var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based
21934
21935 var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.
21936
21937 var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
21938 // so that MediaSources can trigger the `ended` event when it runs out of
21939 // buffered data instead of waiting for more data to be appended
21940
21941 return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
21942 }
21943 /**
21944 * Determines what request should be made given current segment loader state.
21945 *
21946 * @return {Object} a request object that describes the segment/part to load
21947 */
21948 ;
21949
21950 _proto.chooseNextRequest_ = function chooseNextRequest_() {
21951 var bufferedEnd = lastBufferedEnd(this.buffered_()) || 0;
21952 var bufferedTime = Math.max(0, bufferedEnd - this.currentTime_());
21953 var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
21954 var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
21955 var segments = this.playlist_.segments; // return no segment if:
21956 // 1. we don't have segments
21957 // 2. The video has not yet played and we already downloaded a segment
21958 // 3. we already have enough buffered time
21959
21960 if (!segments.length || preloaded || haveEnoughBuffer) {
21961 return null;
21962 }
21963
21964 this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
21965 var next = {
21966 partIndex: null,
21967 mediaIndex: null,
21968 startOfSegment: null,
21969 playlist: this.playlist_,
21970 isSyncRequest: Boolean(!this.syncPoint_)
21971 };
21972
21973 if (next.isSyncRequest) {
21974 next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
21975 } else if (this.mediaIndex !== null) {
21976 var segment = segments[this.mediaIndex];
21977 var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
21978 next.startOfSegment = segment.end ? segment.end : bufferedEnd;
21979
21980 if (segment.parts && segment.parts[partIndex + 1]) {
21981 next.mediaIndex = this.mediaIndex;
21982 next.partIndex = partIndex + 1;
21983 } else {
21984 next.mediaIndex = this.mediaIndex + 1;
21985 }
21986 } else {
21987 // Find the segment containing the end of the buffer or current time.
21988 var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
21989 experimentalExactManifestTimings: this.experimentalExactManifestTimings,
21990 playlist: this.playlist_,
21991 currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
21992 startingPartIndex: this.syncPoint_.partIndex,
21993 startingSegmentIndex: this.syncPoint_.segmentIndex,
21994 startTime: this.syncPoint_.time
21995 }),
21996 segmentIndex = _Playlist$getMediaInf.segmentIndex,
21997 startTime = _Playlist$getMediaInf.startTime,
21998 _partIndex = _Playlist$getMediaInf.partIndex;
21999
22000 next.getMediaInfoForTime = this.fetchAtBuffer_ ? 'bufferedEnd' : 'currentTime';
22001 next.mediaIndex = segmentIndex;
22002 next.startOfSegment = startTime;
22003 next.partIndex = _partIndex;
22004 }
22005
22006 var nextSegment = segments[next.mediaIndex];
22007 var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
22008 // the next partIndex is invalid do not choose a next segment.
22009
22010 if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
22011 return null;
22012 } // if the next segment has parts, and we don't have a partIndex.
22013 // Set partIndex to 0
22014
22015
22016 if (typeof next.partIndex !== 'number' && nextSegment.parts) {
22017 next.partIndex = 0;
22018 }
22019
22020 var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
22021 // 1. this is the last segment in the playlist
22022 // 2. end of stream has been called on the media source already
22023 // 3. the player is not seeking
22024
22025 if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
22026 return null;
22027 }
22028
22029 return this.generateSegmentInfo_(next);
22030 };
22031
22032 _proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
22033 var playlist = options.playlist,
22034 mediaIndex = options.mediaIndex,
22035 startOfSegment = options.startOfSegment,
22036 isSyncRequest = options.isSyncRequest,
22037 partIndex = options.partIndex,
22038 forceTimestampOffset = options.forceTimestampOffset,
22039 getMediaInfoForTime = options.getMediaInfoForTime;
22040 var segment = playlist.segments[mediaIndex];
22041 var part = typeof partIndex === 'number' && segment.parts[partIndex];
22042 var segmentInfo = {
22043 requestId: 'segment-loader-' + Math.random(),
22044 // resolve the segment URL relative to the playlist
22045 uri: part && part.resolvedUri || segment.resolvedUri,
22046 // the segment's mediaIndex at the time it was requested
22047 mediaIndex: mediaIndex,
22048 partIndex: part ? partIndex : null,
22049 // whether or not to update the SegmentLoader's state with this
22050 // segment's mediaIndex
22051 isSyncRequest: isSyncRequest,
22052 startOfSegment: startOfSegment,
22053 // the segment's playlist
22054 playlist: playlist,
22055 // unencrypted bytes of the segment
22056 bytes: null,
22057 // when a key is defined for this segment, the encrypted bytes
22058 encryptedBytes: null,
22059 // The target timestampOffset for this segment when we append it
22060 // to the source buffer
22061 timestampOffset: null,
22062 // The timeline that the segment is in
22063 timeline: segment.timeline,
22064 // The expected duration of the segment in seconds
22065 duration: part && part.duration || segment.duration,
22066 // retain the segment in case the playlist updates while doing an async process
22067 segment: segment,
22068 part: part,
22069 byteLength: 0,
22070 transmuxer: this.transmuxer_,
22071 // type of getMediaInfoForTime that was used to get this segment
22072 getMediaInfoForTime: getMediaInfoForTime
22073 };
22074 var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
22075 segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
22076 segmentTimeline: segment.timeline,
22077 currentTimeline: this.currentTimeline_,
22078 startOfSegment: startOfSegment,
22079 buffered: this.buffered_(),
22080 overrideCheck: overrideCheck
22081 });
22082 var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());
22083
22084 if (typeof audioBufferedEnd === 'number') {
22085 // since the transmuxer is using the actual timing values, but the buffer is
22086 // adjusted by the timestamp offset, we must adjust the value here
22087 segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
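// Illustrative numbers (not from the source): if audio is buffered through
// 30s of player time and the audio timestampOffset is -100s (a live stream
// whose media timestamps start at 100s), the transmuxer-facing append start
// is 30 - (-100) = 130s of media time.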
22088 }
22089
22090 if (this.sourceUpdater_.videoBuffered().length) {
22091 segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
// adjusted by the timestamp offset, we must adjust the value here
22093 this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
22094 }
22095
22096 return segmentInfo;
} // get the timestampOffset for a segment,
22098 // added so that vtt segment loader can override and prevent
22099 // adding timestamp offsets.
22100 ;
22101
22102 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
22103 return timestampOffsetForSegment(options);
22104 }
22105 /**
22106 * Determines if the network has enough bandwidth to complete the current segment
22107 * request in a timely manner. If not, the request will be aborted early and bandwidth
22108 * updated to trigger a playlist switch.
22109 *
22110 * @param {Object} stats
22111 * Object containing stats about the request timing and size
22112 * @private
22113 */
22114 ;
22115
22116 _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
22117 if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
22118 // TODO: Replace using timeout with a boolean indicating whether this playlist is
22119 // the lowestEnabledRendition.
22120 !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
22121 !this.playlist_.attributes.BANDWIDTH) {
22122 return;
22123 } // Wait at least 1 second since the first byte of data has been received before
22124 // using the calculated bandwidth from the progress event to allow the bitrate
22125 // to stabilize
22126
22127
22128 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
22129 return;
22130 }
22131
22132 var currentTime = this.currentTime_();
22133 var measuredBandwidth = stats.bandwidth;
22134 var segmentDuration = this.pendingSegment_.duration;
22135 var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
22136 // if we are only left with less than 1 second when the request completes.
// A negative timeUntilRebuffer indicates we are already rebuffering
22138
22139 var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
22140 // is larger than the estimated time until the player runs out of forward buffer
22141
22142 if (requestTimeRemaining <= timeUntilRebuffer$1) {
22143 return;
22144 }
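// Illustrative numbers (not from the source): for a 6s segment of which
// 1 of an estimated 3 MB has arrived at a measured 2 Mbps, the remaining
// 2 MB take roughly (2 MB * 8 bits) / 2 Mbps = 8s, while 5s of forward
// buffer yields a timeUntilRebuffer of 5 - 1 = 4s after the safety margin
// above; since 8 > 4, the loader keeps going and looks for a faster
// rendition below.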
22145
22146 var switchCandidate = minRebufferMaxBandwidthSelector({
22147 master: this.vhs_.playlists.master,
22148 currentTime: currentTime,
22149 bandwidth: measuredBandwidth,
22150 duration: this.duration_(),
22151 segmentDuration: segmentDuration,
22152 timeUntilRebuffer: timeUntilRebuffer$1,
22153 currentTimeline: this.currentTimeline_,
22154 syncController: this.syncController_
22155 });
22156
22157 if (!switchCandidate) {
22158 return;
22159 }
22160
22161 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
22162 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
22163 var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
22164 // potential round trip time of the new request so that we are not too aggressive
22165 // with switching to a playlist that might save us a fraction of a second.
22166
22167 if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
22168 minimumTimeSaving = 1;
22169 }
22170
22171 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
22172 return;
22173 } // set the bandwidth to that of the desired playlist being sure to scale by
22174 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
22175 // don't trigger a bandwidthupdate as the bandwidth is artifial
22176
22177
22178 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
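// e.g., assuming the default BANDWIDTH_VARIANCE of 1.2 (an assumption about
// the Config constant): a candidate playlist advertising BANDWIDTH 1,000,000
// sets this.bandwidth to 1,200,001, just over the selector's scaled
// threshold, so the candidate won't be filtered out on the next selection.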
22179 this.trigger('earlyabort');
22180 };
22181
22182 _proto.handleAbort_ = function handleAbort_(segmentInfo) {
22183 this.logger_("Aborting " + segmentInfoString(segmentInfo));
22184 this.mediaRequestsAborted += 1;
22185 }
22186 /**
22187 * XHR `progress` event handler
22188 *
* @param {Event} event
22190 * The XHR `progress` event
22191 * @param {Object} simpleSegment
22192 * A simplified segment object copy
22193 * @private
22194 */
22195 ;
22196
22197 _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
22198 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22199
22200 if (this.checkForAbort_(simpleSegment.requestId)) {
22201 return;
22202 }
22203
22204 this.trigger('progress');
22205 };
22206
22207 _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
22208 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22209
22210 if (this.checkForAbort_(simpleSegment.requestId)) {
22211 return;
22212 }
22213
22214 if (this.checkForIllegalMediaSwitch(trackInfo)) {
22215 return;
22216 }
22217
22218 trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
22219 // Guard against cases where we're not getting track info at all until we are
22220 // certain that all streams will provide it.
22221
22222 if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
22223 this.appendInitSegment_ = {
22224 audio: true,
22225 video: true
22226 };
22227 this.startingMediaInfo_ = trackInfo;
22228 this.currentMediaInfo_ = trackInfo;
22229 this.logger_('trackinfo update', trackInfo);
22230 this.trigger('trackinfo');
22231 } // trackinfo may cause an abort if the trackinfo
22232 // causes a codec change to an unsupported codec.
22233
22234
22235 if (this.checkForAbort_(simpleSegment.requestId)) {
22236 return;
22237 } // set trackinfo on the pending segment so that
22238 // it can append.
22239
22240
22241 this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
22242
22243 if (this.hasEnoughInfoToAppend_()) {
22244 this.processCallQueue_();
22245 }
22246 };
22247
22248 _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
22249 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22250
22251 if (this.checkForAbort_(simpleSegment.requestId)) {
22252 return;
22253 }
22254
22255 var segmentInfo = this.pendingSegment_;
22256 var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
22257 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
22258 segmentInfo[timingInfoProperty][timeType] = time;
22259 this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
22260
22261 if (this.hasEnoughInfoToAppend_()) {
22262 this.processCallQueue_();
22263 }
22264 };
22265
22266 _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
22267 var _this2 = this;
22268
22269 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22270
22271 if (this.checkForAbort_(simpleSegment.requestId)) {
22272 return;
22273 } // This could only happen with fmp4 segments, but
22274 // should still not happen in general
22275
22276
22277 if (captionData.length === 0) {
22278 this.logger_('SegmentLoader received no captions from a caption event');
22279 return;
22280 }
22281
22282 var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
22283 // can be adjusted by the timestamp offset
22284
22285 if (!segmentInfo.hasAppendedData_) {
22286 this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
22287 return;
22288 }
22289
22290 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
22291 var captionTracks = {}; // get total start/end and captions for each track/stream
22292
22293 captionData.forEach(function (caption) {
22294 // caption.stream is actually a track name...
22295 // set to the existing values in tracks or default values
22296 captionTracks[caption.stream] = captionTracks[caption.stream] || {
22297 // Infinity, as any other value will be less than this
22298 startTime: Infinity,
22299 captions: [],
// 0, as any other value will be more than this
22301 endTime: 0
22302 };
22303 var captionTrack = captionTracks[caption.stream];
22304 captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
22305 captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
22306 captionTrack.captions.push(caption);
22307 });
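// Hypothetical example: two CC1 cues spanning [10.0, 11.5] and [11.5, 13.0]
// with a timestampOffset of -5 collapse into a single CC1 window with
// startTime 5.0 and endTime 8.0; that whole window is cleared and re-added
// below so re-parsed captions never appear twice.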
22308 Object.keys(captionTracks).forEach(function (trackName) {
22309 var _captionTracks$trackN = captionTracks[trackName],
22310 startTime = _captionTracks$trackN.startTime,
22311 endTime = _captionTracks$trackN.endTime,
22312 captions = _captionTracks$trackN.captions;
22313 var inbandTextTracks = _this2.inbandTextTracks_;
22314
22315 _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);
22316
22317 createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
22318 // We do this because a rendition change that also changes the timescale for captions
22319 // will result in captions being re-parsed for certain segments. If we add them again
22320 // without clearing we will have two of the same captions visible.
22321
22322 removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
22323 addCaptionData({
22324 captionArray: captions,
22325 inbandTextTracks: inbandTextTracks,
22326 timestampOffset: timestampOffset
22327 });
22328 }); // Reset stored captions since we added parsed
22329 // captions to a text track at this point
22330
22331 if (this.transmuxer_) {
22332 this.transmuxer_.postMessage({
22333 action: 'clearParsedMp4Captions'
22334 });
22335 }
22336 };
22337
22338 _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
22339 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22340
22341 if (this.checkForAbort_(simpleSegment.requestId)) {
22342 return;
22343 }
22344
22345 var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
22346
22347 if (!segmentInfo.hasAppendedData_) {
22348 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
22349 return;
22350 }
22351
22352 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
22353 // audio/video source with a metadata track, and an alt audio with a metadata track.
22354 // However, this probably won't happen, and if it does it can be handled then.
22355
22356 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
22357 addMetadata({
22358 inbandTextTracks: this.inbandTextTracks_,
22359 metadataArray: id3Frames,
22360 timestampOffset: timestampOffset,
22361 videoDuration: this.duration_()
22362 });
22363 };
22364
22365 _proto.processMetadataQueue_ = function processMetadataQueue_() {
22366 this.metadataQueue_.id3.forEach(function (fn) {
22367 return fn();
22368 });
22369 this.metadataQueue_.caption.forEach(function (fn) {
22370 return fn();
22371 });
22372 this.metadataQueue_.id3 = [];
22373 this.metadataQueue_.caption = [];
22374 };
22375
22376 _proto.processCallQueue_ = function processCallQueue_() {
22377 var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
22378 // functions may check the length of the load queue and default to pushing themselves
22379 // back onto the queue.
22380
22381 this.callQueue_ = [];
22382 callQueue.forEach(function (fun) {
22383 return fun();
22384 });
22385 };
22386
22387 _proto.processLoadQueue_ = function processLoadQueue_() {
22388 var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
22389 // functions may check the length of the load queue and default to pushing themselves
22390 // back onto the queue.
22391
22392 this.loadQueue_ = [];
22393 loadQueue.forEach(function (fun) {
22394 return fun();
22395 });
22396 }
22397 /**
22398 * Determines whether the loader has enough info to load the next segment.
22399 *
22400 * @return {boolean}
22401 * Whether or not the loader has enough info to load the next segment
22402 */
22403 ;
22404
22405 _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
22406 // Since primary timing goes by video, only the audio loader potentially needs to wait
22407 // to load.
22408 if (this.loaderType_ !== 'audio') {
22409 return true;
22410 }
22411
22412 var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
22413 // enough info to load.
22414
22415 if (!segmentInfo) {
22416 return false;
22417 } // The first segment can and should be loaded immediately so that source buffers are
22418 // created together (before appending). Source buffer creation uses the presence of
22419 // audio and video data to determine whether to create audio/video source buffers, and
22420 // uses processed (transmuxed or parsed) media to determine the types required.
22421
22422
22423 if (!this.getCurrentMediaInfo_()) {
22424 return true;
22425 }
22426
22427 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
22428 // can be requested and downloaded and only wait before it is transmuxed or parsed.
22429 // But in practice, there are a few reasons why it is better to wait until a loader
22430 // is ready to append that segment before requesting and downloading:
22431 //
22432 // 1. Because audio and main loaders cross discontinuities together, if this loader
22433 // is waiting for the other to catch up, then instead of requesting another
22434 // segment and using up more bandwidth, by not yet loading, more bandwidth is
22435 // allotted to the loader currently behind.
22436 // 2. media-segment-request doesn't have to have logic to consider whether a segment
22437 // is ready to be processed or not, isolating the queueing behavior to the loader.
22438 // 3. The audio loader bases some of its segment properties on timing information
22439 // provided by the main loader, meaning that, if the logic for waiting on
22440 // processing was in media-segment-request, then it would also need to know how
22441 // to re-generate the segment information after the main loader caught up.
22442 shouldWaitForTimelineChange({
22443 timelineChangeController: this.timelineChangeController_,
22444 currentTimeline: this.currentTimeline_,
22445 segmentTimeline: segmentInfo.timeline,
22446 loaderType: this.loaderType_,
22447 audioDisabled: this.audioDisabled_
22448 })) {
22449 return false;
22450 }
22451
22452 return true;
22453 };
22454
22455 _proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
22456 if (segmentInfo === void 0) {
22457 segmentInfo = this.pendingSegment_;
22458 }
22459
22460 return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
22461 };
22462
22463 _proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
22464 if (segmentInfo === void 0) {
22465 segmentInfo = this.pendingSegment_;
22466 }
22467
22468 return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
22469 };
22470
22471 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
22472 if (!this.sourceUpdater_.ready()) {
22473 return false;
22474 } // If content needs to be removed or the loader is waiting on an append reattempt,
22475 // then no additional content should be appended until the prior append is resolved.
22476
22477
22478 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
22479 return false;
22480 }
22481
22482 var segmentInfo = this.pendingSegment_;
22483 var trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
22484 // we do not have information on this specific
22485 // segment yet
22486
22487 if (!segmentInfo || !trackInfo) {
22488 return false;
22489 }
22490
22491 var hasAudio = trackInfo.hasAudio,
22492 hasVideo = trackInfo.hasVideo,
22493 isMuxed = trackInfo.isMuxed;
22494
22495 if (hasVideo && !segmentInfo.videoTimingInfo) {
22496 return false;
22497 } // muxed content only relies on video timing information for now.
22498
22499
22500 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
22501 return false;
22502 }
22503
22504 if (shouldWaitForTimelineChange({
22505 timelineChangeController: this.timelineChangeController_,
22506 currentTimeline: this.currentTimeline_,
22507 segmentTimeline: segmentInfo.timeline,
22508 loaderType: this.loaderType_,
22509 audioDisabled: this.audioDisabled_
22510 })) {
22511 return false;
22512 }
22513
22514 return true;
22515 };
22516
22517 _proto.handleData_ = function handleData_(simpleSegment, result) {
22518 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22519
22520 if (this.checkForAbort_(simpleSegment.requestId)) {
22521 return;
22522 } // If there's anything in the call queue, then this data came later and should be
22523 // executed after the calls currently queued.
22524
22525
22526 if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
22527 this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
22528 return;
22529 }
22530
22531 var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
22532
22533 this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
22534
22535 this.updateMediaSecondsLoaded_(segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
22536 // logic may change behavior depending on the state, and changing state too early may
22537 // inflate our estimates of bandwidth. In the future this should be re-examined to
22538 // note more granular states.
22539 // don't process and append data if the mediaSource is closed
22540
22541 if (this.mediaSource_.readyState === 'closed') {
22542 return;
22543 } // if this request included an initialization segment, save that data
22544 // to the initSegment cache
22545
22546
22547 if (simpleSegment.map) {
22548 simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
22549
22550 segmentInfo.segment.map = simpleSegment.map;
22551 } // if this request included a segment key, save that data in the cache
22552
22553
22554 if (simpleSegment.key) {
22555 this.segmentKey(simpleSegment.key, true);
22556 }
22557
22558 segmentInfo.isFmp4 = simpleSegment.isFmp4;
22559 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
22560
22561 if (segmentInfo.isFmp4) {
22562 this.trigger('fmp4');
22563 segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
22564 } else {
22565 var trackInfo = this.getCurrentMediaInfo_();
22566 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
22567 var firstVideoFrameTimeForData;
22568
22569 if (useVideoTimingInfo) {
22570 firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
22571 } // Segment loader knows more about segment timing than the transmuxer (in certain
22572 // aspects), so make any changes required for a more accurate start time.
22573 // Don't set the end time yet, as the segment may not be finished processing.
22574
22575
22576 segmentInfo.timingInfo.start = this.trueSegmentStart_({
22577 currentStart: segmentInfo.timingInfo.start,
22578 playlist: segmentInfo.playlist,
22579 mediaIndex: segmentInfo.mediaIndex,
22580 currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
22581 useVideoTimingInfo: useVideoTimingInfo,
22582 firstVideoFrameTimeForData: firstVideoFrameTimeForData,
22583 videoTimingInfo: segmentInfo.videoTimingInfo,
22584 audioTimingInfo: segmentInfo.audioTimingInfo
22585 });
22586 } // Init segments for audio and video only need to be appended in certain cases. Now
22587 // that data is about to be appended, we can check the final cases to determine
22588 // whether we should append an init segment.
22589
22590
22591 this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
22592 // as we use the start of the segment to offset the best guess (playlist provided)
22593 // timestamp offset.
22594
22595 this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
22596 // be appended or not.
22597
22598 if (segmentInfo.isSyncRequest) {
22599 // first save/update our timing info for this segment.
22600 // this is what allows us to choose an accurate segment
22601 // and the main reason we make a sync request.
22602 this.updateTimingInfoEnd_(segmentInfo);
22603 this.syncController_.saveSegmentTimingInfo({
22604 segmentInfo: segmentInfo,
22605 shouldSaveTimelineMapping: this.loaderType_ === 'main'
22606 });
22607 var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
22608 // after taking into account its timing info, do not append it.
22609
22610 if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
22611 this.logger_('sync segment was incorrect, not appending');
22612 return;
22613 } // otherwise append it like any other segment as our guess was correct.
22614
22615
22616 this.logger_('sync segment was correct, appending');
22617 } // Save some state so that in the future anything waiting on first append (and/or
22618 // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
22619 // we need some notion of whether the timestamp offset or other relevant information
22620 // has had a chance to be set.
22621
22622
22623 segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
22624
22625 this.processMetadataQueue_();
22626 this.appendData_(segmentInfo, result);
22627 };
22628
22629 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
22630 // alt audio doesn't manage timestamp offset
22631 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
22632 // segment for each chunk
22633 !segmentInfo.changedTimestampOffset) {
22634 // if the timestamp offset changed, the timeline may have changed, so we have to re-
22635 // append init segments
22636 this.appendInitSegment_ = {
22637 audio: true,
22638 video: true
22639 };
22640 }
22641
22642 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
22643 // make sure we append init segment on playlist changes, in case the media config
22644 // changed
22645 this.appendInitSegment_[type] = true;
22646 }
22647 };
22648
22649 _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
22650 var type = _ref4.type,
22651 initSegment = _ref4.initSegment,
22652 map = _ref4.map,
22653 playlist = _ref4.playlist;
22654
22655 // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
22656 // (Section 3) required to parse the applicable Media Segments. It applies to every
22657 // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
22658 // or until the end of the playlist."
22659 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
22660 if (map) {
22661 var id = initSegmentId(map);
22662
22663 if (this.activeInitSegmentId_ === id) {
22664 // don't need to re-append the init segment if the ID matches
22665 return null;
22666 } // a map-specified init segment takes priority over any transmuxed (or otherwise
22667 // obtained) init segment
22668 //
22669 // this also caches the init segment for later use
22670
22671
22672 initSegment = this.initSegmentForMap(map, true).bytes;
22673 this.activeInitSegmentId_ = id;
22674 } // We used to always prepend init segments for video, however, that shouldn't be
22675 // necessary. Instead, we should only append on changes, similar to what we've always
22676 // done for audio. This is more important (though may not be that important) for
22677 // frame-by-frame appending for LHLS, simply because of the increased quantity of
22678 // appends.
22679
22680
22681 if (initSegment && this.appendInitSegment_[type]) {
22682 // Make sure we track the playlist that we last used for the init segment, so that
22683 // we can re-append the init segment in the event that we get data from a new
22684 // playlist. Discontinuities and track changes are handled in other sections.
this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type until a change is necessary.
22686
22687 this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
22688 // we are appending the muxer init segment
22689
22690 this.activeInitSegmentId_ = null;
22691 return initSegment;
22692 }
22693
22694 return null;
22695 };
22696
22697 _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
22698 var _this3 = this;
22699
22700 var segmentInfo = _ref5.segmentInfo,
22701 type = _ref5.type,
22702 bytes = _ref5.bytes;
22703 var audioBuffered = this.sourceUpdater_.audioBuffered();
22704 var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
22705 // should be cleared out during the buffer removals. However, log in case it helps
22706 // debug.
22707
22708 if (audioBuffered.length > 1) {
22709 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
22710 }
22711
22712 if (videoBuffered.length > 1) {
22713 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
22714 }
22715
22716 var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
22717 var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
22718 var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
22719 var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
22720
22721 if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
22722 // Can't remove enough buffer to make room for new segment (or the browser doesn't
22723 // allow for appends of segments this size). In the future, it may be possible to
22724 // split up the segment and append in pieces, but for now, error out this playlist
22725 // in an attempt to switch to a more manageable rendition.
this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ')));
22727 this.error({
22728 message: 'Quota exceeded error with append of a single segment of content',
22729 excludeUntil: Infinity
22730 });
22731 this.trigger('error');
22732 return;
22733 } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
22734 // that the segment-loader should block on future events until this one is handled, so
22735 // that it doesn't keep moving onto further segments. Adding the call to the call
22736 // queue will prevent further appends until waitingOnRemove_ and
22737 // quotaExceededErrorRetryTimeout_ are cleared.
22738 //
22739 // Note that this will only block the current loader. In the case of demuxed content,
22740 // the other load may keep filling as fast as possible. In practice, this should be
22741 // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
22742 // source buffer, or video fills without enough room for audio to append (and without
22743 // the availability of clearing out seconds of back buffer to make room for audio).
22744 // But it might still be good to handle this case in the future as a TODO.
22745
22746
22747 this.waitingOnRemove_ = true;
22748 this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
22749 segmentInfo: segmentInfo,
22750 type: type,
22751 bytes: bytes
22752 }));
22753 var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
22754 // before retrying.
22755
22756 var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
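// e.g., assuming MIN_BACK_BUFFER is 30 (an assumption about the constant):
// at currentTime 100 this removes [0, 70), then waits 30s before re-running
// the call queue, since clearing less than the minimum back buffer would
// just hit the quota again.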
22757 this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
22758 this.remove(0, timeToRemoveUntil, function () {
22759 _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");
22760
_this3.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
22762 // attempts (since we can't clear less than the minimum)
22763
22764 _this3.quotaExceededErrorRetryTimeout_ = window.setTimeout(function () {
22765 _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
22766
22767 _this3.quotaExceededErrorRetryTimeout_ = null;
22768
22769 _this3.processCallQueue_();
22770 }, MIN_BACK_BUFFER * 1000);
22771 }, true);
22772 };
22773
22774 _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
22775 var segmentInfo = _ref6.segmentInfo,
22776 type = _ref6.type,
22777 bytes = _ref6.bytes;
22778
22779 // if there's no error, nothing to do
22780 if (!error) {
22781 return;
22782 }
22783
22784 if (error.code === QUOTA_EXCEEDED_ERR) {
22785 this.handleQuotaExceededError_({
22786 segmentInfo: segmentInfo,
22787 type: type,
22788 bytes: bytes
22789 }); // A quota exceeded error should be recoverable with a future re-append, so no need
22790 // to trigger an append error.
22791
22792 return;
22793 }
22794
22795 this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
22796 this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
22797 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
22798 //
22799 // Trigger a special error so that it can be handled separately from normal,
22800 // recoverable errors.
22801
22802 this.trigger('appenderror');
22803 };
22804
22805 _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
22806 var segmentInfo = _ref7.segmentInfo,
22807 type = _ref7.type,
22808 initSegment = _ref7.initSegment,
22809 data = _ref7.data,
22810 bytes = _ref7.bytes;
22811
22812 // If this is a re-append, bytes were already created and don't need to be recreated
22813 if (!bytes) {
22814 var segments = [data];
22815 var byteLength = data.byteLength;
22816
22817 if (initSegment) {
22818 // if the media initialization segment is changing, append it before the content
22819 // segment
22820 segments.unshift(initSegment);
22821 byteLength += initSegment.byteLength;
22822 } // Technically we should be OK appending the init segment separately, however, we
22823 // haven't yet tested that, and prepending is how we have always done things.
22824
22825
22826 bytes = concatSegments({
22827 bytes: byteLength,
22828 segments: segments
22829 });
22830 }
22831
22832 this.sourceUpdater_.appendBuffer({
22833 segmentInfo: segmentInfo,
22834 type: type,
22835 bytes: bytes
22836 }, this.handleAppendError_.bind(this, {
22837 segmentInfo: segmentInfo,
22838 type: type,
22839 bytes: bytes
22840 }));
22841 };
22842
22843 _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
22844 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
22845 return;
22846 }
22847
22848 var segment = this.pendingSegment_.segment;
22849 var timingInfoProperty = type + "TimingInfo";
22850
22851 if (!segment[timingInfoProperty]) {
22852 segment[timingInfoProperty] = {};
22853 }
22854
22855 segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
22856 segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
22857 segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
22858 segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
22859 segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
22860
22861 segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
22862 };
22863
22864 _proto.appendData_ = function appendData_(segmentInfo, result) {
22865 var type = result.type,
22866 data = result.data;
22867
22868 if (!data || !data.byteLength) {
22869 return;
22870 }
22871
22872 if (type === 'audio' && this.audioDisabled_) {
22873 return;
22874 }
22875
22876 var initSegment = this.getInitSegmentAndUpdateState_({
22877 type: type,
22878 initSegment: result.initSegment,
22879 playlist: segmentInfo.playlist,
22880 map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
22881 });
22882 this.appendToSourceBuffer_({
22883 segmentInfo: segmentInfo,
22884 type: type,
22885 initSegment: initSegment,
22886 data: data
22887 });
22888 }
22889 /**
22890 * load a specific segment from a request into the buffer
22891 *
22892 * @private
22893 */
22894 ;
22895
22896 _proto.loadSegment_ = function loadSegment_(segmentInfo) {
22897 var _this4 = this;
22898
22899 this.state = 'WAITING';
22900 this.pendingSegment_ = segmentInfo;
22901 this.trimBackBuffer_(segmentInfo);
22902
22903 if (typeof segmentInfo.timestampOffset === 'number') {
22904 if (this.transmuxer_) {
22905 this.transmuxer_.postMessage({
22906 action: 'clearAllMp4Captions'
22907 });
22908 }
22909 }
22910
22911 if (!this.hasEnoughInfoToLoad_()) {
22912 this.loadQueue_.push(function () {
22913 // regenerate the audioAppendStart, timestampOffset, etc as they
22914 // may have changed since this function was added to the queue.
22915 var options = _extends_1({}, segmentInfo, {
22916 forceTimestampOffset: true
22917 });
22918
22919 _extends_1(segmentInfo, _this4.generateSegmentInfo_(options));
22920
22921 _this4.isPendingTimestampOffset_ = false;
22922
22923 _this4.updateTransmuxerAndRequestSegment_(segmentInfo);
22924 });
22925 return;
22926 }
22927
22928 this.updateTransmuxerAndRequestSegment_(segmentInfo);
22929 };
22930
22931 _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
22932 var _this5 = this;
22933
22934 // We'll update the source buffer's timestamp offset once we have transmuxed data, but
22935 // the transmuxer still needs to be updated before then.
22936 //
22937 // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
22938 // offset must be passed to the transmuxer for stream correcting adjustments.
22939 if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
22940 this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
22941
22942 segmentInfo.gopsToAlignWith = [];
22943 this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
22944
22945 this.transmuxer_.postMessage({
22946 action: 'reset'
22947 });
22948 this.transmuxer_.postMessage({
22949 action: 'setTimestampOffset',
22950 timestampOffset: segmentInfo.timestampOffset
22951 });
22952 }
22953
22954 var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
22955 var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
22956 var isWalkingForward = this.mediaIndex !== null;
22957 var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
22958 // the first timeline
22959 segmentInfo.timeline > 0;
22960 var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
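// For example, a loader that has already appended data (mediaIndex !== null)
// and is moving from timeline 0 to timeline 1 is walking forward across a
// discontinuity, so the request below is flagged as the end of the current
// timeline.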
22961 this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
22962 // then this init segment has never been seen before and should be appended.
22963 //
22964 // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
22965 // both to true and leave the decision of whether to append the init segment to append time.
22966
22967 if (simpleSegment.map && !simpleSegment.map.bytes) {
22968 this.logger_('going to request init segment.');
22969 this.appendInitSegment_ = {
22970 video: true,
22971 audio: true
22972 };
22973 }
22974
22975 segmentInfo.abortRequests = mediaSegmentRequest({
22976 xhr: this.vhs_.xhr,
22977 xhrOptions: this.xhrOptions_,
22978 decryptionWorker: this.decrypter_,
22979 segment: simpleSegment,
22980 abortFn: this.handleAbort_.bind(this, segmentInfo),
22981 progressFn: this.handleProgress_.bind(this),
22982 trackInfoFn: this.handleTrackInfo_.bind(this),
22983 timingInfoFn: this.handleTimingInfo_.bind(this),
22984 videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
22985 audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
22986 captionsFn: this.handleCaptions_.bind(this),
22987 isEndOfTimeline: isEndOfTimeline,
22988 endedTimelineFn: function endedTimelineFn() {
22989 _this5.logger_('received endedtimeline callback');
22990 },
22991 id3Fn: this.handleId3_.bind(this),
22992 dataFn: this.handleData_.bind(this),
22993 doneFn: this.segmentRequestFinished_.bind(this),
22994 onTransmuxerLog: function onTransmuxerLog(_ref8) {
22995 var message = _ref8.message,
22996 level = _ref8.level,
22997 stream = _ref8.stream;
22998
22999 _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
23000 }
23001 });
23002 }
23003 /**
23004 * trim the back buffer so that we don't have too much data
23005 * in the source buffer
23006 *
23007 * @private
23008 *
23009 * @param {Object} segmentInfo - the current segment
23010 */
23011 ;
23012
23013 _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
23014 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
23015 // buffer and a very conservative "garbage collector"
23016 // We manually clear out the old buffer to ensure
23017 // we don't trigger the QuotaExceeded error
23018 // on the source buffer during subsequent appends
23019
23020 if (removeToTime > 0) {
23021 this.remove(0, removeToTime);
23022 }
23023 }
23024 /**
* create a simplified copy of the segment object with just the
23026 * information necessary to perform the XHR and decryption
23027 *
23028 * @private
23029 *
23030 * @param {Object} segmentInfo - the current segment
23031 * @return {Object} a simplified segment object copy
23032 */
23033 ;
23034
23035 _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
23036 var segment = segmentInfo.segment;
23037 var part = segmentInfo.part;
23038 var simpleSegment = {
23039 resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
23040 byterange: part ? part.byterange : segment.byterange,
23041 requestId: segmentInfo.requestId,
23042 transmuxer: segmentInfo.transmuxer,
23043 audioAppendStart: segmentInfo.audioAppendStart,
23044 gopsToAlignWith: segmentInfo.gopsToAlignWith,
23045 part: segmentInfo.part
23046 };
23047 var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
23048
23049 if (previousSegment && previousSegment.timeline === segment.timeline) {
23050 // The baseStartTime of a segment is used to handle rollover when probing the TS
23051 // segment to retrieve timing information. Since the probe only looks at the media's
23052 // times (e.g., PTS and DTS values of the segment), and doesn't consider the
23053 // player's time (e.g., player.currentTime()), baseStartTime should reflect the
23054 // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
23055 // seconds of media time, so should be used here. The previous segment is used since
23056 // the end of the previous segment should represent the beginning of the current
23057 // segment, so long as they are on the same timeline.
23058 if (previousSegment.videoTimingInfo) {
23059 simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
23060 } else if (previousSegment.audioTimingInfo) {
23061 simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
23062 }
23063 }
23064
23065 if (segment.key) {
23066 // if the media sequence is greater than 2^32, the IV will be incorrect
23067 // assuming 10s segments, that would be about 1300 years
23068 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
23069 simpleSegment.key = this.segmentKey(segment.key);
23070 simpleSegment.key.iv = iv;
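// e.g., with no explicit IV, mediaIndex 2 in a playlist whose mediaSequence
// is 100 produces the big-endian 128-bit IV [0, 0, 0, 102], matching the
// HLS default of using the media sequence number as the IV.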
23071 }
23072
23073 if (segment.map) {
23074 simpleSegment.map = this.initSegmentForMap(segment.map);
23075 }
23076
23077 return simpleSegment;
23078 };
23079
23080 _proto.saveTransferStats_ = function saveTransferStats_(stats) {
23081 // every request counts as a media request even if it has been aborted
23082 // or canceled due to a timeout
23083 this.mediaRequests += 1;
23084
23085 if (stats) {
23086 this.mediaBytesTransferred += stats.bytesReceived;
23087 this.mediaTransferDuration += stats.roundTripTime;
23088 }
23089 };
23090
23091 _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
// byteLength will be used for throughput, and should be based on bytes received,
23093 // which we only know at the end of the request and should reflect total bytes
23094 // downloaded rather than just bytes processed from components of the segment
23095 this.pendingSegment_.byteLength = stats.bytesReceived;
23096
23097 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
23098 this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
23099 return;
23100 }
23101
23102 this.bandwidth = stats.bandwidth;
23103 this.roundTrip = stats.roundTripTime;
23104 };
23105
23106 _proto.handleTimeout_ = function handleTimeout_() {
23107 // although the VTT segment loader bandwidth isn't really used, it's good to
// maintain functionality between segment loaders
23109 this.mediaRequestsTimedout += 1;
23110 this.bandwidth = 1;
23111 this.roundTrip = NaN;
23112 this.trigger('bandwidthupdate');
23113 }
23114 /**
23115 * Handle the callback from the segmentRequest function and set the
23116 * associated SegmentLoader state and errors if necessary
23117 *
23118 * @private
23119 */
23120 ;
23121
23122 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
23123 // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
23124 // check the call queue directly since this function doesn't need to deal with any
23125 // data, and can continue even if the source buffers are not set up and we didn't get
23126 // any data from the segment
23127 if (this.callQueue_.length) {
23128 this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
23129 return;
23130 }
23131
23132 this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
23133
23134 if (!this.pendingSegment_) {
23135 return;
23136 } // the request was aborted and the SegmentLoader has already started
23137 // another request. this can happen when the timeout for an aborted
23138 // request triggers due to a limitation in the XHR library
23139 // do not count this as any sort of request or we risk double-counting
23140
23141
23142 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
23143 return;
23144 } // an error occurred from the active pendingSegment_ so reset everything
23145
23146
23147 if (error) {
23148 this.pendingSegment_ = null;
23149 this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
23150
23151 if (error.code === REQUEST_ERRORS.ABORTED) {
23152 return;
23153 }
23154
this.pause(); // the error is really just that at least one of the requests timed out
23156 // set the bandwidth to a very low value and trigger an ABR switch to
23157 // take emergency action
23158
23159 if (error.code === REQUEST_ERRORS.TIMEOUT) {
23160 this.handleTimeout_();
23161 return;
23162 } // if control-flow has arrived here, then the error is real
23163 // emit an error event to blacklist the current playlist
23164
23165
23166 this.mediaRequestsErrored += 1;
23167 this.error(error);
23168 this.trigger('error');
23169 return;
23170 }
23171
23172 var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
23173 // generated for ABR purposes
23174
23175 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
23176 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
23177
23178 if (result.gopInfo) {
23179 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
23180 } // Although we may have already started appending on progress, we shouldn't switch the
23181 // state away from loading until we are officially done loading the segment data.
23182
23183
23184 this.state = 'APPENDING'; // used for testing
23185
23186 this.trigger('appending');
23187 this.waitForAppendsToComplete_(segmentInfo);
23188 };
23189
23190 _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
23191 var timelineMapping = this.syncController_.mappingForTimeline(timeline);
23192
23193 if (timelineMapping !== null) {
23194 this.timeMapping_ = timelineMapping;
23195 }
23196 };
23197
23198 _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
23199 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
23200 this.mediaSecondsLoaded += segment.end - segment.start;
23201 } else {
23202 this.mediaSecondsLoaded += segment.duration;
23203 }
23204 };
23205
23206 _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
23207 if (timestampOffset === null) {
23208 return false;
23209 } // note that we're potentially using the same timestamp offset for both video and
23210 // audio
23211
23212
23213 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
23214 return true;
23215 }
23216
23217 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
23218 return true;
23219 }
23220
23221 return false;
23222 };
23223
23224 _proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
23225 var currentStart = _ref9.currentStart,
23226 playlist = _ref9.playlist,
23227 mediaIndex = _ref9.mediaIndex,
23228 firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData,
23229 currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset,
23230 useVideoTimingInfo = _ref9.useVideoTimingInfo,
23231 videoTimingInfo = _ref9.videoTimingInfo,
23232 audioTimingInfo = _ref9.audioTimingInfo;
23233
23234 if (typeof currentStart !== 'undefined') {
23235 // if start was set once, keep using it
23236 return currentStart;
23237 }
23238
23239 if (!useVideoTimingInfo) {
23240 return audioTimingInfo.start;
23241 }
23242
23243 var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
23244 // within that segment. Since the transmuxer maintains a cache of incomplete data
23245 // from and/or the last frame seen, the start time may reflect a frame that starts
23246 // in the previous segment. Check for that case and ensure the start time is
23247 // accurate for the segment.
23248
23249 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
23250 return firstVideoFrameTimeForData;
23251 }
23252
23253 return videoTimingInfo.start;
23254 };
23255
23256 _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
23257 var trackInfo = this.getCurrentMediaInfo_(segmentInfo);
23258
23259 if (!trackInfo) {
23260 this.error({
23261 message: 'No starting media returned, likely due to an unsupported media format.',
23262 blacklistDuration: Infinity
23263 });
23264 this.trigger('error');
23265 return;
23266 } // Although transmuxing is done, appends may not yet be finished. Throw a marker
23267 // on each queue this loader is responsible for to ensure that the appends are
23268 // complete.
23269
23270
23271 var hasAudio = trackInfo.hasAudio,
23272 hasVideo = trackInfo.hasVideo,
23273 isMuxed = trackInfo.isMuxed;
23274 var waitForVideo = this.loaderType_ === 'main' && hasVideo;
23275 var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
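// e.g., a main loader handling demuxed content with both audio and video
// waits on two appends (one per source buffer queue below), while muxed
// content waits only on the video append.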
23276 segmentInfo.waitingOnAppends = 0; // segments with no data
23277
23278 if (!segmentInfo.hasAppendedData_) {
23279 if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
23280 // When there's no audio or video data in the segment, there's no audio or video
23281 // timing information.
23282 //
23283 // If there's no audio or video timing information, then the timestamp offset
23284 // can't be adjusted to the appropriate value for the transmuxer and source
23285 // buffers.
23286 //
23287 // Therefore, the next segment should be used to set the timestamp offset.
23288 this.isPendingTimestampOffset_ = true;
23289 } // override settings for metadata only segments
23290
23291
23292 segmentInfo.timingInfo = {
23293 start: 0
23294 };
23295 segmentInfo.waitingOnAppends++;
23296
23297 if (!this.isPendingTimestampOffset_) {
23298 // update the timestampoffset
23299 this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
23300 // no video/audio data.
23301
23302 this.processMetadataQueue_();
23303 } // append is "done" instantly with no data.
23304
23305
23306 this.checkAppendsDone_(segmentInfo);
23307 return;
23308 } // Since source updater could call back synchronously, do the increments first.
23309
23310
23311 if (waitForVideo) {
23312 segmentInfo.waitingOnAppends++;
23313 }
23314
23315 if (waitForAudio) {
23316 segmentInfo.waitingOnAppends++;
23317 }
23318
23319 if (waitForVideo) {
23320 this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
23321 }
23322
23323 if (waitForAudio) {
23324 this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
23325 }
23326 };
23327
23328 _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
23329 if (this.checkForAbort_(segmentInfo.requestId)) {
23330 return;
23331 }
23332
23333 segmentInfo.waitingOnAppends--;
23334
23335 if (segmentInfo.waitingOnAppends === 0) {
23336 this.handleAppendsDone_();
23337 }
23338 };
23339
23340 _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
23341 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
23342
23343 if (illegalMediaSwitchError) {
23344 this.error({
23345 message: illegalMediaSwitchError,
23346 blacklistDuration: Infinity
23347 });
23348 this.trigger('error');
23349 return true;
23350 }
23351
23352 return false;
23353 };
23354
23355 _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
23356 if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
23357 // priority, timing-wise, so we must wait
23358 typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
23359 segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
23360 this.loaderType_ !== 'main') {
23361 return;
23362 }
23363
23364 var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
23365 // the timing info here comes from video. In the event that the audio is longer than
23366 // the video, this will trim the start of the audio.
23367 // This also trims any offset from 0 at the beginning of the media
23368
23369 segmentInfo.timestampOffset -= segmentInfo.timingInfo.start; // In the event that there are part segment downloads, each will try to update the
23370 // timestamp offset. Retaining this bit of state prevents us from updating in the
23371 // future (within the same segment), however, there may be a better way to handle it.
23372
23373 segmentInfo.changedTimestampOffset = true;
23374
23375 if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
23376 this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
23377 didChange = true;
23378 }
23379
23380 if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
23381 this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
23382 didChange = true;
23383 }
23384
23385 if (didChange) {
23386 this.trigger('timestampoffset');
23387 }
23388 };
23389
23390 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
23391 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
23392 var trackInfo = this.getMediaInfo_();
23393 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
23394 var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
23395
23396 if (!prioritizedTimingInfo) {
23397 return;
23398 }
23399
23400 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
23401 // current example is the case of fmp4), so use the rough duration to calculate an
23402 // end time.
23403 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
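// e.g., an fmp4 segment whose parsed timing info only has start 100 and a
// playlist-listed duration of 6 gets timingInfo.end = 100 + 6 = 106
// (illustrative values).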
23404 }
23405 /**
23406 * callback to run when appendBuffer is finished. detects if we are
23407 * in a good state to do things with the data we got, or if we need
23408 * to wait for more
23409 *
23410 * @private
23411 */
23412 ;
23413
23414 _proto.handleAppendsDone_ = function handleAppendsDone_() {
23415 // appendsdone can cause an abort
23416 if (this.pendingSegment_) {
23417 this.trigger('appendsdone');
23418 }
23419
23420 if (!this.pendingSegment_) {
23421 this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
23422 // all appending cases?
23423
23424 if (!this.paused()) {
23425 this.monitorBuffer_();
23426 }
23427
23428 return;
23429 }
23430
23431 var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
23432 // best to wait until all appends are done so we're sure that the primary media is
23433 // finished (and we have its end time).
23434
23435 this.updateTimingInfoEnd_(segmentInfo);
23436
23437 if (this.shouldSaveSegmentTimingInfo_) {
23438 // Timeline mappings should only be saved for the main loader. This is for multiple
23439 // reasons:
23440 //
23441 // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
23442 // and the main loader try to save the timeline mapping, whichever comes later
23443 // will overwrite the first. In theory this is OK, as the mappings should be the
23444 // same, however, it breaks for (2)
23445 // 2) In the event of a live stream, the initial live point will make for a somewhat
23446 // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
23447 // the mapping will be off for one of the streams, dependent on which one was
23448 // first saved (see (1)).
23449 // 3) Primary timing goes by video in VHS, so the mapping should be video.
23450 //
23451 // Since the audio loader will wait for the main loader to load the first segment,
23452 // the main loader will save the first timeline mapping, and ensure that there won't
23453 // be a case where audio loads two segments without saving a mapping (thus leading
23454 // to missing segment timing info).
23455 this.syncController_.saveSegmentTimingInfo({
23456 segmentInfo: segmentInfo,
23457 shouldSaveTimelineMapping: this.loaderType_ === 'main'
23458 });
23459 }
23460
23461 var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
23462
23463 if (segmentDurationMessage) {
23464 if (segmentDurationMessage.severity === 'warn') {
23465 videojs__default["default"].log.warn(segmentDurationMessage.message);
23466 } else {
23467 this.logger_(segmentDurationMessage.message);
23468 }
23469 }
23470
23471 this.recordThroughput_(segmentInfo);
23472 this.pendingSegment_ = null;
23473 this.state = 'READY';
23474
23475 if (segmentInfo.isSyncRequest) {
23476 this.trigger('syncinfoupdate'); // if the sync request was not appended
23477 // then it was not the correct segment.
23478 // throw it away and use the data it gave us
23479 // to get the correct one.
23480
23481 if (!segmentInfo.hasAppendedData_) {
23482 this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
23483 return;
23484 }
23485 }
23486
23487 this.logger_("Appended " + segmentInfoString(segmentInfo));
23488 this.addSegmentMetadataCue_(segmentInfo);
23489 this.fetchAtBuffer_ = true;
23490
23491 if (this.currentTimeline_ !== segmentInfo.timeline) {
23492 this.timelineChangeController_.lastTimelineChange({
23493 type: this.loaderType_,
23494 from: this.currentTimeline_,
23495 to: segmentInfo.timeline
23496 }); // If audio is not disabled, the main segment loader is responsible for updating
23497 // the audio timeline as well. If the content is video only, this won't have any
23498 // impact.
23499
23500 if (this.loaderType_ === 'main' && !this.audioDisabled_) {
23501 this.timelineChangeController_.lastTimelineChange({
23502 type: 'audio',
23503 from: this.currentTimeline_,
23504 to: segmentInfo.timeline
23505 });
23506 }
23507 }
23508
23509 this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
23510 // the following conditional otherwise it may consider this a bad "guess"
23511 // and attempt to resync when the post-update seekable window and live
23512 // point would mean that this was the perfect segment to fetch
23513
23514 this.trigger('syncinfoupdate');
23515 var segment = segmentInfo.segment; // If we previously appended a segment that ends more than 3 targetDurations before
23516 // the currentTime_ that means that our conservative guess was too conservative.
23517 // In that case, reset the loader state so that we try to use any information gained
23518 // from the previous request to create a new, more accurate, sync-point.
23519
23520 if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
23521 this.resetEverything();
23522 return;
23523 }
23524
23525 var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
23526 // and conservatively guess
23527
23528 if (isWalkingForward) {
23529 this.trigger('bandwidthupdate');
23530 }
23531
23532 this.trigger('progress');
23533 this.mediaIndex = segmentInfo.mediaIndex;
23534 this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
23535 // buffer, end the stream. this ensures the "ended" event will
23536 // fire if playback reaches that point.
23537
23538 if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
23539 this.endOfStream();
23540 } // used for testing
23541
23542
23543 this.trigger('appended');
23544
23545 if (segmentInfo.hasAppendedData_) {
23546 this.mediaAppends++;
23547 }
23548
23549 if (!this.paused()) {
23550 this.monitorBuffer_();
23551 }
23552 }
23553 /**
23554 * Records the current throughput of the decrypt, transmux, and append
23555 * portion of the segment pipeline. `throughput.rate` is the cumulative
23556 * moving average of the throughput. `throughput.count` is the number of
23557 * data points in the average.
23558 *
23559 * @private
23560 * @param {Object} segmentInfo the object returned by loadSegment
23561 */
23562 ;
23563
23564 _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
23565 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
23566 this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
23567 return;
23568 }
23569
23570 var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
23571 // by zero in the case where the throughput is ridiculously high
23572
23573 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
23574
23575 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
23576 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
23577
23578 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
23579 }
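  // A worked example of the cumulative moving average above (illustrative
  // numbers): with throughput = { rate: 4000000, count: 2 } and a 3MB segment
  // whose decrypt/transmux/append took 1999ms (so segmentProcessingTime is 2000):
  //   segmentProcessingThroughput === Math.floor(3e6 / 2000 * 8 * 1000) === 12000000
  //   count becomes 3 and rate += (12000000 - 4000000) / 3, i.e. rate ~= 6666667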
23580 /**
23581 * Adds a cue to the segment-metadata track with some metadata information about the
23582 * segment
23583 *
23584 * @private
23585 * @param {Object} segmentInfo
23586 * the object returned by loadSegment
23587 * @method addSegmentMetadataCue_
23588 */
23589 ;
23590
23591 _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
23592 if (!this.segmentMetadataTrack_) {
23593 return;
23594 }
23595
23596 var segment = segmentInfo.segment;
23597 var start = segment.start;
23598 var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
23599
23600 if (!finite(start) || !finite(end)) {
23601 return;
23602 }
23603
23604 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
23605 var Cue = window.WebKitDataCue || window.VTTCue;
23606 var value = {
23607 custom: segment.custom,
23608 dateTimeObject: segment.dateTimeObject,
23609 dateTimeString: segment.dateTimeString,
23610 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
23611 resolution: segmentInfo.playlist.attributes.RESOLUTION,
23612 codecs: segmentInfo.playlist.attributes.CODECS,
23613 byteLength: segmentInfo.byteLength,
23614 uri: segmentInfo.uri,
23615 timeline: segmentInfo.timeline,
23616 playlist: segmentInfo.playlist.id,
23617 start: start,
23618 end: end
23619 };
23620 var data = JSON.stringify(value);
23621 var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between
23622 // the differences of WebKitDataCue in safari and VTTCue in other browsers
23623
23624 cue.value = value;
23625 this.segmentMetadataTrack_.addCue(cue);
23626 };
23627
23628 return SegmentLoader;
23629 }(videojs__default["default"].EventTarget);
23630
23631 function noop() {}
23632
23633 var toTitleCase = function toTitleCase(string) {
23634 if (typeof string !== 'string') {
23635 return string;
23636 }
23637
23638 return string.replace(/./, function (w) {
23639 return w.toUpperCase();
23640 });
23641 };
23642
23643 var bufferTypes = ['video', 'audio'];
23644
23645 var _updating = function updating(type, sourceUpdater) {
23646 var sourceBuffer = sourceUpdater[type + "Buffer"];
23647 return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
23648 };
23649
23650 var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
23651 for (var i = 0; i < queue.length; i++) {
23652 var queueEntry = queue[i];
23653
23654 if (queueEntry.type === 'mediaSource') {
23655 // If the next entry is a media source entry (uses multiple source buffers), block
23656 // processing to allow it to go through first.
23657 return null;
23658 }
23659
23660 if (queueEntry.type === type) {
23661 return i;
23662 }
23663 }
23664
23665 return null;
23666 };
23667
23668 var shiftQueue = function shiftQueue(type, sourceUpdater) {
23669 if (sourceUpdater.queue.length === 0) {
23670 return;
23671 }
23672
23673 var queueIndex = 0;
23674 var queueEntry = sourceUpdater.queue[queueIndex];
23675
23676 if (queueEntry.type === 'mediaSource') {
23677 if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
23678 sourceUpdater.queue.shift();
23679 queueEntry.action(sourceUpdater);
23680
23681 if (queueEntry.doneFn) {
23682 queueEntry.doneFn();
23683 } // Only specific source buffer actions must wait for async updateend events. Media
23684 // Source actions process synchronously. Therefore, both audio and video source
23685 // buffers are now clear to process the next queue entries.
23686
23687
23688 shiftQueue('audio', sourceUpdater);
23689 shiftQueue('video', sourceUpdater);
23690 } // Media Source actions require both source buffers, so if the media source action
23691 // couldn't process yet (because one or both source buffers are busy), block other
23692 // queue actions until both are available and the media source action can process.
23693
23694
23695 return;
23696 }
23697
23698 if (type === 'mediaSource') {
23699 // If the queue was shifted by a media source action (this happens when pushing a
23700 // media source action onto the queue), then it wasn't from an updateend event from an
23701 // audio or video source buffer, so there's no change from previous state, and no
23702 // processing should be done.
23703 return;
23704 } // Media source queue entries don't need to consider whether the source updater is
23705 // started (i.e., source buffers are created) as they don't need the source buffers, but
23706 // source buffer queue entries do.
23707
23708
23709 if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
23710 return;
23711 }
23712
23713 if (queueEntry.type !== type) {
23714 queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
23715
23716 if (queueIndex === null) {
23717 // Either there's no queue entry that uses this source buffer type in the queue, or
23718 // there's a media source queue entry before the next entry of this type, in which
23719 // case wait for that action to process first.
23720 return;
23721 }
23722
23723 queueEntry = sourceUpdater.queue[queueIndex];
23724 }
23725
23726 sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
23727 //
23728 // The queue pending operation must be set before the action is performed in the event
23729 // that the action results in a synchronous event that is acted upon. For instance, if
23730 // an exception is thrown that can be handled, it's possible that new actions will be
23731 // appended to an empty queue and immediately executed, but would not have the correct
23732 // pending information if this property was set after the action was performed.
23733
23734 sourceUpdater.queuePending[type] = queueEntry;
23735 queueEntry.action(type, sourceUpdater);
23736
23737 if (!queueEntry.doneFn) {
23738 // synchronous operation, process next entry
23739 sourceUpdater.queuePending[type] = null;
23740 shiftQueue(type, sourceUpdater);
23741 return;
23742 }
23743 };
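  // An illustrative walkthrough of the blocking rules above (hypothetical queue
  // contents): with queue = [videoAppend, durationChange ('mediaSource' type),
  // audioAppend], shiftQueue('audio', ...) sees a video entry at the head and
  // nextQueueIndexOfType('audio', queue) returns null because the 'mediaSource'
  // entry sits before audioAppend, so audio stays blocked. Once videoAppend's
  // updateend fires and neither buffer is updating, durationChange runs, and
  // shiftQueue is re-run for both 'audio' and 'video', unblocking audioAppend.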
23744
23745 var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
23746 var buffer = sourceUpdater[type + "Buffer"];
23747 var titleType = toTitleCase(type);
23748
23749 if (!buffer) {
23750 return;
23751 }
23752
23753 buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
23754 buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
23755 sourceUpdater.codecs[type] = null;
23756 sourceUpdater[type + "Buffer"] = null;
23757 };
23758
23759 var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
23760 return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
23761 };
23762
23763 var actions = {
23764 appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
23765 return function (type, sourceUpdater) {
23766 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
23767 // or the media source does not contain this source buffer.
23768
23769 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
23770 return;
23771 }
23772
23773 sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
23774
23775 try {
23776 sourceBuffer.appendBuffer(bytes);
23777 } catch (e) {
23778 sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
23779 sourceUpdater.queuePending[type] = null;
23780 onError(e);
23781 }
23782 };
23783 },
23784 remove: function remove(start, end) {
23785 return function (type, sourceUpdater) {
23786 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
23787 // or the media source does not contain this source buffer.
23788
23789 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
23790 return;
23791 }
23792
23793 sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
23794
23795 try {
23796 sourceBuffer.remove(start, end);
23797 } catch (e) {
23798 sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
23799 }
23800 };
23801 },
23802 timestampOffset: function timestampOffset(offset) {
23803 return function (type, sourceUpdater) {
23804 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
23805 // or the media source does not contain this source buffer.
23806
23807 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
23808 return;
23809 }
23810
23811 sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
23812 sourceBuffer.timestampOffset = offset;
23813 };
23814 },
23815 callback: function callback(_callback) {
23816 return function (type, sourceUpdater) {
23817 _callback();
23818 };
23819 },
23820 endOfStream: function endOfStream(error) {
23821 return function (sourceUpdater) {
23822 if (sourceUpdater.mediaSource.readyState !== 'open') {
23823 return;
23824 }
23825
23826 sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
23827
23828 try {
23829 sourceUpdater.mediaSource.endOfStream(error);
23830 } catch (e) {
23831 videojs__default["default"].log.warn('Failed to call media source endOfStream', e);
23832 }
23833 };
23834 },
23835 duration: function duration(_duration) {
23836 return function (sourceUpdater) {
23837 sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
23838
23839 try {
23840 sourceUpdater.mediaSource.duration = _duration;
23841 } catch (e) {
23842 videojs__default["default"].log.warn('Failed to set media source duration', e);
23843 }
23844 };
23845 },
23846 abort: function abort() {
23847 return function (type, sourceUpdater) {
23848 if (sourceUpdater.mediaSource.readyState !== 'open') {
23849 return;
23850 }
23851
23852 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
23853 // or the media source does not contain this source buffer.
23854
23855 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
23856 return;
23857 }
23858
23859 sourceUpdater.logger_("calling abort on " + type + "Buffer");
23860
23861 try {
23862 sourceBuffer.abort();
23863 } catch (e) {
23864 videojs__default["default"].log.warn("Failed to abort on " + type + "Buffer", e);
23865 }
23866 };
23867 },
23868 addSourceBuffer: function addSourceBuffer(type, codec) {
23869 return function (sourceUpdater) {
23870 var titleType = toTitleCase(type);
23871 var mime = getMimeForCodec(codec);
23872 sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
23873 var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
23874 sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
23875 sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
23876 sourceUpdater.codecs[type] = codec;
23877 sourceUpdater[type + "Buffer"] = sourceBuffer;
23878 };
23879 },
23880 removeSourceBuffer: function removeSourceBuffer(type) {
23881 return function (sourceUpdater) {
23882 var sourceBuffer = sourceUpdater[type + "Buffer"];
23883 cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
23884 // or the media source does not contain this source buffer.
23885
23886 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
23887 return;
23888 }
23889
23890 sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
23891
23892 try {
23893 sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
23894 } catch (e) {
23895 videojs__default["default"].log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
23896 }
23897 };
23898 },
23899 changeType: function changeType(codec) {
23900 return function (type, sourceUpdater) {
23901 var sourceBuffer = sourceUpdater[type + "Buffer"];
23902 var mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
23903 // or the media source does not contain this source buffer.
23904
23905 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
23906 return;
23907 } // do not update codec if we don't need to.
23908
23909
23910 if (sourceUpdater.codecs[type] === codec) {
23911 return;
23912 }
23913
23914 sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
23915 sourceBuffer.changeType(mime);
23916 sourceUpdater.codecs[type] = codec;
23917 };
23918 }
23919 };
23920
23921 var pushQueue = function pushQueue(_ref) {
23922 var type = _ref.type,
23923 sourceUpdater = _ref.sourceUpdater,
23924 action = _ref.action,
23925 doneFn = _ref.doneFn,
23926 name = _ref.name;
23927 sourceUpdater.queue.push({
23928 type: type,
23929 action: action,
23930 doneFn: doneFn,
23931 name: name
23932 });
23933 shiftQueue(type, sourceUpdater);
23934 };
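  // A minimal usage sketch (assumes `sourceUpdater` is an already-initialized
  // SourceUpdater instance; see the class below):
  //
  //   pushQueue({
  //     type: 'video',
  //     sourceUpdater: sourceUpdater,
  //     action: actions.remove(0, 10),
  //     doneFn: function() { /* runs on the video buffer's updateend */ },
  //     name: 'remove'
  //   });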
23935
23936 var onUpdateend = function onUpdateend(type, sourceUpdater) {
23937 return function (e) {
23938 // Although there should, in theory, be a pending action for any updateend received,
23939 // there are some actions that may trigger updateend events without set definitions in
23940 // the w3c spec. For instance, setting the duration on the media source may trigger
23941 // updateend events on source buffers. This does not appear to be in the spec. As such,
23942 // if we encounter an updateend without a corresponding pending action from our queue
23943 // for that source buffer type, process the next action.
23944 if (sourceUpdater.queuePending[type]) {
23945 var doneFn = sourceUpdater.queuePending[type].doneFn;
23946 sourceUpdater.queuePending[type] = null;
23947
23948 if (doneFn) {
23949 // if there's an error, report it
23950 doneFn(sourceUpdater[type + "Error_"]);
23951 }
23952 }
23953
23954 shiftQueue(type, sourceUpdater);
23955 };
23956 };
23957 /**
23958 * A queue of callbacks to be serialized and applied when a
23959 * MediaSource and its associated SourceBuffers are not in the
23960 * updating state. It is used by the segment loader to update the
23961 * underlying SourceBuffers when new data is loaded, for instance.
23962 *
23963 * @class SourceUpdater
23964 * @param {MediaSource} mediaSource the MediaSource to create SourceBuffers from
23966 */
23967
23968
23969 var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
23970 inheritsLoose(SourceUpdater, _videojs$EventTarget);
23971
23972 function SourceUpdater(mediaSource) {
23973 var _this;
23974
23975 _this = _videojs$EventTarget.call(this) || this;
23976 _this.mediaSource = mediaSource;
23977
23978 _this.sourceopenListener_ = function () {
23979 return shiftQueue('mediaSource', assertThisInitialized(_this));
23980 };
23981
23982 _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);
23983
23984 _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
23985
23986 _this.audioTimestampOffset_ = 0;
23987 _this.videoTimestampOffset_ = 0;
23988 _this.queue = [];
23989 _this.queuePending = {
23990 audio: null,
23991 video: null
23992 };
23993 _this.delayedAudioAppendQueue_ = [];
23994 _this.videoAppendQueued_ = false;
23995 _this.codecs = {};
23996 _this.onVideoUpdateEnd_ = onUpdateend('video', assertThisInitialized(_this));
23997 _this.onAudioUpdateEnd_ = onUpdateend('audio', assertThisInitialized(_this));
23998
23999 _this.onVideoError_ = function (e) {
24000 // used for debugging
24001 _this.videoError_ = e;
24002 };
24003
24004 _this.onAudioError_ = function (e) {
24005 // used for debugging
24006 _this.audioError_ = e;
24007 };
24008
24009 _this.createdSourceBuffers_ = false;
24010 _this.initializedEme_ = false;
24011 _this.triggeredReady_ = false;
24012 return _this;
24013 }
24014
24015 var _proto = SourceUpdater.prototype;
24016
24017 _proto.initializedEme = function initializedEme() {
24018 this.initializedEme_ = true;
24019 this.triggerReady();
24020 };
24021
24022 _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
24023 // if false, likely waiting on one of the segment loaders to get enough data to create
24024 // source buffers
24025 return this.createdSourceBuffers_;
24026 };
24027
24028 _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
24029 return this.initializedEme_;
24030 };
24031
24032 _proto.ready = function ready() {
24033 return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
24034 };
24035
24036 _proto.createSourceBuffers = function createSourceBuffers(codecs) {
24037 if (this.hasCreatedSourceBuffers()) {
24038 // already created them before
24039 return;
24040 } // the initial addOrChangeSourceBuffers will always be
24041 // two add buffers.
24042
24043
24044 this.addOrChangeSourceBuffers(codecs);
24045 this.createdSourceBuffers_ = true;
24046 this.trigger('createdsourcebuffers');
24047 this.triggerReady();
24048 };
24049
24050 _proto.triggerReady = function triggerReady() {
24051 // only allow ready to be triggered once, this prevents the case
24052 // where:
24053 // 1. we trigger createdsourcebuffers
24054 // 2. IE 11 synchronously initializes eme
24055 // 3. the synchronous initialization causes us to trigger ready
24056 // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
24057 if (this.ready() && !this.triggeredReady_) {
24058 this.triggeredReady_ = true;
24059 this.trigger('ready');
24060 }
24061 }
24062 /**
24063 * Add a type of source buffer to the media source.
24064 *
24065 * @param {string} type
24066 * The type of source buffer to add.
24067 *
24068 * @param {string} codec
24069 * The codec to add the source buffer with.
24070 */
24071 ;
24072
24073 _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
24074 pushQueue({
24075 type: 'mediaSource',
24076 sourceUpdater: this,
24077 action: actions.addSourceBuffer(type, codec),
24078 name: 'addSourceBuffer'
24079 });
24080 }
24081 /**
24082 * call abort on a source buffer.
24083 *
24084 * @param {string} type
24085 * The type of source buffer to call abort on.
24086 */
24087 ;
24088
24089 _proto.abort = function abort(type) {
24090 pushQueue({
24091 type: type,
24092 sourceUpdater: this,
24093 action: actions.abort(type),
24094 name: 'abort'
24095 });
24096 }
24097 /**
24098 * Call removeSourceBuffer and remove a specific type
24099 * of source buffer on the mediaSource.
24100 *
24101 * @param {string} type
24102 * The type of source buffer to remove.
24103 */
24104 ;
24105
24106 _proto.removeSourceBuffer = function removeSourceBuffer(type) {
24107 if (!this.canRemoveSourceBuffer()) {
24108 videojs__default["default"].log.error('removeSourceBuffer is not supported!');
24109 return;
24110 }
24111
24112 pushQueue({
24113 type: 'mediaSource',
24114 sourceUpdater: this,
24115 action: actions.removeSourceBuffer(type),
24116 name: 'removeSourceBuffer'
24117 });
24118 }
24119 /**
24120 * Whether or not the removeSourceBuffer function is supported
24121 * on the mediaSource.
24122 *
24123 * @return {boolean}
24124 * if removeSourceBuffer can be called.
24125 */
24126 ;
24127
24128 _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
24129 // IE reports that it supports removeSourceBuffer, but often throws
24130 // errors when attempting to use the function. So we report that it
24131 // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
24132 // throws errors, so we report that it does not support this as well.
24133 return !videojs__default["default"].browser.IE_VERSION && !videojs__default["default"].browser.IS_FIREFOX && window.MediaSource && window.MediaSource.prototype && typeof window.MediaSource.prototype.removeSourceBuffer === 'function';
24134 }
24135 /**
24136 * Whether or not the changeType function is supported
24137 * on our SourceBuffers.
24138 *
24139 * @return {boolean}
24140 * if changeType can be called.
24141 */
24142 ;
24143
24144 SourceUpdater.canChangeType = function canChangeType() {
24145 return window.SourceBuffer && window.SourceBuffer.prototype && typeof window.SourceBuffer.prototype.changeType === 'function';
24146 }
24147 /**
24148 * Whether or not the changeType function is supported
24149 * on our SourceBuffers.
24150 *
24151 * @return {boolean}
24152 * if changeType can be called.
24153 */
24154 ;
24155
24156 _proto.canChangeType = function canChangeType() {
24157 return this.constructor.canChangeType();
24158 }
24159 /**
24160 * Call the changeType function on a source buffer, given the code and type.
24161 *
24162 * @param {string} type
24163 * The type of source buffer to call changeType on.
24164 *
24165 * @param {string} codec
24166 * The codec string to change type with on the source buffer.
24167 */
24168 ;
24169
24170 _proto.changeType = function changeType(type, codec) {
24171 if (!this.canChangeType()) {
24172 videojs__default["default"].log.error('changeType is not supported!');
24173 return;
24174 }
24175
24176 pushQueue({
24177 type: type,
24178 sourceUpdater: this,
24179 action: actions.changeType(codec),
24180 name: 'changeType'
24181 });
24182 }
24183 /**
24184 * Add source buffers with a codec or, if they are already created,
24185 * call changeType on the existing source buffers.
24186 *
24187 * @param {Object} codecs
24188 * Codecs to switch to
24189 */
24190 ;
24191
24192 _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
24193 var _this2 = this;
24194
24195 if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
24196 throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
24197 }
24198
24199 Object.keys(codecs).forEach(function (type) {
24200 var codec = codecs[type];
24201
24202 if (!_this2.hasCreatedSourceBuffers()) {
24203 return _this2.addSourceBuffer(type, codec);
24204 }
24205
24206 if (_this2.canChangeType()) {
24207 _this2.changeType(type, codec);
24208 }
24209 });
24210 }
24211 /**
24212 * Queue an update to append an ArrayBuffer.
24213 *
24214 * @param {Object} options object containing type, bytes and an optional segmentInfo
24215 * @param {Function} doneFn the function to call when done
24216 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
24217 */
24218 ;
24219
24220 _proto.appendBuffer = function appendBuffer(options, doneFn) {
24221 var _this3 = this;
24222
24223 var segmentInfo = options.segmentInfo,
24224 type = options.type,
24225 bytes = options.bytes;
24226 this.processedAppend_ = true;
24227
24228 if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
24229 this.delayedAudioAppendQueue_.push([options, doneFn]);
24230 this.logger_("delayed audio append of " + bytes.length + " until video append");
24231 return;
24232 } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
24233 // not be fired. This means that the queue will be blocked until the next action
24234 // taken by the segment-loader. Provide a mechanism for segment-loader to handle
24235 // these errors by calling the doneFn with the specific error.
24236
24237
24238 var onError = doneFn;
24239 pushQueue({
24240 type: type,
24241 sourceUpdater: this,
24242 action: actions.appendBuffer(bytes, segmentInfo || {
24243 mediaIndex: -1
24244 }, onError),
24245 doneFn: doneFn,
24246 name: 'appendBuffer'
24247 });
24248
24249 if (type === 'video') {
24250 this.videoAppendQueued_ = true;
24251
24252 if (!this.delayedAudioAppendQueue_.length) {
24253 return;
24254 }
24255
24256 var queue = this.delayedAudioAppendQueue_.slice();
24257 this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
24258 this.delayedAudioAppendQueue_.length = 0;
24259 queue.forEach(function (que) {
24260 _this3.appendBuffer.apply(_this3, que);
24261 });
24262 }
24263 }
24264 /**
24265 * Get the audio buffer's buffered timerange.
24266 *
24267 * @return {TimeRange}
24268 * The audio buffer's buffered time range
24269 */
24270 ;
24271
24272 _proto.audioBuffered = function audioBuffered() {
24273 // no media source/source buffer or it isn't in the media sources
24274 // source buffer list
24275 if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
24276 return videojs__default["default"].createTimeRange();
24277 }
24278
24279 return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs__default["default"].createTimeRange();
24280 }
24281 /**
24282 * Get the video buffer's buffered timerange.
24283 *
24284 * @return {TimeRange}
24285 * The video buffer's buffered time range
24286 */
24287 ;
24288
24289 _proto.videoBuffered = function videoBuffered() {
24290 // no media source/source buffer or it isn't in the media sources
24291 // source buffer list
24292 if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
24293 return videojs__default["default"].createTimeRange();
24294 }
24295
24296 return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs__default["default"].createTimeRange();
24297 }
24298 /**
24299 * Get a combined video/audio buffer's buffered timerange.
24300 *
24301 * @return {TimeRange}
24302 * the combined time range
24303 */
24304 ;
24305
24306 _proto.buffered = function buffered() {
24307 var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
24308 var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
24309
24310 if (audio && !video) {
24311 return this.audioBuffered();
24312 }
24313
24314 if (video && !audio) {
24315 return this.videoBuffered();
24316 }
24317
24318 return bufferIntersection(this.audioBuffered(), this.videoBuffered());
24319 }
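  // For example (illustrative ranges): if videoBuffered() covers [0, 10] and
  // [12, 20] while audioBuffered() covers [2, 15], bufferIntersection (a helper
  // defined elsewhere in this bundle) yields [2, 10] and [12, 15] -- only time
  // that is buffered for both tracks counts as buffered.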
24320 /**
24321 * Add a callback to the queue that will set duration on the mediaSource.
24322 *
24323 * @param {number} duration
24324 * The duration to set
24325 *
24326 * @param {Function} [doneFn]
24327 * function to run after duration has been set.
24328 */
24329 ;
24330
24331 _proto.setDuration = function setDuration(duration, doneFn) {
24332 if (doneFn === void 0) {
24333 doneFn = noop;
24334 }
24335
24336 // In order to set the duration on the media source, it's necessary to wait for all
24337 // source buffers to no longer be updating. "If the updating attribute equals true on
24338 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
24339 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
24340 pushQueue({
24341 type: 'mediaSource',
24342 sourceUpdater: this,
24343 action: actions.duration(duration),
24344 name: 'duration',
24345 doneFn: doneFn
24346 });
24347 }
24348 /**
24349 * Add a mediaSource endOfStream call to the queue
24350 *
24351 * @param {Error} [error]
24352 * Call endOfStream with an error
24353 *
24354 * @param {Function} [doneFn]
24355 * A function that should be called when the
24356 * endOfStream call has finished.
24357 */
24358 ;
24359
24360 _proto.endOfStream = function endOfStream(error, doneFn) {
24361 if (error === void 0) {
24362 error = null;
24363 }
24364
24365 if (doneFn === void 0) {
24366 doneFn = noop;
24367 }
24368
24369 if (typeof error !== 'string') {
24370 error = undefined;
24371 } // In order to call endOfStream on the media source, it's necessary to wait for all
24372 // source buffers to no longer be updating. "If the updating attribute equals true on
24373 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
24374 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
24375
24376
24377 pushQueue({
24378 type: 'mediaSource',
24379 sourceUpdater: this,
24380 action: actions.endOfStream(error),
24381 name: 'endOfStream',
24382 doneFn: doneFn
24383 });
24384 }
24385 /**
24386 * Queue an update to remove a time range from the buffer.
24387 *
24388 * @param {number} start where to start the removal
24389 * @param {number} end where to end the removal
24390 * @param {Function} [done=noop] optional callback to be executed when the remove
24391 * operation is complete
24392 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
24393 */
24394 ;
24395
24396 _proto.removeAudio = function removeAudio(start, end, done) {
24397 if (done === void 0) {
24398 done = noop;
24399 }
24400
24401 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
24402 done();
24403 return;
24404 }
24405
24406 pushQueue({
24407 type: 'audio',
24408 sourceUpdater: this,
24409 action: actions.remove(start, end),
24410 doneFn: done,
24411 name: 'remove'
24412 });
24413 }
24414 /**
24415 * Queue an update to remove a time range from the buffer.
24416 *
24417 * @param {number} start where to start the removal
24418 * @param {number} end where to end the removal
24419 * @param {Function} [done=noop] optional callback to be executed when the remove
24420 * operation is complete
24421 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
24422 */
24423 ;
24424
24425 _proto.removeVideo = function removeVideo(start, end, done) {
24426 if (done === void 0) {
24427 done = noop;
24428 }
24429
24430 if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
24431 done();
24432 return;
24433 }
24434
24435 pushQueue({
24436 type: 'video',
24437 sourceUpdater: this,
24438 action: actions.remove(start, end),
24439 doneFn: done,
24440 name: 'remove'
24441 });
24442 }
24443 /**
24444 * Whether the underlying sourceBuffer is updating or not
24445 *
24446 * @return {boolean} the updating status of the SourceBuffer
24447 */
24448 ;
24449
24450 _proto.updating = function updating() {
24451 // the audio/video source buffer is updating
24452 if (_updating('audio', this) || _updating('video', this)) {
24453 return true;
24454 }
24455
24456 return false;
24457 }
24458 /**
24459 * Set/get the timestampoffset on the audio SourceBuffer
24460 *
24461 * @return {number} the timestamp offset
24462 */
24463 ;
24464
24465 _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
24466 if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
24467 this.audioTimestampOffset_ !== offset) {
24468 pushQueue({
24469 type: 'audio',
24470 sourceUpdater: this,
24471 action: actions.timestampOffset(offset),
24472 name: 'timestampOffset'
24473 });
24474 this.audioTimestampOffset_ = offset;
24475 }
24476
24477 return this.audioTimestampOffset_;
24478 }
24479 /**
24480 * Set/get the timestampoffset on the video SourceBuffer
24481 *
24482 * @return {number} the timestamp offset
24483 */
24484 ;
24485
24486 _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
24487 if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
24488 this.videoTimestampOffset_ !== offset) {
24489 pushQueue({
24490 type: 'video',
24491 sourceUpdater: this,
24492 action: actions.timestampOffset(offset),
24493 name: 'timestampOffset'
24494 });
24495 this.videoTimestampOffset_ = offset;
24496 }
24497
24498 return this.videoTimestampOffset_;
24499 }
24500 /**
24501 * Add a function to the queue that will be called
24502 * when it is its turn to run in the audio queue.
24503 *
24504 * @param {Function} callback
24505 * The callback to queue.
24506 */
24507 ;
24508
24509 _proto.audioQueueCallback = function audioQueueCallback(callback) {
24510 if (!this.audioBuffer) {
24511 return;
24512 }
24513
24514 pushQueue({
24515 type: 'audio',
24516 sourceUpdater: this,
24517 action: actions.callback(callback),
24518 name: 'callback'
24519 });
24520 }
24521 /**
24522 * Add a function to the queue that will be called
24523 * when it is its turn to run in the video queue.
24524 *
24525 * @param {Function} callback
24526 * The callback to queue.
24527 */
24528 ;
24529
24530 _proto.videoQueueCallback = function videoQueueCallback(callback) {
24531 if (!this.videoBuffer) {
24532 return;
24533 }
24534
24535 pushQueue({
24536 type: 'video',
24537 sourceUpdater: this,
24538 action: actions.callback(callback),
24539 name: 'callback'
24540 });
24541 }
24542 /**
24543 * dispose of the source updater and the underlying sourceBuffer
24544 */
24545 ;
24546
24547 _proto.dispose = function dispose() {
24548 var _this4 = this;
24549
24550 this.trigger('dispose');
24551 bufferTypes.forEach(function (type) {
24552 _this4.abort(type);
24553
24554 if (_this4.canRemoveSourceBuffer()) {
24555 _this4.removeSourceBuffer(type);
24556 } else {
24557 _this4[type + "QueueCallback"](function () {
24558 return cleanupBuffer(type, _this4);
24559 });
24560 }
24561 });
24562 this.videoAppendQueued_ = false;
24563 this.delayedAudioAppendQueue_.length = 0;
24564
24565 if (this.sourceopenListener_) {
24566 this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
24567 }
24568
24569 this.off();
24570 };
24571
24572 return SourceUpdater;
24573 }(videojs__default["default"].EventTarget);
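  // A minimal usage sketch of SourceUpdater (hypothetical setup; the codec
  // strings, `someUint8Array`, and `done` are illustrative placeholders):
  //
  //   var mediaSource = new window.MediaSource();
  //   var updater = new SourceUpdater(mediaSource);
  //   // 'mediaSource'-typed queue entries wait for the sourceopen event, so
  //   // the media source should be attached to a media element.
  //   updater.createSourceBuffers({ video: 'avc1.4d401f', audio: 'mp4a.40.2' });
  //   updater.initializedEme(); // 'ready' fires once buffers exist and eme is set up
  //   updater.appendBuffer({ type: 'video', bytes: someUint8Array }, done);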
24574
24575 var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
24576 return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
24577 };
24578
24579 var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
24580 return char.charCodeAt(0);
24581 }));
24582 /**
24583 * An object that manages segment loading and appending.
24584 *
24585 * @class VTTSegmentLoader
24586 * @param {Object} options required and optional options
24587 * @extends videojs.EventTarget
24588 */
24589
24590 var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
24591 inheritsLoose(VTTSegmentLoader, _SegmentLoader);
24592
24593 function VTTSegmentLoader(settings, options) {
24594 var _this;
24595
24596 if (options === void 0) {
24597 options = {};
24598 }
24599
24600 _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
24601 // however, VTTSegmentLoader has no need of a media source, so delete the reference
24602
24603 _this.mediaSource_ = null;
24604 _this.subtitlesTrack_ = null;
24605 _this.loaderType_ = 'subtitle';
24606 _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
24607 // the sync controller leads to improper behavior.
24608
24609 _this.shouldSaveSegmentTimingInfo_ = false;
24610 return _this;
24611 }
24612
24613 var _proto = VTTSegmentLoader.prototype;
24614
24615 _proto.createTransmuxer_ = function createTransmuxer_() {
24616 // don't need to transmux any subtitles
24617 return null;
24618 }
24619 /**
24620 * Indicates which time ranges are buffered
24621 *
24622 * @return {TimeRange}
24623 * TimeRange object representing the current buffered ranges
24624 */
24625 ;
24626
24627 _proto.buffered_ = function buffered_() {
24628 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
24629 return videojs__default["default"].createTimeRanges();
24630 }
24631
24632 var cues = this.subtitlesTrack_.cues;
24633 var start = cues[0].startTime;
24634 var end = cues[cues.length - 1].startTime;
24635 return videojs__default["default"].createTimeRanges([[start, end]]);
24636 }
24637 /**
24638 * Gets and sets init segment for the provided map
24639 *
24640 * @param {Object} map
24641 * The map object representing the init segment to get or set
24642 * @param {boolean=} set
24643 * If true, the init segment for the provided map should be saved
24644 * @return {Object}
24645 * map object for desired init segment
24646 */
24647 ;
24648
24649 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
24650 if (set === void 0) {
24651 set = false;
24652 }
24653
24654 if (!map) {
24655 return null;
24656 }
24657
24658 var id = initSegmentId(map);
24659 var storedMap = this.initSegments_[id];
24660
24661 if (set && !storedMap && map.bytes) {
24662 // append WebVTT line terminators to the media initialization segment if it exists
24663 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
24664 // requires two or more WebVTT line terminators between the WebVTT header and the
24665 // rest of the file
24666 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
24667 var combinedSegment = new Uint8Array(combinedByteLength);
24668 combinedSegment.set(map.bytes);
24669 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
24670 this.initSegments_[id] = storedMap = {
24671 resolvedUri: map.resolvedUri,
24672 byterange: map.byterange,
24673 bytes: combinedSegment
24674 };
24675 }
24676
24677 return storedMap || map;
24678 }
24679 /**
24680 * Returns true if all configuration required for loading is present, otherwise false.
24681 *
24682 * @return {boolean} True if all configuration required for loading is present
24683 * @private
24684 */
24685 ;
24686
24687 _proto.couldBeginLoading_ = function couldBeginLoading_() {
24688 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
24689 }
24690 /**
24691 * Once all the starting parameters have been specified, begin
24692 * operation. This method should only be invoked from the INIT
24693 * state.
24694 *
24695 * @private
24696 */
24697 ;
24698
24699 _proto.init_ = function init_() {
24700 this.state = 'READY';
24701 this.resetEverything();
24702 return this.monitorBuffer_();
24703 }
24704 /**
24705 * Set a subtitle track on the segment loader to add subtitles to
24706 *
24707 * @param {TextTrack=} track
24708 * The text track to add loaded subtitles to
24709 * @return {TextTrack}
24710 * Returns the subtitles track
24711 */
24712 ;
24713
24714 _proto.track = function track(_track) {
24715 if (typeof _track === 'undefined') {
24716 return this.subtitlesTrack_;
24717 }
24718
24719 this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
24720 // buffering now
24721
24722 if (this.state === 'INIT' && this.couldBeginLoading_()) {
24723 this.init_();
24724 }
24725
24726 return this.subtitlesTrack_;
24727 }
24728 /**
24729 * Remove any data in the source buffer between start and end times
24730 *
24731 * @param {number} start - the start time of the region to remove from the buffer
24732 * @param {number} end - the end time of the region to remove from the buffer
24733 */
24734 ;
24735
24736 _proto.remove = function remove(start, end) {
24737 removeCuesFromTrack(start, end, this.subtitlesTrack_);
24738 }
24739 /**
24740 * fill the buffer with segments unless the sourceBuffers are
24741 * currently updating
24742 *
24743 * Note: this function should only ever be called by monitorBuffer_
24744 * and never directly
24745 *
24746 * @private
24747 */
24748 ;
24749
24750 _proto.fillBuffer_ = function fillBuffer_() {
24751 var _this2 = this;
24752
24753 // see if we need to begin loading immediately
24754 var segmentInfo = this.chooseNextRequest_();
24755
24756 if (!segmentInfo) {
24757 return;
24758 }
24759
24760 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
24761 // We don't have the timestamp offset that we need to sync subtitles.
24762 // Rerun on a timestamp offset or user interaction.
24763 var checkTimestampOffset = function checkTimestampOffset() {
24764 _this2.state = 'READY';
24765
24766 if (!_this2.paused()) {
24767 // if not paused, queue a buffer check as soon as possible
24768 _this2.monitorBuffer_();
24769 }
24770 };
24771
24772 this.syncController_.one('timestampoffset', checkTimestampOffset);
24773 this.state = 'WAITING_ON_TIMELINE';
24774 return;
24775 }
24776
24777 this.loadSegment_(segmentInfo);
24778 } // never set a timestamp offset for vtt segments.
24779 ;
24780
24781 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
24782 return null;
24783 };
24784
24785 _proto.chooseNextRequest_ = function chooseNextRequest_() {
24786 return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
24787 }
24788 /**
24789 * Prevents the segment loader from requesting segments we know contain no subtitles
24790 * by walking forward until we find the next segment that is not already
24791 * known to be empty.
24792 *
24793 * @param {Object} segmentInfo
24794 * a segment info object that describes the current segment
24795 * @return {Object}
24796 * a segment info object that describes the current segment
24797 */
24798 ;
24799
24800 _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
24801 while (segmentInfo && segmentInfo.segment.empty) {
24802 // stop at the last possible segmentInfo
24803 if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
24804 segmentInfo = null;
24805 break;
24806 }
24807
24808 segmentInfo = this.generateSegmentInfo_({
24809 playlist: segmentInfo.playlist,
24810 mediaIndex: segmentInfo.mediaIndex + 1,
24811 startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
24812 isSyncRequest: segmentInfo.isSyncRequest
24813 });
24814 }
24815
24816 return segmentInfo;
24817 };
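  // For example (hypothetical playlist state): if the segments at mediaIndex 3
  // and 4 were previously parsed to zero cues (segment.empty === true) and
  // index 5 is still unknown, a request for index 3 is advanced to index 5. If
  // every remaining segment is known-empty, null is returned and no request is made.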
24818
24819 _proto.stopForError = function stopForError(error) {
24820 this.error(error);
24821 this.state = 'READY';
24822 this.pause();
24823 this.trigger('error');
24824 }
24825 /**
24826 * append a decrypted segment to the SourceBuffer through a SourceUpdater
24827 *
24828 * @private
24829 */
24830 ;
24831
24832 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
24833 var _this3 = this;
24834
24835 if (!this.subtitlesTrack_) {
24836 this.state = 'READY';
24837 return;
24838 }
24839
24840 this.saveTransferStats_(simpleSegment.stats); // the request was aborted
24841
24842 if (!this.pendingSegment_) {
24843 this.state = 'READY';
24844 this.mediaRequestsAborted += 1;
24845 return;
24846 }
24847
24848 if (error) {
24849 if (error.code === REQUEST_ERRORS.TIMEOUT) {
24850 this.handleTimeout_();
24851 }
24852
24853 if (error.code === REQUEST_ERRORS.ABORTED) {
24854 this.mediaRequestsAborted += 1;
24855 } else {
24856 this.mediaRequestsErrored += 1;
24857 }
24858
24859 this.stopForError(error);
24860 return;
24861 }
24862
24863 var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
24864 // maintain functionality between segment loaders
24865
24866 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
24867 this.state = 'APPENDING'; // used for tests
24868
24869 this.trigger('appending');
24870 var segment = segmentInfo.segment;
24871
24872 if (segment.map) {
24873 segment.map.bytes = simpleSegment.map.bytes;
24874 }
24875
24876 segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded; otherwise, wait until it has finished loading
24877
24878 if (typeof window.WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
24879 var loadHandler;
24880
24881 var errorHandler = function errorHandler() {
24882 _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
24883
24884 _this3.stopForError({
24885 message: 'Error loading vtt.js'
24886 });
24887
24888 return;
24889 };
24890
24891 loadHandler = function loadHandler() {
24892 _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
24893
24894 _this3.segmentRequestFinished_(error, simpleSegment, result);
24895 };
24896
24897 this.state = 'WAITING_ON_VTTJS';
24898 this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
24899 this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);
24900 return;
24901 }
24902
24903 segment.requested = true;
24904
24905 try {
24906 this.parseVTTCues_(segmentInfo);
24907 } catch (e) {
24908 this.stopForError({
24909 message: e.message
24910 });
24911 return;
24912 }
24913
24914 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
24915
24916 if (segmentInfo.cues.length) {
24917 segmentInfo.timingInfo = {
24918 start: segmentInfo.cues[0].startTime,
24919 end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
24920 };
24921 } else {
24922 segmentInfo.timingInfo = {
24923 start: segmentInfo.startOfSegment,
24924 end: segmentInfo.startOfSegment + segmentInfo.duration
24925 };
24926 }
24927
24928 if (segmentInfo.isSyncRequest) {
24929 this.trigger('syncinfoupdate');
24930 this.pendingSegment_ = null;
24931 this.state = 'READY';
24932 return;
24933 }
24934
24935 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
24936 this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
24937 // the subtitle track
24938
24939 segmentInfo.cues.forEach(function (cue) {
24940 _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
24941 }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
24942 // cues to have identical time-intervals, but if the text is also identical
24943 // we can safely assume it is a duplicate that can be removed (ex. when a cue
24944 // "overlaps" VTT segments)
24945
24946 removeDuplicateCuesFromTrack(this.subtitlesTrack_);
24947 this.handleAppendsDone_();
24948 };
24949
24950 _proto.handleData_ = function handleData_() {// noop as we shouldn't be getting video/audio data captions
24951 // that we do not support here.
24952 };
24953
24954 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
24955 }
24956 /**
24957 * Uses the WebVTT parser to parse the segment response
24958 *
24959 * @param {Object} segmentInfo
24960 * a segment info object that describes the current segment
24961 * @private
24962 */
24963 ;
24964
24965 _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
24966 var decoder;
24967 var decodeBytesToString = false;
24968
24969 if (typeof window.TextDecoder === 'function') {
24970 decoder = new window.TextDecoder('utf8');
24971 } else {
24972 decoder = window.WebVTT.StringDecoder();
24973 decodeBytesToString = true;
24974 }
24975
24976 var parser = new window.WebVTT.Parser(window, window.vttjs, decoder);
24977 segmentInfo.cues = [];
24978 segmentInfo.timestampmap = {
24979 MPEGTS: 0,
24980 LOCAL: 0
24981 };
24982 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
24983
24984 parser.ontimestampmap = function (map) {
24985 segmentInfo.timestampmap = map;
24986 };
24987
24988 parser.onparsingerror = function (error) {
24989 videojs__default["default"].log.warn('Error encountered when parsing cues: ' + error.message);
24990 };
24991
24992 if (segmentInfo.segment.map) {
24993 var mapData = segmentInfo.segment.map.bytes;
24994
24995 if (decodeBytesToString) {
24996 mapData = uint8ToUtf8(mapData);
24997 }
24998
24999 parser.parse(mapData);
25000 }
25001
25002 var segmentData = segmentInfo.bytes;
25003
25004 if (decodeBytesToString) {
25005 segmentData = uint8ToUtf8(segmentData);
25006 }
25007
25008 parser.parse(segmentData);
25009 parser.flush();
25010 }
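  // The parser's ontimestampmap handler fires for a WebVTT header like the
  // following (an illustrative header using the HLS X-TIMESTAMP-MAP tag):
  //
  //   WEBVTT
  //   X-TIMESTAMP-MAP=MPEGTS:900000,LOCAL:00:00:00.000
  //
  // yielding segmentInfo.timestampmap = { MPEGTS: 900000, LOCAL: 0 }, which
  // updateTimeMapping_ below uses to shift cue times into player time.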
25011 /**
25012 * Updates the start and end times of any cues parsed by the WebVTT parser using
25013 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
25014 * from the SyncController
25015 *
25016 * @param {Object} segmentInfo
25017 * a segment info object that describes the current segment
25018 * @param {Object} mappingObj
25019 * object containing a mapping from TS to media time
25020 * @param {Object} playlist
25021 * the playlist object containing the segment
25022 * @private
25023 */
25024 ;
25025
25026 _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
25027 var segment = segmentInfo.segment;
25028
25029 if (!mappingObj) {
25030 // If the sync controller does not have a mapping of TS to Media Time for the
25031 // timeline, then we don't have enough information to update the cue
25032 // start/end times
25033 return;
25034 }
25035
25036 if (!segmentInfo.cues.length) {
25037 // If there are no cues, we also do not have enough information to figure out
25038 // segment timing. Mark that the segment contains no cues so we don't re-request
25039 // an empty segment.
25040 segment.empty = true;
25041 return;
25042 }
25043
25044 var timestampmap = segmentInfo.timestampmap;
25045 var diff = timestampmap.MPEGTS / clock.ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
25046 segmentInfo.cues.forEach(function (cue) {
25047 // First convert cue time to TS time using the timestamp-map provided within the vtt
25048 cue.startTime += diff;
25049 cue.endTime += diff;
25050 });
25051
25052 if (!playlist.syncInfo) {
25053 var firstStart = segmentInfo.cues[0].startTime;
25054 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
25055 playlist.syncInfo = {
25056 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
25057 time: Math.min(firstStart, lastStart - segment.duration)
25058 };
25059 }
25060 };
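  // A worked example of the shift above (illustrative values): with
  // timestampmap = { MPEGTS: 900000, LOCAL: 0 }, clock.ONE_SECOND_IN_TS === 90000
  // (the 90kHz MPEG-TS clock), and mappingObj.mapping === -8:
  //   diff === 900000 / 90000 - 0 + (-8) === 2
  // so a cue authored at [0, 3] displays at [2, 5] in player time.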
25061
25062 return VTTSegmentLoader;
25063 }(SegmentLoader);
25064
25065 /**
25066 * @file ad-cue-tags.js
25067 */
25068
25069 /**
25070 * Searches for an ad cue that overlaps with the given mediaTime
25071 *
25072 * @param {Object} track
25073 * the track to find the cue for
25074 *
25075 * @param {number} mediaTime
25076 * the time to find the cue at
25077 *
25078 * @return {Object|null}
25079 * the found cue or null
25080 */
25081 var findAdCue = function findAdCue(track, mediaTime) {
25082 var cues = track.cues;
25083
25084 for (var i = 0; i < cues.length; i++) {
25085 var cue = cues[i];
25086
25087 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
25088 return cue;
25089 }
25090 }
25091
25092 return null;
25093 };
25094 var updateAdCues = function updateAdCues(media, track, offset) {
25095 if (offset === void 0) {
25096 offset = 0;
25097 }
25098
25099 if (!media.segments) {
25100 return;
25101 }
25102
25103 var mediaTime = offset;
25104 var cue;
25105
25106 for (var i = 0; i < media.segments.length; i++) {
25107 var segment = media.segments[i];
25108
25109 if (!cue) {
25110 // Since the cues will span for at least the segment duration, adding a fudge
25111 // factor of half segment duration will prevent duplicate cues from being
25112 // created when timing info is not exact (e.g. cue start time initialized
25113 // at 10.006677, but next call mediaTime is 10.003332)
25114 cue = findAdCue(track, mediaTime + segment.duration / 2);
25115 }
25116
25117 if (cue) {
25118 if ('cueIn' in segment) {
25119 // Found a CUE-IN so end the cue
25120 cue.endTime = mediaTime;
25121 cue.adEndTime = mediaTime;
25122 mediaTime += segment.duration;
25123 cue = null;
25124 continue;
25125 }
25126
25127 if (mediaTime < cue.endTime) {
25128 // Already processed this mediaTime for this cue
25129 mediaTime += segment.duration;
25130 continue;
25131 } // otherwise extend cue until a CUE-IN is found
25132
25133
25134 cue.endTime += segment.duration;
25135 } else {
25136 if ('cueOut' in segment) {
25137 cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
25138 cue.adStartTime = mediaTime; // Assumes tag format to be
25139 // #EXT-X-CUE-OUT:30
25140
25141 cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
25142 track.addCue(cue);
25143 }
25144
25145 if ('cueOutCont' in segment) {
25146 // Entered into the middle of an ad cue
25147 // Assumes tag format to be
25148 // #EXT-X-CUE-OUT-CONT:10/30
25149 var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
25150 adOffset = _segment$cueOutCont$s[0],
25151 adTotal = _segment$cueOutCont$s[1];
25152
25153 cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, '');
25154 cue.adStartTime = mediaTime - adOffset;
25155 cue.adEndTime = cue.adStartTime + adTotal;
25156 track.addCue(cue);
25157 }
25158 }
25159
25160 mediaTime += segment.duration;
25161 }
25162 };
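// Editorial sketch of the CUE-OUT-CONT arithmetic above (hypothetical tag
// value, not part of the original bundle):
var cueOutContExample = function cueOutContExample() {
  // '#EXT-X-CUE-OUT-CONT:10/30' means 10 seconds into a 30 second ad break
  var parsed = '10/30'.split('/').map(parseFloat);
  // so adStartTime = mediaTime - 10 and adEndTime = adStartTime + 30
  return { adOffset: parsed[0], adTotal: parsed[1] }; // { adOffset: 10, adTotal: 30 }
};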
25163
25164 // synchronize expired playlist segments.
25165 // the max media sequence diff is 48 hours of live stream
25166 // content with two second segments. Anything larger than that
25167 // will likely be invalid.
25168
25169 var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
25170 var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
25171 // the equivalence of display-time 0 and segment-index 0
25172 {
25173 name: 'VOD',
25174 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
25175 if (duration !== Infinity) {
25176 var syncPoint = {
25177 time: 0,
25178 segmentIndex: 0,
25179 partIndex: null
25180 };
25181 return syncPoint;
25182 }
25183
25184 return null;
25185 }
25186 }, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
25187 {
25188 name: 'ProgramDateTime',
25189 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
25190 if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
25191 return null;
25192 }
25193
25194 var syncPoint = null;
25195 var lastDistance = null;
25196 var partsAndSegments = getPartsAndSegments(playlist);
25197 currentTime = currentTime || 0;
25198
25199 for (var i = 0; i < partsAndSegments.length; i++) {
25200 // start from the end and loop backwards for live
25201 // or start from the front and loop forwards for non-live
25202 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
25203 var partAndSegment = partsAndSegments[index];
25204 var segment = partAndSegment.segment;
25205 var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
25206
25207 if (!datetimeMapping) {
25208 continue;
25209 }
25210
25211 if (segment.dateTimeObject) {
25212 var segmentTime = segment.dateTimeObject.getTime() / 1000;
25213 var start = segmentTime + datetimeMapping; // take part duration into account.
25214
25215 if (segment.parts && typeof partAndSegment.partIndex === 'number') {
25216 for (var z = 0; z < partAndSegment.partIndex; z++) {
25217 start += segment.parts[z].duration;
25218 }
25219 }
25220
25221 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
25222 // currentTime and can stop looking for better candidates
25223
25224 if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
25225 break;
25226 }
25227
25228 lastDistance = distance;
25229 syncPoint = {
25230 time: start,
25231 segmentIndex: partAndSegment.segmentIndex,
25232 partIndex: partAndSegment.partIndex
25233 };
25234 }
25235 }
25236
25237 return syncPoint;
25238 }
25239 }, // Strategy "Segment": We have a known time mapping for a timeline and a
25240 // segment in the current timeline with timing data
25241 {
25242 name: 'Segment',
25243 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
25244 var syncPoint = null;
25245 var lastDistance = null;
25246 currentTime = currentTime || 0;
25247 var partsAndSegments = getPartsAndSegments(playlist);
25248
25249 for (var i = 0; i < partsAndSegments.length; i++) {
25250 // start from the end and loop backwards for live
25251 // or start from the front and loop forwards for non-live
25252 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
25253 var partAndSegment = partsAndSegments[index];
25254 var segment = partAndSegment.segment;
25255 var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;
25256
25257 if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
25258 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
25259 // currentTime and can stop looking for better candidates
25260
25261 if (lastDistance !== null && lastDistance < distance) {
25262 break;
25263 }
25264
25265 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
25266 lastDistance = distance;
25267 syncPoint = {
25268 time: start,
25269 segmentIndex: partAndSegment.segmentIndex,
25270 partIndex: partAndSegment.partIndex
25271 };
25272 }
25273 }
25274 }
25275
25276 return syncPoint;
25277 }
25278 }, // Strategy "Discontinuity": We have a discontinuity with a known
25279 // display-time
25280 {
25281 name: 'Discontinuity',
25282 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
25283 var syncPoint = null;
25284 currentTime = currentTime || 0;
25285
25286 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
25287 var lastDistance = null;
25288
25289 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
25290 var segmentIndex = playlist.discontinuityStarts[i];
25291 var discontinuity = playlist.discontinuitySequence + i + 1;
25292 var discontinuitySync = syncController.discontinuities[discontinuity];
25293
25294 if (discontinuitySync) {
25295 var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
25296 // currentTime and can stop looking for better candidates
25297
25298 if (lastDistance !== null && lastDistance < distance) {
25299 break;
25300 }
25301
25302 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
25303 lastDistance = distance;
25304 syncPoint = {
25305 time: discontinuitySync.time,
25306 segmentIndex: segmentIndex,
25307 partIndex: null
25308 };
25309 }
25310 }
25311 }
25312 }
25313
25314 return syncPoint;
25315 }
25316 }, // Strategy "Playlist": We have a playlist with a known mapping of
25317 // segment index to display time
25318 {
25319 name: 'Playlist',
25320 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
25321 if (playlist.syncInfo) {
25322 var syncPoint = {
25323 time: playlist.syncInfo.time,
25324 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
25325 partIndex: null
25326 };
25327 return syncPoint;
25328 }
25329
25330 return null;
25331 }
25332 }];
25333
25334 var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
25335 inheritsLoose(SyncController, _videojs$EventTarget);
25336
25337 function SyncController(options) {
25338 var _this;
25339
25340 _this = _videojs$EventTarget.call(this) || this; // ...for syncing across variants
25341
25342 _this.timelines = [];
25343 _this.discontinuities = [];
25344 _this.timelineToDatetimeMappings = {};
25345 _this.logger_ = logger('SyncController');
25346 return _this;
25347 }
25348 /**
25349 * Find a sync-point for the playlist specified
25350 *
25351 * A sync-point is defined as a known mapping from display-time to
25352 * a segment-index in the current playlist.
25353 *
25354 * @param {Playlist} playlist
25355 * The playlist that needs a sync-point
25356 * @param {number} duration
25357 * Duration of the MediaSource (Infinite if playing a live source)
25358 * @param {number} currentTimeline
25359 * The last timeline from which a segment was loaded
25360 * @return {Object}
25361 * A sync-point object
25362 */
25363
25364
25365 var _proto = SyncController.prototype;
25366
25367 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
25368 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
25369
25370 if (!syncPoints.length) {
25371 // Signal that we need to attempt to get a sync-point manually
25372 // by fetching a segment in the playlist and constructing
25373 // a sync-point from that information
25374 return null;
25375 } // Now find the sync-point that is closest to the currentTime because
25376 // that should result in the most accurate guess about which segment
25377 // to fetch
25378
25379
25380 return this.selectSyncPoint_(syncPoints, {
25381 key: 'time',
25382 value: currentTime
25383 });
25384 }
25385 /**
25386 * Calculate the amount of time that has expired off the playlist during playback
25387 *
25388 * @param {Playlist} playlist
25389 * Playlist object to calculate expired from
25390 * @param {number} duration
25391 * Duration of the MediaSource (Infinity if playing a live source)
25392 * @return {number|null}
25393 * The amount of time that has expired off the playlist during playback. Null
25394 * if no sync-points for the playlist can be found.
25395 */
25396 ;
25397
25398 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
25399 if (!playlist || !playlist.segments) {
25400 return null;
25401 }
25402
25403 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
25404
25405 if (!syncPoints.length) {
25406 return null;
25407 }
25408
25409 var syncPoint = this.selectSyncPoint_(syncPoints, {
25410 key: 'segmentIndex',
25411 value: 0
25412 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
25413 // duration from index 0 to syncPoint.segmentIndex instead of adding.
25414
25415 if (syncPoint.segmentIndex > 0) {
25416 syncPoint.time *= -1;
25417 }
25418
25419 return Math.abs(syncPoint.time + sumDurations({
25420 defaultDuration: playlist.targetDuration,
25421 durationList: playlist.segments,
25422 startIndex: syncPoint.segmentIndex,
25423 endIndex: 0
25424 }));
25425 }
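// Worked example (editorial, hypothetical numbers, assuming sumDurations
// sums the segment durations between the two indexes): if the chosen
// sync-point is { time: 20, segmentIndex: 2 } and segments 0 and 1 are 10
// seconds each, the time is negated (segmentIndex > 0) and the expired
// time is |(-20) + (10 + 10)| = 0, i.e. nothing has expired yet.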
25426 /**
25427 * Runs each sync-point strategy and returns a list of sync-points returned by the
25428 * strategies
25429 *
25430 * @private
25431 * @param {Playlist} playlist
25432 * The playlist that needs a sync-point
25433 * @param {number} duration
25434 * Duration of the MediaSource (Infinity if playing a live source)
25435 * @param {number} currentTimeline
25436 * The last timeline from which a segment was loaded
25437 * @return {Array}
25438 * A list of sync-point objects
25439 */
25440 ;
25441
25442 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
25443 var syncPoints = []; // Try to find a sync-point by utilizing various strategies...
25444
25445 for (var i = 0; i < syncPointStrategies.length; i++) {
25446 var strategy = syncPointStrategies[i];
25447 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
25448
25449 if (syncPoint) {
25450 syncPoint.strategy = strategy.name;
25451 syncPoints.push({
25452 strategy: strategy.name,
25453 syncPoint: syncPoint
25454 });
25455 }
25456 }
25457
25458 return syncPoints;
25459 }
25460 /**
25461 * Selects the sync-point nearest the specified target
25462 *
25463 * @private
25464 * @param {Array} syncPoints
25465 * List of sync-points to select from
25466 * @param {Object} target
25467 * Object specifying the property and value we are targeting
25468 * @param {string} target.key
25469 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
25470 * @param {number} target.value
25471 * The value to target for the specified key.
25472 * @return {Object}
25473 * The sync-point nearest the target
25474 */
25475 ;
25476
25477 _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
25478 var bestSyncPoint = syncPoints[0].syncPoint;
25479 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
25480 var bestStrategy = syncPoints[0].strategy;
25481
25482 for (var i = 1; i < syncPoints.length; i++) {
25483 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
25484
25485 if (newDistance < bestDistance) {
25486 bestDistance = newDistance;
25487 bestSyncPoint = syncPoints[i].syncPoint;
25488 bestStrategy = syncPoints[i].strategy;
25489 }
25490 }
25491
25492 this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex) + (typeof bestSyncPoint.partIndex === 'number' ? ",partIndex:" + bestSyncPoint.partIndex : '') + ']');
25493 return bestSyncPoint;
25494 }
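// Worked example (editorial, hypothetical numbers): targeting
// { key: 'time', value: 12 } against sync-points with times 0 and 10
// selects the one at time 10, since |10 - 12| = 2 beats |0 - 12| = 12.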
25495 /**
25496 * When segments leave the live window, save any meta-data present on
25497 * them to the playlist so that it can be used for synchronization at the
25498 * playlist level later.
25499 *
25500 * @param {Playlist} oldPlaylist - The previous active playlist
25501 * @param {Playlist} newPlaylist - The updated and most current playlist
25502 */
25503 ;
25504
25505 _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
25506 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps
25507
25508 if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
25509 videojs__default["default"].log.warn("Not saving expired segment info. Media sequence gap " + mediaSequenceDiff + " is too large.");
25510 return;
25511 } // When a segment expires from the playlist and it has a start time
25512 // save that information as a possible sync-point reference in future
25513
25514
25515 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
25516 var lastRemovedSegment = oldPlaylist.segments[i];
25517
25518 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
25519 newPlaylist.syncInfo = {
25520 mediaSequence: oldPlaylist.mediaSequence + i,
25521 time: lastRemovedSegment.start
25522 };
25523 this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
25524 this.trigger('syncinfoupdate');
25525 break;
25526 }
25527 }
25528 }
25529 /**
25530 * Save the mapping from playlist's ProgramDateTime to display. This should only happen
25531 * before segments start to load.
25532 *
25533 * @param {Playlist} playlist - The currently active playlist
25534 */
25535 ;
25536
25537 _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
25538 // It's possible for the playlist to be updated before playback starts, meaning time
25539 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
25540 // crossed, then the old time zero mapping (for the prior timeline) would be retained
25541 // unless the mappings are cleared.
25542 this.timelineToDatetimeMappings = {};
25543
25544 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
25545 var firstSegment = playlist.segments[0];
25546 var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
25547 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
25548 }
25549 }
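// Worked example (editorial, hypothetical date): a first segment with
// #EXT-X-PROGRAM-DATE-TIME of 1970-01-01T00:00:10Z gives
// playlistTimestamp = 10, so the stored mapping is -10 and
// segmentTime + mapping places that segment at display-time 0.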
25550 /**
25551 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
25552 * based on the latest timing information.
25553 *
25554 * @param {Object} options
25555 * Options object
25556 * @param {SegmentInfo} options.segmentInfo
25557 * The current active request information
25558 * @param {boolean} options.shouldSaveTimelineMapping
25559 * If there's a timeline change, determines whether the new mapping should be
25560 * saved to the timeline and program-date-time mappings.
25561 */
25562 ;
25563
25564 _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
25565 var segmentInfo = _ref.segmentInfo,
25566 shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
25567 var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
25568 var segment = segmentInfo.segment;
25569
25570 if (didCalculateSegmentTimeMapping) {
25571 this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
25572 // now with segment timing information
25573
25574 if (!segmentInfo.playlist.syncInfo) {
25575 segmentInfo.playlist.syncInfo = {
25576 mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
25577 time: segment.start
25578 };
25579 }
25580 }
25581
25582 var dateTime = segment.dateTimeObject;
25583
25584 if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
25585 this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
25586 }
25587 };
25588
25589 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
25590 if (typeof this.timelines[timeline] === 'undefined') {
25591 return null;
25592 }
25593
25594 return this.timelines[timeline].time;
25595 };
25596
25597 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
25598 if (typeof this.timelines[timeline] === 'undefined') {
25599 return null;
25600 }
25601
25602 return this.timelines[timeline].mapping;
25603 }
25604 /**
25605 * Use the "media time" for a segment to generate a mapping to "display time" and
25606 * save that display time to the segment.
25607 *
25608 * @private
25609 * @param {SegmentInfo} segmentInfo
25610 * The current active request information
25611 * @param {Object} timingInfo
25612 * The start and end time of the current segment in "media time"
25613 * @param {boolean} shouldSaveTimelineMapping
25614 * If there's a timeline change, determines if the timeline mapping should be
25615 * saved in timelines.
25616 * @return {boolean}
25617 * Returns false if segment time mapping could not be calculated
25618 */
25619 ;
25620
25621 _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
25622 // TODO: remove side effects
25623 var segment = segmentInfo.segment;
25624 var part = segmentInfo.part;
25625 var mappingObj = this.timelines[segmentInfo.timeline];
25626 var start;
25627 var end;
25628
25629 if (typeof segmentInfo.timestampOffset === 'number') {
25630 mappingObj = {
25631 time: segmentInfo.startOfSegment,
25632 mapping: segmentInfo.startOfSegment - timingInfo.start
25633 };
25634
25635 if (shouldSaveTimelineMapping) {
25636 this.timelines[segmentInfo.timeline] = mappingObj;
25637 this.trigger('timestampoffset');
25638 this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
25639 }
25640
25641 start = segmentInfo.startOfSegment;
25642 end = timingInfo.end + mappingObj.mapping;
25643 } else if (mappingObj) {
25644 start = timingInfo.start + mappingObj.mapping;
25645 end = timingInfo.end + mappingObj.mapping;
25646 } else {
25647 return false;
25648 }
25649
25650 if (part) {
25651 part.start = start;
25652 part.end = end;
25653 } // If we don't have a segment start yet or the start value we got
25654 // is less than our current segment.start value, save a new start value.
25655 // We have to do this because parts will have segment timing info saved
25656 // multiple times and we want segment start to be the earliest part start
25657 // value for that segment.
25658
25659
25660 if (!segment.start || start < segment.start) {
25661 segment.start = start;
25662 }
25663
25664 segment.end = end;
25665 return true;
25666 }
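// Worked example (editorial, hypothetical numbers): with
// startOfSegment = 30 and timingInfo = { start: 100, end: 104 }, the saved
// mapping is 30 - 100 = -70, so display start = 30 and
// display end = 104 + (-70) = 34.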
25667 /**
25668 * Each time we have discontinuity in the playlist, attempt to calculate the location
25669 * in display of the start of the discontinuity and save that. We also save an accuracy
25670 * value so that we save values with the most accuracy (closest to 0.)
25671 *
25672 * @private
25673 * @param {SegmentInfo} segmentInfo - The current active request information
25674 */
25675 ;
25676
25677 _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
25678 var playlist = segmentInfo.playlist;
25679 var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
25680 // the range starts, and its accuracy is 0 (greater accuracy values
25681 // mean more approximation)
25682
25683 if (segment.discontinuity) {
25684 this.discontinuities[segment.timeline] = {
25685 time: segment.start,
25686 accuracy: 0
25687 };
25688 } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
25689 // Search for future discontinuities that we can provide better timing
25690 // information for and save that information for sync purposes
25691 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
25692 var segmentIndex = playlist.discontinuityStarts[i];
25693 var discontinuity = playlist.discontinuitySequence + i + 1;
25694 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
25695 var accuracy = Math.abs(mediaIndexDiff);
25696
25697 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
25698 var time = void 0;
25699
25700 if (mediaIndexDiff < 0) {
25701 time = segment.start - sumDurations({
25702 defaultDuration: playlist.targetDuration,
25703 durationList: playlist.segments,
25704 startIndex: segmentInfo.mediaIndex,
25705 endIndex: segmentIndex
25706 });
25707 } else {
25708 time = segment.end + sumDurations({
25709 defaultDuration: playlist.targetDuration,
25710 durationList: playlist.segments,
25711 startIndex: segmentInfo.mediaIndex + 1,
25712 endIndex: segmentIndex
25713 });
25714 }
25715
25716 this.discontinuities[discontinuity] = {
25717 time: time,
25718 accuracy: accuracy
25719 };
25720 }
25721 }
25722 }
25723 };
25724
25725 _proto.dispose = function dispose() {
25726 this.trigger('dispose');
25727 this.off();
25728 };
25729
25730 return SyncController;
25731 }(videojs__default["default"].EventTarget);
25732
25733 /**
25734 * The TimelineChangeController acts as a source for segment loaders to listen for and
25735 * keep track of latest and pending timeline changes. This is useful to ensure proper
25736 * sync, as each loader may need to make a consideration for what timeline the other
25737 * loader is on before making changes which could impact the other loader's media.
25738 *
25739 * @class TimelineChangeController
25740 * @extends videojs.EventTarget
25741 */
25742
25743 var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
25744 inheritsLoose(TimelineChangeController, _videojs$EventTarget);
25745
25746 function TimelineChangeController() {
25747 var _this;
25748
25749 _this = _videojs$EventTarget.call(this) || this;
25750 _this.pendingTimelineChanges_ = {};
25751 _this.lastTimelineChanges_ = {};
25752 return _this;
25753 }
25754
25755 var _proto = TimelineChangeController.prototype;
25756
25757 _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
25758 this.pendingTimelineChanges_[type] = null;
25759 this.trigger('pendingtimelinechange');
25760 };
25761
25762 _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
25763 var type = _ref.type,
25764 from = _ref.from,
25765 to = _ref.to;
25766
25767 if (typeof from === 'number' && typeof to === 'number') {
25768 this.pendingTimelineChanges_[type] = {
25769 type: type,
25770 from: from,
25771 to: to
25772 };
25773 this.trigger('pendingtimelinechange');
25774 }
25775
25776 return this.pendingTimelineChanges_[type];
25777 };
25778
25779 _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
25780 var type = _ref2.type,
25781 from = _ref2.from,
25782 to = _ref2.to;
25783
25784 if (typeof from === 'number' && typeof to === 'number') {
25785 this.lastTimelineChanges_[type] = {
25786 type: type,
25787 from: from,
25788 to: to
25789 };
25790 delete this.pendingTimelineChanges_[type];
25791 this.trigger('timelinechange');
25792 }
25793
25794 return this.lastTimelineChanges_[type];
25795 };
25796
25797 _proto.dispose = function dispose() {
25798 this.trigger('dispose');
25799 this.pendingTimelineChanges_ = {};
25800 this.lastTimelineChanges_ = {};
25801 this.off();
25802 };
25803
25804 return TimelineChangeController;
25805 }(videojs__default["default"].EventTarget);
25806
25807 /* rollup-plugin-worker-factory start for worker!/Users/bcasey/Projects/videojs-http-streaming/src/decrypter-worker.js */
25808 var workerCode = transform(function (self) {
25809
25810 function createCommonjsModule(fn, basedir, module) {
25811 return module = {
25812 path: basedir,
25813 exports: {},
25814 require: function require(path, base) {
25815 return commonjsRequire(path, base === undefined || base === null ? module.path : base);
25816 }
25817 }, fn(module, module.exports), module.exports;
25818 }
25819
25820 function commonjsRequire() {
25821 throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
25822 }
25823
25824 var createClass = createCommonjsModule(function (module) {
25825 function _defineProperties(target, props) {
25826 for (var i = 0; i < props.length; i++) {
25827 var descriptor = props[i];
25828 descriptor.enumerable = descriptor.enumerable || false;
25829 descriptor.configurable = true;
25830 if ("value" in descriptor) descriptor.writable = true;
25831 Object.defineProperty(target, descriptor.key, descriptor);
25832 }
25833 }
25834
25835 function _createClass(Constructor, protoProps, staticProps) {
25836 if (protoProps) _defineProperties(Constructor.prototype, protoProps);
25837 if (staticProps) _defineProperties(Constructor, staticProps);
25838 return Constructor;
25839 }
25840
25841 module.exports = _createClass;
25842 module.exports["default"] = module.exports, module.exports.__esModule = true;
25843 });
25844 var setPrototypeOf = createCommonjsModule(function (module) {
25845 function _setPrototypeOf(o, p) {
25846 module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
25847 o.__proto__ = p;
25848 return o;
25849 };
25850
25851 module.exports["default"] = module.exports, module.exports.__esModule = true;
25852 return _setPrototypeOf(o, p);
25853 }
25854
25855 module.exports = _setPrototypeOf;
25856 module.exports["default"] = module.exports, module.exports.__esModule = true;
25857 });
25858 var inheritsLoose = createCommonjsModule(function (module) {
25859 function _inheritsLoose(subClass, superClass) {
25860 subClass.prototype = Object.create(superClass.prototype);
25861 subClass.prototype.constructor = subClass;
25862 setPrototypeOf(subClass, superClass);
25863 }
25864
25865 module.exports = _inheritsLoose;
25866 module.exports["default"] = module.exports, module.exports.__esModule = true;
25867 });
25868 /**
25869 * @file stream.js
25870 */
25871
25872 /**
25873 * A lightweight readable stream implementation that handles event dispatching.
25874 *
25875 * @class Stream
25876 */
25877
25878 var Stream = /*#__PURE__*/function () {
25879 function Stream() {
25880 this.listeners = {};
25881 }
25882 /**
25883 * Add a listener for a specified event type.
25884 *
25885 * @param {string} type the event name
25886 * @param {Function} listener the callback to be invoked when an event of
25887 * the specified type occurs
25888 */
25889
25890
25891 var _proto = Stream.prototype;
25892
25893 _proto.on = function on(type, listener) {
25894 if (!this.listeners[type]) {
25895 this.listeners[type] = [];
25896 }
25897
25898 this.listeners[type].push(listener);
25899 }
25900 /**
25901 * Remove a listener for a specified event type.
25902 *
25903 * @param {string} type the event name
25904 * @param {Function} listener a function previously registered for this
25905 * type of event through `on`
25906 * @return {boolean} if we could turn it off or not
25907 */
25908 ;
25909
25910 _proto.off = function off(type, listener) {
25911 if (!this.listeners[type]) {
25912 return false;
25913 }
25914
25915 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
25916 // In Video.js we slice listener functions
25917 // on trigger so that it does not mess up the order
25918 // while we loop through.
25919 //
25920 // Here we slice on off so that the loop in trigger
25921 // can continue using its old reference to loop without
25922 // messing up the order.
25923
25924 this.listeners[type] = this.listeners[type].slice(0);
25925 this.listeners[type].splice(index, 1);
25926 return index > -1;
25927 }
25928 /**
25929 * Trigger an event of the specified type on this stream. Any additional
25930 * arguments to this function are passed as parameters to event listeners.
25931 *
25932 * @param {string} type the event name
25933 */
25934 ;
25935
25936 _proto.trigger = function trigger(type) {
25937 var callbacks = this.listeners[type];
25938
25939 if (!callbacks) {
25940 return;
25941 } // Slicing the arguments on every invocation of this method
25942 // can add a significant amount of overhead. Avoid the
25943 // intermediate object creation for the common case of a
25944 // single callback argument
25945
25946
25947 if (arguments.length === 2) {
25948 var length = callbacks.length;
25949
25950 for (var i = 0; i < length; ++i) {
25951 callbacks[i].call(this, arguments[1]);
25952 }
25953 } else {
25954 var args = Array.prototype.slice.call(arguments, 1);
25955 var _length = callbacks.length;
25956
25957 for (var _i = 0; _i < _length; ++_i) {
25958 callbacks[_i].apply(this, args);
25959 }
25960 }
25961 }
25962 /**
25963 * Destroys the stream and cleans up.
25964 */
25965 ;
25966
25967 _proto.dispose = function dispose() {
25968 this.listeners = {};
25969 }
25970 /**
25971 * Forwards all `data` events on this stream to the destination stream. The
25972 * destination stream should provide a method `push` to receive the data
25973 * events as they arrive.
25974 *
25975 * @param {Stream} destination the stream that will receive all `data` events
25976 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
25977 */
25978 ;
25979
25980 _proto.pipe = function pipe(destination) {
25981 this.on('data', function (data) {
25982 destination.push(data);
25983 });
25984 };
25985
25986 return Stream;
25987 }();
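// Editorial usage sketch (not part of the original worker source):
var streamUsageExample = function streamUsageExample() {
  var source = new Stream();
  var received = [];
  source.on('data', function (value) {
    received.push(value);
  });
  source.trigger('data', 1); // received is now [1]
  source.dispose(); // drops all listeners
};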
25988 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
25989
25990 /**
25991 * Returns the subarray of a Uint8Array without PKCS#7 padding.
25992 *
25993 * @param padded {Uint8Array} unencrypted bytes that have been padded
25994 * @return {Uint8Array} the unpadded bytes
25995 * @see http://tools.ietf.org/html/rfc5652
25996 */
25997
25998
25999 function unpad(padded) {
26000 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
26001 }
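// Editorial sketch of the padding math above (hypothetical bytes, not part
// of the original source). PKCS#7 stores the pad length in every pad byte,
// so the final byte says how many bytes to drop:
var unpadExample = function unpadExample() {
  // four data bytes padded to a 16 byte block with twelve 0x0c bytes
  var padded = new Uint8Array([1, 2, 3, 4, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12]);
  return unpad(padded); // Uint8Array [1, 2, 3, 4]
};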
26002 /*! @name aes-decrypter @version 3.1.2 @license Apache-2.0 */
26003
26004 /**
26005 * @file aes.js
26006 *
26007 * This file contains an adaptation of the AES decryption algorithm
26008 * from the Stanford Javascript Cryptography Library. That work is
26009 * covered by the following copyright and permissions notice:
26010 *
26011 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
26012 * All rights reserved.
26013 *
26014 * Redistribution and use in source and binary forms, with or without
26015 * modification, are permitted provided that the following conditions are
26016 * met:
26017 *
26018 * 1. Redistributions of source code must retain the above copyright
26019 * notice, this list of conditions and the following disclaimer.
26020 *
26021 * 2. Redistributions in binary form must reproduce the above
26022 * copyright notice, this list of conditions and the following
26023 * disclaimer in the documentation and/or other materials provided
26024 * with the distribution.
26025 *
26026 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
26027 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
26028 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
26029 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
26030 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
26031 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
26032 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
26033 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
26034 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26035 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26036 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26037 *
26038 * The views and conclusions contained in the software and documentation
26039 * are those of the authors and should not be interpreted as representing
26040 * official policies, either expressed or implied, of the authors.
26041 */
26042
26043 /**
26044 * Expand the S-box tables.
26045 *
26046 * @private
26047 */
26048
26049
26050 var precompute = function precompute() {
26051 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
26052 var encTable = tables[0];
26053 var decTable = tables[1];
26054 var sbox = encTable[4];
26055 var sboxInv = decTable[4];
26056 var i;
26057 var x;
26058 var xInv;
26059 var d = [];
26060 var th = [];
26061 var x2;
26062 var x4;
26063 var x8;
26064 var s;
26065 var tEnc;
26066 var tDec; // Compute double and third tables
26067
26068 for (i = 0; i < 256; i++) {
26069 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
26070 }
26071
26072 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
26073 // Compute sbox
26074 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
26075 s = s >> 8 ^ s & 255 ^ 99;
26076 sbox[x] = s;
26077 sboxInv[s] = x; // Compute MixColumns
26078
26079 x8 = d[x4 = d[x2 = d[x]]];
26080 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
26081 tEnc = d[s] * 0x101 ^ s * 0x1010100;
26082
26083 for (i = 0; i < 4; i++) {
26084 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
26085 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
26086 }
26087 } // Compactify. Considerable speedup on Firefox.
26088
26089
26090 for (i = 0; i < 5; i++) {
26091 encTable[i] = encTable[i].slice(0);
26092 decTable[i] = decTable[i].slice(0);
26093 }
26094
26095 return tables;
26096 };
26097
26098 var aesTables = null;
26099 /**
26100 * Schedule out an AES key for both encryption and decryption. This
26101 * is a low-level class. Use a cipher mode to do bulk encryption.
26102 *
26103 * @class AES
26104 * @param key {Array} The key as an array of 4, 6 or 8 words.
26105 */
26106
26107 var AES = /*#__PURE__*/function () {
26108 function AES(key) {
26109 /**
26110 * The expanded S-box and inverse S-box tables. These will be computed
26111 * on the client so that we don't have to send them down the wire.
26112 *
26113 * There are two tables, _tables[0] is for encryption and
26114 * _tables[1] is for decryption.
26115 *
26116 * The first 4 sub-tables are the expanded S-box with MixColumns. The
26117 * last (_tables[01][4]) is the S-box itself.
26118 *
26119 * @private
26120 */
26121 // if we have yet to precompute the S-box tables
26122 // do so now
26123 if (!aesTables) {
26124 aesTables = precompute();
26125 } // then make a copy of that object for use
26126
26127
26128 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
26129 var i;
26130 var j;
26131 var tmp;
26132 var sbox = this._tables[0][4];
26133 var decTable = this._tables[1];
26134 var keyLen = key.length;
26135 var rcon = 1;
26136
26137 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
26138 throw new Error('Invalid aes key size');
26139 }
26140
26141 var encKey = key.slice(0);
26142 var decKey = [];
26143 this._key = [encKey, decKey]; // schedule encryption keys
26144
26145 for (i = keyLen; i < 4 * keyLen + 28; i++) {
26146 tmp = encKey[i - 1]; // apply sbox
26147
26148 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
26149 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
26150
26151 if (i % keyLen === 0) {
26152 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
26153 rcon = rcon << 1 ^ (rcon >> 7) * 283;
26154 }
26155 }
26156
26157 encKey[i] = encKey[i - keyLen] ^ tmp;
26158 } // schedule decryption keys
26159
26160
26161 for (j = 0; i; j++, i--) {
26162 tmp = encKey[j & 3 ? i : i - 4];
26163
26164 if (i <= 4 || j < 4) {
26165 decKey[j] = tmp;
26166 } else {
26167 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
26168 }
26169 }
26170 }
26171 /**
26172 * Decrypt 16 bytes, specified as four 32-bit words.
26173 *
26174 * @param {number} encrypted0 the first word to decrypt
26175 * @param {number} encrypted1 the second word to decrypt
26176 * @param {number} encrypted2 the third word to decrypt
26177 * @param {number} encrypted3 the fourth word to decrypt
26178 * @param {Int32Array} out the array to write the decrypted words
26179 * into
26180 * @param {number} offset the offset into the output array to start
26181 * writing results
26182 * @return {Array} The plaintext.
26183 */
26184
26185
26186 var _proto = AES.prototype;
26187
26188 _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
26189 var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
26190
26191 var a = encrypted0 ^ key[0];
26192 var b = encrypted3 ^ key[1];
26193 var c = encrypted2 ^ key[2];
26194 var d = encrypted1 ^ key[3];
26195 var a2;
26196 var b2;
26197 var c2; // key.length === 2 ?
26198
26199 var nInnerRounds = key.length / 4 - 2;
26200 var i;
26201 var kIndex = 4;
26202 var table = this._tables[1]; // load up the tables
26203
26204 var table0 = table[0];
26205 var table1 = table[1];
26206 var table2 = table[2];
26207 var table3 = table[3];
26208 var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
26209
26210 for (i = 0; i < nInnerRounds; i++) {
26211 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
26212 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
26213 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
26214 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
26215 kIndex += 4;
26216 a = a2;
26217 b = b2;
26218 c = c2;
26219 } // Last round.
26220
26221
26222 for (i = 0; i < 4; i++) {
26223 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
26224 a2 = a;
26225 a = b;
26226 b = c;
26227 c = d;
26228 d = a2;
26229 }
26230 };
26231
26232 return AES;
26233 }();
26234 /**
26235 * A wrapper around the Stream class to use setTimeout
26236 * and run stream "jobs" asynchronously
26237 *
26238 * @class AsyncStream
26239 * @extends Stream
26240 */
26241
26242
26243 var AsyncStream = /*#__PURE__*/function (_Stream) {
26244 inheritsLoose(AsyncStream, _Stream);
26245
26246 function AsyncStream() {
26247 var _this;
26248
26249 _this = _Stream.call(this, Stream) || this;
26250 _this.jobs = [];
26251 _this.delay = 1;
26252 _this.timeout_ = null;
26253 return _this;
26254 }
26255 /**
26256 * process an async job
26257 *
26258 * @private
26259 */
26260
26261
26262 var _proto = AsyncStream.prototype;
26263
26264 _proto.processJob_ = function processJob_() {
26265 this.jobs.shift()();
26266
26267 if (this.jobs.length) {
26268 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
26269 } else {
26270 this.timeout_ = null;
26271 }
26272 }
26273 /**
26274 * push a job into the stream
26275 *
26276 * @param {Function} job the job to push into the stream
26277 */
26278 ;
26279
26280 _proto.push = function push(job) {
26281 this.jobs.push(job);
26282
26283 if (!this.timeout_) {
26284 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
26285 }
26286 };
26287
26288 return AsyncStream;
26289 }(Stream);
26290 /**
26291 * Convert network-order (big-endian) bytes into their little-endian
26292 * representation.
26293 */
26294
26295
26296 var ntoh = function ntoh(word) {
26297 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
26298 };
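// Editorial sketch (not part of the original source): ntoh reverses the
// four bytes of a 32-bit word.
var ntohExample = function ntohExample() {
  // 0x01020304 in network order becomes 0x04030201 in little-endian order
  return (ntoh(0x01020304) >>> 0) === 0x04030201; // true
};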
26299 /**
26300 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
26301 *
26302 * @param {Uint8Array} encrypted the encrypted bytes
26303 * @param {Uint32Array} key the bytes of the decryption key
26304 * @param {Uint32Array} initVector the initialization vector (IV) to
26305 * use for the first round of CBC.
26306 * @return {Uint8Array} the decrypted bytes
26307 *
26308 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
26309 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
26310 * @see https://tools.ietf.org/html/rfc2315
26311 */
26312
26313
26314 var decrypt = function decrypt(encrypted, key, initVector) {
26315 // word-level access to the encrypted bytes
26316 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
26317 var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
26318
26319 var decrypted = new Uint8Array(encrypted.byteLength);
26320 var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
26321 // decrypted data
26322
26323 var init0;
26324 var init1;
26325 var init2;
26326 var init3;
26327 var encrypted0;
26328 var encrypted1;
26329 var encrypted2;
26330 var encrypted3; // iteration variable
26331
26332 var wordIx; // pull out the words of the IV to ensure we don't modify the
26333 // passed-in reference and easier access
26334
26335 init0 = initVector[0];
26336 init1 = initVector[1];
26337 init2 = initVector[2];
26338 init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
26339 // to each decrypted block
26340
26341 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
26342 // convert big-endian (network order) words into little-endian
26343 // (javascript order)
26344 encrypted0 = ntoh(encrypted32[wordIx]);
26345 encrypted1 = ntoh(encrypted32[wordIx + 1]);
26346 encrypted2 = ntoh(encrypted32[wordIx + 2]);
26347 encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
26348
26349 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
26350 // plaintext
26351
26352 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
26353 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
26354 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
26355 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
26356
26357 init0 = encrypted0;
26358 init1 = encrypted1;
26359 init2 = encrypted2;
26360 init3 = encrypted3;
26361 }
26362
26363 return decrypted;
26364 };
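// Editorial usage sketch (the key and IV values here are hypothetical; in
// practice they come from the playlist's #EXT-X-KEY tag):
var decryptExample = function decryptExample(encryptedBytes) {
  var key = new Uint32Array([0, 0, 0, 0]);
  var iv = new Uint32Array([0, 0, 0, 0]);
  // returns the CBC-decrypted bytes, still carrying their PKCS#7 padding
  return decrypt(encryptedBytes, key, iv);
};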
26365 /**
26366 * The `Decrypter` class that manages decryption of AES
26367 * data through `AsyncStream` objects and the `decrypt`
26368 * function
26369 *
26370 * @param {Uint8Array} encrypted the encrypted bytes
26371 * @param {Uint32Array} key the bytes of the decryption key
26372 * @param {Uint32Array} initVector the initialization vector (IV) to
26373 * @param {Function} done the function to run when done
26374 * @class Decrypter
26375 */
26376
26377
26378 var Decrypter = /*#__PURE__*/function () {
26379 function Decrypter(encrypted, key, initVector, done) {
26380 var step = Decrypter.STEP;
26381 var encrypted32 = new Int32Array(encrypted.buffer);
26382 var decrypted = new Uint8Array(encrypted.byteLength);
26383 var i = 0;
26384 this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously
26385
26386 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
26387
26388 for (i = step; i < encrypted32.length; i += step) {
26389 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
26390 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
26391 } // invoke the done() callback when everything is finished
26392
26393
26394 this.asyncStream_.push(function () {
26395 // remove pkcs#7 padding from the decrypted bytes
26396 done(null, unpad(decrypted));
26397 });
26398 }
26399 /**
26400 * a getter for STEP, the maximum number of bytes to process at one time
26401 *
26402 * @return {number} the value of STEP (32000)
26403 */
26404
26405
26406 var _proto = Decrypter.prototype;
26407 /**
26408 * @private
26409 */
26410
26411 _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
26412 return function () {
26413 var bytes = decrypt(encrypted, key, initVector);
26414 decrypted.set(bytes, encrypted.byteOffset);
26415 };
26416 };
26417
26418 createClass(Decrypter, null, [{
26419 key: "STEP",
26420 get: function get() {
26421 // 4 * 8000;
26422 return 32000;
26423 }
26424 }]);
26425 return Decrypter;
26426 }();
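// Editorial usage sketch (not part of the original source): the Decrypter
// splits the work into STEP-sized chunks on the AsyncStream so a large
// segment never blocks for long.
var decrypterUsageExample = function decrypterUsageExample(encrypted, key, iv) {
  return new Decrypter(encrypted, key, iv, function (error, bytes) {
    // error is always null in this implementation; bytes is the decrypted,
    // unpadded payload
  });
};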
26427 /**
26428 * @file bin-utils.js
26429 */
26430
26431 /**
26432 * Creates an object for sending to a web worker modifying properties that are TypedArrays
26433 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
26434 *
26435 * @param {Object} message
26436 * Object of properties and values to send to the web worker
26437 * @return {Object}
26438 * Modified message with TypedArray values expanded
26439 * @function createTransferableMessage
26440 */
26441
26442
26443 var createTransferableMessage = function createTransferableMessage(message) {
26444 var transferable = {};
26445 Object.keys(message).forEach(function (key) {
26446 var value = message[key];
26447
26448 if (ArrayBuffer.isView(value)) {
26449 transferable[key] = {
26450 bytes: value.buffer,
26451 byteOffset: value.byteOffset,
26452 byteLength: value.byteLength
26453 };
26454 } else {
26455 transferable[key] = value;
26456 }
26457 });
26458 return transferable;
26459 };
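// Editorial sketch (hypothetical message, not part of the original source):
var transferableExample = function transferableExample() {
  var message = {
    source: 'example',
    encrypted: new Uint8Array([1, 2, 3])
  };
  // the Uint8Array is flattened to { bytes, byteOffset, byteLength } so its
  // underlying ArrayBuffer can be listed as a transferable in postMessage
  return createTransferableMessage(message);
};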
26460 /* global self */
26461
26462 /**
26463 * Our web worker interface so that things can talk to aes-decrypter
26464 * that will be running in a web worker. The scope is passed to this by
26465 * webworkify.
26466 */
26467
26468
26469 self.onmessage = function (event) {
26470 var data = event.data;
26471 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
26472 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
26473 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
26474 /* eslint-disable no-new, handle-callback-err */
26475
26476 new Decrypter(encrypted, key, iv, function (err, bytes) {
26477 self.postMessage(createTransferableMessage({
26478 source: data.source,
26479 decrypted: bytes
26480 }), [bytes.buffer]);
26481 });
26482 /* eslint-enable */
26483 };
26484 });
26485 var Decrypter = factory(workerCode);
26486 /* rollup-plugin-worker-factory end for worker!/Users/bcasey/Projects/videojs-http-streaming/src/decrypter-worker.js */
26487
26488 /**
26489 * Convert the properties of an HLS track into an audioTrackKind.
26490 *
26491 * @private
26492 */
26493
26494 var audioTrackKind_ = function audioTrackKind_(properties) {
26495 var kind = properties.default ? 'main' : 'alternative';
26496
26497 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
26498 kind = 'main-desc';
26499 }
26500
26501 return kind;
26502 };
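// Editorial sketch (hypothetical rendition properties, not part of the
// original bundle):
var audioTrackKindExample = function audioTrackKindExample() {
  audioTrackKind_({ "default": true }); // => 'main'
  audioTrackKind_({ "default": false }); // => 'alternative'
  audioTrackKind_({
    "default": true,
    characteristics: 'public.accessibility.describes-video'
  }); // => 'main-desc'
};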
26503 /**
26504 * Pause provided segment loader and playlist loader if active
26505 *
26506 * @param {SegmentLoader} segmentLoader
26507 * SegmentLoader to pause
26508 * @param {Object} mediaType
26509 * Active media type
26510 * @function stopLoaders
26511 */
26512
26513
26514 var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
26515 segmentLoader.abort();
26516 segmentLoader.pause();
26517
26518 if (mediaType && mediaType.activePlaylistLoader) {
26519 mediaType.activePlaylistLoader.pause();
26520 mediaType.activePlaylistLoader = null;
26521 }
26522 };
26523 /**
26524 * Start loading provided segment loader and playlist loader
26525 *
26526 * @param {PlaylistLoader} playlistLoader
26527 * PlaylistLoader to start loading
26528 * @param {Object} mediaType
26529 * Active media type
26530 * @function startLoaders
26531 */
26532
26533 var startLoaders = function startLoaders(playlistLoader, mediaType) {
26534 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
26535 // playlist loader
26536 mediaType.activePlaylistLoader = playlistLoader;
26537 playlistLoader.load();
26538 };
26539 /**
26540 * Returns a function to be called when the media group changes. It performs a
26541 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
26542 * change of group is merely a rendition switch of the same content at another encoding,
26543 * rather than a change of content, such as switching audio from English to Spanish.
26544 *
26545 * @param {string} type
26546 * MediaGroup type
26547 * @param {Object} settings
26548 * Object containing required information for media groups
26549 * @return {Function}
26550 * Handler for a non-destructive resync of SegmentLoader when the active media
26551 * group changes.
26552 * @function onGroupChanged
26553 */
26554
26555 var onGroupChanged = function onGroupChanged(type, settings) {
26556 return function () {
26557 var _settings$segmentLoad = settings.segmentLoaders,
26558 segmentLoader = _settings$segmentLoad[type],
26559 mainSegmentLoader = _settings$segmentLoad.main,
26560 mediaType = settings.mediaTypes[type];
26561 var activeTrack = mediaType.activeTrack();
26562 var activeGroup = mediaType.getActiveGroup();
26563 var previousActiveLoader = mediaType.activePlaylistLoader;
26564 var lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
26565
26566 if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
26567 return;
26568 }
26569
26570 mediaType.lastGroup_ = activeGroup;
26571 mediaType.lastTrack_ = activeTrack;
26572 stopLoaders(segmentLoader, mediaType);
26573
26574 if (!activeGroup || activeGroup.isMasterPlaylist) {
26575 // there is no group active or active group is a main playlist and won't change
26576 return;
26577 }
26578
26579 if (!activeGroup.playlistLoader) {
26580 if (previousActiveLoader) {
26581 // The previous group had a playlist loader but the new active group does not
26582 // this means we are switching from demuxed to muxed audio. In this case we want to
26583 // do a destructive reset of the main segment loader and not restart the audio
26584 // loaders.
26585 mainSegmentLoader.resetEverything();
26586 }
26587
26588 return;
26589 } // Non-destructive resync
26590
26591
26592 segmentLoader.resyncLoader();
26593 startLoaders(activeGroup.playlistLoader, mediaType);
26594 };
26595 };
26596 var onGroupChanging = function onGroupChanging(type, settings) {
26597 return function () {
26598 var segmentLoader = settings.segmentLoaders[type],
26599 mediaType = settings.mediaTypes[type];
26600 mediaType.lastGroup_ = null;
26601 segmentLoader.abort();
26602 segmentLoader.pause();
26603 };
26604 };
26605 /**
26606 * Returns a function to be called when the media track changes. It performs a
26607 * destructive reset of the SegmentLoader to ensure we start loading as close to
26608 * currentTime as possible.
26609 *
26610 * @param {string} type
26611 * MediaGroup type
26612 * @param {Object} settings
26613 * Object containing required information for media groups
26614 * @return {Function}
26615 * Handler for a destructive reset of SegmentLoader when the active media
26616 * track changes.
26617 * @function onTrackChanged
26618 */
26619
26620 var onTrackChanged = function onTrackChanged(type, settings) {
26621 return function () {
26622 var masterPlaylistLoader = settings.masterPlaylistLoader,
26623 _settings$segmentLoad2 = settings.segmentLoaders,
26624 segmentLoader = _settings$segmentLoad2[type],
26625 mainSegmentLoader = _settings$segmentLoad2.main,
26626 mediaType = settings.mediaTypes[type];
26627 var activeTrack = mediaType.activeTrack();
26628 var activeGroup = mediaType.getActiveGroup();
26629 var previousActiveLoader = mediaType.activePlaylistLoader;
26630 var lastTrack = mediaType.lastTrack_; // track did not change, do nothing
26631
26632 if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
26633 return;
26634 }
26635
26636 mediaType.lastGroup_ = activeGroup;
26637 mediaType.lastTrack_ = activeTrack;
26638 stopLoaders(segmentLoader, mediaType);
26639
26640 if (!activeGroup) {
26641 // there is no group active so we do not want to restart loaders
26642 return;
26643 }
26644
26645 if (activeGroup.isMasterPlaylist) {
26646 // track did not change, do nothing
26647 if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
26648 return;
26649 }
26650
26651 var mpc = settings.vhs.masterPlaylistController_;
26652 var newPlaylist = mpc.selectPlaylist(); // media will not change, do nothing
26653
26654 if (mpc.media() === newPlaylist) {
26655 return;
26656 }
26657
26658 mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
26659 masterPlaylistLoader.pause();
26660 mainSegmentLoader.resetEverything();
26661 mpc.fastQualityChange_(newPlaylist);
26662 return;
26663 }
26664
26665 if (type === 'AUDIO') {
26666 if (!activeGroup.playlistLoader) {
26667 // when switching from demuxed audio/video to muxed audio/video (noted by no
26668 // playlist loader for the audio group), we want to do a destructive reset of the
26669 // main segment loader and not restart the audio loaders
26670 mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
26671 // it should be stopped
26672
26673 mainSegmentLoader.resetEverything();
26674 return;
26675 } // although the segment loader is an audio segment loader, call the setAudio
26676 // function to ensure it is prepared to re-append the init segment (or handle other
26677 // config changes)
26678
26679
26680 segmentLoader.setAudio(true);
26681 mainSegmentLoader.setAudio(false);
26682 }
26683
26684 if (previousActiveLoader === activeGroup.playlistLoader) {
26685 // Nothing has actually changed. This can happen because track change events can fire
26686 // multiple times for a "single" change. One for enabling the new active track, and
26687 // one for disabling the track that was active
26688 startLoaders(activeGroup.playlistLoader, mediaType);
26689 return;
26690 }
26691
26692 if (segmentLoader.track) {
26693 // For WebVTT, set the new text track in the segmentloader
26694 segmentLoader.track(activeTrack);
26695 } // destructive reset
26696
26697
26698 segmentLoader.resetEverything();
26699 startLoaders(activeGroup.playlistLoader, mediaType);
26700 };
26701 };
26702 var onError = {
26703 /**
26704 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
26705 * an error.
26706 *
26707 * @param {string} type
26708 * MediaGroup type
26709 * @param {Object} settings
26710 * Object containing required information for media groups
26711 * @return {Function}
26712 * Error handler. Logs warning (or error if the playlist is blacklisted) to
26713 * console and switches back to default audio track.
26714 * @function onError.AUDIO
26715 */
26716 AUDIO: function AUDIO(type, settings) {
26717 return function () {
26718 var segmentLoader = settings.segmentLoaders[type],
26719 mediaType = settings.mediaTypes[type],
26720 blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
26721 stopLoaders(segmentLoader, mediaType); // switch back to default audio track
26722
26723 var activeTrack = mediaType.activeTrack();
26724 var activeGroup = mediaType.activeGroup();
26725 var id = (activeGroup.filter(function (group) {
26726 return group.default;
26727 })[0] || activeGroup[0]).id;
26728 var defaultTrack = mediaType.tracks[id];
26729
26730 if (activeTrack === defaultTrack) {
26731 // Default track encountered an error. All we can do now is blacklist the current
26732 // rendition and hope another will switch audio groups
26733 blacklistCurrentPlaylist({
26734 message: 'Problem encountered loading the default audio track.'
26735 });
26736 return;
26737 }
26738
videojs__default["default"].log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
26740
26741 for (var trackId in mediaType.tracks) {
26742 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
26743 }
26744
26745 mediaType.onTrackChanged();
26746 };
26747 },
26748
26749 /**
26750 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
26751 * an error.
26752 *
26753 * @param {string} type
26754 * MediaGroup type
26755 * @param {Object} settings
26756 * Object containing required information for media groups
26757 * @return {Function}
26758 * Error handler. Logs warning to console and disables the active subtitle track
26759 * @function onError.SUBTITLES
26760 */
26761 SUBTITLES: function SUBTITLES(type, settings) {
26762 return function () {
26763 var segmentLoader = settings.segmentLoaders[type],
26764 mediaType = settings.mediaTypes[type];
videojs__default["default"].log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
26766 stopLoaders(segmentLoader, mediaType);
26767 var track = mediaType.activeTrack();
26768
26769 if (track) {
26770 track.mode = 'disabled';
26771 }
26772
26773 mediaType.onTrackChanged();
26774 };
26775 }
26776 };
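// Usage sketch (illustrative only): each onError factory returns a handler that
// is attached to a loader's 'error' event, as setupListeners does below:
//
//   playlistLoader.on('error', onError.AUDIO('AUDIO', settings));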
26777 var setupListeners = {
26778 /**
26779 * Setup event listeners for audio playlist loader
26780 *
26781 * @param {string} type
26782 * MediaGroup type
26783 * @param {PlaylistLoader|null} playlistLoader
26784 * PlaylistLoader to register listeners on
26785 * @param {Object} settings
26786 * Object containing required information for media groups
26787 * @function setupListeners.AUDIO
26788 */
26789 AUDIO: function AUDIO(type, playlistLoader, settings) {
26790 if (!playlistLoader) {
26791 // no playlist loader means audio will be muxed with the video
26792 return;
26793 }
26794
26795 var tech = settings.tech,
26796 requestOptions = settings.requestOptions,
26797 segmentLoader = settings.segmentLoaders[type];
26798 playlistLoader.on('loadedmetadata', function () {
26799 var media = playlistLoader.media();
26800 segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
26801 // permits, start downloading segments
26802
26803 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
26804 segmentLoader.load();
26805 }
26806 });
26807 playlistLoader.on('loadedplaylist', function () {
26808 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
26809
26810 if (!tech.paused()) {
26811 segmentLoader.load();
26812 }
26813 });
26814 playlistLoader.on('error', onError[type](type, settings));
26815 },
26816
26817 /**
26818 * Setup event listeners for subtitle playlist loader
26819 *
26820 * @param {string} type
26821 * MediaGroup type
26822 * @param {PlaylistLoader|null} playlistLoader
26823 * PlaylistLoader to register listeners on
26824 * @param {Object} settings
26825 * Object containing required information for media groups
26826 * @function setupListeners.SUBTITLES
26827 */
26828 SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
26829 var tech = settings.tech,
26830 requestOptions = settings.requestOptions,
26831 segmentLoader = settings.segmentLoaders[type],
26832 mediaType = settings.mediaTypes[type];
26833 playlistLoader.on('loadedmetadata', function () {
26834 var media = playlistLoader.media();
26835 segmentLoader.playlist(media, requestOptions);
26836 segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
26837 // permits, start downloading segments
26838
26839 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
26840 segmentLoader.load();
26841 }
26842 });
26843 playlistLoader.on('loadedplaylist', function () {
26844 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
26845
26846 if (!tech.paused()) {
26847 segmentLoader.load();
26848 }
26849 });
26850 playlistLoader.on('error', onError[type](type, settings));
26851 }
26852 };
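// Usage sketch (illustrative only): initialize.AUDIO below calls this for every
// variant it discovers, passing the variant's playlist loader (or null when the
// audio is muxed into the video renditions):
//
//   setupListeners.AUDIO('AUDIO', properties.playlistLoader, settings);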
26853 var initialize = {
26854 /**
26855 * Setup PlaylistLoaders and AudioTracks for the audio groups
26856 *
26857 * @param {string} type
26858 * MediaGroup type
26859 * @param {Object} settings
26860 * Object containing required information for media groups
26861 * @function initialize.AUDIO
26862 */
26863 'AUDIO': function AUDIO(type, settings) {
26864 var vhs = settings.vhs,
26865 sourceType = settings.sourceType,
26866 segmentLoader = settings.segmentLoaders[type],
26867 requestOptions = settings.requestOptions,
26868 mediaGroups = settings.master.mediaGroups,
26869 _settings$mediaTypes$ = settings.mediaTypes[type],
26870 groups = _settings$mediaTypes$.groups,
26871 tracks = _settings$mediaTypes$.tracks,
26872 logger_ = _settings$mediaTypes$.logger_,
26873 masterPlaylistLoader = settings.masterPlaylistLoader;
26874 var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none
26875
26876 if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
26877 mediaGroups[type] = {
26878 main: {
26879 default: {
26880 default: true
26881 }
26882 }
26883 };
26884
26885 if (audioOnlyMaster) {
26886 mediaGroups[type].main.default.playlists = masterPlaylistLoader.master.playlists;
26887 }
26888 }
26889
26890 for (var groupId in mediaGroups[type]) {
26891 if (!groups[groupId]) {
26892 groups[groupId] = [];
26893 }
26894
26895 for (var variantLabel in mediaGroups[type][groupId]) {
26896 var properties = mediaGroups[type][groupId][variantLabel];
26897 var playlistLoader = void 0;
26898
26899 if (audioOnlyMaster) {
26900 logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
26901 properties.isMasterPlaylist = true;
26902 playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
26903 // use the resolved media playlist object
26904 } else if (sourceType === 'vhs-json' && properties.playlists) {
26905 playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
26906 } else if (properties.resolvedUri) {
26907 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
// should we even have properties.playlists in this check?
26909 } else if (properties.playlists && sourceType === 'dash') {
26910 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
26911 } else {
26912 // no resolvedUri means the audio is muxed with the video when using this
26913 // audio track
26914 playlistLoader = null;
26915 }
26916
26917 properties = videojs__default["default"].mergeOptions({
26918 id: variantLabel,
26919 playlistLoader: playlistLoader
26920 }, properties);
26921 setupListeners[type](type, properties.playlistLoader, settings);
26922 groups[groupId].push(properties);
26923
26924 if (typeof tracks[variantLabel] === 'undefined') {
26925 var track = new videojs__default["default"].AudioTrack({
26926 id: variantLabel,
26927 kind: audioTrackKind_(properties),
26928 enabled: false,
26929 language: properties.language,
26930 default: properties.default,
26931 label: variantLabel
26932 });
26933 tracks[variantLabel] = track;
26934 }
26935 }
26936 } // setup single error event handler for the segment loader
26937
26938
26939 segmentLoader.on('error', onError[type](type, settings));
26940 },
26941
26942 /**
26943 * Setup PlaylistLoaders and TextTracks for the subtitle groups
26944 *
26945 * @param {string} type
26946 * MediaGroup type
26947 * @param {Object} settings
26948 * Object containing required information for media groups
26949 * @function initialize.SUBTITLES
26950 */
26951 'SUBTITLES': function SUBTITLES(type, settings) {
26952 var tech = settings.tech,
26953 vhs = settings.vhs,
26954 sourceType = settings.sourceType,
26955 segmentLoader = settings.segmentLoaders[type],
26956 requestOptions = settings.requestOptions,
26957 mediaGroups = settings.master.mediaGroups,
26958 _settings$mediaTypes$2 = settings.mediaTypes[type],
26959 groups = _settings$mediaTypes$2.groups,
26960 tracks = _settings$mediaTypes$2.tracks,
26961 masterPlaylistLoader = settings.masterPlaylistLoader;
26962
26963 for (var groupId in mediaGroups[type]) {
26964 if (!groups[groupId]) {
26965 groups[groupId] = [];
26966 }
26967
26968 for (var variantLabel in mediaGroups[type][groupId]) {
26969 if (mediaGroups[type][groupId][variantLabel].forced) {
26970 // Subtitle playlists with the forced attribute are not selectable in Safari.
26971 // According to Apple's HLS Authoring Specification:
26972 // If content has forced subtitles and regular subtitles in a given language,
26973 // the regular subtitles track in that language MUST contain both the forced
26974 // subtitles and the regular subtitles for that language.
26975 // Because of this requirement and that Safari does not add forced subtitles,
26976 // forced subtitles are skipped here to maintain consistent experience across
26977 // all platforms
26978 continue;
26979 }
26980
26981 var properties = mediaGroups[type][groupId][variantLabel];
26982 var playlistLoader = void 0;
26983
26984 if (sourceType === 'hls') {
26985 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
26986 } else if (sourceType === 'dash') {
26987 var playlists = properties.playlists.filter(function (p) {
26988 return p.excludeUntil !== Infinity;
26989 });
26990
26991 if (!playlists.length) {
26992 return;
26993 }
26994
26995 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
26996 } else if (sourceType === 'vhs-json') {
26997 playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
26998 // as provided, otherwise use the resolved URI to load the playlist
26999 properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
27000 }
27001
27002 properties = videojs__default["default"].mergeOptions({
27003 id: variantLabel,
27004 playlistLoader: playlistLoader
27005 }, properties);
27006 setupListeners[type](type, properties.playlistLoader, settings);
27007 groups[groupId].push(properties);
27008
27009 if (typeof tracks[variantLabel] === 'undefined') {
27010 var track = tech.addRemoteTextTrack({
27011 id: variantLabel,
27012 kind: 'subtitles',
27013 default: properties.default && properties.autoselect,
27014 language: properties.language,
27015 label: variantLabel
27016 }, false).track;
27017 tracks[variantLabel] = track;
27018 }
27019 }
27020 } // setup single error event handler for the segment loader
27021
27022
27023 segmentLoader.on('error', onError[type](type, settings));
27024 },
27025
27026 /**
27027 * Setup TextTracks for the closed-caption groups
27028 *
27029 * @param {String} type
27030 * MediaGroup type
27031 * @param {Object} settings
27032 * Object containing required information for media groups
27033 * @function initialize['CLOSED-CAPTIONS']
27034 */
27035 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
27036 var tech = settings.tech,
27037 mediaGroups = settings.master.mediaGroups,
27038 _settings$mediaTypes$3 = settings.mediaTypes[type],
27039 groups = _settings$mediaTypes$3.groups,
27040 tracks = _settings$mediaTypes$3.tracks;
27041
27042 for (var groupId in mediaGroups[type]) {
27043 if (!groups[groupId]) {
27044 groups[groupId] = [];
27045 }
27046
27047 for (var variantLabel in mediaGroups[type][groupId]) {
27048 var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services
27049
27050 if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
27051 continue;
27052 }
27053
27054 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
27055 var newProps = {
27056 label: variantLabel,
27057 language: properties.language,
27058 instreamId: properties.instreamId,
27059 default: properties.default && properties.autoselect
27060 };
27061
27062 if (captionServices[newProps.instreamId]) {
27063 newProps = videojs__default["default"].mergeOptions(newProps, captionServices[newProps.instreamId]);
27064 }
27065
27066 if (newProps.default === undefined) {
27067 delete newProps.default;
27068 } // No PlaylistLoader is required for Closed-Captions because the captions are
27069 // embedded within the video stream
27070
27071
27072 groups[groupId].push(videojs__default["default"].mergeOptions({
27073 id: variantLabel
27074 }, properties));
27075
27076 if (typeof tracks[variantLabel] === 'undefined') {
27077 var track = tech.addRemoteTextTrack({
27078 id: newProps.instreamId,
27079 kind: 'captions',
27080 default: newProps.default,
27081 language: newProps.language,
27082 label: newProps.label
27083 }, false).track;
27084 tracks[variantLabel] = track;
27085 }
27086 }
27087 }
27088 }
27089 };
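// Usage sketch (illustrative only): setupMediaGroups below runs each
// initializer once per media group type found in the master manifest:
//
//   ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
//     initialize[type](type, settings);
//   });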
27090
27091 var groupMatch = function groupMatch(list, media) {
27092 for (var i = 0; i < list.length; i++) {
27093 if (playlistMatch(media, list[i])) {
27094 return true;
27095 }
27096
27097 if (list[i].playlists && groupMatch(list[i].playlists, media)) {
27098 return true;
27099 }
27100 }
27101
27102 return false;
27103 };
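// groupMatch recurses into nested `playlists` arrays, so a match is found even
// when the group stores variant playlists one level down. Illustrative shape
// only, assuming playlistMatch reports two identical playlists as a match:
//
//   groupMatch([{ playlists: [mediaPlaylist] }], mediaPlaylist); // => true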
27104 /**
27105 * Returns a function used to get the active group of the provided type
27106 *
27107 * @param {string} type
27108 * MediaGroup type
27109 * @param {Object} settings
27110 * Object containing required information for media groups
27111 * @return {Function}
27112 * Function that returns the active media group for the provided type. Takes an
27113 * optional parameter {TextTrack} track. If no track is provided, a list of all
27114 * variants in the group, otherwise the variant corresponding to the provided
27115 * track is returned.
27116 * @function activeGroup
27117 */
27118
27119
27120 var activeGroup = function activeGroup(type, settings) {
27121 return function (track) {
27122 var masterPlaylistLoader = settings.masterPlaylistLoader,
27123 groups = settings.mediaTypes[type].groups;
27124 var media = masterPlaylistLoader.media();
27125
27126 if (!media) {
27127 return null;
27128 }
27129
var variants = null; // set variants to the active group of the current media
27131
27132 if (media.attributes[type]) {
27133 variants = groups[media.attributes[type]];
27134 }
27135
27136 var groupKeys = Object.keys(groups);
27137
27138 if (!variants) {
27139 // find the masterPlaylistLoader media
27140 // that is in a media group if we are dealing
27141 // with audio only
27142 if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
27143 for (var i = 0; i < groupKeys.length; i++) {
27144 var groupPropertyList = groups[groupKeys[i]];
27145
27146 if (groupMatch(groupPropertyList, media)) {
27147 variants = groupPropertyList;
27148 break;
27149 }
27150 } // use the main group if it exists
27151
27152 } else if (groups.main) {
27153 variants = groups.main; // only one group, use that one
27154 } else if (groupKeys.length === 1) {
27155 variants = groups[groupKeys[0]];
27156 }
27157 }
27158
27159 if (typeof track === 'undefined') {
27160 return variants;
27161 }
27162
27163 if (track === null || !variants) {
27164 // An active track was specified so a corresponding group is expected. track === null
27165 // means no track is currently active so there is no corresponding group
27166 return null;
27167 }
27168
27169 return variants.filter(function (props) {
27170 return props.id === track.id;
27171 })[0] || null;
27172 };
27173 };
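// Usage sketch (illustrative only), following the JSDoc above: with no argument
// the getter returns the whole variant list; with a track it returns the single
// matching variant:
//
//   var getActiveAudioGroup = activeGroup('AUDIO', settings);
//   var variants = getActiveAudioGroup();          // all variants in the group
//   var variant = getActiveAudioGroup(audioTrack); // variant for that track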
27174 var activeTrack = {
27175 /**
27176 * Returns a function used to get the active track of type provided
27177 *
27178 * @param {string} type
27179 * MediaGroup type
27180 * @param {Object} settings
27181 * Object containing required information for media groups
27182 * @return {Function}
27183 * Function that returns the active media track for the provided type. Returns
27184 * null if no track is active
27185 * @function activeTrack.AUDIO
27186 */
27187 AUDIO: function AUDIO(type, settings) {
27188 return function () {
27189 var tracks = settings.mediaTypes[type].tracks;
27190
27191 for (var id in tracks) {
27192 if (tracks[id].enabled) {
27193 return tracks[id];
27194 }
27195 }
27196
27197 return null;
27198 };
27199 },
27200
27201 /**
27202 * Returns a function used to get the active track of type provided
27203 *
27204 * @param {string} type
27205 * MediaGroup type
27206 * @param {Object} settings
27207 * Object containing required information for media groups
27208 * @return {Function}
27209 * Function that returns the active media track for the provided type. Returns
27210 * null if no track is active
27211 * @function activeTrack.SUBTITLES
27212 */
27213 SUBTITLES: function SUBTITLES(type, settings) {
27214 return function () {
27215 var tracks = settings.mediaTypes[type].tracks;
27216
27217 for (var id in tracks) {
27218 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
27219 return tracks[id];
27220 }
27221 }
27222
27223 return null;
27224 };
27225 }
27226 };
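// Usage sketch (illustrative only): an audio track is active when `enabled` is
// true; a subtitle track is active when its mode is 'showing' or 'hidden':
//
//   var currentAudioTrack = activeTrack.AUDIO('AUDIO', settings)();
//   var currentSubtitleTrack = activeTrack.SUBTITLES('SUBTITLES', settings)();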
27227 var getActiveGroup = function getActiveGroup(type, _ref) {
27228 var mediaTypes = _ref.mediaTypes;
27229 return function () {
27230 var activeTrack_ = mediaTypes[type].activeTrack();
27231
27232 if (!activeTrack_) {
27233 return null;
27234 }
27235
27236 return mediaTypes[type].activeGroup(activeTrack_);
27237 };
27238 };
27239 /**
27240 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
27241 * Closed-Captions) specified in the master manifest.
27242 *
27243 * @param {Object} settings
27244 * Object containing required information for setting up the media groups
27245 * @param {Tech} settings.tech
27246 * The tech of the player
27247 * @param {Object} settings.requestOptions
27248 * XHR request options used by the segment loaders
27249 * @param {PlaylistLoader} settings.masterPlaylistLoader
27250 * PlaylistLoader for the master source
27251 * @param {VhsHandler} settings.vhs
27252 * VHS SourceHandler
27253 * @param {Object} settings.master
27254 * The parsed master manifest
27255 * @param {Object} settings.mediaTypes
27256 * Object to store the loaders, tracks, and utility methods for each media type
27257 * @param {Function} settings.blacklistCurrentPlaylist
27258 * Blacklists the current rendition and forces a rendition switch.
27259 * @function setupMediaGroups
27260 */
27261
27262 var setupMediaGroups = function setupMediaGroups(settings) {
27263 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
27264 initialize[type](type, settings);
27265 });
27266 var mediaTypes = settings.mediaTypes,
27267 masterPlaylistLoader = settings.masterPlaylistLoader,
27268 tech = settings.tech,
27269 vhs = settings.vhs,
27270 _settings$segmentLoad3 = settings.segmentLoaders,
27271 audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
27272 mainSegmentLoader = _settings$segmentLoad3.main; // setup active group and track getters and change event handlers
27273
27274 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
27275 mediaTypes[type].activeGroup = activeGroup(type, settings);
27276 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
27277 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
27278 mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
27279 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
27280 mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
27281 }); // DO NOT enable the default subtitle or caption track.
27282 // DO enable the default audio track
27283
27284 var audioGroup = mediaTypes.AUDIO.activeGroup();
27285
27286 if (audioGroup) {
27287 var groupId = (audioGroup.filter(function (group) {
27288 return group.default;
27289 })[0] || audioGroup[0]).id;
27290 mediaTypes.AUDIO.tracks[groupId].enabled = true;
27291 mediaTypes.AUDIO.onGroupChanged();
27292 mediaTypes.AUDIO.onTrackChanged();
27293 var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
27294 // track is changed, but needs to be handled here since the track may not be considered
27295 // changed on the first call to onTrackChanged
27296
27297 if (!activeAudioGroup.playlistLoader) {
27298 // either audio is muxed with video or the stream is audio only
27299 mainSegmentLoader.setAudio(true);
27300 } else {
27301 // audio is demuxed
27302 mainSegmentLoader.setAudio(false);
27303 audioSegmentLoader.setAudio(true);
27304 }
27305 }
27306
27307 masterPlaylistLoader.on('mediachange', function () {
27308 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
27309 return mediaTypes[type].onGroupChanged();
27310 });
27311 });
27312 masterPlaylistLoader.on('mediachanging', function () {
27313 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
27314 return mediaTypes[type].onGroupChanging();
27315 });
27316 }); // custom audio track change event handler for usage event
27317
27318 var onAudioTrackChanged = function onAudioTrackChanged() {
27319 mediaTypes.AUDIO.onTrackChanged();
27320 tech.trigger({
27321 type: 'usage',
27322 name: 'vhs-audio-change'
27323 });
27324 tech.trigger({
27325 type: 'usage',
27326 name: 'hls-audio-change'
27327 });
27328 };
27329
27330 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
27331 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
27332 vhs.on('dispose', function () {
27333 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
27334 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
27335 }); // clear existing audio tracks and add the ones we just created
27336
27337 tech.clearTracks('audio');
27338
27339 for (var id in mediaTypes.AUDIO.tracks) {
27340 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
27341 }
27342 };
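// Call sketch (illustrative only): MasterPlaylistController invokes this on
// 'loadedmetadata' with a settings object shaped as documented above, e.g.:
//
//   setupMediaGroups({
//     sourceType: sourceType,
//     segmentLoaders: {
//       AUDIO: audioSegmentLoader,
//       SUBTITLES: subtitleSegmentLoader,
//       main: mainSegmentLoader
//     },
//     tech: tech,
//     requestOptions: requestOptions,
//     masterPlaylistLoader: masterPlaylistLoader,
//     vhs: vhs,
//     master: master,
//     mediaTypes: mediaTypes,
//     blacklistCurrentPlaylist: blacklistCurrentPlaylist
//   });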
27343 /**
27344 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
27345 * media type
27346 *
27347 * @return {Object}
27348 * Object to store the loaders, tracks, and utility methods for each media type
27349 * @function createMediaTypes
27350 */
27351
27352 var createMediaTypes = function createMediaTypes() {
27353 var mediaTypes = {};
27354 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
27355 mediaTypes[type] = {
27356 groups: {},
27357 tracks: {},
27358 activePlaylistLoader: null,
27359 activeGroup: noop,
27360 activeTrack: noop,
27361 getActiveGroup: noop,
27362 onGroupChanged: noop,
27363 onTrackChanged: noop,
27364 lastTrack_: null,
27365 logger_: logger("MediaGroups[" + type + "]")
27366 };
27367 });
27368 return mediaTypes;
27369 };
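// Shape sketch (illustrative only): every type starts with empty groups/tracks
// and no-op handlers that setupMediaGroups later replaces:
//
//   var mediaTypes = createMediaTypes();
//   mediaTypes.AUDIO.groups;         // {}
//   mediaTypes.AUDIO.onTrackChanged; // noop until setupMediaGroups runs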
27370
27371 var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
27372 var Vhs$1; // SegmentLoader stats that need to have each loader's
27373 // values summed to calculate the final value
27374
27375 var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
27376
27377 var sumLoaderStat = function sumLoaderStat(stat) {
27378 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
27379 };
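// Usage sketch (illustrative only): the constructor binds this per stat so
// that, e.g., mediaBytesTransferred_() returns the audio + main loader total:
//
//   this.mediaBytesTransferred_ = sumLoaderStat.bind(this, 'mediaBytesTransferred');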
27380
27381 var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
27382 var currentPlaylist = _ref.currentPlaylist,
27383 nextPlaylist = _ref.nextPlaylist,
27384 forwardBuffer = _ref.forwardBuffer,
27385 bufferLowWaterLine = _ref.bufferLowWaterLine,
27386 bufferHighWaterLine = _ref.bufferHighWaterLine,
27387 duration = _ref.duration,
27388 experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
27389 log = _ref.log;
27390
27391 // we have no other playlist to switch to
27392 if (!nextPlaylist) {
27393 videojs__default["default"].log.warn('We received no playlist to switch to. Please check your stream.');
27394 return false;
27395 }
27396
27397 var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id;
27398
27399 if (!currentPlaylist) {
27400 log(sharedLogLine + " as current playlist is not set");
27401 return true;
27402 } // no need to switch if playlist is the same
27403
27404
27405 if (nextPlaylist.id === currentPlaylist.id) {
27406 return false;
} // If the playlist is live, we do not want to take the low water line into
// account. This is because in LIVE, the player plays 3 segments from the end
// of the playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration
// available in those segments, a viewer will never experience a rendition
// upswitch.
27411
27412
27413 if (!currentPlaylist.endList) {
27414 log(sharedLogLine + " as current playlist is live");
27415 return true;
27416 }
27417
27418 var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
27419 // duration is below the max potential low water line
27420
27421 if (duration < maxBufferLowWaterLine) {
27422 log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
27423 return true;
27424 }
27425
27426 var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
27427 var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
27428 // we can switch down
27429
27430 if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
27431 var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
27432
27433 if (experimentalBufferBasedABR) {
27434 logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
27435 }
27436
27437 log(logLine);
27438 return true;
27439 } // and if our buffer is higher than the low water line,
27440 // we can switch up
27441
27442
27443 if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
27444 var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
27445
27446 if (experimentalBufferBasedABR) {
27447 _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
27448 }
27449
27450 log(_logLine);
27451 return true;
27452 }
27453
27454 log("not " + sharedLogLine + " as no switching criteria met");
27455 return false;
27456 };
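// Call sketch (illustrative only): shouldSwitchToMedia_ below gathers the live
// player state and delegates here, e.g.:
//
//   shouldSwitchToMedia({
//     currentPlaylist: masterPlaylistLoader.media(),
//     nextPlaylist: selectPlaylist(),
//     forwardBuffer: bufferedEnd - currentTime,
//     bufferLowWaterLine: bufferLowWaterLine,
//     bufferHighWaterLine: bufferHighWaterLine,
//     duration: duration,
//     experimentalBufferBasedABR: false,
//     log: logger
//   }); // => true or false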
27457 /**
* The master playlist controller controls all interactions
* between playlists and segment loaders. At this time this mainly
* involves a master playlist and a series of audio playlists
27461 * if they are available
27462 *
27463 * @class MasterPlaylistController
27464 * @extends videojs.EventTarget
27465 */
27466
27467
27468 var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
27469 inheritsLoose(MasterPlaylistController, _videojs$EventTarget);
27470
27471 function MasterPlaylistController(options) {
27472 var _this;
27473
27474 _this = _videojs$EventTarget.call(this) || this;
27475 var src = options.src,
27476 handleManifestRedirects = options.handleManifestRedirects,
27477 withCredentials = options.withCredentials,
27478 tech = options.tech,
27479 bandwidth = options.bandwidth,
27480 externVhs = options.externVhs,
27481 useCueTags = options.useCueTags,
27482 blacklistDuration = options.blacklistDuration,
27483 enableLowInitialPlaylist = options.enableLowInitialPlaylist,
27484 sourceType = options.sourceType,
27485 cacheEncryptionKeys = options.cacheEncryptionKeys,
27486 experimentalBufferBasedABR = options.experimentalBufferBasedABR,
27487 experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
27488 captionServices = options.captionServices;
27489
27490 if (!src) {
27491 throw new Error('A non-empty playlist URL or JSON manifest string is required');
27492 }
27493
27494 var maxPlaylistRetries = options.maxPlaylistRetries;
27495
27496 if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
27497 maxPlaylistRetries = Infinity;
27498 }
27499
27500 Vhs$1 = externVhs;
27501 _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
27502 _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
27503 _this.withCredentials = withCredentials;
27504 _this.tech_ = tech;
27505 _this.vhs_ = tech.vhs;
27506 _this.sourceType_ = sourceType;
27507 _this.useCueTags_ = useCueTags;
27508 _this.blacklistDuration = blacklistDuration;
27509 _this.maxPlaylistRetries = maxPlaylistRetries;
27510 _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
27511
27512 if (_this.useCueTags_) {
27513 _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
27514 _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
27515 }
27516
27517 _this.requestOptions_ = {
27518 withCredentials: withCredentials,
27519 handleManifestRedirects: handleManifestRedirects,
27520 maxPlaylistRetries: maxPlaylistRetries,
27521 timeout: null
27522 };
27523
27524 _this.on('error', _this.pauseLoading);
27525
27526 _this.mediaTypes_ = createMediaTypes();
27527 _this.mediaSource = new window.MediaSource();
27528 _this.handleDurationChange_ = _this.handleDurationChange_.bind(assertThisInitialized(_this));
27529 _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(assertThisInitialized(_this));
27530 _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(assertThisInitialized(_this));
27531
27532 _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player
27533
27534
27535 _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);
27536
27537 _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
27538 // everything, and the MediaSource should not be detached without a proper disposal
27539
27540
27541 _this.seekable_ = videojs__default["default"].createTimeRanges();
27542 _this.hasPlayed_ = false;
27543 _this.syncController_ = new SyncController(options);
27544 _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
27545 kind: 'metadata',
27546 label: 'segment-metadata'
27547 }, false).track;
27548 _this.decrypter_ = new Decrypter();
27549 _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
27550 _this.inbandTextTracks_ = {};
27551 _this.timelineChangeController_ = new TimelineChangeController();
27552 var segmentLoaderSettings = {
27553 vhs: _this.vhs_,
27554 parse708captions: options.parse708captions,
27555 captionServices: captionServices,
27556 mediaSource: _this.mediaSource,
27557 currentTime: _this.tech_.currentTime.bind(_this.tech_),
27558 seekable: function seekable() {
27559 return _this.seekable();
27560 },
27561 seeking: function seeking() {
27562 return _this.tech_.seeking();
27563 },
27564 duration: function duration() {
27565 return _this.duration();
27566 },
27567 hasPlayed: function hasPlayed() {
27568 return _this.hasPlayed_;
27569 },
27570 goalBufferLength: function goalBufferLength() {
27571 return _this.goalBufferLength();
27572 },
27573 bandwidth: bandwidth,
27574 syncController: _this.syncController_,
27575 decrypter: _this.decrypter_,
27576 sourceType: _this.sourceType_,
27577 inbandTextTracks: _this.inbandTextTracks_,
27578 cacheEncryptionKeys: cacheEncryptionKeys,
27579 sourceUpdater: _this.sourceUpdater_,
27580 timelineChangeController: _this.timelineChangeController_,
27581 experimentalExactManifestTimings: options.experimentalExactManifestTimings
27582 }; // The source type check not only determines whether a special DASH playlist loader
27583 // should be used, but also covers the case where the provided src is a vhs-json
27584 // manifest object (instead of a URL). In the case of vhs-json, the default
27585 // PlaylistLoader should be used.
27586
27587 _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);
27588
27589 _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
27590 // combined audio/video or just video when alternate audio track is selected
27591
27592
27593 _this.mainSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
27594 segmentMetadataTrack: _this.segmentMetadataTrack_,
27595 loaderType: 'main'
27596 }), options); // alternate audio track
27597
27598 _this.audioSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
27599 loaderType: 'audio'
27600 }), options);
27601 _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
27602 loaderType: 'vtt',
27603 featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
27604 }), options);
27605
27606 _this.setupSegmentLoaderListeners_();
27607
27608 if (_this.experimentalBufferBasedABR) {
27609 _this.masterPlaylistLoader_.one('loadedplaylist', function () {
27610 return _this.startABRTimer_();
27611 });
27612
27613 _this.tech_.on('pause', function () {
27614 return _this.stopABRTimer_();
27615 });
27616
27617 _this.tech_.on('play', function () {
27618 return _this.startABRTimer_();
27619 });
27620 } // Create SegmentLoader stat-getters
27621 // mediaRequests_
27622 // mediaRequestsAborted_
27623 // mediaRequestsTimedout_
27624 // mediaRequestsErrored_
27625 // mediaTransferDuration_
27626 // mediaBytesTransferred_
27627 // mediaAppends_
27628
27629
27630 loaderStats.forEach(function (stat) {
27631 _this[stat + '_'] = sumLoaderStat.bind(assertThisInitialized(_this), stat);
27632 });
27633 _this.logger_ = logger('MPC');
27634 _this.triggeredFmp4Usage = false;
27635
27636 if (_this.tech_.preload() === 'none') {
27637 _this.loadOnPlay_ = function () {
27638 _this.loadOnPlay_ = null;
27639
27640 _this.masterPlaylistLoader_.load();
27641 };
27642
27643 _this.tech_.one('play', _this.loadOnPlay_);
27644 } else {
27645 _this.masterPlaylistLoader_.load();
27646 }
27647
27648 _this.timeToLoadedData__ = -1;
27649 _this.mainAppendsToLoadedData__ = -1;
27650 _this.audioAppendsToLoadedData__ = -1;
27651 var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)
27652
27653 _this.tech_.one(event, function () {
27654 var timeToLoadedDataStart = Date.now();
27655
27656 _this.tech_.one('loadeddata', function () {
27657 _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
27658 _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
27659 _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
27660 });
27661 });
27662
27663 return _this;
27664 }
27665
27666 var _proto = MasterPlaylistController.prototype;
27667
27668 _proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
27669 return this.mainAppendsToLoadedData__;
27670 };
27671
27672 _proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
27673 return this.audioAppendsToLoadedData__;
27674 };
27675
27676 _proto.appendsToLoadedData_ = function appendsToLoadedData_() {
27677 var main = this.mainAppendsToLoadedData_();
27678 var audio = this.audioAppendsToLoadedData_();
27679
27680 if (main === -1 || audio === -1) {
27681 return -1;
27682 }
27683
27684 return main + audio;
27685 };
27686
27687 _proto.timeToLoadedData_ = function timeToLoadedData_() {
27688 return this.timeToLoadedData__;
27689 }
27690 /**
27691 * Run selectPlaylist and switch to the new playlist if we should
27692 *
27693 * @private
27694 *
27695 */
27696 ;
27697
27698 _proto.checkABR_ = function checkABR_() {
27699 var nextPlaylist = this.selectPlaylist();
27700
27701 if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
27702 this.switchMedia_(nextPlaylist, 'abr');
27703 }
27704 };
27705
27706 _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
27707 var oldMedia = this.media();
27708 var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
27709 var newId = playlist.id || playlist.uri;
27710
27711 if (oldId && oldId !== newId) {
27712 this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
27713 this.tech_.trigger({
27714 type: 'usage',
27715 name: "vhs-rendition-change-" + cause
27716 });
27717 }
27718
27719 this.masterPlaylistLoader_.media(playlist, delay);
27720 }
27721 /**
27722 * Start a timer that periodically calls checkABR_
27723 *
27724 * @private
27725 */
27726 ;
27727
27728 _proto.startABRTimer_ = function startABRTimer_() {
27729 var _this2 = this;
27730
27731 this.stopABRTimer_();
27732 this.abrTimer_ = window.setInterval(function () {
27733 return _this2.checkABR_();
27734 }, 250);
27735 }
27736 /**
27737 * Stop the timer that periodically calls checkABR_
27738 *
27739 * @private
27740 */
27741 ;
27742
27743 _proto.stopABRTimer_ = function stopABRTimer_() {
// a pause event fired while scrubbing shouldn't stop the ABR timer.
27745 // This getter will be added to Video.js in version 7.11.
27746 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
27747 return;
27748 }
27749
27750 window.clearInterval(this.abrTimer_);
27751 this.abrTimer_ = null;
27752 }
27753 /**
27754 * Get a list of playlists for the currently selected audio playlist
27755 *
27756 * @return {Array} the array of audio playlists
27757 */
27758 ;
27759
27760 _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
27761 var master = this.master();
27762 var defaultPlaylists = master && master.playlists || []; // if we don't have any audio groups then we can only
// assume that the audio tracks are contained in the master's
// playlist array; use that or an empty array.
27765
27766 if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
27767 return defaultPlaylists;
27768 }
27769
27770 var AUDIO = master.mediaGroups.AUDIO;
27771 var groupKeys = Object.keys(AUDIO);
27772 var track; // get the current active track
27773
27774 if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
27775 track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't setup yet
27776 } else {
27777 // default group is `main` or just the first group.
27778 var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
27779
27780 for (var label in defaultGroup) {
27781 if (defaultGroup[label].default) {
27782 track = {
27783 label: label
27784 };
27785 break;
27786 }
27787 }
} // no active track, no playlists.
27789
27790
27791 if (!track) {
27792 return defaultPlaylists;
27793 }
27794
27795 var playlists = []; // get all of the playlists that are possible for the
27796 // active track.
27797
27798 for (var group in AUDIO) {
27799 if (AUDIO[group][track.label]) {
27800 var properties = AUDIO[group][track.label];
27801
27802 if (properties.playlists && properties.playlists.length) {
27803 playlists.push.apply(playlists, properties.playlists);
27804 } else if (properties.uri) {
27805 playlists.push(properties);
27806 } else if (master.playlists.length) {
27807 // if an audio group does not have a uri
27808 // see if we have main playlists that use it as a group.
27809 // if we do then add those to the playlists list.
27810 for (var i = 0; i < master.playlists.length; i++) {
27811 var playlist = master.playlists[i];
27812
27813 if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
27814 playlists.push(playlist);
27815 }
27816 }
27817 }
27818 }
27819 }
27820
27821 if (!playlists.length) {
27822 return defaultPlaylists;
27823 }
27824
27825 return playlists;
27826 }
27827 /**
27828 * Register event handlers on the master playlist loader. A helper
27829 * function for construction time.
27830 *
27831 * @private
27832 */
27833 ;
27834
27835 _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
27836 var _this3 = this;
27837
27838 this.masterPlaylistLoader_.on('loadedmetadata', function () {
27839 var media = _this3.masterPlaylistLoader_.media();
27840
27841 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
27842 // timeout the request.
27843
27844 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
27845 _this3.requestOptions_.timeout = 0;
27846 } else {
27847 _this3.requestOptions_.timeout = requestTimeout;
27848 } // if this isn't a live video and preload permits, start
27849 // downloading segments
27850
27851
27852 if (media.endList && _this3.tech_.preload() !== 'none') {
27853 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
27854
27855 _this3.mainSegmentLoader_.load();
27856 }
27857
27858 setupMediaGroups({
27859 sourceType: _this3.sourceType_,
27860 segmentLoaders: {
27861 AUDIO: _this3.audioSegmentLoader_,
27862 SUBTITLES: _this3.subtitleSegmentLoader_,
27863 main: _this3.mainSegmentLoader_
27864 },
27865 tech: _this3.tech_,
27866 requestOptions: _this3.requestOptions_,
27867 masterPlaylistLoader: _this3.masterPlaylistLoader_,
27868 vhs: _this3.vhs_,
27869 master: _this3.master(),
27870 mediaTypes: _this3.mediaTypes_,
27871 blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
27872 });
27873
27874 _this3.triggerPresenceUsage_(_this3.master(), media);
27875
27876 _this3.setupFirstPlay();
27877
27878 if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
27879 _this3.trigger('selectedinitialmedia');
27880 } else {
27881 // We must wait for the active audio playlist loader to
27882 // finish setting up before triggering this event so the
27883 // representations API and EME setup is correct
27884 _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
27885 _this3.trigger('selectedinitialmedia');
27886 });
27887 }
27888 });
27889 this.masterPlaylistLoader_.on('loadedplaylist', function () {
27890 if (_this3.loadOnPlay_) {
27891 _this3.tech_.off('play', _this3.loadOnPlay_);
27892 }
27893
27894 var updatedPlaylist = _this3.masterPlaylistLoader_.media();
27895
27896 if (!updatedPlaylist) {
27897 // exclude any variants that are not supported by the browser before selecting
27898 // an initial media as the playlist selectors do not consider browser support
27899 _this3.excludeUnsupportedVariants_();
27900
27901 var selectedMedia;
27902
27903 if (_this3.enableLowInitialPlaylist) {
27904 selectedMedia = _this3.selectInitialPlaylist();
27905 }
27906
27907 if (!selectedMedia) {
27908 selectedMedia = _this3.selectPlaylist();
27909 }
27910
27911 if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
27912 return;
27913 }
27914
27915 _this3.initialMedia_ = selectedMedia;
27916
27917 _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
27918 // fire again since the playlist will be requested. In the case of vhs-json
27919 // (where the manifest object is provided as the source), when the media
27920 // playlist's `segments` list is already available, a media playlist won't be
27921 // requested, and loadedplaylist won't fire again, so the playlist handler must be
27922 // called on its own here.
27923
27924
27925 var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
27926
27927 if (!haveJsonSource) {
27928 return;
27929 }
27930
27931 updatedPlaylist = _this3.initialMedia_;
27932 }
27933
27934 _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
27935 });
27936 this.masterPlaylistLoader_.on('error', function () {
27937 _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
27938 });
27939 this.masterPlaylistLoader_.on('mediachanging', function () {
27940 _this3.mainSegmentLoader_.abort();
27941
27942 _this3.mainSegmentLoader_.pause();
27943 });
27944 this.masterPlaylistLoader_.on('mediachange', function () {
27945 var media = _this3.masterPlaylistLoader_.media();
27946
27947 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
27948 // timeout the request.
27949
27950 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
27951 _this3.requestOptions_.timeout = 0;
27952 } else {
27953 _this3.requestOptions_.timeout = requestTimeout;
27954 } // TODO: Create a new event on the PlaylistLoader that signals
27955 // that the segments have changed in some way and use that to
27956 // update the SegmentLoader instead of doing it twice here and
27957 // on `loadedplaylist`
27958
27959
27960 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
27961
27962 _this3.mainSegmentLoader_.load();
27963
27964 _this3.tech_.trigger({
27965 type: 'mediachange',
27966 bubbles: true
27967 });
27968 });
27969 this.masterPlaylistLoader_.on('playlistunchanged', function () {
27970 var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
27971 // excluded for not-changing. We likely just have a really slowly updating
27972 // playlist.
27973
27974
27975 if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
27976 return;
27977 }
27978
27979 var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
27980
27981 if (playlistOutdated) {
27982 // Playlist has stopped updating and we're stuck at its end. Try to
27983 // blacklist it and switch to another playlist in the hope that that
27984 // one is updating (and give the player a chance to re-adjust to the
27985 // safe live point).
27986 _this3.blacklistCurrentPlaylist({
27987 message: 'Playlist no longer updating.',
27988 reason: 'playlist-unchanged'
27989 }); // useful for monitoring QoS
27990
27991
27992 _this3.tech_.trigger('playliststuck');
27993 }
27994 });
27995 this.masterPlaylistLoader_.on('renditiondisabled', function () {
27996 _this3.tech_.trigger({
27997 type: 'usage',
27998 name: 'vhs-rendition-disabled'
27999 });
28000
28001 _this3.tech_.trigger({
28002 type: 'usage',
28003 name: 'hls-rendition-disabled'
28004 });
28005 });
28006 this.masterPlaylistLoader_.on('renditionenabled', function () {
28007 _this3.tech_.trigger({
28008 type: 'usage',
28009 name: 'vhs-rendition-enabled'
28010 });
28011
28012 _this3.tech_.trigger({
28013 type: 'usage',
28014 name: 'hls-rendition-enabled'
28015 });
28016 });
28017 }
28018 /**
28019 * Given an updated media playlist (whether it was loaded for the first time, or
28020 * refreshed for live playlists), update any relevant properties and state to reflect
28021 * changes in the media that should be accounted for (e.g., cues and duration).
28022 *
28023 * @param {Object} updatedPlaylist the updated media playlist object
28024 *
28025 * @private
28026 */
28027 ;
28028
28029 _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
28030 if (this.useCueTags_) {
28031 this.updateAdCues_(updatedPlaylist);
28032 } // TODO: Create a new event on the PlaylistLoader that signals
28033 // that the segments have changed in some way and use that to
28034 // update the SegmentLoader instead of doing it twice here and
28035 // on `mediachange`
28036
28037
28038 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
28039 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
28040 // as it is possible that it was temporarily stopped while waiting for
28041 // a playlist (e.g., in case the playlist errored and we re-requested it).
28042
28043 if (!this.tech_.paused()) {
28044 this.mainSegmentLoader_.load();
28045
28046 if (this.audioSegmentLoader_) {
28047 this.audioSegmentLoader_.load();
28048 }
28049 }
28050 }
28051 /**
* A helper function for triggering presence usage events once per source
28053 *
28054 * @private
28055 */
28056 ;
28057
28058 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
28059 var mediaGroups = master.mediaGroups || {};
28060 var defaultDemuxed = true;
28061 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
28062
28063 for (var mediaGroup in mediaGroups.AUDIO) {
28064 for (var label in mediaGroups.AUDIO[mediaGroup]) {
28065 var properties = mediaGroups.AUDIO[mediaGroup][label];
28066
28067 if (!properties.uri) {
28068 defaultDemuxed = false;
28069 }
28070 }
28071 }
28072
28073 if (defaultDemuxed) {
28074 this.tech_.trigger({
28075 type: 'usage',
28076 name: 'vhs-demuxed'
28077 });
28078 this.tech_.trigger({
28079 type: 'usage',
28080 name: 'hls-demuxed'
28081 });
28082 }
28083
28084 if (Object.keys(mediaGroups.SUBTITLES).length) {
28085 this.tech_.trigger({
28086 type: 'usage',
28087 name: 'vhs-webvtt'
28088 });
28089 this.tech_.trigger({
28090 type: 'usage',
28091 name: 'hls-webvtt'
28092 });
28093 }
28094
28095 if (Vhs$1.Playlist.isAes(media)) {
28096 this.tech_.trigger({
28097 type: 'usage',
28098 name: 'vhs-aes'
28099 });
28100 this.tech_.trigger({
28101 type: 'usage',
28102 name: 'hls-aes'
28103 });
28104 }
28105
28106 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
28107 this.tech_.trigger({
28108 type: 'usage',
28109 name: 'vhs-alternate-audio'
28110 });
28111 this.tech_.trigger({
28112 type: 'usage',
28113 name: 'hls-alternate-audio'
28114 });
28115 }
28116
28117 if (this.useCueTags_) {
28118 this.tech_.trigger({
28119 type: 'usage',
28120 name: 'vhs-playlist-cue-tags'
28121 });
28122 this.tech_.trigger({
28123 type: 'usage',
28124 name: 'hls-playlist-cue-tags'
28125 });
28126 }
28127 };
28128
28129 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
28130 var currentPlaylist = this.masterPlaylistLoader_.media();
28131 var buffered = this.tech_.buffered();
28132 var forwardBuffer = buffered.length ? buffered.end(buffered.length - 1) - this.tech_.currentTime() : 0;
28133 var bufferLowWaterLine = this.bufferLowWaterLine();
28134 var bufferHighWaterLine = this.bufferHighWaterLine();
28135 return shouldSwitchToMedia({
28136 currentPlaylist: currentPlaylist,
28137 nextPlaylist: nextPlaylist,
28138 forwardBuffer: forwardBuffer,
28139 bufferLowWaterLine: bufferLowWaterLine,
28140 bufferHighWaterLine: bufferHighWaterLine,
28141 duration: this.duration(),
28142 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
28143 log: this.logger_
28144 });
28145 }
28146 /**
28147 * Register event handlers on the segment loaders. A helper function
28148 * for construction time.
28149 *
28150 * @private
28151 */
28152 ;
28153
28154 _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
28155 var _this4 = this;
28156
28157 if (!this.experimentalBufferBasedABR) {
28158 this.mainSegmentLoader_.on('bandwidthupdate', function () {
28159 var nextPlaylist = _this4.selectPlaylist();
28160
28161 if (_this4.shouldSwitchToMedia_(nextPlaylist)) {
28162 _this4.switchMedia_(nextPlaylist, 'bandwidthupdate');
28163 }
28164
28165 _this4.tech_.trigger('bandwidthupdate');
28166 });
28167 this.mainSegmentLoader_.on('progress', function () {
28168 _this4.trigger('progress');
28169 });
28170 }
28171
28172 this.mainSegmentLoader_.on('error', function () {
28173 _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
28174 });
28175 this.mainSegmentLoader_.on('appenderror', function () {
28176 _this4.error = _this4.mainSegmentLoader_.error_;
28177
28178 _this4.trigger('error');
28179 });
28180 this.mainSegmentLoader_.on('syncinfoupdate', function () {
28181 _this4.onSyncInfoUpdate_();
28182 });
28183 this.mainSegmentLoader_.on('timestampoffset', function () {
28184 _this4.tech_.trigger({
28185 type: 'usage',
28186 name: 'vhs-timestamp-offset'
28187 });
28188
28189 _this4.tech_.trigger({
28190 type: 'usage',
28191 name: 'hls-timestamp-offset'
28192 });
28193 });
28194 this.audioSegmentLoader_.on('syncinfoupdate', function () {
28195 _this4.onSyncInfoUpdate_();
28196 });
28197 this.audioSegmentLoader_.on('appenderror', function () {
28198 _this4.error = _this4.audioSegmentLoader_.error_;
28199
28200 _this4.trigger('error');
28201 });
28202 this.mainSegmentLoader_.on('ended', function () {
28203 _this4.logger_('main segment loader ended');
28204
28205 _this4.onEndOfStream();
28206 });
28207 this.mainSegmentLoader_.on('earlyabort', function (event) {
28208 // never try to early abort with the new ABR algorithm
28209 if (_this4.experimentalBufferBasedABR) {
28210 return;
28211 }
28212
28213 _this4.delegateLoaders_('all', ['abort']);
28214
28215 _this4.blacklistCurrentPlaylist({
28216 message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
28217 }, ABORT_EARLY_BLACKLIST_SECONDS);
28218 });
28219
28220 var updateCodecs = function updateCodecs() {
28221 if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
28222 return _this4.tryToCreateSourceBuffers_();
28223 }
28224
28225 var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
28226
28227
28228 if (!codecs) {
28229 return;
28230 }
28231
28232 _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
28233 };
28234
28235 this.mainSegmentLoader_.on('trackinfo', updateCodecs);
28236 this.audioSegmentLoader_.on('trackinfo', updateCodecs);
28237 this.mainSegmentLoader_.on('fmp4', function () {
28238 if (!_this4.triggeredFmp4Usage) {
28239 _this4.tech_.trigger({
28240 type: 'usage',
28241 name: 'vhs-fmp4'
28242 });
28243
28244 _this4.tech_.trigger({
28245 type: 'usage',
28246 name: 'hls-fmp4'
28247 });
28248
28249 _this4.triggeredFmp4Usage = true;
28250 }
28251 });
28252 this.audioSegmentLoader_.on('fmp4', function () {
28253 if (!_this4.triggeredFmp4Usage) {
28254 _this4.tech_.trigger({
28255 type: 'usage',
28256 name: 'vhs-fmp4'
28257 });
28258
28259 _this4.tech_.trigger({
28260 type: 'usage',
28261 name: 'hls-fmp4'
28262 });
28263
28264 _this4.triggeredFmp4Usage = true;
28265 }
28266 });
28267 this.audioSegmentLoader_.on('ended', function () {
28268 _this4.logger_('audioSegmentLoader ended');
28269
28270 _this4.onEndOfStream();
28271 });
28272 };
28273
28274 _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
// total seconds of media loaded across the audio and main segment loaders
return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded;
28276 }
28277 /**
28278 * Call load on our SegmentLoaders
28279 */
28280 ;
28281
28282 _proto.load = function load() {
28283 this.mainSegmentLoader_.load();
28284
28285 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
28286 this.audioSegmentLoader_.load();
28287 }
28288
28289 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
28290 this.subtitleSegmentLoader_.load();
28291 }
28292 }
28293 /**
28294 * Re-tune playback quality level for the current player
28295 * conditions without performing destructive actions, like
28296 * removing already buffered content
28297 *
28298 * @private
28299 * @deprecated
28300 */
28301 ;
28302
28303 _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
28304 if (media === void 0) {
28305 media = this.selectPlaylist();
28306 }
28307
28308 this.fastQualityChange_(media);
28309 }
28310 /**
28311 * Re-tune playback quality level for the current player
28312 * conditions. This method will perform destructive actions like removing
28313 * already buffered content in order to readjust the currently active
28314 * playlist quickly. This is good for manual quality changes
28315 *
28316 * @private
28317 */
28318 ;
28319
28320 _proto.fastQualityChange_ = function fastQualityChange_(media) {
28321 var _this5 = this;
28322
28323 if (media === void 0) {
28324 media = this.selectPlaylist();
28325 }
28326
28327 if (media === this.masterPlaylistLoader_.media()) {
28328 this.logger_('skipping fastQualityChange because new media is same as old');
28329 return;
28330 }
28331
28332 this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
28333 // the browser a kick to remove any cached frames from the previous rendition (0.04 seconds
28334 // ahead is roughly the minimum that will accomplish this across a variety of content
28335 // in IE and Edge, but seeking in place is sufficient on all other browsers)
28336 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
28337 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
28338
28339 this.mainSegmentLoader_.resetEverything(function () {
28340 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
28341 // from the previously enabled rendition to load before the new playlist has finished loading
28342 if (videojs__default["default"].browser.IE_VERSION || videojs__default["default"].browser.IS_EDGE) {
28343 _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
28344 } else {
28345 _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
28346 }
28347 }); // don't need to reset audio as it is reset when media changes
28348 }
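  // Example (hypothetical): forcing an immediate switch to the lowest-bandwidth
  // enabled rendition. `mpc` stands in for a MasterPlaylistController instance;
  // fastQualityChange_ is private, so applications would normally go through the
  // representations() API instead.
  //
  //   var lowest = mpc.master().playlists.filter(isEnabled).sort(function (a, b) {
  //     return a.attributes.BANDWIDTH - b.attributes.BANDWIDTH;
  //   })[0];
  //   mpc.fastQualityChange_(lowest);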
28349 /**
28350 * Begin playback.
28351 */
28352 ;
28353
28354 _proto.play = function play() {
28355 if (this.setupFirstPlay()) {
28356 return;
28357 }
28358
28359 if (this.tech_.ended()) {
28360 this.tech_.setCurrentTime(0);
28361 }
28362
28363 if (this.hasPlayed_) {
28364 this.load();
28365 }
28366
28367 var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
28368 // seek forward to the live point
28369
28370 if (this.tech_.duration() === Infinity) {
28371 if (this.tech_.currentTime() < seekable.start(0)) {
28372 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
28373 }
28374 }
28375 }
28376 /**
28377 * Seek to the latest media position if this is a live video and the
28378 * player and video are loaded and initialized.
28379 */
28380 ;
28381
28382 _proto.setupFirstPlay = function setupFirstPlay() {
28383 var _this6 = this;
28384
28385 var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
28386 // If 1) there is no active media
28387 // 2) the player is paused
28388 // 3) the first play has already been set up
28389 // then exit early
28390
28391 if (!media || this.tech_.paused() || this.hasPlayed_) {
28392 return false;
28393 } // when the video is a live stream
28394
28395
28396 if (!media.endList) {
28397 var seekable = this.seekable();
28398
28399 if (!seekable.length) {
28400 // without a seekable range, the player cannot seek to begin buffering at the live
28401 // point
28402 return false;
28403 }
28404
28405 if (videojs__default["default"].browser.IE_VERSION && this.tech_.readyState() === 0) {
28406 // IE11 throws an InvalidStateError if you try to set currentTime while the
28407 // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
28408 this.tech_.one('loadedmetadata', function () {
28409 _this6.trigger('firstplay');
28410
28411 _this6.tech_.setCurrentTime(seekable.end(0));
28412
28413 _this6.hasPlayed_ = true;
28414 });
28415 return false;
28416 } // trigger firstplay to inform the source handler to ignore the next seek event
28417
28418
28419 this.trigger('firstplay'); // seek to the live point
28420
28421 this.tech_.setCurrentTime(seekable.end(0));
28422 }
28423
28424 this.hasPlayed_ = true; // we can begin loading now that everything is ready
28425
28426 this.load();
28427 return true;
28428 }
28429 /**
28430 * handle the sourceopen event on the MediaSource
28431 *
28432 * @private
28433 */
28434 ;
28435
28436 _proto.handleSourceOpen_ = function handleSourceOpen_() {
28437 // Only attempt to create the source buffer if none already exist.
28438 // handleSourceOpen is also called when we are "re-opening" a source buffer
28439 // after `endOfStream` has been called (in response to a seek for instance)
28440 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
28441 // code in video.js but is required because play() must be invoked
28442 // *after* the media source has opened.
28443
28444 if (this.tech_.autoplay()) {
28445 var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
28446 // on browsers which return a promise
28447
28448 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
28449 playPromise.then(null, function (e) {});
28450 }
28451 }
28452
28453 this.trigger('sourceopen');
28454 }
28455 /**
28456 * handle the sourceended event on the MediaSource
28457 *
28458 * @private
28459 */
28460 ;
28461
28462 _proto.handleSourceEnded_ = function handleSourceEnded_() {
28463 if (!this.inbandTextTracks_.metadataTrack_) {
28464 return;
28465 }
28466
28467 var cues = this.inbandTextTracks_.metadataTrack_.cues;
28468
28469 if (!cues || !cues.length) {
28470 return;
28471 }
28472
28473 var duration = this.duration();
28474 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
28475 }
28476 /**
28477 * handle the durationchange event on the MediaSource
28478 *
28479 * @private
28480 */
28481 ;
28482
28483 _proto.handleDurationChange_ = function handleDurationChange_() {
28484 this.tech_.trigger('durationchange');
28485 }
28486 /**
28487 * Calls endOfStream on the media source when all active stream types have called
28488 * endOfStream
28489 *
28490 * @param {string} streamType
28491 * Stream type of the segment loader that called endOfStream
28492 * @private
28493 */
28494 ;
28495
28496 _proto.onEndOfStream = function onEndOfStream() {
28497 var isEndOfStream = this.mainSegmentLoader_.ended_;
28498
28499 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
28500 var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
28501
28502 if (!mainMediaInfo || mainMediaInfo.hasVideo) {
28503 // if we do not know if the main segment loader contains video yet or if we
28504 // definitively know the main segment loader contains video, then we need to wait
28505 // for both main and audio segment loaders to call endOfStream
28506 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
28507 } else {
28508 // otherwise just rely on the audio loader
28509 isEndOfStream = this.audioSegmentLoader_.ended_;
28510 }
28511 }
28512
28513 if (!isEndOfStream) {
28514 return;
28515 }
28516
28517 this.stopABRTimer_();
28518 this.sourceUpdater_.endOfStream();
28519 }
28520 /**
28521 * Check if a playlist has stopped being updated
28522 *
28523 * @param {Object} playlist the media playlist object
28524 * @return {boolean} whether the playlist has stopped being updated or not
28525 */
28526 ;
28527
28528 _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
28529 var seekable = this.seekable();
28530
28531 if (!seekable.length) {
28532 // playlist doesn't have enough information to determine whether we are stuck
28533 return false;
28534 }
28535
28536 var expired = this.syncController_.getExpiredTime(playlist, this.duration());
28537
28538 if (expired === null) {
28539 return false;
28540 } // does not use the safe live end to calculate playlist end, since we
28541 // don't want to say we are stuck while there is still content
28542
28543
28544 var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
28545 var currentTime = this.tech_.currentTime();
28546 var buffered = this.tech_.buffered();
28547
28548 if (!buffered.length) {
28549 // return true if the playhead reached the absolute end of the playlist
28550 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
28551 }
28552
28553 var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
28554 // end of playlist
28555
28556 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
28557 }
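  // Worked example (illustrative numbers): with an absolute playlist end of 60s,
  // a buffered end of 59.98s and a currentTime of 59.95s, both deltas fall within
  // SAFE_TIME_DELTA, so stuckAtPlaylistEnd_ reports true; if another 6s segment
  // were still available, absolutePlaylistEnd - bufferedEnd would be ~6s and the
  // playhead would not be considered stuck.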
28558 /**
28559 * Blacklists a playlist when an error occurs for a set amount of time
28560 * making it unavailable for selection by the rendition selection algorithm
28561 * and then forces a new playlist (rendition) selection.
28562 *
28563 * @param {Object=} error an optional error that may include the playlist
28564 * to blacklist
28565 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
28566 * playlist
28567 */
28568 ;
28569
28570 _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
28571 if (error === void 0) {
28572 error = {};
28573 }
28574
28575 // If the `error` was generated by the playlist loader, it will contain
28576 // the playlist we were trying to load (but failed) and that should be
28577 // blacklisted instead of the currently selected playlist which is likely
28578 // out-of-date in this scenario
28579 var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
28580 blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
28581 // trying to load the master OR while we were disposing of the tech
28582
28583 if (!currentPlaylist) {
28584 this.error = error;
28585
28586 if (this.mediaSource.readyState !== 'open') {
28587 this.trigger('error');
28588 } else {
28589 this.sourceUpdater_.endOfStream('network');
28590 }
28591
28592 return;
28593 }
28594
28595 currentPlaylist.playlistErrors_++;
28596 var playlists = this.masterPlaylistLoader_.master.playlists;
28597 var enabledPlaylists = playlists.filter(isEnabled);
28598 var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
28599 // forever
28600
28601 if (playlists.length === 1 && blacklistDuration !== Infinity) {
28602 videojs__default["default"].log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
28603 this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
28604
28605 return this.masterPlaylistLoader_.load(isFinalRendition);
28606 }
28607
28608 if (isFinalRendition) {
28609 // Since we're on the final non-blacklisted playlist, and we're about to blacklist
28610 // it, instead of erring the player or retrying this playlist, clear out the current
28611 // blacklist. This allows other playlists to be attempted in case any have been
28612 // fixed.
28613 var reincluded = false;
28614 playlists.forEach(function (playlist) {
28615 // skip current playlist which is about to be blacklisted
28616 if (playlist === currentPlaylist) {
28617 return;
28618 }
28619
28620 var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
28621
28622 if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
28623 reincluded = true;
28624 delete playlist.excludeUntil;
28625 }
28626 });
28627
28628 if (reincluded) {
28629 videojs__default["default"].log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
28630 // playlist. This is needed for users relying on the retryplaylist event to catch a
28631 // case where the player might be stuck and looping through "dead" playlists.
28632
28633 this.tech_.trigger('retryplaylist');
28634 }
28635 } // Blacklist this playlist
28636
28637
28638 var excludeUntil;
28639
28640 if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
28641 excludeUntil = Infinity;
28642 } else {
28643 excludeUntil = Date.now() + blacklistDuration * 1000;
28644 }
28645
28646 currentPlaylist.excludeUntil = excludeUntil;
28647
28648 if (error.reason) {
28649 currentPlaylist.lastExcludeReason_ = error.reason;
28650 }
28651
28652 this.tech_.trigger('blacklistplaylist');
28653 this.tech_.trigger({
28654 type: 'usage',
28655 name: 'vhs-rendition-blacklisted'
28656 });
28657 this.tech_.trigger({
28658 type: 'usage',
28659 name: 'hls-rendition-blacklisted'
28660 }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
28661 // Would be something like media().id !== currentPlaylist.id, and we would need something
28662 // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
28663 // from loading a new playlist on any blacklist.
28664 // Select a new playlist
28665
28666 var nextPlaylist = this.selectPlaylist();
28667
28668 if (!nextPlaylist) {
28669 this.error = 'Playback cannot continue. No available working or supported playlists.';
28670 this.trigger('error');
28671 return;
28672 }
28673
28674 var logFn = error.internal ? this.logger_ : videojs__default["default"].log.warn;
28675 var errorMessage = error.message ? ' ' + error.message : '';
28676 logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders
28677
28678 if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
28679 this.delegateLoaders_('audio', ['abort', 'pause']);
28680 } // if subtitle group changed reset subtitle loaders
28681
28682
28683 if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
28684 this.delegateLoaders_('subtitle', ['abort', 'pause']);
28685 }
28686
28687 this.delegateLoaders_('main', ['abort', 'pause']);
28688 var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
28689 var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's the final rendition or if the last request was within half a target duration
28690
28691 return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
28692 }
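  // Example (hypothetical): excluding the active playlist for 5 minutes after a
  // fatal segment error. The fields shown mirror the ones read above (playlist,
  // message, reason, internal, blacklistDuration); the message and reason strings
  // here are illustrative only.
  //
  //   mpc.blacklistCurrentPlaylist({
  //     playlist: mpc.media(),
  //     message: 'Segment request returned HTTP 404.',
  //     reason: 'segment-request-failed',
  //     internal: true
  //   }, 300);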
28693 /**
28694 * Pause all segment/playlist loaders
28695 */
28696 ;
28697
28698 _proto.pauseLoading = function pauseLoading() {
28699 this.delegateLoaders_('all', ['abort', 'pause']);
28700 this.stopABRTimer_();
28701 }
28702 /**
28703 * Call a set of functions in order on playlist loaders, segment loaders,
28704 * or both types of loaders.
28705 *
28706 * @param {string} filter
28707 * Filter loaders that should call fnNames using a string. Can be:
28708 * * all - run on all loaders
28709 * * audio - run on all audio loaders
28710 * * subtitle - run on all subtitle loaders
28711 * * main - run on the main/master loaders
28712 *
28713 * @param {Array|string} fnNames
28714 * A string or array of function names to call.
28715 */
28716 ;
28717
28718 _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
28719 var _this7 = this;
28720
28721 var loaders = [];
28722 var dontFilterPlaylist = filter === 'all';
28723
28724 if (dontFilterPlaylist || filter === 'main') {
28725 loaders.push(this.masterPlaylistLoader_);
28726 }
28727
28728 var mediaTypes = [];
28729
28730 if (dontFilterPlaylist || filter === 'audio') {
28731 mediaTypes.push('AUDIO');
28732 }
28733
28734 if (dontFilterPlaylist || filter === 'subtitle') {
28735 mediaTypes.push('CLOSED-CAPTIONS');
28736 mediaTypes.push('SUBTITLES');
28737 }
28738
28739 mediaTypes.forEach(function (mediaType) {
28740 var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
28741
28742 if (loader) {
28743 loaders.push(loader);
28744 }
28745 });
28746 ['main', 'audio', 'subtitle'].forEach(function (name) {
28747 var loader = _this7[name + "SegmentLoader_"];
28748
28749 if (loader && (filter === name || filter === 'all')) {
28750 loaders.push(loader);
28751 }
28752 });
28753 loaders.forEach(function (loader) {
28754 return fnNames.forEach(function (fnName) {
28755 if (typeof loader[fnName] === 'function') {
28756 loader[fnName]();
28757 }
28758 });
28759 });
28760 }
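  // Usage sketch: the filter/fnNames pairings used elsewhere in this file.
  //
  //   mpc.delegateLoaders_('all', ['abort', 'pause']);   // halt every loader
  //   mpc.delegateLoaders_('audio', ['abort', 'pause']); // audio playlist + segment loaders
  //   mpc.delegateLoaders_('main', ['abort', 'pause']);  // master playlist + main segment loader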
28761 /**
28762 * set the current time on all segment loaders
28763 *
28764 * @param {TimeRange} currentTime the current time to set
28765 * @return {TimeRange} the current time
28766 */
28767 ;
28768
28769 _proto.setCurrentTime = function setCurrentTime(currentTime) {
28770 var buffered = findRange(this.tech_.buffered(), currentTime);
28771
28772 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
28773 // return immediately if the metadata is not ready yet
28774 return 0;
28775 } // it's clearly an edge case, but don't throw an error if asked to
28776 // seek within an empty playlist
28777
28778
28779 if (!this.masterPlaylistLoader_.media().segments) {
28780 return 0;
28781 } // if the seek location is already buffered, continue buffering as usual
28782
28783
28784 if (buffered && buffered.length) {
28785 return currentTime;
28786 } // cancel outstanding requests so we begin buffering at the new
28787 // location
28788
28789
28790 this.mainSegmentLoader_.resetEverything();
28791 this.mainSegmentLoader_.abort();
28792
28793 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
28794 this.audioSegmentLoader_.resetEverything();
28795 this.audioSegmentLoader_.abort();
28796 }
28797
28798 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
28799 this.subtitleSegmentLoader_.resetEverything();
28800 this.subtitleSegmentLoader_.abort();
28801 } // start segment loader loading in case they are paused
28802
28803
28804 this.load();
28805 }
28806 /**
28807 * get the current duration
28808 *
28809 * @return {TimeRange} the duration
28810 */
28811 ;
28812
28813 _proto.duration = function duration() {
28814 if (!this.masterPlaylistLoader_) {
28815 return 0;
28816 }
28817
28818 var media = this.masterPlaylistLoader_.media();
28819
28820 if (!media) {
28821 // no playlists loaded yet, so can't determine a duration
28822 return 0;
28823 } // Don't rely on the media source for duration in the case of a live playlist since
28824 // setting the native MediaSource's duration to infinity ends up with consequences to
28825 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
28826 //
28827 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
28828 // however, few browsers have support for setLiveSeekableRange()
28829 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
28830 //
28831 // Until a time when the duration of the media source can be set to infinity, and a
28832 // seekable range specified across browsers, just return Infinity.
28833
28834
28835 if (!media.endList) {
28836 return Infinity;
28837 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
28838 // available). If it's not available, fall back to a playlist-calculated estimate.
28839
28840
28841 if (this.mediaSource) {
28842 return this.mediaSource.duration;
28843 }
28844
28845 return Vhs$1.Playlist.duration(media);
28846 }
28847 /**
28848 * check the seekable range
28849 *
28850 * @return {TimeRange} the seekable range
28851 */
28852 ;
28853
28854 _proto.seekable = function seekable() {
28855 return this.seekable_;
28856 };
28857
28858 _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
28859 var audioSeekable;
28860
28861 if (!this.masterPlaylistLoader_) {
28862 return;
28863 }
28864
28865 var media = this.masterPlaylistLoader_.media();
28866
28867 if (!media) {
28868 return;
28869 }
28870
28871 var expired = this.syncController_.getExpiredTime(media, this.duration());
28872
28873 if (expired === null) {
28874 // not enough information to update seekable
28875 return;
28876 }
28877
28878 var master = this.masterPlaylistLoader_.master;
28879 var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
28880
28881 if (mainSeekable.length === 0) {
28882 return;
28883 }
28884
28885 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
28886 media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
28887 expired = this.syncController_.getExpiredTime(media, this.duration());
28888
28889 if (expired === null) {
28890 return;
28891 }
28892
28893 audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
28894
28895 if (audioSeekable.length === 0) {
28896 return;
28897 }
28898 }
28899
28900 var oldEnd;
28901 var oldStart;
28902
28903 if (this.seekable_ && this.seekable_.length) {
28904 oldEnd = this.seekable_.end(0);
28905 oldStart = this.seekable_.start(0);
28906 }
28907
28908 if (!audioSeekable) {
28909 // seekable has been calculated based on buffering video data so it
28910 // can be returned directly
28911 this.seekable_ = mainSeekable;
28912 } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
28913 // seekables are pretty far off, rely on main
28914 this.seekable_ = mainSeekable;
28915 } else {
28916 this.seekable_ = videojs__default["default"].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
28917 } // seekable is the same as last time
28918
28919
28920 if (this.seekable_ && this.seekable_.length) {
28921 if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
28922 return;
28923 }
28924 }
28925
28926 this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
28927 this.tech_.trigger('seekablechanged');
28928 }
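  // Worked example (illustrative numbers): with mainSeekable [10, 60] and
  // audioSeekable [12, 58], the ranges overlap, so seekable_ becomes the
  // intersection [12, 58]; had audioSeekable been [70, 90] (no overlap),
  // mainSeekable would have been used as-is.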
28929 /**
28930 * Update the player duration
28931 */
28932 ;
28933
28934 _proto.updateDuration = function updateDuration(isLive) {
28935 if (this.updateDuration_) {
28936 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
28937 this.updateDuration_ = null;
28938 }
28939
28940 if (this.mediaSource.readyState !== 'open') {
28941 this.updateDuration_ = this.updateDuration.bind(this, isLive);
28942 this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
28943 return;
28944 }
28945
28946 if (isLive) {
28947 var seekable = this.seekable();
28948
28949 if (!seekable.length) {
28950 return;
28951 } // Even in the case of a live playlist, the native MediaSource's duration should not
28952 // be set to Infinity (even though this would be expected for a live playlist), since
28953 // setting the native MediaSource's duration to infinity ends up with consequences to
28954 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
28955 //
28956 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
28957 // however, few browsers have support for setLiveSeekableRange()
28958 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
28959 //
28960 // Until a time when the duration of the media source can be set to infinity, and a
28961 // seekable range specified across browsers, the duration should be greater than or
28962 // equal to the last possible seekable value.
28963 // MediaSource duration starts as NaN
28964 // It is possible (and probable) that this case will never be reached for many
28965 // sources, since the MediaSource reports duration as the highest value without
28966 // accounting for timestamp offset. For example, if the timestamp offset is -100 and
28967 // we buffered times 0 to 100 with real times of 100 to 200, even though current
28968 // time will be between 0 and 100, the native media source may report the duration
28969 // as 200. However, since we report duration separate from the media source (as
28970 // Infinity), and as long as the native media source duration value is greater than
28971 // our reported seekable range, seeks will work as expected. The large number as
28972 // duration for live is actually a strategy used by some players to work around the
28973 // issue of live seekable ranges cited above.
28974
28975
28976 if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
28977 this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
28978 }
28979
28980 return;
28981 }
28982
28983 var buffered = this.tech_.buffered();
28984 var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());
28985
28986 if (buffered.length > 0) {
28987 duration = Math.max(duration, buffered.end(buffered.length - 1));
28988 }
28989
28990 if (this.mediaSource.duration !== duration) {
28991 this.sourceUpdater_.setDuration(duration);
28992 }
28993 }
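  // Worked example (illustrative numbers): for a live stream with seekable
  // [0, 120], a native duration of NaN (its initial value) is raised to 120 so
  // seeks toward the live point succeed; for a VOD playlist reporting 300s with
  // buffered content ending at 300.5s, the duration is set to 300.5.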
28994 /**
28995 * dispose of the MasterPlaylistController and everything
28996 * that it controls
28997 */
28998 ;
28999
29000 _proto.dispose = function dispose() {
29001 var _this8 = this;
29002
29003 this.trigger('dispose');
29004 this.decrypter_.terminate();
29005 this.masterPlaylistLoader_.dispose();
29006 this.mainSegmentLoader_.dispose();
29007
29008 if (this.loadOnPlay_) {
29009 this.tech_.off('play', this.loadOnPlay_);
29010 }
29011
29012 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
29013 var groups = _this8.mediaTypes_[type].groups;
29014
29015 for (var id in groups) {
29016 groups[id].forEach(function (group) {
29017 if (group.playlistLoader) {
29018 group.playlistLoader.dispose();
29019 }
29020 });
29021 }
29022 });
29023 this.audioSegmentLoader_.dispose();
29024 this.subtitleSegmentLoader_.dispose();
29025 this.sourceUpdater_.dispose();
29026 this.timelineChangeController_.dispose();
29027 this.stopABRTimer_();
29028
29029 if (this.updateDuration_) {
29030 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
29031 }
29032
29033 this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_);
29034
29035 this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
29036 this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
29037 this.off();
29038 }
29039 /**
29040 * return the master playlist object if we have one
29041 *
29042 * @return {Object} the master playlist object that we parsed
29043 */
29044 ;
29045
29046 _proto.master = function master() {
29047 return this.masterPlaylistLoader_.master;
29048 }
29049 /**
29050 * return the currently selected playlist
29051 *
29052 * @return {Object} the currently selected playlist object that we parsed
29053 */
29054 ;
29055
29056 _proto.media = function media() {
29057 // playlist loader will not return media if it has not been fully loaded
29058 return this.masterPlaylistLoader_.media() || this.initialMedia_;
29059 };
29060
29061 _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
29062 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
29063 var hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using an audio loader, then we have audio media info
29064 // otherwise check on the segment loader.
29065
29066 var hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders have not loaded sufficiently to get codecs
29067
29068 if (!hasMainMediaInfo || !hasAudioMediaInfo) {
29069 return false;
29070 }
29071
29072 return true;
29073 };
29074
29075 _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
29076 var _this9 = this;
29077
29078 var media = {
29079 main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
29080 audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
29081 }; // set "main" media equal to video
29082
29083 media.video = media.main;
29084 var playlistCodecs = codecsForPlaylist(this.master(), this.media());
29085 var codecs = {};
29086 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
29087
29088 if (media.main.hasVideo) {
29089 codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
29090 }
29091
29092 if (media.main.isMuxed) {
29093 codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC);
29094 }
29095
29096 if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
29097 codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
29098
29099 media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
29100 } // no codecs, no playback.
29101
29102
29103 if (!codecs.audio && !codecs.video) {
29104 this.blacklistCurrentPlaylist({
29105 playlist: this.media(),
29106 message: 'Could not determine codecs for playlist.',
29107 blacklistDuration: Infinity
29108 });
29109 return;
29110 } // fmp4 relies on browser support, while ts relies on muxer support
29111
29112
29113 var supportFunction = function supportFunction(isFmp4, codec) {
29114 return isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
29115 };
29116
29117 var unsupportedCodecs = {};
29118 var unsupportedAudio;
29119 ['video', 'audio'].forEach(function (type) {
29120 if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
29121 var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
29122 unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
29123 unsupportedCodecs[supporter].push(codecs[type]);
29124
29125 if (type === 'audio') {
29126 unsupportedAudio = supporter;
29127 }
29128 }
29129 });
29130
29131 if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
29132 var audioGroup = this.media().attributes.AUDIO;
29133 this.master().playlists.forEach(function (variant) {
29134 var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
29135
29136 if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
29137 variant.excludeUntil = Infinity;
29138 }
29139 });
29140 this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
29141 } // if we have any unsupported codecs blacklist this playlist.
29142
29143
29144 if (Object.keys(unsupportedCodecs).length) {
29145 var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
29146 if (acc) {
29147 acc += ', ';
29148 }
29149
29150 acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
29151 return acc;
29152 }, '') + '.';
29153 this.blacklistCurrentPlaylist({
29154 playlist: this.media(),
29155 internal: true,
29156 message: message,
29157 blacklistDuration: Infinity
29158 });
29159 return;
29160 } // check if codec switching is happening
29161
29162
29163 if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
29164 var switchMessages = [];
29165 ['video', 'audio'].forEach(function (type) {
29166 var newCodec = (parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
29167 var oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;
29168
29169 if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
29170 switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
29171 }
29172 });
29173
29174 if (switchMessages.length) {
29175 this.blacklistCurrentPlaylist({
29176 playlist: this.media(),
29177 message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
29178 blacklistDuration: Infinity,
29179 internal: true
29180 });
29181 return;
29182 }
29183 } // TODO: when using the muxer shouldn't we just return
29184 // the codecs that the muxer outputs?
29185
29186
29187 return codecs;
29188 }
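  // Example (illustrative): a muxed TS variant advertising
  // CODECS="avc1.64001f,mp4a.40.2" resolves to something like
  //   { video: 'avc1.64001f,mp4a.40.2' }
  // (audio folded into the video source buffer), while a demuxed fmp4 variant
  // with alternate audio would yield
  //   { video: 'avc1.64001f', audio: 'mp4a.40.2' }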
29189 /**
29190 * Create source buffers and exclude any incompatible renditions.
29191 *
29192 * @private
29193 */
29194 ;
29195
29196 _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
29197 // media source is not ready yet or sourceBuffers are already
29198 // created.
29199 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
29200 return;
29201 }
29202
29203 if (!this.areMediaTypesKnown_()) {
29204 return;
29205 }
29206
29207 var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
29208
29209 if (!codecs) {
29210 return;
29211 }
29212
29213 this.sourceUpdater_.createSourceBuffers(codecs);
29214 var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
29215 this.excludeIncompatibleVariants_(codecString);
29216 }
29217 /**
29218 * Excludes playlists with codecs that are unsupported by the muxer and browser.
29219 */
29220 ;
29221
29222 _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
29223 var _this10 = this;
29224
29225 var playlists = this.master().playlists;
29226 var ids = []; // TODO: why don't we have a property to loop through all
29227 // playlists? Why did we ever mix indexes and keys?
29228
29229 Object.keys(playlists).forEach(function (key) {
29230 var variant = playlists[key]; // check if we already processed this playlist.
29231
29232 if (ids.indexOf(variant.id) !== -1) {
29233 return;
29234 }
29235
29236 ids.push(variant.id);
29237 var codecs = codecsForPlaylist(_this10.master(), variant); // master is a method; invoke it to pass the parsed master playlist object
29238 var unsupported = [];
29239
29240 if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
29241 unsupported.push("audio codec " + codecs.audio);
29242 }
29243
29244 if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
29245 unsupported.push("video codec " + codecs.video);
29246 }
29247
29248 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
29249 unsupported.push("text codec " + codecs.text);
29250 }
29251
29252 if (unsupported.length) {
29253 variant.excludeUntil = Infinity;
29254
29255 _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
29256 }
29257 });
29258 }
29259 /**
29260 * Blacklist playlists that are known to be codec or
29261 * stream-incompatible with the SourceBuffer configuration. For
29262 * instance, Media Source Extensions would cause the video element to
29263 * stall waiting for video data if you switched from a variant with
29264 * video and audio to an audio-only one.
29265 *
29266 * @param {Object} media a media playlist compatible with the current
29267 * set of SourceBuffers. Variants in the current master playlist that
29268 * do not appear to have compatible codec or stream configurations
29269 * will be excluded from the default playlist selection algorithm
29270 * indefinitely.
29271 * @private
29272 */
29273 ;
29274
29275 _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
29276 var _this11 = this;
29277
29278 var ids = [];
29279 var playlists = this.master().playlists;
29280 var codecs = unwrapCodecList(parseCodecs(codecString));
29281 var codecCount_ = codecCount(codecs);
29282 var videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
29283 var audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
29284 Object.keys(playlists).forEach(function (key) {
29285 var variant = playlists[key]; // check if we already processed this playlist.
29286 // or if it is already excluded forever.
29287
29288 if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
29289 return;
29290 }
29291
29292 ids.push(variant.id);
29293 var blacklistReasons = []; // get codecs from the playlist for this variant
29294
29295 var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
29296 var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
29297 // variant is incompatible. Wait for mux.js to probe
29298
29299 if (!variantCodecs.audio && !variantCodecs.video) {
29300 return;
29301 } // TODO: we can support this by removing the
29302 // old media source and creating a new one, but it will take some work.
29303 // The number of streams cannot change
29304
29305
29306 if (variantCodecCount !== codecCount_) {
29307 blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
29308 } // only exclude playlists by codec change, if codecs cannot switch
29309 // during playback.
29310
29311
29312 if (!_this11.sourceUpdater_.canChangeType()) {
29313 var variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
29314 var variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
29315
29316 if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
29317 blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
29318 } // the audio codec cannot change
29319
29320
29321 if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
29322 blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
29323 }
29324 }
29325
29326 if (blacklistReasons.length) {
29327 variant.excludeUntil = Infinity;
29328
29329 _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
29330 }
29331 });
29332 };
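  // Example (illustrative): if source buffers were created for
  // "avc1.64001f,mp4a.40.2" (codec count 2), an audio-only variant
  // (codec count 1) is excluded with reason 'codec count "1" !== "2"', and,
  // when changeType() is unavailable, an hvc1 variant is excluded for
  // 'video codec "hvc1" !== "avc1"'.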
29333
29334 _proto.updateAdCues_ = function updateAdCues_(media) {
29335 var offset = 0;
29336 var seekable = this.seekable();
29337
29338 if (seekable.length) {
29339 offset = seekable.start(0);
29340 }
29341
29342 updateAdCues(media, this.cueTagsTrack_, offset);
29343 }
29344 /**
29345 * Calculates the desired forward buffer length based on current time
29346 *
29347 * @return {number} Desired forward buffer length in seconds
29348 */
29349 ;
29350
29351 _proto.goalBufferLength = function goalBufferLength() {
29352 var currentTime = this.tech_.currentTime();
29353 var initial = Config.GOAL_BUFFER_LENGTH;
29354 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
29355 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
29356 return Math.min(initial + currentTime * rate, max);
29357 }
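  // Worked example: assuming the library's default Config values at the time of
  // writing (GOAL_BUFFER_LENGTH 30, GOAL_BUFFER_LENGTH_RATE 1,
  // MAX_GOAL_BUFFER_LENGTH 60), at currentTime 20 the goal is
  // min(30 + 20 * 1, 60) = 50 seconds of forward buffer.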
29358 /**
29359 * Calculates the desired buffer low water line based on current time
29360 *
29361 * @return {number} Desired buffer low water line in seconds
29362 */
29363 ;
29364
29365 _proto.bufferLowWaterLine = function bufferLowWaterLine() {
29366 var currentTime = this.tech_.currentTime();
29367 var initial = Config.BUFFER_LOW_WATER_LINE;
29368 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
29369 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
29370 var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
29371 return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
29372 };
29373
29374 _proto.bufferHighWaterLine = function bufferHighWaterLine() {
29375 return Config.BUFFER_HIGH_WATER_LINE;
29376 };
29377
29378 return MasterPlaylistController;
29379 }(videojs__default["default"].EventTarget);
29380
29381 /**
29382 * Returns a function that acts as the Enable/disable playlist function.
29383 *
29384 * @param {PlaylistLoader} loader - The master playlist loader
29385 * @param {string} playlistID - id of the playlist
29386 * @param {Function} changePlaylistFn - A function to be called after a
29387 * playlist's enabled-state has been changed. Will NOT be called if a
29388 * playlist's enabled-state is unchanged
29389 * @param {boolean=} enable - Value to set the playlist enabled-state to
29390 * or if undefined returns the current enabled-state for the playlist
29391 * @return {Function} Function for setting/getting enabled
29392 */
29393
29394 var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
29395 return function (enable) {
29396 var playlist = loader.master.playlists[playlistID];
29397 var incompatible = isIncompatible(playlist);
29398 var currentlyEnabled = isEnabled(playlist);
29399
29400 if (typeof enable === 'undefined') {
29401 return currentlyEnabled;
29402 }
29403
29404 if (enable) {
29405 delete playlist.disabled;
29406 } else {
29407 playlist.disabled = true;
29408 }
29409
29410 if (enable !== currentlyEnabled && !incompatible) {
29411 // Ensure the outside world knows about our changes
29412 changePlaylistFn();
29413
29414 if (enable) {
29415 loader.trigger('renditionenabled');
29416 } else {
29417 loader.trigger('renditiondisabled');
29418 }
29419 }
29420
29421 return enable;
29422 };
29423 };
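  // Usage sketch: the returned function doubles as getter and setter. The id
  // 'playlist-0' and callback onChange below are hypothetical.
  //
  //   var enabled = enableFunction(loader, 'playlist-0', onChange);
  //   enabled();      // read the current enabled-state
  //   enabled(false); // disable the rendition; fires 'renditiondisabled' and onChange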
29424 /**
29425 * The representation object encapsulates the publicly visible information
29426 * in a media playlist along with a setter/getter-type function (enabled)
29427 * for changing the enabled-state of a particular playlist entry
29428 *
29429 * @class Representation
29430 */
29431
29432
29433 var Representation = function Representation(vhsHandler, playlist, id) {
29434 var mpc = vhsHandler.masterPlaylistController_,
29435 smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
29436
29437 var changeType = smoothQualityChange ? 'smooth' : 'fast';
29438 var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
29439
29440 if (playlist.attributes) {
29441 var resolution = playlist.attributes.RESOLUTION;
29442 this.width = resolution && resolution.width;
29443 this.height = resolution && resolution.height;
29444 this.bandwidth = playlist.attributes.BANDWIDTH;
29445 }
29446
29447 this.codecs = codecsForPlaylist(mpc.master(), playlist);
29448 this.playlist = playlist; // The id is simply the ordinality of the media playlist
29449 // within the master playlist
29450
29451 this.id = id; // Partially-apply the enableFunction to create a playlist-
29452 // specific variant
29453
29454 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
29455 };
29456 /**
29457 * A mixin function that adds the `representations` api to an instance
29458 * of the VhsHandler class
29459 *
29460 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
29461 * representation API into
29462 */
29463
29464
29465 var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
29466 // Add a single API-specific function to the VhsHandler instance
29467 vhsHandler.representations = function () {
29468 var master = vhsHandler.masterPlaylistController_.master();
29469 var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
29470
29471 if (!playlists) {
29472 return [];
29473 }
29474
29475 return playlists.filter(function (media) {
29476 return !isIncompatible(media);
29477 }).map(function (e, i) {
29478 return new Representation(vhsHandler, e, e.id);
29479 });
29480 };
29481 };
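  // Example (hypothetical): iterating renditions from application code. The exact
  // path to the VHS handler depends on the player setup; `vhsHandler` stands in
  // for a VhsHandler instance this mixin has been applied to.
  //
  //   vhsHandler.representations().forEach(function (rep) {
  //     // keep only renditions at or below 720p
  //     rep.enabled(!rep.height || rep.height <= 720);
  //   });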
29482
29483 /**
29484 * @file playback-watcher.js
29485 *
29486 * Playback starts, and now my watch begins. It shall not end until my death. I shall
29487 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
29488 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
29489 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
29490 * my life and honor to the Playback Watch, for this Player and all the Players to come.
29491 */
29492
29493 var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
29494 /**
29495 * @class PlaybackWatcher
29496 */
29497
29498 var PlaybackWatcher = /*#__PURE__*/function () {
29499 /**
29500 * Represents a PlaybackWatcher object.
29501 *
29502 * @class
29503 * @param {Object} options an object that includes the tech and settings
29504 */
29505 function PlaybackWatcher(options) {
29506 var _this = this;
29507
29508 this.masterPlaylistController_ = options.masterPlaylistController;
29509 this.tech_ = options.tech;
29510 this.seekable = options.seekable;
29511 this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
29512 this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
29513 this.media = options.media;
29514 this.consecutiveUpdates = 0;
29515 this.lastRecordedTime = null;
29516 this.timer_ = null;
29517 this.checkCurrentTimeTimeout_ = null;
29518 this.logger_ = logger('PlaybackWatcher');
29519 this.logger_('initialize');
29520
29521 var playHandler = function playHandler() {
29522 return _this.monitorCurrentTime_();
29523 };
29524
29525 var canPlayHandler = function canPlayHandler() {
29526 return _this.monitorCurrentTime_();
29527 };
29528
29529 var waitingHandler = function waitingHandler() {
29530 return _this.techWaiting_();
29531 };
29532
29533 var cancelTimerHandler = function cancelTimerHandler() {
29534 return _this.cancelTimer_();
29535 };
29536
29537 var mpc = this.masterPlaylistController_;
29538 var loaderTypes = ['main', 'subtitle', 'audio'];
29539 var loaderChecks = {};
29540 loaderTypes.forEach(function (type) {
29541 loaderChecks[type] = {
29542 reset: function reset() {
29543 return _this.resetSegmentDownloads_(type);
29544 },
29545 updateend: function updateend() {
29546 return _this.checkSegmentDownloads_(type);
29547 }
29548 };
29549 mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
29550 // isn't changing, we want to reset. We cannot assume that the new rendition
29551 // will also be stalled until after new appends.
29552
29553 mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
29554 // This prevents one-segment playlists (single vtt or single-segment content)
29555 // from being flagged as stalling: in those cases the buffer will not change,
29556 // since it already spans the entire video duration.
29557
29558 _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
29559 });
29560 /**
29561 * We check if a seek was into a gap through the following steps:
29562 * 1. We get a seeking event and we do not get a seeked event. This means that
29563 * a seek was attempted but not completed.
29564 * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
29565 * removed everything from our buffer and appended a segment, and should be ready
29566 * to check for gaps.
29567 */
29568
29569 var setSeekingHandlers = function setSeekingHandlers(fn) {
29570 ['main', 'audio'].forEach(function (type) {
29571 mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
29572 });
29573 };
29574
29575 this.seekingAppendCheck_ = function () {
29576 if (_this.fixesBadSeeks_()) {
29577 _this.consecutiveUpdates = 0;
29578 _this.lastRecordedTime = _this.tech_.currentTime();
29579 setSeekingHandlers('off');
29580 }
29581 };
29582
29583 this.clearSeekingAppendCheck_ = function () {
29584 return setSeekingHandlers('off');
29585 };
29586
29587 this.watchForBadSeeking_ = function () {
29588 _this.clearSeekingAppendCheck_();
29589
29590 setSeekingHandlers('on');
29591 };
29592
29593 this.tech_.on('seeked', this.clearSeekingAppendCheck_);
29594 this.tech_.on('seeking', this.watchForBadSeeking_);
29595 this.tech_.on('waiting', waitingHandler);
29596 this.tech_.on(timerCancelEvents, cancelTimerHandler);
29597 this.tech_.on('canplay', canPlayHandler);
29598 /*
29599 An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
29600 is surfaced in one of two ways:
29601 1) The `waiting` event is fired before the player has buffered content, making it impossible
29602 to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
29603 we can check if playback is stalled due to a gap, and skip the gap if necessary.
29604 2) A source with a gap at the beginning of the stream is loaded programmatically while the player
29605 is in a playing state. To catch this case, it's important that our one-time play listener is set up
29606 even if the player is already in a playing state.
29607 */
29608
29609 this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
29610
29611 this.dispose = function () {
29612 _this.clearSeekingAppendCheck_();
29613
29614 _this.logger_('dispose');
29615
29616 _this.tech_.off('waiting', waitingHandler);
29617
29618 _this.tech_.off(timerCancelEvents, cancelTimerHandler);
29619
29620 _this.tech_.off('canplay', canPlayHandler);
29621
29622 _this.tech_.off('play', playHandler);
29623
29624 _this.tech_.off('seeking', _this.watchForBadSeeking_);
29625
29626 _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);
29627
29628 loaderTypes.forEach(function (type) {
29629 mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
29630 mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);
29631
29632 _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
29633 });
29634
29635 if (_this.checkCurrentTimeTimeout_) {
29636 window.clearTimeout(_this.checkCurrentTimeTimeout_);
29637 }
29638
29639 _this.cancelTimer_();
29640 };
29641 }
29642 /**
29643 * Periodically check current time to see if playback stopped
29644 *
29645 * @private
29646 */
29647
29648
29649 var _proto = PlaybackWatcher.prototype;
29650
29651 _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
29652 this.checkCurrentTime_();
29653
29654 if (this.checkCurrentTimeTimeout_) {
29655 window.clearTimeout(this.checkCurrentTimeTimeout_);
29656 } // poll every 250ms, the interval WebKit uses for timeupdate (for reference: 42ms is one frame at 24fps; Firefox uses 15ms)
29657
29658
29659 this.checkCurrentTimeTimeout_ = window.setTimeout(this.monitorCurrentTime_.bind(this), 250);
29660 }
29661 /**
29662 * Reset stalled download stats for a specific type of loader
29663 *
29664 * @param {string} type
29665 * The segment loader type to check.
29666 *
29667 * @listens SegmentLoader#playlistupdate
29668 * @listens Tech#seeking
29669 * @listens Tech#seeked
29670 */
29671 ;
29672
29673 _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
29674 var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
29675
29676 if (this[type + "StalledDownloads_"] > 0) {
29677 this.logger_("resetting possible stalled download count for " + type + " loader");
29678 }
29679
29680 this[type + "StalledDownloads_"] = 0;
29681 this[type + "Buffered_"] = loader.buffered_();
29682 }
29683 /**
29684 * Checks on every segment `appendsdone` event to see
29685 * if segment appends are making progress. If they are not
29686 * and we are still downloading bytes, we blacklist the playlist.
29687 *
29688 * @param {string} type
29689 * The segment loader type to check.
29690 *
29691 * @listens SegmentLoader#appendsdone
29692 */
29693 ;
29694
29695 _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
29696 var mpc = this.masterPlaylistController_;
29697 var loader = mpc[type + "SegmentLoader_"];
29698 var buffered = loader.buffered_();
29699 var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
29700 this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue or
29701 // the buffered value for this loader changed
29702 // appends are working
29703
29704 if (isBufferedDifferent) {
29705 this.resetSegmentDownloads_(type);
29706 return;
29707 }
29708
29709 this[type + "StalledDownloads_"]++;
29710 this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
29711 playlistId: loader.playlist_ && loader.playlist_.id,
29712 buffered: timeRangesToArray(buffered)
29713 }); // after 10 possibly stalled appends with no reset, exclude
29714
29715 if (this[type + "StalledDownloads_"] < 10) {
29716 return;
29717 }
29718
29719 this.logger_(type + " loader stalled download exclusion");
29720 this.resetSegmentDownloads_(type);
29721 this.tech_.trigger({
29722 type: 'usage',
29723 name: "vhs-" + type + "-download-exclusion"
29724 });
29725
29726 if (type === 'subtitle') {
29727 return;
29728 } // TODO: should we exclude audio tracks rather than main tracks
29729 // when type is audio?
29730
29731
29732 mpc.blacklistCurrentPlaylist({
29733 message: "Excessive " + type + " segment downloading detected."
29734 }, Infinity);
29735 }
29736 /**
29737 * The purpose of this function is to emulate the "waiting" event on
29738 * browsers that do not emit it when they are waiting for more
29739 * data to continue playback
29740 *
29741 * @private
29742 */
29743 ;
29744
29745 _proto.checkCurrentTime_ = function checkCurrentTime_() {
29746 if (this.tech_.paused() || this.tech_.seeking()) {
29747 return;
29748 }
29749
29750 var currentTime = this.tech_.currentTime();
29751 var buffered = this.tech_.buffered();
29752
29753 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
29754 // If current time is at the end of the final buffered region, then any playback
29755 // stall is most likely caused by buffering in a low bandwidth environment. The tech
29756 // should fire a `waiting` event in this scenario, but browser and tech behavior
29757 // is inconsistent, so it may not. Calling `techWaiting_` here allows us to simulate
29758 // responding to a native `waiting` event when the tech fails to emit one.
29759 return this.techWaiting_();
29760 }
29761
29762 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
29763 this.consecutiveUpdates++;
29764 this.waiting_();
29765 } else if (currentTime === this.lastRecordedTime) {
29766 this.consecutiveUpdates++;
29767 } else {
29768 this.consecutiveUpdates = 0;
29769 this.lastRecordedTime = currentTime;
29770 }
29771 }
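  // Timeline sketch: with the 250ms monitor above, a currentTime stuck at 10.0
  // produces consecutiveUpdates 1..5 over roughly 1.25 seconds; on the next
  // identical reading waiting_() runs, and any movement of currentTime resets
  // the counter to 0.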
  /**
   * Cancels any pending timers and resets the 'timeupdate' mechanism
   * designed to detect that we are stalled
   *
   * @private
   */
  ;

  _proto.cancelTimer_ = function cancelTimer_() {
    this.consecutiveUpdates = 0;

    if (this.timer_) {
      this.logger_('cancelTimer_');
      clearTimeout(this.timer_);
    }

    this.timer_ = null;
  }
  /**
   * Fixes situations where there's a bad seek
   *
   * @return {boolean} whether an action was taken to fix the seek
   * @private
   */
  ;

  _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
    var seeking = this.tech_.seeking();

    if (!seeking) {
      return false;
    } // TODO: It's possible that these seekable checks should be moved out of this function
    // and into a function that runs on seekablechange. It's also possible that we only need
    // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
    // seekable range.


    var seekable = this.seekable();
    var currentTime = this.tech_.currentTime();
    var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
    var seekTo;

    if (isAfterSeekableRange) {
      var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)

      seekTo = seekableEnd;
    }

    if (this.beforeSeekableWindow_(seekable, currentTime)) {
      var seekableStart = seekable.start(0); // sync to the beginning of the live window
      // provide a buffer of .1 seconds to handle rounding/imprecise numbers

      seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
      // happen in live with a 3 segment playlist), then don't use a time delta
      seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
    }

    if (typeof seekTo !== 'undefined') {
      this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
      this.tech_.setCurrentTime(seekTo);
      return true;
    }

    var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
    var buffered = this.tech_.buffered();
    var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
    var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null; // verify that at least two segment durations have been
    // appended before checking for a gap.

    var twoSegmentDurations = (this.media().targetDuration - TIME_FUDGE_FACTOR) * 2;
    var bufferedToCheck = [audioBuffered, videoBuffered];

    for (var i = 0; i < bufferedToCheck.length; i++) {
      // skip null buffered
      if (!bufferedToCheck[i]) {
        continue;
      }

      var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations behind,
      // we haven't appended enough to call this a bad seek.

      if (timeAhead < twoSegmentDurations) {
        return false;
      }
    }

    var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
    // to seek over the gap

    if (nextRange.length === 0) {
      return false;
    }

    seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
    this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
    this.tech_.setCurrentTime(seekTo);
    return true;
  }
  /**
   * Handler for situations when we determine the player is waiting.
   *
   * @private
   */
  ;

  _proto.waiting_ = function waiting_() {
    if (this.techWaiting_()) {
      return;
    } // All tech waiting checks failed. Use last resort correction


    var currentTime = this.tech_.currentTime();
    var buffered = this.tech_.buffered();
    var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
    // region with no indication that anything is amiss (seen in Firefox). Seeking to
    // currentTime is usually enough to kickstart the player. This checks that the player
    // is currently within a buffered region before attempting a corrective seek.
    // Chrome does not appear to continue `timeupdate` events after a `waiting` event
    // until there is ~3 seconds of forward buffer available. PlaybackWatcher should also
    // make sure there is ~3 seconds of forward buffer before taking any corrective action
    // to avoid triggering an `unknownwaiting` event when the network is slow.

    if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
      this.cancelTimer_();
      this.tech_.setCurrentTime(currentTime);
      this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS

      this.tech_.trigger({
        type: 'usage',
        name: 'vhs-unknown-waiting'
      });
      this.tech_.trigger({
        type: 'usage',
        name: 'hls-unknown-waiting'
      });
      return;
    }
  }
  /**
   * Handler for situations when the tech fires a `waiting` event
   *
   * @return {boolean}
   *         True if an action (or none) was needed to correct the waiting. False if no
   *         checks passed
   * @private
   */
  ;

  _proto.techWaiting_ = function techWaiting_() {
    var seekable = this.seekable();
    var currentTime = this.tech_.currentTime();

    if (this.tech_.seeking() || this.timer_ !== null) {
      // Tech is seeking or already waiting on another action, no action needed
      return true;
    }

    if (this.beforeSeekableWindow_(seekable, currentTime)) {
      var livePoint = seekable.end(seekable.length - 1);
      this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
      this.cancelTimer_();
      this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS

      this.tech_.trigger({
        type: 'usage',
        name: 'vhs-live-resync'
      });
      this.tech_.trigger({
        type: 'usage',
        name: 'hls-live-resync'
      });
      return true;
    }

    var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
    var buffered = this.tech_.buffered();
    var videoUnderflow = this.videoUnderflow_({
      audioBuffered: sourceUpdater.audioBuffered(),
      videoBuffered: sourceUpdater.videoBuffered(),
      currentTime: currentTime
    });

    if (videoUnderflow) {
      // Even though the video underflowed and was stuck in a gap, the audio overplayed
      // the gap, leading currentTime into a buffered range. Seeking to currentTime
      // allows the video to catch up to the audio position without losing any audio
      // (only suffering ~3 seconds of frozen video and a pause in audio playback).
      this.cancelTimer_();
      this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS

      this.tech_.trigger({
        type: 'usage',
        name: 'vhs-video-underflow'
      });
      this.tech_.trigger({
        type: 'usage',
        name: 'hls-video-underflow'
      });
      return true;
    }

    var nextRange = findNextRange(buffered, currentTime); // check for gap

    if (nextRange.length > 0) {
      var difference = nextRange.start(0) - currentTime;
      this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
      this.cancelTimer_();
      this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
      return true;
    } // All checks failed. Returning false to indicate failure to correct waiting


    return false;
  };

  _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
    if (allowSeeksWithinUnsafeLiveWindow === void 0) {
      allowSeeksWithinUnsafeLiveWindow = false;
    }

    if (!seekable.length) {
      // we can't make a solid case if there's no seekable, default to false
      return false;
    }

    var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
    var isLive = !playlist.endList;

    if (isLive && allowSeeksWithinUnsafeLiveWindow) {
      allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
    }

    if (currentTime > allowedEnd) {
      return true;
    }

    return false;
  };

  _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
    if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
    seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
      return true;
    }

    return false;
  };
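  // Illustration (not part of the library): with a live seekable range of
  // [20, 80], the two window checks above classify currentTime roughly like
  // this (SAFE_TIME_DELTA is a small fudge value, and liveRangeSafeTimeDelta
  // defaults to it via the options merged in `src` below):
  //
  //   watcher.afterSeekableWindow_(seekable, 81, playlist)  // => true, past 80 + delta
  //   watcher.afterSeekableWindow_(seekable, 79, playlist)  // => false, inside the window
  //   watcher.beforeSeekableWindow_(seekable, 19.5)         // => true, fell behind the window
  //
  // A seekable range starting at 0 identifies a VOD stream, in which case
  // beforeSeekableWindow_ always returns false.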

  _proto.videoUnderflow_ = function videoUnderflow_(_ref) {
    var videoBuffered = _ref.videoBuffered,
        audioBuffered = _ref.audioBuffered,
        currentTime = _ref.currentTime;

    // audio only content will not have video underflow :)
    if (!videoBuffered) {
      return;
    }

    var gap; // find a gap in demuxed content.

    if (videoBuffered.length && audioBuffered.length) {
      // in Chrome audio will continue to play for ~3s when we run out of video
      // so we have to check that the video buffer did have some buffer in the
      // past.
      var lastVideoRange = findRange(videoBuffered, currentTime - 3);
      var videoRange = findRange(videoBuffered, currentTime);
      var audioRange = findRange(audioBuffered, currentTime);

      if (audioRange.length && !videoRange.length && lastVideoRange.length) {
        gap = {
          start: lastVideoRange.end(0),
          end: audioRange.end(0)
        };
      } // find a gap in muxed content.

    } else {
      var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
      // stuck in a gap due to video underflow.

      if (!nextRange.length) {
        gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
      }
    }

    if (gap) {
      this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
      return true;
    }

    return false;
  }
  /**
   * Timer callback. If playback still has not proceeded, then we seek
   * to the start of the next buffered region.
   *
   * @private
   */
  ;

  _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
    var buffered = this.tech_.buffered();
    var currentTime = this.tech_.currentTime();
    var nextRange = findNextRange(buffered, currentTime);
    this.cancelTimer_();

    if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
      return;
    }

    this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played

    this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
    this.tech_.trigger({
      type: 'usage',
      name: 'vhs-gap-skip'
    });
    this.tech_.trigger({
      type: 'usage',
      name: 'hls-gap-skip'
    });
  };

  _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
    // At least in Chrome, if there is a gap in the video buffer, the audio will continue
    // playing for ~3 seconds after the video gap starts. This is done to account for
    // video buffer underflow/underrun (note that this is not done when there is audio
    // buffer underflow/underrun -- in that case the video will stop as soon as it
    // encounters the gap, as audio stalls are more noticeable/jarring to a user than
    // video stalls). The player's time will reflect the playthrough of audio, so the
    // time will appear as if we are in a buffered region, even if we are stuck in a
    // "gap."
    //
    // Example:
    // video buffer:   0 => 10.1, 10.2 => 20
    // audio buffer:   0 => 20
    // overall buffer: 0 => 10.1, 10.2 => 20
    // current time: 13
    //
    // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
    // however, the audio continued playing until it reached ~3 seconds past the gap
    // (13 seconds), at which point it stops as well. Since current time is past the
    // gap, findNextRange will return no ranges.
    //
    // To check for this issue, we see if there is a gap that starts somewhere within
    // a 3 second range (3 seconds +/- 1 second) back from our current time.
    var gaps = findGaps(buffered);

    for (var i = 0; i < gaps.length; i++) {
      var start = gaps.start(i);
      var end = gaps.end(i); // gap starts no more than 4 seconds back

      if (currentTime - start < 4 && currentTime - start > 2) {
        return {
          start: start,
          end: end
        };
      }
    }

    return null;
  };

  return PlaybackWatcher;
}();

var defaultOptions = {
  errorInterval: 30,
  getSource: function getSource(next) {
    var tech = this.tech({
      IWillNotUseThisInPlugins: true
    });
    var sourceObj = tech.currentSource_ || this.currentSource();
    return next(sourceObj);
  }
};
/**
 * Main entry point for the plugin
 *
 * @param {Player} player a reference to a videojs Player instance
 * @param {Object} [options] an object with plugin options
 * @private
 */

var initPlugin = function initPlugin(player, options) {
  var lastCalled = 0;
  var seekTo = 0;
  var localOptions = videojs__default["default"].mergeOptions(defaultOptions, options);
  player.ready(function () {
    player.trigger({
      type: 'usage',
      name: 'vhs-error-reload-initialized'
    });
    player.trigger({
      type: 'usage',
      name: 'hls-error-reload-initialized'
    });
  });
  /**
   * Player modifications to perform that must wait until `loadedmetadata`
   * has been triggered
   *
   * @private
   */

  var loadedMetadataHandler = function loadedMetadataHandler() {
    if (seekTo) {
      player.currentTime(seekTo);
    }
  };
  /**
   * Set the source on the player element, play, and seek if necessary
   *
   * @param {Object} sourceObj An object specifying the source url and mime-type to play
   * @private
   */


  var setSource = function setSource(sourceObj) {
    if (sourceObj === null || sourceObj === undefined) {
      return;
    }

    seekTo = player.duration() !== Infinity && player.currentTime() || 0;
    player.one('loadedmetadata', loadedMetadataHandler);
    player.src(sourceObj);
    player.trigger({
      type: 'usage',
      name: 'vhs-error-reload'
    });
    player.trigger({
      type: 'usage',
      name: 'hls-error-reload'
    });
    player.play();
  };
  /**
   * Attempt to get a source from either the built-in getSource function
   * or a custom function provided via the options
   *
   * @private
   */


  var errorHandler = function errorHandler() {
    // Do not attempt to reload the source if a source-reload occurred before
    // 'errorInterval' time has elapsed since the last source-reload
    if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
      player.trigger({
        type: 'usage',
        name: 'vhs-error-reload-canceled'
      });
      player.trigger({
        type: 'usage',
        name: 'hls-error-reload-canceled'
      });
      return;
    }

    if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
      videojs__default["default"].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
      return;
    }

    lastCalled = Date.now();
    return localOptions.getSource.call(player, setSource);
  };
  /**
   * Unbind any event handlers that were bound by the plugin
   *
   * @private
   */


  var cleanupEvents = function cleanupEvents() {
    player.off('loadedmetadata', loadedMetadataHandler);
    player.off('error', errorHandler);
    player.off('dispose', cleanupEvents);
  };
  /**
   * Cleanup before re-initializing the plugin
   *
   * @param {Object} [newOptions] an object with plugin options
   * @private
   */


  var reinitPlugin = function reinitPlugin(newOptions) {
    cleanupEvents();
    initPlugin(player, newOptions);
  };

  player.on('error', errorHandler);
  player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
  // initializing the plugin

  player.reloadSourceOnError = reinitPlugin;
};
/**
 * Reload the source when an error is detected as long as there
 * wasn't an error previously within the last 30 seconds
 *
 * @param {Object} [options] an object with plugin options
 */


var reloadSourceOnError = function reloadSourceOnError(options) {
  initPlugin(this, options);
};
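// Illustration (not part of the library): a sketch of how the plugin above can
// be configured. `getSource` receives a callback and may hand it any source
// object; the URL here is hypothetical.
//
//   player.reloadSourceOnError({
//     // wait at least 30 seconds between reload attempts
//     errorInterval: 30,
//     getSource: function (reload) {
//       reload({
//         src: 'https://example.com/fresh-playlist.m3u8', // hypothetical URL
//         type: 'application/x-mpegURL'
//       });
//     }
//   });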

var version$4 = "2.11.1";

var version$3 = "5.14.1";

var version$2 = "0.19.2";

var version$1 = "4.7.0";

var version = "3.1.2";

var Vhs = {
  PlaylistLoader: PlaylistLoader,
  Playlist: Playlist,
  utils: utils,
  STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
  INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
  lastBandwidthSelector: lastBandwidthSelector,
  movingAverageBandwidthSelector: movingAverageBandwidthSelector,
  comparePlaylistBandwidth: comparePlaylistBandwidth,
  comparePlaylistResolution: comparePlaylistResolution,
  xhr: xhrFactory()
}; // Define getter/setters for config properties

Object.keys(Config).forEach(function (prop) {
  Object.defineProperty(Vhs, prop, {
    get: function get() {
      videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
      return Config[prop];
    },
    set: function set(value) {
      videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");

      if (typeof value !== 'number' || value < 0) {
        videojs__default["default"].log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
        return;
      }

      Config[prop] = value;
    }
  });
});
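// Illustration (not part of the library): every Config property is mirrored
// onto Vhs with a warning getter/setter, so global tuning looks like this
// (INITIAL_BANDWIDTH is one such Config key, used further below):
//
//   Vhs.INITIAL_BANDWIDTH = 2000000; // logs the UNSAFE warning, then updates Config
//   Vhs.INITIAL_BANDWIDTH = -1;      // rejected: values must be numbers >= 0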
var LOCAL_STORAGE_KEY = 'videojs-vhs';
/**
 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
 *
 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
 * @function handleVhsMediaChange
 */

var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
  var newPlaylist = playlistLoader.media();
  var selectedIndex = -1;

  for (var i = 0; i < qualityLevels.length; i++) {
    if (qualityLevels[i].id === newPlaylist.id) {
      selectedIndex = i;
      break;
    }
  }

  qualityLevels.selectedIndex_ = selectedIndex;
  qualityLevels.trigger({
    selectedIndex: selectedIndex,
    type: 'change'
  });
};
/**
 * Adds quality levels to list once playlist metadata is available
 *
 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
 * @param {Object} vhs Vhs object to listen to for media events.
 * @function handleVhsLoadedMetadata
 */


var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
  vhs.representations().forEach(function (rep) {
    qualityLevels.addQualityLevel(rep);
  });
  handleVhsMediaChange(qualityLevels, vhs.playlists);
}; // HLS is a source handler, not a tech. Make sure attempts to use it
// as one do not cause exceptions.


Vhs.canPlaySource = function () {
  return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};

var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
  if (!keySystemOptions) {
    return keySystemOptions;
  }

  var codecs = {};

  if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
    codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
  }

  if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
    codecs.audio = audioPlaylist.attributes.CODECS;
  }

  var videoContentType = getMimeForCodec(codecs.video);
  var audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist

  var keySystemContentTypes = {};

  for (var keySystem in keySystemOptions) {
    keySystemContentTypes[keySystem] = {};

    if (audioContentType) {
      keySystemContentTypes[keySystem].audioContentType = audioContentType;
    }

    if (videoContentType) {
      keySystemContentTypes[keySystem].videoContentType = videoContentType;
    } // Default to using the video playlist's PSSH even though they may be different, as
    // videojs-contrib-eme will only accept one in the options.
    //
    // This shouldn't be an issue in most cases, as early initialization will handle
    // all unique PSSH values; if any are missed, encrypted events should carry the
    // specific information needed for the unique license.


    if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
      keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
    } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
    // so we need to prevent overwriting the URL entirely


    if (typeof keySystemOptions[keySystem] === 'string') {
      keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
    }
  }

  return videojs__default["default"].mergeOptions(keySystemOptions, keySystemContentTypes);
};
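// Illustration (not part of the library): given source keySystems like
//
//   { 'com.widevine.alpha': { url: 'https://example.com/license' } } // hypothetical URL
//
// and playlists whose CODECS resolve to avc1/mp4a, emeKeySystems produces
// roughly:
//
//   {
//     'com.widevine.alpha': {
//       url: 'https://example.com/license',
//       videoContentType: 'video/mp4;codecs="avc1.4d400d"',
//       audioContentType: 'audio/mp4;codecs="mp4a.40.2"',
//       pssh: <Uint8Array> // when the playlist carries content protection
//     }
//   }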
/**
 * @typedef {Object} KeySystems
 *
 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
 * Note: not all options are listed here.
 *
 * @property {Uint8Array} [pssh]
 *           Protection System Specific Header
 */

/**
 * Goes through all the playlists and collects an array of KeySystems options objects
 * containing each playlist's keySystems and their pssh values, if available.
 *
 * @param {Object[]} playlists
 *        The playlists to look through
 * @param {string[]} keySystems
 *        The keySystems to collect pssh values for
 *
 * @return {KeySystems[]}
 *         An array of KeySystems objects containing available key systems and their
 *         pssh values
 */


var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
  return playlists.reduce(function (keySystemsArr, playlist) {
    if (!playlist.contentProtection) {
      return keySystemsArr;
    }

    var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
      var keySystemOptions = playlist.contentProtection[keySystem];

      if (keySystemOptions && keySystemOptions.pssh) {
        keySystemsObj[keySystem] = {
          pssh: keySystemOptions.pssh
        };
      }

      return keySystemsObj;
    }, {});

    if (Object.keys(keySystemsOptions).length) {
      keySystemsArr.push(keySystemsOptions);
    }

    return keySystemsArr;
  }, []);
};
/**
 * Returns a promise that waits for the
 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
 *
 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
 * browsers.
 *
 * As per the above ticket, this is particularly important for Chrome, where, if
 * unencrypted content is appended before encrypted content and the key session has not
 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
 * during playback.
 *
 * @param {Object} player
 *        The player instance
 * @param {Object[]} sourceKeySystems
 *        The key systems options from the player source
 * @param {Object} [audioMedia]
 *        The active audio media playlist (optional)
 * @param {Object[]} mainPlaylists
 *        The playlists found on the master playlist object
 *
 * @return {Object}
 *         Promise that resolves when the key session has been created
 */


var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
  var player = _ref.player,
      sourceKeySystems = _ref.sourceKeySystems,
      audioMedia = _ref.audioMedia,
      mainPlaylists = _ref.mainPlaylists;

  if (!player.eme.initializeMediaKeys) {
    return Promise.resolve();
  } // TODO should all audio PSSH values be initialized for DRM?
  //
  // All unique video rendition pssh values are initialized for DRM, but here only
  // the initial audio playlist license is initialized. In theory, an encrypted
  // event should be fired if the user switches to an alternative audio playlist
  // where a license is required, but this case hasn't yet been tested. In addition, there
  // may be many alternate audio playlists unlikely to be used (e.g., multiple different
  // languages).


  var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
  var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
  var initializationFinishedPromises = [];
  var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
  // only place where it should not be deduped is for ms-prefixed APIs, but the early
  // return for IE11 above, and the existence of modern EME APIs in addition to
  // ms-prefixed APIs on Edge should prevent this from being a concern.
  // initializeMediaKeys also won't use the webkit-prefixed APIs.

  keySystemsOptionsArr.forEach(function (keySystemsOptions) {
    keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
      player.tech_.one('keysessioncreated', resolve);
    }));
    initializationFinishedPromises.push(new Promise(function (resolve, reject) {
      player.eme.initializeMediaKeys({
        keySystems: keySystemsOptions
      }, function (err) {
        if (err) {
          reject(err);
          return;
        }

        resolve();
      });
    }));
  }); // The reasons Promise.race is chosen over Promise.any:
  //
  // * Promise.any is only available in Safari 14+.
  // * None of these promises are expected to reject. If they do reject, it might be
  //   better here for the race to surface the rejection, rather than mask it by using
  //   Promise.any.

  return Promise.race([// If a session was previously created, these will all finish resolving without
  // creating a new session, otherwise it will take until the end of all license
  // requests, which is why the key session check is used (to make setup much faster).
  Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
  Promise.race(keySessionCreatedPromises)]);
};
/**
 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
 * there are keySystems on the source, sets up source options to prepare the source for
 * eme.
 *
 * @param {Object} player
 *        The player instance
 * @param {Object[]} sourceKeySystems
 *        The key systems options from the player source
 * @param {Object} media
 *        The active media playlist
 * @param {Object} [audioMedia]
 *        The active audio media playlist (optional)
 *
 * @return {boolean}
 *         Whether or not options were configured and EME is available
 */

var setupEmeOptions = function setupEmeOptions(_ref2) {
  var player = _ref2.player,
      sourceKeySystems = _ref2.sourceKeySystems,
      media = _ref2.media,
      audioMedia = _ref2.audioMedia;
  var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);

  if (!sourceOptions) {
    return false;
  }

  player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
  // do nothing.

  if (sourceOptions && !player.eme) {
    videojs__default["default"].log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
    return false;
  }

  return true;
};

var getVhsLocalStorage = function getVhsLocalStorage() {
  if (!window.localStorage) {
    return null;
  }

  var storedObject = window.localStorage.getItem(LOCAL_STORAGE_KEY);

  if (!storedObject) {
    return null;
  }

  try {
    return JSON.parse(storedObject);
  } catch (e) {
    // someone may have tampered with the value
    return null;
  }
};

var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
  if (!window.localStorage) {
    return false;
  }

  var objectToStore = getVhsLocalStorage();
  objectToStore = objectToStore ? videojs__default["default"].mergeOptions(objectToStore, options) : options;

  try {
    window.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
  } catch (e) {
    // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
    // storage is set to 0).
    // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
    // No need to perform any operation.
    return false;
  }

  return objectToStore;
};
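// Illustration (not part of the library): the object persisted under the
// 'videojs-vhs' localStorage key merges whatever is passed in; elsewhere in
// this file it is updated with the measured bandwidth and throughput, e.g.:
//
//   updateVhsLocalStorage({ bandwidth: 4194304, throughput: 8388608 });
//   getVhsLocalStorage(); // => { bandwidth: 4194304, throughput: 8388608 }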
/**
 * Parses VHS-supported media types from data URIs. See
 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
 * for information on data URIs.
 *
 * @param {string} dataUri
 *        The data URI
 *
 * @return {string|Object}
 *         The parsed object/string, or the original string if no supported media type
 *         was found
 */


var expandDataUri = function expandDataUri(dataUri) {
  if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
    return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
  } // no known case for this data URI, return the string as-is


  return dataUri;
};
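// Illustration (not part of the library): a JSON manifest object can be passed
// inline through the recognized data URI prefix; anything else passes through
// untouched.
//
//   expandDataUri('data:application/vnd.videojs.vhs+json,{"mediaGroups":{}}');
//   // => { mediaGroups: {} }
//   expandDataUri('https://example.com/playlist.m3u8'); // hypothetical URL
//   // => 'https://example.com/playlist.m3u8'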
/**
 * Whether the browser has built-in HLS support.
 */


Vhs.supportsNativeHls = function () {
  if (!document || !document.createElement) {
    return false;
  }

  var video = document.createElement('video'); // native HLS is definitely not supported if HTML5 video isn't

  if (!videojs__default["default"].getTech('Html5').isSupported()) {
    return false;
  } // HLS manifests can go by many mime-types


  var canPlay = [// Apple sanctioned
  'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
  'audio/mpegurl', // Very common
  'audio/x-mpegurl', // Very common
  'application/x-mpegurl', // Included for completeness
  'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
  return canPlay.some(function (canItPlay) {
    return /maybe|probably/i.test(video.canPlayType(canItPlay));
  });
}();

Vhs.supportsNativeDash = function () {
  if (!document || !document.createElement || !videojs__default["default"].getTech('Html5').isSupported()) {
    return false;
  }

  return /maybe|probably/i.test(document.createElement('video').canPlayType('application/dash+xml'));
}();

Vhs.supportsTypeNatively = function (type) {
  if (type === 'hls') {
    return Vhs.supportsNativeHls;
  }

  if (type === 'dash') {
    return Vhs.supportsNativeDash;
  }

  return false;
};
/**
 * HLS is a source handler, not a tech. Make sure attempts to use it
 * as one do not cause exceptions.
 */


Vhs.isSupported = function () {
  return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};

var Component = videojs__default["default"].getComponent('Component');
/**
 * The Vhs Handler object, where we orchestrate all of the parts
 * of HLS to interact with video.js
 *
 * @class VhsHandler
 * @extends videojs.Component
 * @param {Object} source the source object
 * @param {Tech} tech the parent tech object
 * @param {Object} options optional and required options
 */

var VhsHandler = /*#__PURE__*/function (_Component) {
  inheritsLoose(VhsHandler, _Component);

  function VhsHandler(source, tech, options) {
    var _this;

    _this = _Component.call(this, tech, videojs__default["default"].mergeOptions(options.hls, options.vhs)) || this;

    if (options.hls && Object.keys(options.hls).length) {
      videojs__default["default"].log.warn('Using hls options is deprecated. Use vhs instead.');
    } // if a tech level `initialBandwidth` option was passed
    // use that over the VHS level `bandwidth` option


    if (typeof options.initialBandwidth === 'number') {
      _this.options_.bandwidth = options.initialBandwidth;
    }

    _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but setup a reference to HLS for
    // backwards-compatibility

    if (tech.options_ && tech.options_.playerId) {
      var _player = videojs__default["default"](tech.options_.playerId);

      if (!_player.hasOwnProperty('hls')) {
        Object.defineProperty(_player, 'hls', {
          get: function get() {
            videojs__default["default"].log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
            tech.trigger({
              type: 'usage',
              name: 'hls-player-access'
            });
            return assertThisInitialized(_this);
          },
          configurable: true
        });
      }

      if (!_player.hasOwnProperty('vhs')) {
        Object.defineProperty(_player, 'vhs', {
          get: function get() {
            videojs__default["default"].log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
            tech.trigger({
              type: 'usage',
              name: 'vhs-player-access'
            });
            return assertThisInitialized(_this);
          },
          configurable: true
        });
      }

      if (!_player.hasOwnProperty('dash')) {
        Object.defineProperty(_player, 'dash', {
          get: function get() {
            videojs__default["default"].log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
            return assertThisInitialized(_this);
          },
          configurable: true
        });
      }

      _this.player_ = _player;
    }

    _this.tech_ = tech;
    _this.source_ = source;
    _this.stats = {};
    _this.ignoreNextSeekingEvent_ = false;

    _this.setOptions_();

    if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
      tech.overrideNativeAudioTracks(true);
      tech.overrideNativeVideoTracks(true);
    } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
      // overriding native HLS only works if audio tracks have been emulated
      // error early if we're misconfigured
      throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
    } // listen for fullscreenchange events for this player so that we
    // can adjust our quality selection quickly


    _this.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
      var fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;

      if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
        _this.masterPlaylistController_.fastQualityChange_();
      } else {
        // When leaving fullscreen, since the in page pixel dimensions should be smaller
        // than full screen, see if there should be a rendition switch down to preserve
        // bandwidth.
        _this.masterPlaylistController_.checkABR_();
      }
    });

    _this.on(_this.tech_, 'seeking', function () {
      if (this.ignoreNextSeekingEvent_) {
        this.ignoreNextSeekingEvent_ = false;
        return;
      }

      this.setCurrentTime(this.tech_.currentTime());
    });

    _this.on(_this.tech_, 'error', function () {
      // verify that the error was real and we are loaded
      // enough to have mpc loaded.
      if (this.tech_.error() && this.masterPlaylistController_) {
        this.masterPlaylistController_.pauseLoading();
      }
    });

    _this.on(_this.tech_, 'play', _this.play);

    return _this;
  }

  var _proto = VhsHandler.prototype;

  _proto.setOptions_ = function setOptions_() {
    var _this2 = this;

    // defaults
    this.options_.withCredentials = this.options_.withCredentials || false;
    this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
    this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
    this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
    this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
    this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
    this.options_.customTagParsers = this.options_.customTagParsers || [];
    this.options_.customTagMappers = this.options_.customTagMappers || [];
    this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;

    if (typeof this.options_.blacklistDuration !== 'number') {
      this.options_.blacklistDuration = 5 * 60;
    }

    if (typeof this.options_.bandwidth !== 'number') {
      if (this.options_.useBandwidthFromLocalStorage) {
        var storedObject = getVhsLocalStorage();

        if (storedObject && storedObject.bandwidth) {
          this.options_.bandwidth = storedObject.bandwidth;
          this.tech_.trigger({
            type: 'usage',
            name: 'vhs-bandwidth-from-local-storage'
          });
          this.tech_.trigger({
            type: 'usage',
            name: 'hls-bandwidth-from-local-storage'
          });
        }

        if (storedObject && storedObject.throughput) {
          this.options_.throughput = storedObject.throughput;
          this.tech_.trigger({
            type: 'usage',
            name: 'vhs-throughput-from-local-storage'
          });
          this.tech_.trigger({
            type: 'usage',
            name: 'hls-throughput-from-local-storage'
          });
        }
      }
    } // if bandwidth was not set by options or pulled from local storage, start playlist
    // selection at a reasonable bandwidth


    if (typeof this.options_.bandwidth !== 'number') {
      this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
    } // enableLowInitialPlaylist is only honored while the bandwidth is still at the
    // initial (unchanged) setting; an explicitly set bandwidth takes precedence over it


    this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src

    ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
      if (typeof _this2.source_[option] !== 'undefined') {
        _this2.options_[option] = _this2.source_[option];
      }
    });
    this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
    this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
  }
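  // Illustration (not part of the library): because of the source-level option
  // pass-through above, per-source settings can ride along on the object handed
  // to player.src (the URL is hypothetical):
  //
  //   player.src({
  //     src: 'https://example.com/master.m3u8',
  //     type: 'application/x-mpegURL',
  //     withCredentials: true,
  //     bandwidth: 4194304 // skip the stored/initial estimate for this source
  //   });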
  /**
   * called when player.src gets called, handle a new source
   *
   * @param {Object} src the source object to handle
   */
  ;

  _proto.src = function src(_src, type) {
    var _this3 = this;

    // do nothing if the src is falsey
    if (!_src) {
      return;
    }

    this.setOptions_(); // add master playlist controller options

    this.options_.src = expandDataUri(this.source_.src);
    this.options_.tech = this.tech_;
    this.options_.externVhs = Vhs;
    this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech

    this.options_.seekTo = function (time) {
      _this3.tech_.setCurrentTime(time);
    };

    if (this.options_.smoothQualityChange) {
      videojs__default["default"].log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
    }

    this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
    var playbackWatcherOptions = videojs__default["default"].mergeOptions({
      liveRangeSafeTimeDelta: SAFE_TIME_DELTA
    }, this.options_, {
      seekable: function seekable() {
        return _this3.seekable();
      },
      media: function media() {
        return _this3.masterPlaylistController_.media();
      },
      masterPlaylistController: this.masterPlaylistController_
    });
    this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
    this.masterPlaylistController_.on('error', function () {
      var player = videojs__default["default"].players[_this3.tech_.options_.playerId];
      var error = _this3.masterPlaylistController_.error;

      if (typeof error === 'object' && !error.code) {
        error.code = 3;
      } else if (typeof error === 'string') {
        error = {
          message: error,
          code: 3
        };
      }

      player.error(error);
    });
    var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
    // compatibility with < v2

    this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
    this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2

    this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
    this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
    // controller. Using a custom property for backwards compatibility
    // with < v2

    Object.defineProperties(this, {
      selectPlaylist: {
        get: function get() {
          return this.masterPlaylistController_.selectPlaylist;
        },
        set: function set(selectPlaylist) {
          this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
        }
      },
      throughput: {
        get: function get() {
          return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
        },
        set: function set(throughput) {
          this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
          // for the cumulative average

          this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
        }
      },
      bandwidth: {
        get: function get() {
          return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
        },
        set: function set(bandwidth) {
          this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
          // `count` is set to zero so that the current value of `rate` isn't included
          // in the cumulative average

          this.masterPlaylistController_.mainSegmentLoader_.throughput = {
            rate: 0,
            count: 0
          };
        }
      },

      /**
       * `systemBandwidth` is a combination of two serial processes' bit-rates. The first
       * is the network bitrate provided by `bandwidth` and the second is the bitrate of
       * the entire process after that - decryption, transmuxing, and appending - provided
       * by `throughput`.
       *
       * Since the two processes are serial, the overall system bandwidth is given by:
       * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
       */
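      // Illustration (not part of the library): with bandwidth = 4,000,000 bits/s
      // and throughput = 12,000,000 bits/s, the harmonic combination above gives
      //   1 / (1/4000000 + 1/12000000) = 3,000,000 bits/s,
      // i.e. the slower stage dominates; if throughput is 0 its term is dropped
      // and systemBandwidth simply equals bandwidth.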
      systemBandwidth: {
        get: function get() {
          var invBandwidth = 1 / (this.bandwidth || 1);
          var invThroughput;

          if (this.throughput > 0) {
            invThroughput = 1 / this.throughput;
          } else {
            invThroughput = 0;
          }

          var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
          return systemBitrate;
        },
        set: function set() {
          videojs__default["default"].log.error('The "systemBandwidth" property is read-only');
        }
      }
    });

    if (this.options_.bandwidth) {
      this.bandwidth = this.options_.bandwidth;
    }

    if (this.options_.throughput) {
      this.throughput = this.options_.throughput;
    }

    Object.defineProperties(this.stats, {
      bandwidth: {
        get: function get() {
          return _this3.bandwidth || 0;
        },
        enumerable: true
      },
      mediaRequests: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequests_() || 0;
        },
        enumerable: true
      },
      mediaRequestsAborted: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
        },
        enumerable: true
      },
      mediaRequestsTimedout: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
        },
        enumerable: true
      },
      mediaRequestsErrored: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
        },
        enumerable: true
      },
      mediaTransferDuration: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
        },
        enumerable: true
      },
      mediaBytesTransferred: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
        },
        enumerable: true
      },
      mediaSecondsLoaded: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
        },
        enumerable: true
      },
      mediaAppends: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaAppends_() || 0;
        },
        enumerable: true
      },
      mainAppendsToLoadedData: {
        get: function get() {
          return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
        },
        enumerable: true
      },
      audioAppendsToLoadedData: {
        get: function get() {
          return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
        },
        enumerable: true
      },
      appendsToLoadedData: {
        get: function get() {
          return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
        },
        enumerable: true
      },
      timeToLoadedData: {
        get: function get() {
          return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
        },
        enumerable: true
      },
      buffered: {
        get: function get() {
          return timeRangesToArray(_this3.tech_.buffered());
        },
        enumerable: true
      },
      currentTime: {
        get: function get() {
          return _this3.tech_.currentTime();
        },
        enumerable: true
      },
      currentSource: {
        get: function get() {
          return _this3.tech_.currentSource_;
        },
        enumerable: true
      },
      currentTech: {
        get: function get() {
          return _this3.tech_.name_;
        },
        enumerable: true
      },
      duration: {
        get: function get() {
          return _this3.tech_.duration();
        },
        enumerable: true
      },
      master: {
        get: function get() {
          return _this3.playlists.master;
        },
        enumerable: true
      },
      playerDimensions: {
        get: function get() {
          return _this3.tech_.currentDimensions();
        },
        enumerable: true
      },
      seekable: {
        get: function get() {
          return timeRangesToArray(_this3.tech_.seekable());
        },
        enumerable: true
      },
      timestamp: {
        get: function get() {
          return Date.now();
        },
        enumerable: true
      },
      videoPlaybackQuality: {
        get: function get() {
          return _this3.tech_.getVideoPlaybackQuality();
        },
        enumerable: true
      }
    });
    this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
    this.tech_.on('bandwidthupdate', function () {
      if (_this3.options_.useBandwidthFromLocalStorage) {
        updateVhsLocalStorage({
          bandwidth: _this3.bandwidth,
          throughput: Math.round(_this3.throughput)
        });
      }
    });
    this.masterPlaylistController_.on('selectedinitialmedia', function () {
      // Add the manual rendition mix-in to VhsHandler
      renditionSelectionMixin(_this3);
    });
    this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
      _this3.setupEme_();
    }); // the bandwidth of the primary segment loader is our best
    // estimate of overall bandwidth

    this.on(this.masterPlaylistController_, 'progress', function () {
      this.tech_.trigger('progress');
    }); // In the live case, we need to ignore the very first `seeking` event since
    // that will be the result of the seek-to-live behavior

    this.on(this.masterPlaylistController_, 'firstplay', function () {
      this.ignoreNextSeekingEvent_ = true;
    });
    this.setupQualityLevels_(); // do nothing if the tech has been disposed already
    // this can occur if someone sets the src in player.ready(), for instance

    if (!this.tech_.el()) {
      return;
    }

    this.mediaSourceUrl_ = window.URL.createObjectURL(this.masterPlaylistController_.mediaSource);
    this.tech_.src(this.mediaSourceUrl_);
  }
  /**
   * If necessary and EME is available, sets up EME options and waits for key session
   * creation.
   *
   * This function also updates the source updater so that it can be used, as for some
   * browsers, EME must be configured before content is appended (if appending unencrypted
   * content before encrypted content).
   */
31234 ;
31235
31236 _proto.setupEme_ = function setupEme_() {
31237 var _this4 = this;
31238
31239 var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
31240 var didSetupEmeOptions = setupEmeOptions({
31241 player: this.player_,
31242 sourceKeySystems: this.source_.keySystems,
31243 media: this.playlists.media(),
31244 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
31245 });
31246 this.player_.tech_.on('keystatuschange', function (e) {
31247 if (e.status === 'output-restricted') {
31248 _this4.masterPlaylistController_.blacklistCurrentPlaylist({
31249 playlist: _this4.masterPlaylistController_.media(),
31250 message: "DRM keystatus changed to " + e.status + ". Playlist will fail to play. Check for HDCP content.",
31251 blacklistDuration: Infinity
31252 });
31253 }
31254 }); // In IE11 this is too early to initialize media keys, and IE11 does not support
31255 // promises.
31256
31257 if (videojs__default["default"].browser.IE_VERSION === 11 || !didSetupEmeOptions) {
31258 // If EME options were not set up, we've done all we could to initialize EME.
31259 this.masterPlaylistController_.sourceUpdater_.initializedEme();
31260 return;
31261 }
31262
31263 this.logger_('waiting for EME key session creation');
31264 waitForKeySessionCreation({
31265 player: this.player_,
31266 sourceKeySystems: this.source_.keySystems,
31267 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
31268 mainPlaylists: this.playlists.master.playlists
31269 }).then(function () {
31270 _this4.logger_('created EME key session');
31271
31272 _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
31273 }).catch(function (err) {
31274 _this4.logger_('error while creating EME key session', err);
31275
31276 _this4.player_.error({
31277 message: 'Failed to initialize media keys for EME',
31278 code: 3
31279 });
31280 });
31281 }
31282 /**
31283 * Initializes the quality levels and sets listeners to update them.
31284 *
31285 * @method setupQualityLevels_
31286 * @private
31287 */
31288 ;
31289
31290 _proto.setupQualityLevels_ = function setupQualityLevels_() {
31291 var _this5 = this;
31292
31293 var player = videojs__default["default"].players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
31294 // or qualityLevels_ listeners have already been setup, do nothing.
31295
31296 if (!player || !player.qualityLevels || this.qualityLevels_) {
31297 return;
31298 }
31299
31300 this.qualityLevels_ = player.qualityLevels();
31301 this.masterPlaylistController_.on('selectedinitialmedia', function () {
31302 handleVhsLoadedMetadata(_this5.qualityLevels_, _this5);
31303 });
31304 this.playlists.on('mediachange', function () {
31305 handleVhsMediaChange(_this5.qualityLevels_, _this5.playlists);
31306 });
31307 }
31308 /**
31309 * return the version
31310 */
31311 ;
31312
31313 VhsHandler.version = function version$5() {
31314 return {
31315 '@videojs/http-streaming': version$4,
31316 'mux.js': version$3,
31317 'mpd-parser': version$2,
31318 'm3u8-parser': version$1,
31319 'aes-decrypter': version
31320 };
31321 }
31322 /**
31323 * return the version
31324 */
31325 ;
31326
31327 _proto.version = function version() {
31328 return this.constructor.version();
31329 };
31330
31331 _proto.canChangeType = function canChangeType() {
31332 return SourceUpdater.canChangeType();
31333 }
31334 /**
31335 * Begin playing the video.
31336 */
31337 ;
31338
31339 _proto.play = function play() {
31340 this.masterPlaylistController_.play();
31341 }
31342 /**
31343 * a wrapper around the function in MasterPlaylistController
31344 */
31345 ;
31346
31347 _proto.setCurrentTime = function setCurrentTime(currentTime) {
31348 this.masterPlaylistController_.setCurrentTime(currentTime);
31349 }
31350 /**
31351 * a wrapper around the function in MasterPlaylistController
31352 */
31353 ;
31354
31355 _proto.duration = function duration() {
31356 return this.masterPlaylistController_.duration();
31357 }
31358 /**
31359 * a wrapper around the function in MasterPlaylistController
31360 */
31361 ;
31362
31363 _proto.seekable = function seekable() {
31364 return this.masterPlaylistController_.seekable();
31365 }
31366 /**
31367 * Abort all outstanding work and cleanup.
31368 */
31369 ;
31370
31371 _proto.dispose = function dispose() {
31372 if (this.playbackWatcher_) {
31373 this.playbackWatcher_.dispose();
31374 }
31375
31376 if (this.masterPlaylistController_) {
31377 this.masterPlaylistController_.dispose();
31378 }
31379
31380 if (this.qualityLevels_) {
31381 this.qualityLevels_.dispose();
31382 }
31383
31384 if (this.player_) {
31385 delete this.player_.vhs;
31386 delete this.player_.dash;
31387 delete this.player_.hls;
31388 }
31389
31390 if (this.tech_ && this.tech_.vhs) {
31391 delete this.tech_.vhs;
31392 } // don't check this.tech_.hls as it will log a deprecated warning
31393
31394
31395 if (this.tech_) {
31396 delete this.tech_.hls;
31397 }
31398
31399 if (this.mediaSourceUrl_ && window.URL.revokeObjectURL) {
31400 window.URL.revokeObjectURL(this.mediaSourceUrl_);
31401 this.mediaSourceUrl_ = null;
31402 }
31403
31404 _Component.prototype.dispose.call(this);
31405 };

    _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
      return getProgramTime({
        playlist: this.masterPlaylistController_.media(),
        time: time,
        callback: callback
      });
    } // the player must be playing before calling this
    ;

    _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
      if (pauseAfterSeek === void 0) {
        pauseAfterSeek = true;
      }

      if (retryCount === void 0) {
        retryCount = 2;
      }

      return seekToProgramTime({
        programTime: programTime,
        playlist: this.masterPlaylistController_.media(),
        retryCount: retryCount,
        pauseAfterSeek: pauseAfterSeek,
        seekTo: this.options_.seekTo,
        tech: this.options_.tech,
        callback: callback
      });
    };
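    /*
     * A usage sketch for the two program-time helpers above, assuming a
     * playlist that carries EXT-X-PROGRAM-DATE-TIME tags and playback that
     * has already begun (the ISO timestamp is illustrative):
     *
     *   var vhs = player.tech().vhs;
     *
     *   vhs.convertToProgramTime(3, function (err, programTime) {
     *     if (!err) {
     *       videojs.log(programTime.mediaSeconds, programTime.programDateTime);
     *     }
     *   });
     *
     *   vhs.seekToProgramTime('2018-04-30T00:15:00.000Z', function (err, newTime) {
     *     if (!err) {
     *       videojs.log('seeked to media time', newTime);
     *     }
     *   });
     */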

    return VhsHandler;
  }(Component);
  /**
   * The Source Handler object, which informs video.js what additional
   * MIME types are supported and sets up playback. It is registered
   * automatically to the appropriate tech based on the capabilities of
   * the browser it is running in. It is not necessary to use or modify
   * this object in normal usage.
   */


  var VhsSourceHandler = {
    name: 'videojs-http-streaming',
    VERSION: version$4,
    canHandleSource: function canHandleSource(srcObj, options) {
      if (options === void 0) {
        options = {};
      }

      var localOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
      return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
    },
    handleSource: function handleSource(source, tech, options) {
      if (options === void 0) {
        options = {};
      }

      var localOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
      tech.vhs = new VhsHandler(source, tech, localOptions);

      if (!videojs__default["default"].hasOwnProperty('hls')) {
        Object.defineProperty(tech, 'hls', {
          get: function get() {
            videojs__default["default"].log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
            return tech.vhs;
          },
          configurable: true
        });
      }

      tech.vhs.xhr = xhrFactory();
      tech.vhs.src(source.src, source.type);
      return tech.vhs;
    },
    canPlayType: function canPlayType(type, options) {
      if (options === void 0) {
        options = {};
      }

      var _videojs$mergeOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options),
          _videojs$mergeOptions2 = _videojs$mergeOptions.vhs.overrideNative,
          overrideNative = _videojs$mergeOptions2 === void 0 ? !videojs__default["default"].browser.IS_ANY_SAFARI : _videojs$mergeOptions2;

      var supportedType = simpleTypeFromSourceType(type);
      var canUseMsePlayback = supportedType && (!Vhs.supportsTypeNatively(supportedType) || overrideNative);
      return canUseMsePlayback ? 'maybe' : '';
    }
  };
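  /*
   * A sketch of the handler's type negotiation and of the overrideNative
   * option it consults; return values depend on the browser, and "my-player"
   * is a hypothetical element id:
   *
   *   VhsSourceHandler.canPlayType('application/x-mpegURL'); // 'maybe' or ''
   *   VhsSourceHandler.canPlayType('application/dash+xml');  // 'maybe' or ''
   *
   *   // overrideNative defaults to true everywhere except Safari; to prefer
   *   // MSE playback even where HLS plays natively:
   *   var player = videojs('my-player', {
   *     html5: {
   *       vhs: { overrideNative: true },
   *       nativeAudioTracks: false,
   *       nativeVideoTracks: false
   *     }
   *   });
   */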
  /**
   * Check to see if the native MediaSource object exists and supports
   * an MP4 container with both H.264 video and AAC-LC audio.
   *
   * @return {boolean} true if MSE playback of H.264/AAC-LC in MP4 is supported
   */

  var supportsNativeMediaSources = function supportsNativeMediaSources() {
    return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
  }; // register source handlers with the appropriate techs
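  /*
   * Roughly the native equivalent of the helper above (a sketch; it assumes
   * the bundled browserSupportsCodec helper wraps a check like this):
   *
   *   window.MediaSource &&
   *     window.MediaSource.isTypeSupported('video/mp4;codecs="avc1.4d400d,mp4a.40.2"');
   */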


  if (supportsNativeMediaSources()) {
    videojs__default["default"].getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
  }

  videojs__default["default"].VhsHandler = VhsHandler;
  Object.defineProperty(videojs__default["default"], 'HlsHandler', {
    get: function get() {
      videojs__default["default"].log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
      return VhsHandler;
    },
    configurable: true
  });
  videojs__default["default"].VhsSourceHandler = VhsSourceHandler;
  Object.defineProperty(videojs__default["default"], 'HlsSourceHandler', {
    get: function get() {
      videojs__default["default"].log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
      return VhsSourceHandler;
    },
    configurable: true
  });
  videojs__default["default"].Vhs = Vhs;
  Object.defineProperty(videojs__default["default"], 'Hls', {
    get: function get() {
      videojs__default["default"].log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
      return Vhs;
    },
    configurable: true
  });
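  /*
   * The Hls* getters above keep old consumer code working while logging a
   * warning; a sketch of what such code observes:
   *
   *   videojs.Hls === videojs.Vhs;               // true, plus a deprecation warning
   *   videojs.HlsHandler === videojs.VhsHandler; // true, plus a deprecation warning
   */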

  if (!videojs__default["default"].use) {
    videojs__default["default"].registerComponent('Hls', Vhs);
    videojs__default["default"].registerComponent('Vhs', Vhs);
  }

  videojs__default["default"].options.vhs = videojs__default["default"].options.vhs || {};
  videojs__default["default"].options.hls = videojs__default["default"].options.hls || {};

  if (!videojs__default["default"].getPlugin || !videojs__default["default"].getPlugin('reloadSourceOnError')) {
    var registerPlugin = videojs__default["default"].registerPlugin || videojs__default["default"].plugin;
    registerPlugin('reloadSourceOnError', reloadSourceOnError);
  }
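  /*
   * A usage sketch for the reloadSourceOnError plugin registered above; the
   * option values are illustrative (errorInterval is in seconds):
   *
   *   player.reloadSourceOnError({
   *     errorInterval: 10,
   *     getSource: function (reload) {
   *       reload({ src: player.currentSrc(), type: player.currentType() });
   *     }
   *   });
   */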

  exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
  exports.Vhs = Vhs;
  exports.VhsHandler = VhsHandler;
  exports.VhsSourceHandler = VhsSourceHandler;
  exports.emeKeySystems = emeKeySystems;
  exports.expandDataUri = expandDataUri;
  exports.getAllPsshKeySystemsOptions = getAllPsshKeySystemsOptions;
  exports.setupEmeOptions = setupEmeOptions;
  exports.simpleTypeFromSourceType = simpleTypeFromSourceType;
  exports.waitForKeySessionCreation = waitForKeySessionCreation;

  Object.defineProperty(exports, '__esModule', { value: true });

}));