/*! @name @videojs/http-streaming @version 3.10.0 @license Apache-2.0 */
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('video.js'), require('@xmldom/xmldom')) :
  typeof define === 'function' && define.amd ? define(['exports', 'video.js', '@xmldom/xmldom'], factory) :
  (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.httpStreaming = {}, global.videojs, global.window));
})(this, (function (exports, videojs, xmldom) { 'use strict';

  function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

  var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);

  function _extends() {
    _extends = Object.assign || function (target) {
      for (var i = 1; i < arguments.length; i++) {
        var source = arguments[i];

        for (var key in source) {
          if (Object.prototype.hasOwnProperty.call(source, key)) {
            target[key] = source[key];
          }
        }
      }

      return target;
    };

    return _extends.apply(this, arguments);
  }

  var urlToolkit = {exports: {}};

  (function (module, exports) {
    // see https://tools.ietf.org/html/rfc1808
    (function (root) {
      var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#[^]*)?$/;
      var FIRST_SEGMENT_REGEX = /^([^\/?#]*)([^]*)$/;
      var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
      var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
      var URLToolkit = {
        // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
        // E.g.
        // With opts.alwaysNormalize = false (default, spec compliant)
        // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
        // With opts.alwaysNormalize = true (not spec compliant)
        // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
        buildAbsoluteURL: function (baseURL, relativeURL, opts) {
          opts = opts || {}; // remove any remaining space and CRLF

          baseURL = baseURL.trim();
          relativeURL = relativeURL.trim();

          if (!relativeURL) {
            // 2a) If the embedded URL is entirely empty, it inherits the
            // entire base URL (i.e., is set equal to the base URL)
            // and we are done.
            if (!opts.alwaysNormalize) {
              return baseURL;
            }

            var basePartsForNormalise = URLToolkit.parseURL(baseURL);

            if (!basePartsForNormalise) {
              throw new Error('Error trying to parse base URL.');
            }

            basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
            return URLToolkit.buildURLFromParts(basePartsForNormalise);
          }

          var relativeParts = URLToolkit.parseURL(relativeURL);

          if (!relativeParts) {
            throw new Error('Error trying to parse relative URL.');
          }

          if (relativeParts.scheme) {
            // 2b) If the embedded URL starts with a scheme name, it is
            // interpreted as an absolute URL and we are done.
            if (!opts.alwaysNormalize) {
              return relativeURL;
            }

            relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
            return URLToolkit.buildURLFromParts(relativeParts);
          }

          var baseParts = URLToolkit.parseURL(baseURL);

          if (!baseParts) {
            throw new Error('Error trying to parse base URL.');
          }

          if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
            // If netLoc is missing and the path doesn't start with '/', assume everything before the first '/' is the netLoc
            // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
            var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
            baseParts.netLoc = pathParts[1];
            baseParts.path = pathParts[2];
          }

          if (baseParts.netLoc && !baseParts.path) {
            baseParts.path = '/';
          }

          var builtParts = {
            // 2c) Otherwise, the embedded URL inherits the scheme of
            // the base URL.
            scheme: baseParts.scheme,
            netLoc: relativeParts.netLoc,
            path: null,
            params: relativeParts.params,
            query: relativeParts.query,
            fragment: relativeParts.fragment
          };

          if (!relativeParts.netLoc) {
            // 3) If the embedded URL's <net_loc> is non-empty, we skip to
            // Step 7. Otherwise, the embedded URL inherits the <net_loc>
            // (if any) of the base URL.
            builtParts.netLoc = baseParts.netLoc; // 4) If the embedded URL path is preceded by a slash "/", the
            // path is not relative and we skip to Step 7.

            if (relativeParts.path[0] !== '/') {
              if (!relativeParts.path) {
                // 5) If the embedded URL path is empty (and not preceded by a
                // slash), then the embedded URL inherits the base URL path
                builtParts.path = baseParts.path; // 5a) if the embedded URL's <params> is non-empty, we skip to
                // step 7; otherwise, it inherits the <params> of the base
                // URL (if any) and

                if (!relativeParts.params) {
                  builtParts.params = baseParts.params; // 5b) if the embedded URL's <query> is non-empty, we skip to
                  // step 7; otherwise, it inherits the <query> of the base
                  // URL (if any) and we skip to step 7.

                  if (!relativeParts.query) {
                    builtParts.query = baseParts.query;
                  }
                }
              } else {
                // 6) The last segment of the base URL's path (anything
                // following the rightmost slash "/", or the entire path if no
                // slash is present) is removed and the embedded URL's path is
                // appended in its place.
                var baseURLPath = baseParts.path;
                var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
                builtParts.path = URLToolkit.normalizePath(newPath);
              }
            }
          }

          if (builtParts.path === null) {
            builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
          }

          return URLToolkit.buildURLFromParts(builtParts);
        },
        parseURL: function (url) {
          var parts = URL_REGEX.exec(url);

          if (!parts) {
            return null;
          }

          return {
            scheme: parts[1] || '',
            netLoc: parts[2] || '',
            path: parts[3] || '',
            params: parts[4] || '',
            query: parts[5] || '',
            fragment: parts[6] || ''
          };
        },
        normalizePath: function (path) {
          // The following operations are
          // then applied, in order, to the new path:
          // 6a) All occurrences of "./", where "." is a complete path
          // segment, are removed.
          // 6b) If the path ends with "." as a complete path segment,
          // that "." is removed.
          path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, ''); // 6c) All occurrences of "<segment>/../", where <segment> is a
          // complete path segment not equal to "..", are removed.
          // Removal of these path segments is performed iteratively,
          // removing the leftmost matching pattern on each iteration,
          // until no matching pattern remains.
          // 6d) If the path ends with "<segment>/..", where <segment> is a
          // complete path segment not equal to "..", that
          // "<segment>/.." is removed.

          while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {}

          return path.split('').reverse().join('');
        },
        buildURLFromParts: function (parts) {
          return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
        }
      };
      module.exports = URLToolkit;
    })();
  })(urlToolkit);

  var URLToolkit = urlToolkit.exports;

  var DEFAULT_LOCATION = 'http://example.com';

  var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
    // return early if we don't need to resolve
    if (/^[a-z]+:/i.test(relativeUrl)) {
      return relativeUrl;
    } // if baseUrl is a data URI, ignore it and resolve everything relative to window.location


    if (/^data:/.test(baseUrl)) {
      baseUrl = window.location && window.location.href || '';
    } // IE11 supports URL but not the URL constructor
    // feature detect the behavior we want


    var nativeURL = typeof window.URL === 'function';
    var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
    // and if baseUrl isn't an absolute url

    var removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location

    if (nativeURL) {
      baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
    } else if (!/\/\//i.test(baseUrl)) {
      baseUrl = URLToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
    }

    if (nativeURL) {
      var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
      // and if we're location-less, remove the location
      // otherwise, return the url unmodified

      if (removeLocation) {
        return newUrl.href.slice(DEFAULT_LOCATION.length);
      } else if (protocolLess) {
        return newUrl.href.slice(newUrl.protocol.length);
      }

      return newUrl.href;
    }

    return URLToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
  };
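
  // Editorial usage sketch (not part of the upstream bundle): how resolveUrl$1
  // turns relative manifest entries into absolute request URLs. The URLs below
  // are hypothetical examples; the function is defined but never invoked here.
  function exampleResolveUrl() {
    var playlist = 'https://example.com/hls/main.m3u8';
    // a relative URI resolves against the playlist's directory
    var relative = resolveUrl$1(playlist, 'media/segment-0.ts'); // 'https://example.com/hls/media/segment-0.ts'
    // an absolute URI short-circuits the /^[a-z]+:/i test and is returned as-is
    var absolute = resolveUrl$1(playlist, 'https://cdn.example.com/seg.ts');
    return [relative, absolute];
  }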

  /**
   * @file resolve-url.js - Handling how URLs are resolved and manipulated
   */
  const resolveUrl = resolveUrl$1;
  /**
   * If the xhr request was redirected, return the responseURL, otherwise,
   * return the original url.
   *
   * @api private
   *
   * @param {string} url - a URL being requested
   * @param {XMLHttpRequest} req - xhr request result
   *
   * @return {string}
   */

  const resolveManifestRedirect = (url, req) => {
    // To understand how the responseURL below is set and generated:
    // - https://fetch.spec.whatwg.org/#concept-response-url
    // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
    if (req && req.responseURL && url !== req.responseURL) {
      return req.responseURL;
    }

    return url;
  };

  const logger = source => {
    if (videojs__default["default"].log.debug) {
      return videojs__default["default"].log.debug.bind(videojs__default["default"], 'VHS:', `${source} >`);
    }

    return function () {};
  };
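
  // Editorial usage sketch (not part of the upstream bundle): logger() returns
  // a namespaced debug logger when videojs debug logging is available, and a
  // no-op otherwise, so call sites never need to guard. The source name below
  // is hypothetical.
  function exampleLogger() {
    var log = logger('PlaylistLoader');
    // when debug logging is enabled, prints something like:
    // "VHS: PlaylistLoader > manifest request started"
    log('manifest request started');
  }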

  /**
   * @file stream.js
   */

  /**
   * A lightweight readable stream implementation that handles event dispatching.
   *
   * @class Stream
   */
  var Stream = /*#__PURE__*/function () {
    function Stream() {
      this.listeners = {};
    }
    /**
     * Add a listener for a specified event type.
     *
     * @param {string} type the event name
     * @param {Function} listener the callback to be invoked when an event of
     * the specified type occurs
     */


    var _proto = Stream.prototype;

    _proto.on = function on(type, listener) {
      if (!this.listeners[type]) {
        this.listeners[type] = [];
      }

      this.listeners[type].push(listener);
    }
    /**
     * Remove a listener for a specified event type.
     *
     * @param {string} type the event name
     * @param {Function} listener a function previously registered for this
     * type of event through `on`
     * @return {boolean} if we could turn it off or not
     */
    ;

    _proto.off = function off(type, listener) {
      if (!this.listeners[type]) {
        return false;
      }

      var index = this.listeners[type].indexOf(listener); // TODO: which is better?
      // In Video.js we slice listener functions
      // on trigger so that it does not mess up the order
      // while we loop through.
      //
      // Here we slice on off so that the loop in trigger
      // can continue using its old reference to loop without
      // messing up the order.

      this.listeners[type] = this.listeners[type].slice(0);
      this.listeners[type].splice(index, 1);
      return index > -1;
    }
    /**
     * Trigger an event of the specified type on this stream. Any additional
     * arguments to this function are passed as parameters to event listeners.
     *
     * @param {string} type the event name
     */
    ;

    _proto.trigger = function trigger(type) {
      var callbacks = this.listeners[type];

      if (!callbacks) {
        return;
      } // Slicing the arguments on every invocation of this method
      // can add a significant amount of overhead. Avoid the
      // intermediate object creation for the common case of a
      // single callback argument


      if (arguments.length === 2) {
        var length = callbacks.length;

        for (var i = 0; i < length; ++i) {
          callbacks[i].call(this, arguments[1]);
        }
      } else {
        var args = Array.prototype.slice.call(arguments, 1);
        var _length = callbacks.length;

        for (var _i = 0; _i < _length; ++_i) {
          callbacks[_i].apply(this, args);
        }
      }
    }
    /**
     * Destroys the stream and cleans up.
     */
    ;

    _proto.dispose = function dispose() {
      this.listeners = {};
    }
    /**
     * Forwards all `data` events on this stream to the destination stream. The
     * destination stream should provide a method `push` to receive the data
     * events as they arrive.
     *
     * @param {Stream} destination the stream that will receive all `data` events
     * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
     */
    ;

    _proto.pipe = function pipe(destination) {
      this.on('data', function (data) {
        destination.push(data);
      });
    };

    return Stream;
  }();
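
  // Editorial usage sketch (not part of the upstream bundle): the Stream event
  // API in miniature. Subscribe with on(), emit with trigger(), and forward
  // 'data' events to any object that has a push() method via pipe().
  function exampleStream() {
    var source = new Stream();
    var received = [];
    source.on('data', function (chunk) {
      received.push(chunk);
    });
    // pipe() only requires a `push` method on the destination
    source.pipe({
      push: function (chunk) {
        // every 'data' event is forwarded here as well
      }
    });
    source.trigger('data', 'hello');
    return received; // ['hello']
  }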

  var atob = function atob(s) {
    return window.atob ? window.atob(s) : Buffer.from(s, 'base64').toString('binary');
  };

  function decodeB64ToUint8Array(b64Text) {
    var decodedString = atob(b64Text);
    var array = new Uint8Array(decodedString.length);

    for (var i = 0; i < decodedString.length; i++) {
      array[i] = decodedString.charCodeAt(i);
    }

    return array;
  }
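
  // Editorial usage sketch (not part of the upstream bundle):
  // decodeB64ToUint8Array turns base64 text into raw bytes; the parser later
  // uses it to extract the Widevine PSSH box from a data: URI key.
  function exampleDecodeB64() {
    return decodeB64ToUint8Array('AQID'); // base64 for 0x01 0x02 0x03 -> Uint8Array [1, 2, 3]
  }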

  /*! @name m3u8-parser @version 7.1.0 @license Apache-2.0 */
  /**
   * @file m3u8/line-stream.js
   */

  /**
   * A stream that buffers string input and generates a `data` event for each
   * line.
   *
   * @class LineStream
   * @extends Stream
   */

  class LineStream extends Stream {
    constructor() {
      super();
      this.buffer = '';
    }
    /**
     * Add new data to be parsed.
     *
     * @param {string} data the text to process
     */


    push(data) {
      let nextNewline;
      this.buffer += data;
      nextNewline = this.buffer.indexOf('\n');

      for (; nextNewline > -1; nextNewline = this.buffer.indexOf('\n')) {
        this.trigger('data', this.buffer.substring(0, nextNewline));
        this.buffer = this.buffer.substring(nextNewline + 1);
      }
    }

  }
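
  // Editorial usage sketch (not part of the upstream bundle): LineStream
  // re-chunks arbitrary string input into one 'data' event per complete line;
  // a trailing partial line stays buffered until more input (or a final '\n')
  // arrives.
  function exampleLineStream() {
    var lines = [];
    var lineStream = new LineStream();
    lineStream.on('data', function (line) {
      lines.push(line);
    });
    lineStream.push('#EXTM3U\n#EXT-X-VER'); // emits '#EXTM3U', buffers the rest
    lineStream.push('SION:4\n');            // completes and emits '#EXT-X-VERSION:4'
    return lines; // ['#EXTM3U', '#EXT-X-VERSION:4']
  }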

  const TAB = String.fromCharCode(0x09);

  const parseByterange = function (byterangeString) {
    // optionally match and capture 0+ digits before `@`
    // optionally match and capture 0+ digits after `@`
    const match = /([0-9.]*)?@?([0-9.]*)?/.exec(byterangeString || '');
    const result = {};

    if (match[1]) {
      result.length = parseInt(match[1], 10);
    }

    if (match[2]) {
      result.offset = parseInt(match[2], 10);
    }

    return result;
  };
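
  // Editorial usage sketch (not part of the upstream bundle): parseByterange
  // handles the "<length>[@<offset>]" form used by EXT-X-BYTERANGE and the
  // BYTERANGE attribute.
  function exampleParseByterange() {
    return [
      parseByterange('522828@0'), // { length: 522828, offset: 0 }
      parseByterange('587500')    // { length: 587500 } (offset omitted)
    ];
  }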
  /**
   * "forgiving" attribute list pseudo-grammar:
   * attributes -> keyvalue (',' keyvalue)*
   * keyvalue -> key '=' value
   * key -> [^=]*
   * value -> '"' [^"]* '"' | [^,]*
   */


  const attributeSeparator = function () {
    const key = '[^=]*';
    const value = '"[^"]*"|[^,]*';
    const keyvalue = '(?:' + key + ')=(?:' + value + ')';
    return new RegExp('(?:^|,)(' + keyvalue + ')');
  };
  /**
   * Parse attributes from a line given the separator
   *
   * @param {string} attributes the attribute line to parse
   */


  const parseAttributes$1 = function (attributes) {
    const result = {};

    if (!attributes) {
      return result;
    } // split the string using attributes as the separator


    const attrs = attributes.split(attributeSeparator());
    let i = attrs.length;
    let attr;

    while (i--) {
      // filter out unmatched portions of the string
      if (attrs[i] === '') {
        continue;
      } // split the key and value


      attr = /([^=]*)=(.*)/.exec(attrs[i]).slice(1); // trim whitespace and remove optional quotes around the value

      attr[0] = attr[0].replace(/^\s+|\s+$/g, '');
      attr[1] = attr[1].replace(/^\s+|\s+$/g, '');
      attr[1] = attr[1].replace(/^['"](.*)['"]$/g, '$1');
      result[attr[0]] = attr[1];
    }

    return result;
  };
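
  // Editorial usage sketch (not part of the upstream bundle): parseAttributes$1
  // splits an M3U8 attribute list into a key/value map, keeping commas inside
  // quoted values intact and stripping the optional quotes; every value comes
  // back as a string.
  function exampleParseAttributes() {
    return parseAttributes$1('BANDWIDTH=2560000,RESOLUTION=1280x720,CODECS="avc1.4d401f,mp4a.40.2"');
    // => { BANDWIDTH: '2560000', RESOLUTION: '1280x720', CODECS: 'avc1.4d401f,mp4a.40.2' }
  }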
  /**
   * A line-level M3U8 parser event stream. It expects to receive input one
   * line at a time and performs a context-free parse of its contents. A stream
   * interpretation of a manifest can be useful if the manifest is expected to
   * be too large to fit comfortably into memory or the entirety of the input
   * is not immediately available. Otherwise, it's probably much easier to work
   * with a regular `Parser` object.
   *
   * Produces `data` events with an object that captures the parser's
   * interpretation of the input. That object has a property `type` that is one
   * of `uri`, `comment`, or `tag`. URIs only have a single additional
   * property, `uri`, which captures the entirety of the input without
   * interpretation. Comments similarly have a single additional property
   * `text` which is the input without the leading `#`.
   *
   * Tags always have a property `tagType` which is the lower-cased version of
   * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
   * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
   * tags are emitted as generic tags with a single additional property `data`
   * containing the remainder of the input.
   *
   * @class ParseStream
   * @extends Stream
   */


  class ParseStream extends Stream {
    constructor() {
      super();
      this.customParsers = [];
      this.tagMappers = [];
    }
    /**
     * Parses an additional line of input.
     *
     * @param {string} line a single line of an M3U8 file to parse
     */


    push(line) {
      let match;
      let event; // strip whitespace

      line = line.trim();

      if (line.length === 0) {
        // ignore empty lines
        return;
      } // URIs


      if (line[0] !== '#') {
        this.trigger('data', {
          type: 'uri',
          uri: line
        });
        return;
      } // map tags


      const newLines = this.tagMappers.reduce((acc, mapper) => {
        const mappedLine = mapper(line); // skip if unchanged

        if (mappedLine === line) {
          return acc;
        }

        return acc.concat([mappedLine]);
      }, [line]);
      newLines.forEach(newLine => {
        for (let i = 0; i < this.customParsers.length; i++) {
          if (this.customParsers[i].call(this, newLine)) {
            return;
          }
        } // Comments


        if (newLine.indexOf('#EXT') !== 0) {
          this.trigger('data', {
            type: 'comment',
            text: newLine.slice(1)
          });
          return;
        } // strip off any carriage returns here so the regex matching
        // doesn't have to account for them.


        newLine = newLine.replace('\r', ''); // Tags

        match = /^#EXTM3U/.exec(newLine);

        if (match) {
          this.trigger('data', {
            type: 'tag',
            tagType: 'm3u'
          });
          return;
        }

        match = /^#EXTINF:([0-9\.]*)?,?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'inf'
          };

          if (match[1]) {
            event.duration = parseFloat(match[1]);
          }

          if (match[2]) {
            event.title = match[2];
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-TARGETDURATION:([0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'targetduration'
          };

          if (match[1]) {
            event.duration = parseInt(match[1], 10);
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-VERSION:([0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'version'
          };

          if (match[1]) {
            event.version = parseInt(match[1], 10);
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-MEDIA-SEQUENCE:(\-?[0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'media-sequence'
          };

          if (match[1]) {
            event.number = parseInt(match[1], 10);
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-DISCONTINUITY-SEQUENCE:(\-?[0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'discontinuity-sequence'
          };

          if (match[1]) {
            event.number = parseInt(match[1], 10);
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-PLAYLIST-TYPE:(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'playlist-type'
          };

          if (match[1]) {
            event.playlistType = match[1];
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-BYTERANGE:(.*)?$/.exec(newLine);

        if (match) {
          event = _extends(parseByterange(match[1]), {
            type: 'tag',
            tagType: 'byterange'
          });
          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-ALLOW-CACHE:(YES|NO)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'allow-cache'
          };

          if (match[1]) {
            event.allowed = !/NO/.test(match[1]);
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-MAP:(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'map'
          };

          if (match[1]) {
            const attributes = parseAttributes$1(match[1]);

            if (attributes.URI) {
              event.uri = attributes.URI;
            }

            if (attributes.BYTERANGE) {
              event.byterange = parseByterange(attributes.BYTERANGE);
            }
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-STREAM-INF:(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'stream-inf'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);

            if (event.attributes.RESOLUTION) {
              const split = event.attributes.RESOLUTION.split('x');
              const resolution = {};

              if (split[0]) {
                resolution.width = parseInt(split[0], 10);
              }

              if (split[1]) {
                resolution.height = parseInt(split[1], 10);
              }

              event.attributes.RESOLUTION = resolution;
            }

            if (event.attributes.BANDWIDTH) {
              event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
            }

            if (event.attributes['FRAME-RATE']) {
              event.attributes['FRAME-RATE'] = parseFloat(event.attributes['FRAME-RATE']);
            }

            if (event.attributes['PROGRAM-ID']) {
              event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
            }
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-MEDIA:(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'media'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-ENDLIST/.exec(newLine);

        if (match) {
          this.trigger('data', {
            type: 'tag',
            tagType: 'endlist'
          });
          return;
        }

        match = /^#EXT-X-DISCONTINUITY/.exec(newLine);

        if (match) {
          this.trigger('data', {
            type: 'tag',
            tagType: 'discontinuity'
          });
          return;
        }

        match = /^#EXT-X-PROGRAM-DATE-TIME:(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'program-date-time'
          };

          if (match[1]) {
            event.dateTimeString = match[1];
            event.dateTimeObject = new Date(match[1]);
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-KEY:(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'key'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]); // parse the IV string into a Uint32Array

            if (event.attributes.IV) {
              if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
                event.attributes.IV = event.attributes.IV.substring(2);
              }

              event.attributes.IV = event.attributes.IV.match(/.{8}/g);
              event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
              event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
              event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
              event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
              event.attributes.IV = new Uint32Array(event.attributes.IV);
            }
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-START:(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'start'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);
            event.attributes['TIME-OFFSET'] = parseFloat(event.attributes['TIME-OFFSET']);
            event.attributes.PRECISE = /YES/.test(event.attributes.PRECISE);
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-CUE-OUT-CONT:(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-out-cont'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-CUE-OUT:(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-out'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-CUE-IN:(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-in'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-SKIP:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'skip'
          };
          event.attributes = parseAttributes$1(match[1]);

          if (event.attributes.hasOwnProperty('SKIPPED-SEGMENTS')) {
            event.attributes['SKIPPED-SEGMENTS'] = parseInt(event.attributes['SKIPPED-SEGMENTS'], 10);
          }

          if (event.attributes.hasOwnProperty('RECENTLY-REMOVED-DATERANGES')) {
            event.attributes['RECENTLY-REMOVED-DATERANGES'] = event.attributes['RECENTLY-REMOVED-DATERANGES'].split(TAB);
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-PART:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'part'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['DURATION'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });
          ['INDEPENDENT', 'GAP'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = /YES/.test(event.attributes[key]);
            }
          });

          if (event.attributes.hasOwnProperty('BYTERANGE')) {
            event.attributes.byterange = parseByterange(event.attributes.BYTERANGE);
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-SERVER-CONTROL:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'server-control'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['CAN-SKIP-UNTIL', 'PART-HOLD-BACK', 'HOLD-BACK'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });
          ['CAN-SKIP-DATERANGES', 'CAN-BLOCK-RELOAD'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = /YES/.test(event.attributes[key]);
            }
          });
          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-PART-INF:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'part-inf'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['PART-TARGET'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });
          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-PRELOAD-HINT:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'preload-hint'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['BYTERANGE-START', 'BYTERANGE-LENGTH'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseInt(event.attributes[key], 10);
              const subkey = key === 'BYTERANGE-LENGTH' ? 'length' : 'offset';
              event.attributes.byterange = event.attributes.byterange || {};
              event.attributes.byterange[subkey] = event.attributes[key]; // only keep the parsed byterange object.

              delete event.attributes[key];
            }
          });
          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-RENDITION-REPORT:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'rendition-report'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['LAST-MSN', 'LAST-PART'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseInt(event.attributes[key], 10);
            }
          });
          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-DATERANGE:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'daterange'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['ID', 'CLASS'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = String(event.attributes[key]);
            }
          });
          ['START-DATE', 'END-DATE'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = new Date(event.attributes[key]);
            }
          });
          ['DURATION', 'PLANNED-DURATION'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });
          ['END-ON-NEXT'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = /YES/i.test(event.attributes[key]);
            }
          });
          ['SCTE35-CMD', 'SCTE35-OUT', 'SCTE35-IN'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = event.attributes[key].toString(16);
            }
          });
          const clientAttributePattern = /^X-([A-Z]+-)+[A-Z]+$/;

          for (const key in event.attributes) {
            if (!clientAttributePattern.test(key)) {
              continue;
            }

            const isHexaDecimal = /[0-9A-Fa-f]{6}/g.test(event.attributes[key]);
            const isDecimalFloating = /^\d+(\.\d+)?$/.test(event.attributes[key]);
            event.attributes[key] = isHexaDecimal ? event.attributes[key].toString(16) : isDecimalFloating ? parseFloat(event.attributes[key]) : String(event.attributes[key]);
          }

          this.trigger('data', event);
          return;
        }

        match = /^#EXT-X-INDEPENDENT-SEGMENTS/.exec(newLine);

        if (match) {
          this.trigger('data', {
            type: 'tag',
            tagType: 'independent-segments'
          });
          return;
        }

        match = /^#EXT-X-CONTENT-STEERING:(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'content-steering'
          };
          event.attributes = parseAttributes$1(match[1]);
          this.trigger('data', event);
          return;
        } // unknown tag type


        this.trigger('data', {
          type: 'tag',
          data: newLine.slice(4)
        });
      });
    }
    /**
     * Add a parser for custom tags
     *
     * @param {Object} options a map of options for the added parser
     * @param {RegExp} options.expression a regular expression to match the custom tag
     * @param {string} options.customType the custom type to register to the output
     * @param {Function} [options.dataParser] function to parse the line into an object
     * @param {boolean} [options.segment] should tag data be attached to the segment object
     */


    addParser({
      expression,
      customType,
      dataParser,
      segment
    }) {
      if (typeof dataParser !== 'function') {
        dataParser = line => line;
      }

      this.customParsers.push(line => {
        const match = expression.exec(line);

        if (match) {
          this.trigger('data', {
            type: 'custom',
            data: dataParser(line),
            customType,
            segment
          });
          return true;
        }
      });
    }
    /**
     * Add a custom tag mapper
     *
     * @param {Object} options
     * @param {RegExp} options.expression a regular expression to match the custom tag
     * @param {Function} options.map function to translate the tag into a different tag
     */


    addTagMapper({
      expression,
      map
    }) {
      const mapFn = line => {
        if (expression.test(line)) {
          return map(line);
        }

        return line;
      };

      this.tagMappers.push(mapFn);
    }

  }
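
  // Editorial usage sketch (not part of the upstream bundle): ParseStream's two
  // extension points. addParser() emits a 'custom' event for non-standard tags,
  // and addTagMapper() rewrites a tag before the built-in matching runs. The
  // #EXT-X-EXAMPLE tag names below are hypothetical.
  function exampleParseStreamExtensions() {
    var parseStream = new ParseStream();
    parseStream.addParser({
      expression: /^#EXT-X-EXAMPLE/,
      customType: 'example',
      dataParser: function (line) {
        return line.split(':')[1];
      },
      segment: true // attach the parsed data to the current segment
    });
    parseStream.addTagMapper({
      expression: /^#EXT-X-EXAMPLE-ALIAS/,
      map: function (line) {
        return line.replace('#EXT-X-EXAMPLE-ALIAS', '#EXT-X-EXAMPLE');
      }
    });
    parseStream.on('data', function (entry) {
      // entry: { type: 'custom', customType: 'example', data: 'VALUE', segment: true }
    });
    parseStream.push('#EXT-X-EXAMPLE:VALUE');
  }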

  const camelCase = str => str.toLowerCase().replace(/-(\w)/g, a => a[1].toUpperCase());

  const camelCaseKeys = function (attributes) {
    const result = {};
    Object.keys(attributes).forEach(function (key) {
      result[camelCase(key)] = attributes[key];
    });
    return result;
  }; // set SERVER-CONTROL hold back based upon targetDuration and partTargetDuration
  // we need this helper because defaults are based upon targetDuration and
  // partTargetDuration being set, but they may not be if SERVER-CONTROL appears before
  // target durations are set.


  const setHoldBack = function (manifest) {
    const {
      serverControl,
      targetDuration,
      partTargetDuration
    } = manifest;

    if (!serverControl) {
      return;
    }

    const tag = '#EXT-X-SERVER-CONTROL';
    const hb = 'holdBack';
    const phb = 'partHoldBack';
    const minTargetDuration = targetDuration && targetDuration * 3;
    const minPartDuration = partTargetDuration && partTargetDuration * 2;

    if (targetDuration && !serverControl.hasOwnProperty(hb)) {
      serverControl[hb] = minTargetDuration;
      this.trigger('info', {
        message: `${tag} defaulting HOLD-BACK to targetDuration * 3 (${minTargetDuration}).`
      });
    }

    if (minTargetDuration && serverControl[hb] < minTargetDuration) {
      this.trigger('warn', {
        message: `${tag} clamping HOLD-BACK (${serverControl[hb]}) to targetDuration * 3 (${minTargetDuration})`
      });
      serverControl[hb] = minTargetDuration;
    } // default no part hold back to part target duration * 3


    if (partTargetDuration && !serverControl.hasOwnProperty(phb)) {
      serverControl[phb] = partTargetDuration * 3;
      this.trigger('info', {
        message: `${tag} defaulting PART-HOLD-BACK to partTargetDuration * 3 (${serverControl[phb]}).`
      });
    } // if part hold back is too small default it to part target duration * 2


    if (partTargetDuration && serverControl[phb] < minPartDuration) {
      this.trigger('warn', {
        message: `${tag} clamping PART-HOLD-BACK (${serverControl[phb]}) to partTargetDuration * 2 (${minPartDuration}).`
      });
      serverControl[phb] = minPartDuration;
    }
  };
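
  // Editorial worked example (not part of the upstream bundle): with
  // TARGETDURATION=6 and PART-TARGET=1, a missing HOLD-BACK defaults to
  // 18 (6 * 3) and a missing PART-HOLD-BACK defaults to 3 (1 * 3); an explicit
  // PART-HOLD-BACK below 2 (1 * 2) would be clamped up to 2, with a 'warn'
  // event fired in each clamping case.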
1298 /**
1299 * A parser for M3U8 files. The current interpretation of the input is
1300 * exposed as a property `manifest` on parser objects. It's just two lines to
1301 * create and parse a manifest once you have the contents available as a string:
1302 *
1303 * ```js
1304 * var parser = new m3u8.Parser();
1305 * parser.push(xhr.responseText);
1306 * ```
1307 *
1308 * New input can later be applied to update the manifest object by calling
1309 * `push` again.
1310 *
1311 * The parser attempts to create a usable manifest object even if the
1312 * underlying input is somewhat nonsensical. It emits `info` and `warning`
1313 * events during the parse if it encounters input that seems invalid or
1314 * requires some property of the manifest object to be defaulted.
1315 *
1316 * @class Parser
1317 * @extends Stream
1318 */
1319
1320
1321 class Parser extends Stream {
1322 constructor() {
1323 super();
1324 this.lineStream = new LineStream();
1325 this.parseStream = new ParseStream();
1326 this.lineStream.pipe(this.parseStream);
1327 this.lastProgramDateTime = null;
1328 /* eslint-disable consistent-this */
1329
1330 const self = this;
1331 /* eslint-enable consistent-this */
1332
1333 const uris = [];
1334 let currentUri = {}; // if specified, the active EXT-X-MAP definition
1335
1336 let currentMap; // if specified, the active decryption key
1337
1338 let key;
1339 let hasParts = false;
1340
1341 const noop = function () {};
1342
1343 const defaultMediaGroups = {
1344 'AUDIO': {},
1345 'VIDEO': {},
1346 'CLOSED-CAPTIONS': {},
1347 'SUBTITLES': {}
1348 }; // This is the Widevine UUID from DASH IF IOP. The same exact string is
1349 // used in MPDs with Widevine encrypted streams.
1350
1351 const widevineUuid = 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'; // group segments into numbered timelines delineated by discontinuities
1352
1353 let currentTimeline = 0; // the manifest is empty until the parse stream begins delivering data
1354
1355 this.manifest = {
1356 allowCache: true,
1357 discontinuityStarts: [],
1358 dateRanges: [],
1359 segments: []
1360 }; // keep track of the last seen segment's byte range end, as segments are not required
1361 // to provide the offset, in which case it defaults to the next byte after the
1362 // previous segment
1363
1364 let lastByterangeEnd = 0; // keep track of the last seen part's byte range end.
1365
1366 let lastPartByterangeEnd = 0;
1367 const dateRangeTags = {};
1368 this.on('end', () => {
1369 // only add preloadSegment if we don't yet have a uri for it.
1370 // and we actually have parts/preloadHints
1371 if (currentUri.uri || !currentUri.parts && !currentUri.preloadHints) {
1372 return;
1373 }
1374
1375 if (!currentUri.map && currentMap) {
1376 currentUri.map = currentMap;
1377 }
1378
1379 if (!currentUri.key && key) {
1380 currentUri.key = key;
1381 }
1382
1383 if (!currentUri.timeline && typeof currentTimeline === 'number') {
1384 currentUri.timeline = currentTimeline;
1385 }
1386
1387 this.manifest.preloadSegment = currentUri;
1388 }); // update the manifest with the m3u8 entry from the parse stream
1389
1390 this.parseStream.on('data', function (entry) {
1391 let mediaGroup;
1392 let rendition;
1393 ({
1394 tag() {
1395 // switch based on the tag type
1396 (({
1397 version() {
1398 if (entry.version) {
1399 this.manifest.version = entry.version;
1400 }
1401 },
1402
1403 'allow-cache'() {
1404 this.manifest.allowCache = entry.allowed;
1405
1406 if (!('allowed' in entry)) {
1407 this.trigger('info', {
1408 message: 'defaulting allowCache to YES'
1409 });
1410 this.manifest.allowCache = true;
1411 }
1412 },
1413
1414 byterange() {
1415 const byterange = {};
1416
1417 if ('length' in entry) {
1418 currentUri.byterange = byterange;
1419 byterange.length = entry.length;
1420
1421 if (!('offset' in entry)) {
1422 /*
1423 * From the latest spec (as of this writing):
1424 * https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.2
1425 *
1426 * Same text since EXT-X-BYTERANGE's introduction in draft 7:
1427 * https://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.1)
1428 *
1429 * "If o [offset] is not present, the sub-range begins at the next byte
1430 * following the sub-range of the previous media segment."
1431 */
1432 entry.offset = lastByterangeEnd;
1433 }
1434 }
1435
1436 if ('offset' in entry) {
1437 currentUri.byterange = byterange;
1438 byterange.offset = entry.offset;
1439 }
1440
1441 lastByterangeEnd = byterange.offset + byterange.length;
1442 },
1443
1444 endlist() {
1445 this.manifest.endList = true;
1446 },
1447
1448 inf() {
1449 if (!('mediaSequence' in this.manifest)) {
1450 this.manifest.mediaSequence = 0;
1451 this.trigger('info', {
1452 message: 'defaulting media sequence to zero'
1453 });
1454 }
1455
1456 if (!('discontinuitySequence' in this.manifest)) {
1457 this.manifest.discontinuitySequence = 0;
1458 this.trigger('info', {
1459 message: 'defaulting discontinuity sequence to zero'
1460 });
1461 }
1462
1463 if (entry.title) {
1464 currentUri.title = entry.title;
1465 }
1466
1467 if (entry.duration > 0) {
1468 currentUri.duration = entry.duration;
1469 }
1470
1471 if (entry.duration === 0) {
1472 currentUri.duration = 0.01;
1473 this.trigger('info', {
1474 message: 'updating zero segment duration to a small value'
1475 });
1476 }
1477
1478 this.manifest.segments = uris;
1479 },
1480
1481 key() {
1482 if (!entry.attributes) {
1483 this.trigger('warn', {
1484 message: 'ignoring key declaration without attribute list'
1485 });
1486 return;
1487 } // clear the active encryption key
1488
1489
1490 if (entry.attributes.METHOD === 'NONE') {
1491 key = null;
1492 return;
1493 }
1494
1495 if (!entry.attributes.URI) {
1496 this.trigger('warn', {
1497 message: 'ignoring key declaration without URI'
1498 });
1499 return;
1500 }
1501
1502 if (entry.attributes.KEYFORMAT === 'com.apple.streamingkeydelivery') {
1503 this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.
1504
1505 this.manifest.contentProtection['com.apple.fps.1_0'] = {
1506 attributes: entry.attributes
1507 };
1508 return;
1509 }
1510
1511 if (entry.attributes.KEYFORMAT === 'com.microsoft.playready') {
1512 this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.
1513
1514 this.manifest.contentProtection['com.microsoft.playready'] = {
1515 uri: entry.attributes.URI
1516 };
1517 return;
1518 } // check if the content is encrypted for Widevine
1519 // Widevine/HLS spec: https://storage.googleapis.com/wvdocs/Widevine_DRM_HLS.pdf
1520
1521
1522 if (entry.attributes.KEYFORMAT === widevineUuid) {
1523 const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR', 'SAMPLE-AES-CENC'];
1524
1525 if (VALID_METHODS.indexOf(entry.attributes.METHOD) === -1) {
1526 this.trigger('warn', {
1527 message: 'invalid key method provided for Widevine'
1528 });
1529 return;
1530 }
1531
1532 if (entry.attributes.METHOD === 'SAMPLE-AES-CENC') {
1533 this.trigger('warn', {
1534 message: 'SAMPLE-AES-CENC is deprecated, please use SAMPLE-AES-CTR instead'
1535 });
1536 }
1537
1538 if (entry.attributes.URI.substring(0, 23) !== 'data:text/plain;base64,') {
1539 this.trigger('warn', {
1540 message: 'invalid key URI provided for Widevine'
1541 });
1542 return;
1543 }
1544
1545 if (!(entry.attributes.KEYID && entry.attributes.KEYID.substring(0, 2) === '0x')) {
1546 this.trigger('warn', {
1547 message: 'invalid key ID provided for Widevine'
1548 });
1549 return;
1550 } // if Widevine key attributes are valid, store them as `contentProtection`
1551 // on the manifest to emulate Widevine tag structure in a DASH mpd
1552
1553
1554 this.manifest.contentProtection = this.manifest.contentProtection || {};
1555 this.manifest.contentProtection['com.widevine.alpha'] = {
1556 attributes: {
1557 schemeIdUri: entry.attributes.KEYFORMAT,
1558 // remove '0x' from the key id string
1559 keyId: entry.attributes.KEYID.substring(2)
1560 },
1561 // decode the base64-encoded PSSH box
1562 pssh: decodeB64ToUint8Array(entry.attributes.URI.split(',')[1])
1563 };
1564 return;
1565 }
1566
1567 if (!entry.attributes.METHOD) {
1568 this.trigger('warn', {
1569 message: 'defaulting key method to AES-128'
1570 });
1571 } // setup an encryption key for upcoming segments
1572
1573
1574 key = {
1575 method: entry.attributes.METHOD || 'AES-128',
1576 uri: entry.attributes.URI
1577 };
1578
1579 if (typeof entry.attributes.IV !== 'undefined') {
1580 key.iv = entry.attributes.IV;
1581 }
1582 },
1583
1584 'media-sequence'() {
1585 if (!isFinite(entry.number)) {
1586 this.trigger('warn', {
1587 message: 'ignoring invalid media sequence: ' + entry.number
1588 });
1589 return;
1590 }
1591
1592 this.manifest.mediaSequence = entry.number;
1593 },
1594
1595 'discontinuity-sequence'() {
1596 if (!isFinite(entry.number)) {
1597 this.trigger('warn', {
1598 message: 'ignoring invalid discontinuity sequence: ' + entry.number
1599 });
1600 return;
1601 }
1602
1603 this.manifest.discontinuitySequence = entry.number;
1604 currentTimeline = entry.number;
1605 },
1606
1607 'playlist-type'() {
1608 if (!/VOD|EVENT/.test(entry.playlistType)) {
1609 this.trigger('warn', {
1610 message: 'ignoring unknown playlist type: ' + entry.playlist
1611 });
1612 return;
1613 }
1614
1615 this.manifest.playlistType = entry.playlistType;
1616 },
1617
1618 map() {
1619 currentMap = {};
1620
1621 if (entry.uri) {
1622 currentMap.uri = entry.uri;
1623 }
1624
1625 if (entry.byterange) {
1626 currentMap.byterange = entry.byterange;
1627 }
1628
1629 if (key) {
1630 currentMap.key = key;
1631 }
1632 },
1633
1634 'stream-inf'() {
1635 this.manifest.playlists = uris;
1636 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
1637
1638 if (!entry.attributes) {
1639 this.trigger('warn', {
1640 message: 'ignoring empty stream-inf attributes'
1641 });
1642 return;
1643 }
1644
1645 if (!currentUri.attributes) {
1646 currentUri.attributes = {};
1647 }
1648
1649 _extends(currentUri.attributes, entry.attributes);
1650 },
1651
1652 media() {
1653 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
1654
1655 if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
1656 this.trigger('warn', {
1657 message: 'ignoring incomplete or missing media group'
1658 });
1659 return;
1660 } // find the media group, creating defaults as necessary
1661
1662
1663 const mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
1664 mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
1665 mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']]; // collect the rendition metadata
1666
1667 rendition = {
1668 default: /yes/i.test(entry.attributes.DEFAULT)
1669 };
1670
1671 if (rendition.default) {
1672 rendition.autoselect = true;
1673 } else {
1674 rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
1675 }
1676
1677 if (entry.attributes.LANGUAGE) {
1678 rendition.language = entry.attributes.LANGUAGE;
1679 }
1680
1681 if (entry.attributes.URI) {
1682 rendition.uri = entry.attributes.URI;
1683 }
1684
1685 if (entry.attributes['INSTREAM-ID']) {
1686 rendition.instreamId = entry.attributes['INSTREAM-ID'];
1687 }
1688
1689 if (entry.attributes.CHARACTERISTICS) {
1690 rendition.characteristics = entry.attributes.CHARACTERISTICS;
1691 }
1692
1693 if (entry.attributes.FORCED) {
1694 rendition.forced = /yes/i.test(entry.attributes.FORCED);
1695 } // insert the new rendition
1696
1697
1698 mediaGroup[entry.attributes.NAME] = rendition;
1699 },
1700
1701 discontinuity() {
1702 currentTimeline += 1;
1703 currentUri.discontinuity = true;
1704 this.manifest.discontinuityStarts.push(uris.length);
1705 },
1706
1707 'program-date-time'() {
1708 if (typeof this.manifest.dateTimeString === 'undefined') {
1709 // PROGRAM-DATE-TIME is a media-segment tag, but for backwards
1710 // compatibility, we add the first occurence of the PROGRAM-DATE-TIME tag
1711 // to the manifest object
1712 // TODO: Consider removing this in future major version
1713 this.manifest.dateTimeString = entry.dateTimeString;
1714 this.manifest.dateTimeObject = entry.dateTimeObject;
1715 }
1716
1717 currentUri.dateTimeString = entry.dateTimeString;
1718 currentUri.dateTimeObject = entry.dateTimeObject;
1719 const {
1720 lastProgramDateTime
1721 } = this;
1722 this.lastProgramDateTime = new Date(entry.dateTimeString).getTime(); // We should extrapolate Program Date Time backward only during first program date time occurrence.
1723 // Once we have at least one program date time point, we can always extrapolate it forward using lastProgramDateTime reference.
1724
1725 if (lastProgramDateTime === null) {
1726 // Extrapolate Program Date Time backward
1727 // Since it is first program date time occurrence we're assuming that
1728 // all this.manifest.segments have no program date time info
1729 this.manifest.segments.reduceRight((programDateTime, segment) => {
1730 segment.programDateTime = programDateTime - segment.duration * 1000;
1731 return segment.programDateTime;
1732 }, this.lastProgramDateTime);
1733 }
1734 },
1735
1736 targetduration() {
1737 if (!isFinite(entry.duration) || entry.duration < 0) {
1738 this.trigger('warn', {
1739 message: 'ignoring invalid target duration: ' + entry.duration
1740 });
1741 return;
1742 }
1743
1744 this.manifest.targetDuration = entry.duration;
1745 setHoldBack.call(this, this.manifest);
1746 },
1747
1748 start() {
1749 if (!entry.attributes || isNaN(entry.attributes['TIME-OFFSET'])) {
1750 this.trigger('warn', {
1751 message: 'ignoring start declaration without appropriate attribute list'
1752 });
1753 return;
1754 }
1755
1756 this.manifest.start = {
1757 timeOffset: entry.attributes['TIME-OFFSET'],
1758 precise: entry.attributes.PRECISE
1759 };
1760 },
1761
1762 'cue-out'() {
1763 currentUri.cueOut = entry.data;
1764 },
1765
1766 'cue-out-cont'() {
1767 currentUri.cueOutCont = entry.data;
1768 },
1769
1770 'cue-in'() {
1771 currentUri.cueIn = entry.data;
1772 },
1773
1774 'skip'() {
1775 this.manifest.skip = camelCaseKeys(entry.attributes);
1776 this.warnOnMissingAttributes_('#EXT-X-SKIP', entry.attributes, ['SKIPPED-SEGMENTS']);
1777 },
1778
1779 'part'() {
1780 hasParts = true; // parts are always specifed before a segment
1781
1782 const segmentIndex = this.manifest.segments.length;
1783 const part = camelCaseKeys(entry.attributes);
1784 currentUri.parts = currentUri.parts || [];
1785 currentUri.parts.push(part);
1786
1787 if (part.byterange) {
1788 if (!part.byterange.hasOwnProperty('offset')) {
1789 part.byterange.offset = lastPartByterangeEnd;
1790 }
1791
1792 lastPartByterangeEnd = part.byterange.offset + part.byterange.length;
1793 }
1794
1795 const partIndex = currentUri.parts.length - 1;
1796 this.warnOnMissingAttributes_(`#EXT-X-PART #${partIndex} for segment #${segmentIndex}`, entry.attributes, ['URI', 'DURATION']);
1797
1798 if (this.manifest.renditionReports) {
1799 this.manifest.renditionReports.forEach((r, i) => {
1800 if (!r.hasOwnProperty('lastPart')) {
1801 this.trigger('warn', {
1802 message: `#EXT-X-RENDITION-REPORT #${i} lacks required attribute(s): LAST-PART`
1803 });
1804 }
1805 });
1806 }
1807 },
1808
1809 'server-control'() {
1810 const attrs = this.manifest.serverControl = camelCaseKeys(entry.attributes);
1811
1812 if (!attrs.hasOwnProperty('canBlockReload')) {
1813 attrs.canBlockReload = false;
1814 this.trigger('info', {
1815 message: '#EXT-X-SERVER-CONTROL defaulting CAN-BLOCK-RELOAD to false'
1816 });
1817 }
1818
1819 setHoldBack.call(this, this.manifest);
1820
1821 if (attrs.canSkipDateranges && !attrs.hasOwnProperty('canSkipUntil')) {
1822 this.trigger('warn', {
1823 message: '#EXT-X-SERVER-CONTROL lacks required attribute CAN-SKIP-UNTIL which is required when CAN-SKIP-DATERANGES is set'
1824 });
1825 }
1826 },
1827
1828 'preload-hint'() {
1829 // parts are always specifed before a segment
1830 const segmentIndex = this.manifest.segments.length;
1831 const hint = camelCaseKeys(entry.attributes);
1832 const isPart = hint.type && hint.type === 'PART';
1833 currentUri.preloadHints = currentUri.preloadHints || [];
1834 currentUri.preloadHints.push(hint);
1835
1836 if (hint.byterange) {
1837 if (!hint.byterange.hasOwnProperty('offset')) {
1838 // use last part byterange end or zero if not a part.
1839 hint.byterange.offset = isPart ? lastPartByterangeEnd : 0;
1840
1841 if (isPart) {
1842 lastPartByterangeEnd = hint.byterange.offset + hint.byterange.length;
1843 }
1844 }
1845 }
1846
1847 const index = currentUri.preloadHints.length - 1;
1848 this.warnOnMissingAttributes_(`#EXT-X-PRELOAD-HINT #${index} for segment #${segmentIndex}`, entry.attributes, ['TYPE', 'URI']);
1849
1850 if (!hint.type) {
1851 return;
1852 } // search through all preload hints except for the current one for
1853 // a duplicate type.
1854
1855
1856 for (let i = 0; i < currentUri.preloadHints.length - 1; i++) {
1857 const otherHint = currentUri.preloadHints[i];
1858
1859 if (!otherHint.type) {
1860 continue;
1861 }
1862
1863 if (otherHint.type === hint.type) {
1864 this.trigger('warn', {
1865 message: `#EXT-X-PRELOAD-HINT #${index} for segment #${segmentIndex} has the same TYPE ${hint.type} as preload hint #${i}`
1866 });
1867 }
1868 }
1869 },
1870
1871 'rendition-report'() {
1872 const report = camelCaseKeys(entry.attributes);
1873 this.manifest.renditionReports = this.manifest.renditionReports || [];
1874 this.manifest.renditionReports.push(report);
1875 const index = this.manifest.renditionReports.length - 1;
1876 const required = ['LAST-MSN', 'URI'];
1877
1878 if (hasParts) {
1879 required.push('LAST-PART');
1880 }
1881
1882 this.warnOnMissingAttributes_(`#EXT-X-RENDITION-REPORT #${index}`, entry.attributes, required);
1883 },
1884
1885 'part-inf'() {
1886 this.manifest.partInf = camelCaseKeys(entry.attributes);
1887 this.warnOnMissingAttributes_('#EXT-X-PART-INF', entry.attributes, ['PART-TARGET']);
1888
1889 if (this.manifest.partInf.partTarget) {
1890 this.manifest.partTargetDuration = this.manifest.partInf.partTarget;
1891 }
1892
1893 setHoldBack.call(this, this.manifest);
1894 },
1895
1896 'daterange'() {
1897 this.manifest.dateRanges.push(camelCaseKeys(entry.attributes));
1898 const index = this.manifest.dateRanges.length - 1;
1899 this.warnOnMissingAttributes_(`#EXT-X-DATERANGE #${index}`, entry.attributes, ['ID', 'START-DATE']);
1900 const dateRange = this.manifest.dateRanges[index];
1901
1902 if (dateRange.endDate && dateRange.startDate && new Date(dateRange.endDate) < new Date(dateRange.startDate)) {
1903 this.trigger('warn', {
1904 message: 'EXT-X-DATERANGE END-DATE must be equal to or later than the value of the START-DATE'
1905 });
1906 }
1907
1908 if (dateRange.duration && dateRange.duration < 0) {
1909 this.trigger('warn', {
1910 message: 'EXT-X-DATERANGE DURATION must not be negative'
1911 });
1912 }
1913
1914 if (dateRange.plannedDuration && dateRange.plannedDuration < 0) {
1915 this.trigger('warn', {
1916 message: 'EXT-X-DATERANGE PLANNED-DURATION must not be negative'
1917 });
1918 }
1919
1920 const endOnNextYes = !!dateRange.endOnNext;
1921
1922 if (endOnNextYes && !dateRange.class) {
1923 this.trigger('warn', {
1924 message: 'EXT-X-DATERANGE with an END-ON-NEXT=YES attribute must have a CLASS attribute'
1925 });
1926 }
1927
1928 if (endOnNextYes && (dateRange.duration || dateRange.endDate)) {
1929 this.trigger('warn', {
1930 message: 'EXT-X-DATERANGE with an END-ON-NEXT=YES attribute must not contain DURATION or END-DATE attributes'
1931 });
1932 }
1933
1934 if (dateRange.duration && dateRange.endDate) {
1935 const startDate = dateRange.startDate;
1936 const newDateInSeconds = startDate.getTime() + dateRange.duration * 1000;
1937 this.manifest.dateRanges[index].endDate = new Date(newDateInSeconds);
1938 }
1939
1940 if (!dateRangeTags[dateRange.id]) {
1941 dateRangeTags[dateRange.id] = dateRange;
1942 } else {
1943 for (const attribute in dateRangeTags[dateRange.id]) {
1944 if (!!dateRange[attribute] && JSON.stringify(dateRangeTags[dateRange.id][attribute]) !== JSON.stringify(dateRange[attribute])) {
1945 this.trigger('warn', {
1946 message: 'EXT-X-DATERANGE tags with the same ID in a playlist must have the same attributes values'
1947 });
1948 break;
1949 }
1950 } // if tags with the same ID do not have conflicting attributes, merge them
1951
1952
1953 const dateRangeWithSameId = this.manifest.dateRanges.findIndex(dateRangeToFind => dateRangeToFind.id === dateRange.id);
1954 this.manifest.dateRanges[dateRangeWithSameId] = _extends(this.manifest.dateRanges[dateRangeWithSameId], dateRange);
1955 dateRangeTags[dateRange.id] = _extends(dateRangeTags[dateRange.id], dateRange); // after merging, delete the duplicate dateRange that was added last
1956
1957 this.manifest.dateRanges.pop();
1958 }
1959 },
1960
1961 'independent-segments'() {
1962 this.manifest.independentSegments = true;
1963 },
1964
1965 'content-steering'() {
1966 this.manifest.contentSteering = camelCaseKeys(entry.attributes);
1967 this.warnOnMissingAttributes_('#EXT-X-CONTENT-STEERING', entry.attributes, ['SERVER-URI']);
1968 }
1969
1970 })[entry.tagType] || noop).call(self);
1971 },
1972
1973 uri() {
1974 currentUri.uri = entry.uri;
1975 uris.push(currentUri); // if no explicit duration was declared, use the target duration
1976
1977 if (this.manifest.targetDuration && !('duration' in currentUri)) {
1978 this.trigger('warn', {
1979 message: 'defaulting segment duration to the target duration'
1980 });
1981 currentUri.duration = this.manifest.targetDuration;
1982 } // annotate with encryption information, if necessary
1983
1984
1985 if (key) {
1986 currentUri.key = key;
1987 }
1988
1989 currentUri.timeline = currentTimeline; // annotate with initialization segment information, if necessary
1990
1991 if (currentMap) {
1992 currentUri.map = currentMap;
1993 } // reset the last byterange end as it needs to be 0 between parts
1994
1995
1996 lastPartByterangeEnd = 0; // Once we have at least one program date time we can always extrapolate it forward
1997
1998 if (this.lastProgramDateTime !== null) {
1999 currentUri.programDateTime = this.lastProgramDateTime;
2000 this.lastProgramDateTime += currentUri.duration * 1000;
2001 } // prepare for the next URI
2002
2003
2004 currentUri = {};
2005 },
2006
2007 comment() { // comments are not important for playback
2008 },
2009
2010 custom() {
2011 // if this is segment-level data attach the output to the segment
2012 if (entry.segment) {
2013 currentUri.custom = currentUri.custom || {};
2014 currentUri.custom[entry.customType] = entry.data; // if this is manifest-level data attach to the top level manifest object
2015 } else {
2016 this.manifest.custom = this.manifest.custom || {};
2017 this.manifest.custom[entry.customType] = entry.data;
2018 }
2019 }
2020
2021 })[entry.type].call(self);
2022 });
2023 }
2024
2025 warnOnMissingAttributes_(identifier, attributes, required) {
2026 const missing = [];
2027 required.forEach(function (key) {
2028 if (!attributes.hasOwnProperty(key)) {
2029 missing.push(key);
2030 }
2031 });
2032
2033 if (missing.length) {
2034 this.trigger('warn', {
2035 message: `${identifier} lacks required attribute(s): ${missing.join(', ')}`
2036 });
2037 }
2038 }
2039 /**
2040 * Parse the input string and update the manifest object.
2041 *
2042 * @param {string} chunk a potentially incomplete portion of the manifest
2043 */
2044
2045
2046 push(chunk) {
2047 this.lineStream.push(chunk);
2048 }
2049 /**
2050 * Flush any remaining input. This can be handy if the last line of an M3U8
2051 * manifest did not contain a trailing newline but the file has been
2052 * completely received.
2053 */
2054
2055
2056 end() {
2057 // flush any buffered input
2058 this.lineStream.push('\n');
2059
2060 if (this.manifest.dateRanges.length && this.lastProgramDateTime === null) {
2061 this.trigger('warn', {
2062 message: 'A playlist with an EXT-X-DATERANGE tag must contain at least one EXT-X-PROGRAM-DATE-TIME tag'
2063 });
2064 }
2065
2066 this.lastProgramDateTime = null;
2067 this.trigger('end');
2068 }
2069 /**
2070 * Add an additional parser for non-standard tags
2071 *
2072 * @param {Object} options a map of options for the added parser
2073 * @param {RegExp} options.expression a regular expression to match the custom header
2074 * @param {string} options.customType the custom type to register to the output
2075 * @param {Function} [options.dataParser] function to parse the line into an object
2076 * @param {boolean} [options.segment] should tag data be attached to the segment object
2077 */
2078
2079
2080 addParser(options) {
2081 this.parseStream.addParser(options);
2082 }
2083 /**
2084 * Add a custom header mapper
2085 *
2086 * @param {Object} options
2087 * @param {RegExp} options.expression a regular expression to match the custom header
2088 * @param {Function} options.map function to translate tag into a different tag
2089 */
2090
2091
2092 addTagMapper(options) {
2093 this.parseStream.addTagMapper(options);
2094 }
2095
2096 }
2097
2098 var regexs = {
2099 // to determine mime types
2100 mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
2101 webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
2102 ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
2103 // to determine if a codec is audio or video
2104 video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
2105 audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
2106 text: /^(stpp.ttml.im1t)/,
2107 // mux.js support regex
2108 muxerVideo: /^(avc0?1)/,
2109 muxerAudio: /^(mp4a)/,
2110 // match nothing as the muxer does not support text right now.
2111 // there can never be a character before the start of a string,
2112 // so this matches nothing.
2113 muxerText: /a^/
2114 };
2115 var mediaTypes = ['video', 'audio', 'text'];
2116 var upperMediaTypes = ['Video', 'Audio', 'Text'];
2117 /**
2118 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
2119 * `avc1.<hhhhhh>`
2120 *
2121 * @param {string} codec
2122 * Codec string to translate
2123 * @return {string}
2124 * The translated codec string
2125 */
2126
2127 var translateLegacyCodec = function translateLegacyCodec(codec) {
2128 if (!codec) {
2129 return codec;
2130 }
2131
2132 return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
2133 var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
2134 var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
2135 return 'avc1.' + profileHex + '00' + avcLevelHex;
2136 });
2137 };
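// Illustrative usage (not part of the library; values follow the replacement
// above, which renders the decimal profile and level as two hex bytes with a
// constraint byte of 00 in between):
//
//   translateLegacyCodec('avc1.66.30'); // => 'avc1.42001e' (66 -> 0x42, 30 -> 0x1e)
//   translateLegacyCodec('mp4a.40.2');  // => 'mp4a.40.2' (unchanged)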
2138 /**
2139 * @typedef {Object} ParsedCodecInfo
2140 * @property {number} codecCount
2141 * Number of codecs parsed
2142 * @property {string} [videoCodec]
2143 * Parsed video codec (if found)
2144 * @property {string} [videoObjectTypeIndicator]
2145 * Video object type indicator (if found)
2146 * @property {string|null} audioProfile
2147 * Audio profile
2148 */
2149
2150 /**
2151 * Parses a codec string to retrieve the number of codecs specified, the video codec and
2152 * object type indicator, and the audio profile.
2153 *
2154 * @param {string} [codecString]
2155 * The codec string to parse
2156 * @return {ParsedCodecInfo}
2157 * Parsed codec info
2158 */
2159
2160 var parseCodecs = function parseCodecs(codecString) {
2161 if (codecString === void 0) {
2162 codecString = '';
2163 }
2164
2165 var codecs = codecString.split(',');
2166 var result = [];
2167 codecs.forEach(function (codec) {
2168 codec = codec.trim();
2169 var codecType;
2170 mediaTypes.forEach(function (name) {
2171 var match = regexs[name].exec(codec.toLowerCase());
2172
2173 if (!match || match.length <= 1) {
2174 return;
2175 }
2176
2177 codecType = name; // maintain codec case
2178
2179 var type = codec.substring(0, match[1].length);
2180 var details = codec.replace(type, '');
2181 result.push({
2182 type: type,
2183 details: details,
2184 mediaType: name
2185 });
2186 });
2187
2188 if (!codecType) {
2189 result.push({
2190 type: codec,
2191 details: '',
2192 mediaType: 'unknown'
2193 });
2194 }
2195 });
2196 return result;
2197 };
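// Illustrative usage (not part of the library): each codec is split into the
// matched type prefix and its remaining details, classified by the regexes above.
//
//   parseCodecs('avc1.42001e, mp4a.40.2');
//   // => [
//   //   { type: 'avc1', details: '.42001e', mediaType: 'video' },
//   //   { type: 'mp4a', details: '.40.2', mediaType: 'audio' }
//   // ]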
2198 /**
2199 * Returns a ParsedCodecInfo object for the default alternate audio playlist if there is
2200 * a default alternate audio playlist for the provided audio group.
2201 *
2202 * @param {Object} master
2203 * The master playlist
2204 * @param {string} audioGroupId
2205 * ID of the audio group for which to find the default codec info
2206 * @return {ParsedCodecInfo}
2207 * Parsed codec info
2208 */
2209
2210 var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {
2211 if (!master.mediaGroups.AUDIO || !audioGroupId) {
2212 return null;
2213 }
2214
2215 var audioGroup = master.mediaGroups.AUDIO[audioGroupId];
2216
2217 if (!audioGroup) {
2218 return null;
2219 }
2220
2221 for (var name in audioGroup) {
2222 var audioType = audioGroup[name];
2223
2224 if (audioType.default && audioType.playlists) {
2225 // codec should be the same for all playlists within the audio type
2226 return parseCodecs(audioType.playlists[0].attributes.CODECS);
2227 }
2228 }
2229
2230 return null;
2231 };
2232 var isAudioCodec = function isAudioCodec(codec) {
2233 if (codec === void 0) {
2234 codec = '';
2235 }
2236
2237 return regexs.audio.test(codec.trim().toLowerCase());
2238 };
2239 var isTextCodec = function isTextCodec(codec) {
2240 if (codec === void 0) {
2241 codec = '';
2242 }
2243
2244 return regexs.text.test(codec.trim().toLowerCase());
2245 };
2246 var getMimeForCodec = function getMimeForCodec(codecString) {
2247 if (!codecString || typeof codecString !== 'string') {
2248 return;
2249 }
2250
2251 var codecs = codecString.toLowerCase().split(',').map(function (c) {
2252 return translateLegacyCodec(c.trim());
2253 }); // default to video type
2254
2255 var type = 'video'; // only change to audio type if the only codec we have is
2256 // audio
2257
2258 if (codecs.length === 1 && isAudioCodec(codecs[0])) {
2259 type = 'audio';
2260 } else if (codecs.length === 1 && isTextCodec(codecs[0])) {
2261 // text uses application/<container> for now
2262 type = 'application';
2263 } // default the container to mp4
2264
2265
2266 var container = 'mp4'; // every codec must be able to go into the container
2267 // for that container to be the correct one
2268
2269 if (codecs.every(function (c) {
2270 return regexs.mp4.test(c);
2271 })) {
2272 container = 'mp4';
2273 } else if (codecs.every(function (c) {
2274 return regexs.webm.test(c);
2275 })) {
2276 container = 'webm';
2277 } else if (codecs.every(function (c) {
2278 return regexs.ogg.test(c);
2279 })) {
2280 container = 'ogg';
2281 }
2282
2283 return type + "/" + container + ";codecs=\"" + codecString + "\"";
2284 };
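// Illustrative usage (not part of the library): type is 'audio' only when the
// sole codec is audio, and the container is the first one every codec fits.
//
//   getMimeForCodec('avc1.42001e,mp4a.40.2'); // => 'video/mp4;codecs="avc1.42001e,mp4a.40.2"'
//   getMimeForCodec('mp4a.40.2');             // => 'audio/mp4;codecs="mp4a.40.2"'
//   getMimeForCodec('vp8,vorbis');            // => 'video/webm;codecs="vp8,vorbis"'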
2285 var browserSupportsCodec = function browserSupportsCodec(codecString) {
2286 if (codecString === void 0) {
2287 codecString = '';
2288 }
2289
2290 return window.MediaSource && window.MediaSource.isTypeSupported && window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
2291 };
2292 var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
2293 if (codecString === void 0) {
2294 codecString = '';
2295 }
2296
2297 return codecString.toLowerCase().split(',').every(function (codec) {
2298 codec = codec.trim(); // any match is supported.
2299
2300 for (var i = 0; i < upperMediaTypes.length; i++) {
2301 var type = upperMediaTypes[i];
2302
2303 if (regexs["muxer" + type].test(codec)) {
2304 return true;
2305 }
2306 }
2307
2308 return false;
2309 });
2310 };
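// Illustrative usage (not part of the library): per the muxer regexes above,
// only AVC video and AAC audio are transmuxable.
//
//   muxerSupportsCodec('avc1.4d400d, mp4a.40.2'); // => true
//   muxerSupportsCodec('hvc1.1.6.L93.90');        // => false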
2311 var DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
2312 var DEFAULT_VIDEO_CODEC = 'avc1.4d400d';
2313
2314 /**
2315 * Provides a compatibility layer between Video.js 7 and 8 API changes for VHS.
2316 */
2317 /**
2318 * Delegates to videojs.obj.merge (Video.js 8) or
2319 * videojs.mergeOptions (Video.js 7).
2320 */
2321
2322 function merge$1(...args) {
2323 const context = videojs__default["default"].obj || videojs__default["default"];
2324 const fn = context.merge || context.mergeOptions;
2325 return fn.apply(context, args);
2326 }
2327 /**
2328 * Delegates to videojs.time.createTimeRanges (Video.js 8) or
2329 * videojs.createTimeRanges (Video.js 7).
2330 */
2331
2332 function createTimeRanges(...args) {
2333 const context = videojs__default["default"].time || videojs__default["default"];
2334 const fn = context.createTimeRanges;
2335 return fn.apply(context, args);
2336 }
2337
2338 /**
2339 * ranges
2340 *
2341 * Utilities for working with TimeRanges.
2342 *
2343 */
2344
2345 const TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
2346 // can be misleading because of precision differences or when the current media has poorly
2347 // aligned audio and video, which can cause values to be slightly off from what you would
2348 // expect. This value is what we consider to be safe to use in such comparisons to account
2349 // for these scenarios.
2350
2351 const SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
2352
2353 const filterRanges = function (timeRanges, predicate) {
2354 const results = [];
2355 let i;
2356
2357 if (timeRanges && timeRanges.length) {
2358 // Search for ranges that match the predicate
2359 for (i = 0; i < timeRanges.length; i++) {
2360 if (predicate(timeRanges.start(i), timeRanges.end(i))) {
2361 results.push([timeRanges.start(i), timeRanges.end(i)]);
2362 }
2363 }
2364 }
2365
2366 return createTimeRanges(results);
2367 };
2368 /**
2369 * Attempts to find the buffered TimeRange that contains the specified
2370 * time.
2371 *
2372 * @param {TimeRanges} buffered - the TimeRanges object to query
2373 * @param {number} time - the time to filter on.
2374 * @return {TimeRanges} a new TimeRanges object
2375 */
2376
2377
2378 const findRange = function (buffered, time) {
2379 return filterRanges(buffered, function (start, end) {
2380 return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
2381 });
2382 };
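// Illustrative usage (not part of the library):
//
//   const buffered = createTimeRanges([[0, 10], [20, 30]]);
//   findRange(buffered, 5).length;  // => 1 (the [0, 10] range contains 5)
//   findRange(buffered, 15).length; // => 0 (15 falls in the gap)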
2383 /**
2384 * Returns the TimeRanges that begin later than the specified time.
2385 *
2386 * @param {TimeRanges} timeRanges - the TimeRanges object to query
2387 * @param {number} time - the time to filter on.
2388 * @return {TimeRanges} a new TimeRanges object.
2389 */
2390
2391 const findNextRange = function (timeRanges, time) {
2392 return filterRanges(timeRanges, function (start) {
2393 return start - TIME_FUDGE_FACTOR >= time;
2394 });
2395 };
2396 /**
2397 * Returns gaps within a list of TimeRanges
2398 *
2399 * @param {TimeRanges} buffered - the TimeRanges object
2400 * @return {TimeRanges} a TimeRanges object of gaps
2401 */
2402
2403 const findGaps = function (buffered) {
2404 if (buffered.length < 2) {
2405 return createTimeRanges();
2406 }
2407
2408 const ranges = [];
2409
2410 for (let i = 1; i < buffered.length; i++) {
2411 const start = buffered.end(i - 1);
2412 const end = buffered.start(i);
2413 ranges.push([start, end]);
2414 }
2415
2416 return createTimeRanges(ranges);
2417 };
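// Illustrative usage (not part of the library): the gap is the span between
// one range's end and the next range's start.
//
//   const gaps = findGaps(createTimeRanges([[0, 10], [20, 30]]));
//   gaps.start(0); // => 10
//   gaps.end(0);   // => 20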
2418 /**
2419 * Calculate the intersection of two TimeRanges
2420 *
2421 * @param {TimeRanges} bufferA
2422 * @param {TimeRanges} bufferB
2423 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
2424 */
2425
2426 const bufferIntersection = function (bufferA, bufferB) {
2427 let start = null;
2428 let end = null;
2429 let arity = 0;
2430 const extents = [];
2431 const ranges = [];
2432
2433 if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
2434 return createTimeRanges();
2435 } // Handle the case where we have both buffers and create an
2436 // intersection of the two
2437
2438
2439 let count = bufferA.length; // A) Gather up all start and end times
2440
2441 while (count--) {
2442 extents.push({
2443 time: bufferA.start(count),
2444 type: 'start'
2445 });
2446 extents.push({
2447 time: bufferA.end(count),
2448 type: 'end'
2449 });
2450 }
2451
2452 count = bufferB.length;
2453
2454 while (count--) {
2455 extents.push({
2456 time: bufferB.start(count),
2457 type: 'start'
2458 });
2459 extents.push({
2460 time: bufferB.end(count),
2461 type: 'end'
2462 });
2463 } // B) Sort them by time
2464
2465
2466 extents.sort(function (a, b) {
2467 return a.time - b.time;
2468 }); // C) Go along one by one incrementing arity for start and decrementing
2469 // arity for ends
2470
2471 for (count = 0; count < extents.length; count++) {
2472 if (extents[count].type === 'start') {
2473 arity++; // D) If arity is ever incremented to 2 we are entering an
2474 // overlapping range
2475
2476 if (arity === 2) {
2477 start = extents[count].time;
2478 }
2479 } else if (extents[count].type === 'end') {
2480 arity--; // E) If arity is ever decremented to 1 we are leaving an
2481 // overlapping range
2482
2483 if (arity === 1) {
2484 end = extents[count].time;
2485 }
2486 } // F) Record overlapping ranges
2487
2488
2489 if (start !== null && end !== null) {
2490 ranges.push([start, end]);
2491 start = null;
2492 end = null;
2493 }
2494 }
2495
2496 return createTimeRanges(ranges);
2497 };
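// Illustrative usage (not part of the library): overlaps are detected by the
// arity sweep above.
//
//   const a = createTimeRanges([[0, 10], [20, 30]]);
//   const b = createTimeRanges([[5, 25]]);
//   bufferIntersection(a, b); // => ranges [[5, 10], [20, 25]]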
2498 /**
2499 * Gets a human readable string for a TimeRange
2500 *
2501 * @param {TimeRange} range
2502 * @return {string} a human readable string
2503 */
2504
2505 const printableRange = range => {
2506 const strArr = [];
2507
2508 if (!range || !range.length) {
2509 return '';
2510 }
2511
2512 for (let i = 0; i < range.length; i++) {
2513 strArr.push(range.start(i) + ' => ' + range.end(i));
2514 }
2515
2516 return strArr.join(', ');
2517 };
2518 /**
2519 * Calculates the amount of time left in seconds until the player hits the end of the
2520 * buffer and causes a rebuffer
2521 *
2522 * @param {TimeRange} buffered
2523 * The state of the buffer
2524 * @param {number} currentTime
2525 * The current time of the player
2526 * @param {number} playbackRate
2527 * The current playback rate of the player. Defaults to 1.
2528 * @return {number}
2529 * Time until the player has to start rebuffering in seconds.
2530 * @function timeUntilRebuffer
2531 */
2532
2533 const timeUntilRebuffer = function (buffered, currentTime, playbackRate = 1) {
2534 const bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
2535 return (bufferedEnd - currentTime) / playbackRate;
2536 };
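// Illustrative usage (not part of the library): with 30s buffered, a playhead
// at 25s, and 1x playback, 5 seconds remain before a rebuffer.
//
//   timeUntilRebuffer(createTimeRanges([[0, 30]]), 25);    // => 5
//   timeUntilRebuffer(createTimeRanges([[0, 30]]), 25, 2); // => 2.5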
2537 /**
2538 * Converts a TimeRanges object into an array representation
2539 *
2540 * @param {TimeRanges} timeRanges
2541 * @return {Array}
2542 */
2543
2544 const timeRangesToArray = timeRanges => {
2545 const timeRangesList = [];
2546
2547 for (let i = 0; i < timeRanges.length; i++) {
2548 timeRangesList.push({
2549 start: timeRanges.start(i),
2550 end: timeRanges.end(i)
2551 });
2552 }
2553
2554 return timeRangesList;
2555 };
2556 /**
2557 * Determines if two time range objects are different.
2558 *
2559 * @param {TimeRange} a
2560 * the first time range object to check
2561 *
2562 * @param {TimeRange} b
2563 * the second time range object to check
2564 *
2565 * @return {Boolean}
2566 * Whether the time range objects differ
2567 */
2568
2569 const isRangeDifferent = function (a, b) {
2570 // same object
2571 if (a === b) {
2572 return false;
2573 } // one or the other is undefined
2574
2575
2576 if (!a && b || !b && a) {
2577 return true;
2578 } // length is different
2579
2580
2581 if (a.length !== b.length) {
2582 return true;
2583 } // see if any start/end pair is different
2584
2585
2586 for (let i = 0; i < a.length; i++) {
2587 if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
2588 return true;
2589 }
2590 } // if the length and every pair is the same
2591 // this is the same time range
2592
2593
2594 return false;
2595 };
2596 const lastBufferedEnd = function (a) {
2597 if (!a || !a.length || !a.end) {
2598 return;
2599 }
2600
2601 return a.end(a.length - 1);
2602 };
2603 /**
2604 * A utility function to add up the amount of time in a timeRange
2605 * after a specified startTime.
2606 * e.g. [[0, 10], [20, 40], [50, 60]] with a startTime of 0
2607 * would return 40 as there are 40 seconds after 0 in the timeRange
2608 *
2609 * @param {TimeRange} range
2610 * The range to check against
2611 * @param {number} startTime
2612 * The time in the time range that you should start counting from
2613 *
2614 * @return {number}
2615 * The number of seconds in the buffer past the specified time.
2616 */
2617
2618 const timeAheadOf = function (range, startTime) {
2619 let time = 0;
2620
2621 if (!range || !range.length) {
2622 return time;
2623 }
2624
2625 for (let i = 0; i < range.length; i++) {
2626 const start = range.start(i);
2627 const end = range.end(i); // startTime is after this range entirely
2628
2629 if (startTime > end) {
2630 continue;
2631 } // startTime is within this range
2632
2633
2634 if (startTime > start && startTime <= end) {
2635 time += end - startTime;
2636 continue;
2637 } // startTime is before this range.
2638
2639
2640 time += end - start;
2641 }
2642
2643 return time;
2644 };
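// Illustrative usage (not part of the library):
//
//   const range = createTimeRanges([[0, 10], [20, 40], [50, 60]]);
//   timeAheadOf(range, 25); // => 25 ((40 - 25) + (60 - 50))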
2645
2646 /**
2647 * @file playlist.js
2648 *
2649 * Playlist related utilities.
2650 */
2651 /**
2652 * Get the duration of a segment, with special cases for
2653 * llhls segments that do not have a duration yet.
2654 *
2655 * @param {Object} playlist
2656 * the playlist that the segment belongs to.
2657 * @param {Object} segment
2658 * the segment to get a duration for.
2659 *
2660 * @return {number}
2661 * the segment duration
2662 */
2663
2664 const segmentDurationWithParts = (playlist, segment) => {
2665 // if this isn't a preload segment
2666 // then we will have a segment duration that is accurate.
2667 if (!segment.preload) {
2668 return segment.duration;
2669 } // otherwise we have to add up parts and preload hints
2670 // to get an up to date duration.
2671
2672
2673 let result = 0;
2674 (segment.parts || []).forEach(function (p) {
2675 result += p.duration;
2676 }); // for preload hints we have to use partTargetDuration
2677 // as they won't even have a duration yet.
2678
2679 (segment.preloadHints || []).forEach(function (p) {
2680 if (p.type === 'PART') {
2681 result += playlist.partTargetDuration;
2682 }
2683 });
2684 return result;
2685 };
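// Illustrative usage (not part of the library; a hypothetical preload segment
// with one 1s part advertised plus one hinted part, in a playlist whose
// partTargetDuration is 1):
//
//   segmentDurationWithParts(
//     { partTargetDuration: 1 },
//     { preload: true, parts: [{ duration: 1 }], preloadHints: [{ type: 'PART' }] }
//   ); // => 2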
2686 /**
2687 * A function to get a combined list of parts and segments with durations
2688 * and indexes.
2689 *
2690 * @param {Playlist} playlist the playlist to get the list for.
2691 *
2692 * @return {Array} The part/segment list.
2693 */
2694
2695 const getPartsAndSegments = playlist => (playlist.segments || []).reduce((acc, segment, si) => {
2696 if (segment.parts) {
2697 segment.parts.forEach(function (part, pi) {
2698 acc.push({
2699 duration: part.duration,
2700 segmentIndex: si,
2701 partIndex: pi,
2702 part,
2703 segment
2704 });
2705 });
2706 } else {
2707 acc.push({
2708 duration: segment.duration,
2709 segmentIndex: si,
2710 partIndex: null,
2711 segment,
2712 part: null
2713 });
2714 }
2715
2716 return acc;
2717 }, []);
2718 const getLastParts = media => {
2719 const lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
2720 return lastSegment && lastSegment.parts || [];
2721 };
2722 const getKnownPartCount = ({
2723 preloadSegment
2724 }) => {
2725 if (!preloadSegment) {
2726 return;
2727 }
2728
2729 const {
2730 parts,
2731 preloadHints
2732 } = preloadSegment;
2733 let partCount = (preloadHints || []).reduce((count, hint) => count + (hint.type === 'PART' ? 1 : 0), 0);
2734 partCount += parts && parts.length ? parts.length : 0;
2735 return partCount;
2736 };
2737 /**
2738 * Get the number of seconds to delay from the end of a
2739 * live playlist.
2740 *
2741 * @param {Playlist} main the main playlist
2742 * @param {Playlist} media the media playlist
2743 * @return {number} the hold back in seconds.
2744 */
2745
2746 const liveEdgeDelay = (main, media) => {
2747 if (media.endList) {
2748 return 0;
2749 } // dash suggestedPresentationDelay trumps everything
2750
2751
2752 if (main && main.suggestedPresentationDelay) {
2753 return main.suggestedPresentationDelay;
2754 }
2755
2756 const hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls first
2757
2758 if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
2759 return media.serverControl.partHoldBack;
2760 } else if (hasParts && media.partTargetDuration) {
2761 return media.partTargetDuration * 3; // finally look for full segment delays
2762 } else if (media.serverControl && media.serverControl.holdBack) {
2763 return media.serverControl.holdBack;
2764 } else if (media.targetDuration) {
2765 return media.targetDuration * 3;
2766 }
2767
2768 return 0;
2769 };
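// Illustrative usage (not part of the library): with no server control hints,
// an LL-HLS playlist falls back to three part target durations and a regular
// live playlist to three target durations.
//
//   liveEdgeDelay(null, { targetDuration: 6 }); // => 18
//   liveEdgeDelay(null, {
//     partTargetDuration: 0.5,
//     segments: [{ parts: [{ duration: 0.5 }] }]
//   }); // => 1.5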
2770 /**
2771 * walk backward until we find a duration we can use
2772 * or return a failure
2773 *
2774 * @param {Playlist} playlist the playlist to walk through
2775 * @param {Number} endSequence the mediaSequence to stop walking on
2776 */
2777
2778 const backwardDuration = function (playlist, endSequence) {
2779 let result = 0;
2780 let i = endSequence - playlist.mediaSequence; // if a start time is available for the segment immediately following
2781 // the interval, use it
2782
2783 let segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
2784 // information that is earlier than endSequence
2785
2786 if (segment) {
2787 if (typeof segment.start !== 'undefined') {
2788 return {
2789 result: segment.start,
2790 precise: true
2791 };
2792 }
2793
2794 if (typeof segment.end !== 'undefined') {
2795 return {
2796 result: segment.end - segment.duration,
2797 precise: true
2798 };
2799 }
2800 }
2801
2802 while (i--) {
2803 segment = playlist.segments[i];
2804
2805 if (typeof segment.end !== 'undefined') {
2806 return {
2807 result: result + segment.end,
2808 precise: true
2809 };
2810 }
2811
2812 result += segmentDurationWithParts(playlist, segment);
2813
2814 if (typeof segment.start !== 'undefined') {
2815 return {
2816 result: result + segment.start,
2817 precise: true
2818 };
2819 }
2820 }
2821
2822 return {
2823 result,
2824 precise: false
2825 };
2826 };
2827 /**
2828 * walk forward until we find a duration we can use
2829 * or return a failure
2830 *
2831 * @param {Playlist} playlist the playlist to walk through
2832 * @param {number} endSequence the mediaSequence to stop walking on
2833 */
2834
2835
2836 const forwardDuration = function (playlist, endSequence) {
2837 let result = 0;
2838 let segment;
2839 let i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
2840 // information
2841
2842 for (; i < playlist.segments.length; i++) {
2843 segment = playlist.segments[i];
2844
2845 if (typeof segment.start !== 'undefined') {
2846 return {
2847 result: segment.start - result,
2848 precise: true
2849 };
2850 }
2851
2852 result += segmentDurationWithParts(playlist, segment);
2853
2854 if (typeof segment.end !== 'undefined') {
2855 return {
2856 result: segment.end - result,
2857 precise: true
2858 };
2859 }
2860 } // indicate we didn't find a useful duration estimate
2861
2862
2863 return {
2864 result: -1,
2865 precise: false
2866 };
2867 };
2868 /**
2869 * Calculate the media duration from the segments associated with a
2870 * playlist. The duration of a subinterval of the available segments
2871 * may be calculated by specifying an end index.
2872 *
2873 * @param {Object} playlist a media playlist object
2874 * @param {number=} endSequence an exclusive upper boundary
2875 * for the playlist. Defaults to the playlist media sequence plus its segment count.
2876 * @param {number} expired the amount of time that has dropped
2877 * off the front of the playlist in a live scenario
2878 * @return {number} the duration between the first available segment
2879 * and end index.
2880 */
2881
2882
2883 const intervalDuration = function (playlist, endSequence, expired) {
2884 if (typeof endSequence === 'undefined') {
2885 endSequence = playlist.mediaSequence + playlist.segments.length;
2886 }
2887
2888 if (endSequence < playlist.mediaSequence) {
2889 return 0;
2890 } // do a backward walk to estimate the duration
2891
2892
2893 const backward = backwardDuration(playlist, endSequence);
2894
2895 if (backward.precise) {
2896 // if we were able to base our duration estimate on timing
2897 // information provided directly from the Media Source, return
2898 // it
2899 return backward.result;
2900 } // walk forward to see if a precise duration estimate can be made
2901 // that way
2902
2903
2904 const forward = forwardDuration(playlist, endSequence);
2905
2906 if (forward.precise) {
2907 // we found a segment that has been buffered and so its
2908 // position is known precisely
2909 return forward.result;
2910 } // return the less-precise, playlist-based duration estimate
2911
2912
2913 return backward.result + expired;
2914 };
2915 /**
2916 * Calculates the duration of a playlist. If a start and end index
2917 * are specified, the duration will be for the subset of the media
2918 * timeline between those two indices. The total duration for live
2919 * playlists is always Infinity.
2920 *
2921 * @param {Object} playlist a media playlist object
2922 * @param {number=} endSequence an exclusive upper
2923 * boundary for the playlist. Defaults to the playlist media
2924 * sequence number plus its length.
2925 * @param {number=} expired the amount of time that has
2926 * dropped off the front of the playlist in a live scenario
2927 * @return {number} the duration between the start index and end
2928 * index.
2929 */
2930
2931
2932 const duration = function (playlist, endSequence, expired) {
2933 if (!playlist) {
2934 return 0;
2935 }
2936
2937 if (typeof expired !== 'number') {
2938 expired = 0;
2939 } // if a slice of the total duration is not requested, use
2940 // playlist-level duration indicators when they're present
2941
2942
2943 if (typeof endSequence === 'undefined') {
2944 // if present, use the duration specified in the playlist
2945 if (playlist.totalDuration) {
2946 return playlist.totalDuration;
2947 } // duration should be Infinity for live playlists
2948
2949
2950 if (!playlist.endList) {
2951 return window.Infinity;
2952 }
2953 } // calculate the total duration based on the segment durations
2954
2955
2956 return intervalDuration(playlist, endSequence, expired);
2957 };
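// Illustrative usage (not part of the library):
//
//   duration({ endList: false, segments: [] }); // => Infinity (live)
//   duration({
//     endList: true,
//     mediaSequence: 0,
//     segments: [{ duration: 10 }, { duration: 10 }]
//   }); // => 20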
2958 /**
2959 * Calculate the time between two indexes in the current playlist.
2960 * Neither the start nor the end index needs to be within the current
2961 * playlist; in that case, the targetDuration of the playlist is used
2962 * to approximate the durations of the segments
2963 *
2964 * @param {Array} options.durationList list to iterate over for durations.
2965 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
2966 * @param {number} options.startIndex partsAndSegments index to start
2967 * @param {number} options.endIndex partsAndSegments index to end.
2968 * @return {number} the number of seconds between startIndex and endIndex
2969 */
2970
2971 const sumDurations = function ({
2972 defaultDuration,
2973 durationList,
2974 startIndex,
2975 endIndex
2976 }) {
2977 let durations = 0;
2978
2979 if (startIndex > endIndex) {
2980 [startIndex, endIndex] = [endIndex, startIndex];
2981 }
2982
2983 if (startIndex < 0) {
2984 for (let i = startIndex; i < Math.min(0, endIndex); i++) {
2985 durations += defaultDuration;
2986 }
2987
2988 startIndex = 0;
2989 }
2990
2991 for (let i = startIndex; i < endIndex; i++) {
2992 durations += durationList[i].duration;
2993 }
2994
2995 return durations;
2996 };
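// Illustrative usage (not part of the library): indexes below zero are counted
// at the defaultDuration.
//
//   sumDurations({
//     defaultDuration: 10,
//     durationList: [{ duration: 4 }, { duration: 6 }],
//     startIndex: -1,
//     endIndex: 2
//   }); // => 20 (10 + 4 + 6)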
2997 /**
2998 * Calculates the playlist end time
2999 *
3000 * @param {Object} playlist a media playlist object
3001 * @param {number=} expired the amount of time that has
3002 * dropped off the front of the playlist in a live scenario
3003 * @param {boolean} useSafeLiveEnd a boolean value indicating whether or not the
3004 * playlist end calculation should consider the safe live end
3005 * (truncate the playlist end by three segments). This is normally
3006 * used for calculating the end of the playlist's seekable range.
3007 * This takes into account the value of liveEdgePadding.
3008 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
3009 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
3010 * If this is provided, it is used in the safe live end calculation.
3011 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
3012 * Corresponds to suggestedPresentationDelay in DASH manifests.
3013 * @return {number} the end time of the playlist
3014 * @function playlistEnd
3015 */
3016
3017 const playlistEnd = function (playlist, expired, useSafeLiveEnd, liveEdgePadding) {
3018 if (!playlist || !playlist.segments) {
3019 return null;
3020 }
3021
3022 if (playlist.endList) {
3023 return duration(playlist);
3024 }
3025
3026 if (expired === null) {
3027 return null;
3028 }
3029
3030 expired = expired || 0;
3031 let lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);
3032
3033 if (useSafeLiveEnd) {
3034 liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
3035 lastSegmentEndTime -= liveEdgePadding;
3036 } // don't return a time less than zero
3037
3038
3039 return Math.max(0, lastSegmentEndTime);
3040 };
3041 /**
3042 * Calculates the interval of time that is currently seekable in a
3043 * playlist. The returned time ranges are relative to the earliest
3044 * moment in the specified playlist that is still available. A full
3045 * seekable implementation for live streams would need to offset
3046 * these values by the duration of content that has expired from the
3047 * stream.
3048 *
3049 * @param {Object} playlist a media playlist object
3051 * @param {number=} expired the amount of time that has
3052 * dropped off the front of the playlist in a live scenario
3053 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
3054 * Corresponds to suggestedPresentationDelay in DASH manifests.
3055 * @return {TimeRanges} the periods of time that are valid targets
3056 * for seeking
3057 */
3058
3059 const seekable = function (playlist, expired, liveEdgePadding) {
3060 const useSafeLiveEnd = true;
3061 const seekableStart = expired || 0;
3062 let seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
3063
3064 if (seekableEnd === null) {
3065 return createTimeRanges();
3066 } // Clamp seekable end since it can not be less than the seekable start
3067
3068
3069 if (seekableEnd < seekableStart) {
3070 seekableEnd = seekableStart;
3071 }
3072
3073 return createTimeRanges(seekableStart, seekableEnd);
3074 };
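// Illustrative usage (not part of the library): a hypothetical live playlist
// of five 10s segments with nothing expired is seekable from 0 up to the live
// edge minus the default hold back (3 * targetDuration).
//
//   const live = {
//     mediaSequence: 0,
//     targetDuration: 10,
//     segments: [10, 10, 10, 10, 10].map(d => ({ duration: d }))
//   };
//   seekable(live, 0); // => ranges [[0, 20]] (50s of media - 30s hold back)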
3075 /**
3076 * Determine the index and estimated starting time of the segment that
3077 * contains a specified playback position in a media playlist.
3078 *
3079 * @param {Object} options.playlist the media playlist to query
3080 * @param {number} options.currentTime The number of seconds since the earliest
3081 * possible position to determine the containing segment for
3082 * @param {number} options.startTime the time when the segment/part starts
3083 * @param {number} options.startingSegmentIndex the segment index to start looking at.
3084 * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
* @param {boolean} [options.exactManifestTimings] when true, use exact manifest timings without the fudge factor.
3085 *
3086 * @return {Object} an object with partIndex, segmentIndex, and startTime.
3087 */
3088
3089 const getMediaInfoForTime = function ({
3090 playlist,
3091 currentTime,
3092 startingSegmentIndex,
3093 startingPartIndex,
3094 startTime,
3095 exactManifestTimings
3096 }) {
3097 let time = currentTime - startTime;
3098 const partsAndSegments = getPartsAndSegments(playlist);
3099 let startIndex = 0;
3100
3101 for (let i = 0; i < partsAndSegments.length; i++) {
3102 const partAndSegment = partsAndSegments[i];
3103
3104 if (startingSegmentIndex !== partAndSegment.segmentIndex) {
3105 continue;
3106 } // skip this if part index does not match.
3107
3108
3109 if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
3110 continue;
3111 }
3112
3113 startIndex = i;
3114 break;
3115 }
3116
3117 if (time < 0) {
3118 // Walk backward from startIndex in the playlist, adding durations
3119 // until we find a segment that contains `time` and return it
3120 if (startIndex > 0) {
3121 for (let i = startIndex - 1; i >= 0; i--) {
3122 const partAndSegment = partsAndSegments[i];
3123 time += partAndSegment.duration;
3124
3125 if (exactManifestTimings) {
3126 if (time < 0) {
3127 continue;
3128 }
3129 } else if (time + TIME_FUDGE_FACTOR <= 0) {
3130 continue;
3131 }
3132
3133 return {
3134 partIndex: partAndSegment.partIndex,
3135 segmentIndex: partAndSegment.segmentIndex,
3136 startTime: startTime - sumDurations({
3137 defaultDuration: playlist.targetDuration,
3138 durationList: partsAndSegments,
3139 startIndex,
3140 endIndex: i
3141 })
3142 };
3143 }
3144 } // We were unable to find a good segment within the playlist
3145 // so select the first segment
3146
3147
3148 return {
3149 partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
3150 segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
3151 startTime: currentTime
3152 };
3153 } // When startIndex is negative, we first walk forward to the first segment
3154 // adding target durations. If we "run out of time" before getting to
3155 // the first segment, return the first segment
3156
3157
3158 if (startIndex < 0) {
3159 for (let i = startIndex; i < 0; i++) {
3160 time -= playlist.targetDuration;
3161
3162 if (time < 0) {
3163 return {
3164 partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
3165 segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
3166 startTime: currentTime
3167 };
3168 }
3169 }
3170
3171 startIndex = 0;
3172 } // Walk forward from startIndex in the playlist, subtracting durations
3173 // until we find a segment that contains `time` and return it
3174
3175
3176 for (let i = startIndex; i < partsAndSegments.length; i++) {
3177 const partAndSegment = partsAndSegments[i];
3178 time -= partAndSegment.duration;
3179 const canUseFudgeFactor = partAndSegment.duration > TIME_FUDGE_FACTOR;
3180 const isExactlyAtTheEnd = time === 0;
3181 const isExtremelyCloseToTheEnd = canUseFudgeFactor && time + TIME_FUDGE_FACTOR >= 0;
3182
3183 if (isExactlyAtTheEnd || isExtremelyCloseToTheEnd) {
3184 // 1) We are exactly at the end of the current segment.
3185 // 2) We are extremely close to the end of the current segment (The difference is less than 1 / 30).
3186 // We may encounter this situation when
3187 // we don't have exact match between segment duration info in the manifest and the actual duration of the segment
3188 // For example:
3189 // We appended 3 segments 10 seconds each, meaning we should have 30 sec buffered,
3190 // but the actual buffered duration is 29.99999
3191 //
3192 // In both cases:
3193 // if we passed current time -> it means that we already played current segment
3194 // if we passed buffered.end -> it means that this segment is already loaded and buffered
3195 // we should select the next segment if we have one:
3196 if (i !== partsAndSegments.length - 1) {
3197 continue;
3198 }
3199 }
3200
3201 if (exactManifestTimings) {
3202 if (time > 0) {
3203 continue;
3204 }
3205 } else if (time - TIME_FUDGE_FACTOR >= 0) {
3206 continue;
3207 }
3208
3209 return {
3210 partIndex: partAndSegment.partIndex,
3211 segmentIndex: partAndSegment.segmentIndex,
3212 startTime: startTime + sumDurations({
3213 defaultDuration: playlist.targetDuration,
3214 durationList: partsAndSegments,
3215 startIndex,
3216 endIndex: i
3217 })
3218 };
3219 } // We are out of possible candidates so load the last one...
3220
3221
3222 return {
3223 segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
3224 partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
3225 startTime: currentTime
3226 };
3227 };
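// Illustrative usage (not part of the library): 15s into a hypothetical
// playlist of two 10s segments lands in the second segment, which starts at 10.
//
//   getMediaInfoForTime({
//     playlist: {
//       targetDuration: 10,
//       segments: [{ duration: 10 }, { duration: 10 }]
//     },
//     currentTime: 15,
//     startingSegmentIndex: 0,
//     startTime: 0
//   }); // => { partIndex: null, segmentIndex: 1, startTime: 10 }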
3228 /**
3229 * Check whether the playlist is excluded or not.
3230 *
3231 * @param {Object} playlist the media playlist object
3232 * @return {boolean} whether the playlist is excluded or not
3233 * @function isExcluded
3234 */
3235
3236 const isExcluded = function (playlist) {
3237 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
3238 };
3239 /**
3240 * Check whether the playlist is compatible with current playback configuration or has
3241 * been excluded permanently for being incompatible.
3242 *
3243 * @param {Object} playlist the media playlist object
3244 * @return {boolean} whether the playlist is incompatible or not
3245 * @function isIncompatible
3246 */
3247
3248 const isIncompatible = function (playlist) {
3249 return playlist.excludeUntil && playlist.excludeUntil === Infinity;
3250 };
3251 /**
3252 * Check whether the playlist is enabled or not.
3253 *
3254 * @param {Object} playlist the media playlist object
3255 * @return {boolean} whether the playlist is enabled or not
3256 * @function isEnabled
3257 */
3258
3259 const isEnabled = function (playlist) {
3260 const excluded = isExcluded(playlist);
3261 return !playlist.disabled && !excluded;
3262 };
3263 /**
3264 * Check whether the playlist has been manually disabled through the representations api.
3265 *
3266 * @param {Object} playlist the media playlist object
3267 * @return {boolean} whether the playlist is disabled manually or not
3268 * @function isDisabled
3269 */
3270
3271 const isDisabled = function (playlist) {
3272 return playlist.disabled;
3273 };
3274 /**
3275 * Returns whether the current playlist is an AES encrypted HLS stream
3276 *
3277 * @return {boolean} true if it's an AES encrypted HLS stream
3278 */
3279
3280 const isAes = function (media) {
3281 for (let i = 0; i < media.segments.length; i++) {
3282 if (media.segments[i].key) {
3283 return true;
3284 }
3285 }
3286
3287 return false;
3288 };
3289 /**
3290 * Checks if the playlist has a value for the specified attribute
3291 *
3292 * @param {string} attr
3293 * Attribute to check for
3294 * @param {Object} playlist
3295 * The media playlist object
3296 * @return {boolean}
3297 * Whether the playlist contains a value for the attribute or not
3298 * @function hasAttribute
3299 */
3300
3301 const hasAttribute = function (attr, playlist) {
3302 return playlist.attributes && playlist.attributes[attr];
3303 };
3304 /**
3305 * Estimates the time required to complete a segment download from the specified playlist
3306 *
3307 * @param {number} segmentDuration
3308 * Duration of requested segment
3309 * @param {number} bandwidth
3310 * Current measured bandwidth of the player
3311 * @param {Object} playlist
3312 * The media playlist object
3313 * @param {number=} bytesReceived
3314 * Number of bytes already received for the request. Defaults to 0
3315 * @return {number|NaN}
3316 * The estimated time to request the segment. NaN if bandwidth information for
3317 * the given playlist is unavailable
3318 * @function estimateSegmentRequestTime
3319 */
3320
3321 const estimateSegmentRequestTime = function (segmentDuration, bandwidth, playlist, bytesReceived = 0) {
3322 if (!hasAttribute('BANDWIDTH', playlist)) {
3323 return NaN;
3324 }
3325
3326 const size = segmentDuration * playlist.attributes.BANDWIDTH;
3327 return (size - bytesReceived * 8) / bandwidth;
3328 };
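// Illustrative usage (not part of the library): a 10s segment from a 2Mbps
// rendition is roughly 20,000,000 bits; over a measured 5,000,000 bits/s it
// should take about 4 seconds.
//
//   estimateSegmentRequestTime(10, 5e6, { attributes: { BANDWIDTH: 2e6 } }); // => 4
//   estimateSegmentRequestTime(10, 5e6, { attributes: {} });                 // => NaN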
3329 /*
3330 * Returns whether the current playlist is the lowest rendition
3331 *
3332 * @return {Boolean} true if on lowest rendition
3333 */
3334
3335 const isLowestEnabledRendition = (main, media) => {
3336 if (main.playlists.length === 1) {
3337 return true;
3338 }
3339
3340 const currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
3341 return main.playlists.filter(playlist => {
3342 if (!isEnabled(playlist)) {
3343 return false;
3344 }
3345
3346 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
3347 }).length === 0;
3348 };
3349 const playlistMatch = (a, b) => {
3350 // both playlists are null
3351 // or only one playlist is non-null
3352 // no match
3353 if (!a && !b || !a && b || a && !b) {
3354 return false;
3355 } // playlist objects are the same, match
3356
3357
3358 if (a === b) {
3359 return true;
3360 } // first try to use id as it should be the most
3361 // accurate
3362
3363
3364 if (a.id && b.id && a.id === b.id) {
3365 return true;
3366 // next try to use resolvedUri as it should be the
3367 // second most accurate.
3368
3369
3370 if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
3371 return true;
3372 } // finally try to use uri as it should be accurate
3373 // but might miss a few cases for relative uris
3374
3375
3376 if (a.uri && b.uri && a.uri === b.uri) {
3377 return true;
3378 }
3379
3380 return false;
3381 };
3382
3383 const someAudioVariant = function (main, callback) {
3384 const AUDIO = main && main.mediaGroups && main.mediaGroups.AUDIO || {};
3385 let found = false;
3386
3387 for (const groupName in AUDIO) {
3388 for (const label in AUDIO[groupName]) {
3389 found = callback(AUDIO[groupName][label]);
3390
3391 if (found) {
3392 break;
3393 }
3394 }
3395
3396 if (found) {
3397 break;
3398 }
3399 }
3400
3401 return !!found;
3402 };
3403
3404 const isAudioOnly = main => {
3405 // we are audio only if we have no main playlists but do
3406 // have media group playlists.
3407 if (!main || !main.playlists || !main.playlists.length) {
3408 // with no main playlists, this is audio only when some
3409 // audio variant has its own playlists or uri.
3410 const found = someAudioVariant(main, variant => variant.playlists && variant.playlists.length || variant.uri);
3411 return found;
3412 } // if every playlist has only an audio codec it is audio only
3413
3414
3415 for (let i = 0; i < main.playlists.length; i++) {
3416 const playlist = main.playlists[i];
3417 const CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.
3418
3419 if (CODECS && CODECS.split(',').every(c => isAudioCodec(c))) {
3420 continue;
3421 } // if the playlist is in an audio group, it is audio only
3422
3423
3424 const found = someAudioVariant(main, variant => playlistMatch(playlist, variant));
3425
3426 if (found) {
3427 continue;
3428 } // if we make it here this playlist isn't audio and we
3429 // are not audio only
3430
3431
3432 return false;
3433 } // if we make it past every playlist without returning, then
3434 // this is an audio only playlist.
3435
3436
3437 return true;
3438 }; // exports
3439
3440 var Playlist = {
3441 liveEdgeDelay,
3442 duration,
3443 seekable,
3444 getMediaInfoForTime,
3445 isEnabled,
3446 isDisabled,
3447 isExcluded,
3448 isIncompatible,
3449 playlistEnd,
3450 isAes,
3451 hasAttribute,
3452 estimateSegmentRequestTime,
3453 isLowestEnabledRendition,
3454 isAudioOnly,
3455 playlistMatch,
3456 segmentDurationWithParts
3457 };
3458
3459 const {
3460 log
3461 } = videojs__default["default"];
3462 const createPlaylistID = (index, uri) => {
3463 return `${index}-${uri}`;
3464 }; // default function for creating a group id
3465
3466 const groupID = (type, group, label) => {
3467 return `placeholder-uri-${type}-${group}-${label}`;
3468 };
3469 /**
3470 * Parses a given m3u8 playlist
3471 *
3472 * @param {Function} [onwarn]
3473 * a function to call when the parser triggers a warning event.
3474 * @param {Function} [oninfo]
3475 * a function to call when the parser triggers an info event.
3476 * @param {string} manifestString
3477 * The downloaded manifest string
3478 * @param {Object[]} [customTagParsers]
3479 * An array of custom tag parsers for the m3u8-parser instance
3480 * @param {Object[]} [customTagMappers]
3481 * An array of custom tag mappers for the m3u8-parser instance
3482 * @param {boolean} [llhls]
3483 * Whether to keep ll-hls features in the manifest after parsing.
3484 * @return {Object}
3485 * The manifest object
3486 */
3487
3488 const parseManifest = ({
3489 onwarn,
3490 oninfo,
3491 manifestString,
3492 customTagParsers = [],
3493 customTagMappers = [],
3494 llhls
3495 }) => {
3496 const parser = new Parser();
3497
3498 if (onwarn) {
3499 parser.on('warn', onwarn);
3500 }
3501
3502 if (oninfo) {
3503 parser.on('info', oninfo);
3504 }
3505
3506 customTagParsers.forEach(customParser => parser.addParser(customParser));
3507 customTagMappers.forEach(mapper => parser.addTagMapper(mapper));
3508 parser.push(manifestString);
3509 parser.end();
3510 const manifest = parser.manifest; // remove llhls features from the parsed manifest
3511 // if we don't want llhls support.
3512
3513 if (!llhls) {
3514 ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
3515 if (manifest.hasOwnProperty(k)) {
3516 delete manifest[k];
3517 }
3518 });
3519
3520 if (manifest.segments) {
3521 manifest.segments.forEach(function (segment) {
3522 ['parts', 'preloadHints'].forEach(function (k) {
3523 if (segment.hasOwnProperty(k)) {
3524 delete segment[k];
3525 }
3526 });
3527 });
3528 }
3529 }
3530
3531 if (!manifest.targetDuration) {
3532 let targetDuration = 10;
3533
3534 if (manifest.segments && manifest.segments.length) {
3535 targetDuration = manifest.segments.reduce((acc, s) => Math.max(acc, s.duration), 0);
3536 }
3537
3538 if (onwarn) {
3539 onwarn({
3540 message: `manifest has no targetDuration defaulting to ${targetDuration}`
3541 });
3542 }
3543
3544 manifest.targetDuration = targetDuration;
3545 }
3546
3547 const parts = getLastParts(manifest);
3548
3549 if (parts.length && !manifest.partTargetDuration) {
3550 const partTargetDuration = parts.reduce((acc, p) => Math.max(acc, p.duration), 0);
3551
3552 if (onwarn) {
3553 onwarn({
3554 message: `manifest has no partTargetDuration defaulting to ${partTargetDuration}`
3555 });
3556 log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
3557 }
3558
3559 manifest.partTargetDuration = partTargetDuration;
3560 }
3561
3562 return manifest;
3563 };
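// Illustrative usage (not part of the library; a minimal hypothetical VOD
// manifest, assuming the m3u8-parser Parser referenced above):
//
//   const manifest = parseManifest({
//     manifestString: '#EXTM3U\n#EXT-X-TARGETDURATION:10\n#EXTINF:10,\nsegment0.ts\n#EXT-X-ENDLIST\n'
//   });
//   manifest.targetDuration;  // => 10
//   manifest.segments.length; // => 1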
3564 /**
3565 * Loops through all supported media groups in main and calls the provided
3566 * callback for each group
3567 *
3568 * @param {Object} main
3569 * The parsed main manifest object
3570 * @param {Function} callback
3571 * Callback to call for each media group
3572 */
3573
3574 const forEachMediaGroup$1 = (main, callback) => {
3575 if (!main.mediaGroups) {
3576 return;
3577 }
3578
3579 ['AUDIO', 'SUBTITLES'].forEach(mediaType => {
3580 if (!main.mediaGroups[mediaType]) {
3581 return;
3582 }
3583
3584 for (const groupKey in main.mediaGroups[mediaType]) {
3585 for (const labelKey in main.mediaGroups[mediaType][groupKey]) {
3586 const mediaProperties = main.mediaGroups[mediaType][groupKey][labelKey];
3587 callback(mediaProperties, mediaType, groupKey, labelKey);
3588 }
3589 }
3590 });
3591 };
3592 /**
3593 * Adds properties and attributes to the playlist to keep consistent functionality for
3594 * playlists throughout VHS.
3595 *
3596 * @param {Object} config
3597 * Arguments object
3598 * @param {Object} config.playlist
3599 * The media playlist
3600 * @param {string} [config.uri]
3601 * The uri to the media playlist (if media playlist is not from within a main
3602 * playlist)
3603 * @param {string} config.id
3604 * ID to use for the playlist
3605 */
3606
3607 const setupMediaPlaylist = ({
3608 playlist,
3609 uri,
3610 id
3611 }) => {
3612 playlist.id = id;
3613 playlist.playlistErrors_ = 0;
3614
3615 if (uri) {
3616 // For media playlists, m3u8-parser does not have access to a URI, as HLS media
3617 // playlists do not contain their own source URI, but one is needed for consistency in
3618 // VHS.
3619 playlist.uri = uri;
3620 } // For HLS main playlists, even though certain attributes MUST be defined, the
3621 // stream may still be played without them.
3622 // For HLS media playlists, m3u8-parser does not attach an attributes object to the
3623 // manifest.
3624 //
3625 // To avoid undefined reference errors through the project, and make the code easier
3626 // to write/read, add an empty attributes object for these cases.
3627
3628
3629 playlist.attributes = playlist.attributes || {};
3630 };
3631 /**
3632 * Adds ID, resolvedUri, and attributes properties to each playlist of the main, where
3633 * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
3634 * playlist references to the playlists array.
3635 *
3636 * @param {Object} main
3637 * The main playlist
3638 */
3639
3640 const setupMediaPlaylists = main => {
3641 let i = main.playlists.length;
3642
3643 while (i--) {
3644 const playlist = main.playlists[i];
3645 setupMediaPlaylist({
3646 playlist,
3647 id: createPlaylistID(i, playlist.uri)
3648 });
3649 playlist.resolvedUri = resolveUrl(main.uri, playlist.uri);
3650 main.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility
3651
3652 main.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
3653 // the stream can be played without it. Although an attributes property may have been
3654 // added to the playlist to prevent undefined references, issue a warning to fix the
3655 // manifest.
3656
3657 if (!playlist.attributes.BANDWIDTH) {
3658 log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
3659 }
3660 }
3661 };
3662 /**
3663 * Adds resolvedUri properties to each media group.
3664 *
3665 * @param {Object} main
3666 * The main playlist
3667 */
3668
3669 const resolveMediaGroupUris = main => {
3670 forEachMediaGroup$1(main, properties => {
3671 if (properties.uri) {
3672 properties.resolvedUri = resolveUrl(main.uri, properties.uri);
3673 }
3674 });
3675 };
3676 /**
3677 * Creates a main playlist wrapper to insert a sole media playlist into.
3678 *
3679 * @param {Object} media
3680 * Media playlist
3681 * @param {string} uri
3682 * The media URI
3683 *
3684 * @return {Object}
3685 * main playlist
3686 */
3687
3688 const mainForMedia = (media, uri) => {
3689 const id = createPlaylistID(0, uri);
3690 const main = {
3691 mediaGroups: {
3692 'AUDIO': {},
3693 'VIDEO': {},
3694 'CLOSED-CAPTIONS': {},
3695 'SUBTITLES': {}
3696 },
3697 uri: window.location.href,
3698 resolvedUri: window.location.href,
3699 playlists: [{
3700 uri,
3701 id,
3702 resolvedUri: uri,
3703 // m3u8-parser does not attach an attributes property to media playlists so make
3704 // sure that the property is attached to avoid undefined reference errors
3705 attributes: {}
3706 }]
3707 }; // set up ID reference
3708
3709 main.playlists[id] = main.playlists[0]; // URI reference added for backwards compatibility
3710
3711 main.playlists[uri] = main.playlists[0];
3712 return main;
3713 };
3714 /**
3715 * Does an in-place update of the main manifest to add updated playlist URI references
3716 * as well as other properties needed by VHS that aren't included by the parser.
3717 *
3718 * @param {Object} main
3719 * main manifest object
3720 * @param {string} uri
3721 * The source URI
3722 * @param {function} createGroupID
3723 * A function to determine how to create the groupID for mediaGroups
3724 */
3725
3726 const addPropertiesToMain = (main, uri, createGroupID = groupID) => {
3727 main.uri = uri;
3728
3729 for (let i = 0; i < main.playlists.length; i++) {
3730 if (!main.playlists[i].uri) {
3731 // Set up phony URIs for the playlists since playlists are referenced by their URIs
3732 // throughout VHS, but some formats (e.g., DASH) don't have external URIs
3733 // TODO: consider adding dummy URIs in mpd-parser
3734 const phonyUri = `placeholder-uri-${i}`;
3735 main.playlists[i].uri = phonyUri;
3736 }
3737 }
3738
3739 const audioOnlyMain = isAudioOnly(main);
3740 forEachMediaGroup$1(main, (properties, mediaType, groupKey, labelKey) => {
3741 // add a playlist array under properties
3742 if (!properties.playlists || !properties.playlists.length) {
3743 // If the manifest is audio only and this media group does not have a uri, check
3744 // if the media group is located in the main list of playlists. If it is, don't add
3745 // placeholder properties as it shouldn't be considered an alternate audio track.
3746 if (audioOnlyMain && mediaType === 'AUDIO' && !properties.uri) {
3747 for (let i = 0; i < main.playlists.length; i++) {
3748 const p = main.playlists[i];
3749
3750 if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
3751 return;
3752 }
3753 }
3754 }
3755
3756 properties.playlists = [_extends({}, properties)];
3757 }
3758
3759 properties.playlists.forEach(function (p, i) {
3760 const groupId = createGroupID(mediaType, groupKey, labelKey, p);
3761 const id = createPlaylistID(i, groupId);
3762
3763 if (p.uri) {
3764 p.resolvedUri = p.resolvedUri || resolveUrl(main.uri, p.uri);
3765 } else {
3766 // DEPRECATED, this has been added to prevent a breaking change.
3767 // previously we only ever had a single media group playlist, so
3768 // we mark the first playlist uri without prepending the index, as we used to.
3769 // Ideally we would do all of the playlists the same way.
3770 p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
3771 // the placeholder again
3772
3773 p.resolvedUri = p.uri;
3774 }
3775
3776 p.id = p.id || id; // add an empty attributes object, all playlists are
3777 // expected to have this.
3778
3779 p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)
3780
3781 main.playlists[p.id] = p;
3782 main.playlists[p.uri] = p;
3783 });
3784 });
3785 setupMediaPlaylists(main);
3786 resolveMediaGroupUris(main);
3787 };
3788
3789 class DateRangesStorage {
3790 constructor() {
3791 this.offset_ = null;
3792 this.pendingDateRanges_ = new Map();
3793 this.processedDateRanges_ = new Map();
3794 }
3795
3796 setOffset(segments = []) {
3797 // already set
3798 if (this.offset_ !== null) {
3799 return;
3800 } // no segment to process
3801
3802
3803 if (!segments.length) {
3804 return;
3805 }
3806
3807 const [firstSegment] = segments; // no program date time
3808
3809 if (firstSegment.programDateTime === undefined) {
3810 return;
3811 } // Set offset as ProgramDateTime for the very first segment of the very first playlist load:
3812
3813
3814 this.offset_ = firstSegment.programDateTime / 1000;
3815 }
3816
3817 setPendingDateRanges(dateRanges = []) {
3818 if (!dateRanges.length) {
3819 return;
3820 }
3821
3822 const [dateRange] = dateRanges;
3823 const startTime = dateRange.startDate.getTime();
3824 this.trimProcessedDateRanges_(startTime);
3825 this.pendingDateRanges_ = dateRanges.reduce((map, pendingDateRange) => {
3826 map.set(pendingDateRange.id, pendingDateRange);
3827 return map;
3828 }, new Map());
3829 }
3830
3831 processDateRange(dateRange) {
3832 this.pendingDateRanges_.delete(dateRange.id);
3833 this.processedDateRanges_.set(dateRange.id, dateRange);
3834 }
3835
3836 getDateRangesToProcess() {
3837 if (this.offset_ === null) {
3838 return [];
3839 }
3840
3841 const dateRangeClasses = {};
3842 const dateRangesToProcess = [];
3843 this.pendingDateRanges_.forEach((dateRange, id) => {
3844 if (this.processedDateRanges_.has(id)) {
3845 return;
3846 }
3847
3848 dateRange.startTime = dateRange.startDate.getTime() / 1000 - this.offset_;
3849
3850 dateRange.processDateRange = () => this.processDateRange(dateRange);
3851
3852 dateRangesToProcess.push(dateRange);
3853
3854 if (!dateRange.class) {
3855 return;
3856 }
3857
3858 if (dateRangeClasses[dateRange.class]) {
3859 const length = dateRangeClasses[dateRange.class].push(dateRange);
3860 dateRange.classListIndex = length - 1;
3861 } else {
3862 dateRangeClasses[dateRange.class] = [dateRange];
3863 dateRange.classListIndex = 0;
3864 }
3865 });
3866
3867 for (const dateRange of dateRangesToProcess) {
3868 const classList = dateRangeClasses[dateRange.class] || [];
3869
3870 if (dateRange.endDate) {
3871 dateRange.endTime = dateRange.endDate.getTime() / 1000 - this.offset_;
3872 } else if (dateRange.endOnNext && classList[dateRange.classListIndex + 1]) {
3873 dateRange.endTime = classList[dateRange.classListIndex + 1].startTime;
3874 } else if (dateRange.duration) {
3875 dateRange.endTime = dateRange.startTime + dateRange.duration;
3876 } else if (dateRange.plannedDuration) {
3877 dateRange.endTime = dateRange.startTime + dateRange.plannedDuration;
3878 } else {
3879 dateRange.endTime = dateRange.startTime;
3880 }
3881 }
3882
3883 return dateRangesToProcess;
3884 }
3885
3886 trimProcessedDateRanges_(startTime) {
3887 const copy = new Map(this.processedDateRanges_);
3888 copy.forEach((dateRange, id) => {
3889 if (dateRange.startDate.getTime() < startTime) {
3890 this.processedDateRanges_.delete(id);
3891 }
3892 });
3893 }
3894
3895 }
3896
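  // Usage sketch for DateRangesStorage with hypothetical data: the first
  // segment's EXT-X-PROGRAM-DATE-TIME anchors the offset, then parsed
  // EXT-X-DATERANGE tags come back with startTime/endTime in player seconds.
  const dateRangesStorageExample = () => {
    const storage = new DateRangesStorage();
    storage.setOffset([{ programDateTime: Date.parse('2024-01-01T00:00:00Z') }]);
    storage.setPendingDateRanges([{
      id: 'ad-break-1',
      startDate: new Date('2024-01-01T00:00:30Z'),
      duration: 15
    }]);
    // => [{ id: 'ad-break-1', startTime: 30, endTime: 45, ... }]
    return storage.getDateRangesToProcess();
  };
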
3897 const {
3898 EventTarget: EventTarget$1
3899 } = videojs__default["default"];
3900
3901 const addLLHLSQueryDirectives = (uri, media) => {
3902 if (media.endList || !media.serverControl) {
3903 return uri;
3904 }
3905
3906 const parameters = {};
3907
3908 if (media.serverControl.canBlockReload) {
3909 const {
3910 preloadSegment
3911 } = media; // next msn is a zero based value, length is not.
3912
3913 let nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
3914 // that we are going to request a part of that preload segment.
3915 // the logic below is used to determine that.
3916
3917 if (preloadSegment) {
3918 const parts = preloadSegment.parts || []; // _HLS_part is a zero based index
3919
3920 const nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to the index of
3921 // the last listed part (parts.length - 1), then we know we had part preload hints
3922 // and we need to add the _HLS_part= query
3923
3924 if (nextPart > -1 && nextPart !== parts.length - 1) {
3925 // add existing parts to our preload hints
3926 // eslint-disable-next-line
3927 parameters._HLS_part = nextPart;
3928 } // this if statement makes sure that we request the msn
3929 // of the preload segment if:
3930 // 1. the preload segment had parts (and was not yet a full segment)
3931 // but was added to our segments array
3932 // 2. the preload segment had preload hints for parts that are not in
3933 // the manifest yet.
3934 // in all other cases we want the segment after the preload segment
3935 // which will be given by using media.segments.length because it is 1 based
3936 // rather than 0 based.
3937
3938
3939 if (nextPart > -1 || parts.length) {
3940 nextMSN--;
3941 }
3942 } // add _HLS_msn= in front of any _HLS_part query
3943 // eslint-disable-next-line
3944
3945
3946 parameters._HLS_msn = nextMSN;
3947 }
3948
3949 if (media.serverControl && media.serverControl.canSkipUntil) {
3950 // add _HLS_skip= in front of all other queries.
3951 // eslint-disable-next-line
3952 parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
3953 }
3954
3955 if (Object.keys(parameters).length) {
3956 const parsedUri = new window.URL(uri);
3957 ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
3958 if (!parameters.hasOwnProperty(name)) {
3959 return;
3960 }
3961
3962 parsedUri.searchParams.set(name, parameters[name]);
3963 });
3964 uri = parsedUri.toString();
3965 }
3966
3967 return uri;
3968 };
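
  // Illustrative only: the blocking-reload directives above are plain query
  // parameters, so the equivalent built by hand (values here are made up)
  // looks like this. _HLS_msn and _HLS_part ask the server to hold the
  // response until that media sequence number / part exists, and _HLS_skip
  // requests a delta playlist that omits older segments.
  const llhlsDirectivesExample = (uri) => {
    const parsedUri = new window.URL(uri);
    parsedUri.searchParams.set('_HLS_skip', 'YES');
    parsedUri.searchParams.set('_HLS_msn', 103);
    parsedUri.searchParams.set('_HLS_part', 2);
    return parsedUri.toString();
  };
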
3969 /**
3970 * Returns a new segment object with properties and
3971 * the parts array merged.
3972 *
3973 * @param {Object} a the old segment
3974 * @param {Object} b the new segment
3975 *
3976 * @return {Object} the merged segment
3977 */
3978
3979
3980 const updateSegment = (a, b) => {
3981 if (!a) {
3982 return b;
3983 }
3984
3985 const result = merge$1(a, b); // if only the old segment has preload hints
3986 // and the new one does not, remove preload hints.
3987
3988 if (a.preloadHints && !b.preloadHints) {
3989 delete result.preloadHints;
3990 } // if only the old segment has parts
3991 // then the parts are no longer valid
3992
3993
3994 if (a.parts && !b.parts) {
3995 delete result.parts; // if both segments have parts
3996 // copy part properties from the old segment
3997 // to the new one.
3998 } else if (a.parts && b.parts) {
3999 for (let i = 0; i < b.parts.length; i++) {
4000 if (a.parts && a.parts[i]) {
4001 result.parts[i] = merge$1(a.parts[i], b.parts[i]);
4002 }
4003 }
4004 } // set skipped to false for segments that have
4005 // had information merged from the old segment.
4006
4007
4008 if (!a.skipped && b.skipped) {
4009 result.skipped = false;
4010 } // set preload to false for segments that have
4011 // had information added in the new segment.
4012
4013
4014 if (a.preload && !b.preload) {
4015 result.preload = false;
4016 }
4017
4018 return result;
4019 };
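
  // Merge-semantics sketch with hypothetical segments, assuming merge$1
  // deep-merges with the update winning: properties from the new segment take
  // precedence, and parts that existed only on the old segment are dropped.
  const updateSegmentExample = () => {
    const oldSegment = { uri: 's1.ts', duration: 6, parts: [{ duration: 2 }] };
    const newSegment = { uri: 's1.ts', duration: 6.006 };
    // => { uri: 's1.ts', duration: 6.006 } with no parts property
    return updateSegment(oldSegment, newSegment);
  };
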
4020 /**
4021 * Returns a new array of segments that is the result of merging
4022 * properties from an older list of segments onto an updated
4023 * list. No properties on the updated playlist will be overwritten.
4024 *
4025 * @param {Array} original the outdated list of segments
4026 * @param {Array} update the updated list of segments
4027 * @param {number=} offset the index of the first update
4028 * segment in the original segment list. For non-live playlists,
4029 * this should always be zero and does not need to be
4030 * specified. For live playlists, it should be the difference
4031 * between the media sequence numbers in the original and updated
4032 * playlists.
4033 * @return {Array} a list of merged segment objects
4034 */
4035
4036 const updateSegments = (original, update, offset) => {
4037 const oldSegments = original.slice();
4038 const newSegments = update.slice();
4039 offset = offset || 0;
4040 const result = [];
4041 let currentMap;
4042
4043 for (let newIndex = 0; newIndex < newSegments.length; newIndex++) {
4044 const oldSegment = oldSegments[newIndex + offset];
4045 const newSegment = newSegments[newIndex];
4046
4047 if (oldSegment) {
4048 currentMap = oldSegment.map || currentMap;
4049 result.push(updateSegment(oldSegment, newSegment));
4050 } else {
4051 // carry over map to new segment if it is missing
4052 if (currentMap && !newSegment.map) {
4053 newSegment.map = currentMap;
4054 }
4055
4056 result.push(newSegment);
4057 }
4058 }
4059
4060 return result;
4061 };
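
  // Live-window sketch with made-up segments: when the playlist has slid by
  // one media sequence number, an offset of 1 lines the update up with the
  // old list so previously computed properties (timeline here) carry over.
  const updateSegmentsExample = () => {
    const original = [{ uri: 's1.ts', timeline: 0 }, { uri: 's2.ts', timeline: 0 }];
    const update = [{ uri: 's2.ts' }, { uri: 's3.ts' }];
    // => [{ uri: 's2.ts', timeline: 0 }, { uri: 's3.ts' }]
    return updateSegments(original, update, 1);
  };
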
4062 const resolveSegmentUris = (segment, baseUri) => {
4063 // preloadSegment will not have a uri at all
4064 // as the segment isn't actually in the manifest yet, only parts
4065 if (!segment.resolvedUri && segment.uri) {
4066 segment.resolvedUri = resolveUrl(baseUri, segment.uri);
4067 }
4068
4069 if (segment.key && !segment.key.resolvedUri) {
4070 segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
4071 }
4072
4073 if (segment.map && !segment.map.resolvedUri) {
4074 segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
4075 }
4076
4077 if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
4078 segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
4079 }
4080
4081 if (segment.parts && segment.parts.length) {
4082 segment.parts.forEach(p => {
4083 if (p.resolvedUri) {
4084 return;
4085 }
4086
4087 p.resolvedUri = resolveUrl(baseUri, p.uri);
4088 });
4089 }
4090
4091 if (segment.preloadHints && segment.preloadHints.length) {
4092 segment.preloadHints.forEach(p => {
4093 if (p.resolvedUri) {
4094 return;
4095 }
4096
4097 p.resolvedUri = resolveUrl(baseUri, p.uri);
4098 });
4099 }
4100 };
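
  // Resolution sketch (values are hypothetical): relative segment and key
  // URIs become absolute against the media playlist's resolved URI.
  const resolveSegmentUrisExample = () => {
    const segment = { uri: 's1.ts', key: { uri: 'keys/key.bin' } };
    resolveSegmentUris(segment, 'https://example.com/live/media.m3u8');
    // segment.resolvedUri => 'https://example.com/live/s1.ts'
    // segment.key.resolvedUri => 'https://example.com/live/keys/key.bin'
    return segment;
  };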
4101
4102 const getAllSegments = function (media) {
4103 const segments = media.segments || [];
4104 const preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently
4105 // a usable segment, only include a preloadSegment that has
4106 // parts.
4107
4108 if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
4109 // if preloadHints has a MAP that means that the
4110 // init segment is going to change. We cannot use any of the parts
4111 // from this preload segment.
4112 if (preloadSegment.preloadHints) {
4113 for (let i = 0; i < preloadSegment.preloadHints.length; i++) {
4114 if (preloadSegment.preloadHints[i].type === 'MAP') {
4115 return segments;
4116 }
4117 }
4118 } // set the duration for our preload segment to target duration.
4119
4120
4121 preloadSegment.duration = media.targetDuration;
4122 preloadSegment.preload = true;
4123 segments.push(preloadSegment);
4124 }
4125
4126 return segments;
4127 }; // consider the playlist unchanged if the playlist object is the same or
4128 // the number of segments is equal, the media sequence number is unchanged,
4129 // the preload segment is unchanged, and this playlist hasn't become the end of the playlist
4130
4131
4132 const isPlaylistUnchanged = (a, b) => a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;
4133 /**
4134 * Returns a new main playlist that is the result of merging an
4135 * updated media playlist into the original version. If the
4136 * updated media playlist does not match any of the playlist
4137 * entries in the original main playlist, null is returned.
4138 *
4139 * @param {Object} main a parsed main M3U8 object
4140 * @param {Object} media a parsed media M3U8 object
4141 * @return {Object} a new object that represents the original
4142 * main playlist with the updated media playlist merged in, or
4143 * null if the merge produced no change.
4144 */
4145
4146 const updateMain$1 = (main, newMedia, unchangedCheck = isPlaylistUnchanged) => {
4147 const result = merge$1(main, {});
4148 const oldMedia = result.playlists[newMedia.id];
4149
4150 if (!oldMedia) {
4151 return null;
4152 }
4153
4154 if (unchangedCheck(oldMedia, newMedia)) {
4155 return null;
4156 }
4157
4158 newMedia.segments = getAllSegments(newMedia);
4159 const mergedPlaylist = merge$1(oldMedia, newMedia); // always use the new media's preload segment
4160
4161 if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
4162 delete mergedPlaylist.preloadSegment;
4163 } // if the update could overlap existing segment information, merge the two segment lists
4164
4165
4166 if (oldMedia.segments) {
4167 if (newMedia.skip) {
4168 newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
4169 // old properties into the new segments
4170
4171 for (let i = 0; i < newMedia.skip.skippedSegments; i++) {
4172 newMedia.segments.unshift({
4173 skipped: true
4174 });
4175 }
4176 }
4177
4178 mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
4179 } // resolve any segment URIs to prevent us from having to do it later
4180
4181
4182 mergedPlaylist.segments.forEach(segment => {
4183 resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
4184 }); // TODO Right now in the playlists array there are two references to each playlist, one
4185 // that is referenced by index, and one by URI. The index reference may no longer be
4186 // necessary.
4187
4188 for (let i = 0; i < result.playlists.length; i++) {
4189 if (result.playlists[i].id === newMedia.id) {
4190 result.playlists[i] = mergedPlaylist;
4191 }
4192 }
4193
4194 result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility
4195
4196 result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.
4197
4198 forEachMediaGroup$1(main, (properties, mediaType, groupKey, labelKey) => {
4199 if (!properties.playlists) {
4200 return;
4201 }
4202
4203 for (let i = 0; i < properties.playlists.length; i++) {
4204 if (newMedia.id === properties.playlists[i].id) {
4205 properties.playlists[i] = mergedPlaylist;
4206 }
4207 }
4208 });
4209 return result;
4210 };
4211 /**
4212 * Calculates the time to wait before refreshing a live playlist
4213 *
4214 * @param {Object} media
4215 * The current media
4216 * @param {boolean} update
4217 * True if there were any updates from the last refresh, false otherwise
4218 * @return {number}
4219 * The time in ms to wait before refreshing the live playlist
4220 */
4221
4222 const refreshDelay = (media, update) => {
4223 const segments = media.segments || [];
4224 const lastSegment = segments[segments.length - 1];
4225 const lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
4226 const lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
4227
4228 if (update && lastDuration) {
4229 return lastDuration * 1000;
4230 } // if the playlist is unchanged since the last reload or the last segment duration
4231 // cannot be determined, try again after half the target duration
4232
4233
4234 return (media.partTargetDuration || media.targetDuration || 10) * 500;
4235 };
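
  // Cadence sketch: an updated live playlist is refreshed after one
  // last-segment (or last-part) duration; an unchanged one after half the
  // target duration.
  const refreshDelayExample = () => {
    const media = { targetDuration: 6, segments: [{ duration: 6 }] };
    const afterUpdate = refreshDelay(media, true); // => 6000 ms
    const afterNoChange = refreshDelay(media, false); // => 3000 ms
    return { afterUpdate, afterNoChange };
  };
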
4236 /**
4237 * Load a playlist from a remote location
4238 *
4239 * @class PlaylistLoader
4240 * @extends EventTarget
4241 * @param {string|Object} src url or object of manifest
4242 * @param {Object} vhs the VHS plugin instance, which provides xhr and options_
4243 * @param {Object=} options options object, including withCredentials and addDateRangesToTextTrack
4244 */
4245
4246 class PlaylistLoader extends EventTarget$1 {
4247 constructor(src, vhs, options = {}) {
4248 super();
4249
4250 if (!src) {
4251 throw new Error('A non-empty playlist URL or object is required');
4252 }
4253
4254 this.logger_ = logger('PlaylistLoader');
4255 const {
4256 withCredentials = false
4257 } = options;
4258 this.src = src;
4259 this.vhs_ = vhs;
4260 this.withCredentials = withCredentials;
4261 this.addDateRangesToTextTrack_ = options.addDateRangesToTextTrack;
4262 const vhsOptions = vhs.options_;
4263 this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
4264 this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
4265 this.llhls = vhsOptions && vhsOptions.llhls;
4266 this.dateRangesStorage_ = new DateRangesStorage(); // initialize the loader state
4267
4268 this.state = 'HAVE_NOTHING'; // live playlist staleness timeout
4269
4270 this.handleMediaupdatetimeout_ = this.handleMediaupdatetimeout_.bind(this);
4271 this.on('mediaupdatetimeout', this.handleMediaupdatetimeout_);
4272 this.on('loadedplaylist', this.handleLoadedPlaylist_.bind(this));
4273 }
4274
4275 handleLoadedPlaylist_() {
4276 const mediaPlaylist = this.media();
4277
4278 if (!mediaPlaylist) {
4279 return;
4280 }
4281
4282 this.dateRangesStorage_.setOffset(mediaPlaylist.segments);
4283 this.dateRangesStorage_.setPendingDateRanges(mediaPlaylist.dateRanges);
4284 const availableDateRanges = this.dateRangesStorage_.getDateRangesToProcess();
4285
4286 if (!availableDateRanges.length || !this.addDateRangesToTextTrack_) {
4287 return;
4288 }
4289
4290 this.addDateRangesToTextTrack_(availableDateRanges);
4291 }
4292
4293 handleMediaupdatetimeout_() {
4294 if (this.state !== 'HAVE_METADATA') {
4295 // only refresh the media playlist if no other activity is going on
4296 return;
4297 }
4298
4299 const media = this.media();
4300 let uri = resolveUrl(this.main.uri, media.uri);
4301
4302 if (this.llhls) {
4303 uri = addLLHLSQueryDirectives(uri, media);
4304 }
4305
4306 this.state = 'HAVE_CURRENT_METADATA';
4307 this.request = this.vhs_.xhr({
4308 uri,
4309 withCredentials: this.withCredentials
4310 }, (error, req) => {
4311 // disposed
4312 if (!this.request) {
4313 return;
4314 }
4315
4316 if (error) {
4317 return this.playlistRequestError(this.request, this.media(), 'HAVE_METADATA');
4318 }
4319
4320 this.haveMetadata({
4321 playlistString: this.request.responseText,
4322 url: this.media().uri,
4323 id: this.media().id
4324 });
4325 });
4326 }
4327
4328 playlistRequestError(xhr, playlist, startingState) {
4329 const {
4330 uri,
4331 id
4332 } = playlist; // any in-flight request is now finished
4333
4334 this.request = null;
4335
4336 if (startingState) {
4337 this.state = startingState;
4338 }
4339
4340 this.error = {
4341 playlist: this.main.playlists[id],
4342 status: xhr.status,
4343 message: `HLS playlist request error at URL: ${uri}.`,
4344 responseText: xhr.responseText,
4345 code: xhr.status >= 500 ? 4 : 2
4346 };
4347 this.trigger('error');
4348 }
4349
4350 parseManifest_({
4351 url,
4352 manifestString
4353 }) {
4354 return parseManifest({
4355 onwarn: ({
4356 message
4357 }) => this.logger_(`m3u8-parser warn for ${url}: ${message}`),
4358 oninfo: ({
4359 message
4360 }) => this.logger_(`m3u8-parser info for ${url}: ${message}`),
4361 manifestString,
4362 customTagParsers: this.customTagParsers,
4363 customTagMappers: this.customTagMappers,
4364 llhls: this.llhls
4365 });
4366 }
4367 /**
4368 * Update the playlist loader's state in response to a new or updated playlist.
4369 *
4370 * @param {string} [playlistString]
4371 * Playlist string (if playlistObject is not provided)
4372 * @param {Object} [playlistObject]
4373 * Playlist object (if playlistString is not provided)
4374 * @param {string} url
4375 * URL of playlist
4376 * @param {string} id
4377 * ID to use for playlist
4378 */
4379
4380
4381 haveMetadata({
4382 playlistString,
4383 playlistObject,
4384 url,
4385 id
4386 }) {
4387 // any in-flight request is now finished
4388 this.request = null;
4389 this.state = 'HAVE_METADATA';
4390 const playlist = playlistObject || this.parseManifest_({
4391 url,
4392 manifestString: playlistString
4393 });
4394 playlist.lastRequest = Date.now();
4395 setupMediaPlaylist({
4396 playlist,
4397 uri: url,
4398 id
4399 }); // merge this playlist into the main manifest
4400
4401 const update = updateMain$1(this.main, playlist);
4402 this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
4403 this.pendingMedia_ = null;
4404
4405 if (update) {
4406 this.main = update;
4407 this.media_ = this.main.playlists[id];
4408 } else {
4409 this.trigger('playlistunchanged');
4410 }
4411
4412 this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
4413 this.trigger('loadedplaylist');
4414 }
4415 /**
4416 * Abort any outstanding work and clean up.
4417 */
4418
4419
4420 dispose() {
4421 this.trigger('dispose');
4422 this.stopRequest();
4423 window.clearTimeout(this.mediaUpdateTimeout);
4424 window.clearTimeout(this.finalRenditionTimeout);
4425 this.dateRangesStorage_ = new DateRangesStorage();
4426 this.off();
4427 }
4428
4429 stopRequest() {
4430 if (this.request) {
4431 const oldRequest = this.request;
4432 this.request = null;
4433 oldRequest.onreadystatechange = null;
4434 oldRequest.abort();
4435 }
4436 }
4437 /**
4438 * When called without any arguments, returns the currently
4439 * active media playlist. When called with a single argument,
4440 * triggers the playlist loader to asynchronously switch to the
4441 * specified media playlist. Calling this method while the
4442 * loader is in the HAVE_NOTHING state causes an error to be emitted
4443 * but otherwise has no effect.
4444 *
4445 * @param {Object=} playlist the parsed media playlist
4446 * object to switch to
4447 * @param {boolean=} shouldDelay whether we should delay the request by half target duration
4448 *
4449 * @return {Playlist} the currently loaded media
4450 */
4451
4452
4453 media(playlist, shouldDelay) {
4454 // getter
4455 if (!playlist) {
4456 return this.media_;
4457 } // setter
4458
4459
4460 if (this.state === 'HAVE_NOTHING') {
4461 throw new Error('Cannot switch media playlist from ' + this.state);
4462 } // find the playlist object if the target playlist has been
4463 // specified by URI
4464
4465
4466 if (typeof playlist === 'string') {
4467 if (!this.main.playlists[playlist]) {
4468 throw new Error('Unknown playlist URI: ' + playlist);
4469 }
4470
4471 playlist = this.main.playlists[playlist];
4472 }
4473
4474 window.clearTimeout(this.finalRenditionTimeout);
4475
4476 if (shouldDelay) {
4477 const delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
4478 this.finalRenditionTimeout = window.setTimeout(this.media.bind(this, playlist, false), delay);
4479 return;
4480 }
4481
4482 const startingState = this.state;
4483 const mediaChange = !this.media_ || playlist.id !== this.media_.id;
4484 const mainPlaylistRef = this.main.playlists[playlist.id]; // switch to fully loaded playlists immediately
4485
4486 if (mainPlaylistRef && mainPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
4487 // media playlist or, for the case of demuxed audio, a resolved audio media group)
4488 playlist.endList && playlist.segments.length) {
4489 // abort outstanding playlist requests
4490 if (this.request) {
4491 this.request.onreadystatechange = null;
4492 this.request.abort();
4493 this.request = null;
4494 }
4495
4496 this.state = 'HAVE_METADATA';
4497 this.media_ = playlist; // trigger media change if the active media has been updated
4498
4499 if (mediaChange) {
4500 this.trigger('mediachanging');
4501
4502 if (startingState === 'HAVE_MAIN_MANIFEST') {
4503 // The initial playlist was a main manifest, and the first media selected was
4504 // also provided (in the form of a resolved playlist object) as part of the
4505 // source object (rather than just a URL). Therefore, since the media playlist
4506 // doesn't need to be requested, loadedmetadata won't trigger as part of the
4507 // normal flow, and needs an explicit trigger here.
4508 this.trigger('loadedmetadata');
4509 } else {
4510 this.trigger('mediachange');
4511 }
4512 }
4513
4514 return;
4515 } // We update/set the timeout here so that live playlists
4516 // that are not a media change will "start" the loader as expected.
4517 // We expect that this function will start the media update timeout
4518 // cycle again. This also prevents a playlist switch failure from
4519 // causing us to stall during live.
4520
4521
4522 this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op
4523
4524 if (!mediaChange) {
4525 return;
4526 }
4527
4528 this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request
4529
4530 if (this.request) {
4531 if (playlist.resolvedUri === this.request.url) {
4532 // requesting to switch to the same playlist multiple times
4533 // has no effect after the first
4534 return;
4535 }
4536
4537 this.request.onreadystatechange = null;
4538 this.request.abort();
4539 this.request = null;
4540 } // request the new playlist
4541
4542
4543 if (this.media_) {
4544 this.trigger('mediachanging');
4545 }
4546
4547 this.pendingMedia_ = playlist;
4548 this.request = this.vhs_.xhr({
4549 uri: playlist.resolvedUri,
4550 withCredentials: this.withCredentials
4551 }, (error, req) => {
4552 // disposed
4553 if (!this.request) {
4554 return;
4555 }
4556
4557 playlist.lastRequest = Date.now();
4558 playlist.resolvedUri = resolveManifestRedirect(playlist.resolvedUri, req);
4559
4560 if (error) {
4561 return this.playlistRequestError(this.request, playlist, startingState);
4562 }
4563
4564 this.haveMetadata({
4565 playlistString: req.responseText,
4566 url: playlist.uri,
4567 id: playlist.id
4568 }); // fire loadedmetadata the first time a media playlist is loaded
4569
4570 if (startingState === 'HAVE_MAIN_MANIFEST') {
4571 this.trigger('loadedmetadata');
4572 } else {
4573 this.trigger('mediachange');
4574 }
4575 });
4576 }
4577 /**
4578 * pause loading of the playlist
4579 */
4580
4581
4582 pause() {
4583 if (this.mediaUpdateTimeout) {
4584 window.clearTimeout(this.mediaUpdateTimeout);
4585 this.mediaUpdateTimeout = null;
4586 }
4587
4588 this.stopRequest();
4589
4590 if (this.state === 'HAVE_NOTHING') {
4591 // If we pause the loader before any data has been retrieved, it's as if we never
4592 // started, so reset to an unstarted state.
4593 this.started = false;
4594 } // Need to restore state now that no activity is happening
4595
4596
4597 if (this.state === 'SWITCHING_MEDIA') {
4598 // if the loader was in the process of switching media, it should either return to
4599 // HAVE_MAIN_MANIFEST or HAVE_METADATA depending on if the loader has loaded a media
4600 // playlist yet. This is determined by the existence of loader.media_
4601 if (this.media_) {
4602 this.state = 'HAVE_METADATA';
4603 } else {
4604 this.state = 'HAVE_MAIN_MANIFEST';
4605 }
4606 } else if (this.state === 'HAVE_CURRENT_METADATA') {
4607 this.state = 'HAVE_METADATA';
4608 }
4609 }
4610 /**
4611 * start loading of the playlist
4612 */
4613
4614
4615 load(shouldDelay) {
4616 if (this.mediaUpdateTimeout) {
4617 window.clearTimeout(this.mediaUpdateTimeout);
4618 this.mediaUpdateTimeout = null;
4619 }
4620
4621 const media = this.media();
4622
4623 if (shouldDelay) {
4624 const delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
4625 this.mediaUpdateTimeout = window.setTimeout(() => {
4626 this.mediaUpdateTimeout = null;
4627 this.load();
4628 }, delay);
4629 return;
4630 }
4631
4632 if (!this.started) {
4633 this.start();
4634 return;
4635 }
4636
4637 if (media && !media.endList) {
4638 this.trigger('mediaupdatetimeout');
4639 } else {
4640 this.trigger('loadedplaylist');
4641 }
4642 }
4643
4644 updateMediaUpdateTimeout_(delay) {
4645 if (this.mediaUpdateTimeout) {
4646 window.clearTimeout(this.mediaUpdateTimeout);
4647 this.mediaUpdateTimeout = null;
4648 } // we only use the mediaupdatetimeout for live playlists.
4649
4650
4651 if (!this.media() || this.media().endList) {
4652 return;
4653 }
4654
4655 this.mediaUpdateTimeout = window.setTimeout(() => {
4656 this.mediaUpdateTimeout = null;
4657 this.trigger('mediaupdatetimeout');
4658 this.updateMediaUpdateTimeout_(delay);
4659 }, delay);
4660 }
4661 /**
4662 * start loading of the playlist
4663 */
4664
4665
4666 start() {
4667 this.started = true;
4668
4669 if (typeof this.src === 'object') {
4670 // in the case of an entirely constructed manifest object (meaning there's no actual
4671 // manifest on a server), default the uri to the page's href
4672 if (!this.src.uri) {
4673 this.src.uri = window.location.href;
4674 } // resolvedUri is added on internally after the initial request. Since there's no
4675 // request for pre-resolved manifests, add on resolvedUri here.
4676
4677
4678 this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
4679 // request can be skipped (since the top level of the manifest, at a minimum, is
4680 // already available as a parsed manifest object). However, if the manifest object
4681 // represents a main playlist, some media playlists may need to be resolved before
4682 // the starting segment list is available. Therefore, go directly to setup of the
4683 // initial playlist, and let the normal flow continue from there.
4684 //
4685 // Note that the call to setup is asynchronous, as other sections of VHS may assume
4686 // that the first request is asynchronous.
4687
4688 setTimeout(() => {
4689 this.setupInitialPlaylist(this.src);
4690 }, 0);
4691 return;
4692 } // request the specified URL
4693
4694
4695 this.request = this.vhs_.xhr({
4696 uri: this.src,
4697 withCredentials: this.withCredentials
4698 }, (error, req) => {
4699 // disposed
4700 if (!this.request) {
4701 return;
4702 } // clear the loader's request reference
4703
4704
4705 this.request = null;
4706
4707 if (error) {
4708 this.error = {
4709 status: req.status,
4710 message: `HLS playlist request error at URL: ${this.src}.`,
4711 responseText: req.responseText,
4712 // MEDIA_ERR_NETWORK
4713 code: 2
4714 };
4715
4716 if (this.state === 'HAVE_NOTHING') {
4717 this.started = false;
4718 }
4719
4720 return this.trigger('error');
4721 }
4722
4723 this.src = resolveManifestRedirect(this.src, req);
4724 const manifest = this.parseManifest_({
4725 manifestString: req.responseText,
4726 url: this.src
4727 });
4728 this.setupInitialPlaylist(manifest);
4729 });
4730 }
4731
4732 srcUri() {
4733 return typeof this.src === 'string' ? this.src : this.src.uri;
4734 }
4735 /**
4736 * Given a manifest object that's either a main or media playlist, trigger the proper
4737 * events and set the state of the playlist loader.
4738 *
4739 * If the manifest object represents a main playlist, `loadedplaylist` will be
4740 * triggered to allow listeners to select a playlist. If none is selected, the loader
4741 * will default to the first one in the playlists array.
4742 *
4743 * If the manifest object represents a media playlist, `loadedplaylist` will be
4744 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
4745 *
4746 * In the case of a media playlist, a main playlist object wrapper with one playlist
4747 * will be created so that all logic can handle playlists in the same fashion (as an
4748 * assumed manifest object schema).
4749 *
4750 * @param {Object} manifest
4751 * The parsed manifest object
4752 */
4753
4754
4755 setupInitialPlaylist(manifest) {
4756 this.state = 'HAVE_MAIN_MANIFEST';
4757
4758 if (manifest.playlists) {
4759 this.main = manifest;
4760 addPropertiesToMain(this.main, this.srcUri()); // If the initial main playlist has playlists with segments already resolved,
4761 // then resolve segment URIs in advance, since that normally happens after a
4762 // playlist request, which may never occur for an already-resolved playlist.
4763
4764 manifest.playlists.forEach(playlist => {
4765 playlist.segments = getAllSegments(playlist);
4766 playlist.segments.forEach(segment => {
4767 resolveSegmentUris(segment, playlist.resolvedUri);
4768 });
4769 });
4770 this.trigger('loadedplaylist');
4771
4772 if (!this.request) {
4773 // no media playlist was specifically selected so start
4774 // from the first listed one
4775 this.media(this.main.playlists[0]);
4776 }
4777
4778 return;
4779 } // In order to support media playlists passed in as vhs-json, the case where the uri
4780 // is not provided as part of the manifest should be considered, and an appropriate
4781 // default used.
4782
4783
4784 const uri = this.srcUri() || window.location.href;
4785 this.main = mainForMedia(manifest, uri);
4786 this.haveMetadata({
4787 playlistObject: manifest,
4788 url: uri,
4789 id: this.main.playlists[0].id
4790 });
4791 this.trigger('loadedmetadata');
4792 }
4793 /**
4794 * Updates or deletes a preexisting pathway clone.
4795 * Ensures that all playlists related to the old pathway clone are
4796 * either updated or deleted.
4797 *
4798 * @param {Object} clone On update, the pathway clone object for the newly updated pathway clone.
4799 * On delete, the old pathway clone object to be deleted.
4800 * @param {boolean} isUpdate True if the pathway is to be updated,
4801 * false if it is meant to be deleted.
4802 */
4803
4804
4805 updateOrDeleteClone(clone, isUpdate) {
4806 const main = this.main;
4807 const pathway = clone.ID;
4808 let i = main.playlists.length; // Iterate backwards through the playlists so we can remove them if necessary.
4809
4810 while (i--) {
4811 const p = main.playlists[i];
4812
4813 if (p.attributes['PATHWAY-ID'] === pathway) {
4814 const oldPlaylistUri = p.resolvedUri;
4815 const oldPlaylistId = p.id; // update the indexed playlist and add new playlists by ID and URI
4816
4817 if (isUpdate) {
4818 const newPlaylistUri = this.createCloneURI_(p.resolvedUri, clone);
4819 const newPlaylistId = createPlaylistID(pathway, newPlaylistUri);
4820 const attributes = this.createCloneAttributes_(pathway, p.attributes);
4821 const updatedPlaylist = this.createClonePlaylist_(p, newPlaylistId, clone, attributes);
4822 main.playlists[i] = updatedPlaylist;
4823 main.playlists[newPlaylistId] = updatedPlaylist;
4824 main.playlists[newPlaylistUri] = updatedPlaylist;
4825 } else {
4826 // Remove the indexed playlist.
4827 main.playlists.splice(i, 1);
4828 } // Remove playlists by the old ID and URI.
4829
4830
4831 delete main.playlists[oldPlaylistId];
4832 delete main.playlists[oldPlaylistUri];
4833 }
4834 }
4835
4836 this.updateOrDeleteCloneMedia(clone, isUpdate);
4837 }
4838 /**
4839 * Updates or deletes media data based on the pathway clone object.
4840 * Due to the complexity of the media groups and playlists, in all cases
4841 * we remove all of the old media groups and playlists.
4842 * On updates, we then create new media groups and playlists based on the
4843 * new pathway clone object.
4844 *
4845 * @param {Object} clone The pathway clone object for the newly updated pathway clone.
4846 * @param {boolean} isUpdate True if the pathway is to be updated,
4847 * false if it is meant to be deleted.
4848 */
4849
4850
4851 updateOrDeleteCloneMedia(clone, isUpdate) {
4852 const main = this.main;
4853 const id = clone.ID;
4854 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(mediaType => {
4855 if (!main.mediaGroups[mediaType] || !main.mediaGroups[mediaType][id]) {
4856 return;
4857 }
4858
4859 for (const groupKey in main.mediaGroups[mediaType]) {
4860 // Remove all media playlists for the media group for this pathway clone.
4861 if (groupKey === id) {
4862 for (const labelKey in main.mediaGroups[mediaType][groupKey]) {
4863 const oldMedia = main.mediaGroups[mediaType][groupKey][labelKey];
4864 oldMedia.playlists.forEach((p, i) => {
4865 const oldMediaPlaylist = main.playlists[p.id];
4866 const oldPlaylistId = oldMediaPlaylist.id;
4867 const oldPlaylistUri = oldMediaPlaylist.resolvedUri;
4868 delete main.playlists[oldPlaylistId];
4869 delete main.playlists[oldPlaylistUri];
4870 });
4871 } // Delete the old media group.
4872
4873
4874 delete main.mediaGroups[mediaType][groupKey];
4875 }
4876 }
4877 }); // Create the new media groups and playlists if there is an update.
4878
4879 if (isUpdate) {
4880 this.createClonedMediaGroups_(clone);
4881 }
4882 }
4883 /**
4884 * Given a pathway clone object, clones all necessary playlists.
4885 *
4886 * @param {Object} clone The pathway clone object.
4887 * @param {Object} basePlaylist The original playlist to clone from.
4888 */
4889
4890
4891 addClonePathway(clone, basePlaylist = {}) {
4892 const main = this.main;
4893 const index = main.playlists.length;
4894 const uri = this.createCloneURI_(basePlaylist.resolvedUri, clone);
4895 const playlistId = createPlaylistID(clone.ID, uri);
4896 const attributes = this.createCloneAttributes_(clone.ID, basePlaylist.attributes);
4897 const playlist = this.createClonePlaylist_(basePlaylist, playlistId, clone, attributes);
4898 main.playlists[index] = playlist; // add playlist by ID and URI
4899
4900 main.playlists[playlistId] = playlist;
4901 main.playlists[uri] = playlist;
4902 this.createClonedMediaGroups_(clone);
4903 }
4904 /**
4905 * Given a pathway clone object, we create clones of all media.
4906 * In this function, all necessary information and updated playlists
4907 * are added to the `mediaGroup` object.
4908 * Playlists are also added to the `playlists` array so the media groups
4909 * will be properly linked.
4910 *
4911 * @param {Object} clone The pathway clone object.
4912 */
4913
4914
4915 createClonedMediaGroups_(clone) {
4916 const id = clone.ID;
4917 const baseID = clone['BASE-ID'];
4918 const main = this.main;
4919 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(mediaType => {
4920 // If the media type doesn't exist, or there is already a clone, skip
4921 // to the next media type.
4922 if (!main.mediaGroups[mediaType] || main.mediaGroups[mediaType][id]) {
4923 return;
4924 }
4925
4926 for (const groupKey in main.mediaGroups[mediaType]) {
4927 if (groupKey === baseID) {
4928 // Create the group.
4929 main.mediaGroups[mediaType][id] = {};
4930 } else {
4931 // There is no need to iterate over label keys in this case.
4932 continue;
4933 }
4934
4935 for (const labelKey in main.mediaGroups[mediaType][groupKey]) {
4936 const oldMedia = main.mediaGroups[mediaType][groupKey][labelKey];
4937 main.mediaGroups[mediaType][id][labelKey] = _extends({}, oldMedia);
4938 const newMedia = main.mediaGroups[mediaType][id][labelKey]; // update URIs on the media
4939
4940 const newUri = this.createCloneURI_(oldMedia.resolvedUri, clone);
4941 newMedia.resolvedUri = newUri;
4942 newMedia.uri = newUri; // Reset playlists in the new media group.
4943
4944 newMedia.playlists = []; // Create new playlists in the newly cloned media group.
4945
4946 oldMedia.playlists.forEach((p, i) => {
4947 const oldMediaPlaylist = main.playlists[p.id];
4948 const group = groupID(mediaType, id, labelKey);
4949 const newPlaylistID = createPlaylistID(id, group); // Check to see if it already exists
4950
4951 if (oldMediaPlaylist && !main.playlists[newPlaylistID]) {
4952 const newMediaPlaylist = this.createClonePlaylist_(oldMediaPlaylist, newPlaylistID, clone);
4953 const newPlaylistUri = newMediaPlaylist.resolvedUri;
4954 main.playlists[newPlaylistID] = newMediaPlaylist;
4955 main.playlists[newPlaylistUri] = newMediaPlaylist;
4956 }
4957
4958 newMedia.playlists[i] = this.createClonePlaylist_(p, newPlaylistID, clone);
4959 });
4960 }
4961 }
4962 });
4963 }
4964 /**
4965 * Using the original playlist to be cloned, and the pathway clone object
4966 * information, we create a new playlist.
4967 *
4968 * @param {Object} basePlaylist The original playlist to be cloned from.
4969 * @param {string} id The desired id of the newly cloned playlist.
4970 * @param {Object} clone The pathway clone object.
4971 * @param {Object} attributes An optional object to populate the `attributes` property in the playlist.
4972 *
4973 * @return {Object} The combined cloned playlist.
4974 */
4975
4976
4977 createClonePlaylist_(basePlaylist, id, clone, attributes) {
4978 const uri = this.createCloneURI_(basePlaylist.resolvedUri, clone);
4979 const newProps = {
4980 resolvedUri: uri,
4981 uri,
4982 id
4983 }; // Remove all segments from previous playlist in the clone.
4984
4985 if (basePlaylist.segments) {
4986 newProps.segments = [];
4987 }
4988
4989 if (attributes) {
4990 newProps.attributes = attributes;
4991 }
4992
4993 return merge$1(basePlaylist, newProps);
4994 }
4995 /**
4996 * Generates an updated URI for a cloned pathway based on the original
4997 * pathway's URI and the parameters from the pathway clone object in the
4998 * content steering server response.
4999 *
5000 * @param {string} baseURI URI to be updated in the cloned pathway.
5001 * @param {Object} clone The pathway clone object.
5002 *
5003 * @return {string} The updated URI for the cloned pathway.
5004 */
5005
5006
5007 createCloneURI_(baseURI, clone) {
5008 const uri = new URL(baseURI);
5009 uri.hostname = clone['URI-REPLACEMENT'].HOST;
5010 const params = clone['URI-REPLACEMENT'].PARAMS; // Add params to the cloned URL.
5011
5012 for (const key of Object.keys(params)) {
5013 uri.searchParams.set(key, params[key]);
5014 }
5015
5016 return uri.href;
5017 }
5018 /**
5019 * Helper function to create the attributes needed for the new clone.
5020 * This mainly adds the necessary media attributes.
5021 *
5022 * @param {string} id The pathway clone object ID.
5023 * @param {Object} oldAttributes The old attributes to compare to.
5024 * @return {Object} The new attributes to add to the playlist.
5025 */
5026
5027
5028 createCloneAttributes_(id, oldAttributes) {
5029 const attributes = {
5030 ['PATHWAY-ID']: id
5031 };
5032 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(mediaType => {
5033 if (oldAttributes[mediaType]) {
5034 attributes[mediaType] = id;
5035 }
5036 });
5037 return attributes;
5038 }
5039 /**
5040 * Returns the key ID set from a playlist
5041 *
5042 * @param {Object} playlist the playlist to fetch the key ID set from.
5043 * @return {Set} a Set of 32-digit hex strings that represent the unique keyIds for that playlist.
5044 */
5045
5046
5047 getKeyIdSet(playlist) {
5048 if (playlist.contentProtection) {
5049 const keyIds = new Set();
5050
5051 for (const keysystem in playlist.contentProtection) {
5052 const keyId = playlist.contentProtection[keysystem].attributes.keyId;
5053
5054 if (keyId) {
5055 keyIds.add(keyId.toLowerCase());
5056 }
5057 }
5058
5059 return keyIds;
5060 }
5061 }
5062
5063 }
5064
5065 /**
5066 * @file xhr.js
5067 */
5068 const {
5069 xhr: videojsXHR
5070 } = videojs__default["default"];
5071
5072 const callbackWrapper = function (request, error, response, callback) {
5073 const reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
5074
5075 if (!error && reqResponse) {
5076 request.responseTime = Date.now();
5077 request.roundTripTime = request.responseTime - request.requestTime;
5078 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
5079
5080 if (!request.bandwidth) {
5081 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
5082 }
5083 }
5084
5085 if (response.headers) {
5086 request.responseHeaders = response.headers;
5087 } // videojs.xhr now uses a specific code on the error
5088 // object to signal that a request has timed out instead
5089 // of setting a boolean on the request object
5090
5091
5092 if (error && error.code === 'ETIMEDOUT') {
5093 request.timedout = true;
5094 } // videojs.xhr no longer considers status codes outside of 200 and 0
5095 // (for file uris) to be errors, but the old XHR did, so emulate that
5096 // behavior. Status 206 may be used in response to byterange requests.
5097
5098
5099 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
5100 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
5101 }
5102
5103 callback(error, request);
5104 };
5105 /**
5106 * Iterates over the request hooks Set and calls them in order
5107 *
5108 * @param {Set} requestSet the hook Set to iterate over
5109 * @param {Object} options the request options to pass to the xhr wrapper
5110 * @return {Object} the modified or new options Object returned by the last hook
5111 */
5112
5113
5114 const callAllRequestHooks = (requestSet, options) => {
5115 if (!requestSet || !requestSet.size) {
5116 return;
5117 }
5118
5119 let newOptions = options;
5120 requestSet.forEach(requestCallback => {
5121 newOptions = requestCallback(newOptions);
5122 });
5123 return newOptions;
5124 };
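
  // Hook-chaining sketch (the hook functions here are made up): each
  // onRequest hook receives the options returned by the previous one, so
  // hooks can layer changes onto the request.
  const requestHooksExample = () => {
    const hooks = new Set([
      (options) => _extends({}, options, { headers: { 'X-Example': '1' } }),
      (options) => _extends({}, options, { timeout: 10000 })
    ]);
    // => { uri: '...', headers: { 'X-Example': '1' }, timeout: 10000 }
    return callAllRequestHooks(hooks, { uri: 'https://example.com/main.m3u8' });
  };
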
5125 /**
5126 * Iterates over the response hooks Set and calls them in order.
5127 *
5128 * @param {Set} responseSet the hook Set to iterate over
5129 * @param {Object} request the xhr request object
5130 * @param {Object} error the xhr error object
5131 * @param {Object} response the xhr response object
5132 */
5133
5134
5135 const callAllResponseHooks = (responseSet, request, error, response) => {
5136 if (!responseSet || !responseSet.size) {
5137 return;
5138 }
5139
5140 responseSet.forEach(responseCallback => {
5141 responseCallback(request, error, response);
5142 });
5143 };
5144
5145 const xhrFactory = function () {
5146 const xhr = function XhrFunction(options, callback) {
5147 // Add a default timeout
5148 options = merge$1({
5149 timeout: 45e3
5150 }, options); // Allow an optional user-specified function to modify the option
5151 // object before we construct the xhr request
5152 // TODO: Remove beforeRequest in the next major release.
5153
5154 const beforeRequest = XhrFunction.beforeRequest || videojs__default["default"].Vhs.xhr.beforeRequest; // onRequest and onResponse hooks as a Set, at either the player or global level.
5155 // TODO: new Set added here for beforeRequest alias. Remove this when beforeRequest is removed.
5156
5157 const _requestCallbackSet = XhrFunction._requestCallbackSet || videojs__default["default"].Vhs.xhr._requestCallbackSet || new Set();
5158
5159 const _responseCallbackSet = XhrFunction._responseCallbackSet || videojs__default["default"].Vhs.xhr._responseCallbackSet;
5160
5161 if (beforeRequest && typeof beforeRequest === 'function') {
5162 videojs__default["default"].log.warn('beforeRequest is deprecated, use onRequest instead.');
5163
5164 _requestCallbackSet.add(beforeRequest);
5165 } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
5166 // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
5167
5168
5169 const xhrMethod = videojs__default["default"].Vhs.xhr.original === true ? videojsXHR : videojs__default["default"].Vhs.xhr; // call all registered onRequest hooks, assign new options.
5170
5171 const beforeRequestOptions = callAllRequestHooks(_requestCallbackSet, options); // Remove the beforeRequest function from the hooks set so stale beforeRequest functions are not called.
5172
5173 _requestCallbackSet.delete(beforeRequest); // xhrMethod will call XMLHttpRequest.open and XMLHttpRequest.send
5174
5175
5176 const request = xhrMethod(beforeRequestOptions || options, function (error, response) {
5177 // call all registered onResponse hooks
5178 callAllResponseHooks(_responseCallbackSet, request, error, response);
5179 return callbackWrapper(request, error, response, callback);
5180 });
5181 const originalAbort = request.abort;
5182
5183 request.abort = function () {
5184 request.aborted = true;
5185 return originalAbort.apply(request, arguments);
5186 };
5187
5188 request.uri = options.uri;
5189 request.requestTime = Date.now();
5190 return request;
5191 };
5192
5193 xhr.original = true;
5194 return xhr;
5195 };
5196 /**
5197 * Turns segment byterange into a string suitable for use in
5198 * HTTP Range requests
5199 *
5200 * @param {Object} byterange - an object with two values defining the start and end
5201 * of a byte-range
5202 */
5203
5204
5205 const byterangeStr = function (byterange) {
5206 // `byterangeEnd` is one less than `offset + length` because the HTTP range
5207 // header uses inclusive ranges
5208 let byterangeEnd;
5209 const byterangeStart = byterange.offset;
5210
5211 if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
5212 byterangeEnd = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);
5213 } else {
5214 byterangeEnd = byterange.offset + byterange.length - 1;
5215 }
5216
5217 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
5218 };
5219 /**
5220 * Defines headers for use in the xhr request for a particular segment.
5221 *
5222 * @param {Object} segment - a simplified copy of the segmentInfo object
5223 * from SegmentLoader
5224 */
5225
5226 const segmentXhrHeaders = function (segment) {
5227 const headers = {};
5228
5229 if (segment.byterange) {
5230 headers.Range = byterangeStr(segment.byterange);
5231 }
5232
5233 return headers;
5234 };
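
  // Byterange sketch: HTTP Range headers are inclusive, so a 1000-byte
  // segment starting at offset 0 ends at byte 999.
  const segmentXhrHeadersExample = () => {
    // => { Range: 'bytes=0-999' }
    return segmentXhrHeaders({ byterange: { offset: 0, length: 1000 } });
  };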
5235
5236 var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
5237 var DASH_REGEX = /^application\/dash\+xml/i;
5238 /**
5239 * Returns a string that describes the type of source based on a video source object's
5240 * media type.
5241 *
5242 * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
5243 *
5244 * @param {string} type
5245 * Video source object media type
5246 * @return {('hls'|'dash'|'vhs-json'|null)}
5247 * VHS source type string
5248 */
5249
5250 var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
5251 if (MPEGURL_REGEX.test(type)) {
5252 return 'hls';
5253 }
5254
5255 if (DASH_REGEX.test(type)) {
5256 return 'dash';
5257 } // Denotes the special case of a manifest object passed to http-streaming instead of a
5258 // source URL.
5259 //
5260 // See https://en.wikipedia.org/wiki/Media_type for details on specifying media types.
5261 //
5262 // In this case, vnd stands for vendor, video.js for the organization, VHS for this
5263 // project, and the +json suffix identifies the structure of the media type.
5264
5265
5266 if (type === 'application/vnd.videojs.vhs+json') {
5267 return 'vhs-json';
5268 }
5269
5270 return null;
5271 };
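
  // Mapping sketch for the media types above:
  const simpleTypeExamples = () => [
    simpleTypeFromSourceType('application/x-mpegURL'), // => 'hls'
    simpleTypeFromSourceType('application/dash+xml'), // => 'dash'
    simpleTypeFromSourceType('application/vnd.videojs.vhs+json'), // => 'vhs-json'
    simpleTypeFromSourceType('video/mp4') // => null
  ];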
5272
5273 // const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
5274 // we used to do this with log2 but BigInt does not support builtin math
5275 // Math.ceil(log2(x));
5276
5277
5278 var countBits = function countBits(x) {
5279 return x.toString(2).length;
5280 }; // count the number of whole bytes it would take to represent a number
5281
5282 var countBytes = function countBytes(x) {
5283 return Math.ceil(countBits(x) / 8);
5284 };
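
  // e.g. 255 needs 8 bits (one byte) while 256 needs 9 bits (two bytes):
  var countExamples = function countExamples() {
    return [countBits(255), countBytes(255), countBytes(256)]; // => [8, 1, 2]
  };
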
5285 var isArrayBufferView = function isArrayBufferView(obj) {
5286 if (typeof ArrayBuffer.isView === 'function') {
5287 return ArrayBuffer.isView(obj);
5288 }
5289
5290 return obj && obj.buffer instanceof ArrayBuffer;
5291 };
5292 var isTypedArray = function isTypedArray(obj) {
5293 return isArrayBufferView(obj);
5294 };
5295 var toUint8 = function toUint8(bytes) {
5296 if (bytes instanceof Uint8Array) {
5297 return bytes;
5298 }
5299
5300 if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
5301 // any non-number or NaN leads to empty uint8array
5302 // eslint-disable-next-line
5303 if (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {
5304 bytes = 0;
5305 } else {
5306 bytes = [bytes];
5307 }
5308 }
5309
5310 return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);
5311 };
5312 var BigInt = window.BigInt || Number;
5313 var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
5314 (function () {
5315 var a = new Uint16Array([0xFFCC]);
5316 var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
5317
5318 if (b[0] === 0xFF) {
5319 return 'big';
5320 }
5321
5322 if (b[0] === 0xCC) {
5323 return 'little';
5324 }
5325
5326 return 'unknown';
5327 })();
5328 var bytesToNumber = function bytesToNumber(bytes, _temp) {
5329 var _ref = _temp === void 0 ? {} : _temp,
5330 _ref$signed = _ref.signed,
5331 signed = _ref$signed === void 0 ? false : _ref$signed,
5332 _ref$le = _ref.le,
5333 le = _ref$le === void 0 ? false : _ref$le;
5334
5335 bytes = toUint8(bytes);
5336 var fn = le ? 'reduce' : 'reduceRight';
5337 var obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
5338 var number = obj.call(bytes, function (total, byte, i) {
5339 var exponent = le ? i : Math.abs(i + 1 - bytes.length);
5340 return total + BigInt(byte) * BYTE_TABLE[exponent];
5341 }, BigInt(0));
5342
5343 if (signed) {
5344 var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
5345 number = BigInt(number);
5346
5347 if (number > max) {
5348 number -= max;
5349 number -= max;
5350 number -= BigInt(2);
5351 }
5352 }
5353
5354 return Number(number);
5355 };
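
  // Endianness/sign sketch: big endian by default, with a two's-complement
  // interpretation when signed is set.
  var bytesToNumberExamples = function bytesToNumberExamples() {
    return [
      bytesToNumber([0x01, 0x00]), // => 256
      bytesToNumber([0x01, 0x00], { le: true }), // => 1
      bytesToNumber([0xFF], { signed: true }) // => -1
    ];
  };
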
5356 var numberToBytes = function numberToBytes(number, _temp2) {
5357 var _ref2 = _temp2 === void 0 ? {} : _temp2,
5358 _ref2$le = _ref2.le,
5359 le = _ref2$le === void 0 ? false : _ref2$le; // eslint-disable-next-line
5360
5361
5362 if (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {
5363 number = 0;
5364 }
5365
5366 number = BigInt(number);
5367 var byteCount = countBytes(number);
5368 var bytes = new Uint8Array(new ArrayBuffer(byteCount));
5369
5370 for (var i = 0; i < byteCount; i++) {
5371 var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
5372 bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));
5373
5374 if (number < 0) {
5375 bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
5376 bytes[byteIndex] -= i === 0 ? 1 : 2;
5377 }
5378 }
5379
5380 return bytes;
5381 };
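
  // Inverse sketch: numbers become big endian bytes unless le is set.
  var numberToBytesExamples = function numberToBytesExamples() {
    return [
      numberToBytes(256), // => Uint8Array [0x01, 0x00]
      numberToBytes(256, { le: true }) // => Uint8Array [0x00, 0x01]
    ];
  };
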
5382 var stringToBytes = function stringToBytes(string, stringIsBytes) {
5383 if (typeof string !== 'string' && string && typeof string.toString === 'function') {
5384 string = string.toString();
5385 }
5386
5387 if (typeof string !== 'string') {
5388 return new Uint8Array();
5389 } // If the string already is bytes, we don't have to do this
5390 // otherwise we do this so that we split multi length characters
5391 // into individual bytes
5392
5393
5394 if (!stringIsBytes) {
5395 string = unescape(encodeURIComponent(string));
5396 }
5397
5398 var view = new Uint8Array(string.length);
5399
5400 for (var i = 0; i < string.length; i++) {
5401 view[i] = string.charCodeAt(i);
5402 }
5403
5404 return view;
5405 };
5406 var concatTypedArrays = function concatTypedArrays() {
5407 for (var _len = arguments.length, buffers = new Array(_len), _key = 0; _key < _len; _key++) {
5408 buffers[_key] = arguments[_key];
5409 }
5410
5411 buffers = buffers.filter(function (b) {
5412 return b && (b.byteLength || b.length) && typeof b !== 'string';
5413 });
5414
5415 if (buffers.length <= 1) {
5416 // for 0 length we will return empty uint8
5417 // for 1 length we return the first uint8
5418 return toUint8(buffers[0]);
5419 }
5420
5421 var totalLen = buffers.reduce(function (total, buf, i) {
5422 return total + (buf.byteLength || buf.length);
5423 }, 0);
5424 var tempBuffer = new Uint8Array(totalLen);
5425 var offset = 0;
5426 buffers.forEach(function (buf) {
5427 buf = toUint8(buf);
5428 tempBuffer.set(buf, offset);
5429 offset += buf.byteLength;
5430 });
5431 return tempBuffer;
5432 };
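
  // e.g. mixed inputs are normalized to Uint8Array and joined in order:
  var concatExample = function concatExample() {
    return concatTypedArrays(new Uint8Array([1]), [2, 3]); // => Uint8Array [1, 2, 3]
  };
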
5433 /**
5434 * Check if the bytes "b" are contained within bytes "a".
5435 *
5436 * @param {Uint8Array|Array} a
5437 * Bytes to check in
5438 *
5439 * @param {Uint8Array|Array} b
5440 * Bytes to check for
5441 *
5442 * @param {Object} options
5443 * options
5444 *
5445 * @param {number} [offset=0]
5446 * offset to use when looking at bytes in a
5447 *
5448 * @param {Array|Uint8Array} [mask=[]]
5449 * mask to use on bytes before comparison.
5450 *
5451 * @return {boolean}
5452 * If all bytes in b are inside of a, taking into account
5453 * bit masks.
5454 */
5455
5456 var bytesMatch = function bytesMatch(a, b, _temp3) {
5457 var _ref3 = _temp3 === void 0 ? {} : _temp3,
5458 _ref3$offset = _ref3.offset,
5459 offset = _ref3$offset === void 0 ? 0 : _ref3$offset,
5460 _ref3$mask = _ref3.mask,
5461 mask = _ref3$mask === void 0 ? [] : _ref3$mask;
5462
5463 a = toUint8(a);
5464 b = toUint8(b); // ie 11 does not support uint8 every
5465
5466 var fn = b.every ? b.every : Array.prototype.every;
5467 return b.length && a.length - offset >= b.length && // ie 11 doesn't support every on uint8
5468 fn.call(b, function (bByte, i) {
5469 var aByte = mask[i] ? mask[i] & a[offset + i] : a[offset + i];
5470 return bByte === aByte;
5471 });
5472 };
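
  // Signature-matching sketch: look for the bytes of 'ID3' starting one byte
  // into a buffer, with no mask.
  var bytesMatchExample = function bytesMatchExample() {
    var a = toUint8([0x00, 0x49, 0x44, 0x33]);
    return bytesMatch(a, [0x49, 0x44, 0x33], { offset: 1 }); // => true
  };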
5473
5474 /**
5475 * @file bin-utils.js
5476 */
5477
5478 /**
5479 * convert a TimeRange to text
5480 *
5481 * @param {TimeRange} range the timerange to use for conversion
5482 * @param {number} i the iterator on the range to convert
5483 * @return {string} the range in string format
5484 */
5485
5486 const textRange = function (range, i) {
5487 return range.start(i) + '-' + range.end(i);
5488 };
5489 /**
5490 * format a number as hex string
5491 *
5492 * @param {number} e The number
5493 * @param {number} i the iterator
5494 * @return {string} the hex formatted number as a string
5495 */
5496
5497
5498 const formatHexString = function (e, i) {
5499 const value = e.toString(16);
5500 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
5501 };
5502
5503 const formatAsciiString = function (e) {
5504 if (e >= 0x20 && e < 0x7e) {
5505 return String.fromCharCode(e);
5506 }
5507
5508 return '.';
5509 };
5510 /**
5511 * Creates an object for sending to a web worker modifying properties that are TypedArrays
5512    * into a new object with separated properties for the buffer, byteOffset, and byteLength.
5513 *
5514 * @param {Object} message
5515 * Object of properties and values to send to the web worker
5516 * @return {Object}
5517 * Modified message with TypedArray values expanded
5518 * @function createTransferableMessage
5519 */
5520
5521
5522 const createTransferableMessage = function (message) {
5523 const transferable = {};
5524 Object.keys(message).forEach(key => {
5525 const value = message[key];
5526
5527 if (isArrayBufferView(value)) {
5528 transferable[key] = {
5529 bytes: value.buffer,
5530 byteOffset: value.byteOffset,
5531 byteLength: value.byteLength
5532 };
5533 } else {
5534 transferable[key] = value;
5535 }
5536 });
5537 return transferable;
5538 };
5539 /**
5540 * Returns a unique string identifier for a media initialization
5541 * segment.
5542 *
5543 * @param {Object} initSegment
5544 * the init segment object.
5545 *
5546 * @return {string} the generated init segment id
5547 */
5548
5549 const initSegmentId = function (initSegment) {
5550 const byterange = initSegment.byterange || {
5551 length: Infinity,
5552 offset: 0
5553 };
5554 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
5555 };
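  // Illustrative sketch (hypothetical URIs): the id is just the byterange joined
  // with the resolved URI, so an init segment without a byterange defaults to the
  // whole file.
  //
  //   initSegmentId({ resolvedUri: 'https://example.com/init.mp4' });
  //   // -> "Infinity,0,https://example.com/init.mp4"
  //   initSegmentId({ resolvedUri: 'init.mp4', byterange: { length: 720, offset: 0 } });
  //   // -> "720,0,init.mp4"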
5556 /**
5557 * Returns a unique string identifier for a media segment key.
5558 *
5559 * @param {Object} key the encryption key
5560 * @return {string} the unique id for the media segment key.
5561 */
5562
5563 const segmentKeyId = function (key) {
5564 return key.resolvedUri;
5565 };
5566 /**
5567 * utils to help dump binary data to the console
5568 *
5569 * @param {Array|TypedArray} data
5570 * data to dump to a string
5571 *
5572 * @return {string} the data as a hex string.
5573 */
5574
5575 const hexDump = data => {
5576 const bytes = Array.prototype.slice.call(data);
5577 const step = 16;
5578 let result = '';
5579 let hex;
5580 let ascii;
5581
5582 for (let j = 0; j < bytes.length / step; j++) {
5583 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
5584 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
5585 result += hex + ' ' + ascii + '\n';
5586 }
5587
5588 return result;
5589 };
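  // Illustrative sketch: each 16-byte row is rendered as hex pairs followed by an
  // ASCII gutter, with non-printable bytes shown as '.'.
  //
  //   hexDump(new Uint8Array([0x49, 0x44, 0x33, 0x04]));
  //   // -> "4944 3304  ID3.\n"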
5590 const tagDump = ({
5591 bytes
5592 }) => hexDump(bytes);
5593 const textRanges = ranges => {
5594 let result = '';
5595 let i;
5596
5597 for (i = 0; i < ranges.length; i++) {
5598 result += textRange(ranges, i) + ' ';
5599 }
5600
5601 return result;
5602 };
5603
5604 var utils = /*#__PURE__*/Object.freeze({
5605 __proto__: null,
5606 createTransferableMessage: createTransferableMessage,
5607 initSegmentId: initSegmentId,
5608 segmentKeyId: segmentKeyId,
5609 hexDump: hexDump,
5610 tagDump: tagDump,
5611 textRanges: textRanges
5612 });
5613
5614 // TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
5615 // 25% was arbitrarily chosen, and may need to be refined over time.
5616
5617 const SEGMENT_END_FUDGE_PERCENT = 0.25;
5618 /**
5619 * Converts a player time (any time that can be gotten/set from player.currentTime(),
5620 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
5621 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
5622 *
5623 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
5624 * point" (a point where we have a mapping from program time to player time, with player
5625 * time being the post transmux start of the segment).
5626 *
5627 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
5628 *
5629 * @param {number} playerTime the player time
5630 * @param {Object} segment the segment which contains the player time
5631 * @return {Date} program time
5632 */
5633
5634 const playerTimeToProgramTime = (playerTime, segment) => {
5635 if (!segment.dateTimeObject) {
5636 // Can't convert without an "anchor point" for the program time (i.e., a time that can
5637 // be used to map the start of a segment with a real world time).
5638 return null;
5639 }
5640
5641 const transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
5642      const transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // the segment's own content starts after any old content the transmuxer prepended
5643
5644 const startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
5645 const offsetFromSegmentStart = playerTime - startOfSegment;
5646 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
5647 };
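  // Worked example (hypothetical timing values): the anchor is the segment's
  // EXT-X-PROGRAM-DATE-TIME, offset by where the player time falls in the segment.
  //
  //   playerTimeToProgramTime(12.5, {
  //     dateTimeObject: new Date('2024-05-01T10:00:00.000Z'),
  //     videoTimingInfo: { transmuxedPresentationStart: 10, transmuxerPrependedSeconds: 0.5 }
  //   });
  //   // startOfSegment = 10 + 0.5 = 10.5; offset = 12.5 - 10.5 = 2 seconds
  //   // -> a Date for 2024-05-01T10:00:02.000Z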
5648 const originalSegmentVideoDuration = videoTimingInfo => {
5649 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
5650 };
5651 /**
5652 * Finds a segment that contains the time requested given as an ISO-8601 string. The
5653 * returned segment might be an estimate or an accurate match.
5654 *
5655 * @param {string} programTime The ISO-8601 programTime to find a match for
5656 * @param {Object} playlist A playlist object to search within
5657 */
5658
5659 const findSegmentForProgramTime = (programTime, playlist) => {
5660 // Assumptions:
5661 // - verifyProgramDateTimeTags has already been run
5662 // - live streams have been started
5663 let dateTimeObject;
5664
5665 try {
5666 dateTimeObject = new Date(programTime);
5667 } catch (e) {
5668 return null;
5669 }
5670
5671 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
5672 return null;
5673 }
5674
5675 let segment = playlist.segments[0];
5676
5677 if (dateTimeObject < new Date(segment.dateTimeObject)) {
5678 // Requested time is before stream start.
5679 return null;
5680 }
5681
5682 for (let i = 0; i < playlist.segments.length - 1; i++) {
5683 segment = playlist.segments[i];
5684 const nextSegmentStart = new Date(playlist.segments[i + 1].dateTimeObject);
5685
5686 if (dateTimeObject < nextSegmentStart) {
5687 break;
5688 }
5689 }
5690
5691 const lastSegment = playlist.segments[playlist.segments.length - 1];
5692 const lastSegmentStart = lastSegment.dateTimeObject;
5693 const lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
5694 const lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
5695
5696 if (dateTimeObject > lastSegmentEnd) {
5697 // Beyond the end of the stream, or our best guess of the end of the stream.
5698 return null;
5699 }
5700
5701 if (dateTimeObject > new Date(lastSegmentStart)) {
5702 segment = lastSegment;
5703 }
5704
5705 return {
5706 segment,
5707 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
5708      // Although the selected segment should be correct, given that all segments have
5709      // accurate date time objects, the segment's "player time" (the start time in the
5710      // player) can't be considered accurate unless the video has been transmuxed at
5711      // some point (determined by the presence of the videoTimingInfo object).
5712 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
5713 };
5714 };
5715 /**
5716 * Finds a segment that contains the given player time(in seconds).
5717 *
5718 * @param {number} time The player time to find a match for
5719 * @param {Object} playlist A playlist object to search within
5720 */
5721
5722 const findSegmentForPlayerTime = (time, playlist) => {
5723 // Assumptions:
5724 // - there will always be a segment.duration
5725 // - we can start from zero
5726 // - segments are in time order
5727 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
5728 return null;
5729 }
5730
5731 let segmentEnd = 0;
5732 let segment;
5733
5734 for (let i = 0; i < playlist.segments.length; i++) {
5735 segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
5736 // should contain the most accurate values we have for the segment's player times.
5737 //
5738 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
5739 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
5740 // calculate an end value.
5741
5742 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
5743
5744 if (time <= segmentEnd) {
5745 break;
5746 }
5747 }
5748
5749 const lastSegment = playlist.segments[playlist.segments.length - 1];
5750
5751 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
5752 // The time requested is beyond the stream end.
5753 return null;
5754 }
5755
5756 if (time > segmentEnd) {
5757 // The time is within or beyond the last segment.
5758 //
5759 // Check to see if the time is beyond a reasonable guess of the end of the stream.
5760 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
5761 // Technically, because the duration value is only an estimate, the time may still
5762 // exist in the last segment, however, there isn't enough information to make even
5763 // a reasonable estimate.
5764 return null;
5765 }
5766
5767 segment = lastSegment;
5768 }
5769
5770 return {
5771 segment,
5772 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
5773 // Because videoTimingInfo is only set after transmux, it is the only way to get
5774 // accurate timing values.
5775 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
5776 };
5777 };
5778 /**
5779 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
5780 * If the offset returned is positive, the programTime occurs after the
5781 * comparisonTimestamp.
5782 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
5783 *
5784 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
5785 * @param {string} programTime The programTime as an ISO-8601 string
5786 * @return {number} offset
5787 */
5788
5789 const getOffsetFromTimestamp = (comparisonTimeStamp, programTime) => {
5790 let segmentDateTime;
5791 let programDateTime;
5792
5793 try {
5794 segmentDateTime = new Date(comparisonTimeStamp);
5795 programDateTime = new Date(programTime);
5796 } catch (e) {// TODO handle error
5797 }
5798
5799 const segmentTimeEpoch = segmentDateTime.getTime();
5800 const programTimeEpoch = programDateTime.getTime();
5801 return (programTimeEpoch - segmentTimeEpoch) / 1000;
5802 };
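  // Illustrative sketch: the offset is (programTime - comparisonTimeStamp) in seconds.
  //
  //   getOffsetFromTimestamp('2024-05-01T10:00:00Z', '2024-05-01T10:00:10Z'); // -> 10
  //   getOffsetFromTimestamp('2024-05-01T10:00:10Z', '2024-05-01T10:00:00Z'); // -> -10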
5803 /**
5804 * Checks that all segments in this playlist have programDateTime tags.
5805 *
5806 * @param {Object} playlist A playlist object
5807 */
5808
5809 const verifyProgramDateTimeTags = playlist => {
5810 if (!playlist.segments || playlist.segments.length === 0) {
5811 return false;
5812 }
5813
5814 for (let i = 0; i < playlist.segments.length; i++) {
5815 const segment = playlist.segments[i];
5816
5817 if (!segment.dateTimeObject) {
5818 return false;
5819 }
5820 }
5821
5822 return true;
5823 };
5824 /**
5825 * Returns the programTime of the media given a playlist and a playerTime.
5826 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
5827 * If the segments containing the time requested have not been buffered yet, an estimate
5828 * may be returned to the callback.
5829 *
5830 * @param {Object} args
5831 * @param {Object} args.playlist A playlist object to search within
5832 * @param {number} time A playerTime in seconds
5833 * @param {Function} callback(err, programTime)
5834 * @return {string} err.message A detailed error message
5835 * @return {Object} programTime
5836 * @return {number} programTime.mediaSeconds The streamTime in seconds
5837 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
5838 */
5839
5840 const getProgramTime = ({
5841 playlist,
5842 time = undefined,
5843 callback
5844 }) => {
5845 if (!callback) {
5846 throw new Error('getProgramTime: callback must be provided');
5847 }
5848
5849 if (!playlist || time === undefined) {
5850 return callback({
5851 message: 'getProgramTime: playlist and time must be provided'
5852 });
5853 }
5854
5855 const matchedSegment = findSegmentForPlayerTime(time, playlist);
5856
5857 if (!matchedSegment) {
5858 return callback({
5859 message: 'valid programTime was not found'
5860 });
5861 }
5862
5863 if (matchedSegment.type === 'estimate') {
5864 return callback({
5865 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
5866 seekTime: matchedSegment.estimatedStart
5867 });
5868 }
5869
5870 const programTimeObject = {
5871 mediaSeconds: time
5872 };
5873 const programTime = playerTimeToProgramTime(time, matchedSegment.segment);
5874
5875 if (programTime) {
5876 programTimeObject.programDateTime = programTime.toISOString();
5877 }
5878
5879 return callback(null, programTimeObject);
5880 };
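  // Illustrative usage (a sketch; `mediaPlaylist` is a hypothetical media playlist
  // object): the callback receives an error when only an estimate is available,
  // which may include a seekTime so the caller can buffer the segment first.
  //
  //   getProgramTime({
  //     playlist: mediaPlaylist,
  //     time: 42.1,
  //     callback: (err, programTime) => {
  //       if (err) { return; } // err.seekTime is present for 'estimate' matches
  //       // programTime.mediaSeconds    -> 42.1
  //       // programTime.programDateTime -> an ISO-8601 string, when PDT tags exist
  //     }
  //   });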
5881 /**
5882 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
5883 *
5884 * @param {Object} args
5885 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
5886 * @param {Object} args.playlist A playlist to look within
5887 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
5888 * @param {Function} args.seekTo A method to perform a seek
5889 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
5890 * @param {Object} args.tech The tech to seek on
5891 * @param {Function} args.callback(err, newTime) A callback to return the new time to
5892 * @return {string} err.message A detailed error message
5893 * @return {number} newTime The exact time that was seeked to in seconds
5894 */
5895
5896 const seekToProgramTime = ({
5897 programTime,
5898 playlist,
5899 retryCount = 2,
5900 seekTo,
5901 pauseAfterSeek = true,
5902 tech,
5903 callback
5904 }) => {
5905 if (!callback) {
5906 throw new Error('seekToProgramTime: callback must be provided');
5907 }
5908
5909 if (typeof programTime === 'undefined' || !playlist || !seekTo) {
5910 return callback({
5911 message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
5912 });
5913 }
5914
5915 if (!playlist.endList && !tech.hasStarted_) {
5916 return callback({
5917 message: 'player must be playing a live stream to start buffering'
5918 });
5919 }
5920
5921 if (!verifyProgramDateTimeTags(playlist)) {
5922 return callback({
5923 message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
5924 });
5925 }
5926
5927 const matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
5928
5929 if (!matchedSegment) {
5930 return callback({
5931 message: `${programTime} was not found in the stream`
5932 });
5933 }
5934
5935 const segment = matchedSegment.segment;
5936 const mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
5937
5938 if (matchedSegment.type === 'estimate') {
5939 // we've run out of retries
5940 if (retryCount === 0) {
5941 return callback({
5942 message: `${programTime} is not buffered yet. Try again`
5943 });
5944 }
5945
5946 seekTo(matchedSegment.estimatedStart + mediaOffset);
5947 tech.one('seeked', () => {
5948 seekToProgramTime({
5949 programTime,
5950 playlist,
5951 retryCount: retryCount - 1,
5952 seekTo,
5953 pauseAfterSeek,
5954 tech,
5955 callback
5956 });
5957 });
5958 return;
5959 } // Since the segment.start value is determined from the buffered end or ending time
5960 // of the prior segment, the seekToTime doesn't need to account for any transmuxer
5961 // modifications.
5962
5963
5964 const seekToTime = segment.start + mediaOffset;
5965
5966 const seekedCallback = () => {
5967 return callback(null, tech.currentTime());
5968 }; // listen for seeked event
5969
5970
5971 tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
5972
5973 if (pauseAfterSeek) {
5974 tech.pause();
5975 }
5976
5977 seekTo(seekToTime);
5978 };
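  // Illustrative usage (a sketch; `player` and `mediaPlaylist` are hypothetical):
  // on an 'estimate' match the function seeks near the target and retries after
  // 'seeked', up to retryCount times, before settling on the exact time.
  //
  //   seekToProgramTime({
  //     programTime: '2024-05-01T10:00:30Z',
  //     playlist: mediaPlaylist,
  //     seekTo: time => player.currentTime(time),
  //     tech: player.tech(true),
  //     callback: (err, newTime) => { /* newTime is the exact seeked-to time */ }
  //   });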
5979
5980 /**
5981 * Loops through all supported media groups in master and calls the provided
5982 * callback for each group
5983 *
5984 * @param {Object} master
5985 * The parsed master manifest object
5986 * @param {string[]} groups
5987 * The media groups to call the callback for
5988 * @param {Function} callback
5989 * Callback to call for each media group
5990 */
5991 var forEachMediaGroup = function forEachMediaGroup(master, groups, callback) {
5992 groups.forEach(function (mediaType) {
5993 for (var groupKey in master.mediaGroups[mediaType]) {
5994 for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
5995 var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
5996 callback(mediaProperties, mediaType, groupKey, labelKey);
5997 }
5998 }
5999 });
6000 };
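  // Illustrative usage (a sketch with hypothetical group and label names):
  //
  //   forEachMediaGroup(master, ['AUDIO', 'SUBTITLES'], (properties, type, group, label) => {
  //     // e.g. type: 'AUDIO', group: 'audio', label: 'en (main)'
  //   });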
6001
6002 /*! @name mpd-parser @version 1.3.0 @license Apache-2.0 */
6003
6004 const isObject = obj => {
6005 return !!obj && typeof obj === 'object';
6006 };
6007
6008 const merge = (...objects) => {
6009 return objects.reduce((result, source) => {
6010 if (typeof source !== 'object') {
6011 return result;
6012 }
6013
6014 Object.keys(source).forEach(key => {
6015 if (Array.isArray(result[key]) && Array.isArray(source[key])) {
6016 result[key] = result[key].concat(source[key]);
6017 } else if (isObject(result[key]) && isObject(source[key])) {
6018 result[key] = merge(result[key], source[key]);
6019 } else {
6020 result[key] = source[key];
6021 }
6022 });
6023 return result;
6024 }, {});
6025 };
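  // Illustrative sketch: arrays concatenate, nested objects merge recursively, and
  // scalar values from later sources win.
  //
  //   merge({ a: [1], b: { x: 1 }, c: 1 }, { a: [2], b: { y: 2 }, c: 3 });
  //   // -> { a: [1, 2], b: { x: 1, y: 2 }, c: 3 }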
6026
6027 const values = o => Object.keys(o).map(k => o[k]);
6028
6029 const range = (start, end) => {
6030 const result = [];
6031
6032 for (let i = start; i < end; i++) {
6033 result.push(i);
6034 }
6035
6036 return result;
6037 };
6038
6039 const flatten = lists => lists.reduce((x, y) => x.concat(y), []);
6040
6041 const from = list => {
6042 if (!list.length) {
6043 return [];
6044 }
6045
6046 const result = [];
6047
6048 for (let i = 0; i < list.length; i++) {
6049 result.push(list[i]);
6050 }
6051
6052 return result;
6053 };
6054
6055 const findIndexes = (l, key) => l.reduce((a, e, i) => {
6056 if (e[key]) {
6057 a.push(i);
6058 }
6059
6060 return a;
6061 }, []);
6062 /**
6063 * Returns a union of the included lists provided each element can be identified by a key.
6064 *
6065    * @param {Array} lists - list of lists to get the union of
6066 * @param {Function} keyFunction - the function to use as a key for each element
6067 *
6068 * @return {Array} the union of the arrays
6069 */
6070
6071
6072 const union = (lists, keyFunction) => {
6073 return values(lists.reduce((acc, list) => {
6074 list.forEach(el => {
6075 acc[keyFunction(el)] = el;
6076 });
6077 return acc;
6078 }, {}));
6079 };
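  // Illustrative sketch: elements are keyed by keyFunction, and later occurrences
  // of the same key replace earlier ones.
  //
  //   union([[{ id: 'a', v: 1 }], [{ id: 'a', v: 2 }, { id: 'b', v: 3 }]], el => el.id);
  //   // -> [{ id: 'a', v: 2 }, { id: 'b', v: 3 }]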
6080
6081 var errors = {
6082 INVALID_NUMBER_OF_PERIOD: 'INVALID_NUMBER_OF_PERIOD',
6083 INVALID_NUMBER_OF_CONTENT_STEERING: 'INVALID_NUMBER_OF_CONTENT_STEERING',
6084 DASH_EMPTY_MANIFEST: 'DASH_EMPTY_MANIFEST',
6085 DASH_INVALID_XML: 'DASH_INVALID_XML',
6086 NO_BASE_URL: 'NO_BASE_URL',
6087 MISSING_SEGMENT_INFORMATION: 'MISSING_SEGMENT_INFORMATION',
6088 SEGMENT_TIME_UNSPECIFIED: 'SEGMENT_TIME_UNSPECIFIED',
6089 UNSUPPORTED_UTC_TIMING_SCHEME: 'UNSUPPORTED_UTC_TIMING_SCHEME'
6090 };
6091 /**
6092 * @typedef {Object} SingleUri
6093 * @property {string} uri - relative location of segment
6094 * @property {string} resolvedUri - resolved location of segment
6095 * @property {Object} byterange - Object containing information on how to make byte range
6096 * requests following byte-range-spec per RFC2616.
6097    * @property {number|BigInt} byterange.length - length of range request
6098    * @property {number|BigInt} byterange.offset - byte offset of range request
6099 *
6100 * @see https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1
6101 */
6102
6103 /**
6104 * Converts a URLType node (5.3.9.2.3 Table 13) to a segment object
6105 * that conforms to how m3u8-parser is structured
6106 *
6107 * @see https://github.com/videojs/m3u8-parser
6108 *
6109 * @param {string} baseUrl - baseUrl provided by <BaseUrl> nodes
6110 * @param {string} source - source url for segment
6111 * @param {string} range - optional range used for range calls,
6112 * follows RFC 2616, Clause 14.35.1
6113 * @return {SingleUri} full segment information transformed into a format similar
6114 * to m3u8-parser
6115 */
6116
6117 const urlTypeToSegment = ({
6118 baseUrl = '',
6119 source = '',
6120 range = '',
6121 indexRange = ''
6122 }) => {
6123 const segment = {
6124 uri: source,
6125 resolvedUri: resolveUrl$1(baseUrl || '', source)
6126 };
6127
6128 if (range || indexRange) {
6129 const rangeStr = range ? range : indexRange;
6130 const ranges = rangeStr.split('-'); // default to parsing this as a BigInt if possible
6131
6132 let startRange = window.BigInt ? window.BigInt(ranges[0]) : parseInt(ranges[0], 10);
6133 let endRange = window.BigInt ? window.BigInt(ranges[1]) : parseInt(ranges[1], 10); // convert back to a number if less than MAX_SAFE_INTEGER
6134
6135 if (startRange < Number.MAX_SAFE_INTEGER && typeof startRange === 'bigint') {
6136 startRange = Number(startRange);
6137 }
6138
6139 if (endRange < Number.MAX_SAFE_INTEGER && typeof endRange === 'bigint') {
6140 endRange = Number(endRange);
6141 }
6142
6143 let length;
6144
6145 if (typeof endRange === 'bigint' || typeof startRange === 'bigint') {
6146 length = window.BigInt(endRange) - window.BigInt(startRange) + window.BigInt(1);
6147 } else {
6148 length = endRange - startRange + 1;
6149 }
6150
6151 if (typeof length === 'bigint' && length < Number.MAX_SAFE_INTEGER) {
6152 length = Number(length);
6153 } // byterange should be inclusive according to
6154 // RFC 2616, Clause 14.35.1
6155
6156
6157 segment.byterange = {
6158 length,
6159 offset: startRange
6160 };
6161 }
6162
6163 return segment;
6164 };
6165
6166 const byteRangeToString = byterange => {
6167 // `endRange` is one less than `offset + length` because the HTTP range
6168 // header uses inclusive ranges
6169 let endRange;
6170
6171 if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
6172 endRange = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);
6173 } else {
6174 endRange = byterange.offset + byterange.length - 1;
6175 }
6176
6177 return `${byterange.offset}-${endRange}`;
6178 };
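  // Illustrative sketch (hypothetical URIs): urlTypeToSegment and byteRangeToString
  // round-trip the inclusive HTTP byte ranges described above.
  //
  //   const seg = urlTypeToSegment({ baseUrl: 'https://example.com/', source: 'seg.mp4', range: '0-719' });
  //   // seg.byterange -> { length: 720, offset: 0 }
  //   byteRangeToString(seg.byterange); // -> "0-719"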
6179 /**
6180    * Parse the end number attribute, which can be a string,
6181    * a number, or undefined.
6182 *
6183 * @param {string|number|undefined} endNumber
6184 * The end number attribute.
6185 *
6186 * @return {number|null}
6187 * The result of parsing the end number.
6188 */
6189
6190
6191 const parseEndNumber = endNumber => {
6192 if (endNumber && typeof endNumber !== 'number') {
6193 endNumber = parseInt(endNumber, 10);
6194 }
6195
6196 if (isNaN(endNumber)) {
6197 return null;
6198 }
6199
6200 return endNumber;
6201 };
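  // Illustrative sketch:
  //
  //   parseEndNumber('100');     // -> 100
  //   parseEndNumber(undefined); // -> null
  //   parseEndNumber('x');       // -> null (NaN after parseInt)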
6202 /**
6203 * Functions for calculating the range of available segments in static and dynamic
6204 * manifests.
6205 */
6206
6207
6208 const segmentRange = {
6209 /**
6210 * Returns the entire range of available segments for a static MPD
6211 *
6212 * @param {Object} attributes
6213     *        Inherited MPD attributes
6214 * @return {{ start: number, end: number }}
6215 * The start and end numbers for available segments
6216 */
6217 static(attributes) {
6218 const {
6219 duration,
6220 timescale = 1,
6221 sourceDuration,
6222 periodDuration
6223 } = attributes;
6224 const endNumber = parseEndNumber(attributes.endNumber);
6225 const segmentDuration = duration / timescale;
6226
6227 if (typeof endNumber === 'number') {
6228 return {
6229 start: 0,
6230 end: endNumber
6231 };
6232 }
6233
6234 if (typeof periodDuration === 'number') {
6235 return {
6236 start: 0,
6237 end: periodDuration / segmentDuration
6238 };
6239 }
6240
6241 return {
6242 start: 0,
6243 end: sourceDuration / segmentDuration
6244 };
6245 },
6246
6247 /**
6248 * Returns the current live window range of available segments for a dynamic MPD
6249 *
6250 * @param {Object} attributes
6251     *        Inherited MPD attributes
6252 * @return {{ start: number, end: number }}
6253 * The start and end numbers for available segments
6254 */
6255 dynamic(attributes) {
6256 const {
6257 NOW,
6258 clientOffset,
6259 availabilityStartTime,
6260 timescale = 1,
6261 duration,
6262 periodStart = 0,
6263 minimumUpdatePeriod = 0,
6264 timeShiftBufferDepth = Infinity
6265 } = attributes;
6266 const endNumber = parseEndNumber(attributes.endNumber); // clientOffset is passed in at the top level of mpd-parser and is an offset calculated
6267 // after retrieving UTC server time.
6268
6269 const now = (NOW + clientOffset) / 1000; // WC stands for Wall Clock.
6270 // Convert the period start time to EPOCH.
6271
6272 const periodStartWC = availabilityStartTime + periodStart; // Period end in EPOCH is manifest's retrieval time + time until next update.
6273
6274 const periodEndWC = now + minimumUpdatePeriod;
6275 const periodDuration = periodEndWC - periodStartWC;
6276 const segmentCount = Math.ceil(periodDuration * timescale / duration);
6277 const availableStart = Math.floor((now - periodStartWC - timeShiftBufferDepth) * timescale / duration);
6278 const availableEnd = Math.floor((now - periodStartWC) * timescale / duration);
6279 return {
6280 start: Math.max(0, availableStart),
6281 end: typeof endNumber === 'number' ? endNumber : Math.min(segmentCount, availableEnd)
6282 };
6283 }
6284
6285 };
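  // Worked example (hypothetical attribute values) for the static case:
  // segmentDuration = duration / timescale = 4 / 2 = 2 seconds, so a 10 second
  // source yields { start: 0, end: 5 }; `end` is exclusive when fed to range().
  //
  //   segmentRange.static({ duration: 4, timescale: 2, sourceDuration: 10 });
  //   // -> { start: 0, end: 5 }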
6286 /**
6287 * Maps a range of numbers to objects with information needed to build the corresponding
6288 * segment list
6289 *
6290 * @name toSegmentsCallback
6291 * @function
6292 * @param {number} number
6293 * Number of the segment
6294 * @param {number} index
6295 * Index of the number in the range list
6296    * @return {{ number: number, duration: number, timeline: number, time: number }}
6297 * Object with segment timing and duration info
6298 */
6299
6300 /**
6301 * Returns a callback for Array.prototype.map for mapping a range of numbers to
6302 * information needed to build the segment list.
6303 *
6304 * @param {Object} attributes
6305 * Inherited MPD attributes
6306 * @return {toSegmentsCallback}
6307 * Callback map function
6308 */
6309
6310 const toSegments = attributes => number => {
6311 const {
6312 duration,
6313 timescale = 1,
6314 periodStart,
6315 startNumber = 1
6316 } = attributes;
6317 return {
6318 number: startNumber + number,
6319 duration: duration / timescale,
6320 timeline: periodStart,
6321 time: number * duration
6322 };
6323 };
6324 /**
6325 * Returns a list of objects containing segment timing and duration info used for
6326 * building the list of segments. This uses the @duration attribute specified
6327 * in the MPD manifest to derive the range of segments.
6328 *
6329 * @param {Object} attributes
6330 * Inherited MPD attributes
6331 * @return {{number: number, duration: number, time: number, timeline: number}[]}
6332 * List of Objects with segment timing and duration info
6333 */
6334
6335
6336 const parseByDuration = attributes => {
6337 const {
6338 type,
6339 duration,
6340 timescale = 1,
6341 periodDuration,
6342 sourceDuration
6343 } = attributes;
6344 const {
6345 start,
6346 end
6347 } = segmentRange[type](attributes);
6348 const segments = range(start, end).map(toSegments(attributes));
6349
6350 if (type === 'static') {
6351 const index = segments.length - 1; // section is either a period or the full source
6352
6353      const sectionDuration = typeof periodDuration === 'number' ? periodDuration : sourceDuration; // the final segment may be shorter than the full segment duration
6354
6355 segments[index].duration = sectionDuration - duration / timescale * index;
6356 }
6357
6358 return segments;
6359 };
6360 /**
6361 * Translates SegmentBase into a set of segments.
6362 * (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
6363    * node should be translated into a segment.
6364 *
6365 * @param {Object} attributes
6366 * Object containing all inherited attributes from parent elements with attribute
6367 * names as keys
6368 * @return {Object.<Array>} list of segments
6369 */
6370
6371
6372 const segmentsFromBase = attributes => {
6373 const {
6374 baseUrl,
6375 initialization = {},
6376 sourceDuration,
6377 indexRange = '',
6378 periodStart,
6379 presentationTime,
6380 number = 0,
6381 duration
6382 } = attributes; // base url is required for SegmentBase to work, per spec (Section 5.3.9.2.1)
6383
6384 if (!baseUrl) {
6385 throw new Error(errors.NO_BASE_URL);
6386 }
6387
6388 const initSegment = urlTypeToSegment({
6389 baseUrl,
6390 source: initialization.sourceURL,
6391 range: initialization.range
6392 });
6393 const segment = urlTypeToSegment({
6394 baseUrl,
6395 source: baseUrl,
6396 indexRange
6397 });
6398 segment.map = initSegment; // If there is a duration, use it, otherwise use the given duration of the source
6399 // (since SegmentBase is only for one total segment)
6400
6401 if (duration) {
6402 const segmentTimeInfo = parseByDuration(attributes);
6403
6404 if (segmentTimeInfo.length) {
6405 segment.duration = segmentTimeInfo[0].duration;
6406 segment.timeline = segmentTimeInfo[0].timeline;
6407 }
6408 } else if (sourceDuration) {
6409 segment.duration = sourceDuration;
6410 segment.timeline = periodStart;
6411 } // If presentation time is provided, these segments are being generated by SIDX
6412 // references, and should use the time provided. For the general case of SegmentBase,
6413 // there should only be one segment in the period, so its presentation time is the same
6414 // as its period start.
6415
6416
6417 segment.presentationTime = presentationTime || periodStart;
6418 segment.number = number;
6419 return [segment];
6420 };
6421 /**
6422 * Given a playlist, a sidx box, and a baseUrl, update the segment list of the playlist
6423 * according to the sidx information given.
6424 *
6425    * playlist.sidx has metadata about the sidx, whereas the sidx param
6426 * is the parsed sidx box itself.
6427 *
6428 * @param {Object} playlist the playlist to update the sidx information for
6429 * @param {Object} sidx the parsed sidx box
6430 * @return {Object} the playlist object with the updated sidx information
6431 */
6432
6433
6434 const addSidxSegmentsToPlaylist$1 = (playlist, sidx, baseUrl) => {
6435 // Retain init segment information
6436 const initSegment = playlist.sidx.map ? playlist.sidx.map : null; // Retain source duration from initial main manifest parsing
6437
6438 const sourceDuration = playlist.sidx.duration; // Retain source timeline
6439
6440 const timeline = playlist.timeline || 0;
6441 const sidxByteRange = playlist.sidx.byterange;
6442 const sidxEnd = sidxByteRange.offset + sidxByteRange.length; // Retain timescale of the parsed sidx
6443
6444 const timescale = sidx.timescale; // referenceType 1 refers to other sidx boxes
6445
6446 const mediaReferences = sidx.references.filter(r => r.referenceType !== 1);
6447 const segments = [];
6448 const type = playlist.endList ? 'static' : 'dynamic';
6449 const periodStart = playlist.sidx.timeline;
6450 let presentationTime = periodStart;
6451 let number = playlist.mediaSequence || 0; // firstOffset is the offset from the end of the sidx box
6452
6453 let startIndex; // eslint-disable-next-line
6454
6455 if (typeof sidx.firstOffset === 'bigint') {
6456 startIndex = window.BigInt(sidxEnd) + sidx.firstOffset;
6457 } else {
6458 startIndex = sidxEnd + sidx.firstOffset;
6459 }
6460
6461 for (let i = 0; i < mediaReferences.length; i++) {
6462      const reference = mediaReferences[i]; // size of the referenced (sub)segment
6463
6464 const size = reference.referencedSize; // duration of the referenced (sub)segment, in the timescale
6465 // this will be converted to seconds when generating segments
6466
6467 const duration = reference.subsegmentDuration; // should be an inclusive range
6468
6469 let endIndex; // eslint-disable-next-line
6470
6471 if (typeof startIndex === 'bigint') {
6472 endIndex = startIndex + window.BigInt(size) - window.BigInt(1);
6473 } else {
6474 endIndex = startIndex + size - 1;
6475 }
6476
6477 const indexRange = `${startIndex}-${endIndex}`;
6478 const attributes = {
6479 baseUrl,
6480 timescale,
6481 timeline,
6482 periodStart,
6483 presentationTime,
6484 number,
6485 duration,
6486 sourceDuration,
6487 indexRange,
6488 type
6489 };
6490 const segment = segmentsFromBase(attributes)[0];
6491
6492 if (initSegment) {
6493 segment.map = initSegment;
6494 }
6495
6496 segments.push(segment);
6497
6498 if (typeof startIndex === 'bigint') {
6499 startIndex += window.BigInt(size);
6500 } else {
6501 startIndex += size;
6502 }
6503
6504 presentationTime += duration / timescale;
6505 number++;
6506 }
6507
6508 playlist.segments = segments;
6509 return playlist;
6510 };
6511
6512 const SUPPORTED_MEDIA_TYPES = ['AUDIO', 'SUBTITLES']; // allow one 60fps frame as leniency (arbitrarily chosen)
6513
6514 const TIME_FUDGE = 1 / 60;
6515 /**
6516 * Given a list of timelineStarts, combines, dedupes, and sorts them.
6517 *
6518 * @param {TimelineStart[]} timelineStarts - list of timeline starts
6519 *
6520 * @return {TimelineStart[]} the combined and deduped timeline starts
6521 */
6522
6523 const getUniqueTimelineStarts = timelineStarts => {
6524 return union(timelineStarts, ({
6525 timeline
6526 }) => timeline).sort((a, b) => a.timeline > b.timeline ? 1 : -1);
6527 };
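  // Illustrative sketch: duplicate timelines collapse and the result is sorted.
  //
  //   getUniqueTimelineStarts([
  //     [{ start: 0, timeline: 0 }, { start: 10, timeline: 10 }],
  //     [{ start: 10, timeline: 10 }]
  //   ]);
  //   // -> [{ start: 0, timeline: 0 }, { start: 10, timeline: 10 }]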
6528 /**
6529 * Finds the playlist with the matching NAME attribute.
6530 *
6531 * @param {Array} playlists - playlists to search through
6532 * @param {string} name - the NAME attribute to search for
6533 *
6534 * @return {Object|null} the matching playlist object, or null
6535 */
6536
6537
6538 const findPlaylistWithName = (playlists, name) => {
6539 for (let i = 0; i < playlists.length; i++) {
6540 if (playlists[i].attributes.NAME === name) {
6541 return playlists[i];
6542 }
6543 }
6544
6545 return null;
6546 };
6547 /**
6548 * Gets a flattened array of media group playlists.
6549 *
6550 * @param {Object} manifest - the main manifest object
6551 *
6552 * @return {Array} the media group playlists
6553 */
6554
6555
6556 const getMediaGroupPlaylists = manifest => {
6557 let mediaGroupPlaylists = [];
6558 forEachMediaGroup(manifest, SUPPORTED_MEDIA_TYPES, (properties, type, group, label) => {
6559 mediaGroupPlaylists = mediaGroupPlaylists.concat(properties.playlists || []);
6560 });
6561 return mediaGroupPlaylists;
6562 };
6563 /**
6564 * Updates the playlist's media sequence numbers.
6565 *
6566 * @param {Object} config - options object
6567 * @param {Object} config.playlist - the playlist to update
6568 * @param {number} config.mediaSequence - the mediaSequence number to start with
6569 */
6570
6571
6572 const updateMediaSequenceForPlaylist = ({
6573 playlist,
6574 mediaSequence
6575 }) => {
6576 playlist.mediaSequence = mediaSequence;
6577 playlist.segments.forEach((segment, index) => {
6578 segment.number = playlist.mediaSequence + index;
6579 });
6580 };
6581 /**
6582 * Updates the media and discontinuity sequence numbers of newPlaylists given oldPlaylists
6583 * and a complete list of timeline starts.
6584 *
6585 * If no matching playlist is found, only the discontinuity sequence number of the playlist
6586 * will be updated.
6587 *
6588 * Since early available timelines are not supported, at least one segment must be present.
6589 *
6590 * @param {Object} config - options object
6591 * @param {Object[]} oldPlaylists - the old playlists to use as a reference
6592 * @param {Object[]} newPlaylists - the new playlists to update
6593 * @param {Object} timelineStarts - all timelineStarts seen in the stream to this point
6594 */
6595
6596
6597 const updateSequenceNumbers = ({
6598 oldPlaylists,
6599 newPlaylists,
6600 timelineStarts
6601 }) => {
6602 newPlaylists.forEach(playlist => {
6603 playlist.discontinuitySequence = timelineStarts.findIndex(function ({
6604 timeline
6605 }) {
6606 return timeline === playlist.timeline;
6607      }); // Playlist NAMEs come from DASH Representation IDs, which are mandatory
6608 // (see ISO_23009-1-2012 5.3.5.2).
6609 //
6610 // If the same Representation existed in a prior Period, it will retain the same NAME.
6611
6612 const oldPlaylist = findPlaylistWithName(oldPlaylists, playlist.attributes.NAME);
6613
6614 if (!oldPlaylist) {
6615 // Since this is a new playlist, the media sequence values can start from 0 without
6616 // consequence.
6617 return;
6618 } // TODO better support for live SIDX
6619 //
6620 // As of this writing, mpd-parser does not support multiperiod SIDX (in live or VOD).
6621 // This is evident by a playlist only having a single SIDX reference. In a multiperiod
6622 // playlist there would need to be multiple SIDX references. In addition, live SIDX is
6623 // not supported when the SIDX properties change on refreshes.
6624 //
6625 // In the future, if support needs to be added, the merging logic here can be called
6626 // after SIDX references are resolved. For now, exit early to prevent exceptions being
6627 // thrown due to undefined references.
6628
6629
6630 if (playlist.sidx) {
6631 return;
6632 } // Since we don't yet support early available timelines, we don't need to support
6633 // playlists with no segments.
6634
6635
6636 const firstNewSegment = playlist.segments[0];
6637 const oldMatchingSegmentIndex = oldPlaylist.segments.findIndex(function (oldSegment) {
6638 return Math.abs(oldSegment.presentationTime - firstNewSegment.presentationTime) < TIME_FUDGE;
6639 }); // No matching segment from the old playlist means the entire playlist was refreshed.
6640 // In this case the media sequence should account for this update, and the new segments
6641 // should be marked as discontinuous from the prior content, since the last prior
6642 // timeline was removed.
6643
6644 if (oldMatchingSegmentIndex === -1) {
6645 updateMediaSequenceForPlaylist({
6646 playlist,
6647 mediaSequence: oldPlaylist.mediaSequence + oldPlaylist.segments.length
6648 });
6649 playlist.segments[0].discontinuity = true;
6650 playlist.discontinuityStarts.unshift(0); // No matching segment does not necessarily mean there's missing content.
6651 //
6652 // If the new playlist's timeline is the same as the last seen segment's timeline,
6653 // then a discontinuity can be added to identify that there's potentially missing
6654 // content. If there's no missing content, the discontinuity should still be rather
6655 // harmless. It's possible that if segment durations are accurate enough, that the
6656 // existence of a gap can be determined using the presentation times and durations,
6657 // but if the segment timing info is off, it may introduce more problems than simply
6658 // adding the discontinuity.
6659 //
6660 // If the new playlist's timeline is different from the last seen segment's timeline,
6661 // then a discontinuity can be added to identify that this is the first seen segment
6662 // of a new timeline. However, the logic at the start of this function that
6663        // determined the discontinuity sequence by timeline index is now off by one (the
6664        // discontinuity of the newest timeline hasn't yet fallen off the manifest...since
6665        // we added it), so the discontinuity sequence must be decremented.
6666 //
6667 // A period may also have a duration of zero, so the case of no segments is handled
6668 // here even though we don't yet support early available periods.
6669
6670 if (!oldPlaylist.segments.length && playlist.timeline > oldPlaylist.timeline || oldPlaylist.segments.length && playlist.timeline > oldPlaylist.segments[oldPlaylist.segments.length - 1].timeline) {
6671 playlist.discontinuitySequence--;
6672 }
6673
6674 return;
6675 } // If the first segment matched with a prior segment on a discontinuity (it's matching
6676 // on the first segment of a period), then the discontinuitySequence shouldn't be the
6677 // timeline's matching one, but instead should be the one prior, and the first segment
6678 // of the new manifest should be marked with a discontinuity.
6679 //
6680 // The reason for this special case is that discontinuity sequence shows how many
6681 // discontinuities have fallen off of the playlist, and discontinuities are marked on
6682 // the first segment of a new "timeline." Because of this, while DASH will retain that
6683 // Period while the "timeline" exists, HLS keeps track of it via the discontinuity
6684 // sequence, and that first segment is an indicator, but can be removed before that
6685 // timeline is gone.
6686
6687
6688 const oldMatchingSegment = oldPlaylist.segments[oldMatchingSegmentIndex];
6689
6690 if (oldMatchingSegment.discontinuity && !firstNewSegment.discontinuity) {
6691 firstNewSegment.discontinuity = true;
6692 playlist.discontinuityStarts.unshift(0);
6693 playlist.discontinuitySequence--;
6694 }
6695
6696 updateMediaSequenceForPlaylist({
6697 playlist,
6698 mediaSequence: oldPlaylist.segments[oldMatchingSegmentIndex].number
6699 });
6700 });
6701 };
6702 /**
6703 * Given an old parsed manifest object and a new parsed manifest object, updates the
6704 * sequence and timing values within the new manifest to ensure that it lines up with the
6705 * old.
6706 *
6707 * @param {Array} oldManifest - the old main manifest object
6708 * @param {Array} newManifest - the new main manifest object
6709 *
6710 * @return {Object} the updated new manifest object
6711 */
6712
6713
6714 const positionManifestOnTimeline = ({
6715 oldManifest,
6716 newManifest
6717 }) => {
6718 // Starting from v4.1.2 of the IOP, section 4.4.3.3 states:
6719 //
6720 // "MPD@availabilityStartTime and Period@start shall not be changed over MPD updates."
6721 //
6722 // This was added from https://github.com/Dash-Industry-Forum/DASH-IF-IOP/issues/160
6723 //
6724 // Because of this change, and the difficulty of supporting periods with changing start
6725 // times, periods with changing start times are not supported. This makes the logic much
6726      // simpler, since periods with the same start time can be considered the same period
6727 // across refreshes.
6728 //
6729 // To give an example as to the difficulty of handling periods where the start time may
6730 // change, if a single period manifest is refreshed with another manifest with a single
6731 // period, and both the start and end times are increased, then the only way to determine
6732 // if it's a new period or an old one that has changed is to look through the segments of
6733 // each playlist and determine the presentation time bounds to find a match. In addition,
6734 // if the period start changed to exceed the old period end, then there would be no
6735 // match, and it would not be possible to determine whether the refreshed period is a new
6736 // one or the old one.
6737 const oldPlaylists = oldManifest.playlists.concat(getMediaGroupPlaylists(oldManifest));
6738 const newPlaylists = newManifest.playlists.concat(getMediaGroupPlaylists(newManifest)); // Save all seen timelineStarts to the new manifest. Although this potentially means that
6739 // there's a "memory leak" in that it will never stop growing, in reality, only a couple
6740 // of properties are saved for each seen Period. Even long running live streams won't
6741 // generate too many Periods, unless the stream is watched for decades. In the future,
6742 // this can be optimized by mapping to discontinuity sequence numbers for each timeline,
6743 // but it may not become an issue, and the additional info can be useful for debugging.
6744
6745 newManifest.timelineStarts = getUniqueTimelineStarts([oldManifest.timelineStarts, newManifest.timelineStarts]);
6746 updateSequenceNumbers({
6747 oldPlaylists,
6748 newPlaylists,
6749 timelineStarts: newManifest.timelineStarts
6750 });
6751 return newManifest;
6752 };
6753
6754 const generateSidxKey = sidx => sidx && sidx.uri + '-' + byteRangeToString(sidx.byterange);
6755
6756 const mergeDiscontiguousPlaylists = playlists => {
6757 // Break out playlists into groups based on their baseUrl
6758 const playlistsByBaseUrl = playlists.reduce(function (acc, cur) {
6759 if (!acc[cur.attributes.baseUrl]) {
6760 acc[cur.attributes.baseUrl] = [];
6761 }
6762
6763 acc[cur.attributes.baseUrl].push(cur);
6764 return acc;
6765 }, {});
6766 let allPlaylists = [];
6767 Object.values(playlistsByBaseUrl).forEach(playlistGroup => {
6768 const mergedPlaylists = values(playlistGroup.reduce((acc, playlist) => {
6769 // assuming playlist IDs are the same across periods
6770 // TODO: handle multiperiod where representation sets are not the same
6771 // across periods
6772 const name = playlist.attributes.id + (playlist.attributes.lang || '');
6773
6774 if (!acc[name]) {
6775 // First Period
6776 acc[name] = playlist;
6777 acc[name].attributes.timelineStarts = [];
6778 } else {
6779 // Subsequent Periods
6780 if (playlist.segments) {
6781 // first segment of subsequent periods signal a discontinuity
6782 if (playlist.segments[0]) {
6783 playlist.segments[0].discontinuity = true;
6784 }
6785
6786 acc[name].segments.push(...playlist.segments);
6787          } // bubble up contentProtection; this assumes all DRM content
6788 // has the same contentProtection
6789
6790
6791 if (playlist.attributes.contentProtection) {
6792 acc[name].attributes.contentProtection = playlist.attributes.contentProtection;
6793 }
6794 }
6795
6796 acc[name].attributes.timelineStarts.push({
6797 // Although they represent the same number, it's important to have both to make it
6798 // compatible with HLS potentially having a similar attribute.
6799 start: playlist.attributes.periodStart,
6800 timeline: playlist.attributes.periodStart
6801 });
6802 return acc;
6803 }, {}));
6804 allPlaylists = allPlaylists.concat(mergedPlaylists);
6805 });
6806 return allPlaylists.map(playlist => {
6807 playlist.discontinuityStarts = findIndexes(playlist.segments || [], 'discontinuity');
6808 return playlist;
6809 });
6810 };
6811
6812 const addSidxSegmentsToPlaylist = (playlist, sidxMapping) => {
6813 const sidxKey = generateSidxKey(playlist.sidx);
6814 const sidxMatch = sidxKey && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx;
6815
6816 if (sidxMatch) {
6817 addSidxSegmentsToPlaylist$1(playlist, sidxMatch, playlist.sidx.resolvedUri);
6818 }
6819
6820 return playlist;
6821 };
6822
6823 const addSidxSegmentsToPlaylists = (playlists, sidxMapping = {}) => {
6824 if (!Object.keys(sidxMapping).length) {
6825 return playlists;
6826 }
6827
6828 for (const i in playlists) {
6829 playlists[i] = addSidxSegmentsToPlaylist(playlists[i], sidxMapping);
6830 }
6831
6832 return playlists;
6833 };
6834
6835 const formatAudioPlaylist = ({
6836 attributes,
6837 segments,
6838 sidx,
6839 mediaSequence,
6840 discontinuitySequence,
6841 discontinuityStarts
6842 }, isAudioOnly) => {
6843 const playlist = {
6844 attributes: {
6845 NAME: attributes.id,
6846 BANDWIDTH: attributes.bandwidth,
6847 CODECS: attributes.codecs,
6848 ['PROGRAM-ID']: 1
6849 },
6850 uri: '',
6851 endList: attributes.type === 'static',
6852 timeline: attributes.periodStart,
6853 resolvedUri: attributes.baseUrl || '',
6854 targetDuration: attributes.duration,
6855 discontinuitySequence,
6856 discontinuityStarts,
6857 timelineStarts: attributes.timelineStarts,
6858 mediaSequence,
6859 segments
6860 };
6861
6862 if (attributes.contentProtection) {
6863 playlist.contentProtection = attributes.contentProtection;
6864 }
6865
6866 if (attributes.serviceLocation) {
6867 playlist.attributes.serviceLocation = attributes.serviceLocation;
6868 }
6869
6870 if (sidx) {
6871 playlist.sidx = sidx;
6872 }
6873
6874 if (isAudioOnly) {
6875 playlist.attributes.AUDIO = 'audio';
6876 playlist.attributes.SUBTITLES = 'subs';
6877 }
6878
6879 return playlist;
6880 };
6881
6882 const formatVttPlaylist = ({
6883 attributes,
6884 segments,
6885 mediaSequence,
6886 discontinuityStarts,
6887 discontinuitySequence
6888 }) => {
6889 if (typeof segments === 'undefined') {
6890        // vtt tracks may use a single file in BaseURL
6891 segments = [{
6892 uri: attributes.baseUrl,
6893 timeline: attributes.periodStart,
6894 resolvedUri: attributes.baseUrl || '',
6895 duration: attributes.sourceDuration,
6896 number: 0
6897 }]; // targetDuration should be the same duration as the only segment
6898
6899 attributes.duration = attributes.sourceDuration;
6900 }
6901
6902 const m3u8Attributes = {
6903 NAME: attributes.id,
6904 BANDWIDTH: attributes.bandwidth,
6905 ['PROGRAM-ID']: 1
6906 };
6907
6908 if (attributes.codecs) {
6909 m3u8Attributes.CODECS = attributes.codecs;
6910 }
6911
6912 const vttPlaylist = {
6913 attributes: m3u8Attributes,
6914 uri: '',
6915 endList: attributes.type === 'static',
6916 timeline: attributes.periodStart,
6917 resolvedUri: attributes.baseUrl || '',
6918 targetDuration: attributes.duration,
6919 timelineStarts: attributes.timelineStarts,
6920 discontinuityStarts,
6921 discontinuitySequence,
6922 mediaSequence,
6923 segments
6924 };
6925
6926 if (attributes.serviceLocation) {
6927 vttPlaylist.attributes.serviceLocation = attributes.serviceLocation;
6928 }
6929
6930 return vttPlaylist;
6931 };
6932
6933 const organizeAudioPlaylists = (playlists, sidxMapping = {}, isAudioOnly = false) => {
6934 let mainPlaylist;
6935 const formattedPlaylists = playlists.reduce((a, playlist) => {
6936 const role = playlist.attributes.role && playlist.attributes.role.value || '';
6937 const language = playlist.attributes.lang || '';
6938 let label = playlist.attributes.label || 'main';
6939
6940 if (language && !playlist.attributes.label) {
6941 const roleLabel = role ? ` (${role})` : '';
6942 label = `${playlist.attributes.lang}${roleLabel}`;
6943 }
6944
6945 if (!a[label]) {
6946 a[label] = {
6947 language,
6948 autoselect: true,
6949 default: role === 'main',
6950 playlists: [],
6951 uri: ''
6952 };
6953 }
6954
6955 const formatted = addSidxSegmentsToPlaylist(formatAudioPlaylist(playlist, isAudioOnly), sidxMapping);
6956 a[label].playlists.push(formatted);
6957
6958 if (typeof mainPlaylist === 'undefined' && role === 'main') {
6959 mainPlaylist = playlist;
6960 mainPlaylist.default = true;
6961 }
6962
6963 return a;
6964 }, {}); // if no playlists have role "main", mark the first as main
6965
6966 if (!mainPlaylist) {
6967 const firstLabel = Object.keys(formattedPlaylists)[0];
6968 formattedPlaylists[firstLabel].default = true;
6969 }
6970
6971 return formattedPlaylists;
6972 };
6973
6974 const organizeVttPlaylists = (playlists, sidxMapping = {}) => {
6975 return playlists.reduce((a, playlist) => {
6976 const label = playlist.attributes.label || playlist.attributes.lang || 'text';
6977
6978 if (!a[label]) {
6979 a[label] = {
6980 language: label,
6981 default: false,
6982 autoselect: false,
6983 playlists: [],
6984 uri: ''
6985 };
6986 }
6987
6988 a[label].playlists.push(addSidxSegmentsToPlaylist(formatVttPlaylist(playlist), sidxMapping));
6989 return a;
6990 }, {});
6991 };
6992
6993 const organizeCaptionServices = captionServices => captionServices.reduce((svcObj, svc) => {
6994 if (!svc) {
6995 return svcObj;
6996 }
6997
6998 svc.forEach(service => {
6999 const {
7000 channel,
7001 language
7002 } = service;
7003 svcObj[language] = {
7004 autoselect: false,
7005 default: false,
7006 instreamId: channel,
7007 language
7008 };
7009
7010 if (service.hasOwnProperty('aspectRatio')) {
7011 svcObj[language].aspectRatio = service.aspectRatio;
7012 }
7013
7014 if (service.hasOwnProperty('easyReader')) {
7015 svcObj[language].easyReader = service.easyReader;
7016 }
7017
7018 if (service.hasOwnProperty('3D')) {
7019 svcObj[language]['3D'] = service['3D'];
7020 }
7021 });
7022 return svcObj;
7023 }, {});
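  // Illustrative sketch (hypothetical caption service, e.g. a CEA-608 channel):
  // services are keyed by language, with the channel mapped to instreamId.
  //
  //   organizeCaptionServices([[{ channel: 'CC1', language: 'eng' }]]);
  //   // -> { eng: { autoselect: false, default: false, instreamId: 'CC1', language: 'eng' } }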
7024
7025 const formatVideoPlaylist = ({
7026 attributes,
7027 segments,
7028 sidx,
7029 discontinuityStarts
7030 }) => {
7031 const playlist = {
7032 attributes: {
7033 NAME: attributes.id,
7034 AUDIO: 'audio',
7035 SUBTITLES: 'subs',
7036 RESOLUTION: {
7037 width: attributes.width,
7038 height: attributes.height
7039 },
7040 CODECS: attributes.codecs,
7041 BANDWIDTH: attributes.bandwidth,
7042 ['PROGRAM-ID']: 1
7043 },
7044 uri: '',
7045 endList: attributes.type === 'static',
7046 timeline: attributes.periodStart,
7047 resolvedUri: attributes.baseUrl || '',
7048 targetDuration: attributes.duration,
7049 discontinuityStarts,
7050 timelineStarts: attributes.timelineStarts,
7051 segments
7052 };
7053
7054 if (attributes.frameRate) {
7055 playlist.attributes['FRAME-RATE'] = attributes.frameRate;
7056 }
7057
7058 if (attributes.contentProtection) {
7059 playlist.contentProtection = attributes.contentProtection;
7060 }
7061
7062 if (attributes.serviceLocation) {
7063 playlist.attributes.serviceLocation = attributes.serviceLocation;
7064 }
7065
7066 if (sidx) {
7067 playlist.sidx = sidx;
7068 }
7069
7070 return playlist;
7071 };
7072
7073 const videoOnly = ({
7074 attributes
7075 }) => attributes.mimeType === 'video/mp4' || attributes.mimeType === 'video/webm' || attributes.contentType === 'video';
7076
7077 const audioOnly = ({
7078 attributes
7079 }) => attributes.mimeType === 'audio/mp4' || attributes.mimeType === 'audio/webm' || attributes.contentType === 'audio';
7080
7081 const vttOnly = ({
7082 attributes
7083 }) => attributes.mimeType === 'text/vtt' || attributes.contentType === 'text';
7084 /**
7085 * Contains start and timeline properties denoting a timeline start. For DASH, these will
7086 * be the same number.
7087 *
7088 * @typedef {Object} TimelineStart
7089 * @property {number} start - the start time of the timeline
7090 * @property {number} timeline - the timeline number
7091 */
7092
7093 /**
7094 * Adds appropriate media and discontinuity sequence values to the segments and playlists.
7095 *
7096 * Throughout mpd-parser, the `number` attribute is used in relation to `startNumber`, a
7097    * DASH specific attribute used in constructing segment URIs from templates. However, from
7098 * an HLS perspective, the `number` attribute on a segment would be its `mediaSequence`
7099 * value, which should start at the original media sequence value (or 0) and increment by 1
7100 * for each segment thereafter. Since DASH's `startNumber` values are independent per
7101 * period, it doesn't make sense to use it for `number`. Instead, assume everything starts
7102 * from a 0 mediaSequence value and increment from there.
7103 *
7104 * Note that VHS currently doesn't use the `number` property, but it can be helpful for
7105 * debugging and making sense of the manifest.
7106 *
7107 * For live playlists, to account for values increasing in manifests when periods are
7108 * removed on refreshes, merging logic should be used to update the numbers to their
7109 * appropriate values (to ensure they're sequential and increasing).
7110 *
7111 * @param {Object[]} playlists - the playlists to update
7112 * @param {TimelineStart[]} timelineStarts - the timeline starts for the manifest
7113 */
7114
7115
7116 const addMediaSequenceValues = (playlists, timelineStarts) => {
7117 // increment all segments sequentially
7118 playlists.forEach(playlist => {
7119 playlist.mediaSequence = 0;
7120 playlist.discontinuitySequence = timelineStarts.findIndex(function ({
7121 timeline
7122 }) {
7123 return timeline === playlist.timeline;
7124 });
7125
7126 if (!playlist.segments) {
7127 return;
7128 }
7129
7130 playlist.segments.forEach((segment, index) => {
7131 segment.number = index;
7132 });
7133 });
7134 };
7135 /**
7136 * Given a media group object, flattens all playlists within the media group into a single
7137 * array.
7138 *
7139 * @param {Object} mediaGroupObject - the media group object
7140 *
7141 * @return {Object[]}
7142 * The media group playlists
7143 */
7144
7145
7146 const flattenMediaGroupPlaylists = mediaGroupObject => {
7147 if (!mediaGroupObject) {
7148 return [];
7149 }
7150
7151 return Object.keys(mediaGroupObject).reduce((acc, label) => {
7152 const labelContents = mediaGroupObject[label];
7153 return acc.concat(labelContents.playlists);
7154 }, []);
7155 };
7156
7157 const toM3u8 = ({
7158 dashPlaylists,
7159 locations,
7160 contentSteering,
7161 sidxMapping = {},
7162 previousManifest,
7163 eventStream
7164 }) => {
7165 if (!dashPlaylists.length) {
7166 return {};
7167 } // grab all main manifest attributes
7168
7169
7170 const {
7171 sourceDuration: duration,
7172 type,
7173 suggestedPresentationDelay,
7174 minimumUpdatePeriod
7175 } = dashPlaylists[0].attributes;
7176 const videoPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(videoOnly)).map(formatVideoPlaylist);
7177 const audioPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(audioOnly));
7178 const vttPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(vttOnly));
7179 const captions = dashPlaylists.map(playlist => playlist.attributes.captionServices).filter(Boolean);
7180 const manifest = {
7181 allowCache: true,
7182 discontinuityStarts: [],
7183 segments: [],
7184 endList: true,
7185 mediaGroups: {
7186 AUDIO: {},
7187 VIDEO: {},
7188 ['CLOSED-CAPTIONS']: {},
7189 SUBTITLES: {}
7190 },
7191 uri: '',
7192 duration,
7193 playlists: addSidxSegmentsToPlaylists(videoPlaylists, sidxMapping)
7194 };
7195
7196 if (minimumUpdatePeriod >= 0) {
7197 manifest.minimumUpdatePeriod = minimumUpdatePeriod * 1000;
7198 }
7199
7200 if (locations) {
7201 manifest.locations = locations;
7202 }
7203
7204 if (contentSteering) {
7205 manifest.contentSteering = contentSteering;
7206 }
7207
7208 if (type === 'dynamic') {
7209 manifest.suggestedPresentationDelay = suggestedPresentationDelay;
7210 }
7211
7212 if (eventStream && eventStream.length > 0) {
7213 manifest.eventStream = eventStream;
7214 }
7215
7216 const isAudioOnly = manifest.playlists.length === 0;
7217 const organizedAudioGroup = audioPlaylists.length ? organizeAudioPlaylists(audioPlaylists, sidxMapping, isAudioOnly) : null;
7218 const organizedVttGroup = vttPlaylists.length ? organizeVttPlaylists(vttPlaylists, sidxMapping) : null;
7219 const formattedPlaylists = videoPlaylists.concat(flattenMediaGroupPlaylists(organizedAudioGroup), flattenMediaGroupPlaylists(organizedVttGroup));
7220 const playlistTimelineStarts = formattedPlaylists.map(({
7221 timelineStarts
7222 }) => timelineStarts);
7223 manifest.timelineStarts = getUniqueTimelineStarts(playlistTimelineStarts);
7224 addMediaSequenceValues(formattedPlaylists, manifest.timelineStarts);
7225
7226 if (organizedAudioGroup) {
7227 manifest.mediaGroups.AUDIO.audio = organizedAudioGroup;
7228 }
7229
7230 if (organizedVttGroup) {
7231 manifest.mediaGroups.SUBTITLES.subs = organizedVttGroup;
7232 }
7233
7234 if (captions.length) {
7235 manifest.mediaGroups['CLOSED-CAPTIONS'].cc = organizeCaptionServices(captions);
7236 }
7237
7238 if (previousManifest) {
7239 return positionManifestOnTimeline({
7240 oldManifest: previousManifest,
7241 newManifest: manifest
7242 });
7243 }
7244
7245 return manifest;
7246 };
7247 /**
7248 * Calculates the R (repetition) value for a live stream (for the final segment
7249 * in a manifest where the r value is negative 1)
7250 *
7251 * @param {Object} attributes
7252 * Object containing all inherited attributes from parent elements with attribute
7253 * names as keys
7254 * @param {number} time
7255 * current time (typically the total time up until the final segment)
7256 * @param {number} duration
7257 * duration property for the given <S />
7258 *
7259 * @return {number}
7260 * R value to reach the end of the given period
7261 */
7262
7263
7264 const getLiveRValue = (attributes, time, duration) => {
7265 const {
7266 NOW,
7267 clientOffset,
7268 availabilityStartTime,
7269 timescale = 1,
7270 periodStart = 0,
7271 minimumUpdatePeriod = 0
7272 } = attributes;
7273 const now = (NOW + clientOffset) / 1000;
7274 const periodStartWC = availabilityStartTime + periodStart;
7275 const periodEndWC = now + minimumUpdatePeriod;
7276 const periodDuration = periodEndWC - periodStartWC;
7277 return Math.ceil((periodDuration * timescale - time) / duration);
7278 };
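// Worked example with invented values: NOW = 120000 ms, clientOffset = 0,
// availabilityStartTime = 0, periodStart = 0, minimumUpdatePeriod = 0 and
// timescale = 90000 give now = 120 s and periodDuration = 120 s, so a
// 2-second segment (duration = 180000) starting from time = 0 yields
// R = Math.ceil((120 * 90000 - 0) / 180000) = 60 remaining repetitions.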
7279 /**
7280 * Uses information provided by SegmentTemplate.SegmentTimeline to determine segment
7281 * timing and duration
7282 *
7283 * @param {Object} attributes
7284 * Object containing all inherited attributes from parent elements with attribute
7285 * names as keys
7286 * @param {Object[]} segmentTimeline
7287 * List of objects representing the attributes of each S element contained within the SegmentTimeline element
7288 *
7289 * @return {{number: number, duration: number, time: number, timeline: number}[]}
7290 * List of Objects with segment timing and duration info
7291 */
7292
7293
7294 const parseByTimeline = (attributes, segmentTimeline) => {
7295 const {
7296 type,
7297 minimumUpdatePeriod = 0,
7298 media = '',
7299 sourceDuration,
7300 timescale = 1,
7301 startNumber = 1,
7302 periodStart: timeline
7303 } = attributes;
7304 const segments = [];
7305 let time = -1;
7306
7307 for (let sIndex = 0; sIndex < segmentTimeline.length; sIndex++) {
7308 const S = segmentTimeline[sIndex];
7309 const duration = S.d;
7310 const repeat = S.r || 0;
7311 const segmentTime = S.t || 0;
7312
7313 if (time < 0) {
7314 // first segment
7315 time = segmentTime;
7316 }
7317
7318 if (segmentTime && segmentTime > time) {
7319 // discontinuity
7320 // TODO: How to handle this type of discontinuity
7321 // timeline++ here would treat it like an HLS discontinuity and content would
7322 // get appended without gap
7323 // E.G.
7324 // <S t="0" d="1" />
7325 // <S d="1" />
7326 // <S d="1" />
7327 // <S t="5" d="1" />
7328 // would have $Time$ values of [0, 1, 2, 5]
7329 // should this be appended at time positions [0, 1, 2, 3] (#EXT-X-DISCONTINUITY)
7330 // or [0, 1, 2, gap, gap, 5]? (#EXT-X-GAP)
7331 // does the value of sourceDuration consider this when calculating arbitrary
7332 // negative @r repeat value?
7333 // E.G. Same elements as above with this added at the end
7334 // <S d="1" r="-1" />
7335 // with a sourceDuration of 10
7336 // Would the 2 gaps be included in the time duration calculations resulting in
7337 // 8 segments with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9] or 10 segments
7338 // with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9, 10, 11] ?
7339 time = segmentTime;
7340 }
7341
7342 let count;
7343
7344 if (repeat < 0) {
7345 const nextS = sIndex + 1;
7346
7347 if (nextS === segmentTimeline.length) {
7348 // last segment
7349 if (type === 'dynamic' && minimumUpdatePeriod > 0 && media.indexOf('$Number$') > 0) {
7350 count = getLiveRValue(attributes, time, duration);
7351 } else {
7352 // TODO: This may be incorrect depending on conclusion of TODO above
7353 count = (sourceDuration * timescale - time) / duration;
7354 }
7355 } else {
7356 count = (segmentTimeline[nextS].t - time) / duration;
7357 }
7358 } else {
7359 count = repeat + 1;
7360 }
7361
7362 const end = startNumber + segments.length + count;
7363 let number = startNumber + segments.length;
7364
7365 while (number < end) {
7366 segments.push({
7367 number,
7368 duration: duration / timescale,
7369 time,
7370 timeline
7371 });
7372 time += duration;
7373 number++;
7374 }
7375 }
7376
7377 return segments;
7378 };
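// Illustrative sketch (attribute values invented): a single
// <S t="0" d="180000" r="2"/> at timescale 90000 expands to three 2-second
// segments.
//
// parseByTimeline({
//   type: 'static',
//   timescale: 90000,
//   startNumber: 1,
//   periodStart: 0
// }, [{ t: 0, d: 180000, r: 2 }]);
// // => [{ number: 1, duration: 2, time: 0, timeline: 0 },
// //     { number: 2, duration: 2, time: 180000, timeline: 0 },
// //     { number: 3, duration: 2, time: 360000, timeline: 0 }]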
7379
7380 const identifierPattern = /\$([A-Za-z]*)(?:(%0)([0-9]+)d)?\$/g; // note: [A-Za-z] rather than [A-z], whose range also matches non-letter characters
7381 /**
7382 * Replaces template identifiers with corresponding values. To be used as the callback
7383 * for String.prototype.replace
7384 *
7385 * @name replaceCallback
7386 * @function
7387 * @param {string} match
7388 * Entire match of identifier
7389 * @param {string} identifier
7390 * Name of matched identifier
7391 * @param {string} format
7392 * Format tag string. Its presence indicates that padding is expected
7393 * @param {string} width
7394 * Desired length of the replaced value. Values less than this width shall be left
7395 * zero padded
7396 * @return {string}
7397 * Replacement for the matched identifier
7398 */
7399
7400 /**
7401 * Returns a function to be used as a callback for String.prototype.replace to replace
7402 * template identifiers
7403 *
7404 * @param {Object} values
7405 * Object containing values that shall be used to replace known identifiers
7406 * @param {number} values.RepresentationID
7407 * Value of the Representation@id attribute
7408 * @param {number} values.Number
7409 * Number of the corresponding segment
7410 * @param {number} values.Bandwidth
7411 * Value of the Representation@bandwidth attribute.
7412 * @param {number} values.Time
7413 * Timestamp value of the corresponding segment
7414 * @return {replaceCallback}
7415 * Callback to be used with String.prototype.replace to replace identifiers
7416 */
7417
7418 const identifierReplacement = values => (match, identifier, format, width) => {
7419 if (match === '$$') {
7420 // escape sequence
7421 return '$';
7422 }
7423
7424 if (typeof values[identifier] === 'undefined') {
7425 return match;
7426 }
7427
7428 const value = '' + values[identifier];
7429
7430 if (identifier === 'RepresentationID') {
7431 // Format tag shall not be present with RepresentationID
7432 return value;
7433 }
7434
7435 if (!format) {
7436 width = 1;
7437 } else {
7438 width = parseInt(width, 10);
7439 }
7440
7441 if (value.length >= width) {
7442 return value;
7443 }
7444
7445 return `${new Array(width - value.length + 1).join('0')}${value}`;
7446 };
7447 /**
7448 * Constructs a segment url from a template string
7449 *
7450 * @param {string} url
7451 * Template string to construct url from
7452 * @param {Object} values
7453 * Object containing values that shall be used to replace known identifiers
7454 * @param {number} values.RepresentationID
7455 * Value of the Representation@id attribute
7456 * @param {number} values.Number
7457 * Number of the corresponding segment
7458 * @param {number} values.Bandwidth
7459 * Value of the Representation@bandwidth attribute.
7460 * @param {number} values.Time
7461 * Timestamp value of the corresponding segment
7462 * @return {string}
7463 * Segment url with identifiers replaced
7464 */
7465
7466
7467 const constructTemplateUrl = (url, values) => url.replace(identifierPattern, identifierReplacement(values));
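// Illustrative sketch (template and values invented): identifier replacement
// with width padding, and the $$ escape sequence.
//
// constructTemplateUrl('$RepresentationID$/seg-$Number%03d$.m4s', {
//   RepresentationID: 'video-1',
//   Number: 7
// }); // => 'video-1/seg-007.m4s'
// constructTemplateUrl('cost-$$$Number$.m4s', { Number: 2 }); // => 'cost-$2.m4s'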
7468 /**
7469 * Generates a list of objects containing timing and duration information about each
7470 * segment needed to generate segment uris and the complete segment object
7471 *
7472 * @param {Object} attributes
7473 * Object containing all inherited attributes from parent elements with attribute
7474 * names as keys
7475 * @param {Object[]|undefined} segmentTimeline
7476 * List of objects representing the attributes of each S element contained within
7477 * the SegmentTimeline element
7478 * @return {{number: number, duration: number, time: number, timeline: number}[]}
7479 * List of Objects with segment timing and duration info
7480 */
7481
7482
7483 const parseTemplateInfo = (attributes, segmentTimeline) => {
7484 if (!attributes.duration && !segmentTimeline) {
7485 // if neither @duration nor SegmentTimeline is present, then there shall be exactly
7486 // one media segment
7487 return [{
7488 number: attributes.startNumber || 1,
7489 duration: attributes.sourceDuration,
7490 time: 0,
7491 timeline: attributes.periodStart
7492 }];
7493 }
7494
7495 if (attributes.duration) {
7496 return parseByDuration(attributes);
7497 }
7498
7499 return parseByTimeline(attributes, segmentTimeline);
7500 };
7501 /**
7502 * Generates a list of segments using information provided by the SegmentTemplate element
7503 *
7504 * @param {Object} attributes
7505 * Object containing all inherited attributes from parent elements with attribute
7506 * names as keys
7507 * @param {Object[]|undefined} segmentTimeline
7508 * List of objects representing the attributes of each S element contained within
7509 * the SegmentTimeline element
7510 * @return {Object[]}
7511 * List of segment objects
7512 */
7513
7514
7515 const segmentsFromTemplate = (attributes, segmentTimeline) => {
7516 const templateValues = {
7517 RepresentationID: attributes.id,
7518 Bandwidth: attributes.bandwidth || 0
7519 };
7520 const {
7521 initialization = {
7522 sourceURL: '',
7523 range: ''
7524 }
7525 } = attributes;
7526 const mapSegment = urlTypeToSegment({
7527 baseUrl: attributes.baseUrl,
7528 source: constructTemplateUrl(initialization.sourceURL, templateValues),
7529 range: initialization.range
7530 });
7531 const segments = parseTemplateInfo(attributes, segmentTimeline);
7532 return segments.map(segment => {
7533 templateValues.Number = segment.number;
7534 templateValues.Time = segment.time;
7535 const uri = constructTemplateUrl(attributes.media || '', templateValues); // See DASH spec section 5.3.9.2.2
7536 // - if timescale isn't present on any level, default to 1.
7537
7538 const timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0
7539
7540 const presentationTimeOffset = attributes.presentationTimeOffset || 0;
7541 const presentationTime = // Even if the @t attribute is not specified for the segment, segment.time is
7542 // calculated in mpd-parser prior to this, so it's assumed to be available.
7543 attributes.periodStart + (segment.time - presentationTimeOffset) / timescale;
7544 const map = {
7545 uri,
7546 timeline: segment.timeline,
7547 duration: segment.duration,
7548 resolvedUri: resolveUrl$1(attributes.baseUrl || '', uri),
7549 map: mapSegment,
7550 number: segment.number,
7551 presentationTime
7552 };
7553 return map;
7554 });
7555 };
7556 /**
7557 * Converts a <SegmentURL> (of type URLType from the DASH spec 5.3.9.2 Table 14)
7558 * to an object that matches the output of a segment in videojs/mpd-parser
7559 *
7560 * @param {Object} attributes
7561 * Object containing all inherited attributes from parent elements with attribute
7562 * names as keys
7563 * @param {Object} segmentUrl
7564 * <SegmentURL> node to translate into a segment object
7565 * @return {Object} translated segment object
7566 */
7567
7568
7569 const SegmentURLToSegmentObject = (attributes, segmentUrl) => {
7570 const {
7571 baseUrl,
7572 initialization = {}
7573 } = attributes;
7574 const initSegment = urlTypeToSegment({
7575 baseUrl,
7576 source: initialization.sourceURL,
7577 range: initialization.range
7578 });
7579 const segment = urlTypeToSegment({
7580 baseUrl,
7581 source: segmentUrl.media,
7582 range: segmentUrl.mediaRange
7583 });
7584 segment.map = initSegment;
7585 return segment;
7586 };
7587 /**
7588 * Generates a list of segments using information provided by the SegmentList element
7589 * SegmentList (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
7590 * node should be translated into a segment.
7591 *
7592 * @param {Object} attributes
7593 * Object containing all inherited attributes from parent elements with attribute
7594 * names as keys
7595 * @param {Object[]|undefined} segmentTimeline
7596 * List of objects representing the attributes of each S element contained within
7597 * the SegmentTimeline element
7598 * @return {Object[]} list of segments
7599 */
7600
7601
7602 const segmentsFromList = (attributes, segmentTimeline) => {
7603 const {
7604 duration,
7605 segmentUrls = [],
7606 periodStart
7607 } = attributes; // Per spec (5.3.9.2.1) there is no way to determine segment duration
7608 // if neither @duration nor SegmentTimeline is defined, and defining both is outside of spec.
7609
7610 if (!duration && !segmentTimeline || duration && segmentTimeline) {
7611 throw new Error(errors.SEGMENT_TIME_UNSPECIFIED);
7612 }
7613
7614 const segmentUrlMap = segmentUrls.map(segmentUrlObject => SegmentURLToSegmentObject(attributes, segmentUrlObject));
7615 let segmentTimeInfo;
7616
7617 if (duration) {
7618 segmentTimeInfo = parseByDuration(attributes);
7619 }
7620
7621 if (segmentTimeline) {
7622 segmentTimeInfo = parseByTimeline(attributes, segmentTimeline);
7623 }
7624
7625 const segments = segmentTimeInfo.map((segmentTime, index) => {
7626 if (segmentUrlMap[index]) {
7627 const segment = segmentUrlMap[index]; // See DASH spec section 5.3.9.2.2
7628 // - if timescale isn't present on any level, default to 1.
7629
7630 const timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0
7631
7632 const presentationTimeOffset = attributes.presentationTimeOffset || 0;
7633 segment.timeline = segmentTime.timeline;
7634 segment.duration = segmentTime.duration;
7635 segment.number = segmentTime.number;
7636 segment.presentationTime = periodStart + (segmentTime.time - presentationTimeOffset) / timescale;
7637 return segment;
7638 } // Since we're mapping we should get rid of any blank segments (in case
7639 // the given SegmentTimeline accounts for more elements than we have
7640 // SegmentURLs for).
7641
7642 }).filter(segment => segment);
7643 return segments;
7644 };
7645
7646 const generateSegments = ({
7647 attributes,
7648 segmentInfo
7649 }) => {
7650 let segmentAttributes;
7651 let segmentsFn;
7652
7653 if (segmentInfo.template) {
7654 segmentsFn = segmentsFromTemplate;
7655 segmentAttributes = merge(attributes, segmentInfo.template);
7656 } else if (segmentInfo.base) {
7657 segmentsFn = segmentsFromBase;
7658 segmentAttributes = merge(attributes, segmentInfo.base);
7659 } else if (segmentInfo.list) {
7660 segmentsFn = segmentsFromList;
7661 segmentAttributes = merge(attributes, segmentInfo.list);
7662 }
7663
7664 const segmentsInfo = {
7665 attributes
7666 };
7667
7668 if (!segmentsFn) {
7669 return segmentsInfo;
7670 }
7671
7672 const segments = segmentsFn(segmentAttributes, segmentInfo.segmentTimeline); // The @duration attribute will be used to determine the playlist's targetDuration which
7673 // must be in seconds. Since we've generated the segment list, we no longer need
7674 // @duration to be in @timescale units, so we can convert it here.
7675
7676 if (segmentAttributes.duration) {
7677 const {
7678 duration,
7679 timescale = 1
7680 } = segmentAttributes;
7681 segmentAttributes.duration = duration / timescale;
7682 } else if (segments.length) {
7683 // if there is no @duration attribute, use the largest segment duration as
7684 // the target duration
7685 segmentAttributes.duration = segments.reduce((max, segment) => {
7686 return Math.max(max, Math.ceil(segment.duration));
7687 }, 0);
7688 } else {
7689 segmentAttributes.duration = 0;
7690 }
7691
7692 segmentsInfo.attributes = segmentAttributes;
7693 segmentsInfo.segments = segments; // This is a sidx box without actual segment information
7694
7695 if (segmentInfo.base && segmentAttributes.indexRange) {
7696 segmentsInfo.sidx = segments[0];
7697 segmentsInfo.segments = [];
7698 }
7699
7700 return segmentsInfo;
7701 };
7702
7703 const toPlaylists = representations => representations.map(generateSegments);
7704
7705 const findChildren = (element, name) => from(element.childNodes).filter(({
7706 tagName
7707 }) => tagName === name);
7708
7709 const getContent = element => element.textContent.trim();
7710 /**
7711 * Converts the provided string that may contain a division operation to a number.
7712 *
7713 * @param {string} value - the provided string value
7714 *
7715 * @return {number} the parsed numeric value
7716 */
7717
7718
7719 const parseDivisionValue = value => {
7720 return parseFloat(value.split('/').reduce((prev, current) => prev / current));
7721 };
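// Illustrative sketch: frame rates commonly appear as division expressions.
// parseDivisionValue('30000/1001'); // => 29.97002997002997
// parseDivisionValue('25'); // => 25 (no '/', so the value parses as-is)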
7722
7723 const parseDuration = str => {
7724 const SECONDS_IN_YEAR = 365 * 24 * 60 * 60;
7725 const SECONDS_IN_MONTH = 30 * 24 * 60 * 60;
7726 const SECONDS_IN_DAY = 24 * 60 * 60;
7727 const SECONDS_IN_HOUR = 60 * 60;
7728 const SECONDS_IN_MIN = 60; // P10Y10M10DT10H10M10.1S
7729
7730 const durationRegex = /P(?:(\d*)Y)?(?:(\d*)M)?(?:(\d*)D)?(?:T(?:(\d*)H)?(?:(\d*)M)?(?:([\d.]*)S)?)?/;
7731 const match = durationRegex.exec(str);
7732
7733 if (!match) {
7734 return 0;
7735 }
7736
7737 const [year, month, day, hour, minute, second] = match.slice(1);
7738 return parseFloat(year || 0) * SECONDS_IN_YEAR + parseFloat(month || 0) * SECONDS_IN_MONTH + parseFloat(day || 0) * SECONDS_IN_DAY + parseFloat(hour || 0) * SECONDS_IN_HOUR + parseFloat(minute || 0) * SECONDS_IN_MIN + parseFloat(second || 0);
7739 };
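// Illustrative sketch: ISO 8601 durations as they appear in MPD attributes.
// parseDuration('PT1H30M'); // => 5400 (1 hour + 30 minutes, in seconds)
// parseDuration('PT6.006S'); // => 6.006
// parseDuration('P1DT12H'); // => 129600 (1 day + 12 hours)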
7740
7741 const parseDate = str => {
7742 // Date format without timezone according to ISO 8601
7743 // YYYY-MM-DDThh:mm:ss.ssssss
7744 const dateRegex = /^\d+-\d+-\d+T\d+:\d+:\d+(\.\d+)?$/; // If the date string does not specify a timezone, we must specify UTC. This is
7745 // expressed by ending with 'Z'
7746
7747 if (dateRegex.test(str)) {
7748 str += 'Z';
7749 }
7750
7751 return Date.parse(str);
7752 };
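// Illustrative sketch (dates invented): timezone-less strings are pinned to
// UTC before parsing; explicit offsets pass through unchanged.
// parseDate('2023-01-01T00:00:00'); // => 1672531200000 ('Z' appended first)
// parseDate('2023-01-01T00:00:00+01:00'); // => 1672527600000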
7753
7754 const parsers = {
7755 /**
7756 * Specifies the duration of the entire Media Presentation. Format is a duration string
7757 * as specified in ISO 8601
7758 *
7759 * @param {string} value
7760 * value of attribute as a string
7761 * @return {number}
7762 * The duration in seconds
7763 */
7764 mediaPresentationDuration(value) {
7765 return parseDuration(value);
7766 },
7767
7768 /**
7769 * Specifies the Segment availability start time for all Segments referred to in this
7770 * MPD. For a dynamic manifest, it specifies the anchor for the earliest availability
7771 * time. Format is a date string as specified in ISO 8601
7772 *
7773 * @param {string} value
7774 * value of attribute as a string
7775 * @return {number}
7776 * The date as seconds from unix epoch
7777 */
7778 availabilityStartTime(value) {
7779 return parseDate(value) / 1000;
7780 },
7781
7782 /**
7783 * Specifies the smallest period between potential changes to the MPD. Format is a
7784 * duration string as specified in ISO 8601
7785 *
7786 * @param {string} value
7787 * value of attribute as a string
7788 * @return {number}
7789 * The duration in seconds
7790 */
7791 minimumUpdatePeriod(value) {
7792 return parseDuration(value);
7793 },
7794
7795 /**
7796 * Specifies the suggested presentation delay. Format is a
7797 * duration string as specified in ISO 8601
7798 *
7799 * @param {string} value
7800 * value of attribute as a string
7801 * @return {number}
7802 * The duration in seconds
7803 */
7804 suggestedPresentationDelay(value) {
7805 return parseDuration(value);
7806 },
7807
7808 /**
7809 * Specifies the type of MPD. Can be either "static" or "dynamic"
7810 *
7811 * @param {string} value
7812 * value of attribute as a string
7813 *
7814 * @return {string}
7815 * The type as a string
7816 */
7817 type(value) {
7818 return value;
7819 },
7820
7821 /**
7822 * Specifies the duration of the smallest time shifting buffer for any Representation
7823 * in the MPD. Format is a duration string as specified in ISO 8601
7824 *
7825 * @param {string} value
7826 * value of attribute as a string
7827 * @return {number}
7828 * The duration in seconds
7829 */
7830 timeShiftBufferDepth(value) {
7831 return parseDuration(value);
7832 },
7833
7834 /**
7835 * Specifies the PeriodStart time of the Period relative to the availabilityStartTime.
7836 * Format is a duration string as specified in ISO 8601
7837 *
7838 * @param {string} value
7839 * value of attribute as a string
7840 * @return {number}
7841 * The duration in seconds
7842 */
7843 start(value) {
7844 return parseDuration(value);
7845 },
7846
7847 /**
7848 * Specifies the width of the visual presentation
7849 *
7850 * @param {string} value
7851 * value of attribute as a string
7852 * @return {number}
7853 * The parsed width
7854 */
7855 width(value) {
7856 return parseInt(value, 10);
7857 },
7858
7859 /**
7860 * Specifies the height of the visual presentation
7861 *
7862 * @param {string} value
7863 * value of attribute as a string
7864 * @return {number}
7865 * The parsed height
7866 */
7867 height(value) {
7868 return parseInt(value, 10);
7869 },
7870
7871 /**
7872 * Specifies the bitrate of the representation
7873 *
7874 * @param {string} value
7875 * value of attribute as a string
7876 * @return {number}
7877 * The parsed bandwidth
7878 */
7879 bandwidth(value) {
7880 return parseInt(value, 10);
7881 },
7882
7883 /**
7884 * Specifies the frame rate of the representation
7885 *
7886 * @param {string} value
7887 * value of attribute as a string
7888 * @return {number}
7889 * The parsed frame rate
7890 */
7891 frameRate(value) {
7892 return parseDivisionValue(value);
7893 },
7894
7895 /**
7896 * Specifies the number of the first Media Segment in this Representation in the Period
7897 *
7898 * @param {string} value
7899 * value of attribute as a string
7900 * @return {number}
7901 * The parsed number
7902 */
7903 startNumber(value) {
7904 return parseInt(value, 10);
7905 },
7906
7907 /**
7908 * Specifies the timescale in units per second
7909 *
7910 * @param {string} value
7911 * value of attribute as a string
7912 * @return {number}
7913 * The parsed timescale
7914 */
7915 timescale(value) {
7916 return parseInt(value, 10);
7917 },
7918
7919 /**
7920 * Specifies the presentationTimeOffset.
7921 *
7922 * @param {string} value
7923 * value of the attribute as a string
7924 *
7925 * @return {number}
7926 * The parsed presentationTimeOffset
7927 */
7928 presentationTimeOffset(value) {
7929 return parseInt(value, 10);
7930 },
7931
7932 /**
7933 * Specifies the constant approximate Segment duration
7934 * NOTE: The <Period> element also contains an @duration attribute. This duration
7935 * specifies the duration of the Period. This attribute is currently not
7936 * supported by the rest of the parser, however we still check for it to prevent
7937 * errors.
7938 *
7939 * @param {string} value
7940 * value of attribute as a string
7941 * @return {number}
7942 * The parsed duration
7943 */
7944 duration(value) {
7945 const parsedValue = parseInt(value, 10);
7946
7947 if (isNaN(parsedValue)) {
7948 return parseDuration(value);
7949 }
7950
7951 return parsedValue;
7952 },
7953
7954 /**
7955 * Specifies the Segment duration, in units of the value of the @timescale.
7956 *
7957 * @param {string} value
7958 * value of attribute as a string
7959 * @return {number}
7960 * The parsed duration
7961 */
7962 d(value) {
7963 return parseInt(value, 10);
7964 },
7965
7966 /**
7967 * Specifies the MPD start time, in @timescale units, the first Segment in the series
7968 * starts relative to the beginning of the Period
7969 *
7970 * @param {string} value
7971 * value of attribute as a string
7972 * @return {number}
7973 * The parsed time
7974 */
7975 t(value) {
7976 return parseInt(value, 10);
7977 },
7978
7979 /**
7980 * Specifies the repeat count of the number of following contiguous Segments with the
7981 * same duration expressed by the value of @d
7982 *
7983 * @param {string} value
7984 * value of attribute as a string
7985 * @return {number}
7986 * The parsed number
7987 */
7988 r(value) {
7989 return parseInt(value, 10);
7990 },
7991
7992 /**
7993 * Specifies the presentationTime.
7994 *
7995 * @param {string} value
7996 * value of the attribute as a string
7997 *
7998 * @return {number}
7999 * The parsed presentationTime
8000 */
8001 presentationTime(value) {
8002 return parseInt(value, 10);
8003 },
8004
8005 /**
8006 * Default parser for all other attributes. Acts as a no-op and just returns the value
8007 * as a string
8008 *
8009 * @param {string} value
8010 * value of attribute as a string
8011 * @return {string}
8012 * Unparsed value
8013 */
8014 DEFAULT(value) {
8015 return value;
8016 }
8017
8018 };
8019 /**
8020 * Gets all the attributes and values of the provided node, parses attributes with known
8021 * types, and returns an object with attribute names mapped to values.
8022 *
8023 * @param {Node} el
8024 * The node to parse attributes from
8025 * @return {Object}
8026 * Object with all attributes of el parsed
8027 */
8028
8029 const parseAttributes = el => {
8030 if (!(el && el.attributes)) {
8031 return {};
8032 }
8033
8034 return from(el.attributes).reduce((a, e) => {
8035 const parseFn = parsers[e.name] || parsers.DEFAULT;
8036 a[e.name] = parseFn(e.value);
8037 return a;
8038 }, {});
8039 };
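// Illustrative sketch (element and attribute values invented): known attribute
// names go through their typed parsers; anything else falls through DEFAULT
// and remains a string.
//
// const exampleS = new xmldom.DOMParser()
//   .parseFromString('<S t="0" d="90000" r="4" foo="bar"/>', 'application/xml')
//   .documentElement;
// parseAttributes(exampleS); // => { t: 0, d: 90000, r: 4, foo: 'bar' }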
8040
8041 const keySystemsMap = {
8042 'urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b': 'org.w3.clearkey',
8043 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed': 'com.widevine.alpha',
8044 'urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95': 'com.microsoft.playready',
8045 'urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb': 'com.adobe.primetime',
8046 // ISO_IEC 23009-1_2022 5.8.5.2.2 The mp4 Protection Scheme
8047 'urn:mpeg:dash:mp4protection:2011': 'mp4protection'
8048 };
8049 /**
8050 * Builds a list of urls that is the product of the reference urls and BaseURL values
8051 *
8052 * @param {Object[]} references
8053 * List of objects containing the reference URL as well as its attributes
8054 * @param {Node[]} baseUrlElements
8055 * List of BaseURL nodes from the mpd
8056 * @return {Object[]}
8057 * List of objects with resolved urls and attributes
8058 */
8059
8060 const buildBaseUrls = (references, baseUrlElements) => {
8061 if (!baseUrlElements.length) {
8062 return references;
8063 }
8064
8065 return flatten(references.map(function (reference) {
8066 return baseUrlElements.map(function (baseUrlElement) {
8067 const initialBaseUrl = getContent(baseUrlElement);
8068 const resolvedBaseUrl = resolveUrl$1(reference.baseUrl, initialBaseUrl);
8069 const finalBaseUrl = merge(parseAttributes(baseUrlElement), {
8070 baseUrl: resolvedBaseUrl
8071 }); // If the URL is resolved, we want to get the serviceLocation from the reference
8072 // assuming there is no serviceLocation on the initialBaseUrl
8073
8074 if (resolvedBaseUrl !== initialBaseUrl && !finalBaseUrl.serviceLocation && reference.serviceLocation) {
8075 finalBaseUrl.serviceLocation = reference.serviceLocation;
8076 }
8077
8078 return finalBaseUrl;
8079 });
8080 }));
8081 };
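// Illustrative sketch (URLs invented): two <BaseURL> children crossed with one
// reference URL yield one resolved entry per child.
//
// const exampleMpd = new xmldom.DOMParser().parseFromString(
//   '<MPD><BaseURL>audio/</BaseURL><BaseURL serviceLocation="b">video/</BaseURL></MPD>',
//   'application/xml'
// ).documentElement;
// buildBaseUrls([{ baseUrl: 'https://example.com/' }], findChildren(exampleMpd, 'BaseURL'));
// // => [{ baseUrl: 'https://example.com/audio/' },
// //     { serviceLocation: 'b', baseUrl: 'https://example.com/video/' }]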
8082 /**
8083 * Contains all Segment information for its containing AdaptationSet
8084 *
8085 * @typedef {Object} SegmentInformation
8086 * @property {Object|undefined} template
8087 * Contains the attributes for the SegmentTemplate node
8088 * @property {Object[]|undefined} segmentTimeline
8089 * Contains a list of attributes for each S node within the SegmentTimeline node
8090 * @property {Object|undefined} list
8091 * Contains the attributes for the SegmentList node
8092 * @property {Object|undefined} base
8093 * Contains the attributes for the SegmentBase node
8094 */
8095
8096 /**
8097 * Returns all available Segment information contained within the AdaptationSet node
8098 *
8099 * @param {Node} adaptationSet
8100 * The AdaptationSet node to get Segment information from
8101 * @return {SegmentInformation}
8102 * The Segment information contained within the provided AdaptationSet
8103 */
8104
8105
8106 const getSegmentInformation = adaptationSet => {
8107 const segmentTemplate = findChildren(adaptationSet, 'SegmentTemplate')[0];
8108 const segmentList = findChildren(adaptationSet, 'SegmentList')[0];
8109 const segmentUrls = segmentList && findChildren(segmentList, 'SegmentURL').map(s => merge({
8110 tag: 'SegmentURL'
8111 }, parseAttributes(s)));
8112 const segmentBase = findChildren(adaptationSet, 'SegmentBase')[0];
8113 const segmentTimelineParentNode = segmentList || segmentTemplate;
8114 const segmentTimeline = segmentTimelineParentNode && findChildren(segmentTimelineParentNode, 'SegmentTimeline')[0];
8115 const segmentInitializationParentNode = segmentList || segmentBase || segmentTemplate;
8116 const segmentInitialization = segmentInitializationParentNode && findChildren(segmentInitializationParentNode, 'Initialization')[0]; // SegmentTemplate is handled slightly differently, since it can have both
8117 // @initialization and an <Initialization> node. @initialization can be templated,
8118 // while the node can have a url and range specified. If the <SegmentTemplate> has
8119 // both @initialization and an <Initialization> subelement we opt to override with
8120 // the node, as this interaction is not defined in the spec.
8121
8122 const template = segmentTemplate && parseAttributes(segmentTemplate);
8123
8124 if (template && segmentInitialization) {
8125 template.initialization = segmentInitialization && parseAttributes(segmentInitialization);
8126 } else if (template && template.initialization) {
8127 // If it is @initialization we convert it to an object since this is the format that
8128 // later functions will rely on for the initialization segment. This is only valid
8129 // for <SegmentTemplate>
8130 template.initialization = {
8131 sourceURL: template.initialization
8132 };
8133 }
8134
8135 const segmentInfo = {
8136 template,
8137 segmentTimeline: segmentTimeline && findChildren(segmentTimeline, 'S').map(s => parseAttributes(s)),
8138 list: segmentList && merge(parseAttributes(segmentList), {
8139 segmentUrls,
8140 initialization: parseAttributes(segmentInitialization)
8141 }),
8142 base: segmentBase && merge(parseAttributes(segmentBase), {
8143 initialization: parseAttributes(segmentInitialization)
8144 })
8145 };
8146 Object.keys(segmentInfo).forEach(key => {
8147 if (!segmentInfo[key]) {
8148 delete segmentInfo[key];
8149 }
8150 });
8151 return segmentInfo;
8152 };
8153 /**
8154 * Contains Segment information and attributes needed to construct a Playlist object
8155 * from a Representation
8156 *
8157 * @typedef {Object} RepresentationInformation
8158 * @property {SegmentInformation} segmentInfo
8159 * Segment information for this Representation
8160 * @property {Object} attributes
8161 * Inherited attributes for this Representation
8162 */
8163
8164 /**
8165 * Maps a Representation node to an object containing Segment information and attributes
8166 *
8167 * @name inheritBaseUrlsCallback
8168 * @function
8169 * @param {Node} representation
8170 * Representation node from the mpd
8171 * @return {RepresentationInformation}
8172 * Representation information needed to construct a Playlist object
8173 */
8174
8175 /**
8176 * Returns a callback for Array.prototype.map for mapping Representation nodes to
8177 * Segment information and attributes using inherited BaseURL nodes.
8178 *
8179 * @param {Object} adaptationSetAttributes
8180 * Contains attributes inherited by the AdaptationSet
8181 * @param {Object[]} adaptationSetBaseUrls
8182 * List of objects containing resolved base URLs and attributes
8183 * inherited by the AdaptationSet
8184 * @param {SegmentInformation} adaptationSetSegmentInfo
8185 * Contains Segment information for the AdaptationSet
8186 * @return {inheritBaseUrlsCallback}
8187 * Callback map function
8188 */
8189
8190
8191 const inheritBaseUrls = (adaptationSetAttributes, adaptationSetBaseUrls, adaptationSetSegmentInfo) => representation => {
8192 const repBaseUrlElements = findChildren(representation, 'BaseURL');
8193 const repBaseUrls = buildBaseUrls(adaptationSetBaseUrls, repBaseUrlElements);
8194 const attributes = merge(adaptationSetAttributes, parseAttributes(representation));
8195 const representationSegmentInfo = getSegmentInformation(representation);
8196 return repBaseUrls.map(baseUrl => {
8197 return {
8198 segmentInfo: merge(adaptationSetSegmentInfo, representationSegmentInfo),
8199 attributes: merge(attributes, baseUrl)
8200 };
8201 });
8202 };
8203 /**
8204 * Transforms a series of content protection nodes to
8205 * an object containing pssh data by key system
8206 *
8207 * @param {Node[]} contentProtectionNodes
8208 * Content protection nodes
8209 * @return {Object}
8210 * Object containing pssh data by key system
8211 */
8212
8213
8214 const generateKeySystemInformation = contentProtectionNodes => {
8215 return contentProtectionNodes.reduce((acc, node) => {
8216 const attributes = parseAttributes(node); // The UUID RFC specifies that the UUID string (a-f chars) should be generated as
8217 // lowercase, but also that it should be treated as case-insensitive on input. Since the key system
8218 // UUIDs in the keySystemsMap are hardcoded as lowercase in the codebase, there is no
8219 // reason not to call .toLowerCase() on the input UUID string from the manifest.
8220
8221 if (attributes.schemeIdUri) {
8222 attributes.schemeIdUri = attributes.schemeIdUri.toLowerCase();
8223 }
8224
8225 const keySystem = keySystemsMap[attributes.schemeIdUri];
8226
8227 if (keySystem) {
8228 acc[keySystem] = {
8229 attributes
8230 };
8231 const psshNode = findChildren(node, 'cenc:pssh')[0];
8232
8233 if (psshNode) {
8234 const pssh = getContent(psshNode);
8235 acc[keySystem].pssh = pssh && decodeB64ToUint8Array(pssh);
8236 }
8237 }
8238
8239 return acc;
8240 }, {});
8241 }; // defined in ANSI_SCTE 214-1 2016
8242
8243
8244 const parseCaptionServiceMetadata = service => {
8245 // 608 captions
8246 if (service.schemeIdUri === 'urn:scte:dash:cc:cea-608:2015') {
8247 const values = typeof service.value !== 'string' ? [] : service.value.split(';');
8248 return values.map(value => {
8249 let channel;
8250 let language; // default language to value
8251
8252 language = value;
8253
8254 if (/^CC\d=/.test(value)) {
8255 [channel, language] = value.split('=');
8256 } else if (/^CC\d$/.test(value)) {
8257 channel = value;
8258 }
8259
8260 return {
8261 channel,
8262 language
8263 };
8264 });
8265 } else if (service.schemeIdUri === 'urn:scte:dash:cc:cea-708:2015') {
8266 const values = typeof service.value !== 'string' ? [] : service.value.split(';');
8267 return values.map(value => {
8268 const flags = {
8269 // service or channel number 1-63
8270 'channel': undefined,
8271 // language is a 3ALPHA per ISO 639.2/B
8272 // field is required
8273 'language': undefined,
8274 // BIT 1/0 or ?
8275 // default value is 1, meaning 16:9 aspect ratio, 0 is 4:3, ? is unknown
8276 'aspectRatio': 1,
8277 // BIT 1/0
8278 // the easy reader flag indicates the text is tailored to the needs of beginning readers
8279 // default 0, or off
8280 'easyReader': 0,
8281 // BIT 1/0
8282 // If 3d metadata is present (CEA-708.1) then 1
8283 // default 0
8284 '3D': 0
8285 };
8286
8287 if (/=/.test(value)) {
8288 const [channel, opts = ''] = value.split('=');
8289 flags.channel = channel;
8290 flags.language = value;
8291 opts.split(',').forEach(opt => {
8292 const [name, val] = opt.split(':');
8293
8294 if (name === 'lang') {
8295 flags.language = val; // er for easyReader
8296 } else if (name === 'er') {
8297 flags.easyReader = Number(val); // war for wide aspect ratio
8298 } else if (name === 'war') {
8299 flags.aspectRatio = Number(val);
8300 } else if (name === '3D') {
8301 flags['3D'] = Number(val);
8302 }
8303 });
8304 } else {
8305 flags.language = value;
8306 }
8307
8308 if (flags.channel) {
8309 flags.channel = 'SERVICE' + flags.channel;
8310 }
8311
8312 return flags;
8313 });
8314 }
8315 };
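// Illustrative sketch for the CEA-608 scheme (service values invented):
// parseCaptionServiceMetadata({
//   schemeIdUri: 'urn:scte:dash:cc:cea-608:2015',
//   value: 'CC1=eng;CC3=swe'
// });
// // => [{ channel: 'CC1', language: 'eng' }, { channel: 'CC3', language: 'swe' }]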
8316 /**
8317 * A map callback that will parse all event stream data for a collection of periods
8318 * DASH ISO_IEC_23009 5.10.2.2
8319 * https://dashif-documents.azurewebsites.net/Events/master/event.html#mpd-event-timing
8320 *
8321 * @param {PeriodInformation} period object containing necessary period information
8322 * @return {Object[]} a collection of parsed EventStream event objects
8323 */
8324
8325
8326 const toEventStream = period => {
8327 // get and flatten all EventStreams tags and parse attributes and children
8328 return flatten(findChildren(period.node, 'EventStream').map(eventStream => {
8329 const eventStreamAttributes = parseAttributes(eventStream);
8330 const schemeIdUri = eventStreamAttributes.schemeIdUri; // find all Events per EventStream tag and map to return objects
8331
8332 return findChildren(eventStream, 'Event').map(event => {
8333 const eventAttributes = parseAttributes(event);
8334 const presentationTime = eventAttributes.presentationTime || 0;
8335 const timescale = eventStreamAttributes.timescale || 1;
8336 const duration = eventAttributes.duration || 0;
8337 const start = presentationTime / timescale + period.attributes.start;
8338 return {
8339 schemeIdUri,
8340 value: eventStreamAttributes.value,
8341 id: eventAttributes.id,
8342 start,
8343 end: start + duration / timescale,
8344 messageData: getContent(event) || eventAttributes.messageData,
8345 contentEncoding: eventStreamAttributes.contentEncoding,
8346 presentationTimeOffset: eventStreamAttributes.presentationTimeOffset || 0
8347 };
8348 });
8349 }));
8350 };
8351 /**
8352 * Maps an AdaptationSet node to a list of Representation information objects
8353 *
8354 * @name toRepresentationsCallback
8355 * @function
8356 * @param {Node} adaptationSet
8357 * AdaptationSet node from the mpd
8358 * @return {RepresentationInformation[]}
8359 * List of objects containing Representation information
8360 */
8361
8362 /**
8363 * Returns a callback for Array.prototype.map for mapping AdaptationSet nodes to a list of
8364 * Representation information objects
8365 *
8366 * @param {Object} periodAttributes
8367 * Contains attributes inherited by the Period
8368 * @param {Object[]} periodBaseUrls
8369 * Contains list of objects with resolved base urls and attributes
8370 * inherited by the Period
8371 * @param {SegmentInformation} periodSegmentInfo
8372 * Contains Segment Information at the period level
8373 * @return {toRepresentationsCallback}
8374 * Callback map function
8375 */
8376
8377
8378 const toRepresentations = (periodAttributes, periodBaseUrls, periodSegmentInfo) => adaptationSet => {
8379 const adaptationSetAttributes = parseAttributes(adaptationSet);
8380 const adaptationSetBaseUrls = buildBaseUrls(periodBaseUrls, findChildren(adaptationSet, 'BaseURL'));
8381 const role = findChildren(adaptationSet, 'Role')[0];
8382 const roleAttributes = {
8383 role: parseAttributes(role)
8384 };
8385 let attrs = merge(periodAttributes, adaptationSetAttributes, roleAttributes);
8386 const accessibility = findChildren(adaptationSet, 'Accessibility')[0];
8387 const captionServices = parseCaptionServiceMetadata(parseAttributes(accessibility));
8388
8389 if (captionServices) {
8390 attrs = merge(attrs, {
8391 captionServices
8392 });
8393 }
8394
8395 const label = findChildren(adaptationSet, 'Label')[0];
8396
8397 if (label && label.childNodes.length) {
8398 const labelVal = label.childNodes[0].nodeValue.trim();
8399 attrs = merge(attrs, {
8400 label: labelVal
8401 });
8402 }
8403
8404 const contentProtection = generateKeySystemInformation(findChildren(adaptationSet, 'ContentProtection'));
8405
8406 if (Object.keys(contentProtection).length) {
8407 attrs = merge(attrs, {
8408 contentProtection
8409 });
8410 }
8411
8412 const segmentInfo = getSegmentInformation(adaptationSet);
8413 const representations = findChildren(adaptationSet, 'Representation');
8414 const adaptationSetSegmentInfo = merge(periodSegmentInfo, segmentInfo);
8415 return flatten(representations.map(inheritBaseUrls(attrs, adaptationSetBaseUrls, adaptationSetSegmentInfo)));
8416 };
8417 /**
8418 * Contains all period information for mapping nodes onto adaptation sets.
8419 *
8420 * @typedef {Object} PeriodInformation
8421 * @property {Node} period.node
8422 * Period node from the mpd
8423 * @property {Object} period.attributes
8424 * Parsed period attributes from node plus any added
8425 */
8426
8427 /**
8428 * Maps a PeriodInformation object to a list of Representation information objects for all
8429 * AdaptationSet nodes contained within the Period.
8430 *
8431 * @name toAdaptationSetsCallback
8432 * @function
8433 * @param {PeriodInformation} period
8434 * Period object containing necessary period information
8435 * @param {number} periodStart
8436 * Start time of the Period within the mpd
8437 * @return {RepresentationInformation[]}
8438 * List of objects containing Representation information
8439 */
8440
8441 /**
8442 * Returns a callback for Array.prototype.map for mapping Period nodes to a list of
8443 * Representation information objects
8444 *
8445 * @param {Object} mpdAttributes
8446 * Contains attributes inherited by the mpd
8447 * @param {Object[]} mpdBaseUrls
8448 * Contains list of objects with resolved base urls and attributes
8449 * inherited by the mpd
8450 * @return {toAdaptationSetsCallback}
8451 * Callback map function
8452 */
8453
8454
8455 const toAdaptationSets = (mpdAttributes, mpdBaseUrls) => (period, index) => {
8456 const periodBaseUrls = buildBaseUrls(mpdBaseUrls, findChildren(period.node, 'BaseURL'));
8457 const periodAttributes = merge(mpdAttributes, {
8458 periodStart: period.attributes.start
8459 });
8460
8461 if (typeof period.attributes.duration === 'number') {
8462 periodAttributes.periodDuration = period.attributes.duration;
8463 }
8464
8465 const adaptationSets = findChildren(period.node, 'AdaptationSet');
8466 const periodSegmentInfo = getSegmentInformation(period.node);
8467 return flatten(adaptationSets.map(toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo)));
8468 };
8469 /**
8470 * Transforms an array of content steering nodes into an object
8471 * containing CDN content steering information from the MPD manifest.
8472 *
8473 * For more information on the DASH spec for Content Steering parsing, see:
8474 * https://dashif.org/docs/DASH-IF-CTS-00XX-Content-Steering-Community-Review.pdf
8475 *
8476 * @param {Node[]} contentSteeringNodes
8477 * Content steering nodes
8478 * @param {Function} eventHandler
8479 * The event handler passed into the parser options to handle warnings
8480 * @return {Object}
8481 * Object containing content steering data
8482 */
8483
8484
8485 const generateContentSteeringInformation = (contentSteeringNodes, eventHandler) => {
8486 // If there is more than one ContentSteering tag, issue a warning
8487 if (contentSteeringNodes.length > 1) {
8488 eventHandler({
8489 type: 'warn',
8490 message: 'The MPD manifest should contain no more than one ContentSteering tag'
8491 });
8492 } // Return a null value if there are no ContentSteering tags
8493
8494
8495 if (!contentSteeringNodes.length) {
8496 return null;
8497 }
8498
8499 const infoFromContentSteeringTag = merge({
8500 serverURL: getContent(contentSteeringNodes[0])
8501 }, parseAttributes(contentSteeringNodes[0])); // Converts `queryBeforeStart` to a boolean, as well as setting the default value
8502 // to `false` if it doesn't exist
8503
8504 infoFromContentSteeringTag.queryBeforeStart = infoFromContentSteeringTag.queryBeforeStart === 'true';
8505 return infoFromContentSteeringTag;
8506 };
8507 /**
8508 * Gets Period@start property for a given period.
8509 *
8510 * @param {Object} options
8511 * Options object
8512 * @param {Object} options.attributes
8513 * Period attributes
8514 * @param {Object} [options.priorPeriodAttributes]
8515 * Prior period attributes (if prior period is available)
8516 * @param {string} options.mpdType
8517 * The MPD@type these periods came from
8518 * @return {number|null}
8519 * The period start, or null if it's an early available period or error
8520 */
8521
8522
8523 const getPeriodStart = ({
8524 attributes,
8525 priorPeriodAttributes,
8526 mpdType
8527 }) => {
8528 // Summary of period start time calculation from DASH spec section 5.3.2.1
8529 //
8530 // A period's start is the first period's start + time elapsed after playing all
8531 // prior periods to this one. Periods continue one after the other in time (without
8532 // gaps) until the end of the presentation.
8533 //
8534 // The value of Period@start should be:
8535 // 1. if Period@start is present: value of Period@start
8536 // 2. if previous period exists and it has @duration: previous Period@start +
8537 // previous Period@duration
8538 // 3. if this is first period and MPD@type is 'static': 0
8539 // 4. in all other cases, consider the period an "early available period" (note: not
8540 // currently supported)
8541 // (1)
8542 if (typeof attributes.start === 'number') {
8543 return attributes.start;
8544 } // (2)
8545
8546
8547 if (priorPeriodAttributes && typeof priorPeriodAttributes.start === 'number' && typeof priorPeriodAttributes.duration === 'number') {
8548 return priorPeriodAttributes.start + priorPeriodAttributes.duration;
8549 } // (3)
8550
8551
8552 if (!priorPeriodAttributes && mpdType === 'static') {
8553 return 0;
8554 } // (4)
8555 // There is currently no logic for calculating the Period@start value if there is
8556 // no Period@start or prior Period@start and Period@duration available. This is not made
8557 // explicit by the DASH interop guidelines or the DASH spec, however, since there's
8558 // nothing about any other resolution strategies, it's implied. Thus, this case should
8559 // be considered an early available period, or error, and null should suffice for both
8560 // of those cases.
8561
8562
8563 return null;
8564 };
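// Illustrative sketch covering cases (1)-(3) above (attribute values invented):
// getPeriodStart({ attributes: { start: 10 }, mpdType: 'dynamic' }); // => 10
// getPeriodStart({
//   attributes: {},
//   priorPeriodAttributes: { start: 0, duration: 30 },
//   mpdType: 'dynamic'
// }); // => 30
// getPeriodStart({ attributes: {}, mpdType: 'static' }); // => 0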
8565 /**
8566 * Traverses the mpd xml tree to generate a list of Representation information objects
8567 * that have inherited attributes from parent nodes
8568 *
8569 * @param {Node} mpd
8570 * The root node of the mpd
8571 * @param {Object} options
8572 * Available options for inheritAttributes
8573 * @param {string} options.manifestUri
8574 * The uri source of the mpd
8575 * @param {number} options.NOW
8576 * Current time per DASH IOP. Default is current time in ms since epoch
8577 * @param {number} options.clientOffset
8578 * Client time difference from NOW (in milliseconds)
8579 * @return {RepresentationInformation[]}
8580 * List of objects containing Representation information
8581 */
8582
8583
8584 const inheritAttributes = (mpd, options = {}) => {
8585 const {
8586 manifestUri = '',
8587 NOW = Date.now(),
8588 clientOffset = 0,
8589 // TODO: For now, we are expecting an eventHandler callback function
8590 // to be passed into the mpd parser as an option.
8591 // In the future, we should enable stream parsing by using the Stream class from vhs-utils.
8592 // This will support new features including a standardized event handler.
8593 // See the m3u8 parser for examples of how stream parsing is currently used for HLS parsing.
8594 // https://github.com/videojs/vhs-utils/blob/88d6e10c631e57a5af02c5a62bc7376cd456b4f5/src/stream.js#L9
8595 eventHandler = function () {}
8596 } = options;
8597 const periodNodes = findChildren(mpd, 'Period');
8598
8599 if (!periodNodes.length) {
8600 throw new Error(errors.INVALID_NUMBER_OF_PERIOD);
8601 }
8602
8603 const locations = findChildren(mpd, 'Location');
8604 const mpdAttributes = parseAttributes(mpd);
8605 const mpdBaseUrls = buildBaseUrls([{
8606 baseUrl: manifestUri
8607 }], findChildren(mpd, 'BaseURL'));
8608 const contentSteeringNodes = findChildren(mpd, 'ContentSteering'); // See DASH spec section 5.3.1.2, Semantics of MPD element. Default type to 'static'.
8609
8610 mpdAttributes.type = mpdAttributes.type || 'static';
8611 mpdAttributes.sourceDuration = mpdAttributes.mediaPresentationDuration || 0;
8612 mpdAttributes.NOW = NOW;
8613 mpdAttributes.clientOffset = clientOffset;
8614
8615 if (locations.length) {
8616 mpdAttributes.locations = locations.map(getContent);
8617 }
8618
8619 const periods = []; // Since toAdaptationSets acts on individual periods right now, the simplest approach to
8620 // adding properties that require looking at prior periods is to parse attributes and add
8621 // missing ones before toAdaptationSets is called. If more such properties are added, it
8622 // may be better to refactor toAdaptationSets.
8623
8624 periodNodes.forEach((node, index) => {
8625 const attributes = parseAttributes(node); // Use the last modified prior period, as it may contain added information necessary
8626 // for this period.
8627
8628 const priorPeriod = periods[index - 1];
8629 attributes.start = getPeriodStart({
8630 attributes,
8631 priorPeriodAttributes: priorPeriod ? priorPeriod.attributes : null,
8632 mpdType: mpdAttributes.type
8633 });
8634 periods.push({
8635 node,
8636 attributes
8637 });
8638 });
8639 return {
8640 locations: mpdAttributes.locations,
8641 contentSteeringInfo: generateContentSteeringInformation(contentSteeringNodes, eventHandler),
8642 // TODO: There are occurrences where this `representationInfo` array contains undesired
8643 // duplicates. This generally occurs when there are multiple BaseURL nodes that are
8644 // direct children of the MPD node. When we attempt to resolve URLs from a combination of the
8645 // parent BaseURL and a child BaseURL, and the value does not resolve,
8646 // we end up returning the child BaseURL multiple times.
8647 // We need to determine a way to remove these duplicates in a safe way.
8648 // See: https://github.com/videojs/mpd-parser/pull/17#discussion_r162750527
8649 representationInfo: flatten(periods.map(toAdaptationSets(mpdAttributes, mpdBaseUrls))),
8650 eventStream: flatten(periods.map(toEventStream))
8651 };
8652 };
8653
8654 const stringToMpdXml = manifestString => {
8655 if (manifestString === '') {
8656 throw new Error(errors.DASH_EMPTY_MANIFEST);
8657 }
8658
8659 const parser = new xmldom.DOMParser();
8660 let xml;
8661 let mpd;
8662
8663 try {
8664 xml = parser.parseFromString(manifestString, 'application/xml');
8665 mpd = xml && xml.documentElement.tagName === 'MPD' ? xml.documentElement : null;
8666 } catch (e) { // IE 11 throws on invalid XML
8667 }
8668
8669 if (!mpd || mpd.getElementsByTagName('parsererror').length > 0) {
8670 throw new Error(errors.DASH_INVALID_XML);
8671 }
8672
8673 return mpd;
8674 };
8675 /**
8676 * Parses the manifest for a UTCTiming node, returning the node's attributes if found
8677 *
8678 * @param {string} mpd
8679 * XML string of the MPD manifest
8680 * @return {Object|null}
8681 * Attributes of UTCTiming node specified in the manifest. Null if none found
8682 */
8683
8684
8685 const parseUTCTimingScheme = mpd => {
8686 const UTCTimingNode = findChildren(mpd, 'UTCTiming')[0];
8687
8688 if (!UTCTimingNode) {
8689 return null;
8690 }
8691
8692 const attributes = parseAttributes(UTCTimingNode);
8693
8694 switch (attributes.schemeIdUri) {
8695 case 'urn:mpeg:dash:utc:http-head:2014':
8696 case 'urn:mpeg:dash:utc:http-head:2012':
8697 attributes.method = 'HEAD';
8698 break;
8699
8700 case 'urn:mpeg:dash:utc:http-xsdate:2014':
8701 case 'urn:mpeg:dash:utc:http-iso:2014':
8702 case 'urn:mpeg:dash:utc:http-xsdate:2012':
8703 case 'urn:mpeg:dash:utc:http-iso:2012':
8704 attributes.method = 'GET';
8705 break;
8706
8707 case 'urn:mpeg:dash:utc:direct:2014':
8708 case 'urn:mpeg:dash:utc:direct:2012':
8709 attributes.method = 'DIRECT';
8710 attributes.value = Date.parse(attributes.value);
8711 break;
8712
8713 case 'urn:mpeg:dash:utc:http-ntp:2014':
8714 case 'urn:mpeg:dash:utc:ntp:2014':
8715 case 'urn:mpeg:dash:utc:sntp:2014':
8716 default:
8717 throw new Error(errors.UNSUPPORTED_UTC_TIMING_SCHEME);
8718 }
8719
8720 return attributes;
8721 };
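// Illustrative sketch (scheme value and URL invented): an http-xsdate
// UTCTiming node maps to an HTTP GET probe.
//
// const exampleTimingMpd = new xmldom.DOMParser().parseFromString(
//   '<MPD><UTCTiming schemeIdUri="urn:mpeg:dash:utc:http-xsdate:2014"' +
//   ' value="https://time.example.com/now"/></MPD>',
//   'application/xml'
// ).documentElement;
// parseUTCTimingScheme(exampleTimingMpd);
// // => { schemeIdUri: 'urn:mpeg:dash:utc:http-xsdate:2014',
// //      value: 'https://time.example.com/now', method: 'GET' }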
8722 /*
8723 * Given a DASH manifest string and options, parses the DASH manifest into an object in the
8724 * form output by m3u8-parser and accepted by videojs/http-streaming.
8725 *
8726 * For live DASH manifests, if `previousManifest` is provided in options, then the newly
8727 * parsed DASH manifest will have its media sequence and discontinuity sequence values
8728 * updated to reflect its position relative to the prior manifest.
8729 *
8730 * @param {string} manifestString - the DASH manifest as a string
8731 * @param {Object} [options] - any options
8732 *
8733 * @return {Object} the manifest object
8734 */
8735
8736 const parse = (manifestString, options = {}) => {
8737 const parsedManifestInfo = inheritAttributes(stringToMpdXml(manifestString), options);
8738 const playlists = toPlaylists(parsedManifestInfo.representationInfo);
8739 return toM3u8({
8740 dashPlaylists: playlists,
8741 locations: parsedManifestInfo.locations,
8742 contentSteering: parsedManifestInfo.contentSteeringInfo,
8743 sidxMapping: options.sidxMapping,
8744 previousManifest: options.previousManifest,
8745 eventStream: parsedManifestInfo.eventStream
8746 });
8747 };
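// Illustrative sketch of the full pipeline (manifest invented, not a
// recommended MPD): a 10 s static presentation with 2 s templated segments
// should produce a single video playlist of five segments.
//
// const exampleManifest = parse([
//   '<MPD mediaPresentationDuration="PT10S" type="static">',
//   '  <Period>',
//   '    <AdaptationSet mimeType="video/mp4">',
//   '      <Representation id="v" bandwidth="5000000" width="1280" height="720">',
//   '        <SegmentTemplate media="seg-$Number$.m4s" duration="2" startNumber="1"/>',
//   '      </Representation>',
//   '    </AdaptationSet>',
//   '  </Period>',
//   '</MPD>'
// ].join('\n'), { manifestUri: 'https://example.com/dash.mpd' });
// exampleManifest.playlists[0].segments.length; // => 5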
8748 /**
8749 * Parses the manifest for a UTCTiming node, returning the node's attributes if found
8750 *
8751 * @param {string} manifestString
8752 * XML string of the MPD manifest
8753 * @return {Object|null}
8754 * Attributes of UTCTiming node specified in the manifest. Null if none found
8755 */
8756
8757
8758 const parseUTCTiming = manifestString => parseUTCTimingScheme(stringToMpdXml(manifestString));
8759
8760 var MAX_UINT32 = Math.pow(2, 32);
8761
8762 var getUint64$1 = function (uint8) {
8763 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
8764 var value;
8765
8766 if (dv.getBigUint64) {
8767 value = dv.getBigUint64(0);
8768
8769 if (value < Number.MAX_SAFE_INTEGER) {
8770 return Number(value);
8771 }
8772
8773 return value;
8774 }
8775
8776 return dv.getUint32(0) * MAX_UINT32 + dv.getUint32(4);
8777 };
8778
8779 var numbers = {
8780 getUint64: getUint64$1,
8781 MAX_UINT32: MAX_UINT32
8782 };
8783
8784 var getUint64 = numbers.getUint64;
8785
8786 var parseSidx = function (data) {
8787 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
8788 result = {
8789 version: data[0],
8790 flags: new Uint8Array(data.subarray(1, 4)),
8791 references: [],
8792 referenceId: view.getUint32(4),
8793 timescale: view.getUint32(8)
8794 },
8795 i = 12;
8796
8797 if (result.version === 0) {
8798 result.earliestPresentationTime = view.getUint32(i);
8799 result.firstOffset = view.getUint32(i + 4);
8800 i += 8;
8801 } else {
8802 // read 64 bits
8803 result.earliestPresentationTime = getUint64(data.subarray(i));
8804 result.firstOffset = getUint64(data.subarray(i + 8));
8805 i += 16;
8806 }
8807
8808 i += 2; // reserved
8809
8810 var referenceCount = view.getUint16(i);
8811 i += 2; // start of references
8812
8813 for (; referenceCount > 0; i += 12, referenceCount--) {
8814 result.references.push({
8815 referenceType: (data[i] & 0x80) >>> 7,
8816 referencedSize: view.getUint32(i) & 0x7FFFFFFF,
8817 subsegmentDuration: view.getUint32(i + 4),
8818 startsWithSap: !!(data[i + 8] & 0x80),
8819 sapType: (data[i + 8] & 0x70) >>> 4,
8820 sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
8821 });
8822 }
8823
8824 return result;
8825 };
8826
8827 var parseSidx_1 = parseSidx;
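// Usage sketch: the DASH loader further below hands parseSidx_1 a sidx box
// payload with its 8-byte box header stripped ('sidxBoxBytes' is a
// hypothetical Uint8Array holding a complete sidx box):
//
//   var sidx = parseSidx_1(toUint8(sidxBoxBytes).subarray(8));
//   // sidx.timescale plus each reference's subsegmentDuration/referencedSize
//   // are enough to derive per-segment durations and byte ranges.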
8828
8829 var ID3 = toUint8([0x49, 0x44, 0x33]);
8830 var getId3Size = function getId3Size(bytes, offset) {
8831 if (offset === void 0) {
8832 offset = 0;
8833 }
8834
8835 bytes = toUint8(bytes);
8836 var flags = bytes[offset + 5];
8837 var returnSize = bytes[offset + 6] << 21 | bytes[offset + 7] << 14 | bytes[offset + 8] << 7 | bytes[offset + 9];
8838 var footerPresent = (flags & 16) >> 4;
8839
8840 if (footerPresent) {
8841 return returnSize + 20;
8842 }
8843
8844 return returnSize + 10;
8845 };
8846 var getId3Offset = function getId3Offset(bytes, offset) {
8847 if (offset === void 0) {
8848 offset = 0;
8849 }
8850
8851 bytes = toUint8(bytes);
8852
8853 if (bytes.length - offset < 10 || !bytesMatch(bytes, ID3, {
8854 offset: offset
8855 })) {
8856 return offset;
8857 }
8858
8859 offset += getId3Size(bytes, offset); // recursive check for id3 tags as some files
8860 // have multiple ID3 tag sections even though
8861 // they should not.
8862
8863 return getId3Offset(bytes, offset);
8864 };
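// Worked example: for bytes beginning with an ID3v2 tag whose size field
// encodes 256 bytes of tag data (no footer), getId3Size returns 256 + 10 and
// getId3Offset returns 266 -- the first byte of actual media data. Without a
// leading 'ID3' marker, getId3Offset just returns the offset it was given.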
8865
8866 var normalizePath$1 = function normalizePath(path) {
8867 if (typeof path === 'string') {
8868 return stringToBytes(path);
8869 }
8870
8871 if (typeof path === 'number') {
8872 return path;
8873 }
8874
8875 return path;
8876 };
8877
8878 var normalizePaths$1 = function normalizePaths(paths) {
8879 if (!Array.isArray(paths)) {
8880 return [normalizePath$1(paths)];
8881 }
8882
8883 return paths.map(function (p) {
8884 return normalizePath$1(p);
8885 });
8886 };
8887 /**
8888 * find any number of boxes by name given a path to it in an iso bmff
8889 * such as mp4.
8890 *
8891 * @param {TypedArray} bytes
8892 * bytes for the iso bmff to search for boxes in
8893 *
8894 * @param {Uint8Array[]|string[]|string|Uint8Array} name
8895 * An array of paths or a single path representing the name
8896 * of boxes to search through in bytes. Paths may be
8897 * uint8 (character codes) or strings.
8898 *
8899 * @param {boolean} [complete=false]
8900 * Should we search only for complete boxes on the final path.
8901 * This is very useful when you do not want to get back partial boxes
8902 * in the case of streaming files.
8903 *
8904 * @return {Uint8Array[]}
8905 * An array containing the data of each box found at the end of the path.
8906 */
8907
8908 var findBox = function findBox(bytes, paths, complete) {
8909 if (complete === void 0) {
8910 complete = false;
8911 }
8912
8913 paths = normalizePaths$1(paths);
8914 bytes = toUint8(bytes);
8915 var results = [];
8916
8917 if (!paths.length) {
8918 // short-circuit the search for empty paths
8919 return results;
8920 }
8921
8922 var i = 0;
8923
8924 while (i < bytes.length) {
8925 var size = (bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]) >>> 0;
8926 var type = bytes.subarray(i + 4, i + 8); // invalid box format.
8927
8928 if (size === 0) {
8929 break;
8930 }
8931
8932 var end = i + size;
8933
8934 if (end > bytes.length) {
8935 // this box is bigger than the number of bytes we have;
8936 // if complete is set, we cannot find any more boxes.
8937 if (complete) {
8938 break;
8939 }
8940
8941 end = bytes.length;
8942 }
8943
8944 var data = bytes.subarray(i + 8, end);
8945
8946 if (bytesMatch(type, paths[0])) {
8947 if (paths.length === 1) {
8948 // this is the end of the path and we've found the box we were
8949 // looking for
8950 results.push(data);
8951 } else {
8952 // recursively search for the next box along the path
8953 results.push.apply(results, findBox(data, paths.slice(1), complete));
8954 }
8955 }
8956
8957 i = end;
8958 } // we've finished searching all of bytes
8959
8960
8961 return results;
8962 };
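// Usage sketch: pull every trun box out of a fragmented mp4 by walking
// moof -> traf -> trun ('segmentBytes' is a hypothetical Uint8Array):
//
//   var truns = findBox(segmentBytes, ['moof', 'traf', 'trun']);
//   // pass complete = true when sniffing a partial download, so a box
//   // truncated at the end of the buffer is not returned:
//   var wholeTruns = findBox(segmentBytes, ['moof', 'traf', 'trun'], true);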
8963
8964 // https://matroska-org.github.io/libebml/specs.html
8965 // https://www.matroska.org/technical/elements.html
8966 // https://www.webmproject.org/docs/container/
8967
8968 var EBML_TAGS = {
8969 EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),
8970 DocType: toUint8([0x42, 0x82]),
8971 Segment: toUint8([0x18, 0x53, 0x80, 0x67]),
8972 SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),
8973 Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),
8974 Track: toUint8([0xAE]),
8975 TrackNumber: toUint8([0xd7]),
8976 DefaultDuration: toUint8([0x23, 0xe3, 0x83]),
8977 TrackEntry: toUint8([0xAE]),
8978 TrackType: toUint8([0x83]),
8979 FlagDefault: toUint8([0x88]),
8980 CodecID: toUint8([0x86]),
8981 CodecPrivate: toUint8([0x63, 0xA2]),
8982 VideoTrack: toUint8([0xe0]),
8983 AudioTrack: toUint8([0xe1]),
8984 // Not used yet, but will be used for live webm/mkv
8985 // see https://www.matroska.org/technical/basics.html#block-structure
8986 // see https://www.matroska.org/technical/basics.html#simpleblock-structure
8987 Cluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),
8988 Timestamp: toUint8([0xE7]),
8989 TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),
8990 BlockGroup: toUint8([0xA0]),
8991 BlockDuration: toUint8([0x9B]),
8992 Block: toUint8([0xA1]),
8993 SimpleBlock: toUint8([0xA3])
8994 };
8995 /**
8996 * This is a simple table to determine the length
8997 * of things in ebml. The length is one based (starts at 1,
8998 * rather than zero) and for every zero bit before a one bit
8999 * we add one to length. We also need this table because in some
9000 * cases we have to xor all the length bits from another value.
9001 */
9002
9003 var LENGTH_TABLE = [128, 64, 32, 16, 8, 4, 2, 1];
9004
9005 var getLength = function getLength(byte) {
9006 var len = 1;
9007
9008 for (var i = 0; i < LENGTH_TABLE.length; i++) {
9009 if (byte & LENGTH_TABLE[i]) {
9010 break;
9011 }
9012
9013 len++;
9014 }
9015
9016 return len;
9017 }; // length in ebml is stored in the first 4 to 8 bits
9018 // of the first byte: up to 4 bits for an id length and up to 8 bits for a
9019 // data size length. Length is measured by converting the number to binary,
9020 // then taking 1 + the number of zeros before a 1 is encountered, starting
9021 // from the left.
9022
9023
9024 var getvint = function getvint(bytes, offset, removeLength, signed) {
9025 if (removeLength === void 0) {
9026 removeLength = true;
9027 }
9028
9029 if (signed === void 0) {
9030 signed = false;
9031 }
9032
9033 var length = getLength(bytes[offset]);
9034 var valueBytes = bytes.subarray(offset, offset + length); // NOTE: when removeLength is set we do **not** subarray below, because we need to copy these bytes
9035 // as they will be modified to remove the dataSizeLen bits and we do not
9036 // want to modify the original data. Normally we could just call slice on a
9037 // uint8array but ie 11 does not support that...
9038
9039 if (removeLength) {
9040 valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
9041 valueBytes[0] ^= LENGTH_TABLE[length - 1];
9042 }
9043
9044 return {
9045 length: length,
9046 value: bytesToNumber(valueBytes, {
9047 signed: signed
9048 }),
9049 bytes: valueBytes
9050 };
9051 };
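// Worked example: the byte 0x81 (10000001) has no leading zeros, so its
// length is 1; with removeLength left true, the length bit is xor-ed away
// (0x81 ^ 0x80), leaving a value of 1:
//
//   getvint(toUint8([0x81]), 0); // { length: 1, value: 1, bytes: [1] }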
9052
9053 var normalizePath = function normalizePath(path) {
9054 if (typeof path === 'string') {
9055 return path.match(/.{1,2}/g).map(function (p) {
9056 return normalizePath(p);
9057 });
9058 }
9059
9060 if (typeof path === 'number') {
9061 return numberToBytes(path);
9062 }
9063
9064 return path;
9065 };
9066
9067 var normalizePaths = function normalizePaths(paths) {
9068 if (!Array.isArray(paths)) {
9069 return [normalizePath(paths)];
9070 }
9071
9072 return paths.map(function (p) {
9073 return normalizePath(p);
9074 });
9075 };
9076
9077 var getInfinityDataSize = function getInfinityDataSize(id, bytes, offset) {
9078 if (offset >= bytes.length) {
9079 return bytes.length;
9080 }
9081
9082 var innerid = getvint(bytes, offset, false);
9083
9084 if (bytesMatch(id.bytes, innerid.bytes)) {
9085 return offset;
9086 }
9087
9088 var dataHeader = getvint(bytes, offset + innerid.length);
9089 return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
9090 };
9091 /**
9092 * Notes on the EBML format.
9093 *
9094 * EBML uses variable-length integers ("vints"). Every vint
9095 * contains two parts:
9096 *
9097 * 1. The length, from the first byte. You get this by
9098 * converting the byte to binary and counting the zeros
9099 * before a 1, then adding 1 to that. Examples:
9100 * 00011111 = length 4 because there are 3 zeros before a 1.
9101 * 00100000 = length 3 because there are 2 zeros before a 1.
9102 * 00000011 = length 7 because there are 6 zeros before a 1.
9103 *
9104 * 2. The bits used for the length are removed from the first byte,
9105 * then all the bytes are merged into a value. NOTE: this
9106 * is not the case for EBML id tags, as their id includes the
9107 * length bits.
9108 *
9109 */
9110
9111
9112 var findEbml = function findEbml(bytes, paths) {
9113 paths = normalizePaths(paths);
9114 bytes = toUint8(bytes);
9115 var results = [];
9116
9117 if (!paths.length) {
9118 return results;
9119 }
9120
9121 var i = 0;
9122
9123 while (i < bytes.length) {
9124 var id = getvint(bytes, i, false);
9125 var dataHeader = getvint(bytes, i + id.length);
9126 var dataStart = i + id.length + dataHeader.length; // dataSize is unknown or this is a live stream
9127
9128 if (dataHeader.value === 0x7f) {
9129 dataHeader.value = getInfinityDataSize(id, bytes, dataStart);
9130
9131 if (dataHeader.value !== bytes.length) {
9132 dataHeader.value -= dataStart;
9133 }
9134 }
9135
9136 var dataEnd = dataStart + dataHeader.value > bytes.length ? bytes.length : dataStart + dataHeader.value;
9137 var data = bytes.subarray(dataStart, dataEnd);
9138
9139 if (bytesMatch(paths[0], id.bytes)) {
9140 if (paths.length === 1) {
9141 // this is the end of the paths and we've found the tag we were
9142 // looking for
9143 results.push(data);
9144 } else {
9145 // recursively search for the next tag inside of the data
9146 // of this one
9147 results = results.concat(findEbml(data, paths.slice(1)));
9148 }
9149 }
9150
9151 var totalLength = id.length + dataHeader.length + data.length; // move past this tag entirely, we are not looking for it
9152
9153 i += totalLength;
9154 }
9155
9156 return results;
9157 }; // see https://www.matroska.org/technical/basics.html#block-structure
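// Usage sketch: the container sniffers further below combine findEbml with the
// EBML_TAGS constants above, e.g. reading the DocType out of a webm/mkv header
// ('headerBytes' is a hypothetical Uint8Array of the file's first bytes):
//
//   var docType = findEbml(headerBytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0];
//   bytesMatch(docType, CONSTANTS.webm); // true for webm; mkv matches CONSTANTS.matroska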
9158
9159 var NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);
9160 var NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);
9161 var EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);
9162 /**
9163 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
9164 * Sequence Payload"
9165 *
9166 * @param data {Uint8Array} the bytes of a RBSP from a NAL
9167 * unit
9168 * @return {Uint8Array} the RBSP without any Emulation
9169 * Prevention Bytes
9170 */
9171
9172 var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(bytes) {
9173 var positions = [];
9174 var i = 1; // Find all `Emulation Prevention Bytes`
9175
9176 while (i < bytes.length - 2) {
9177 if (bytesMatch(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {
9178 positions.push(i + 2);
9179 i++;
9180 }
9181
9182 i++;
9183 } // If no Emulation Prevention Bytes were found just return the original
9184 // array
9185
9186
9187 if (positions.length === 0) {
9188 return bytes;
9189 } // Create a new array to hold the NAL unit data
9190
9191
9192 var newLength = bytes.length - positions.length;
9193 var newData = new Uint8Array(newLength);
9194 var sourceIndex = 0;
9195
9196 for (i = 0; i < newLength; sourceIndex++, i++) {
9197 if (sourceIndex === positions[0]) {
9198 // Skip this byte
9199 sourceIndex++; // Remove this position index
9200
9201 positions.shift();
9202 }
9203
9204 newData[i] = bytes[sourceIndex];
9205 }
9206
9207 return newData;
9208 };
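// Worked example (the leading 0xaa stands in for a NAL header byte): the
// run 00 00 03 below contains one emulation prevention byte, so
//
//   discardEmulationPreventionBytes(toUint8([0xaa, 0x00, 0x00, 0x03, 0x01]));
//
// returns [0xaa, 0x00, 0x00, 0x01], with the 0x03 dropped.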
9209 var findNal = function findNal(bytes, dataType, types, nalLimit) {
9210 if (nalLimit === void 0) {
9211 nalLimit = Infinity;
9212 }
9213
9214 bytes = toUint8(bytes);
9215 types = [].concat(types);
9216 var i = 0;
9217 var nalStart;
9218 var nalsFound = 0; // keep searching until:
9219 // we reach the end of bytes
9220 // we reach the maximum number of nals they want to search
9221 // NOTE: we disregard nalLimit when we have found the start
9222 // of the nal we want so that we can find the end of the nal we want.
9223
9224 while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
9225 var nalOffset = void 0;
9226
9227 if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {
9228 nalOffset = 4;
9229 } else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {
9230 nalOffset = 3;
9231 } // we are unsynced,
9232 // find the next nal unit
9233
9234
9235 if (!nalOffset) {
9236 i++;
9237 continue;
9238 }
9239
9240 nalsFound++;
9241
9242 if (nalStart) {
9243 return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
9244 }
9245
9246 var nalType = void 0;
9247
9248 if (dataType === 'h264') {
9249 nalType = bytes[i + nalOffset] & 0x1f;
9250 } else if (dataType === 'h265') {
9251 nalType = bytes[i + nalOffset] >> 1 & 0x3f;
9252 }
9253
9254 if (types.indexOf(nalType) !== -1) {
9255 nalStart = i + nalOffset;
9256 } // nal header is 1 length for h264, and 2 for h265
9257
9258
9259 i += nalOffset + (dataType === 'h264' ? 1 : 2);
9260 }
9261
9262 return bytes.subarray(0, 0);
9263 };
9264 var findH264Nal = function findH264Nal(bytes, type, nalLimit) {
9265 return findNal(bytes, 'h264', type, nalLimit);
9266 };
9267 var findH265Nal = function findH265Nal(bytes, type, nalLimit) {
9268 return findNal(bytes, 'h265', type, nalLimit);
9269 };
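// Usage sketch: the h264/h265 detectors below look for parameter-set NAL
// units, capping the scan at 3 NALs so unrelated payloads fail fast:
//
//   findH264Nal(bytes, 7, 3).length;        // > 0 when an SPS (type 7) is present
//   findH265Nal(bytes, [32, 33], 3).length; // > 0 for an hevc VPS or SPS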
9270
9271 var CONSTANTS = {
9272 // "webm" string literal in hex
9273 'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),
9274 // "matroska" string literal in hex
9275 'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
9276 // "fLaC" string literal in hex
9277 'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),
9278 // "OggS" string literal in hex
9279 'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),
9280 // ac-3 sync byte, also works for ec-3 as that is simply a codec
9281 // of ac-3
9282 'ac3': toUint8([0x0b, 0x77]),
9283 // "RIFF" string literal in hex used for wav and avi
9284 'riff': toUint8([0x52, 0x49, 0x46, 0x46]),
9285 // "AVI" string literal in hex
9286 'avi': toUint8([0x41, 0x56, 0x49]),
9287 // "WAVE" string literal in hex
9288 'wav': toUint8([0x57, 0x41, 0x56, 0x45]),
9289 // "ftyp3g" string literal in hex
9290 '3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
9291 // "ftyp" string literal in hex
9292 'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),
9293 // "styp" string literal in hex
9294 'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),
9295 // "ftypqt" string literal in hex
9296 'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74]),
9297 // moov string literal in hex
9298 'moov': toUint8([0x6D, 0x6F, 0x6F, 0x76]),
9299 // moof string literal in hex
9300 'moof': toUint8([0x6D, 0x6F, 0x6F, 0x66])
9301 };
9302 var _isLikely = {
9303 aac: function aac(bytes) {
9304 var offset = getId3Offset(bytes);
9305 return bytesMatch(bytes, [0xFF, 0x10], {
9306 offset: offset,
9307 mask: [0xFF, 0x16]
9308 });
9309 },
9310 mp3: function mp3(bytes) {
9311 var offset = getId3Offset(bytes);
9312 return bytesMatch(bytes, [0xFF, 0x02], {
9313 offset: offset,
9314 mask: [0xFF, 0x06]
9315 });
9316 },
9317 webm: function webm(bytes) {
9318 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is webm
9319
9320 return bytesMatch(docType, CONSTANTS.webm);
9321 },
9322 mkv: function mkv(bytes) {
9323 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is matroska
9324
9325 return bytesMatch(docType, CONSTANTS.matroska);
9326 },
9327 mp4: function mp4(bytes) {
9328 // if this file is another base media file format, it is not mp4
9329 if (_isLikely['3gp'](bytes) || _isLikely.mov(bytes)) {
9330 return false;
9331 } // if this file starts with an ftyp or styp box it's mp4
9332
9333
9334 if (bytesMatch(bytes, CONSTANTS.mp4, {
9335 offset: 4
9336 }) || bytesMatch(bytes, CONSTANTS.fmp4, {
9337 offset: 4
9338 })) {
9339 return true;
9340 } // if this file starts with a moof/moov box it's mp4
9341
9342
9343 if (bytesMatch(bytes, CONSTANTS.moof, {
9344 offset: 4
9345 }) || bytesMatch(bytes, CONSTANTS.moov, {
9346 offset: 4
9347 })) {
9348 return true;
9349 }
9350 },
9351 mov: function mov(bytes) {
9352 return bytesMatch(bytes, CONSTANTS.mov, {
9353 offset: 4
9354 });
9355 },
9356 '3gp': function gp(bytes) {
9357 return bytesMatch(bytes, CONSTANTS['3gp'], {
9358 offset: 4
9359 });
9360 },
9361 ac3: function ac3(bytes) {
9362 var offset = getId3Offset(bytes);
9363 return bytesMatch(bytes, CONSTANTS.ac3, {
9364 offset: offset
9365 });
9366 },
9367 ts: function ts(bytes) {
9368 if (bytes.length < 189 && bytes.length >= 1) {
9369 return bytes[0] === 0x47;
9370 }
9371
9372 var i = 0; // check the first 376 bytes for two matching sync bytes
9373
9374 while (i + 188 < bytes.length && i < 188) {
9375 if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
9376 return true;
9377 }
9378
9379 i += 1;
9380 }
9381
9382 return false;
9383 },
9384 flac: function flac(bytes) {
9385 var offset = getId3Offset(bytes);
9386 return bytesMatch(bytes, CONSTANTS.flac, {
9387 offset: offset
9388 });
9389 },
9390 ogg: function ogg(bytes) {
9391 return bytesMatch(bytes, CONSTANTS.ogg);
9392 },
9393 avi: function avi(bytes) {
9394 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.avi, {
9395 offset: 8
9396 });
9397 },
9398 wav: function wav(bytes) {
9399 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.wav, {
9400 offset: 8
9401 });
9402 },
9403 'h264': function h264(bytes) {
9404 // find seq_parameter_set_rbsp
9405 return findH264Nal(bytes, 7, 3).length;
9406 },
9407 'h265': function h265(bytes) {
9408 // find video_parameter_set_rbsp or seq_parameter_set_rbsp
9409 return findH265Nal(bytes, [32, 33], 3).length;
9410 }
9411 }; // get all the isLikely functions,
9412 // making sure 'ts' is above h264 and h265
9413 // but below everything else, as it is the least specific
9414
9415 var isLikelyTypes = Object.keys(_isLikely) // remove ts, h264, h265
9416 .filter(function (t) {
9417 return t !== 'ts' && t !== 'h264' && t !== 'h265';
9418 }) // add it back to the bottom
9419 .concat(['ts', 'h264', 'h265']); // make sure we are dealing with uint8 data.
9420
9421 isLikelyTypes.forEach(function (type) {
9422 var isLikelyFn = _isLikely[type];
9423
9424 _isLikely[type] = function (bytes) {
9425 return isLikelyFn(toUint8(bytes));
9426 };
9427 }); // export after wrapping
9428
9429 var isLikely = _isLikely; // A useful list of file signatures can be found here
9430 // https://en.wikipedia.org/wiki/List_of_file_signatures
9431
9432 var detectContainerForBytes = function detectContainerForBytes(bytes) {
9433 bytes = toUint8(bytes);
9434
9435 for (var i = 0; i < isLikelyTypes.length; i++) {
9436 var type = isLikelyTypes[i];
9437
9438 if (isLikely[type](bytes)) {
9439 return type;
9440 }
9441 }
9442
9443 return '';
9444 }; // fmp4 is not a container
9445
9446 var isLikelyFmp4MediaSegment = function isLikelyFmp4MediaSegment(bytes) {
9447 return findBox(bytes, ['moof']).length > 0;
9448 };
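// Usage sketch: detectContainerForBytes only needs the first few hundred
// bytes of a file. A buffer beginning 00 00 00 20 66 74 79 70 ('....ftyp')
// reports 'mp4', 188-byte packets fronted by 0x47 sync bytes report 'ts', and
// an unrecognized buffer yields ''. For individual fmp4 media segments (which
// carry no ftyp/styp), isLikelyFmp4MediaSegment instead checks for a moof box.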
9449
9450 // invoke the callback, which will only happen if the request is complete.
9451
9452 const callbackOnCompleted = (request, cb) => {
9453 if (request.readyState === 4) {
9454 return cb();
9455 }
9456
9457 return;
9458 };
9459
9460 const containerRequest = (uri, xhr, cb) => {
9461 let bytes = [];
9462 let id3Offset;
9463 let finished = false;
9464
9465 const endRequestAndCallback = function (err, req, type, _bytes) {
9466 req.abort();
9467 finished = true;
9468 return cb(err, req, type, _bytes);
9469 };
9470
9471 const progressListener = function (error, request) {
9472 if (finished) {
9473 return;
9474 }
9475
9476 if (error) {
9477 return endRequestAndCallback(error, request, '', bytes);
9478 } // grab the new part of content that was just downloaded
9479
9480
9481 const newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
9482
9483 bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
9484 id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type
9485 // or we need at least two bytes after an id3Offset
9486
9487 if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
9488 return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
9489 }
9490
9491 const type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
9492 // to see the second sync byte, wait until we have enough data
9493 // before declaring it ts
9494
9495 if (type === 'ts' && bytes.length < 188) {
9496 return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
9497 } // this may be an unsynced ts segment
9498 // wait for 376 bytes before detecting no container
9499
9500
9501 if (!type && bytes.length < 376) {
9502 return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
9503 }
9504
9505 return endRequestAndCallback(null, request, type, bytes);
9506 };
9507
9508 const options = {
9509 uri,
9510
9511 beforeSend(request) {
9512 // this forces the browser to pass the bytes to us unprocessed
9513 request.overrideMimeType('text/plain; charset=x-user-defined');
9514 request.addEventListener('progress', function ({
9515 total,
9516 loaded
9517 }) {
9518 return callbackWrapper(request, null, {
9519 statusCode: request.status
9520 }, progressListener);
9521 });
9522 }
9523
9524 };
9525 const request = xhr(options, function (error, response) {
9526 return callbackWrapper(request, error, response, progressListener);
9527 });
9528 return request;
9529 };
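// Usage sketch: addSidxSegments_ further below wires this up as
//
//   this.request = containerRequest(uri, this.vhs_.xhr, (err, request, container, bytes) => { ... });
//
// the callback fires as soon as enough bytes have arrived to identify the
// container, and endRequestAndCallback aborts the request so an entire
// segment is not downloaded just to sniff its type.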
9530
9531 const {
9532 EventTarget
9533 } = videojs__default["default"];
9534
9535 const dashPlaylistUnchanged = function (a, b) {
9536 if (!isPlaylistUnchanged(a, b)) {
9537 return false;
9538 } // for dash the above check will often return true in scenarios where
9539 // the playlist has actually changed, because mediaSequence isn't a
9540 // dash concept and we often set it to 1, so playlists with the same number
9541 // of segments compare as unchanged.
9542 // For dash we therefore need to make sure that the underlying segments are different.
9543 // if sidx changed then the playlists are different.
9544
9545
9546 if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
9547 return false;
9548 } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
9549 return false;
9550 } // one or the other does not have segments
9551 // there was a change.
9552
9553
9554 if (a.segments && !b.segments || !a.segments && b.segments) {
9555 return false;
9556 } // neither has segments nothing changed
9557
9558
9559 if (!a.segments && !b.segments) {
9560 return true;
9561 } // check segments themselves
9562
9563
9564 for (let i = 0; i < a.segments.length; i++) {
9565 const aSegment = a.segments[i];
9566 const bSegment = b.segments[i]; // if uris are different between segments there was a change
9567
9568 if (aSegment.uri !== bSegment.uri) {
9569 return false;
9570 } // neither segment has a byterange, there will be no byterange change.
9571
9572
9573 if (!aSegment.byterange && !bSegment.byterange) {
9574 continue;
9575 }
9576
9577 const aByterange = aSegment.byterange;
9578 const bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
9579
9580 if (aByterange && !bByterange || !aByterange && bByterange) {
9581 return false;
9582 } // if both segments have byterange with different offsets, there was a change.
9583
9584
9585 if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
9586 return false;
9587 }
9588 } // if everything was the same with segments, this is the same playlist.
9589
9590
9591 return true;
9592 };
9593 /**
9594 * Use the representation IDs from the mpd object to create groupIDs. The NAME is set to the mandatory representation
9595 * ID in the parser. This allows for continuous playout across periods with the same representation IDs
9596 * (continuous periods as defined in DASH-IF 3.2.12). This is assumed in the mpd-parser as well. If we want to support
9597 * periods without continuous playback this function may need modification as well as the parser.
9598 */
9599
9600
9601 const dashGroupId = (type, group, label, playlist) => {
9602 // If the manifest somehow does not have an ID (non-DASH-compliant), use the label.
9603 const playlistId = playlist.attributes.NAME || label;
9604 return `placeholder-uri-${type}-${group}-${playlistId}`;
9605 };
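// For example (illustrative values), an alternate audio playlist whose
// representation has NAME 'audio-en' in group 'audio' would produce
// 'placeholder-uri-AUDIO-audio-audio-en'.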
9606 /**
9607 * Parses the main XML string and updates playlist URI references.
9608 *
9609 * @param {Object} config
9610 * Object of arguments
9611 * @param {string} config.mainXml
9612 * The mpd XML
9613 * @param {string} config.srcUrl
9614 * The mpd URL
9615 * @param {Date} config.clientOffset
9616 * A time difference between server and client
9617 * @param {Object} config.sidxMapping
9618 * SIDX mappings for moof/mdat URIs and byte ranges
9619 * @return {Object}
9620 * The parsed mpd manifest object
9621 */
9622
9623
9624 const parseMainXml = ({
9625 mainXml,
9626 srcUrl,
9627 clientOffset,
9628 sidxMapping,
9629 previousManifest
9630 }) => {
9631 const manifest = parse(mainXml, {
9632 manifestUri: srcUrl,
9633 clientOffset,
9634 sidxMapping,
9635 previousManifest
9636 });
9637 addPropertiesToMain(manifest, srcUrl, dashGroupId);
9638 return manifest;
9639 };
9640 /**
9641 * Removes any mediaGroup labels that no longer exist in the newMain
9642 *
9643 * @param {Object} update
9644 * The previous mpd object being updated
9645 * @param {Object} newMain
9646 * The new mpd object
9647 */
9648
9649 const removeOldMediaGroupLabels = (update, newMain) => {
9650 forEachMediaGroup$1(update, (properties, type, group, label) => {
9651 if (!(label in newMain.mediaGroups[type][group])) {
9652 delete update.mediaGroups[type][group][label];
9653 }
9654 });
9655 };
9656 /**
9657 * Returns a new main manifest that is the result of merging an updated main manifest
9658 * into the original version.
9659 *
9660 * @param {Object} oldMain
9661 * The old parsed mpd object
9662 * @param {Object} newMain
9663 * The updated parsed mpd object
9664 * @return {Object}
9665 * A new object representing the original main manifest with the updated media
9666 * playlists merged in
9667 */
9668
9669
9670 const updateMain = (oldMain, newMain, sidxMapping) => {
9671 let noChanges = true;
9672 let update = merge$1(oldMain, {
9673 // These are top level properties that can be updated
9674 duration: newMain.duration,
9675 minimumUpdatePeriod: newMain.minimumUpdatePeriod,
9676 timelineStarts: newMain.timelineStarts
9677 }); // First update the playlists in playlist list
9678
9679 for (let i = 0; i < newMain.playlists.length; i++) {
9680 const playlist = newMain.playlists[i];
9681
9682 if (playlist.sidx) {
9683 const sidxKey = generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already
9684
9685 if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
9686 addSidxSegmentsToPlaylist$1(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
9687 }
9688 }
9689
9690 const playlistUpdate = updateMain$1(update, playlist, dashPlaylistUnchanged);
9691
9692 if (playlistUpdate) {
9693 update = playlistUpdate;
9694 noChanges = false;
9695 }
9696 } // Then update media group playlists
9697
9698
9699 forEachMediaGroup$1(newMain, (properties, type, group, label) => {
9700 if (properties.playlists && properties.playlists.length) {
9701 const id = properties.playlists[0].id;
9702 const playlistUpdate = updateMain$1(update, properties.playlists[0], dashPlaylistUnchanged);
9703
9704 if (playlistUpdate) {
9705 update = playlistUpdate; // add new mediaGroup label if it doesn't exist and assign the new mediaGroup.
9706
9707 if (!(label in update.mediaGroups[type][group])) {
9708 update.mediaGroups[type][group][label] = properties;
9709 } // update the playlist reference within media groups
9710
9711
9712 update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
9713 noChanges = false;
9714 }
9715 }
9716 }); // remove mediaGroup labels and references that no longer exist in the newMain
9717
9718 removeOldMediaGroupLabels(update, newMain);
9719
9720 if (newMain.minimumUpdatePeriod !== oldMain.minimumUpdatePeriod) {
9721 noChanges = false;
9722 }
9723
9724 if (noChanges) {
9725 return null;
9726 }
9727
9728 return update;
9729 }; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
9730 // If the SIDXs have maps, the two maps should match,
9731 // both `a` and `b` missing SIDXs is considered matching.
9732 // If `a` or `b` but not both have a map, they aren't matching.
9733
9734 const equivalentSidx = (a, b) => {
9735 const neitherMap = Boolean(!a.map && !b.map);
9736 const equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
9737 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
9738 }; // exported for testing
9739
9740
9741 const compareSidxEntry = (playlists, oldSidxMapping) => {
9742 const newSidxMapping = {};
9743
9744 for (const id in playlists) {
9745 const playlist = playlists[id];
9746 const currentSidxInfo = playlist.sidx;
9747
9748 if (currentSidxInfo) {
9749 const key = generateSidxKey(currentSidxInfo);
9750
9751 if (!oldSidxMapping[key]) {
9752 break;
9753 }
9754
9755 const savedSidxInfo = oldSidxMapping[key].sidxInfo;
9756
9757 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
9758 newSidxMapping[key] = oldSidxMapping[key];
9759 }
9760 }
9761 }
9762
9763 return newSidxMapping;
9764 };
9765 /**
9766 * A function that filters out changed items as they need to be requested separately.
9767 *
9768 * The method is exported for testing
9769 *
9770 * @param {Object} main the parsed mpd XML returned via mpd-parser
9771 * @param {Object} oldSidxMapping the SIDX to compare against
9772 */
9773
9774 const filterChangedSidxMappings = (main, oldSidxMapping) => {
9775 const videoSidx = compareSidxEntry(main.playlists, oldSidxMapping);
9776 let mediaGroupSidx = videoSidx;
9777 forEachMediaGroup$1(main, (properties, mediaType, groupKey, labelKey) => {
9778 if (properties.playlists && properties.playlists.length) {
9779 const playlists = properties.playlists;
9780 mediaGroupSidx = merge$1(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
9781 }
9782 });
9783 return mediaGroupSidx;
9784 };
9785 class DashPlaylistLoader extends EventTarget {
9786 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
9787 // playlist loader setups from media groups will expect to be able to pass a playlist
9788 // (since there aren't external URLs to media playlists with DASH)
9789 constructor(srcUrlOrPlaylist, vhs, options = {}, mainPlaylistLoader) {
9790 super();
9791 this.mainPlaylistLoader_ = mainPlaylistLoader || this;
9792
9793 if (!mainPlaylistLoader) {
9794 this.isMain_ = true;
9795 }
9796
9797 const {
9798 withCredentials = false
9799 } = options;
9800 this.vhs_ = vhs;
9801 this.withCredentials = withCredentials;
9802 this.addMetadataToTextTrack = options.addMetadataToTextTrack;
9803
9804 if (!srcUrlOrPlaylist) {
9805 throw new Error('A non-empty playlist URL or object is required');
9806 } // event naming?
9807
9808
9809 this.on('minimumUpdatePeriod', () => {
9810 this.refreshXml_();
9811 }); // live playlist staleness timeout
9812
9813 this.on('mediaupdatetimeout', () => {
9814 this.refreshMedia_(this.media().id);
9815 });
9816 this.state = 'HAVE_NOTHING';
9817 this.loadedPlaylists_ = {};
9818 this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
9819 // The mainPlaylistLoader will be created with a string
9820
9821 if (this.isMain_) {
9822 this.mainPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
9823 // once multi-period is refactored
9824
9825 this.mainPlaylistLoader_.sidxMapping_ = {};
9826 } else {
9827 this.childPlaylist_ = srcUrlOrPlaylist;
9828 }
9829 }
9830
9831 requestErrored_(err, request, startingState) {
9832 // disposed
9833 if (!this.request) {
9834 return true;
9835 } // pending request is cleared
9836
9837
9838 this.request = null;
9839
9840 if (err) {
9841 // use the provided error object or create one
9842 // based on the request/response
9843 this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
9844 status: request.status,
9845 message: 'DASH request error at URL: ' + request.uri,
9846 response: request.response,
9847 // MEDIA_ERR_NETWORK
9848 code: 2
9849 };
9850
9851 if (startingState) {
9852 this.state = startingState;
9853 }
9854
9855 this.trigger('error');
9856 return true;
9857 }
9858 }
9859 /**
9860 * Verify that the container of the sidx segment can be parsed
9861 * and if it can, get and parse that segment.
9862 */
9863
9864
9865 addSidxSegments_(playlist, startingState, cb) {
9866 const sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
9867
9868 if (!playlist.sidx || !sidxKey || this.mainPlaylistLoader_.sidxMapping_[sidxKey]) {
9869 // keep this function async
9870 this.mediaRequest_ = window.setTimeout(() => cb(false), 0);
9871 return;
9872 } // resolve the segment URL relative to the playlist
9873
9874
9875 const uri = resolveManifestRedirect(playlist.sidx.resolvedUri);
9876
9877 const fin = (err, request) => {
9878 if (this.requestErrored_(err, request, startingState)) {
9879 return;
9880 }
9881
9882 const sidxMapping = this.mainPlaylistLoader_.sidxMapping_;
9883 let sidx;
9884
9885 try {
9886 sidx = parseSidx_1(toUint8(request.response).subarray(8));
9887 } catch (e) {
9888 // sidx parsing failed.
9889 this.requestErrored_(e, request, startingState);
9890 return;
9891 }
9892
9893 sidxMapping[sidxKey] = {
9894 sidxInfo: playlist.sidx,
9895 sidx
9896 };
9897 addSidxSegmentsToPlaylist$1(playlist, sidx, playlist.sidx.resolvedUri);
9898 return cb(true);
9899 };
9900
9901 this.request = containerRequest(uri, this.vhs_.xhr, (err, request, container, bytes) => {
9902 if (err) {
9903 return fin(err, request);
9904 }
9905
9906 if (!container || container !== 'mp4') {
9907 return fin({
9908 status: request.status,
9909 message: `Unsupported ${container || 'unknown'} container type for sidx segment at URL: ${uri}`,
9910 // response is just bytes in this case
9911 // but we really don't want to return that.
9912 response: '',
9913 playlist,
9914 internal: true,
9915 playlistExclusionDuration: Infinity,
9916 // MEDIA_ERR_NETWORK
9917 code: 2
9918 }, request);
9919 } // if we already downloaded the sidx bytes in the container request, use them
9920
9921
9922 const {
9923 offset,
9924 length
9925 } = playlist.sidx.byterange;
9926
9927 if (bytes.length >= length + offset) {
9928 return fin(err, {
9929 response: bytes.subarray(offset, offset + length),
9930 status: request.status,
9931 uri: request.uri
9932 });
9933 } // otherwise request sidx bytes
9934
9935
9936 this.request = this.vhs_.xhr({
9937 uri,
9938 responseType: 'arraybuffer',
9939 headers: segmentXhrHeaders({
9940 byterange: playlist.sidx.byterange
9941 })
9942 }, fin);
9943 });
9944 }
9945
9946 dispose() {
9947 this.trigger('dispose');
9948 this.stopRequest();
9949 this.loadedPlaylists_ = {};
9950 window.clearTimeout(this.minimumUpdatePeriodTimeout_);
9951 window.clearTimeout(this.mediaRequest_);
9952 window.clearTimeout(this.mediaUpdateTimeout);
9953 this.mediaUpdateTimeout = null;
9954 this.mediaRequest_ = null;
9955 this.minimumUpdatePeriodTimeout_ = null;
9956
9957 if (this.mainPlaylistLoader_.createMupOnMedia_) {
9958 this.off('loadedmetadata', this.mainPlaylistLoader_.createMupOnMedia_);
9959 this.mainPlaylistLoader_.createMupOnMedia_ = null;
9960 }
9961
9962 this.off();
9963 }
9964
9965 hasPendingRequest() {
9966 return this.request || this.mediaRequest_;
9967 }
9968
9969 stopRequest() {
9970 if (this.request) {
9971 const oldRequest = this.request;
9972 this.request = null;
9973 oldRequest.onreadystatechange = null;
9974 oldRequest.abort();
9975 }
9976 }
9977
9978 media(playlist) {
9979 // getter
9980 if (!playlist) {
9981 return this.media_;
9982 } // setter
9983
9984
9985 if (this.state === 'HAVE_NOTHING') {
9986 throw new Error('Cannot switch media playlist from ' + this.state);
9987 }
9988
9989 const startingState = this.state; // find the playlist object if the target playlist has been specified by URI
9990
9991 if (typeof playlist === 'string') {
9992 if (!this.mainPlaylistLoader_.main.playlists[playlist]) {
9993 throw new Error('Unknown playlist URI: ' + playlist);
9994 }
9995
9996 playlist = this.mainPlaylistLoader_.main.playlists[playlist];
9997 }
9998
9999 const mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
10000
10001 if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
10002 this.state = 'HAVE_METADATA';
10003 this.media_ = playlist; // trigger media change if the active media has been updated
10004
10005 if (mediaChange) {
10006 this.trigger('mediachanging');
10007 this.trigger('mediachange');
10008 }
10009
10010 return;
10011 } // switching to the active playlist is a no-op
10012
10013
10014 if (!mediaChange) {
10015 return;
10016 } // switching from an already loaded playlist
10017
10018
10019 if (this.media_) {
10020 this.trigger('mediachanging');
10021 }
10022
10023 this.addSidxSegments_(playlist, startingState, sidxChanged => {
10024 // everything is ready just continue to haveMetadata
10025 this.haveMetadata({
10026 startingState,
10027 playlist
10028 });
10029 });
10030 }
10031
10032 haveMetadata({
10033 startingState,
10034 playlist
10035 }) {
10036 this.state = 'HAVE_METADATA';
10037 this.loadedPlaylists_[playlist.id] = playlist;
10038 this.mediaRequest_ = null; // This will trigger loadedplaylist
10039
10040 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
10041 // to resolve setup of media groups
10042
10043 if (startingState === 'HAVE_MAIN_MANIFEST') {
10044 this.trigger('loadedmetadata');
10045 } else {
10046 // trigger media change if the active media has been updated
10047 this.trigger('mediachange');
10048 }
10049 }
10050
10051 pause() {
10052 if (this.mainPlaylistLoader_.createMupOnMedia_) {
10053 this.off('loadedmetadata', this.mainPlaylistLoader_.createMupOnMedia_);
10054 this.mainPlaylistLoader_.createMupOnMedia_ = null;
10055 }
10056
10057 this.stopRequest();
10058 window.clearTimeout(this.mediaUpdateTimeout);
10059 this.mediaUpdateTimeout = null;
10060
10061 if (this.isMain_) {
10062 window.clearTimeout(this.mainPlaylistLoader_.minimumUpdatePeriodTimeout_);
10063 this.mainPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
10064 }
10065
10066 if (this.state === 'HAVE_NOTHING') {
10067 // If we pause the loader before any data has been retrieved, it's as if we never
10068 // started, so reset to an unstarted state.
10069 this.started = false;
10070 }
10071 }
10072
10073 load(isFinalRendition) {
10074 window.clearTimeout(this.mediaUpdateTimeout);
10075 this.mediaUpdateTimeout = null;
10076 const media = this.media();
10077
10078 if (isFinalRendition) {
10079 const delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
10080 this.mediaUpdateTimeout = window.setTimeout(() => this.load(), delay);
10081 return;
10082 } // because the playlists are internal to the manifest, load should either load the
10083 // main manifest, or do nothing but trigger an event
10084
10085
10086 if (!this.started) {
10087 this.start();
10088 return;
10089 }
10090
10091 if (media && !media.endList) {
10092 // Check to see if this is the main loader and the MUP was cleared (this happens
10093 // when the loader was paused). `media` should be set at this point since one is always
10094 // set during `start()`.
10095 if (this.isMain_ && !this.minimumUpdatePeriodTimeout_) {
10096 // Trigger minimumUpdatePeriod to refresh the main manifest
10097 this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated
10098
10099 this.updateMinimumUpdatePeriodTimeout_();
10100 }
10101
10102 this.trigger('mediaupdatetimeout');
10103 } else {
10104 this.trigger('loadedplaylist');
10105 }
10106 }
10107
10108 start() {
10109 this.started = true; // We don't need to request the main manifest again
10110 // Call this asynchronously to match the xhr request behavior below
10111
10112 if (!this.isMain_) {
10113 this.mediaRequest_ = window.setTimeout(() => this.haveMain_(), 0);
10114 return;
10115 }
10116
10117 this.requestMain_((req, mainChanged) => {
10118 this.haveMain_();
10119
10120 if (!this.hasPendingRequest() && !this.media_) {
10121 this.media(this.mainPlaylistLoader_.main.playlists[0]);
10122 }
10123 });
10124 }
10125
10126 requestMain_(cb) {
10127 this.request = this.vhs_.xhr({
10128 uri: this.mainPlaylistLoader_.srcUrl,
10129 withCredentials: this.withCredentials
10130 }, (error, req) => {
10131 if (this.requestErrored_(error, req)) {
10132 if (this.state === 'HAVE_NOTHING') {
10133 this.started = false;
10134 }
10135
10136 return;
10137 }
10138
10139 const mainChanged = req.responseText !== this.mainPlaylistLoader_.mainXml_;
10140 this.mainPlaylistLoader_.mainXml_ = req.responseText;
10141
10142 if (req.responseHeaders && req.responseHeaders.date) {
10143 this.mainLoaded_ = Date.parse(req.responseHeaders.date);
10144 } else {
10145 this.mainLoaded_ = Date.now();
10146 }
10147
10148 this.mainPlaylistLoader_.srcUrl = resolveManifestRedirect(this.mainPlaylistLoader_.srcUrl, req);
10149
10150 if (mainChanged) {
10151 this.handleMain_();
10152 this.syncClientServerClock_(() => {
10153 return cb(req, mainChanged);
10154 });
10155 return;
10156 }
10157
10158 return cb(req, mainChanged);
10159 });
10160 }
10161 /**
10162 * Parses the main xml for UTCTiming node to sync the client clock to the server
10163 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
10164 *
10165 * @param {Function} done
10166 * Function to call when clock sync has completed
10167 */
10168
10169
10170 syncClientServerClock_(done) {
10171 const utcTiming = parseUTCTiming(this.mainPlaylistLoader_.mainXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
10172 // server clock
10173
10174 if (utcTiming === null) {
10175 this.mainPlaylistLoader_.clientOffset_ = this.mainLoaded_ - Date.now();
10176 return done();
10177 }
10178
10179 if (utcTiming.method === 'DIRECT') {
10180 this.mainPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
10181 return done();
10182 }
10183
10184 this.request = this.vhs_.xhr({
10185 uri: resolveUrl(this.mainPlaylistLoader_.srcUrl, utcTiming.value),
10186 method: utcTiming.method,
10187 withCredentials: this.withCredentials
10188 }, (error, req) => {
10189 // disposed
10190 if (!this.request) {
10191 return;
10192 }
10193
10194 if (error) {
10195 // sync request failed, fall back to using date header from mpd
10196 // TODO: log warning
10197 this.mainPlaylistLoader_.clientOffset_ = this.mainLoaded_ - Date.now();
10198 return done();
10199 }
10200
10201 let serverTime;
10202
10203 if (utcTiming.method === 'HEAD') {
10204 if (!req.responseHeaders || !req.responseHeaders.date) {
10205 // expected date header not present, fall back to using date header from mpd
10206 // TODO: log warning
10207 serverTime = this.mainLoaded_;
10208 } else {
10209 serverTime = Date.parse(req.responseHeaders.date);
10210 }
10211 } else {
10212 serverTime = Date.parse(req.responseText);
10213 }
10214
10215 this.mainPlaylistLoader_.clientOffset_ = serverTime - Date.now();
10216 done();
10217 });
10218 }
10219
10220 haveMain_() {
10221 this.state = 'HAVE_MAIN_MANIFEST';
10222
10223 if (this.isMain_) {
10224 // We have the main playlist at this point, so
10225 // trigger this to allow PlaylistController
10226 // to make an initial playlist selection
10227 this.trigger('loadedplaylist');
10228 } else if (!this.media_) {
10229 // no media playlist was specifically selected so select
10230 // the one the child playlist loader was created with
10231 this.media(this.childPlaylist_);
10232 }
10233 }
10234
10235 handleMain_() {
10236 // clear media request
10237 this.mediaRequest_ = null;
10238 const oldMain = this.mainPlaylistLoader_.main;
10239 let newMain = parseMainXml({
10240 mainXml: this.mainPlaylistLoader_.mainXml_,
10241 srcUrl: this.mainPlaylistLoader_.srcUrl,
10242 clientOffset: this.mainPlaylistLoader_.clientOffset_,
10243 sidxMapping: this.mainPlaylistLoader_.sidxMapping_,
10244 previousManifest: oldMain
10245 }); // if we have an old main to compare the new main against
10246
10247 if (oldMain) {
10248 newMain = updateMain(oldMain, newMain, this.mainPlaylistLoader_.sidxMapping_);
10249 } // only update main if we have a new main
10250
10251
10252 this.mainPlaylistLoader_.main = newMain ? newMain : oldMain;
10253 const location = this.mainPlaylistLoader_.main.locations && this.mainPlaylistLoader_.main.locations[0];
10254
10255 if (location && location !== this.mainPlaylistLoader_.srcUrl) {
10256 this.mainPlaylistLoader_.srcUrl = location;
10257 }
10258
10259 if (!oldMain || newMain && newMain.minimumUpdatePeriod !== oldMain.minimumUpdatePeriod) {
10260 this.updateMinimumUpdatePeriodTimeout_();
10261 }
10262
10263 this.addEventStreamToMetadataTrack_(newMain);
10264 return Boolean(newMain);
10265 }
10266
10267 updateMinimumUpdatePeriodTimeout_() {
10268 const mpl = this.mainPlaylistLoader_; // cancel any pending creation of mup on media
10269 // a new one will be added if needed.
10270
10271 if (mpl.createMupOnMedia_) {
10272 mpl.off('loadedmetadata', mpl.createMupOnMedia_);
10273 mpl.createMupOnMedia_ = null;
10274 } // clear any pending timeouts
10275
10276
10277 if (mpl.minimumUpdatePeriodTimeout_) {
10278 window.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
10279 mpl.minimumUpdatePeriodTimeout_ = null;
10280 }
10281
10282 let mup = mpl.main && mpl.main.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
10283 // MPD has no future validity, so a new one will need to be acquired when new
10284 // media segments are to be made available. Thus, we use the target duration
10285 // in this case
10286
10287 if (mup === 0) {
10288 if (mpl.media()) {
10289 mup = mpl.media().targetDuration * 1000;
10290 } else {
10291 mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
10292 mpl.one('loadedmetadata', mpl.createMupOnMedia_);
10293 }
10294 } // if minimumUpdatePeriod is invalid or <= zero, which
10295 // can happen when a live video becomes VOD, skip timeout
10296 // creation.
10297
10298
10299 if (typeof mup !== 'number' || mup <= 0) {
10300 if (mup < 0) {
10301 this.logger_(`found invalid minimumUpdatePeriod of ${mup}, not setting a timeout`);
10302 }
10303
10304 return;
10305 }
10306
10307 this.createMUPTimeout_(mup);
10308 }
10309
10310 createMUPTimeout_(mup) {
10311 const mpl = this.mainPlaylistLoader_;
10312 mpl.minimumUpdatePeriodTimeout_ = window.setTimeout(() => {
10313 mpl.minimumUpdatePeriodTimeout_ = null;
10314 mpl.trigger('minimumUpdatePeriod');
10315 mpl.createMUPTimeout_(mup);
10316 }, mup);
10317 }
10318 /**
10319 * Sends request to refresh the main xml and updates the parsed main manifest
10320 */
10321
10322
10323 refreshXml_() {
10324 this.requestMain_((req, mainChanged) => {
10325 if (!mainChanged) {
10326 return;
10327 }
10328
10329 if (this.media_) {
10330 this.media_ = this.mainPlaylistLoader_.main.playlists[this.media_.id];
10331 } // This will filter out updated sidx info from the mapping
10332
10333
10334 this.mainPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(this.mainPlaylistLoader_.main, this.mainPlaylistLoader_.sidxMapping_);
10335 this.addSidxSegments_(this.media(), this.state, sidxChanged => {
10336 // TODO: do we need to reload the current playlist?
10337 this.refreshMedia_(this.media().id);
10338 });
10339 });
10340 }
10341 /**
10342 * Refreshes the media playlist by re-parsing the main xml and updating playlist
10343 * references. If this is an alternate loader, the updated parsed manifest is retrieved
10344 * from the main loader.
10345 */
10346
10347
10348 refreshMedia_(mediaID) {
10349 if (!mediaID) {
10350 throw new Error('refreshMedia_ must take a media id');
10351 } // for main we have to reparse the main xml
10352 // to re-create segments based on current timing values
10353 // which may change media. We only skip updating the main manifest
10354 // if this is the first time this.media_ is being set.
10355 // as main was just parsed in that case.
10356
10357
10358 if (this.media_ && this.isMain_) {
10359 this.handleMain_();
10360 }
10361
10362 const playlists = this.mainPlaylistLoader_.main.playlists;
10363 const mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
10364
10365 if (mediaChanged) {
10366 this.media_ = playlists[mediaID];
10367 } else {
10368 this.trigger('playlistunchanged');
10369 }
10370
10371 if (!this.mediaUpdateTimeout) {
10372 const createMediaUpdateTimeout = () => {
10373 if (this.media().endList) {
10374 return;
10375 }
10376
10377 this.mediaUpdateTimeout = window.setTimeout(() => {
10378 this.trigger('mediaupdatetimeout');
10379 createMediaUpdateTimeout();
10380 }, refreshDelay(this.media(), Boolean(mediaChanged)));
10381 };
10382
10383 createMediaUpdateTimeout();
10384 }
10385
10386 this.trigger('loadedplaylist');
10387 }
10388 /**
10389 * Takes eventstream data from a parsed DASH manifest and adds it to the metadata text track.
10390 *
10391 * @param {Object} newMain the newly parsed manifest
10392 */
10393
10394
10395 addEventStreamToMetadataTrack_(newMain) {
10396 // Only add new event stream metadata if we have a new manifest.
10397 if (newMain && this.mainPlaylistLoader_.main.eventStream) {
10398 // convert EventStream to ID3-like data.
10399 const metadataArray = this.mainPlaylistLoader_.main.eventStream.map(eventStreamNode => {
10400 return {
10401 cueTime: eventStreamNode.start,
10402 frames: [{
10403 data: eventStreamNode.messageData
10404 }]
10405 };
10406 });
10407 this.addMetadataToTextTrack('EventStream', metadataArray, this.mainPlaylistLoader_.main.duration);
10408 }
10409 }
10410 /**
10411 * Returns the key ID set from a playlist
10412 *
10413 * @param {Object} playlist the playlist to fetch the key ID set from.
10414 * @return {Set} a Set of 32-digit hex strings that represent the unique keyIds for that playlist.
10415 */
10416
10417
10418 getKeyIdSet(playlist) {
10419 if (playlist.contentProtection) {
10420 const keyIds = new Set();
10421
10422 for (const keysystem in playlist.contentProtection) {
10423 const defaultKID = playlist.contentProtection[keysystem].attributes['cenc:default_KID'];
10424
10425 if (defaultKID) {
10426 // DASH keyIds are separated by dashes.
10427 keyIds.add(defaultKID.replace(/-/g, '').toLowerCase());
10428 }
10429 }
10430
10431 return keyIds;
10432 }
10433 }
10434
10435 }
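// getKeyIdSet sketch: a playlist whose contentProtection entry carries a
// 'cenc:default_KID' of '12345678-9ABC-DEF0-1234-56789ABCDEF0' yields a Set
// containing '123456789abcdef0123456789abcdef0' (dashes stripped, lowercased).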
10436
10437 var Config = {
10438 GOAL_BUFFER_LENGTH: 30,
10439 MAX_GOAL_BUFFER_LENGTH: 60,
10440 BACK_BUFFER_LENGTH: 30,
10441 GOAL_BUFFER_LENGTH_RATE: 1,
10442 // 0.5 MB/s
10443 INITIAL_BANDWIDTH: 4194304,
10444 // A fudge factor to apply to advertised playlist bitrates to account for
10445 // temporary fluctuations in client bandwidth
10446 BANDWIDTH_VARIANCE: 1.2,
10447 // How much of the buffer must be filled before we consider upswitching
10448 BUFFER_LOW_WATER_LINE: 0,
10449 MAX_BUFFER_LOW_WATER_LINE: 30,
10450 // TODO: Remove this when experimentalBufferBasedABR is removed
10451 EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
10452 BUFFER_LOW_WATER_LINE_RATE: 1,
10453 // If the buffer is greater than the high water line, we won't switch down
10454 BUFFER_HIGH_WATER_LINE: 30
10455 };
10456
10457 const stringToArrayBuffer = string => {
10458 const view = new Uint8Array(new ArrayBuffer(string.length));
10459
10460 for (let i = 0; i < string.length; i++) {
10461 view[i] = string.charCodeAt(i);
10462 }
10463
10464 return view.buffer;
10465 };
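// Worked example: stringToArrayBuffer('abc') returns a 3-byte ArrayBuffer
// holding [0x61, 0x62, 0x63]. Only the low 8 bits of each char code survive,
// which is what we want for binary responses that were read as text.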
10466
10467 /* global Blob, BlobBuilder, Worker */
10468 // unify worker interface
10469 const browserWorkerPolyFill = function (workerObj) {
10470 // node only supports on/off
10471 workerObj.on = workerObj.addEventListener;
10472 workerObj.off = workerObj.removeEventListener;
10473 return workerObj;
10474 };
10475
10476 const createObjectURL = function (str) {
10477 try {
10478 return URL.createObjectURL(new Blob([str], {
10479 type: 'application/javascript'
10480 }));
10481 } catch (e) {
10482 const blob = new BlobBuilder();
10483 blob.append(str);
10484 return URL.createObjectURL(blob.getBlob());
10485 }
10486 };
10487
10488 const factory = function (code) {
10489 return function () {
10490 const objectUrl = createObjectURL(code);
10491 const worker = browserWorkerPolyFill(new Worker(objectUrl));
10492 worker.objURL = objectUrl;
10493 const terminate = worker.terminate;
10494 worker.on = worker.addEventListener;
10495 worker.off = worker.removeEventListener;
10496
10497 worker.terminate = function () {
10498 URL.revokeObjectURL(objectUrl);
10499 return terminate.call(this);
10500 };
10501
10502 return worker;
10503 };
10504 };
10505 const transform = function (code) {
10506 return `var browserWorkerPolyFill = ${browserWorkerPolyFill.toString()};\n` + 'browserWorkerPolyFill(self);\n' + code;
10507 };
10508
10509 const getWorkerString = function (fn) {
10510 return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
10511 };
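// Sketch of how these helpers combine just below: a function body is
// stringified, wrapped so browserWorkerPolyFill runs inside the worker, and
// handed to factory(), which yields a constructor for blob-URL workers:
//
//   const createWorker = factory(transform(getWorkerString(function () { /* worker body */ })));
//   const worker = createWorker();
//   worker.terminate(); // also revokes the underlying object URL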
10512
10513 /* rollup-plugin-worker-factory start for worker!/home/runner/work/http-streaming/http-streaming/src/transmuxer-worker.js */
10514 const workerCode$1 = transform(getWorkerString(function () {
10515
10516 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
10517 /**
10518 * mux.js
10519 *
10520 * Copyright (c) Brightcove
10521 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10522 *
10523 * A lightweight readable stream implementation that handles event dispatching.
10524 * Objects that inherit from streams should call init in their constructors.
10525 */
10526
10527 var Stream$8 = function () {
10528 this.init = function () {
10529 var listeners = {};
10530 /**
10531 * Add a listener for a specified event type.
10532 * @param type {string} the event name
10533 * @param listener {function} the callback to be invoked when an event of
10534 * the specified type occurs
10535 */
10536
10537 this.on = function (type, listener) {
10538 if (!listeners[type]) {
10539 listeners[type] = [];
10540 }
10541
10542 listeners[type] = listeners[type].concat(listener);
10543 };
10544 /**
10545 * Remove a listener for a specified event type.
10546 * @param type {string} the event name
10547 * @param listener {function} a function previously registered for this
10548 * type of event through `on`
10549 */
10550
10551
10552 this.off = function (type, listener) {
10553 var index;
10554
10555 if (!listeners[type]) {
10556 return false;
10557 }
10558
10559 index = listeners[type].indexOf(listener);
10560 listeners[type] = listeners[type].slice();
10561 listeners[type].splice(index, 1);
10562 return index > -1;
10563 };
10564 /**
10565 * Trigger an event of the specified type on this stream. Any additional
10566 * arguments to this function are passed as parameters to event listeners.
10567 * @param type {string} the event name
10568 */
10569
10570
10571 this.trigger = function (type) {
10572 var callbacks, i, length, args;
10573 callbacks = listeners[type];
10574
10575 if (!callbacks) {
10576 return;
10577 } // Slicing the arguments on every invocation of this method
10578 // can add a significant amount of overhead. Avoid the
10579 // intermediate object creation for the common case of a
10580 // single callback argument
10581
10582
10583 if (arguments.length === 2) {
10584 length = callbacks.length;
10585
10586 for (i = 0; i < length; ++i) {
10587 callbacks[i].call(this, arguments[1]);
10588 }
10589 } else {
10590 args = [];
10591 i = arguments.length;
10592
10593 for (i = 1; i < arguments.length; ++i) {
10594 args.push(arguments[i]);
10595 }
10596
10597 length = callbacks.length;
10598
10599 for (i = 0; i < length; ++i) {
10600 callbacks[i].apply(this, args);
10601 }
10602 }
10603 };
10604 /**
10605 * Destroys the stream and cleans up.
10606 */
10607
10608
10609 this.dispose = function () {
10610 listeners = {};
10611 };
10612 };
10613 };
10614 /**
10615 * Forwards all `data` events on this stream to the destination stream. The
10616 * destination stream should provide a method `push` to receive the data
10617 * events as they arrive.
10618 * @param destination {stream} the stream that will receive all `data` events
10619 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
10620 * when the current stream emits a 'done' event
10621 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
10622 */
10623
10624
10625 Stream$8.prototype.pipe = function (destination) {
10626 this.on('data', function (data) {
10627 destination.push(data);
10628 });
10629 this.on('done', function (flushSource) {
10630 destination.flush(flushSource);
10631 });
10632 this.on('partialdone', function (flushSource) {
10633 destination.partialFlush(flushSource);
10634 });
10635 this.on('endedtimeline', function (flushSource) {
10636 destination.endTimeline(flushSource);
10637 });
10638 this.on('reset', function (flushSource) {
10639 destination.reset(flushSource);
10640 });
10641 return destination;
10642 }; // Default stream functions that are expected to be overridden to perform
10643 // actual work. These are provided by the prototype as a sort of no-op
10644 // implementation so that we don't have to check for their existence in the
10645 // `pipe` function above.
10646
10647
10648 Stream$8.prototype.push = function (data) {
10649 this.trigger('data', data);
10650 };
10651
10652 Stream$8.prototype.flush = function (flushSource) {
10653 this.trigger('done', flushSource);
10654 };
10655
10656 Stream$8.prototype.partialFlush = function (flushSource) {
10657 this.trigger('partialdone', flushSource);
10658 };
10659
10660 Stream$8.prototype.endTimeline = function (flushSource) {
10661 this.trigger('endedtimeline', flushSource);
10662 };
10663
10664 Stream$8.prototype.reset = function (flushSource) {
10665 this.trigger('reset', flushSource);
10666 };
10667
10668 var stream = Stream$8;
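 // Usage sketch (illustrative, not part of the library): wiring a source
 // stream to a sink with `pipe`. The `source`/`sink` names and the logging
 // override are hypothetical.
 //
 //   var source = new Stream$8();
 //   source.init();
 //   var sink = new Stream$8();
 //   sink.init();
 //   sink.push = function (data) { console.log('received', data); };
 //   source.pipe(sink); // forwards 'data', 'done', 'reset', etc.
 //   source.push(1);    // logs: received 1
 //   source.flush();    // calls sink.flush(), which triggers 'done' on sink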
10669 var MAX_UINT32$1 = Math.pow(2, 32);
10670
10671 var getUint64$3 = function (uint8) {
10672 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
10673 var value;
10674
10675 if (dv.getBigUint64) {
10676 value = dv.getBigUint64(0);
10677
10678 if (value < Number.MAX_SAFE_INTEGER) {
10679 return Number(value);
10680 }
10681
10682 return value;
10683 }
10684
10685 return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);
10686 };
10687
10688 var numbers = {
10689 getUint64: getUint64$3,
10690 MAX_UINT32: MAX_UINT32$1
10691 };
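 // Worked example (illustrative): the bytes below encode the unsigned 64-bit
 // value 0x0000000100000002. With DataView#getBigUint64 available this reads
 // as a BigInt and is downcast to a Number because it fits within
 // Number.MAX_SAFE_INTEGER; the fallback path computes the same result as
 // high * 2^32 + low = 1 * 4294967296 + 2.
 //
 //   getUint64$3(new Uint8Array([0, 0, 0, 1, 0, 0, 0, 2])); // 4294967298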
10692 /**
10693 * mux.js
10694 *
10695 * Copyright (c) Brightcove
10696 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10697 *
10698 * Functions that generate fragmented MP4s suitable for use with Media
10699 * Source Extensions.
10700 */
10701
10702 var MAX_UINT32 = numbers.MAX_UINT32;
10703 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
10704
10705 (function () {
10706 var i;
10707 types = {
10708 avc1: [],
10709 // codingname
10710 avcC: [],
10711 btrt: [],
10712 dinf: [],
10713 dref: [],
10714 esds: [],
10715 ftyp: [],
10716 hdlr: [],
10717 mdat: [],
10718 mdhd: [],
10719 mdia: [],
10720 mfhd: [],
10721 minf: [],
10722 moof: [],
10723 moov: [],
10724 mp4a: [],
10725 // codingname
10726 mvex: [],
10727 mvhd: [],
10728 pasp: [],
10729 sdtp: [],
10730 smhd: [],
10731 stbl: [],
10732 stco: [],
10733 stsc: [],
10734 stsd: [],
10735 stsz: [],
10736 stts: [],
10737 styp: [],
10738 tfdt: [],
10739 tfhd: [],
10740 traf: [],
10741 trak: [],
10742 trun: [],
10743 trex: [],
10744 tkhd: [],
10745 vmhd: []
10746 }; // In environments where Uint8Array is undefined (e.g., IE8), skip setup so that we
10747 // don't throw an error
10748
10749 if (typeof Uint8Array === 'undefined') {
10750 return;
10751 }
10752
10753 for (i in types) {
10754 if (types.hasOwnProperty(i)) {
10755 types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
10756 }
10757 }
10758
10759 MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
10760 AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
10761 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
10762 VIDEO_HDLR = new Uint8Array([0x00, // version 0
10763 0x00, 0x00, 0x00, // flags
10764 0x00, 0x00, 0x00, 0x00, // pre_defined
10765 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
10766 0x00, 0x00, 0x00, 0x00, // reserved
10767 0x00, 0x00, 0x00, 0x00, // reserved
10768 0x00, 0x00, 0x00, 0x00, // reserved
10769 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
10770 ]);
10771 AUDIO_HDLR = new Uint8Array([0x00, // version 0
10772 0x00, 0x00, 0x00, // flags
10773 0x00, 0x00, 0x00, 0x00, // pre_defined
10774 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
10775 0x00, 0x00, 0x00, 0x00, // reserved
10776 0x00, 0x00, 0x00, 0x00, // reserved
10777 0x00, 0x00, 0x00, 0x00, // reserved
10778 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
10779 ]);
10780 HDLR_TYPES = {
10781 video: VIDEO_HDLR,
10782 audio: AUDIO_HDLR
10783 };
10784 DREF = new Uint8Array([0x00, // version 0
10785 0x00, 0x00, 0x00, // flags
10786 0x00, 0x00, 0x00, 0x01, // entry_count
10787 0x00, 0x00, 0x00, 0x0c, // entry_size
10788 0x75, 0x72, 0x6c, 0x20, // 'url' type
10789 0x00, // version 0
10790 0x00, 0x00, 0x01 // entry_flags
10791 ]);
10792 SMHD = new Uint8Array([0x00, // version
10793 0x00, 0x00, 0x00, // flags
10794 0x00, 0x00, // balance, 0 means centered
10795 0x00, 0x00 // reserved
10796 ]);
10797 STCO = new Uint8Array([0x00, // version
10798 0x00, 0x00, 0x00, // flags
10799 0x00, 0x00, 0x00, 0x00 // entry_count
10800 ]);
10801 STSC = STCO;
10802 STSZ = new Uint8Array([0x00, // version
10803 0x00, 0x00, 0x00, // flags
10804 0x00, 0x00, 0x00, 0x00, // sample_size
10805 0x00, 0x00, 0x00, 0x00 // sample_count
10806 ]);
10807 STTS = STCO;
10808 VMHD = new Uint8Array([0x00, // version
10809 0x00, 0x00, 0x01, // flags
10810 0x00, 0x00, // graphicsmode
10811 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
10812 ]);
10813 })();
10814
10815 box = function (type) {
10816 var payload = [],
10817 size = 0,
10818 i,
10819 result,
10820 view;
10821
10822 for (i = 1; i < arguments.length; i++) {
10823 payload.push(arguments[i]);
10824 }
10825
10826 i = payload.length; // calculate the total size we need to allocate
10827
10828 while (i--) {
10829 size += payload[i].byteLength;
10830 }
10831
10832 result = new Uint8Array(size + 8);
10833 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
10834 view.setUint32(0, result.byteLength);
10835 result.set(type, 4); // copy the payload into the result
10836
10837 for (i = 0, size = 8; i < payload.length; i++) {
10838 result.set(payload[i], size);
10839 size += payload[i].byteLength;
10840 }
10841
10842 return result;
10843 };
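 // Worked example (illustrative): every generated box is length-prefixed.
 //
 //   var b = box(types.mdat, new Uint8Array([1, 2, 3]));
 //   // b.byteLength === 11:
 //   //   bytes 0-3: total size as a big-endian uint32 (0x0000000B)
 //   //   bytes 4-7: the four-character type ('mdat')
 //   //   bytes 8-10: the payload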
10844
10845 dinf = function () {
10846 return box(types.dinf, box(types.dref, DREF));
10847 };
10848
10849 esds = function (track) {
10850 return box(types.esds, new Uint8Array([0x00, // version
10851 0x00, 0x00, 0x00, // flags
10852 // ES_Descriptor
10853 0x03, // tag, ES_DescrTag
10854 0x19, // length
10855 0x00, 0x00, // ES_ID
10856 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
10857 // DecoderConfigDescriptor
10858 0x04, // tag, DecoderConfigDescrTag
10859 0x11, // length
10860 0x40, // object type
10861 0x15, // streamType
10862 0x00, 0x06, 0x00, // bufferSizeDB
10863 0x00, 0x00, 0xda, 0xc0, // maxBitrate
10864 0x00, 0x00, 0xda, 0xc0, // avgBitrate
10865 // DecoderSpecificInfo
10866 0x05, // tag, DecoderSpecificInfoTag
10867 0x02, // length
10868 // ISO/IEC 14496-3, AudioSpecificConfig
10869 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
10870 track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
10871 ]));
10872 };
10873
10874 ftyp = function () {
10875 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
10876 };
10877
10878 hdlr = function (type) {
10879 return box(types.hdlr, HDLR_TYPES[type]);
10880 };
10881
10882 mdat = function (data) {
10883 return box(types.mdat, data);
10884 };
10885
10886 mdhd = function (track) {
10887 var result = new Uint8Array([0x00, // version 0
10888 0x00, 0x00, 0x00, // flags
10889 0x00, 0x00, 0x00, 0x02, // creation_time
10890 0x00, 0x00, 0x00, 0x03, // modification_time
10891 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
10892 track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
10893 0x55, 0xc4, // 'und' language (undetermined)
10894 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
10895 // defined. The sample rate can be parsed out of an ADTS header, for
10896 // instance.
10897
10898 if (track.samplerate) {
10899 result[12] = track.samplerate >>> 24 & 0xFF;
10900 result[13] = track.samplerate >>> 16 & 0xFF;
10901 result[14] = track.samplerate >>> 8 & 0xFF;
10902 result[15] = track.samplerate & 0xFF;
10903 }
10904
10905 return box(types.mdhd, result);
10906 };
10907
10908 mdia = function (track) {
10909 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
10910 };
10911
10912 mfhd = function (sequenceNumber) {
10913 return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
10914 (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
10915 ]));
10916 };
10917
10918 minf = function (track) {
10919 return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
10920 };
10921
10922 moof = function (sequenceNumber, tracks) {
10923 var trackFragments = [],
10924 i = tracks.length; // build traf boxes for each track fragment
10925
10926 while (i--) {
10927 trackFragments[i] = traf(tracks[i]);
10928 }
10929
10930 return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
10931 };
10932 /**
10933 * Returns a movie box.
10934 * @param tracks {array} the tracks associated with this movie
10935 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
10936 */
10937
10938
10939 moov = function (tracks) {
10940 var i = tracks.length,
10941 boxes = [];
10942
10943 while (i--) {
10944 boxes[i] = trak(tracks[i]);
10945 }
10946
10947 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
10948 };
10949
10950 mvex = function (tracks) {
10951 var i = tracks.length,
10952 boxes = [];
10953
10954 while (i--) {
10955 boxes[i] = trex(tracks[i]);
10956 }
10957
10958 return box.apply(null, [types.mvex].concat(boxes));
10959 };
10960
10961 mvhd = function (duration) {
10962 var bytes = new Uint8Array([0x00, // version 0
10963 0x00, 0x00, 0x00, // flags
10964 0x00, 0x00, 0x00, 0x01, // creation_time
10965 0x00, 0x00, 0x00, 0x02, // modification_time
10966 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
10967 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
10968 0x00, 0x01, 0x00, 0x00, // 1.0 rate
10969 0x01, 0x00, // 1.0 volume
10970 0x00, 0x00, // reserved
10971 0x00, 0x00, 0x00, 0x00, // reserved
10972 0x00, 0x00, 0x00, 0x00, // reserved
10973 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
10974 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
10975 0xff, 0xff, 0xff, 0xff // next_track_ID
10976 ]);
10977 return box(types.mvhd, bytes);
10978 };
10979
10980 sdtp = function (track) {
10981 var samples = track.samples || [],
10982 bytes = new Uint8Array(4 + samples.length),
10983 flags,
10984 i; // leave the full box header (4 bytes) all zero
10985 // write the sample table
10986
10987 for (i = 0; i < samples.length; i++) {
10988 flags = samples[i].flags;
10989 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
10990 }
10991
10992 return box(types.sdtp, bytes);
10993 };
10994
10995 stbl = function (track) {
10996 return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
10997 };
10998
10999 (function () {
11000 var videoSample, audioSample;
11001
11002 stsd = function (track) {
11003 return box(types.stsd, new Uint8Array([0x00, // version 0
11004 0x00, 0x00, 0x00, // flags
11005 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
11006 };
11007
11008 videoSample = function (track) {
11009 var sps = track.sps || [],
11010 pps = track.pps || [],
11011 sequenceParameterSets = [],
11012 pictureParameterSets = [],
11013 i,
11014 avc1Box; // assemble the SPSs
11015
11016 for (i = 0; i < sps.length; i++) {
11017 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
11018 sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
11019
11020 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
11021 } // assemble the PPSs
11022
11023
11024 for (i = 0; i < pps.length; i++) {
11025 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
11026 pictureParameterSets.push(pps[i].byteLength & 0xFF);
11027 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
11028 }
11029
11030 avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
11031 0x00, 0x01, // data_reference_index
11032 0x00, 0x00, // pre_defined
11033 0x00, 0x00, // reserved
11034 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
11035 (track.width & 0xff00) >> 8, track.width & 0xff, // width
11036 (track.height & 0xff00) >> 8, track.height & 0xff, // height
11037 0x00, 0x48, 0x00, 0x00, // horizresolution
11038 0x00, 0x48, 0x00, 0x00, // vertresolution
11039 0x00, 0x00, 0x00, 0x00, // reserved
11040 0x00, 0x01, // frame_count
11041 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
11042 0x00, 0x18, // depth = 24
11043 0x11, 0x11 // pre_defined = -1
11044 ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
11045 track.profileIdc, // AVCProfileIndication
11046 track.profileCompatibility, // profile_compatibility
11047 track.levelIdc, // AVCLevelIndication
11048 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
11049 ].concat([sps.length], // numOfSequenceParameterSets
11050 sequenceParameterSets, // "SPS"
11051 [pps.length], // numOfPictureParameterSets
11052 pictureParameterSets // "PPS"
11053 ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
11054 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
11055 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
11056 ]))];
11057
11058 if (track.sarRatio) {
11059 var hSpacing = track.sarRatio[0],
11060 vSpacing = track.sarRatio[1];
11061 avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
11062 }
11063
11064 return box.apply(null, avc1Box);
11065 };
11066
11067 audioSample = function (track) {
11068 return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
11069 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
11070 0x00, 0x01, // data_reference_index
11071 // AudioSampleEntry, ISO/IEC 14496-12
11072 0x00, 0x00, 0x00, 0x00, // reserved
11073 0x00, 0x00, 0x00, 0x00, // reserved
11074 (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
11075 (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
11076 0x00, 0x00, // pre_defined
11077 0x00, 0x00, // reserved
11078 (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
11079 // MP4AudioSampleEntry, ISO/IEC 14496-14
11080 ]), esds(track));
11081 };
11082 })();
11083
11084 tkhd = function (track) {
11085 var result = new Uint8Array([0x00, // version 0
11086 0x00, 0x00, 0x07, // flags
11087 0x00, 0x00, 0x00, 0x00, // creation_time
11088 0x00, 0x00, 0x00, 0x00, // modification_time
11089 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
11090 0x00, 0x00, 0x00, 0x00, // reserved
11091 (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
11092 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
11093 0x00, 0x00, // layer
11094 0x00, 0x00, // alternate_group
11095 0x01, 0x00, // non-audio track volume
11096 0x00, 0x00, // reserved
11097 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
11098 (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
11099 (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
11100 ]);
11101 return box(types.tkhd, result);
11102 };
11103 /**
11104 * Generate a track fragment (traf) box. A traf box collects metadata
11105 * about tracks in a movie fragment (moof) box.
11106 */
11107
11108
11109 traf = function (track) {
11110 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
11111 trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
11112 0x00, 0x00, 0x3a, // flags
11113 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
11114 0x00, 0x00, 0x00, 0x01, // sample_description_index
11115 0x00, 0x00, 0x00, 0x00, // default_sample_duration
11116 0x00, 0x00, 0x00, 0x00, // default_sample_size
11117 0x00, 0x00, 0x00, 0x00 // default_sample_flags
11118 ]));
11119 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);
11120 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);
11121 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
11122 0x00, 0x00, 0x00, // flags
11123 // baseMediaDecodeTime
11124 upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
11125 // the containing moof to the first payload byte of the associated
11126 // mdat
11127
11128 dataOffset = 32 + // tfhd
11129 20 + // tfdt
11130 8 + // traf header
11131 16 + // mfhd
11132 8 + // moof header
11133 8; // mdat header
11134 // audio tracks require less metadata
11135
11136 if (track.type === 'audio') {
11137 trackFragmentRun = trun$1(track, dataOffset);
11138 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
11139 } // video tracks should contain an independent and disposable samples
11140 // box (sdtp)
11141 // generate one and adjust offsets to match
11142
11143
11144 sampleDependencyTable = sdtp(track);
11145 trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
11146 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
11147 };
11148 /**
11149 * Generate a track box.
11150 * @param track {object} a track definition
11151 * @return {Uint8Array} the track box
11152 */
11153
11154
11155 trak = function (track) {
11156 track.duration = track.duration || 0xffffffff;
11157 return box(types.trak, tkhd(track), mdia(track));
11158 };
11159
11160 trex = function (track) {
11161 var result = new Uint8Array([0x00, // version 0
11162 0x00, 0x00, 0x00, // flags
11163 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
11164 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
11165 0x00, 0x00, 0x00, 0x00, // default_sample_duration
11166 0x00, 0x00, 0x00, 0x00, // default_sample_size
11167 0x00, 0x01, 0x00, 0x01 // default_sample_flags
11168 ]); // the last two bytes of default_sample_flags is the sample
11169 // degradation priority, a hint about the importance of this sample
11170 // relative to others. Lower the degradation priority for all sample
11171 // types other than video.
11172
11173 if (track.type !== 'video') {
11174 result[result.length - 1] = 0x00;
11175 }
11176
11177 return box(types.trex, result);
11178 };
11179
11180 (function () {
11181 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
11182 // duration is present for the first sample, it will be present for
11183 // all subsequent samples.
11184 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
11185
11186 trunHeader = function (samples, offset) {
11187 var durationPresent = 0,
11188 sizePresent = 0,
11189 flagsPresent = 0,
11190 compositionTimeOffset = 0; // trun flag constants
11191
11192 if (samples.length) {
11193 if (samples[0].duration !== undefined) {
11194 durationPresent = 0x1;
11195 }
11196
11197 if (samples[0].size !== undefined) {
11198 sizePresent = 0x2;
11199 }
11200
11201 if (samples[0].flags !== undefined) {
11202 flagsPresent = 0x4;
11203 }
11204
11205 if (samples[0].compositionTimeOffset !== undefined) {
11206 compositionTimeOffset = 0x8;
11207 }
11208 }
11209
11210 return [0x00, // version 0
11211 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
11212 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
11213 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
11214 ];
11215 };
11216
11217 videoTrun = function (track, offset) {
11218 var bytesOffest, bytes, header, samples, sample, i;
11219 samples = track.samples || [];
11220 offset += 8 + 12 + 16 * samples.length;
11221 header = trunHeader(samples, offset);
11222 bytes = new Uint8Array(header.length + samples.length * 16);
11223 bytes.set(header);
11224 bytesOffest = header.length;
11225
11226 for (i = 0; i < samples.length; i++) {
11227 sample = samples[i];
11228 bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
11229 bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
11230 bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
11231 bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration
11232
11233 bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
11234 bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
11235 bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
11236 bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
11237
11238 bytes[bytesOffest++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
11239 bytes[bytesOffest++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
11240 bytes[bytesOffest++] = (sample.flags.degradationPriority & 0xFF00) >>> 8;
11241 bytes[bytesOffest++] = sample.flags.degradationPriority & 0xFF; // sample_flags
11242
11243 bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
11244 bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
11245 bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
11246 bytes[bytesOffest++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
11247 }
11248
11249 return box(types.trun, bytes);
11250 };
11251
11252 audioTrun = function (track, offset) {
11253 var bytes, bytesOffest, header, samples, sample, i;
11254 samples = track.samples || [];
11255 offset += 8 + 12 + 8 * samples.length;
11256 header = trunHeader(samples, offset);
11257 bytes = new Uint8Array(header.length + samples.length * 8);
11258 bytes.set(header);
11259 bytesOffest = header.length;
11260
11261 for (i = 0; i < samples.length; i++) {
11262 sample = samples[i];
11263 bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
11264 bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
11265 bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
11266 bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration
11267
11268 bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
11269 bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
11270 bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
11271 bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
11272 }
11273
11274 return box(types.trun, bytes);
11275 };
11276
11277 trun$1 = function (track, offset) {
11278 if (track.type === 'audio') {
11279 return audioTrun(track, offset);
11280 }
11281
11282 return videoTrun(track, offset);
11283 };
11284 })();
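 // Worked example (illustrative) for trunHeader: when samples carry duration,
 // size, flags and compositionTimeOffset, the middle flags byte is
 // 0x1 | 0x2 | 0x4 | 0x8 = 0x0F, and together with the low byte 0x01
 // (data-offset-present) the full 24-bit tr_flags field reads 0x000F01,
 // i.e. every per-sample field plus data_offset is present.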
11285
11286 var mp4Generator = {
11287 ftyp: ftyp,
11288 mdat: mdat,
11289 moof: moof,
11290 moov: moov,
11291 initSegment: function (tracks) {
11292 var fileType = ftyp(),
11293 movie = moov(tracks),
11294 result;
11295 result = new Uint8Array(fileType.byteLength + movie.byteLength);
11296 result.set(fileType);
11297 result.set(movie, fileType.byteLength);
11298 return result;
11299 }
11300 };
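 // Usage sketch (illustrative): building an init segment for a minimal AAC
 // audio track. The field values here are assumptions chosen for the example
 // (samplingfrequencyindex 4 corresponds to 44100 Hz per ISO/IEC 14496-3);
 // the transmuxer normally derives them from ADTS headers.
 //
 //   var initSegment = mp4Generator.initSegment([{
 //     id: 1,
 //     type: 'audio',
 //     audioobjecttype: 2,        // AAC LC
 //     samplingfrequencyindex: 4, // 44100 Hz
 //     samplerate: 44100,
 //     channelcount: 2,
 //     samplesize: 16
 //   }]);
 //   // initSegment is a Uint8Array: the ftyp box followed by the moov box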
11301 /**
11302 * mux.js
11303 *
11304 * Copyright (c) Brightcove
11305 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11306 */
11307 // Convert an array of nal units into an array of frames, with each frame composed of the nal units that make up that frame.
11308 // Also keep track of cumulative data about the frame from the nal units such
11309 // as the frame duration, starting pts, etc.
11310
11311 var groupNalsIntoFrames = function (nalUnits) {
11312 var i,
11313 currentNal,
11314 currentFrame = [],
11315 frames = []; // TODO added for LHLS, make sure this is OK
11316
11317 frames.byteLength = 0;
11318 frames.nalCount = 0;
11319 frames.duration = 0;
11320 currentFrame.byteLength = 0;
11321
11322 for (i = 0; i < nalUnits.length; i++) {
11323 currentNal = nalUnits[i]; // Split on 'aud'-type nal units
11324
11325 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
11326 // Since the very first nal unit is expected to be an AUD
11327 // only push to the frames array when currentFrame is not empty
11328 if (currentFrame.length) {
11329 currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
11330
11331 frames.byteLength += currentFrame.byteLength;
11332 frames.nalCount += currentFrame.length;
11333 frames.duration += currentFrame.duration;
11334 frames.push(currentFrame);
11335 }
11336
11337 currentFrame = [currentNal];
11338 currentFrame.byteLength = currentNal.data.byteLength;
11339 currentFrame.pts = currentNal.pts;
11340 currentFrame.dts = currentNal.dts;
11341 } else {
11342 // Specifically flag key frames for ease of use later
11343 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
11344 currentFrame.keyFrame = true;
11345 }
11346
11347 currentFrame.duration = currentNal.dts - currentFrame.dts;
11348 currentFrame.byteLength += currentNal.data.byteLength;
11349 currentFrame.push(currentNal);
11350 }
11351 } // For the last frame, use the duration of the previous frame if we
11352 // have nothing better to go on
11353
11354
11355 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
11356 currentFrame.duration = frames[frames.length - 1].duration;
11357 } // Push the final frame
11358 // TODO added for LHLS, make sure this is OK
11359
11360
11361 frames.byteLength += currentFrame.byteLength;
11362 frames.nalCount += currentFrame.length;
11363 frames.duration += currentFrame.duration;
11364 frames.push(currentFrame);
11365 return frames;
11366 }; // Convert an array of frames into an array of Gops, with each Gop being composed
11367 // of the frames that make up that Gop
11368 // Also keep track of cumulative data about the Gop from the frames, such as the
11369 // Gop duration, starting pts, etc.
11370
11371
11372 var groupFramesIntoGops = function (frames) {
11373 var i,
11374 currentFrame,
11375 currentGop = [],
11376 gops = []; // We must pre-set some of the values on the Gop since we
11377 // keep running totals of these values
11378
11379 currentGop.byteLength = 0;
11380 currentGop.nalCount = 0;
11381 currentGop.duration = 0;
11382 currentGop.pts = frames[0].pts;
11383 currentGop.dts = frames[0].dts; // store some metadata about all the Gops
11384
11385 gops.byteLength = 0;
11386 gops.nalCount = 0;
11387 gops.duration = 0;
11388 gops.pts = frames[0].pts;
11389 gops.dts = frames[0].dts;
11390
11391 for (i = 0; i < frames.length; i++) {
11392 currentFrame = frames[i];
11393
11394 if (currentFrame.keyFrame) {
11395 // Since the very first frame is expected to be a keyframe
11396 // only push to the gops array when currentGop is not empty
11397 if (currentGop.length) {
11398 gops.push(currentGop);
11399 gops.byteLength += currentGop.byteLength;
11400 gops.nalCount += currentGop.nalCount;
11401 gops.duration += currentGop.duration;
11402 }
11403
11404 currentGop = [currentFrame];
11405 currentGop.nalCount = currentFrame.length;
11406 currentGop.byteLength = currentFrame.byteLength;
11407 currentGop.pts = currentFrame.pts;
11408 currentGop.dts = currentFrame.dts;
11409 currentGop.duration = currentFrame.duration;
11410 } else {
11411 currentGop.duration += currentFrame.duration;
11412 currentGop.nalCount += currentFrame.length;
11413 currentGop.byteLength += currentFrame.byteLength;
11414 currentGop.push(currentFrame);
11415 }
11416 }
11417
11418 if (gops.length && currentGop.duration <= 0) {
11419 currentGop.duration = gops[gops.length - 1].duration;
11420 }
11421
11422 gops.byteLength += currentGop.byteLength;
11423 gops.nalCount += currentGop.nalCount;
11424 gops.duration += currentGop.duration; // push the final Gop
11425
11426 gops.push(currentGop);
11427 return gops;
11428 };
11429 /*
11430 * Search for the first keyframe in the GOPs and throw away all frames
11431 * until that keyframe. Then extend the duration of the pulled keyframe
11432 * and pull the PTS and DTS of the keyframe so that it covers the time
11433 * range of the frames that were disposed.
11434 *
11435 * @param {Array} gops video GOPs
11436 * @returns {Array} modified video GOPs
11437 */
11438
11439
11440 var extendFirstKeyFrame = function (gops) {
11441 var currentGop;
11442
11443 if (!gops[0][0].keyFrame && gops.length > 1) {
11444 // Remove the first GOP
11445 currentGop = gops.shift();
11446 gops.byteLength -= currentGop.byteLength;
11447 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
11448 // first gop to cover the time period of the
11449 // frames we just removed
11450
11451 gops[0][0].dts = currentGop.dts;
11452 gops[0][0].pts = currentGop.pts;
11453 gops[0][0].duration += currentGop.duration;
11454 }
11455
11456 return gops;
11457 };
11458 /**
11459 * Default sample object
11460 * see ISO/IEC 14496-12:2012, section 8.6.4.3
11461 */
11462
11463
11464 var createDefaultSample = function () {
11465 return {
11466 size: 0,
11467 flags: {
11468 isLeading: 0,
11469 dependsOn: 1,
11470 isDependedOn: 0,
11471 hasRedundancy: 0,
11472 degradationPriority: 0,
11473 isNonSyncSample: 1
11474 }
11475 };
11476 };
11477 /*
11478 * Collates information from a video frame into an object for eventual
11479 * entry into an MP4 sample table.
11480 *
11481 * @param {Object} frame the video frame
11482 * @param {Number} dataOffset the byte offset to position the sample
11483 * @return {Object} object containing sample table info for a frame
11484 */
11485
11486
11487 var sampleForFrame = function (frame, dataOffset) {
11488 var sample = createDefaultSample();
11489 sample.dataOffset = dataOffset;
11490 sample.compositionTimeOffset = frame.pts - frame.dts;
11491 sample.duration = frame.duration;
11492 sample.size = 4 * frame.length; // Space for nal unit size
11493
11494 sample.size += frame.byteLength;
11495
11496 if (frame.keyFrame) {
11497 sample.flags.dependsOn = 2;
11498 sample.flags.isNonSyncSample = 0;
11499 }
11500
11501 return sample;
11502 }; // generate the track's sample table from an array of gops
11503
11504
11505 var generateSampleTable$1 = function (gops, baseDataOffset) {
11506 var h,
11507 i,
11508 sample,
11509 currentGop,
11510 currentFrame,
11511 dataOffset = baseDataOffset || 0,
11512 samples = [];
11513
11514 for (h = 0; h < gops.length; h++) {
11515 currentGop = gops[h];
11516
11517 for (i = 0; i < currentGop.length; i++) {
11518 currentFrame = currentGop[i];
11519 sample = sampleForFrame(currentFrame, dataOffset);
11520 dataOffset += sample.size;
11521 samples.push(sample);
11522 }
11523 }
11524
11525 return samples;
11526 }; // generate the track's raw mdat data from an array of gops
11527
11528
11529 var concatenateNalData = function (gops) {
11530 var h,
11531 i,
11532 j,
11533 currentGop,
11534 currentFrame,
11535 currentNal,
11536 dataOffset = 0,
11537 nalsByteLength = gops.byteLength,
11538 numberOfNals = gops.nalCount,
11539 totalByteLength = nalsByteLength + 4 * numberOfNals,
11540 data = new Uint8Array(totalByteLength),
11541 view = new DataView(data.buffer); // For each Gop..
11542
11543 for (h = 0; h < gops.length; h++) {
11544 currentGop = gops[h]; // For each Frame..
11545
11546 for (i = 0; i < currentGop.length; i++) {
11547 currentFrame = currentGop[i]; // For each NAL..
11548
11549 for (j = 0; j < currentFrame.length; j++) {
11550 currentNal = currentFrame[j];
11551 view.setUint32(dataOffset, currentNal.data.byteLength);
11552 dataOffset += 4;
11553 data.set(currentNal.data, dataOffset);
11554 dataOffset += currentNal.data.byteLength;
11555 }
11556 }
11557 }
11558
11559 return data;
11560 }; // generate the track's sample table from a frame
11561
11562
11563 var generateSampleTableForFrame = function (frame, baseDataOffset) {
11564 var sample,
11565 dataOffset = baseDataOffset || 0,
11566 samples = [];
11567 sample = sampleForFrame(frame, dataOffset);
11568 samples.push(sample);
11569 return samples;
11570 }; // generate the track's raw mdat data from a frame
11571
11572
11573 var concatenateNalDataForFrame = function (frame) {
11574 var i,
11575 currentNal,
11576 dataOffset = 0,
11577 nalsByteLength = frame.byteLength,
11578 numberOfNals = frame.length,
11579 totalByteLength = nalsByteLength + 4 * numberOfNals,
11580 data = new Uint8Array(totalByteLength),
11581 view = new DataView(data.buffer); // For each NAL..
11582
11583 for (i = 0; i < frame.length; i++) {
11584 currentNal = frame[i];
11585 view.setUint32(dataOffset, currentNal.data.byteLength);
11586 dataOffset += 4;
11587 data.set(currentNal.data, dataOffset);
11588 dataOffset += currentNal.data.byteLength;
11589 }
11590
11591 return data;
11592 };
11593
11594 var frameUtils$1 = {
11595 groupNalsIntoFrames: groupNalsIntoFrames,
11596 groupFramesIntoGops: groupFramesIntoGops,
11597 extendFirstKeyFrame: extendFirstKeyFrame,
11598 generateSampleTable: generateSampleTable$1,
11599 concatenateNalData: concatenateNalData,
11600 generateSampleTableForFrame: generateSampleTableForFrame,
11601 concatenateNalDataForFrame: concatenateNalDataForFrame
11602 };
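 // Pipeline sketch (illustrative): how the video segment stream typically
 // chains these helpers. `nalUnits` is a hypothetical array of parsed NAL
 // unit objects carrying nalUnitType, data, pts and dts fields.
 //
 //   var frames = frameUtils$1.groupNalsIntoFrames(nalUnits);
 //   var gops = frameUtils$1.extendFirstKeyFrame(frameUtils$1.groupFramesIntoGops(frames));
 //   var samples = frameUtils$1.generateSampleTable(gops, 0);
 //   var nalData = frameUtils$1.concatenateNalData(gops); // payload for mdat()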
11603 /**
11604 * mux.js
11605 *
11606 * Copyright (c) Brightcove
11607 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11608 */
11609
11610 var highPrefix = [33, 16, 5, 32, 164, 27];
11611 var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
11612
11613 var zeroFill = function (count) {
11614 var a = [];
11615
11616 while (count--) {
11617 a.push(0);
11618 }
11619
11620 return a;
11621 };
11622
11623 var makeTable = function (metaTable) {
11624 return Object.keys(metaTable).reduce(function (obj, key) {
11625 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
11626 return arr.concat(part);
11627 }, []));
11628 return obj;
11629 }, {});
11630 };
11631
11632 var silence;
11633
11634 var silence_1 = function () {
11635 if (!silence) {
11636 // Frames-of-silence to use for filling in missing AAC frames
11637 var coneOfSilence = {
11638 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
11639 88200: [highPrefix, [231], zeroFill(170), [56]],
11640 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
11641 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
11642 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
11643 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
11644 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
11645 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
11646 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
11647 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
11648 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
11649 };
11650 silence = makeTable(coneOfSilence);
11651 }
11652
11653 return silence;
11654 };
11655 /**
11656 * mux.js
11657 *
11658 * Copyright (c) Brightcove
11659 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11660 */
11661
11662
11663 var ONE_SECOND_IN_TS$4 = 90000,
11664 // 90kHz clock
11665 secondsToVideoTs,
11666 secondsToAudioTs,
11667 videoTsToSeconds,
11668 audioTsToSeconds,
11669 audioTsToVideoTs,
11670 videoTsToAudioTs,
11671 metadataTsToSeconds;
11672
11673 secondsToVideoTs = function (seconds) {
11674 return seconds * ONE_SECOND_IN_TS$4;
11675 };
11676
11677 secondsToAudioTs = function (seconds, sampleRate) {
11678 return seconds * sampleRate;
11679 };
11680
11681 videoTsToSeconds = function (timestamp) {
11682 return timestamp / ONE_SECOND_IN_TS$4;
11683 };
11684
11685 audioTsToSeconds = function (timestamp, sampleRate) {
11686 return timestamp / sampleRate;
11687 };
11688
11689 audioTsToVideoTs = function (timestamp, sampleRate) {
11690 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
11691 };
11692
11693 videoTsToAudioTs = function (timestamp, sampleRate) {
11694 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
11695 };
11696 /**
11697 * Adjust ID3 tag or caption timing information by the timeline pts values
11698 * (if keepOriginalTimestamps is false) and convert to seconds
11699 */
11700
11701
11702 metadataTsToSeconds = function (timestamp, timelineStartPts, keepOriginalTimestamps) {
11703 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
11704 };
11705
11706 var clock$2 = {
11707 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
11708 secondsToVideoTs: secondsToVideoTs,
11709 secondsToAudioTs: secondsToAudioTs,
11710 videoTsToSeconds: videoTsToSeconds,
11711 audioTsToSeconds: audioTsToSeconds,
11712 audioTsToVideoTs: audioTsToVideoTs,
11713 videoTsToAudioTs: videoTsToAudioTs,
11714 metadataTsToSeconds: metadataTsToSeconds
11715 };
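 // Worked examples (illustrative) on the 90kHz MPEG-TS clock:
 //
 //   clock$2.secondsToVideoTs(1);            // 90000 ticks
 //   clock$2.videoTsToSeconds(45000);        // 0.5 seconds
 //   clock$2.audioTsToVideoTs(44100, 44100); // 90000 (one second of 44.1kHz audio)
 //   clock$2.videoTsToAudioTs(90000, 48000); // 48000 (one second at 48kHz)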
11716 /**
11717 * mux.js
11718 *
11719 * Copyright (c) Brightcove
11720 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11721 */
11722
11723 var coneOfSilence = silence_1;
11724 var clock$1 = clock$2;
11725 /**
11726 * Sum the `byteLength` properties of the data in each AAC frame
11727 */
11728
11729 var sumFrameByteLengths = function (array) {
11730 var i,
11731 currentObj,
11732 sum = 0; // sum the byteLength of each frame's data in the array
11733
11734 for (i = 0; i < array.length; i++) {
11735 currentObj = array[i];
11736 sum += currentObj.data.byteLength;
11737 }
11738
11739 return sum;
11740 }; // Possibly pad (prefix) the audio track with silence if appending this track
11741 // would lead to the introduction of a gap in the audio buffer
11742
11743
11744 var prefixWithSilence = function (track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
11745 var baseMediaDecodeTimeTs,
11746 frameDuration = 0,
11747 audioGapDuration = 0,
11748 audioFillFrameCount = 0,
11749 audioFillDuration = 0,
11750 silentFrame,
11751 i,
11752 firstFrame;
11753
11754 if (!frames.length) {
11755 return;
11756 }
11757
11758 baseMediaDecodeTimeTs = clock$1.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
11759
11760 frameDuration = Math.ceil(clock$1.ONE_SECOND_IN_TS / (track.samplerate / 1024));
11761
11762 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
11763 // insert the shortest possible amount (audio gap or audio to video gap)
11764 audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
11765
11766 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
11767 audioFillDuration = audioFillFrameCount * frameDuration;
11768 } // don't attempt to fill gaps smaller than a single frame or larger
11769 // than a half second
11770
11771
11772 if (audioFillFrameCount < 1 || audioFillDuration > clock$1.ONE_SECOND_IN_TS / 2) {
11773 return;
11774 }
11775
11776 silentFrame = coneOfSilence()[track.samplerate];
11777
11778 if (!silentFrame) {
11779 // we don't have a silent frame pregenerated for the sample rate, so use a frame
11780 // from the content instead
11781 silentFrame = frames[0].data;
11782 }
11783
11784 for (i = 0; i < audioFillFrameCount; i++) {
11785 firstFrame = frames[0];
11786 frames.splice(0, 0, {
11787 data: silentFrame,
11788 dts: firstFrame.dts - frameDuration,
11789 pts: firstFrame.pts - frameDuration
11790 });
11791 }
11792
11793 track.baseMediaDecodeTime -= Math.floor(clock$1.videoTsToAudioTs(audioFillDuration, track.samplerate));
11794 return audioFillDuration;
11795 }; // If the audio segment extends before the earliest allowed dts
11796 // value, remove AAC frames until the segment starts at or after the earliest
11797 // allowed DTS so that we don't end up with a negative
11798 // baseMediaDecodeTime for the audio track
11799
11800
11801 var trimAdtsFramesByEarliestDts = function (adtsFrames, track, earliestAllowedDts) {
11802 if (track.minSegmentDts >= earliestAllowedDts) {
11803 return adtsFrames;
11804 } // We will need to recalculate the earliest segment Dts
11805
11806
11807 track.minSegmentDts = Infinity;
11808 return adtsFrames.filter(function (currentFrame) {
11809 // If this is an allowed frame, keep it and record its DTS
11810 if (currentFrame.dts >= earliestAllowedDts) {
11811 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
11812 track.minSegmentPts = track.minSegmentDts;
11813 return true;
11814 } // Otherwise, discard it
11815
11816
11817 return false;
11818 });
11819 }); // generate the track's sample table from an array of frames
11820
11821
11822 var generateSampleTable = function (frames) {
11823 var i,
11824 currentFrame,
11825 samples = [];
11826
11827 for (i = 0; i < frames.length; i++) {
11828 currentFrame = frames[i];
11829 samples.push({
11830 size: currentFrame.data.byteLength,
11831 duration: 1024 // For AAC audio, every frame contains 1024 samples
11832
11833 });
11834 }
11835
11836 return samples;
11837 }; // generate the track's raw mdat data from an array of frames
11838
11839
11840 var concatenateFrameData = function (frames) {
11841 var i,
11842 currentFrame,
11843 dataOffset = 0,
11844 data = new Uint8Array(sumFrameByteLengths(frames));
11845
11846 for (i = 0; i < frames.length; i++) {
11847 currentFrame = frames[i];
11848 data.set(currentFrame.data, dataOffset);
11849 dataOffset += currentFrame.data.byteLength;
11850 }
11851
11852 return data;
11853 };
11854
11855 var audioFrameUtils$1 = {
11856 prefixWithSilence: prefixWithSilence,
11857 trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
11858 generateSampleTable: generateSampleTable,
11859 concatenateFrameData: concatenateFrameData
11860 };
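 // Worked example (illustrative) for prefixWithSilence: at 44100 Hz each AAC
 // frame holds 1024 samples, so its duration on the 90kHz clock is
 // Math.ceil(90000 / (44100 / 1024)) = 2090 ticks (~23.2ms). A 10000-tick gap
 // would then be padded with Math.floor(10000 / 2090) = 4 silent frames
 // (8360 ticks); any sub-frame remainder is left unfilled.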
11861 /**
11862 * mux.js
11863 *
11864 * Copyright (c) Brightcove
11865 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11866 */
11867
11868 var ONE_SECOND_IN_TS$3 = clock$2.ONE_SECOND_IN_TS;
11869 /**
11870 * Store information about the start and end of the track and the
11871 * duration for each frame/sample we process in order to calculate
11872 * the baseMediaDecodeTime
11873 */
11874
11875 var collectDtsInfo = function (track, data) {
11876 if (typeof data.pts === 'number') {
11877 if (track.timelineStartInfo.pts === undefined) {
11878 track.timelineStartInfo.pts = data.pts;
11879 }
11880
11881 if (track.minSegmentPts === undefined) {
11882 track.minSegmentPts = data.pts;
11883 } else {
11884 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
11885 }
11886
11887 if (track.maxSegmentPts === undefined) {
11888 track.maxSegmentPts = data.pts;
11889 } else {
11890 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
11891 }
11892 }
11893
11894 if (typeof data.dts === 'number') {
11895 if (track.timelineStartInfo.dts === undefined) {
11896 track.timelineStartInfo.dts = data.dts;
11897 }
11898
11899 if (track.minSegmentDts === undefined) {
11900 track.minSegmentDts = data.dts;
11901 } else {
11902 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
11903 }
11904
11905 if (track.maxSegmentDts === undefined) {
11906 track.maxSegmentDts = data.dts;
11907 } else {
11908 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
11909 }
11910 }
11911 };
11912 /**
11913 * Clear values used to calculate the baseMediaDecodeTime between
11914 * tracks
11915 */
11916
11917
11918 var clearDtsInfo = function (track) {
11919 delete track.minSegmentDts;
11920 delete track.maxSegmentDts;
11921 delete track.minSegmentPts;
11922 delete track.maxSegmentPts;
11923 };
11924 /**
11925 * Calculate the track's baseMediaDecodeTime based on the earliest
11926 * DTS the transmuxer has ever seen and the minimum DTS for the
11927 * current track
11928 * @param track {object} track metadata configuration
11929 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
11930 * in the source; false to adjust the first segment to start at 0.
11931 */
11932
11933
11934 var calculateTrackBaseMediaDecodeTime = function (track, keepOriginalTimestamps) {
11935 var baseMediaDecodeTime,
11936 scale,
11937 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
11938
11939 if (!keepOriginalTimestamps) {
11940 minSegmentDts -= track.timelineStartInfo.dts;
11941 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
11942 // we want the start of the first segment to be placed
11943
11944
11945 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
11946
11947 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
11948
11949 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
11950
11951 if (track.type === 'audio') {
11952 // Audio has a different clock equal to the sampling_rate so we need to
11953 // scale the PTS values into the clock rate of the track
11954 scale = track.samplerate / ONE_SECOND_IN_TS$3;
11955 baseMediaDecodeTime *= scale;
11956 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
11957 }
11958
11959 return baseMediaDecodeTime;
11960 };
11961
11962 var trackDecodeInfo$1 = {
11963 clearDtsInfo: clearDtsInfo,
11964 calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
11965 collectDtsInfo: collectDtsInfo
11966 };
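 // Worked example (illustrative) for calculateTrackBaseMediaDecodeTime: an
 // audio track at 44100 Hz whose segment starts 90000 ticks (one second)
 // after timelineStartInfo.dts, with timelineStartInfo.baseMediaDecodeTime 0
 // and keepOriginalTimestamps false, yields
 // Math.floor(90000 * (44100 / 90000)) = 44100, i.e. exactly one second in
 // the audio track's own timescale.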
11967 /**
11968 * mux.js
11969 *
11970 * Copyright (c) Brightcove
11971 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11972 *
11973 * Reads in-band caption information from a video elementary
11974 * stream. Captions must follow the CEA-708 standard for injection
11975 * into MPEG-2 transport streams.
11976 * @see https://en.wikipedia.org/wiki/CEA-708
11977 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
11978 */
11979 // SEI NAL units have a payload type field to indicate how they are to be
11980 // interpreted. CEA-708 caption content is always transmitted with
11981 // payload type 0x04.
11982
11983 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
11984 RBSP_TRAILING_BITS = 128;
11985 /**
11986 * Parse a supplemental enhancement information (SEI) NAL unit.
11987 * Stops parsing once a message of type ITU T T35 has been found.
11988 *
11989 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
11990 * @return {object} the parsed SEI payload
11991 * @see Rec. ITU-T H.264, 7.3.2.3.1
11992 */
11993
11994 var parseSei = function (bytes) {
11995 var i = 0,
11996 result = {
11997 payloadType: -1,
11998 payloadSize: 0
11999 },
12000 payloadType = 0,
12001 payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
12002
12003 while (i < bytes.byteLength) {
12004 // stop once we have hit the end of the sei_rbsp
12005 if (bytes[i] === RBSP_TRAILING_BITS) {
12006 break;
12007 } // Parse payload type
12008
12009
12010 while (bytes[i] === 0xFF) {
12011 payloadType += 255;
12012 i++;
12013 }
12014
12015 payloadType += bytes[i++]; // Parse payload size
12016
12017 while (bytes[i] === 0xFF) {
12018 payloadSize += 255;
12019 i++;
12020 }
12021
12022 payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
12023 // there can only ever be one caption message in a frame's sei
12024
12025 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
12026 var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
12027
12028 if (userIdentifier === 'GA94') {
12029 result.payloadType = payloadType;
12030 result.payloadSize = payloadSize;
12031 result.payload = bytes.subarray(i, i + payloadSize);
12032 break;
12033 } else {
12034 result.payload = void 0;
12035 }
12036 } // skip the payload and parse the next message
12037
12038
12039 i += payloadSize;
12040 payloadType = 0;
12041 payloadSize = 0;
12042 }
12043
12044 return result;
12045 }; // see ANSI/SCTE 128-1 (2013), section 8.1
12046
12047
12048 var parseUserData = function (sei) {
12049 // itu_t_t35_country_code must be 181 (United States) for
12050 // captions
12051 if (sei.payload[0] !== 181) {
12052 return null;
12053 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
12054
12055
12056 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
12057 return null;
12058 } // the user_identifier should be "GA94" to indicate ATSC1 data
12059
12060
12061 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
12062 return null;
12063 } // finally, user_data_type_code should be 0x03 for caption data
12064
12065
12066 if (sei.payload[7] !== 0x03) {
12067 return null;
12068 } // return the user_data_type_structure and strip the trailing
12069 // marker bits
12070
12071
12072 return sei.payload.subarray(8, sei.payload.length - 1);
12073 }; // see CEA-708-D, section 4.4
12074
12075
12076 var parseCaptionPackets = function (pts, userData) {
12077 var results = [],
12078 i,
12079 count,
12080 offset,
12081 data; // if this is just filler, return immediately
12082
12083 if (!(userData[0] & 0x40)) {
12084 return results;
12085 } // parse out the cc_data_1 and cc_data_2 fields
12086
12087
12088 count = userData[0] & 0x1f;
12089
12090 for (i = 0; i < count; i++) {
12091 offset = i * 3;
12092 data = {
12093 type: userData[offset + 2] & 0x03,
12094 pts: pts
12095 }; // capture cc data when cc_valid is 1
12096
12097 if (userData[offset + 2] & 0x04) {
12098 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
12099 results.push(data);
12100 }
12101 }
12102
12103 return results;
12104 };
12105
12106 var discardEmulationPreventionBytes$1 = function (data) {
12107 var length = data.byteLength,
12108 emulationPreventionBytesPositions = [],
12109 i = 1,
12110 newLength,
12111 newData; // Find all `Emulation Prevention Bytes`
12112
12113 while (i < length - 2) {
12114 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
12115 emulationPreventionBytesPositions.push(i + 2);
12116 i += 2;
12117 } else {
12118 i++;
12119 }
12120 } // If no Emulation Prevention Bytes were found just return the original
12121 // array
12122
12123
12124 if (emulationPreventionBytesPositions.length === 0) {
12125 return data;
12126 } // Create a new array to hold the NAL unit data
12127
12128
12129 newLength = length - emulationPreventionBytesPositions.length;
12130 newData = new Uint8Array(newLength);
12131 var sourceIndex = 0;
12132
12133 for (i = 0; i < newLength; sourceIndex++, i++) {
12134 if (sourceIndex === emulationPreventionBytesPositions[0]) {
12135 // Skip this byte
12136 sourceIndex++; // Remove this position index
12137
12138 emulationPreventionBytesPositions.shift();
12139 }
12140
12141 newData[i] = data[sourceIndex];
12142 }
12143
12144 return newData;
12145 }; // exports
12146
12147
12148 var captionPacketParser = {
12149 parseSei: parseSei,
12150 parseUserData: parseUserData,
12151 parseCaptionPackets: parseCaptionPackets,
12152 discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
12153 USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
12154 };
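 // Usage sketch (illustrative): extracting CEA-608/708 byte pairs from one
 // SEI NAL unit. `escapedRBSP` (SEI payload bytes with emulation prevention
 // bytes already removed) and `pts` are hypothetical inputs.
 //
 //   var sei = captionPacketParser.parseSei(escapedRBSP);
 //   if (sei.payload && sei.payloadType === captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
 //     var userData = captionPacketParser.parseUserData(sei);
 //     if (userData) {
 //       var packets = captionPacketParser.parseCaptionPackets(pts, userData);
 //       // each packet: { type: 0-3, pts: pts, ccData: 16-bit byte pair }
 //     }
 //   }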
12155 /**
12156 * mux.js
12157 *
12158 * Copyright (c) Brightcove
12159 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
12160 *
12161 * Reads in-band caption information from a video elementary
12162 * stream. Captions must follow the CEA-708 standard for injection
12163 * into MPEG-2 transport streams.
12164 * @see https://en.wikipedia.org/wiki/CEA-708
12165 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
12166 */
12167 // Link To Transport
12168 // -----------------
12169
12170 var Stream$7 = stream;
12171 var cea708Parser = captionPacketParser;
12172
12173 var CaptionStream$2 = function (options) {
12174 options = options || {};
12175 CaptionStream$2.prototype.init.call(this); // parse708captions flag, default to true
12176
12177 this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
12178 this.captionPackets_ = [];
12179 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
12180 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
12181 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
12182 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
12183 ];
12184
12185 if (this.parse708captions_) {
12186 this.cc708Stream_ = new Cea708Stream({
12187 captionServices: options.captionServices
12188 }); // eslint-disable-line no-use-before-define
12189 }
12190
12191 this.reset(); // forward data and done events from CCs to this CaptionStream
12192
12193 this.ccStreams_.forEach(function (cc) {
12194 cc.on('data', this.trigger.bind(this, 'data'));
12195 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
12196 cc.on('done', this.trigger.bind(this, 'done'));
12197 }, this);
12198
12199 if (this.parse708captions_) {
12200 this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
12201 this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
12202 this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
12203 }
12204 };
12205
12206 CaptionStream$2.prototype = new Stream$7();
12207
12208 CaptionStream$2.prototype.push = function (event) {
12209 var sei, userData, newCaptionPackets; // only examine SEI NALs
12210
12211 if (event.nalUnitType !== 'sei_rbsp') {
12212 return;
12213 } // parse the sei
12214
12215
12216 sei = cea708Parser.parseSei(event.escapedRBSP); // no payload data, skip
12217
12218 if (!sei.payload) {
12219 return;
12220 } // ignore everything but user_data_registered_itu_t_t35
12221
12222
12223 if (sei.payloadType !== cea708Parser.USER_DATA_REGISTERED_ITU_T_T35) {
12224 return;
12225 } // parse out the user data payload
12226
12227
12228 userData = cea708Parser.parseUserData(sei); // ignore unrecognized userData
12229
12230 if (!userData) {
12231 return;
12232 } // Sometimes, the same segment # will be downloaded twice. To stop the
12233 // caption data from being processed twice, we track the latest dts we've
12234 // received and ignore everything with a dts before that. However, since
12235 // data for a specific dts can be split across packets on either side of
12236 // a segment boundary, we need to make sure we *don't* ignore the packets
12237 // from the *next* segment that have dts === this.latestDts_. By constantly
12238 // tracking the number of packets received with dts === this.latestDts_, we
12239 // know how many should be ignored once we start receiving duplicates.
12240
12241
12242 if (event.dts < this.latestDts_) {
12243 // We've started getting older data, so set the flag.
12244 this.ignoreNextEqualDts_ = true;
12245 return;
12246 } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
12247 this.numSameDts_--;
12248
12249 if (!this.numSameDts_) {
12250 // We've received the last duplicate packet, time to start processing again
12251 this.ignoreNextEqualDts_ = false;
12252 }
12253
12254 return;
12255 } // parse out CC data packets and save them for later
12256
12257
12258 newCaptionPackets = cea708Parser.parseCaptionPackets(event.pts, userData);
12259 this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
12260
12261 if (this.latestDts_ !== event.dts) {
12262 this.numSameDts_ = 0;
12263 }
12264
12265 this.numSameDts_++;
12266 this.latestDts_ = event.dts;
12267 };
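
 // Worked example of the duplicate-segment guard above (illustrative): if
 // segment A ends with two SEI packets at dts === 100, numSameDts_ reaches 2
 // with latestDts_ === 100. When the same segment is re-downloaded, its
 // earlier packets (dts < 100) set ignoreNextEqualDts_, and the next two
 // packets with dts === 100 are skipped while numSameDts_ counts back down
 // to 0, after which normal processing resumes.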
12268
12269 CaptionStream$2.prototype.flushCCStreams = function (flushType) {
12270 this.ccStreams_.forEach(function (cc) {
12271 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
12272 }, this);
12273 };
12274
12275 CaptionStream$2.prototype.flushStream = function (flushType) {
12276 // make sure we actually parsed captions before proceeding
12277 if (!this.captionPackets_.length) {
12278 this.flushCCStreams(flushType);
12279 return;
12280 } // In Chrome, the Array#sort function is not stable so add a
12281 // presortIndex that we can use to ensure a stable sort
12282
12283
12284 this.captionPackets_.forEach(function (elem, idx) {
12285 elem.presortIndex = idx;
12286 }); // sort caption byte-pairs based on their PTS values
12287
12288 this.captionPackets_.sort(function (a, b) {
12289 if (a.pts === b.pts) {
12290 return a.presortIndex - b.presortIndex;
12291 }
12292
12293 return a.pts - b.pts;
12294 });
12295 this.captionPackets_.forEach(function (packet) {
12296 if (packet.type < 2) {
12297 // Dispatch packet to the right Cea608Stream
12298 this.dispatchCea608Packet(packet);
12299 } else {
12300 // Dispatch packet to the Cea708Stream
12301 this.dispatchCea708Packet(packet);
12302 }
12303 }, this);
12304 this.captionPackets_.length = 0;
12305 this.flushCCStreams(flushType);
12306 };
12307
12308 CaptionStream$2.prototype.flush = function () {
12309 return this.flushStream('flush');
12310 }; // Only called if handling partial data
12311
12312
12313 CaptionStream$2.prototype.partialFlush = function () {
12314 return this.flushStream('partialFlush');
12315 };
12316
12317 CaptionStream$2.prototype.reset = function () {
12318 this.latestDts_ = null;
12319 this.ignoreNextEqualDts_ = false;
12320 this.numSameDts_ = 0;
12321 this.activeCea608Channel_ = [null, null];
12322 this.ccStreams_.forEach(function (ccStream) {
12323 ccStream.reset();
12324 });
12325 }; // From the CEA-608 spec:
12326
12327 /*
12328 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
12329 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
12330 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
12331 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
12332 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
12333 * to switch to captioning or Text.
12334 */
12335 // With that in mind, we ignore any data between an XDS control code and a
12336 // subsequent closed-captioning control code.
12337
12338
12339 CaptionStream$2.prototype.dispatchCea608Packet = function (packet) {
12340 // NOTE: packet.type is the CEA608 field
12341 if (this.setsTextOrXDSActive(packet)) {
12342 this.activeCea608Channel_[packet.type] = null;
12343 } else if (this.setsChannel1Active(packet)) {
12344 this.activeCea608Channel_[packet.type] = 0;
12345 } else if (this.setsChannel2Active(packet)) {
12346 this.activeCea608Channel_[packet.type] = 1;
12347 }
12348
12349 if (this.activeCea608Channel_[packet.type] === null) {
12350 // If we haven't received anything to set the active channel, or the
12351 // packets are Text/XDS data, discard the data; we don't want jumbled
12352 // captions
12353 return;
12354 }
12355
12356 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
12357 };
12358
12359 CaptionStream$2.prototype.setsChannel1Active = function (packet) {
12360 return (packet.ccData & 0x7800) === 0x1000;
12361 };
12362
12363 CaptionStream$2.prototype.setsChannel2Active = function (packet) {
12364 return (packet.ccData & 0x7800) === 0x1800;
12365 };
12366
12367 CaptionStream$2.prototype.setsTextOrXDSActive = function (packet) {
12368 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
12369 };
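
 // Illustrative bit check (not part of the original source): the 0x7800 mask
 // keeps bits 11-14 of the first CC byte while ignoring its parity bit, so
 // channel-1 control codes (first byte 0x10-0x17) yield 0x1000 and channel-2
 // codes (first byte 0x18-0x1f) yield 0x1800:
 //
 //   (0x1420 & 0x7800) === 0x1000; // RCL, channel 1 -> setsChannel1Active
 //   (0x1c20 & 0x7800) === 0x1800; // RCL, channel 2 -> setsChannel2Active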
12370
12371 CaptionStream$2.prototype.dispatchCea708Packet = function (packet) {
12372 if (this.parse708captions_) {
12373 this.cc708Stream_.push(packet);
12374 }
12375 }; // ----------------------
12376 // Session to Application
12377 // ----------------------
12378 // This hash maps special and extended character codes to their
12379 // proper Unicode equivalent. The first one-byte key is just a
12380 // non-standard character code. The two-byte keys that follow are
12381 // the extended CEA708 character codes, along with the preceding
12382 // 0x10 extended character byte to distinguish these codes from
12383 // non-extended character codes. Every CEA708 character code that
12384 // is not in this object maps directly to a standard unicode
12385 // character code.
12386 // The transparent space and non-breaking transparent space are
12387 // technically not fully supported since there is no code to
12388 // make them transparent, so they have normal non-transparent
12389 // stand-ins.
12390 // The special closed caption (CC) character isn't a standard
12391 // unicode character, so a fairly similar unicode character was
12392 // chosen in its place.
12393
12394
12395 var CHARACTER_TRANSLATION_708 = {
12396 0x7f: 0x266a,
12397 // ♪
12398 0x1020: 0x20,
12399 // Transparent Space
12400 0x1021: 0xa0,
12401 // Non-breaking Transparent Space
12402 0x1025: 0x2026,
12403 // …
12404 0x102a: 0x0160,
12405 // Š
12406 0x102c: 0x0152,
12407 // Œ
12408 0x1030: 0x2588,
12409 // █
12410 0x1031: 0x2018,
12411 // ‘
12412 0x1032: 0x2019,
12413 // ’
12414 0x1033: 0x201c,
12415 // “
12416 0x1034: 0x201d,
12417 // ”
12418 0x1035: 0x2022,
12419 // •
12420 0x1039: 0x2122,
12421 // ™
12422 0x103a: 0x0161,
12423 // š
12424 0x103c: 0x0153,
12425 // œ
12426 0x103d: 0x2120,
12427 // ℠
12428 0x103f: 0x0178,
12429 // Ÿ
12430 0x1076: 0x215b,
12431 // ⅛
12432 0x1077: 0x215c,
12433 // ⅜
12434 0x1078: 0x215d,
12435 // ⅝
12436 0x1079: 0x215e,
12437 // ⅞
12438 0x107a: 0x23d0,
12439 // ⏐
12440 0x107b: 0x23a4,
12441 // ⎤
12442 0x107c: 0x23a3,
12443 // ⎣
12444 0x107d: 0x23af,
12445 // ⎯
12446 0x107e: 0x23a6,
12447 // ⎦
12448 0x107f: 0x23a1,
12449 // ⎡
12450 0x10a0: 0x3138 // ㄸ (CC char)
12451
12452 };
12453
12454 var get708CharFromCode = function (code) {
12455 var newCode = CHARACTER_TRANSLATION_708[code] || code;
12456
12457 if (code & 0x1000 && code === newCode) {
12458 // Invalid extended code
12459 return '';
12460 }
12461
12462 return String.fromCharCode(newCode);
12463 };
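
 // Minimal usage sketch (illustrative): standard codes pass through, known
 // extended codes are translated, and unmapped extended codes are dropped.
 //
 //   get708CharFromCode(0x41);   // 'A' (standard, passes through)
 //   get708CharFromCode(0x1025); // '…' (extended, translated above)
 //   get708CharFromCode(0x1001); // ''  (extended with no mapping)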
12464
12465 var within708TextBlock = function (b) {
12466 return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
12467 };
12468
12469 var Cea708Window = function (windowNum) {
12470 this.windowNum = windowNum;
12471 this.reset();
12472 };
12473
12474 Cea708Window.prototype.reset = function () {
12475 this.clearText();
12476 this.pendingNewLine = false;
12477 this.winAttr = {};
12478 this.penAttr = {};
12479 this.penLoc = {};
12480 this.penColor = {}; // These default values are arbitrary,
12481 // defineWindow will usually override them
12482
12483 this.visible = 0;
12484 this.rowLock = 0;
12485 this.columnLock = 0;
12486 this.priority = 0;
12487 this.relativePositioning = 0;
12488 this.anchorVertical = 0;
12489 this.anchorHorizontal = 0;
12490 this.anchorPoint = 0;
12491 this.rowCount = 1;
12492 this.virtualRowCount = this.rowCount + 1;
12493 this.columnCount = 41;
12494 this.windowStyle = 0;
12495 this.penStyle = 0;
12496 };
12497
12498 Cea708Window.prototype.getText = function () {
12499 return this.rows.join('\n');
12500 };
12501
12502 Cea708Window.prototype.clearText = function () {
12503 this.rows = [''];
12504 this.rowIdx = 0;
12505 };
12506
12507 Cea708Window.prototype.newLine = function (pts) {
12508 if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
12509 this.beforeRowOverflow(pts);
12510 }
12511
12512 if (this.rows.length > 0) {
12513 this.rows.push('');
12514 this.rowIdx++;
12515 } // Show all virtual rows since there's no visible scrolling
12516
12517
12518 while (this.rows.length > this.virtualRowCount) {
12519 this.rows.shift();
12520 this.rowIdx--;
12521 }
12522 };
12523
12524 Cea708Window.prototype.isEmpty = function () {
12525 if (this.rows.length === 0) {
12526 return true;
12527 } else if (this.rows.length === 1) {
12528 return this.rows[0] === '';
12529 }
12530
12531 return false;
12532 };
12533
12534 Cea708Window.prototype.addText = function (text) {
12535 this.rows[this.rowIdx] += text;
12536 };
12537
12538 Cea708Window.prototype.backspace = function () {
12539 if (!this.isEmpty()) {
12540 var row = this.rows[this.rowIdx];
12541 this.rows[this.rowIdx] = row.slice(0, -1);
12542 }
12543 };
12544
12545 var Cea708Service = function (serviceNum, encoding, stream) {
12546 this.serviceNum = serviceNum;
12547 this.text = '';
12548 this.currentWindow = new Cea708Window(-1);
12549 this.windows = [];
12550 this.stream = stream; // Try to setup a TextDecoder if an `encoding` value was provided
12551
12552 if (typeof encoding === 'string') {
12553 this.createTextDecoder(encoding);
12554 }
12555 };
12556 /**
12557 * Initialize service windows
12558 * Must be run before service use
12559 *
12560 * @param {Integer} pts PTS value
12561 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
12562 */
12563
12564
12565 Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
12566 this.startPts = pts;
12567
12568 for (var win = 0; win < 8; win++) {
12569 this.windows[win] = new Cea708Window(win);
12570
12571 if (typeof beforeRowOverflow === 'function') {
12572 this.windows[win].beforeRowOverflow = beforeRowOverflow;
12573 }
12574 }
12575 };
12576 /**
12577 * Set current window of service to be affected by commands
12578 *
12579 * @param {Integer} windowNum Window number
12580 */
12581
12582
12583 Cea708Service.prototype.setCurrentWindow = function (windowNum) {
12584 this.currentWindow = this.windows[windowNum];
12585 };
12586 /**
12587 * Try to create a TextDecoder if it is natively supported
12588 */
12589
12590
12591 Cea708Service.prototype.createTextDecoder = function (encoding) {
12592 if (typeof TextDecoder === 'undefined') {
12593 this.stream.trigger('log', {
12594 level: 'warn',
12595 message: 'The `encoding` option is unsupported without TextDecoder support'
12596 });
12597 } else {
12598 try {
12599 this.textDecoder_ = new TextDecoder(encoding);
12600 } catch (error) {
12601 this.stream.trigger('log', {
12602 level: 'warn',
12603 message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
12604 });
12605 }
12606 }
12607 };
12608
12609 var Cea708Stream = function (options) {
12610 options = options || {};
12611 Cea708Stream.prototype.init.call(this);
12612 var self = this;
12613 var captionServices = options.captionServices || {};
12614 var captionServiceEncodings = {};
12615 var serviceProps; // Get service encodings from captionServices option block
12616
12617 Object.keys(captionServices).forEach(serviceName => {
12618 serviceProps = captionServices[serviceName];
12619
12620 if (/^SERVICE/.test(serviceName)) {
12621 captionServiceEncodings[serviceName] = serviceProps.encoding;
12622 }
12623 });
12624 this.serviceEncodings = captionServiceEncodings;
12625 this.current708Packet = null;
12626 this.services = {};
12627
12628 this.push = function (packet) {
12629 if (packet.type === 3) {
12630 // 708 packet start
12631 self.new708Packet();
12632 self.add708Bytes(packet);
12633 } else {
12634 if (self.current708Packet === null) {
12635 // This should only happen at the start of a file if there's no packet start.
12636 self.new708Packet();
12637 }
12638
12639 self.add708Bytes(packet);
12640 }
12641 };
12642 };
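
 // Note (added for clarity): packet.type here is the cc_type field from the
 // caption packets -- 0 and 1 carry CEA-608 field-1/field-2 byte pairs, 2
 // carries DTVCC (708) packet data, and 3 marks the start of a new DTVCC
 // packet, which is why type 3 closes out the current packet above.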
12643
12644 Cea708Stream.prototype = new Stream$7();
12645 /**
12646 * Push current 708 packet, create new 708 packet.
12647 */
12648
12649 Cea708Stream.prototype.new708Packet = function () {
12650 if (this.current708Packet !== null) {
12651 this.push708Packet();
12652 }
12653
12654 this.current708Packet = {
12655 data: [],
12656 ptsVals: []
12657 };
12658 };
12659 /**
12660 * Add pts and both bytes from packet into current 708 packet.
12661 */
12662
12663
12664 Cea708Stream.prototype.add708Bytes = function (packet) {
12665 var data = packet.ccData;
12666 var byte0 = data >>> 8;
12667 var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
12668 // that service blocks will always line up with byte pairs.
12669
12670 this.current708Packet.ptsVals.push(packet.pts);
12671 this.current708Packet.data.push(byte0);
12672 this.current708Packet.data.push(byte1);
12673 };
12674 /**
12675 * Parse completed 708 packet into service blocks and push each service block.
12676 */
12677
12678
12679 Cea708Stream.prototype.push708Packet = function () {
12680 var packet708 = this.current708Packet;
12681 var packetData = packet708.data;
12682 var serviceNum = null;
12683 var blockSize = null;
12684 var i = 0;
12685 var b = packetData[i++];
12686 packet708.seq = b >> 6;
12687 packet708.sizeCode = b & 0x3f; // 0b00111111;
12688
12689 for (; i < packetData.length; i++) {
12690 b = packetData[i++];
12691 serviceNum = b >> 5;
12692 blockSize = b & 0x1f; // 0b00011111
12693
12694 if (serviceNum === 7 && blockSize > 0) {
12695 // Extended service num
12696 b = packetData[i++];
12697 serviceNum = b;
12698 }
12699
12700 this.pushServiceBlock(serviceNum, i, blockSize);
12701
12702 if (blockSize > 0) {
12703 i += blockSize - 1;
12704 }
12705 }
12706 };
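
 // Worked example (illustrative): a 708 packet beginning [0xc2, 0x25, ...]
 // decodes as
 //   0xc2 -> seq = 0xc2 >> 6 = 3, sizeCode = 0xc2 & 0x3f = 2
 //   0x25 -> serviceNum = 0x25 >> 5 = 1, blockSize = 0x25 & 0x1f = 5
 // so the following 5 bytes are handed to service 1 via pushServiceBlock.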
12707 /**
12708 * Parse service block, execute commands, read text.
12709 *
12710 * Note: While many of these commands serve important purposes,
12711 * many others just parse out the parameters or attributes, but
12712 * nothing is done with them because this is not a full and complete
12713 * implementation of the entire 708 spec.
12714 *
12715 * @param {Integer} serviceNum Service number
12716 * @param {Integer} start Start index of the 708 packet data
12717 * @param {Integer} size Block size
12718 */
12719
12720
12721 Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
12722 var b;
12723 var i = start;
12724 var packetData = this.current708Packet.data;
12725 var service = this.services[serviceNum];
12726
12727 if (!service) {
12728 service = this.initService(serviceNum, i);
12729 }
12730
12731 for (; i < start + size && i < packetData.length; i++) {
12732 b = packetData[i];
12733
12734 if (within708TextBlock(b)) {
12735 i = this.handleText(i, service);
12736 } else if (b === 0x18) {
12737 i = this.multiByteCharacter(i, service);
12738 } else if (b === 0x10) {
12739 i = this.extendedCommands(i, service);
12740 } else if (0x80 <= b && b <= 0x87) {
12741 i = this.setCurrentWindow(i, service);
12742 } else if (0x98 <= b && b <= 0x9f) {
12743 i = this.defineWindow(i, service);
12744 } else if (b === 0x88) {
12745 i = this.clearWindows(i, service);
12746 } else if (b === 0x8c) {
12747 i = this.deleteWindows(i, service);
12748 } else if (b === 0x89) {
12749 i = this.displayWindows(i, service);
12750 } else if (b === 0x8a) {
12751 i = this.hideWindows(i, service);
12752 } else if (b === 0x8b) {
12753 i = this.toggleWindows(i, service);
12754 } else if (b === 0x97) {
12755 i = this.setWindowAttributes(i, service);
12756 } else if (b === 0x90) {
12757 i = this.setPenAttributes(i, service);
12758 } else if (b === 0x91) {
12759 i = this.setPenColor(i, service);
12760 } else if (b === 0x92) {
12761 i = this.setPenLocation(i, service);
12762 } else if (b === 0x8f) {
12763 service = this.reset(i, service);
12764 } else if (b === 0x08) {
12765 // BS: Backspace
12766 service.currentWindow.backspace();
12767 } else if (b === 0x0c) {
12768 // FF: Form feed
12769 service.currentWindow.clearText();
12770 } else if (b === 0x0d) {
12771 // CR: Carriage return
12772 service.currentWindow.pendingNewLine = true;
12773 } else if (b === 0x0e) {
12774 // HCR: Horizontal carriage return
12775 service.currentWindow.clearText();
12776 } else if (b === 0x8d) {
12777 // DLY: Delay, nothing to do
12778 i++;
12779 } // unrecognized command bytes are ignored
12780 }
12781 };
12782 /**
12783 * Execute an extended command
12784 *
12785 * @param {Integer} i Current index in the 708 packet
12786 * @param {Service} service The service object to be affected
12787 * @return {Integer} New index after parsing
12788 */
12789
12790
12791 Cea708Stream.prototype.extendedCommands = function (i, service) {
12792 var packetData = this.current708Packet.data;
12793 var b = packetData[++i];
12794
12795 if (within708TextBlock(b)) {
12796 i = this.handleText(i, service, {
12797 isExtended: true
12798 });
12799 }
12800
12801 return i;
12802 };
12803 /**
12804 * Get PTS value of a given byte index
12805 *
12806 * @param {Integer} byteIndex Index of the byte
12807 * @return {Integer} PTS
12808 */
12809
12810
12811 Cea708Stream.prototype.getPts = function (byteIndex) {
12812 // There's 1 pts value per 2 bytes
12813 return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
12814 };
12815 /**
12816 * Initializes a service
12817 *
12818 * @param {Integer} serviceNum Service number
12819 * @return {Service} Initialized service object
12820 */
12821
12822
12823 Cea708Stream.prototype.initService = function (serviceNum, i) {
12824 var serviceName = 'SERVICE' + serviceNum;
12825 var self = this;
12827 var encoding;
12828
12829 if (serviceName in this.serviceEncodings) {
12830 encoding = this.serviceEncodings[serviceName];
12831 }
12832
12833 this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
12834 this.services[serviceNum].init(this.getPts(i), function (pts) {
12835 self.flushDisplayed(pts, self.services[serviceNum]);
12836 });
12837 return this.services[serviceNum];
12838 };
12839 /**
12840 * Execute text writing to current window
12841 *
12842 * @param {Integer} i Current index in the 708 packet
12843 * @param {Service} service The service object to be affected
12844 * @return {Integer} New index after parsing
12845 */
12846
12847
12848 Cea708Stream.prototype.handleText = function (i, service, options) {
12849 var isExtended = options && options.isExtended;
12850 var isMultiByte = options && options.isMultiByte;
12851 var packetData = this.current708Packet.data;
12852 var extended = isExtended ? 0x1000 : 0x0000;
12853 var currentByte = packetData[i];
12854 var nextByte = packetData[i + 1];
12855 var win = service.currentWindow;
12856 var char;
12857 var charCodeArray; // Converts an array of bytes to a unicode hex string.
12858
12859 function toHexString(byteArray) {
12860 return byteArray.map(byte => {
12861 return ('0' + (byte & 0xFF).toString(16)).slice(-2);
12862 }).join('');
12863 }
12864
12865 if (isMultiByte) {
12866 charCodeArray = [currentByte, nextByte];
12867 i++;
12868 } else {
12869 charCodeArray = [currentByte];
12870 } // Use the TextDecoder if one was created for this service
12871
12872
12873 if (service.textDecoder_ && !isExtended) {
12874 char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
12875 } else {
12876 // We assume any multi-byte char without a decoder is unicode.
12877 if (isMultiByte) {
12878 const unicode = toHexString(charCodeArray); // Takes a unicode hex string and creates a single character.
12879
12880 char = String.fromCharCode(parseInt(unicode, 16));
12881 } else {
12882 char = get708CharFromCode(extended | currentByte);
12883 }
12884 }
12885
12886 if (win.pendingNewLine && !win.isEmpty()) {
12887 win.newLine(this.getPts(i));
12888 }
12889
12890 win.pendingNewLine = false;
12891 win.addText(char);
12892 return i;
12893 };
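
 // Illustrative sketch: without a TextDecoder, a multi-byte pair is treated
 // as a UTF-16 code point. For the byte pair [0x30, 0x42]:
 //
 //   toHexString([0x30, 0x42]);                 // '3042'
 //   String.fromCharCode(parseInt('3042', 16)); // 'あ' (U+3042)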
12894 /**
12895 * Handle decoding of multibyte character
12896 *
12897 * @param {Integer} i Current index in the 708 packet
12898 * @param {Service} service The service object to be affected
12899 * @return {Integer} New index after parsing
12900 */
12901
12902
12903 Cea708Stream.prototype.multiByteCharacter = function (i, service) {
12904 var packetData = this.current708Packet.data;
12905 var firstByte = packetData[i + 1];
12906 var secondByte = packetData[i + 2];
12907
12908 if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
12909 i = this.handleText(++i, service, {
12910 isMultiByte: true
12911 });
12912 }
12913
12914 return i;
12915 };
12916 /**
12917 * Parse and execute the CW# command.
12918 *
12919 * Set the current window.
12920 *
12921 * @param {Integer} i Current index in the 708 packet
12922 * @param {Service} service The service object to be affected
12923 * @return {Integer} New index after parsing
12924 */
12925
12926
12927 Cea708Stream.prototype.setCurrentWindow = function (i, service) {
12928 var packetData = this.current708Packet.data;
12929 var b = packetData[i];
12930 var windowNum = b & 0x07;
12931 service.setCurrentWindow(windowNum);
12932 return i;
12933 };
12934 /**
12935 * Parse and execute the DF# command.
12936 *
12937 * Define a window and set it as the current window.
12938 *
12939 * @param {Integer} i Current index in the 708 packet
12940 * @param {Service} service The service object to be affected
12941 * @return {Integer} New index after parsing
12942 */
12943
12944
12945 Cea708Stream.prototype.defineWindow = function (i, service) {
12946 var packetData = this.current708Packet.data;
12947 var b = packetData[i];
12948 var windowNum = b & 0x07;
12949 service.setCurrentWindow(windowNum);
12950 var win = service.currentWindow;
12951 b = packetData[++i];
12952 win.visible = (b & 0x20) >> 5; // v
12953
12954 win.rowLock = (b & 0x10) >> 4; // rl
12955
12956 win.columnLock = (b & 0x08) >> 3; // cl
12957
12958 win.priority = b & 0x07; // p
12959
12960 b = packetData[++i];
12961 win.relativePositioning = (b & 0x80) >> 7; // rp
12962
12963 win.anchorVertical = b & 0x7f; // av
12964
12965 b = packetData[++i];
12966 win.anchorHorizontal = b; // ah
12967
12968 b = packetData[++i];
12969 win.anchorPoint = (b & 0xf0) >> 4; // ap
12970
12971 win.rowCount = b & 0x0f; // rc
12972
12973 b = packetData[++i];
12974 win.columnCount = b & 0x3f; // cc
12975
12976 b = packetData[++i];
12977 win.windowStyle = (b & 0x38) >> 3; // ws
12978
12979 win.penStyle = b & 0x07; // ps
12980 // The spec says there are (rowCount+1) "virtual rows"
12981
12982 win.virtualRowCount = win.rowCount + 1;
12983 return i;
12984 };
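
 // Worked example (illustrative): if the first attribute byte after a DF#
 // command is 0x2a (0b00101010), the window decodes as
 //   visible    = (0x2a & 0x20) >> 5 = 1
 //   rowLock    = (0x2a & 0x10) >> 4 = 0
 //   columnLock = (0x2a & 0x08) >> 3 = 1
 //   priority   =  0x2a & 0x07      = 2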
12985 /**
12986 * Parse and execute the SWA command.
12987 *
12988 * Set attributes of the current window.
12989 *
12990 * @param {Integer} i Current index in the 708 packet
12991 * @param {Service} service The service object to be affected
12992 * @return {Integer} New index after parsing
12993 */
12994
12995
12996 Cea708Stream.prototype.setWindowAttributes = function (i, service) {
12997 var packetData = this.current708Packet.data;
12998 var b = packetData[i];
12999 var winAttr = service.currentWindow.winAttr;
13000 b = packetData[++i];
13001 winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
13002
13003 winAttr.fillRed = (b & 0x30) >> 4; // fr
13004
13005 winAttr.fillGreen = (b & 0x0c) >> 2; // fg
13006
13007 winAttr.fillBlue = b & 0x03; // fb
13008
13009 b = packetData[++i];
13010 winAttr.borderType = (b & 0xc0) >> 6; // bt
13011
13012 winAttr.borderRed = (b & 0x30) >> 4; // br
13013
13014 winAttr.borderGreen = (b & 0x0c) >> 2; // bg
13015
13016 winAttr.borderBlue = b & 0x03; // bb
13017
13018 b = packetData[++i];
13019 winAttr.borderType += (b & 0x80) >> 5; // bt
13020
13021 winAttr.wordWrap = (b & 0x40) >> 6; // ww
13022
13023 winAttr.printDirection = (b & 0x30) >> 4; // pd
13024
13025 winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
13026
13027 winAttr.justify = b & 0x03; // j
13028
13029 b = packetData[++i];
13030 winAttr.effectSpeed = (b & 0xf0) >> 4; // es
13031
13032 winAttr.effectDirection = (b & 0x0c) >> 2; // ed
13033
13034 winAttr.displayEffect = b & 0x03; // de
13035
13036 return i;
13037 };
13038 /**
13039 * Gather text from all displayed windows and push a caption to output.
13040 *
13041 * @param {Integer} pts Current PTS
13042 * @param {Service} service The service object to be affected
13043 */
13044
13045
13046 Cea708Stream.prototype.flushDisplayed = function (pts, service) {
13047 var displayedText = []; // TODO: Positioning is not supported, so displaying multiple windows will not
13048 // necessarily put the text in the correct order, but sample files so far have not shown any issue.
13049
13050 for (var winId = 0; winId < 8; winId++) {
13051 if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
13052 displayedText.push(service.windows[winId].getText());
13053 }
13054 }
13055
13056 service.endPts = pts;
13057 service.text = displayedText.join('\n\n');
13058 this.pushCaption(service);
13059 service.startPts = pts;
13060 };
13061 /**
13062 * Push a caption to output if the caption contains text.
13063 *
13064 * @param {Service} service The service object to be affected
13065 */
13066
13067
13068 Cea708Stream.prototype.pushCaption = function (service) {
13069 if (service.text !== '') {
13070 this.trigger('data', {
13071 startPts: service.startPts,
13072 endPts: service.endPts,
13073 text: service.text,
13074 stream: 'cc708_' + service.serviceNum
13075 });
13076 service.text = '';
13077 service.startPts = service.endPts;
13078 }
13079 };
13080 /**
13081 * Parse and execute the DSW command.
13082 *
13083 * Set visible property of windows based on the parsed bitmask.
13084 *
13085 * @param {Integer} i Current index in the 708 packet
13086 * @param {Service} service The service object to be affected
13087 * @return {Integer} New index after parsing
13088 */
13089
13090
13091 Cea708Stream.prototype.displayWindows = function (i, service) {
13092 var packetData = this.current708Packet.data;
13093 var b = packetData[++i];
13094 var pts = this.getPts(i);
13095 this.flushDisplayed(pts, service);
13096
13097 for (var winId = 0; winId < 8; winId++) {
13098 if (b & 0x01 << winId) {
13099 service.windows[winId].visible = 1;
13100 }
13101 }
13102
13103 return i;
13104 };
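
 // Illustrative: the DSW parameter byte is a window bitmask, so b = 0x05
 // (0b00000101) marks windows 0 and 2 visible and leaves the others alone.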
13105 /**
13106 * Parse and execute the HDW command.
13107 *
13108 * Set visible property of windows based on the parsed bitmask.
13109 *
13110 * @param {Integer} i Current index in the 708 packet
13111 * @param {Service} service The service object to be affected
13112 * @return {Integer} New index after parsing
13113 */
13114
13115
13116 Cea708Stream.prototype.hideWindows = function (i, service) {
13117 var packetData = this.current708Packet.data;
13118 var b = packetData[++i];
13119 var pts = this.getPts(i);
13120 this.flushDisplayed(pts, service);
13121
13122 for (var winId = 0; winId < 8; winId++) {
13123 if (b & 0x01 << winId) {
13124 service.windows[winId].visible = 0;
13125 }
13126 }
13127
13128 return i;
13129 };
13130 /**
13131 * Parse and execute the TGW command.
13132 *
13133 * Set visible property of windows based on the parsed bitmask.
13134 *
13135 * @param {Integer} i Current index in the 708 packet
13136 * @param {Service} service The service object to be affected
13137 * @return {Integer} New index after parsing
13138 */
13139
13140
13141 Cea708Stream.prototype.toggleWindows = function (i, service) {
13142 var packetData = this.current708Packet.data;
13143 var b = packetData[++i];
13144 var pts = this.getPts(i);
13145 this.flushDisplayed(pts, service);
13146
13147 for (var winId = 0; winId < 8; winId++) {
13148 if (b & 0x01 << winId) {
13149 service.windows[winId].visible ^= 1;
13150 }
13151 }
13152
13153 return i;
13154 };
13155 /**
13156 * Parse and execute the CLW command.
13157 *
13158 * Clear text of windows based on the parsed bitmask.
13159 *
13160 * @param {Integer} i Current index in the 708 packet
13161 * @param {Service} service The service object to be affected
13162 * @return {Integer} New index after parsing
13163 */
13164
13165
13166 Cea708Stream.prototype.clearWindows = function (i, service) {
13167 var packetData = this.current708Packet.data;
13168 var b = packetData[++i];
13169 var pts = this.getPts(i);
13170 this.flushDisplayed(pts, service);
13171
13172 for (var winId = 0; winId < 8; winId++) {
13173 if (b & 0x01 << winId) {
13174 service.windows[winId].clearText();
13175 }
13176 }
13177
13178 return i;
13179 };
13180 /**
13181 * Parse and execute the DLW command.
13182 *
13183 * Re-initialize windows based on the parsed bitmask.
13184 *
13185 * @param {Integer} i Current index in the 708 packet
13186 * @param {Service} service The service object to be affected
13187 * @return {Integer} New index after parsing
13188 */
13189
13190
13191 Cea708Stream.prototype.deleteWindows = function (i, service) {
13192 var packetData = this.current708Packet.data;
13193 var b = packetData[++i];
13194 var pts = this.getPts(i);
13195 this.flushDisplayed(pts, service);
13196
13197 for (var winId = 0; winId < 8; winId++) {
13198 if (b & 0x01 << winId) {
13199 service.windows[winId].reset();
13200 }
13201 }
13202
13203 return i;
13204 };
13205 /**
13206 * Parse and execute the SPA command.
13207 *
13208 * Set pen attributes of the current window.
13209 *
13210 * @param {Integer} i Current index in the 708 packet
13211 * @param {Service} service The service object to be affected
13212 * @return {Integer} New index after parsing
13213 */
13214
13215
13216 Cea708Stream.prototype.setPenAttributes = function (i, service) {
13217 var packetData = this.current708Packet.data;
13218 var b = packetData[i];
13219 var penAttr = service.currentWindow.penAttr;
13220 b = packetData[++i];
13221 penAttr.textTag = (b & 0xf0) >> 4; // tt
13222
13223 penAttr.offset = (b & 0x0c) >> 2; // o
13224
13225 penAttr.penSize = b & 0x03; // s
13226
13227 b = packetData[++i];
13228 penAttr.italics = (b & 0x80) >> 7; // i
13229
13230 penAttr.underline = (b & 0x40) >> 6; // u
13231
13232 penAttr.edgeType = (b & 0x38) >> 3; // et
13233
13234 penAttr.fontStyle = b & 0x07; // fs
13235
13236 return i;
13237 };
13238 /**
13239 * Parse and execute the SPC command.
13240 *
13241 * Set pen color of the current window.
13242 *
13243 * @param {Integer} i Current index in the 708 packet
13244 * @param {Service} service The service object to be affected
13245 * @return {Integer} New index after parsing
13246 */
13247
13248
13249 Cea708Stream.prototype.setPenColor = function (i, service) {
13250 var packetData = this.current708Packet.data;
13251 var b = packetData[i];
13252 var penColor = service.currentWindow.penColor;
13253 b = packetData[++i];
13254 penColor.fgOpacity = (b & 0xc0) >> 6; // fo
13255
13256 penColor.fgRed = (b & 0x30) >> 4; // fr
13257
13258 penColor.fgGreen = (b & 0x0c) >> 2; // fg
13259
13260 penColor.fgBlue = b & 0x03; // fb
13261
13262 b = packetData[++i];
13263 penColor.bgOpacity = (b & 0xc0) >> 6; // bo
13264
13265 penColor.bgRed = (b & 0x30) >> 4; // br
13266
13267 penColor.bgGreen = (b & 0x0c) >> 2; // bg
13268
13269 penColor.bgBlue = b & 0x03; // bb
13270
13271 b = packetData[++i];
13272 penColor.edgeRed = (b & 0x30) >> 4; // er
13273
13274 penColor.edgeGreen = (b & 0x0c) >> 2; // eg
13275
13276 penColor.edgeBlue = b & 0x03; // eb
13277
13278 return i;
13279 };
13280 /**
13281 * Parse and execute the SPL command.
13282 *
13283 * Set pen location of the current window.
13284 *
13285 * @param {Integer} i Current index in the 708 packet
13286 * @param {Service} service The service object to be affected
13287 * @return {Integer} New index after parsing
13288 */
13289
13290
13291 Cea708Stream.prototype.setPenLocation = function (i, service) {
13292 var packetData = this.current708Packet.data;
13293 var b = packetData[i];
13294 var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
13295
13296 service.currentWindow.pendingNewLine = true;
13297 b = packetData[++i];
13298 penLoc.row = b & 0x0f; // r
13299
13300 b = packetData[++i];
13301 penLoc.column = b & 0x3f; // c
13302
13303 return i;
13304 };
13305 /**
13306 * Execute the RST command.
13307 *
13308 * Reset service to a clean slate. Re-initialize.
13309 *
13310 * @param {Integer} i Current index in the 708 packet
13311 * @param {Service} service The service object to be affected
13312 * @return {Service} Re-initialized service
13313 */
13314
13315
13316 Cea708Stream.prototype.reset = function (i, service) {
13317 var pts = this.getPts(i);
13318 this.flushDisplayed(pts, service);
13319 return this.initService(service.serviceNum, i);
13320 }; // This hash maps non-ASCII, special, and extended character codes to their
13321 // proper Unicode equivalent. The first keys that are only a single byte
13322 // are the non-standard ASCII characters, which simply map the CEA608 byte
13323 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
13324 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
13325 // can be performed regardless of the field and data channel on which the
13326 // character code was received.
13327
13328
13329 var CHARACTER_TRANSLATION = {
13330 0x2a: 0xe1,
13331 // á
13332 0x5c: 0xe9,
13333 // é
13334 0x5e: 0xed,
13335 // í
13336 0x5f: 0xf3,
13337 // ó
13338 0x60: 0xfa,
13339 // ú
13340 0x7b: 0xe7,
13341 // ç
13342 0x7c: 0xf7,
13343 // ÷
13344 0x7d: 0xd1,
13345 // Ñ
13346 0x7e: 0xf1,
13347 // ñ
13348 0x7f: 0x2588,
13349 // █
13350 0x0130: 0xae,
13351 // ®
13352 0x0131: 0xb0,
13353 // °
13354 0x0132: 0xbd,
13355 // ½
13356 0x0133: 0xbf,
13357 // ¿
13358 0x0134: 0x2122,
13359 // ™
13360 0x0135: 0xa2,
13361 // ¢
13362 0x0136: 0xa3,
13363 // £
13364 0x0137: 0x266a,
13365 // ♪
13366 0x0138: 0xe0,
13367 // à
13368 0x0139: 0xa0,
13369 // non-breaking space
13370 0x013a: 0xe8,
13371 // è
13372 0x013b: 0xe2,
13373 // â
13374 0x013c: 0xea,
13375 // ê
13376 0x013d: 0xee,
13377 // î
13378 0x013e: 0xf4,
13379 // ô
13380 0x013f: 0xfb,
13381 // û
13382 0x0220: 0xc1,
13383 // Á
13384 0x0221: 0xc9,
13385 // É
13386 0x0222: 0xd3,
13387 // Ó
13388 0x0223: 0xda,
13389 // Ú
13390 0x0224: 0xdc,
13391 // Ü
13392 0x0225: 0xfc,
13393 // ü
13394 0x0226: 0x2018,
13395 // ‘
13396 0x0227: 0xa1,
13397 // ¡
13398 0x0228: 0x2a,
13399 // *
13400 0x0229: 0x27,
13401 // '
13402 0x022a: 0x2014,
13403 // —
13404 0x022b: 0xa9,
13405 // ©
13406 0x022c: 0x2120,
13407 // ℠
13408 0x022d: 0x2022,
13409 // •
13410 0x022e: 0x201c,
13411 // “
13412 0x022f: 0x201d,
13413 // ”
13414 0x0230: 0xc0,
13415 // À
13416 0x0231: 0xc2,
13417 // Â
13418 0x0232: 0xc7,
13419 // Ç
13420 0x0233: 0xc8,
13421 // È
13422 0x0234: 0xca,
13423 // Ê
13424 0x0235: 0xcb,
13425 // Ë
13426 0x0236: 0xeb,
13427 // ë
13428 0x0237: 0xce,
13429 // Î
13430 0x0238: 0xcf,
13431 // Ï
13432 0x0239: 0xef,
13433 // ï
13434 0x023a: 0xd4,
13435 // Ô
13436 0x023b: 0xd9,
13437 // Ù
13438 0x023c: 0xf9,
13439 // ù
13440 0x023d: 0xdb,
13441 // Û
13442 0x023e: 0xab,
13443 // «
13444 0x023f: 0xbb,
13445 // »
13446 0x0320: 0xc3,
13447 // Ã
13448 0x0321: 0xe3,
13449 // ã
13450 0x0322: 0xcd,
13451 // Í
13452 0x0323: 0xcc,
13453 // Ì
13454 0x0324: 0xec,
13455 // ì
13456 0x0325: 0xd2,
13457 // Ò
13458 0x0326: 0xf2,
13459 // ò
13460 0x0327: 0xd5,
13461 // Õ
13462 0x0328: 0xf5,
13463 // õ
13464 0x0329: 0x7b,
13465 // {
13466 0x032a: 0x7d,
13467 // }
13468 0x032b: 0x5c,
13469 // \
13470 0x032c: 0x5e,
13471 // ^
13472 0x032d: 0x5f,
13473 // _
13474 0x032e: 0x7c,
13475 // |
13476 0x032f: 0x7e,
13477 // ~
13478 0x0330: 0xc4,
13479 // Ä
13480 0x0331: 0xe4,
13481 // ä
13482 0x0332: 0xd6,
13483 // Ö
13484 0x0333: 0xf6,
13485 // ö
13486 0x0334: 0xdf,
13487 // ß
13488 0x0335: 0xa5,
13489 // ¥
13490 0x0336: 0xa4,
13491 // ¤
13492 0x0337: 0x2502,
13493 // │
13494 0x0338: 0xc5,
13495 // Å
13496 0x0339: 0xe5,
13497 // å
13498 0x033a: 0xd8,
13499 // Ø
13500 0x033b: 0xf8,
13501 // ø
13502 0x033c: 0x250c,
13503 // ┌
13504 0x033d: 0x2510,
13505 // ┐
13506 0x033e: 0x2514,
13507 // └
13508 0x033f: 0x2518 // ┘
13509
13510 };
13511
13512 var getCharFromCode = function (code) {
13513 if (code === null) {
13514 return '';
13515 }
13516
13517 code = CHARACTER_TRANSLATION[code] || code;
13518 return String.fromCharCode(code);
13519 }; // the index of the last row in a CEA-608 display buffer
13520
13521
13522 var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
13523 // getting it through bit logic.
13524
13525 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
13526 // cells. The "bottom" row is the last element in the outer array.
13527 // We keep track of positioning information as we go by storing the
13528 // number of indentations and the tab offset in this buffer.
13529
13530 var createDisplayBuffer = function () {
13531 var result = [],
13532 i = BOTTOM_ROW + 1;
13533
13534 while (i--) {
13535 result.push({
13536 text: '',
13537 indent: 0,
13538 offset: 0
13539 });
13540 }
13541
13542 return result;
13543 };
13544
13545 var Cea608Stream = function (field, dataChannel) {
13546 Cea608Stream.prototype.init.call(this);
13547 this.field_ = field || 0;
13548 this.dataChannel_ = dataChannel || 0;
13549 this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
13550 this.setConstants();
13551 this.reset();
13552
13553 this.push = function (packet) {
13554 var data, swap, char0, char1, text; // remove the parity bits
13555
13556 data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
13557
13558 if (data === this.lastControlCode_) {
13559 this.lastControlCode_ = null;
13560 return;
13561 } // Store control codes
13562
13563
13564 if ((data & 0xf000) === 0x1000) {
13565 this.lastControlCode_ = data;
13566 } else if (data !== this.PADDING_) {
13567 this.lastControlCode_ = null;
13568 }
13569
13570 char0 = data >>> 8;
13571 char1 = data & 0xff;
13572
13573 if (data === this.PADDING_) {
13574 return;
13575 } else if (data === this.RESUME_CAPTION_LOADING_) {
13576 this.mode_ = 'popOn';
13577 } else if (data === this.END_OF_CAPTION_) {
13578 // If an EOC is received while in paint-on mode, the displayed caption
13579 // text should be swapped to non-displayed memory as if it was a pop-on
13580 // caption. Because of that, we should explicitly switch back to pop-on
13581 // mode
13582 this.mode_ = 'popOn';
13583 this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
13584
13585 this.flushDisplayed(packet.pts); // flip memory
13586
13587 swap = this.displayed_;
13588 this.displayed_ = this.nonDisplayed_;
13589 this.nonDisplayed_ = swap; // start measuring the time to display the caption
13590
13591 this.startPts_ = packet.pts;
13592 } else if (data === this.ROLL_UP_2_ROWS_) {
13593 this.rollUpRows_ = 2;
13594 this.setRollUp(packet.pts);
13595 } else if (data === this.ROLL_UP_3_ROWS_) {
13596 this.rollUpRows_ = 3;
13597 this.setRollUp(packet.pts);
13598 } else if (data === this.ROLL_UP_4_ROWS_) {
13599 this.rollUpRows_ = 4;
13600 this.setRollUp(packet.pts);
13601 } else if (data === this.CARRIAGE_RETURN_) {
13602 this.clearFormatting(packet.pts);
13603 this.flushDisplayed(packet.pts);
13604 this.shiftRowsUp_();
13605 this.startPts_ = packet.pts;
13606 } else if (data === this.BACKSPACE_) {
13607 if (this.mode_ === 'popOn') {
13608 this.nonDisplayed_[this.row_].text = this.nonDisplayed_[this.row_].text.slice(0, -1);
13609 } else {
13610 this.displayed_[this.row_].text = this.displayed_[this.row_].text.slice(0, -1);
13611 }
13612 } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
13613 this.flushDisplayed(packet.pts);
13614 this.displayed_ = createDisplayBuffer();
13615 } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
13616 this.nonDisplayed_ = createDisplayBuffer();
13617 } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
13618 if (this.mode_ !== 'paintOn') {
13619 // NOTE: This should be removed when proper caption positioning is
13620 // implemented
13621 this.flushDisplayed(packet.pts);
13622 this.displayed_ = createDisplayBuffer();
13623 }
13624
13625 this.mode_ = 'paintOn';
13626 this.startPts_ = packet.pts; // Append special characters to caption text
13627 } else if (this.isSpecialCharacter(char0, char1)) {
13628 // Bitmask char0 so that we can apply character transformations
13629 // regardless of field and data channel.
13630 // Then byte-shift to the left and OR with char1 so we can pass the
13631 // entire character code to `getCharFromCode`.
13632 char0 = (char0 & 0x03) << 8;
13633 text = getCharFromCode(char0 | char1);
13634 this[this.mode_](packet.pts, text);
13635 this.column_++; // Append extended characters to caption text
13636 } else if (this.isExtCharacter(char0, char1)) {
13637 // Extended characters always follow their "non-extended" equivalents.
13638 // I.e., if an "è" is desired, you'll always receive "eè"; non-compliant
13639 // decoders are supposed to drop the "è", while compliant decoders
13640 // backspace the "e" and insert "è".
13641 // Delete the previous character
13642 if (this.mode_ === 'popOn') {
13643 this.nonDisplayed_[this.row_].text = this.nonDisplayed_[this.row_].text.slice(0, -1);
13644 } else {
13645 this.displayed_[this.row_].text = this.displayed_[this.row_].text.slice(0, -1);
13646 } // Bitmask char0 so that we can apply character transformations
13647 // regardless of field and data channel.
13648 // Then byte-shift to the left and OR with char1 so we can pass the
13649 // entire character code to `getCharFromCode`.
13650
13651
13652 char0 = (char0 & 0x03) << 8;
13653 text = getCharFromCode(char0 | char1);
13654 this[this.mode_](packet.pts, text);
13655 this.column_++; // Process mid-row codes
13656 } else if (this.isMidRowCode(char0, char1)) {
13657 // Attributes are not additive, so clear all formatting
13658 this.clearFormatting(packet.pts); // According to the standard, mid-row codes
13659 // should be replaced with spaces, so add one now
13660
13661 this[this.mode_](packet.pts, ' ');
13662 this.column_++;
13663
13664 if ((char1 & 0xe) === 0xe) {
13665 this.addFormatting(packet.pts, ['i']);
13666 }
13667
13668 if ((char1 & 0x1) === 0x1) {
13669 this.addFormatting(packet.pts, ['u']);
13670 } // Detect offset control codes and adjust cursor
13671
13672 } else if (this.isOffsetControlCode(char0, char1)) {
13673 // Cursor position is set by indent PAC (see below) in 4-column
13674 // increments, with an additional offset code of 1-3 to reach any
13675 // of the 32 columns specified by CEA-608. So all we need to do
13676 // here is increment the column cursor by the given offset.
13677 const offset = char1 & 0x03; // For an offset value 1-3, set the offset for that caption
13678 // in the non-displayed array.
13679
13680 this.nonDisplayed_[this.row_].offset = offset;
13681 this.column_ += offset; // Detect PACs (Preamble Address Codes)
13682 } else if (this.isPAC(char0, char1)) {
13683 // There's no logic for PAC -> row mapping, so we have to just
13684 // find the row code in an array and use its index :(
13685 var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
13686
13687 if (this.mode_ === 'rollUp') {
13688 // This implies that the base row is incorrectly set.
13689 // As per the recommendation in CEA-608 (Base Row Implementation), defer to the number
13690 // of roll-up rows set.
13691 if (row - this.rollUpRows_ + 1 < 0) {
13692 row = this.rollUpRows_ - 1;
13693 }
13694
13695 this.setRollUp(packet.pts, row);
13696 }
13697
13698 if (row !== this.row_) {
13699 // formatting is only persistent for current row
13700 this.clearFormatting(packet.pts);
13701 this.row_ = row;
13702 } // All PACs can apply underline, so detect and apply
13703 // (All odd-numbered second bytes set underline)
13704
13705
13706 if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
13707 this.addFormatting(packet.pts, ['u']);
13708 }
13709
13710 if ((data & 0x10) === 0x10) {
13711 // We've got an indent level code. Each successive even number
13712 // increments the column cursor by 4, so we can get the desired
13713 // column position by bit-shifting to the right (to get n/2)
13714 // and multiplying by 4.
13715 const indentations = (data & 0xe) >> 1;
13716 this.column_ = indentations * 4; // add to the number of indentations for positioning
13717
13718 this.nonDisplayed_[this.row_].indent += indentations;
13719 }
13720
13721 if (this.isColorPAC(char1)) {
13722 // it's a color code, though we only support white, which
13723 // can be either normal or italicized. white italics can be
13724 // either 0x4e or 0x6e depending on the row, so we just
13725 // bitwise-and with 0xe to see if italics should be turned on
13726 if ((char1 & 0xe) === 0xe) {
13727 this.addFormatting(packet.pts, ['i']);
13728 }
13729 } // We have a normal character in char0, and possibly one in char1
13730
13731 } else if (this.isNormalChar(char0)) {
13732 if (char1 === 0x00) {
13733 char1 = null;
13734 }
13735
13736 text = getCharFromCode(char0);
13737 text += getCharFromCode(char1);
13738 this[this.mode_](packet.pts, text);
13739 this.column_ += text.length;
13740 } // finish data processing
13741
13742 };
13743 };
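
 // Worked example of the parity strip above (illustrative): for CC1
 // (field_ = 0, dataChannel_ = 0) the RCL code may arrive with odd-parity
 // bits set:
 //
 //   (0x9420 & 0x7f7f) === 0x1420; // === this.RESUME_CAPTION_LOADING_
 //
 // and, because the spec demands control codes be sent twice, the second
 // identical 0x1420 is dropped by the lastControlCode_ check.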
13744
13745 Cea608Stream.prototype = new Stream$7(); // Trigger a cue point that captures the current state of the
13746 // display buffer
13747
13748 Cea608Stream.prototype.flushDisplayed = function (pts) {
13749 const logWarning = index => {
13750 this.trigger('log', {
13751 level: 'warn',
13752 message: 'Skipping a malformed 608 caption at index ' + index + '.'
13753 });
13754 };
13755
13756 const content = [];
13757 this.displayed_.forEach((row, i) => {
13758 if (row && row.text && row.text.length) {
13759 try {
13760 // remove spaces from the start and end of the string
13761 row.text = row.text.trim();
13762 } catch (e) {
13763 // Ordinarily, this shouldn't happen. However, caption
13764 // parsing errors should not throw exceptions and
13765 // break playback.
13766 logWarning(i);
13767 } // See the below link for more details on the following fields:
13768 // https://dvcs.w3.org/hg/text-tracks/raw-file/default/608toVTT/608toVTT.html#positioning-in-cea-608
13769
13770
13771 if (row.text.length) {
13772 content.push({
13773 // The text to be displayed in the caption from this specific row, with whitespace removed.
13774 text: row.text,
13775 // Value between 1 and 15 representing the PAC row used to calculate line height.
13776 line: i + 1,
13777 // A number representing the indent position by percentage (CEA-608 PAC indent code).
13778 // The value will be a number between 10 and 80. Offset is used to add an additional
13779 // value to the position if necessary.
13780 position: 10 + Math.min(70, row.indent * 10) + row.offset * 2.5
13781 });
13782 }
13783 } else if (row === undefined || row === null) {
13784 logWarning(i);
13785 }
13786 });
13787
13788 if (content.length) {
13789 this.trigger('data', {
13790 startPts: this.startPts_,
13791 endPts: pts,
13792 content,
13793 stream: this.name_
13794 });
13795 }
13796 };
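
 // Illustrative position math from above: a row with indent = 2 and
 // offset = 1 is emitted with position = 10 + Math.min(70, 2 * 10) + 1 * 2.5
 // = 32.5 (percent), and line = rowIndex + 1.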
13797 /**
13798 * Zero out the data, used for startup and on seek
13799 */
13800
13801
13802 Cea608Stream.prototype.reset = function () {
13803 this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
13804 // actually display captions. If a caption is shifted to a row
13805 // with a lower index than this, it is cleared from the display
13806 // buffer
13807
13808 this.topRow_ = 0;
13809 this.startPts_ = 0;
13810 this.displayed_ = createDisplayBuffer();
13811 this.nonDisplayed_ = createDisplayBuffer();
13812 this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
13813
13814 this.column_ = 0;
13815 this.row_ = BOTTOM_ROW;
13816 this.rollUpRows_ = 2; // This variable holds currently-applied formatting
13817
13818 this.formatting_ = [];
13819 };
13820 /**
13821 * Sets up control code and related constants for this instance
13822 */
13823
13824
13825 Cea608Stream.prototype.setConstants = function () {
13826 // The following attributes have these uses:
13827 // ext_ : char0 for mid-row codes, and the base for extended
13828 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
13829 // extended codes)
13830 // control_: char0 for control codes, except byte-shifted to the
13831 // left so that we can do this.control_ | CONTROL_CODE
13832 // offset_: char0 for tab offset codes
13833 //
13834 // It's also worth noting that control codes, and _only_ control codes,
13835 // differ between field 1 and field 2. Field 2 control codes are always
13836 // their field 1 value plus 1. That's why there's the "| field" on the
13837 // control value.
13838 if (this.dataChannel_ === 0) {
13839 this.BASE_ = 0x10;
13840 this.EXT_ = 0x11;
13841 this.CONTROL_ = (0x14 | this.field_) << 8;
13842 this.OFFSET_ = 0x17;
13843 } else if (this.dataChannel_ === 1) {
13844 this.BASE_ = 0x18;
13845 this.EXT_ = 0x19;
13846 this.CONTROL_ = (0x1c | this.field_) << 8;
13847 this.OFFSET_ = 0x1f;
13848 } // Constants for the LSByte command codes recognized by Cea608Stream. This
13849 // list is not exhaustive. For a more comprehensive listing and semantics see
13850 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
13851 // Padding
13852
13853
13854 this.PADDING_ = 0x0000; // Pop-on Mode
13855
13856 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
13857 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
13858
13859 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
13860 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
13861 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
13862 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
13863
13864 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
13865
13866 this.BACKSPACE_ = this.CONTROL_ | 0x21;
13867 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
13868 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
13869 };
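
 // Illustrative values: for field 2 (field_ = 1), data channel 0, CONTROL_
 // is (0x14 | 1) << 8 = 0x1500, so e.g. END_OF_CAPTION_ becomes 0x152f --
 // one greater in the first byte than the field-1 value 0x142f, matching
 // the "plus 1" note above.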
13870 /**
13871 * Detects if the 2-byte packet data is a special character
13872 *
13873 * Special characters have a second byte in the range 0x30 to 0x3f,
13874 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
13875 * data channel 2).
13876 *
13877 * @param {Integer} char0 The first byte
13878 * @param {Integer} char1 The second byte
13879 * @return {Boolean} Whether the 2 bytes are a special character
13880 */
13881
13882
13883 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
13884 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
13885 };
13886 /**
13887 * Detects if the 2-byte packet data is an extended character
13888 *
13889 * Extended characters have a second byte in the range 0x20 to 0x3f,
13890 * with the first byte being 0x12 or 0x13 (for data channel 1) or
13891 * 0x1a or 0x1b (for data channel 2).
13892 *
13893 * @param {Integer} char0 The first byte
13894 * @param {Integer} char1 The second byte
13895 * @return {Boolean} Whether the 2 bytes are an extended character
13896 */
13897
13898
13899 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
13900 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
13901 };
13902 /**
13903 * Detects if the 2-byte packet is a mid-row code
13904 *
13905 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
13906 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
13907 * channel 2).
13908 *
13909 * @param {Integer} char0 The first byte
13910 * @param {Integer} char1 The second byte
13911 * @return {Boolean} Whether the 2 bytes are a mid-row code
13912 */
13913
13914
13915 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
13916 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
13917 };
13918 /**
13919 * Detects if the 2-byte packet is an offset control code
13920 *
13921 * Offset control codes have a second byte in the range 0x21 to 0x23,
13922 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
13923 * data channel 2).
13924 *
13925 * @param {Integer} char0 The first byte
13926 * @param {Integer} char1 The second byte
13927 * @return {Boolean} Whether the 2 bytes are an offset control code
13928 */
13929
13930
13931 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
13932 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
13933 };
13934 /**
13935 * Detects if the 2-byte packet is a Preamble Address Code
13936 *
13937 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
13938 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
13939 * range 0x40 to 0x7f.
13940 *
13941 * @param {Integer} char0 The first byte
13942 * @param {Integer} char1 The second byte
13943 * @return {Boolean} Whether the 2 bytes are a PAC
13944 */
13945
13946
13947 Cea608Stream.prototype.isPAC = function (char0, char1) {
13948 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
13949 };
13950 /**
13951 * Detects if a packet's second byte is in the range of a PAC color code
13952 *
13953 * PAC color codes have a second byte in the range 0x40 to 0x4f, or
13954 * 0x60 to 0x7f.
13955 *
13956 * @param {Integer} char1 The second byte
13957 * @return {Boolean} Whether the byte is a color PAC
13958 */
13959
13960
13961 Cea608Stream.prototype.isColorPAC = function (char1) {
13962 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
13963 };
13964 /**
13965 * Detects if a single byte is in the range of a normal character
13966 *
13967 * Normal text bytes are in the range 0x20 to 0x7f.
13968 *
13969 * @param {Integer} char The byte
13970 * @return {Boolean} Whether the byte is a normal character
13971 */
13972
13973
13974 Cea608Stream.prototype.isNormalChar = function (char) {
13975 return char >= 0x20 && char <= 0x7f;
13976 };
13977 /**
13978 * Configures roll-up
13979 *
13980 * @param {Integer} pts Current PTS
13981 * @param {Integer} newBaseRow Used by PACs to slide the current window to
13982 * a new position
13983 */
13984
13985
13986 Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
13987 // Reset the base row to the bottom row when switching modes
13988 if (this.mode_ !== 'rollUp') {
13989 this.row_ = BOTTOM_ROW;
13990 this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
13991
13992 this.flushDisplayed(pts);
13993 this.nonDisplayed_ = createDisplayBuffer();
13994 this.displayed_ = createDisplayBuffer();
13995 }
13996
13997 if (newBaseRow !== undefined && newBaseRow !== this.row_) {
13998 // move currently displayed captions (up or down) to the new base row
13999 for (var i = 0; i < this.rollUpRows_; i++) {
14000 this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
14001 this.displayed_[this.row_ - i] = {
14002 text: '',
14003 indent: 0,
14004 offset: 0
14005 };
14006 }
14007 }
14008
14009 if (newBaseRow === undefined) {
14010 newBaseRow = this.row_;
14011 }
14012
14013 this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
14014 }; // Adds the opening HTML tags for the passed formats to the caption text,
14015 // and keeps track of them for later closing
14016
14017
14018 Cea608Stream.prototype.addFormatting = function (pts, format) {
14019 this.formatting_ = this.formatting_.concat(format);
14020 var text = format.reduce(function (text, format) {
14021 return text + '<' + format + '>';
14022 }, '');
14023 this[this.mode_](pts, text);
14024 }; // Adds HTML closing tags for current formatting to caption text and
14025 // clears remembered formatting
14026
14027
14028 Cea608Stream.prototype.clearFormatting = function (pts) {
14029 if (!this.formatting_.length) {
14030 return;
14031 }
14032
14033 var text = this.formatting_.reverse().reduce(function (text, format) {
14034 return text + '</' + format + '>';
14035 }, '');
14036 this.formatting_ = [];
14037 this[this.mode_](pts, text);
14038 }; // Mode Implementations
14039
14040
14041 Cea608Stream.prototype.popOn = function (pts, text) {
14042 var baseRow = this.nonDisplayed_[this.row_].text; // buffer characters
14043
14044 baseRow += text;
14045 this.nonDisplayed_[this.row_].text = baseRow;
14046 };
14047
14048 Cea608Stream.prototype.rollUp = function (pts, text) {
14049 var baseRow = this.displayed_[this.row_].text;
14050 baseRow += text;
14051 this.displayed_[this.row_].text = baseRow;
14052 };
14053
14054 Cea608Stream.prototype.shiftRowsUp_ = function () {
14055 var i; // clear out inactive rows
14056
14057 for (i = 0; i < this.topRow_; i++) {
14058 this.displayed_[i] = {
14059 text: '',
14060 indent: 0,
14061 offset: 0
14062 };
14063 }
14064
14065 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
14066 this.displayed_[i] = {
14067 text: '',
14068 indent: 0,
14069 offset: 0
14070 };
14071 } // shift displayed rows up
14072
14073
14074 for (i = this.topRow_; i < this.row_; i++) {
14075 this.displayed_[i] = this.displayed_[i + 1];
14076 } // clear out the bottom row
14077
14078
14079 this.displayed_[this.row_] = {
14080 text: '',
14081 indent: 0,
14082 offset: 0
14083 };
14084 };
14085
14086 Cea608Stream.prototype.paintOn = function (pts, text) {
14087 var baseRow = this.displayed_[this.row_].text;
14088 baseRow += text;
14089 this.displayed_[this.row_].text = baseRow;
14090 }; // exports
14091
14092
14093 var captionStream = {
14094 CaptionStream: CaptionStream$2,
14095 Cea608Stream: Cea608Stream,
14096 Cea708Stream: Cea708Stream
14097 };
14098 /**
14099 * mux.js
14100 *
14101 * Copyright (c) Brightcove
14102 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
14103 */
14104
14105 var streamTypes = {
14106 H264_STREAM_TYPE: 0x1B,
14107 ADTS_STREAM_TYPE: 0x0F,
14108 METADATA_STREAM_TYPE: 0x15
14109 };
14110 /**
14111 * mux.js
14112 *
14113 * Copyright (c) Brightcove
14114 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
14115 *
14116 * Accepts program elementary stream (PES) data events and corrects
14117 * decode and presentation time stamps to account for a rollover
14118 * of the 33 bit value.
14119 */
14120
14121 var Stream$6 = stream;
14122 var MAX_TS = 8589934592;
14123 var RO_THRESH = 4294967296;
14124 var TYPE_SHARED = 'shared';
14125
14126 var handleRollover$1 = function (value, reference) {
14127 var direction = 1;
14128
14129 if (value > reference) {
14130 // If the current timestamp value is greater than our reference timestamp and we detect a
14131 // timestamp rollover, this means the roll over is happening in the opposite direction.
14132 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
14133 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
14134 // rollover point. In loading this segment, the timestamp values will be very large,
14135 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
14136 // the time stamp to be `value - 2^33`.
14137 direction = -1;
14138 } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
14139 // cause an incorrect adjustment.
14140
14141
14142 while (Math.abs(reference - value) > RO_THRESH) {
14143 value += direction * MAX_TS;
14144 }
14145
14146 return value;
14147 };
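  // Editor's note: a worked example of the adjustment above, with hypothetical
  // 90kHz timestamps. 8589934592 is 2^33 (MAX_TS).
  // handleRollover$1(10, 8589934589); //=> 8589934602 (10 is "after" the wrap)
  // handleRollover$1(8589934589, 10); //=> -3 (a pre-wrap value seen against a small reference)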
14148
14149 var TimestampRolloverStream$1 = function (type) {
14150 var lastDTS, referenceDTS;
14151 TimestampRolloverStream$1.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
14152 // video and audio. We could use `undefined` here, but having a string
14153 // makes debugging a little clearer.
14154
14155 this.type_ = type || TYPE_SHARED;
14156
14157 this.push = function (data) {
14158 /**
14159 * The rollover stream expects data from an elementary stream, which can
14160 * push forward two types of data:
14161 * - Parsed Video/Audio/Timed-metadata PES (packetized elementary stream) packets
14162 * - Track metadata from the PMT (Program Map Table)
14163 * The rollover stream expects pts/dts info to be available, since it stores
14164 * lastDTS. Non-PES packets must be ignored because they could reset lastDTS
14165 * to undefined; lastDTS is needed to signal rollover from the previous
14166 * segments to the next ones.
14167 */
14168 if (data.type === 'metadata') {
14169 this.trigger('data', data);
14170 return;
14171 } // Any "shared" rollover streams will accept _all_ data. Otherwise,
14172 // streams will only accept data that matches their type.
14173
14174
14175 if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
14176 return;
14177 }
14178
14179 if (referenceDTS === undefined) {
14180 referenceDTS = data.dts;
14181 }
14182
14183 data.dts = handleRollover$1(data.dts, referenceDTS);
14184 data.pts = handleRollover$1(data.pts, referenceDTS);
14185 lastDTS = data.dts;
14186 this.trigger('data', data);
14187 };
14188
14189 this.flush = function () {
14190 referenceDTS = lastDTS;
14191 this.trigger('done');
14192 };
14193
14194 this.endTimeline = function () {
14195 this.flush();
14196 this.trigger('endedtimeline');
14197 };
14198
14199 this.discontinuity = function () {
14200 referenceDTS = void 0;
14201 lastDTS = void 0;
14202 };
14203
14204 this.reset = function () {
14205 this.discontinuity();
14206 this.trigger('reset');
14207 };
14208 };
14209
14210 TimestampRolloverStream$1.prototype = new Stream$6();
14211 var timestampRolloverStream = {
14212 TimestampRolloverStream: TimestampRolloverStream$1,
14213 handleRollover: handleRollover$1
14214 }; // Once IE11 support is dropped, this function should be removed.
14215
14216 var typedArrayIndexOf$1 = (typedArray, element, fromIndex) => {
14217 if (!typedArray) {
14218 return -1;
14219 }
14220
14221 var currentIndex = fromIndex;
14222
14223 for (; currentIndex < typedArray.length; currentIndex++) {
14224 if (typedArray[currentIndex] === element) {
14225 return currentIndex;
14226 }
14227 }
14228
14229 return -1;
14230 };
14231
14232 var typedArray = {
14233 typedArrayIndexOf: typedArrayIndexOf$1
14234 };
14235 /**
14236 * mux.js
14237 *
14238 * Copyright (c) Brightcove
14239 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
14240 *
14241 * Tools for parsing ID3 frame data
14242 * @see http://id3.org/id3v2.3.0
14243 */
14244
14245 var typedArrayIndexOf = typedArray.typedArrayIndexOf,
14246 // Frames that allow different types of text encoding contain a text
14247 // encoding description byte [ID3v2.4.0 section 4.]
14248 textEncodingDescriptionByte = {
14249 Iso88591: 0x00,
14250 // ISO-8859-1, terminated with \0.
14251 Utf16: 0x01,
14252 // UTF-16 encoded Unicode BOM, terminated with \0\0
14253 Utf16be: 0x02,
14254 // UTF-16BE encoded Unicode, without BOM, terminated with \0\0
14255 Utf8: 0x03 // UTF-8 encoded Unicode, terminated with \0
14256
14257 },
14258 // return a percent-encoded representation of the specified byte range
14259 // @see http://en.wikipedia.org/wiki/Percent-encoding
14260 percentEncode$1 = function (bytes, start, end) {
14261 var i,
14262 result = '';
14263
14264 for (i = start; i < end; i++) {
14265 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
14266 }
14267
14268 return result;
14269 },
14270 // return the string representation of the specified byte range,
14271 // interpreted as UTF-8.
14272 parseUtf8 = function (bytes, start, end) {
14273 return decodeURIComponent(percentEncode$1(bytes, start, end));
14274 },
14275 // return the string representation of the specified byte range,
14276 // interpreted as ISO-8859-1.
14277 parseIso88591$1 = function (bytes, start, end) {
14278 return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
14279 },
14280 parseSyncSafeInteger$1 = function (data) {
14281 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
14282 },
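  // Editor's note (worked example): syncsafe integers keep the high bit of
  // every byte clear, packing 7 bits per byte. The bytes [0x00, 0x00, 0x02, 0x01]
  // decode as (0x02 << 7) | 0x01 === 257, whereas a plain big-endian read
  // would give 513.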
14283 frameParsers = {
14284 'APIC': function (frame) {
14285 var i = 1,
14286 mimeTypeEndIndex,
14287 descriptionEndIndex,
14288 LINK_MIME_TYPE = '-->';
14289
14290 if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {
14291 // ignore frames with unrecognized character encodings
14292 return;
14293 } // parsing fields [ID3v2.4.0 section 4.14.]
14294
14295
14296 mimeTypeEndIndex = typedArrayIndexOf(frame.data, 0, i);
14297
14298 if (mimeTypeEndIndex < 0) {
14299 // malformed frame
14300 return;
14301 } // parsing Mime type field (terminated with \0)
14302
14303
14304 frame.mimeType = parseIso88591$1(frame.data, i, mimeTypeEndIndex);
14305 i = mimeTypeEndIndex + 1; // parsing 1-byte Picture Type field
14306
14307 frame.pictureType = frame.data[i];
14308 i++;
14309 descriptionEndIndex = typedArrayIndexOf(frame.data, 0, i);
14310
14311 if (descriptionEndIndex < 0) {
14312 // malformed frame
14313 return;
14314 } // parsing Description field (terminated with \0)
14315
14316
14317 frame.description = parseUtf8(frame.data, i, descriptionEndIndex);
14318 i = descriptionEndIndex + 1;
14319
14320 if (frame.mimeType === LINK_MIME_TYPE) {
14321 // parsing Picture Data field as URL (always represented as ISO-8859-1 [ID3v2.4.0 section 4.])
14322 frame.url = parseIso88591$1(frame.data, i, frame.data.length);
14323 } else {
14324 // parsing Picture Data field as binary data
14325 frame.pictureData = frame.data.subarray(i, frame.data.length);
14326 }
14327 },
14328 'T*': function (frame) {
14329 if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {
14330 // ignore frames with unrecognized character encodings
14331 return;
14332 } // parse text field, do not include null terminator in the frame value
14333 // frames that allow different types of encoding contain terminated text [ID3v2.4.0 section 4.]
14334
14335
14336 frame.value = parseUtf8(frame.data, 1, frame.data.length).replace(/\0*$/, ''); // text information frames supports multiple strings, stored as a terminator separated list [ID3v2.4.0 section 4.2.]
14337
14338 frame.values = frame.value.split('\0');
14339 },
14340 'TXXX': function (frame) {
14341 var descriptionEndIndex;
14342
14343 if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {
14344 // ignore frames with unrecognized character encodings
14345 return;
14346 }
14347
14348 descriptionEndIndex = typedArrayIndexOf(frame.data, 0, 1);
14349
14350 if (descriptionEndIndex === -1) {
14351 return;
14352 } // parse the text fields
14353
14354
14355 frame.description = parseUtf8(frame.data, 1, descriptionEndIndex); // do not include the null terminator in the tag value
14356 // frames that allow different types of encoding contain terminated text
14357 // [ID3v2.4.0 section 4.]
14358
14359 frame.value = parseUtf8(frame.data, descriptionEndIndex + 1, frame.data.length).replace(/\0*$/, '');
14360 frame.data = frame.value;
14361 },
14362 'W*': function (frame) {
14363 // parse URL field; URL fields are always represented as ISO-8859-1 [ID3v2.4.0 section 4.]
14364 // if the value is followed by a string termination all the following information should be ignored [ID3v2.4.0 section 4.3]
14365 frame.url = parseIso88591$1(frame.data, 0, frame.data.length).replace(/\0.*$/, '');
14366 },
14367 'WXXX': function (frame) {
14368 var descriptionEndIndex;
14369
14370 if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {
14371 // ignore frames with unrecognized character encodings
14372 return;
14373 }
14374
14375 descriptionEndIndex = typedArrayIndexOf(frame.data, 0, 1);
14376
14377 if (descriptionEndIndex === -1) {
14378 return;
14379 } // parse the description and URL fields
14380
14381
14382 frame.description = parseUtf8(frame.data, 1, descriptionEndIndex); // URL fields are always represented as ISO-8859-1 [ID3v2.4.0 section 4.]
14383 // if the value is followed by a string termination all the following information
14384 // should be ignored [ID3v2.4.0 section 4.3]
14385
14386 frame.url = parseIso88591$1(frame.data, descriptionEndIndex + 1, frame.data.length).replace(/\0.*$/, '');
14387 },
14388 'PRIV': function (frame) {
14389 var i;
14390
14391 for (i = 0; i < frame.data.length; i++) {
14392 if (frame.data[i] === 0) {
14393 // parse the description and URL fields
14394 frame.owner = parseIso88591$1(frame.data, 0, i);
14395 break;
14396 }
14397 }
14398
14399 frame.privateData = frame.data.subarray(i + 1);
14400 frame.data = frame.privateData;
14401 }
14402 };
14403
14404 var parseId3Frames$1 = function (data) {
14405 var frameSize,
14406 frameHeader,
14407 frameStart = 10,
14408 tagSize = 0,
14409 frames = []; // If we don't have enough data for a header, 10 bytes,
14410 // or 'ID3' in the first 3 bytes this is not a valid ID3 tag.
14411
14412 if (data.length < 10 || data[0] !== 'I'.charCodeAt(0) || data[1] !== 'D'.charCodeAt(0) || data[2] !== '3'.charCodeAt(0)) {
14413 return;
14414 } // the tag size is transmitted as a 28-bit syncsafe integer in the
14415 // last four bytes of the ID3 header.
14416 // The most significant bit of each byte is dropped and the
14417 // results concatenated to recover the actual value.
14418
14419
14420 tagSize = parseSyncSafeInteger$1(data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
14421 // convenient for our comparisons to include it
14422
14423 tagSize += 10; // check bit 6 of byte 5 for the extended header flag.
14424
14425 var hasExtendedHeader = data[5] & 0x40;
14426
14427 if (hasExtendedHeader) {
14428 // advance the frame start past the extended header
14429 frameStart += 4; // header size field
14430
14431 frameStart += parseSyncSafeInteger$1(data.subarray(10, 14));
14432 tagSize -= parseSyncSafeInteger$1(data.subarray(16, 20)); // clip any padding off the end
14433 } // parse one or more ID3 frames
14434 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
14435
14436
14437 do {
14438 // determine the number of bytes in this frame
14439 frameSize = parseSyncSafeInteger$1(data.subarray(frameStart + 4, frameStart + 8));
14440
14441 if (frameSize < 1) {
14442 break;
14443 }
14444
14445 frameHeader = String.fromCharCode(data[frameStart], data[frameStart + 1], data[frameStart + 2], data[frameStart + 3]);
14446 var frame = {
14447 id: frameHeader,
14448 data: data.subarray(frameStart + 10, frameStart + frameSize + 10)
14449 };
14450 frame.key = frame.id; // parse frame values
14451
14452 if (frameParsers[frame.id]) {
14453 // use frame specific parser
14454 frameParsers[frame.id](frame);
14455 } else if (frame.id[0] === 'T') {
14456 // use text frame generic parser
14457 frameParsers['T*'](frame);
14458 } else if (frame.id[0] === 'W') {
14459 // use URL link frame generic parser
14460 frameParsers['W*'](frame);
14461 }
14462
14463 frames.push(frame);
14464 frameStart += 10; // advance past the frame header
14465
14466 frameStart += frameSize; // advance past the frame body
14467 } while (frameStart < tagSize);
14468
14469 return frames;
14470 };
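  // Editor's sketch (hypothetical usage): `id3Bytes` stands in for a
  // Uint8Array holding a complete ID3v2 tag, starting with the bytes 'ID3'.
  // var frames = parseId3Frames$1(id3Bytes) || [];
  // frames.forEach(function (frame) {
  //   // e.g. frame.id === 'TXXX', frame.description, frame.value
  // });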
14471
14472 var parseId3 = {
14473 parseId3Frames: parseId3Frames$1,
14474 parseSyncSafeInteger: parseSyncSafeInteger$1,
14475 frameParsers: frameParsers
14476 };
14477 /**
14478 * mux.js
14479 *
14480 * Copyright (c) Brightcove
14481 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
14482 *
14483 * Accepts program elementary stream (PES) data events and parses out
14484 * ID3 metadata from them, if present.
14485 * @see http://id3.org/id3v2.3.0
14486 */
14487
14488 var Stream$5 = stream,
14489 StreamTypes$3 = streamTypes,
14490 id3 = parseId3,
14491 MetadataStream;
14492
14493 MetadataStream = function (options) {
14494 var settings = {
14495 // the bytes of the program-level descriptor field in MP2T
14496 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
14497 // program element descriptors"
14498 descriptor: options && options.descriptor
14499 },
14500 // the total size in bytes of the ID3 tag being parsed
14501 tagSize = 0,
14502 // tag data that is not complete enough to be parsed
14503 buffer = [],
14504 // the total number of bytes currently in the buffer
14505 bufferSize = 0,
14506 i;
14507 MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
14508 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
14509
14510 this.dispatchType = StreamTypes$3.METADATA_STREAM_TYPE.toString(16);
14511
14512 if (settings.descriptor) {
14513 for (i = 0; i < settings.descriptor.length; i++) {
14514 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
14515 }
14516 }
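    // Editor's note (worked example): with METADATA_STREAM_TYPE 0x15 and a
    // hypothetical descriptor of [0x00, 0x01], dispatchType becomes '150001'.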
14517
14518 this.push = function (chunk) {
14519 var tag, frameStart, frameSize, frame, i, frameHeader;
14520
14521 if (chunk.type !== 'timed-metadata') {
14522 return;
14523 } // if data_alignment_indicator is set in the PES header,
14524 // we must have the start of a new ID3 tag. Assume anything
14525 // remaining in the buffer was malformed and throw it out
14526
14527
14528 if (chunk.dataAlignmentIndicator) {
14529 bufferSize = 0;
14530 buffer.length = 0;
14531 } // ignore events that don't look like ID3 data
14532
14533
14534 if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
14535 this.trigger('log', {
14536 level: 'warn',
14537 message: 'Skipping unrecognized metadata packet'
14538 });
14539 return;
14540 } // add this chunk to the data we've collected so far
14541
14542
14543 buffer.push(chunk);
14544 bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header
14545
14546 if (buffer.length === 1) {
14547 // the tag size is transmitted as a 28-bit syncsafe integer in the
14548 // last four bytes of the ID3 header.
14549 // The most significant bit of each byte is dropped and the
14550 // results concatenated to recover the actual value.
14551 tagSize = id3.parseSyncSafeInteger(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
14552 // convenient for our comparisons to include it
14553
14554 tagSize += 10;
14555 } // if the entire frame has not arrived, wait for more data
14556
14557
14558 if (bufferSize < tagSize) {
14559 return;
14560 } // collect the entire frame so it can be parsed
14561
14562
14563 tag = {
14564 data: new Uint8Array(tagSize),
14565 frames: [],
14566 pts: buffer[0].pts,
14567 dts: buffer[0].dts
14568 };
14569
14570 for (i = 0; i < tagSize;) {
14571 tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
14572 i += buffer[0].data.byteLength;
14573 bufferSize -= buffer[0].data.byteLength;
14574 buffer.shift();
14575 } // find the start of the first frame and the end of the tag
14576
14577
14578 frameStart = 10;
14579
14580 if (tag.data[5] & 0x40) {
14581 // advance the frame start past the extended header
14582 frameStart += 4; // header size field
14583
14584 frameStart += id3.parseSyncSafeInteger(tag.data.subarray(10, 14)); // clip any padding off the end
14585
14586 tagSize -= id3.parseSyncSafeInteger(tag.data.subarray(16, 20));
14587 } // parse one or more ID3 frames
14588 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
14589
14590
14591 do {
14592 // determine the number of bytes in this frame
14593 frameSize = id3.parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
14594
14595 if (frameSize < 1) {
14596 this.trigger('log', {
14597 level: 'warn',
14598 message: 'Malformed ID3 frame encountered. Skipping remaining metadata parsing.'
14599 }); // If the frame is malformed, don't parse any further frames but allow previous valid parsed frames
14600 // to be sent along.
14601
14602 break;
14603 }
14604
14605 frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
14606 frame = {
14607 id: frameHeader,
14608 data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
14609 };
14610 frame.key = frame.id; // parse frame values
14611
14612 if (id3.frameParsers[frame.id]) {
14613 // use frame specific parser
14614 id3.frameParsers[frame.id](frame);
14615 } else if (frame.id[0] === 'T') {
14616 // use text frame generic parser
14617 id3.frameParsers['T*'](frame);
14618 } else if (frame.id[0] === 'W') {
14619 // use URL link frame generic parser
14620 id3.frameParsers['W*'](frame);
14621 } // handle the special PRIV frame used to indicate the start
14622 // time for raw AAC data
14623
14624
14625 if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
14626 var d = frame.data,
14627 size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
14628 size *= 4;
14629 size += d[7] & 0x03;
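          // Editor's note: the PRIV payload packs a 33-bit, 90kHz timestamp as
          // (top 31 bits) * 4 + (2 low bits); e.g. a hypothetical final byte
          // d[7] === 0x07 contributes (0x07 >>> 2) to the high part and
          // (0x07 & 0x03) to the low bits.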
14630 frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
14631 // on the value of this frame
14632 // we couldn't have known the appropriate pts and dts before
14633 // parsing this ID3 tag so set those values now
14634
14635 if (tag.pts === undefined && tag.dts === undefined) {
14636 tag.pts = frame.timeStamp;
14637 tag.dts = frame.timeStamp;
14638 }
14639
14640 this.trigger('timestamp', frame);
14641 }
14642
14643 tag.frames.push(frame);
14644 frameStart += 10; // advance past the frame header
14645
14646 frameStart += frameSize; // advance past the frame body
14647 } while (frameStart < tagSize);
14648
14649 this.trigger('data', tag);
14650 };
14651 };
14652
14653 MetadataStream.prototype = new Stream$5();
14654 var metadataStream = MetadataStream;
14655 /**
14656 * mux.js
14657 *
14658 * Copyright (c) Brightcove
14659 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
14660 *
14661 * A stream-based mp2t to mp4 converter. This utility can be used to
14662 * deliver mp4s to a SourceBuffer on platforms that support native
14663 * Media Source Extensions.
14664 */
14665
14666 var Stream$4 = stream,
14667 CaptionStream$1 = captionStream,
14668 StreamTypes$2 = streamTypes,
14669 TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types
14670
14671 var TransportPacketStream, TransportParseStream, ElementaryStream; // constants
14672
14673 var MP2T_PACKET_LENGTH$1 = 188,
14674 // bytes
14675 SYNC_BYTE$1 = 0x47;
14676 /**
14677 * Splits an incoming stream of binary data into MPEG-2 Transport
14678 * Stream packets.
14679 */
14680
14681 TransportPacketStream = function () {
14682 var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),
14683 bytesInBuffer = 0;
14684 TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
14685
14686 /**
14687 * Split a stream of data into M2TS packets
14688 **/
14689
14690 this.push = function (bytes) {
14691 var startIndex = 0,
14692 endIndex = MP2T_PACKET_LENGTH$1,
14693 everything; // If there are bytes remaining from the last segment, prepend them to the
14694 // bytes that were pushed in
14695
14696 if (bytesInBuffer) {
14697 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
14698 everything.set(buffer.subarray(0, bytesInBuffer));
14699 everything.set(bytes, bytesInBuffer);
14700 bytesInBuffer = 0;
14701 } else {
14702 everything = bytes;
14703 } // While we have enough data for a packet
14704
14705
14706 while (endIndex < everything.byteLength) {
14707 // Look for a pair of start and end sync bytes in the data.
14708 if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {
14709 // We found a packet so emit it and jump one whole packet forward in
14710 // the stream
14711 this.trigger('data', everything.subarray(startIndex, endIndex));
14712 startIndex += MP2T_PACKET_LENGTH$1;
14713 endIndex += MP2T_PACKET_LENGTH$1;
14714 continue;
14715 } // If we get here, we have somehow become de-synchronized and we need to step
14716 // forward one byte at a time until we find a pair of sync bytes that denote
14717 // a packet
14718
14719
14720 startIndex++;
14721 endIndex++;
14722 } // If there was some data left over at the end of the segment that couldn't
14723 // possibly be a whole packet, keep it because it might be the start of a packet
14724 // that continues in the next segment
14725
14726
14727 if (startIndex < everything.byteLength) {
14728 buffer.set(everything.subarray(startIndex), 0);
14729 bytesInBuffer = everything.byteLength - startIndex;
14730 }
14731 };
14732 /**
14733 * Passes identified M2TS packets to the TransportParseStream to be parsed
14734 **/
14735
14736
14737 this.flush = function () {
14738 // If the buffer contains a whole packet when we are being flushed, emit it
14739 // and empty the buffer. Otherwise hold onto the data because it may be
14740 // important for decoding the next segment
14741 if (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {
14742 this.trigger('data', buffer);
14743 bytesInBuffer = 0;
14744 }
14745
14746 this.trigger('done');
14747 };
14748
14749 this.endTimeline = function () {
14750 this.flush();
14751 this.trigger('endedtimeline');
14752 };
14753
14754 this.reset = function () {
14755 bytesInBuffer = 0;
14756 this.trigger('reset');
14757 };
14758 };
14759
14760 TransportPacketStream.prototype = new Stream$4();
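  // Editor's sketch (hypothetical usage): pushing arbitrary byte chunks in
  // yields aligned 188-byte packets out; `segmentBytes` is assumed to be a
  // Uint8Array of MPEG-TS data.
  // var packetStream = new TransportPacketStream();
  // packetStream.on('data', function (packet) {
  //   // packet is a 188-byte Uint8Array beginning with the 0x47 sync byte
  // });
  // packetStream.push(segmentBytes);
  // packetStream.flush();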
14761 /**
14762 * Accepts an MP2T TransportPacketStream and emits data events with parsed
14763 * forms of the individual transport stream packets.
14764 */
14765
14766 TransportParseStream = function () {
14767 var parsePsi, parsePat, parsePmt, self;
14768 TransportParseStream.prototype.init.call(this);
14769 self = this;
14770 this.packetsWaitingForPmt = [];
14771 this.programMapTable = undefined;
14772
14773 parsePsi = function (payload, psi) {
14774 var offset = 0; // PSI packets may be split into multiple sections and those
14775 // sections may be split into multiple packets. If a PSI
14776 // section starts in this packet, the payload_unit_start_indicator
14777 // will be true and the first byte of the payload will indicate
14778 // the offset from the current position to the start of the
14779 // section.
14780
14781 if (psi.payloadUnitStartIndicator) {
14782 offset += payload[offset] + 1;
14783 }
14784
14785 if (psi.type === 'pat') {
14786 parsePat(payload.subarray(offset), psi);
14787 } else {
14788 parsePmt(payload.subarray(offset), psi);
14789 }
14790 };
14791
14792 parsePat = function (payload, pat) {
14793 pat.section_number = payload[7]; // eslint-disable-line camelcase
14794
14795 pat.last_section_number = payload[8]; // eslint-disable-line camelcase
14796 // skip the PSI header and parse the first PMT entry
14797
14798 self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
14799 pat.pmtPid = self.pmtPid;
14800 };
14801 /**
14802 * Parse out the relevant fields of a Program Map Table (PMT).
14803 * @param payload {Uint8Array} the PMT-specific portion of an MP2T
14804 * packet. The first byte in this array should be the table_id
14805 * field.
14806 * @param pmt {object} the object that should be decorated with
14807 * fields parsed from the PMT.
14808 */
14809
14810
14811 parsePmt = function (payload, pmt) {
14812 var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
14813 // take effect. We don't believe this should ever be the case
14814 // for HLS but we'll ignore "forward" PMT declarations if we see
14815 // them. Future PMT declarations have the current_next_indicator
14816 // set to zero.
14817
14818 if (!(payload[5] & 0x01)) {
14819 return;
14820 } // overwrite any existing program map table
14821
14822
14823 self.programMapTable = {
14824 video: null,
14825 audio: null,
14826 'timed-metadata': {}
14827 }; // the mapping table ends at the end of the current section
14828
14829 sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
14830 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
14831 // long the program info descriptors are
14832
14833 programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
14834
14835 offset = 12 + programInfoLength;
14836
14837 while (offset < tableEnd) {
14838 var streamType = payload[offset];
14839 var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
14840 // TODO: should this be done for metadata too? for now maintain behavior of
14841 // multiple metadata streams
14842
14843 if (streamType === StreamTypes$2.H264_STREAM_TYPE && self.programMapTable.video === null) {
14844 self.programMapTable.video = pid;
14845 } else if (streamType === StreamTypes$2.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
14846 self.programMapTable.audio = pid;
14847 } else if (streamType === StreamTypes$2.METADATA_STREAM_TYPE) {
14848 // map pid to stream type for metadata streams
14849 self.programMapTable['timed-metadata'][pid] = streamType;
14850 } // move to the next table entry
14851 // skip past the elementary stream descriptors, if present
14852
14853
14854 offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
14855 } // record the map on the packet as well
14856
14857
14858 pmt.programMapTable = self.programMapTable;
14859 };
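    // Editor's note (worked example): each PMT entry starts with a stream type
    // byte followed by a 13-bit PID; for hypothetical entry bytes
    // [0x1b, 0x10, 0x00, ...] the stream type is 0x1b (H.264) and the PID is
    // (0x10 & 0x1f) << 8 | 0x00 === 0x1000.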
14860 /**
14861 * Deliver a new MP2T packet to the next stream in the pipeline.
14862 */
14863
14864
14865 this.push = function (packet) {
14866 var result = {},
14867 offset = 4;
14868 result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
14869
14870 result.pid = packet[1] & 0x1f;
14871 result.pid <<= 8;
14872 result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
14873 // fifth byte of the TS packet header. The adaptation field is
14874 // used to add stuffing to PES packets that don't fill a complete
14875 // TS packet, and to specify some forms of timing and control data
14876 // that we do not currently use.
14877
14878 if ((packet[3] & 0x30) >>> 4 > 0x01) {
14879 offset += packet[offset] + 1;
14880 } // parse the rest of the packet based on the type
14881
14882
14883 if (result.pid === 0) {
14884 result.type = 'pat';
14885 parsePsi(packet.subarray(offset), result);
14886 this.trigger('data', result);
14887 } else if (result.pid === this.pmtPid) {
14888 result.type = 'pmt';
14889 parsePsi(packet.subarray(offset), result);
14890 this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
14891
14892 while (this.packetsWaitingForPmt.length) {
14893 this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
14894 }
14895 } else if (this.programMapTable === undefined) {
14896 // When we have not seen a PMT yet, defer further processing of
14897 // PES packets until one has been parsed
14898 this.packetsWaitingForPmt.push([packet, offset, result]);
14899 } else {
14900 this.processPes_(packet, offset, result);
14901 }
14902 };
14903
14904 this.processPes_ = function (packet, offset, result) {
14905 // set the appropriate stream type
14906 if (result.pid === this.programMapTable.video) {
14907 result.streamType = StreamTypes$2.H264_STREAM_TYPE;
14908 } else if (result.pid === this.programMapTable.audio) {
14909 result.streamType = StreamTypes$2.ADTS_STREAM_TYPE;
14910 } else {
14911 // if not video or audio, it is timed-metadata or unknown
14912 // if unknown, streamType will be undefined
14913 result.streamType = this.programMapTable['timed-metadata'][result.pid];
14914 }
14915
14916 result.type = 'pes';
14917 result.data = packet.subarray(offset);
14918 this.trigger('data', result);
14919 };
14920 };
14921
14922 TransportParseStream.prototype = new Stream$4();
14923 TransportParseStream.STREAM_TYPES = {
14924 h264: 0x1b,
14925 adts: 0x0f
14926 };
14927 /**
14928 * Reconstitutes program elementary stream (PES) packets from parsed
14929 * transport stream packets. That is, if you pipe an
14930 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
14931 * events will be events which capture the bytes for individual PES
14932 * packets plus relevant metadata that has been extracted from the
14933 * container.
14934 */
14935
14936 ElementaryStream = function () {
14937 var self = this,
14938 segmentHadPmt = false,
14939 // PES packet fragments
14940 video = {
14941 data: [],
14942 size: 0
14943 },
14944 audio = {
14945 data: [],
14946 size: 0
14947 },
14948 timedMetadata = {
14949 data: [],
14950 size: 0
14951 },
14952 programMapTable,
14953 parsePes = function (payload, pes) {
14954 var ptsDtsFlags;
14955 const startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array
14956
14957 pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has ts packets
14958 // that are frame data that is continuing from the previous fragment. This
14959 // is to check that the pes data is the start of a new pes payload
14960
14961 if (startPrefix !== 1) {
14962 return;
14963 } // get the packet length, this will be 0 for video
14964
14965
14966 pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
14967
14968 pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
14969 // and a DTS value. Determine what combination of values is
14970 // available to work with.
14971
14972 ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
14973 // performs all bitwise operations on 32-bit integers, but it
14974 // supports a much greater integer range (53 bits of precision) using standard
14975 // mathematical operations.
14976 // We construct a 31-bit value using bitwise operators over the 31
14977 // most significant bits and then multiply by 4 (equal to a left-shift
14978 // of 2) before we add the final 2 least significant bits of the
14979 // timestamp (equal to an OR.)
14980
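        // Editor's note (worked example): if the top 31 bits decode to
        // N === 1073741824 (bit 30 set) and the low 2 bits are 3, the code
        // yields N * 4 + 3 === 4294967299, a value beyond what 32-bit bitwise
        // operators alone could represent.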
14981 if (ptsDtsFlags & 0xC0) {
14982 // the PTS and DTS are not written out directly. For information
14983 // on how they are encoded, see
14984 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
14985 pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
14986 pes.pts *= 4; // Left shift by 2
14987
14988 pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
14989
14990 pes.dts = pes.pts;
14991
14992 if (ptsDtsFlags & 0x40) {
14993 pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
14994 pes.dts *= 4; // Left shift by 2
14995
14996 pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
14997 }
14998 } // the data section starts immediately after the PES header.
14999 // pes_header_data_length specifies the number of header bytes
15000 // that follow the last byte of the field.
15001
15002
15003 pes.data = payload.subarray(9 + payload[8]);
15004 },
15005
15006 /**
15007 * Pass completely parsed PES packets to the next stream in the pipeline
15008 **/
15009 flushStream = function (stream, type, forceFlush) {
15010 var packetData = new Uint8Array(stream.size),
15011 event = {
15012 type: type
15013 },
15014 i = 0,
15015 offset = 0,
15016 packetFlushable = false,
15017 fragment; // do nothing if there is not enough buffered data for a complete
15018 // PES header
15019
15020 if (!stream.data.length || stream.size < 9) {
15021 return;
15022 }
15023
15024 event.trackId = stream.data[0].pid; // reassemble the packet
15025
15026 for (i = 0; i < stream.data.length; i++) {
15027 fragment = stream.data[i];
15028 packetData.set(fragment.data, offset);
15029 offset += fragment.data.byteLength;
15030 } // parse assembled packet's PES header
15031
15032
15033 parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
15034 // check that there is enough stream data to fill the packet
15035
15036 packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
15037
15038 if (forceFlush || packetFlushable) {
15039 stream.size = 0;
15040 stream.data.length = 0;
15041 } // only emit packets that are complete. this is to avoid assembling
15042 // incomplete PES packets due to poor segmentation
15043
15044
15045 if (packetFlushable) {
15046 self.trigger('data', event);
15047 }
15048 };
15049
15050 ElementaryStream.prototype.init.call(this);
15051 /**
15052 * Identifies M2TS packet types and parses PES packets using metadata
15053 * parsed from the PMT
15054 **/
15055
15056 this.push = function (data) {
15057 ({
15058 pat: function () { // we have to wait for the PMT to arrive as well before we
15059 // have any meaningful metadata
15060 },
15061 pes: function () {
15062 var stream, streamType;
15063
15064 switch (data.streamType) {
15065 case StreamTypes$2.H264_STREAM_TYPE:
15066 stream = video;
15067 streamType = 'video';
15068 break;
15069
15070 case StreamTypes$2.ADTS_STREAM_TYPE:
15071 stream = audio;
15072 streamType = 'audio';
15073 break;
15074
15075 case StreamTypes$2.METADATA_STREAM_TYPE:
15076 stream = timedMetadata;
15077 streamType = 'timed-metadata';
15078 break;
15079
15080 default:
15081 // ignore unknown stream types
15082 return;
15083 } // if a new packet is starting, we can flush the completed
15084 // packet
15085
15086
15087 if (data.payloadUnitStartIndicator) {
15088 flushStream(stream, streamType, true);
15089 } // buffer this fragment until we are sure we've received the
15090 // complete payload
15091
15092
15093 stream.data.push(data);
15094 stream.size += data.data.byteLength;
15095 },
15096 pmt: function () {
15097 var event = {
15098 type: 'metadata',
15099 tracks: []
15100 };
15101 programMapTable = data.programMapTable; // translate audio and video streams to tracks
15102
15103 if (programMapTable.video !== null) {
15104 event.tracks.push({
15105 timelineStartInfo: {
15106 baseMediaDecodeTime: 0
15107 },
15108 id: +programMapTable.video,
15109 codec: 'avc',
15110 type: 'video'
15111 });
15112 }
15113
15114 if (programMapTable.audio !== null) {
15115 event.tracks.push({
15116 timelineStartInfo: {
15117 baseMediaDecodeTime: 0
15118 },
15119 id: +programMapTable.audio,
15120 codec: 'adts',
15121 type: 'audio'
15122 });
15123 }
15124
15125 segmentHadPmt = true;
15126 self.trigger('data', event);
15127 }
15128 })[data.type]();
15129 };
15130
15131 this.reset = function () {
15132 video.size = 0;
15133 video.data.length = 0;
15134 audio.size = 0;
15135 audio.data.length = 0;
15136 this.trigger('reset');
15137 };
15138 /**
15139 * Flush any remaining input. Video PES packets may be of variable
15140 * length. Normally, the start of a new video packet can trigger the
15141 * finalization of the previous packet. That is not possible if no
15142 * more video is forthcoming, however. In that case, some other
15143 * mechanism (like the end of the file) has to be employed. When it is
15144 * clear that no additional data is forthcoming, calling this method
15145 * will flush the buffered packets.
15146 */
15147
15148
15149 this.flushStreams_ = function () {
15150 // !!THIS ORDER IS IMPORTANT!!
15151 // video first then audio
15152 flushStream(video, 'video');
15153 flushStream(audio, 'audio');
15154 flushStream(timedMetadata, 'timed-metadata');
15155 };
15156
15157 this.flush = function () {
15158 // if on flush we haven't had a pmt emitted
15159 // and we have a pmt to emit. emit the pmt
15160 // so that we trigger a trackinfo downstream.
15161 if (!segmentHadPmt && programMapTable) {
15162 var pmt = {
15163 type: 'metadata',
15164 tracks: []
15165 }; // translate audio and video streams to tracks
15166
15167 if (programMapTable.video !== null) {
15168 pmt.tracks.push({
15169 timelineStartInfo: {
15170 baseMediaDecodeTime: 0
15171 },
15172 id: +programMapTable.video,
15173 codec: 'avc',
15174 type: 'video'
15175 });
15176 }
15177
15178 if (programMapTable.audio !== null) {
15179 pmt.tracks.push({
15180 timelineStartInfo: {
15181 baseMediaDecodeTime: 0
15182 },
15183 id: +programMapTable.audio,
15184 codec: 'adts',
15185 type: 'audio'
15186 });
15187 }
15188
15189 self.trigger('data', pmt);
15190 }
15191
15192 segmentHadPmt = false;
15193 this.flushStreams_();
15194 this.trigger('done');
15195 };
15196 };
15197
15198 ElementaryStream.prototype = new Stream$4();
15199 var m2ts$1 = {
15200 PAT_PID: 0x0000,
15201 MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
15202 TransportPacketStream: TransportPacketStream,
15203 TransportParseStream: TransportParseStream,
15204 ElementaryStream: ElementaryStream,
15205 TimestampRolloverStream: TimestampRolloverStream,
15206 CaptionStream: CaptionStream$1.CaptionStream,
15207 Cea608Stream: CaptionStream$1.Cea608Stream,
15208 Cea708Stream: CaptionStream$1.Cea708Stream,
15209 MetadataStream: metadataStream
15210 };
15211
15212 for (var type in StreamTypes$2) {
15213 if (StreamTypes$2.hasOwnProperty(type)) {
15214 m2ts$1[type] = StreamTypes$2[type];
15215 }
15216 }
15217
15218 var m2ts_1 = m2ts$1;
15219 /**
15220 * mux.js
15221 *
15222 * Copyright (c) Brightcove
15223 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
15224 */
15225
15226 var Stream$3 = stream;
15227 var ONE_SECOND_IN_TS$2 = clock$2.ONE_SECOND_IN_TS;
15228 var AdtsStream$1;
15229 var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
15230 /*
15231 * Accepts an ElementaryStream and emits data events with parsed
15232 * AAC audio frames from the individual packets. Input audio in ADTS
15233 * format is unpacked and re-emitted as AAC frames.
15234 *
15235 * @see http://wiki.multimedia.cx/index.php?title=ADTS
15236 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
15237 */
15238
15239 AdtsStream$1 = function (handlePartialSegments) {
15240 var buffer,
15241 frameNum = 0;
15242 AdtsStream$1.prototype.init.call(this);
15243
15244 this.skipWarn_ = function (start, end) {
15245 this.trigger('log', {
15246 level: 'warn',
15247 message: `adts skipping bytes ${start} to ${end} in frame ${frameNum} outside syncword`
15248 });
15249 };
15250
15251 this.push = function (packet) {
15252 var i = 0,
15253 frameLength,
15254 protectionSkipBytes,
15255 oldBuffer,
15256 sampleCount,
15257 adtsFrameDuration;
15258
15259 if (!handlePartialSegments) {
15260 frameNum = 0;
15261 }
15262
15263 if (packet.type !== 'audio') {
15264 // ignore non-audio data
15265 return;
15266 } // Prepend any data in the buffer to the input data so that we can parse
15267 // aac frames that cross a PES packet boundary
15268
15269
15270 if (buffer && buffer.length) {
15271 oldBuffer = buffer;
15272 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
15273 buffer.set(oldBuffer);
15274 buffer.set(packet.data, oldBuffer.byteLength);
15275 } else {
15276 buffer = packet.data;
15277 } // unpack any ADTS frames which have been fully received
15278 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
15279
15280
15281 var skip; // We use i + 7 here because we want to be able to parse the entire header.
15282 // If we don't have enough bytes to do that, then we definitely won't have a full frame.
15283
15284 while (i + 7 < buffer.length) {
15285 // Look for the start of an ADTS header.
15286 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
15287 if (typeof skip !== 'number') {
15288 skip = i;
15289 } // If a valid header was not found, jump one forward and attempt to
15290 // find a valid ADTS header starting at the next byte
15291
15292
15293 i++;
15294 continue;
15295 }
15296
15297 if (typeof skip === 'number') {
15298 this.skipWarn_(skip, i);
15299 skip = null;
15300 } // The protection skip bit tells us if we have 2 bytes of CRC data at the
15301 // end of the ADTS header
15302
15303
15304 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
15305 // end of the sync sequence
15306 // NOTE: frame length includes the size of the header
15307
15308 frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
15309 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
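        // Editor's note (worked example, hypothetical header byte): with
        // buffer[i + 2] === 0x50, the sampling frequency index is
        // (0x50 & 0x3c) >>> 2 === 4, i.e. 44100 Hz, so a 1024-sample frame
        // spans 1024 * 90000 / 44100 ≈ 2090 ticks of the 90kHz clock.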
15310 adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
15311 // then we have to wait for more data
15312
15313 if (buffer.byteLength - i < frameLength) {
15314 break;
15315 } // Otherwise, deliver the complete AAC frame
15316
15317
15318 this.trigger('data', {
15319 pts: packet.pts + frameNum * adtsFrameDuration,
15320 dts: packet.dts + frameNum * adtsFrameDuration,
15321 sampleCount: sampleCount,
15322 audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
15323 channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
15324 samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
15325 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
15326 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
15327 samplesize: 16,
15328 // data is the frame without its header
15329 data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
15330 });
15331 frameNum++;
15332 i += frameLength;
15333 }
15334
15335 if (typeof skip === 'number') {
15336 this.skipWarn_(skip, i);
15337 skip = null;
15338 } // remove processed bytes from the buffer.
15339
15340
15341 buffer = buffer.subarray(i);
15342 };
15343
15344 this.flush = function () {
15345 frameNum = 0;
15346 this.trigger('done');
15347 };
15348
15349 this.reset = function () {
15350 buffer = void 0;
15351 this.trigger('reset');
15352 };
15353
15354 this.endTimeline = function () {
15355 buffer = void 0;
15356 this.trigger('endedtimeline');
15357 };
15358 };
15359
15360 AdtsStream$1.prototype = new Stream$3();
15361 var adts = AdtsStream$1;
15362 /**
15363 * mux.js
15364 *
15365 * Copyright (c) Brightcove
15366 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
15367 */
15368
15369 var ExpGolomb$1;
15370 /**
15371 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
15372 * scheme used by h264.
15373 */
15374
15375 ExpGolomb$1 = function (workingData) {
15376 var // the number of bytes left to examine in workingData
15377 workingBytesAvailable = workingData.byteLength,
15378 // the current word being examined
15379 workingWord = 0,
15380 // :uint
15381 // the number of bits left to examine in the current word
15382 workingBitsAvailable = 0; // :uint;
15383 // ():uint
15384
15385 this.length = function () {
15386 return 8 * workingBytesAvailable;
15387 }; // ():uint
15388
15389
15390 this.bitsAvailable = function () {
15391 return 8 * workingBytesAvailable + workingBitsAvailable;
15392 }; // ():void
15393
15394
15395 this.loadWord = function () {
15396 var position = workingData.byteLength - workingBytesAvailable,
15397 workingBytes = new Uint8Array(4),
15398 availableBytes = Math.min(4, workingBytesAvailable);
15399
15400 if (availableBytes === 0) {
15401 throw new Error('no bytes available');
15402 }
15403
15404 workingBytes.set(workingData.subarray(position, position + availableBytes));
15405 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
15406
15407 workingBitsAvailable = availableBytes * 8;
15408 workingBytesAvailable -= availableBytes;
15409 }; // (count:int):void
15410
15411
15412 this.skipBits = function (count) {
15413 var skipBytes; // :int
15414
15415 if (workingBitsAvailable > count) {
15416 workingWord <<= count;
15417 workingBitsAvailable -= count;
15418 } else {
15419 count -= workingBitsAvailable;
15420 skipBytes = Math.floor(count / 8);
15421 count -= skipBytes * 8;
15422 workingBytesAvailable -= skipBytes;
15423 this.loadWord();
15424 workingWord <<= count;
15425 workingBitsAvailable -= count;
15426 }
15427 }; // (size:int):uint
15428
15429
15430 this.readBits = function (size) {
15431 var bits = Math.min(workingBitsAvailable, size),
15432 // :uint
15433 valu = workingWord >>> (32 - bits); // :uint
15434 // if size > 31, handle error
15435
15436 workingBitsAvailable -= bits;
15437
15438 if (workingBitsAvailable > 0) {
15439 workingWord <<= bits;
15440 } else if (workingBytesAvailable > 0) {
15441 this.loadWord();
15442 }
15443
15444 bits = size - bits;
15445
15446 if (bits > 0) {
15447 return valu << bits | this.readBits(bits);
15448 }
15449
15450 return valu;
15451 }; // ():uint
15452
15453
15454 this.skipLeadingZeros = function () {
15455 var leadingZeroCount; // :uint
15456
15457 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
15458 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
15459 // the first bit of working word is 1
15460 workingWord <<= leadingZeroCount;
15461 workingBitsAvailable -= leadingZeroCount;
15462 return leadingZeroCount;
15463 }
15464 } // we exhausted workingWord and still have not found a 1
15465
15466
15467 this.loadWord();
15468 return leadingZeroCount + this.skipLeadingZeros();
15469 }; // ():void
15470
15471
15472 this.skipUnsignedExpGolomb = function () {
15473 this.skipBits(1 + this.skipLeadingZeros());
15474 }; // ():void
15475
15476
15477 this.skipExpGolomb = function () {
15478 this.skipBits(1 + this.skipLeadingZeros());
15479 }; // ():uint
15480
15481
15482 this.readUnsignedExpGolomb = function () {
15483 var clz = this.skipLeadingZeros(); // :uint
15484
15485 return this.readBits(clz + 1) - 1;
15486 }; // ():int
15487
15488
15489 this.readExpGolomb = function () {
15490 var valu = this.readUnsignedExpGolomb(); // :int
15491
15492 if (0x01 & valu) {
15493 // the number is odd if the low order bit is set
15494 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
15495 }
15496
15497 return -1 * (valu >>> 1); // divide by two then make it negative
15498 }; // Some convenience functions
15499 // :Boolean
15500
15501
15502 this.readBoolean = function () {
15503 return this.readBits(1) === 1;
15504 }; // ():int
15505
15506
15507 this.readUnsignedByte = function () {
15508 return this.readBits(8);
15509 };
15510
15511 this.loadWord();
15512 };
15513
15514 var expGolomb = ExpGolomb$1;
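  // Editor's sketch (worked example): in Exp-Golomb coding, the bitstring
  // 00101 decodes as two leading zeros, then readBits(3) === 0b101 === 5,
  // so readUnsignedExpGolomb() returns 5 - 1 === 4. The signed variant maps
  // 4 to -2 (even codes are negative, odd codes positive).
  // var eg = new ExpGolomb$1(new Uint8Array([0x28])); // 0x28 === 0b00101000
  // eg.readUnsignedExpGolomb(); //=> 4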
15515 /**
15516 * mux.js
15517 *
15518 * Copyright (c) Brightcove
15519 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
15520 */
15521
15522 var Stream$2 = stream;
15523 var ExpGolomb = expGolomb;
15524 var H264Stream$1, NalByteStream;
15525 var PROFILES_WITH_OPTIONAL_SPS_DATA;
15526 /**
15527 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
15528 */
15529
15530 NalByteStream = function () {
15531 var syncPoint = 0,
15532 i,
15533 buffer;
15534 NalByteStream.prototype.init.call(this);
15535 /*
15536 * Scans a byte stream and triggers a data event with the NAL units found.
15537 * @param {Object} data Event received from H264Stream
15538 * @param {Uint8Array} data.data The h264 byte stream to be scanned
15539 *
15540 * @see H264Stream.push
15541 */
15542
15543 this.push = function (data) {
15544 var swapBuffer;
15545
15546 if (!buffer) {
15547 buffer = data.data;
15548 } else {
15549 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
15550 swapBuffer.set(buffer);
15551 swapBuffer.set(data.data, buffer.byteLength);
15552 buffer = swapBuffer;
15553 }
15554
15555 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
15556 // scan for NAL unit boundaries
15557 // a match looks like this:
15558 // 0 0 1 .. NAL .. 0 0 1
15559 // ^ sync point ^ i
15560 // or this:
15561 // 0 0 1 .. NAL .. 0 0 0
15562 // ^ sync point ^ i
15563 // advance the sync point to a NAL start, if necessary
15564
15565 for (; syncPoint < len - 3; syncPoint++) {
15566 if (buffer[syncPoint + 2] === 1) {
15567 // the sync point is properly aligned
15568 i = syncPoint + 5;
15569 break;
15570 }
15571 }
15572
15573 while (i < len) {
15574 // look at the current byte to determine if we've hit the end of
15575 // a NAL unit boundary
15576 switch (buffer[i]) {
15577 case 0:
15578 // skip past non-sync sequences
15579 if (buffer[i - 1] !== 0) {
15580 i += 2;
15581 break;
15582 } else if (buffer[i - 2] !== 0) {
15583 i++;
15584 break;
15585 } // deliver the NAL unit if it isn't empty
15586
15587
15588 if (syncPoint + 3 !== i - 2) {
15589 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
15590 } // drop trailing zeroes
15591
15592
15593 do {
15594 i++;
15595 } while (buffer[i] !== 1 && i < len);
15596
15597 syncPoint = i - 2;
15598 i += 3;
15599 break;
15600
15601 case 1:
15602 // skip past non-sync sequences
15603 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
15604 i += 3;
15605 break;
15606 } // deliver the NAL unit
15607
15608
15609 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
15610 syncPoint = i - 2;
15611 i += 3;
15612 break;
15613
15614 default:
15615 // the current byte isn't a one or zero, so it cannot be part
15616 // of a sync sequence
15617 i += 3;
15618 break;
15619 }
15620 } // filter out the NAL units that were delivered
15621
15622
15623 buffer = buffer.subarray(syncPoint);
15624 i -= syncPoint;
15625 syncPoint = 0;
15626 };
15627
15628 this.reset = function () {
15629 buffer = null;
15630 syncPoint = 0;
15631 this.trigger('reset');
15632 };
15633
15634 this.flush = function () {
15635 // deliver the last buffered NAL unit
15636 if (buffer && buffer.byteLength > 3) {
15637 this.trigger('data', buffer.subarray(syncPoint + 3));
15638 } // reset the stream state
15639
15640
15641 buffer = null;
15642 syncPoint = 0;
15643 this.trigger('done');
15644 };
15645
15646 this.endTimeline = function () {
15647 this.flush();
15648 this.trigger('endedtimeline');
15649 };
15650 };
15651
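  // Editor's note (illustrative): given Annex B bytes
  // [0x00,0x00,0x01, 0x09,0xf0, 0x00,0x00,0x01, 0x67, ...], the scanner above
  // emits [0x09, 0xf0] (an access unit delimiter) once it sees the next
  // 0x000001 start code, then buffers the SPS bytes until more data arrives.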
15652 NalByteStream.prototype = new Stream$2(); // values of profile_idc that indicate additional fields are included in the SPS
15653 // see Recommendation ITU-T H.264 (4/2013),
15654 // 7.3.2.1.1 Sequence parameter set data syntax
15655
15656 PROFILES_WITH_OPTIONAL_SPS_DATA = {
15657 100: true,
15658 110: true,
15659 122: true,
15660 244: true,
15661 44: true,
15662 83: true,
15663 86: true,
15664 118: true,
15665 128: true,
15666 // TODO: the three profiles below don't
15667 // appear to have sps data in the specification anymore?
15668 138: true,
15669 139: true,
15670 134: true
15671 };
15672 /**
15673 * Accepts input from an ElementaryStream and produces H.264 NAL unit data
15674 * events.
15675 */
15676
15677 H264Stream$1 = function () {
15678 var nalByteStream = new NalByteStream(),
15679 self,
15680 trackId,
15681 currentPts,
15682 currentDts,
15683 discardEmulationPreventionBytes,
15684 readSequenceParameterSet,
15685 skipScalingList;
15686 H264Stream$1.prototype.init.call(this);
15687 self = this;
15688 /*
15689 * Pushes a packet from a stream onto the NalByteStream
15690 *
15691 * @param {Object} packet - A packet received from a stream
15692 * @param {Uint8Array} packet.data - The raw bytes of the packet
15693 * @param {Number} packet.dts - Decode timestamp of the packet
15694 * @param {Number} packet.pts - Presentation timestamp of the packet
15695 * @param {Number} packet.trackId - The id of the h264 track this packet came from
15696 * @param {('video'|'audio')} packet.type - The type of packet
15697 *
15698 */
15699
15700 this.push = function (packet) {
15701 if (packet.type !== 'video') {
15702 return;
15703 }
15704
15705 trackId = packet.trackId;
15706 currentPts = packet.pts;
15707 currentDts = packet.dts;
15708 nalByteStream.push(packet);
15709 };
15710 /*
15711 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
15712 * for the NALUs to the next stream component.
15713 * Also, preprocess caption and sequence parameter NALUs.
15714 *
15715 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
15716 * @see NalByteStream.push
15717 */
15718
15719
15720 nalByteStream.on('data', function (data) {
15721 var event = {
15722 trackId: trackId,
15723 pts: currentPts,
15724 dts: currentDts,
15725 data: data,
15726 nalUnitTypeCode: data[0] & 0x1f
15727 };
15728
15729 switch (event.nalUnitTypeCode) {
15730 case 0x05:
15731 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
15732 break;
15733
15734 case 0x06:
15735 event.nalUnitType = 'sei_rbsp';
15736 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
15737 break;
15738
15739 case 0x07:
15740 event.nalUnitType = 'seq_parameter_set_rbsp';
15741 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
15742 event.config = readSequenceParameterSet(event.escapedRBSP);
15743 break;
15744
15745 case 0x08:
15746 event.nalUnitType = 'pic_parameter_set_rbsp';
15747 break;
15748
15749 case 0x09:
15750 event.nalUnitType = 'access_unit_delimiter_rbsp';
15751 break;
15752 } // This triggers data on the H264Stream
15753
15754
15755 self.trigger('data', event);
15756 });
15757 nalByteStream.on('done', function () {
15758 self.trigger('done');
15759 });
15760 nalByteStream.on('partialdone', function () {
15761 self.trigger('partialdone');
15762 });
15763 nalByteStream.on('reset', function () {
15764 self.trigger('reset');
15765 });
15766 nalByteStream.on('endedtimeline', function () {
15767 self.trigger('endedtimeline');
15768 });
15769
15770 this.flush = function () {
15771 nalByteStream.flush();
15772 };
15773
15774 this.partialFlush = function () {
15775 nalByteStream.partialFlush();
15776 };
15777
15778 this.reset = function () {
15779 nalByteStream.reset();
15780 };
15781
15782 this.endTimeline = function () {
15783 nalByteStream.endTimeline();
15784 };
15785 /**
15786 * Advance the ExpGolomb decoder past a scaling list. The scaling
15787 * list is optionally transmitted as part of a sequence parameter
15788 * set and is not relevant to transmuxing.
15789 * @param count {number} the number of entries in this scaling list
15790 * @param expGolombDecoder {object} an ExpGolomb pointed to the
15791 * start of a scaling list
15792 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
15793 */
15794
15795
15796 skipScalingList = function (count, expGolombDecoder) {
15797 var lastScale = 8,
15798 nextScale = 8,
15799 j,
15800 deltaScale;
15801
15802 for (j = 0; j < count; j++) {
15803 if (nextScale !== 0) {
15804 deltaScale = expGolombDecoder.readExpGolomb();
15805 nextScale = (lastScale + deltaScale + 256) % 256;
15806 }
15807
15808 lastScale = nextScale === 0 ? lastScale : nextScale;
15809 }
15810 };
15811 /**
15812 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
15813 * Sequence Payload"
15814 * @param data {Uint8Array} the bytes of a RBSP from a NAL
15815 * unit
15816 * @return {Uint8Array} the RBSP without any Emulation
15817 * Prevention Bytes
15818 */
15819
15820
15821 discardEmulationPreventionBytes = function (data) {
15822 var length = data.byteLength,
15823 emulationPreventionBytesPositions = [],
15824 i = 1,
15825 newLength,
15826 newData; // Find all `Emulation Prevention Bytes`
15827
15828 while (i < length - 2) {
15829 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
15830 emulationPreventionBytesPositions.push(i + 2);
15831 i += 2;
15832 } else {
15833 i++;
15834 }
15835 } // If no Emulation Prevention Bytes were found just return the original
15836 // array
15837
15838
15839 if (emulationPreventionBytesPositions.length === 0) {
15840 return data;
15841 } // Create a new array to hold the NAL unit data
15842
15843
15844 newLength = length - emulationPreventionBytesPositions.length;
15845 newData = new Uint8Array(newLength);
15846 var sourceIndex = 0;
15847
15848 for (i = 0; i < newLength; sourceIndex++, i++) {
15849 if (sourceIndex === emulationPreventionBytesPositions[0]) {
15850 // Skip this byte
15851 sourceIndex++; // Remove this position index
15852
15853 emulationPreventionBytesPositions.shift();
15854 }
15855
15856 newData[i] = data[sourceIndex];
15857 }
15858
15859 return newData;
15860 };
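// Worked example (illustrative): in the escaped RBSP
// [0x67, 0x00, 0x00, 0x03, 0x01], the 0x03 at index 3 follows a
// 0x00 0x00 pair, so it is an emulation prevention byte and the
// returned payload is [0x67, 0x00, 0x00, 0x01].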
15861 /**
15862 * Read a sequence parameter set and return some interesting video
15863 * properties. A sequence parameter set is the H264 metadata that
15864 * describes the properties of upcoming video frames.
15865 * @param data {Uint8Array} the bytes of a sequence parameter set
15866 * @return {object} an object with configuration parsed from the
15867 * sequence parameter set, including the dimensions of the
15868 * associated video frames.
15869 */
15870
15871
15872 readSequenceParameterSet = function (data) {
15873 var frameCropLeftOffset = 0,
15874 frameCropRightOffset = 0,
15875 frameCropTopOffset = 0,
15876 frameCropBottomOffset = 0,
15877 expGolombDecoder,
15878 profileIdc,
15879 levelIdc,
15880 profileCompatibility,
15881 chromaFormatIdc,
15882 picOrderCntType,
15883 numRefFramesInPicOrderCntCycle,
15884 picWidthInMbsMinus1,
15885 picHeightInMapUnitsMinus1,
15886 frameMbsOnlyFlag,
15887 scalingListCount,
15888 sarRatio = [1, 1],
15889 aspectRatioIdc,
15890 i;
15891 expGolombDecoder = new ExpGolomb(data);
15892 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
15893
15894 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
15895
15896 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
15897
15898 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
15899 // some profiles have more optional data we don't need
15900
15901 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
15902 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
15903
15904 if (chromaFormatIdc === 3) {
15905 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
15906 }
15907
15908 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
15909
15910 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
15911
15912 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
15913
15914 if (expGolombDecoder.readBoolean()) {
15915 // seq_scaling_matrix_present_flag
15916 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
15917
15918 for (i = 0; i < scalingListCount; i++) {
15919 if (expGolombDecoder.readBoolean()) {
15920 // seq_scaling_list_present_flag[ i ]
15921 if (i < 6) {
15922 skipScalingList(16, expGolombDecoder);
15923 } else {
15924 skipScalingList(64, expGolombDecoder);
15925 }
15926 }
15927 }
15928 }
15929 }
15930
15931 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
15932
15933 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
15934
15935 if (picOrderCntType === 0) {
15936 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
15937 } else if (picOrderCntType === 1) {
15938 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
15939
15940 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
15941
15942 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
15943
15944 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
15945
15946 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
15947 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
15948 }
15949 }
15950
15951 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
15952
15953 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
15954
15955 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
15956 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
15957 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
15958
15959 if (frameMbsOnlyFlag === 0) {
15960 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
15961 }
15962
15963 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
15964
15965 if (expGolombDecoder.readBoolean()) {
15966 // frame_cropping_flag
15967 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
15968 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
15969 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
15970 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
15971 }
15972
15973 if (expGolombDecoder.readBoolean()) {
15974 // vui_parameters_present_flag
15975 if (expGolombDecoder.readBoolean()) {
15976 // aspect_ratio_info_present_flag
15977 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
15978
15979 switch (aspectRatioIdc) {
15980 case 1:
15981 sarRatio = [1, 1];
15982 break;
15983
15984 case 2:
15985 sarRatio = [12, 11];
15986 break;
15987
15988 case 3:
15989 sarRatio = [10, 11];
15990 break;
15991
15992 case 4:
15993 sarRatio = [16, 11];
15994 break;
15995
15996 case 5:
15997 sarRatio = [40, 33];
15998 break;
15999
16000 case 6:
16001 sarRatio = [24, 11];
16002 break;
16003
16004 case 7:
16005 sarRatio = [20, 11];
16006 break;
16007
16008 case 8:
16009 sarRatio = [32, 11];
16010 break;
16011
16012 case 9:
16013 sarRatio = [80, 33];
16014 break;
16015
16016 case 10:
16017 sarRatio = [18, 11];
16018 break;
16019
16020 case 11:
16021 sarRatio = [15, 11];
16022 break;
16023
16024 case 12:
16025 sarRatio = [64, 33];
16026 break;
16027
16028 case 13:
16029 sarRatio = [160, 99];
16030 break;
16031
16032 case 14:
16033 sarRatio = [4, 3];
16034 break;
16035
16036 case 15:
16037 sarRatio = [3, 2];
16038 break;
16039
16040 case 16:
16041 sarRatio = [2, 1];
16042 break;
16043
16044 case 255:
16045 {
sarRatio = [
  expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), // sar_width u(16)
  expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()  // sar_height u(16)
];
16047 break;
16048 }
16049 }
16050
// the sample aspect ratio scale (sarRatio[0] / sarRatio[1]) is not
// needed during transmuxing, so it is not computed here
16054 }
16055 }
16056
16057 return {
16058 profileIdc: profileIdc,
16059 levelIdc: levelIdc,
16060 profileCompatibility: profileCompatibility,
16061 width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
16062 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
16063 // sar is sample aspect ratio
16064 sarRatio: sarRatio
16065 };
16066 };
16067 };
16068
16069 H264Stream$1.prototype = new Stream$2();
16070 var h264 = {
16071 H264Stream: H264Stream$1,
16072 NalByteStream: NalByteStream
16073 };
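// Illustrative usage sketch (not part of the library): wiring an
// H264Stream to consume video PES packets. `packets` here is a
// hypothetical array of {type: 'video', trackId, pts, dts, data}
// objects, as an ElementaryStream would emit.
var exampleH264StreamUsage = function (packets) {
  var h264Stream = new h264.H264Stream();
  h264Stream.on('data', function (nalUnit) {
    // nalUnit.nalUnitType is e.g. 'seq_parameter_set_rbsp'; for an SPS,
    // nalUnit.config carries the parsed width, height, profile and level
  });
  packets.forEach(function (packet) {
    h264Stream.push(packet);
  });
  h264Stream.flush(); // emit any NAL unit still buffered by the byte stream
};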
16074 /**
16075 * mux.js
16076 *
16077 * Copyright (c) Brightcove
16078 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
16079 *
 * Utilities to detect basic properties and metadata about AAC data.
16081 */
16082
16083 var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
16084
16085 var parseId3TagSize = function (header, byteIndex) {
16086 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
16087 flags = header[byteIndex + 5],
16088 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
16089
16090 returnSize = returnSize >= 0 ? returnSize : 0;
16091
16092 if (footerPresent) {
16093 return returnSize + 20;
16094 }
16095
16096 return returnSize + 10;
16097 };
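// Worked example (illustrative): for a tag header whose size bytes are
// 0x00 0x00 0x02 0x01, the syncsafe size is (2 << 7) | 1 = 257 bytes, so
// parseId3TagSize returns 257 + 10 (header), or 257 + 20 when the
// footer-present flag (0x10) is set in the flags byte.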
16098
16099 var getId3Offset = function (data, offset) {
16100 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
16101 return offset;
16102 }
16103
16104 offset += parseId3TagSize(data, offset);
16105 return getId3Offset(data, offset);
16106 }; // TODO: use vhs-utils
16107
16108
16109 var isLikelyAacData$1 = function (data) {
16110 var offset = getId3Offset(data, 0);
16111 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, aka this
16112 // is not mp3 data but aac data.
16113 (data[offset + 1] & 0x16) === 0x10;
16114 };
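// Illustrative check: a buffer beginning 0xFF 0xF1 (after any leading ID3
// tags) passes, since the first twelve bits match the ADTS sync word and
// the two layer bits are zero. An MPEG-1 Layer III header such as
// 0xFF 0xFB fails, because its layer bits are non-zero.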
16115
16116 var parseSyncSafeInteger = function (data) {
16117 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
16118 }; // return a percent-encoded representation of the specified byte range
16119 // @see http://en.wikipedia.org/wiki/Percent-encoding
16120
16121
16122 var percentEncode = function (bytes, start, end) {
16123 var i,
16124 result = '';
16125
16126 for (i = start; i < end; i++) {
16127 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
16128 }
16129
16130 return result;
16131 }; // return the string representation of the specified byte range,
16132 // interpreted as ISO-8859-1.
16133
16134
16135 var parseIso88591 = function (bytes, start, end) {
16136 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
16137 };
16138
var parseAdtsSize = function (header, byteIndex) {
  // the 13-bit adts frame length is split across three bytes: the low two
  // bits of byte 3, all of byte 4, and the high three bits of byte 5
  var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
      middle = header[byteIndex + 4] << 3,
      highTwo = (header[byteIndex + 3] & 0x3) << 11;
  return highTwo | middle | lowThree;
};
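// Worked example (illustrative): with header bytes 3..5 equal to
// 0x41 0x20 0x04, highTwo = (0x41 & 0x3) << 11 = 2048, middle =
// 0x20 << 3 = 256 and lowThree = (0x04 & 0xE0) >> 5 = 0, giving a
// 13-bit frame length of 2304 bytes.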
16145
16146 var parseType$4 = function (header, byteIndex) {
16147 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
16148 return 'timed-metadata';
} else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
16150 return 'audio';
16151 }
16152
16153 return null;
16154 };
16155
16156 var parseSampleRate = function (packet) {
16157 var i = 0;
16158
16159 while (i + 5 < packet.length) {
16160 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
16161 // If a valid header was not found, jump one forward and attempt to
16162 // find a valid ADTS header starting at the next byte
16163 i++;
16164 continue;
16165 }
16166
16167 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
16168 }
16169
16170 return null;
16171 };
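// Illustrative read: if the third byte of the first valid ADTS header is
// 0x50, the sampling_frequency_index is (0x50 & 0x3c) >>> 2 = 4, which
// maps to 44100 Hz in ADTS_SAMPLING_FREQUENCIES.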
16172
16173 var parseAacTimestamp = function (packet) {
16174 var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
16175
16176 frameStart = 10;
16177
16178 if (packet[5] & 0x40) {
16179 // advance the frame start past the extended header
16180 frameStart += 4; // header size field
16181
16182 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
16183 } // parse one or more ID3 frames
16184 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
16185
16186
16187 do {
16188 // determine the number of bytes in this frame
16189 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
16190
16191 if (frameSize < 1) {
16192 return null;
16193 }
16194
16195 frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
16196
16197 if (frameHeader === 'PRIV') {
16198 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
16199
16200 for (var i = 0; i < frame.byteLength; i++) {
16201 if (frame[i] === 0) {
16202 var owner = parseIso88591(frame, 0, i);
16203
16204 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
16205 var d = frame.subarray(i + 1);
16206 var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
16207 size *= 4;
16208 size += d[7] & 0x03;
16209 return size;
16210 }
16211
16212 break;
16213 }
16214 }
16215 }
16216
16217 frameStart += 10; // advance past the frame header
16218
16219 frameStart += frameSize; // advance past the frame body
16220 } while (frameStart < packet.byteLength);
16221
16222 return null;
16223 };
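// Note on the timestamp math above (illustrative): the PRIV payload holds
// a 33-bit, 90kHz PES timestamp. Because JavaScript bitwise operators
// truncate to 32 bits, the value is assembled as a 31-bit prefix that is
// then scaled by 4 (`size *= 4`) and combined with the final two bits
// (`d[7] & 0x03`) using ordinary arithmetic, keeping all 33 bits intact.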
16224
16225 var utils = {
16226 isLikelyAacData: isLikelyAacData$1,
16227 parseId3TagSize: parseId3TagSize,
16228 parseAdtsSize: parseAdtsSize,
16229 parseType: parseType$4,
16230 parseSampleRate: parseSampleRate,
16231 parseAacTimestamp: parseAacTimestamp
16232 };
16233 /**
16234 * mux.js
16235 *
16236 * Copyright (c) Brightcove
16237 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
16238 *
16239 * A stream-based aac to mp4 converter. This utility can be used to
16240 * deliver mp4s to a SourceBuffer on platforms that support native
16241 * Media Source Extensions.
16242 */
16243
16244 var Stream$1 = stream;
16245 var aacUtils = utils; // Constants
16246
16247 var AacStream$1;
16248 /**
16249 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
16250 */
16251
16252 AacStream$1 = function () {
16253 var everything = new Uint8Array(),
16254 timeStamp = 0;
16255 AacStream$1.prototype.init.call(this);
16256
16257 this.setTimestamp = function (timestamp) {
16258 timeStamp = timestamp;
16259 };
16260
16261 this.push = function (bytes) {
16262 var frameSize = 0,
16263 byteIndex = 0,
16264 bytesLeft,
16265 chunk,
16266 packet,
16267 tempLength; // If there are bytes remaining from the last segment, prepend them to the
16268 // bytes that were pushed in
16269
if (everything.length) {
  tempLength = everything.length;
  // hold on to the previously buffered bytes: once `everything` is
  // reassigned to the new zero-filled buffer they would otherwise be lost
  var leftover = everything;
  everything = new Uint8Array(bytes.byteLength + tempLength);
  everything.set(leftover.subarray(0, tempLength));
  everything.set(bytes, tempLength);
16275 } else {
16276 everything = bytes;
16277 }
16278
16279 while (everything.length - byteIndex >= 3) {
16280 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
16281 // Exit early because we don't have enough to parse
16282 // the ID3 tag header
16283 if (everything.length - byteIndex < 10) {
16284 break;
16285 } // check framesize
16286
16287
16288 frameSize = aacUtils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
16289 // to emit a full packet
16290 // Add to byteIndex to support multiple ID3 tags in sequence
16291
16292 if (byteIndex + frameSize > everything.length) {
16293 break;
16294 }
16295
16296 chunk = {
16297 type: 'timed-metadata',
16298 data: everything.subarray(byteIndex, byteIndex + frameSize)
16299 };
16300 this.trigger('data', chunk);
16301 byteIndex += frameSize;
16302 continue;
16303 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
16304 // Exit early because we don't have enough to parse
16305 // the ADTS frame header
16306 if (everything.length - byteIndex < 7) {
16307 break;
16308 }
16309
16310 frameSize = aacUtils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
16311 // to emit a full packet
16312
16313 if (byteIndex + frameSize > everything.length) {
16314 break;
16315 }
16316
16317 packet = {
16318 type: 'audio',
16319 data: everything.subarray(byteIndex, byteIndex + frameSize),
16320 pts: timeStamp,
16321 dts: timeStamp
16322 };
16323 this.trigger('data', packet);
16324 byteIndex += frameSize;
16325 continue;
16326 }
16327
16328 byteIndex++;
16329 }
16330
16331 bytesLeft = everything.length - byteIndex;
16332
16333 if (bytesLeft > 0) {
16334 everything = everything.subarray(byteIndex);
16335 } else {
16336 everything = new Uint8Array();
16337 }
16338 };
16339
16340 this.reset = function () {
16341 everything = new Uint8Array();
16342 this.trigger('reset');
16343 };
16344
16345 this.endTimeline = function () {
16346 everything = new Uint8Array();
16347 this.trigger('endedtimeline');
16348 };
16349 };
16350
16351 AacStream$1.prototype = new Stream$1();
16352 var aac = AacStream$1;
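// Illustrative usage sketch (not part of the library): feeding raw bytes
// through an AacStream. `bytes` is a hypothetical Uint8Array containing
// ADTS frames, possibly preceded by ID3 tags.
var exampleAacStreamUsage = function (bytes) {
  var aacStream = new aac();
  aacStream.on('data', function (chunk) {
    // chunk.type is 'timed-metadata' for ID3 tags or 'audio' for complete
    // ADTS frames; partial frames stay buffered until more bytes arrive
  });
  aacStream.push(bytes);
};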
16353 var AUDIO_PROPERTIES$1 = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
16354 var audioProperties = AUDIO_PROPERTIES$1;
16355 var VIDEO_PROPERTIES$1 = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
16356 var videoProperties = VIDEO_PROPERTIES$1;
16357 /**
16358 * mux.js
16359 *
16360 * Copyright (c) Brightcove
16361 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
16362 *
16363 * A stream-based mp2t to mp4 converter. This utility can be used to
16364 * deliver mp4s to a SourceBuffer on platforms that support native
16365 * Media Source Extensions.
16366 */
16367
16368 var Stream = stream;
16369 var mp4 = mp4Generator;
16370 var frameUtils = frameUtils$1;
16371 var audioFrameUtils = audioFrameUtils$1;
16372 var trackDecodeInfo = trackDecodeInfo$1;
16373 var m2ts = m2ts_1;
16374 var clock = clock$2;
16375 var AdtsStream = adts;
16376 var H264Stream = h264.H264Stream;
16377 var AacStream = aac;
16378 var isLikelyAacData = utils.isLikelyAacData;
16379 var ONE_SECOND_IN_TS$1 = clock$2.ONE_SECOND_IN_TS;
16380 var AUDIO_PROPERTIES = audioProperties;
16381 var VIDEO_PROPERTIES = videoProperties; // object types
16382
16383 var VideoSegmentStream, AudioSegmentStream, Transmuxer, CoalesceStream;
16384
16385 var retriggerForStream = function (key, event) {
16386 event.stream = key;
16387 this.trigger('log', event);
16388 };
16389
16390 var addPipelineLogRetriggers = function (transmuxer, pipeline) {
16391 var keys = Object.keys(pipeline);
16392
16393 for (var i = 0; i < keys.length; i++) {
16394 var key = keys[i]; // skip non-stream keys and headOfPipeline
16395 // which is just a duplicate
16396
16397 if (key === 'headOfPipeline' || !pipeline[key].on) {
16398 continue;
16399 }
16400
16401 pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
16402 }
16403 };
16404 /**
16405 * Compare two arrays (even typed) for same-ness
16406 */
16407
16408
16409 var arrayEquals = function (a, b) {
16410 var i;
16411
16412 if (a.length !== b.length) {
16413 return false;
16414 } // compare the value of each element in the array
16415
16416
16417 for (i = 0; i < a.length; i++) {
16418 if (a[i] !== b[i]) {
16419 return false;
16420 }
16421 }
16422
16423 return true;
16424 };
16425
16426 var generateSegmentTimingInfo = function (baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
16427 var ptsOffsetFromDts = startPts - startDts,
16428 decodeDuration = endDts - startDts,
16429 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
16430 // however, the player time values will reflect a start from the baseMediaDecodeTime.
16431 // In order to provide relevant values for the player times, base timing info on the
16432 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
16433
16434 return {
16435 start: {
16436 dts: baseMediaDecodeTime,
16437 pts: baseMediaDecodeTime + ptsOffsetFromDts
16438 },
16439 end: {
16440 dts: baseMediaDecodeTime + decodeDuration,
16441 pts: baseMediaDecodeTime + presentationDuration
16442 },
16443 prependedContentDuration: prependedContentDuration,
16444 baseMediaDecodeTime: baseMediaDecodeTime
16445 };
16446 };
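// Worked example (illustrative): for baseMediaDecodeTime = 9000,
// startDts = 90000, startPts = 93000, endDts = 180000, endPts = 183000,
// the pts offset is 3000 and both durations are 90000, so the reported
// timing is start {dts: 9000, pts: 12000} and end {dts: 99000, pts: 99000}
// (all values in 90kHz clock ticks).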
16447 /**
16448 * Constructs a single-track, ISO BMFF media segment from AAC data
16449 * events. The output of this stream can be fed to a SourceBuffer
16450 * configured with a suitable initialization segment.
16451 * @param track {object} track metadata configuration
16452 * @param options {object} transmuxer options object
16453 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
16454 * in the source; false to adjust the first segment to start at 0.
16455 */
16456
16457
16458 AudioSegmentStream = function (track, options) {
16459 var adtsFrames = [],
16460 sequenceNumber,
16461 earliestAllowedDts = 0,
16462 audioAppendStartTs = 0,
16463 videoBaseMediaDecodeTime = Infinity;
16464 options = options || {};
16465 sequenceNumber = options.firstSequenceNumber || 0;
16466 AudioSegmentStream.prototype.init.call(this);
16467
16468 this.push = function (data) {
16469 trackDecodeInfo.collectDtsInfo(track, data);
16470
16471 if (track) {
16472 AUDIO_PROPERTIES.forEach(function (prop) {
16473 track[prop] = data[prop];
16474 });
16475 } // buffer audio data until end() is called
16476
16477
16478 adtsFrames.push(data);
16479 };
16480
16481 this.setEarliestDts = function (earliestDts) {
16482 earliestAllowedDts = earliestDts;
16483 };
16484
16485 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
16486 videoBaseMediaDecodeTime = baseMediaDecodeTime;
16487 };
16488
16489 this.setAudioAppendStart = function (timestamp) {
16490 audioAppendStartTs = timestamp;
16491 };
16492
16493 this.flush = function () {
16494 var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
16495
16496 if (adtsFrames.length === 0) {
16497 this.trigger('done', 'AudioSegmentStream');
16498 return;
16499 }
16500
16501 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // the amount of silence prefixed to the audio is measured in video clock cycles rather than audio clock cycles
16503
16504 videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
16505 // samples (that is, adts frames) in the audio data
16506
track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
16508
16509 mdat = mp4.mdat(audioFrameUtils.concatenateFrameData(frames));
16510 adtsFrames = [];
16511 moof = mp4.moof(sequenceNumber, [track]);
16512 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
16513
16514 sequenceNumber++;
16515 boxes.set(moof);
16516 boxes.set(mdat, moof.byteLength);
16517 trackDecodeInfo.clearDtsInfo(track);
16518 frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
16519 // tests) on adding the timingInfo event. However, it seems unlikely that there's a
16520 // valid use-case where an init segment/data should be triggered without associated
16521 // frames. Leaving for now, but should be looked into.
16522
16523 if (frames.length) {
16524 segmentDuration = frames.length * frameDuration;
16525 this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
16526 // frame info is in video clock cycles. Convert to match expectation of
16527 // listeners (that all timestamps will be based on video clock cycles).
16528 clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
16529 frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
16530 this.trigger('timingInfo', {
16531 start: frames[0].pts,
16532 end: frames[0].pts + segmentDuration
16533 });
16534 }
16535
16536 this.trigger('data', {
16537 track: track,
16538 boxes: boxes
16539 });
16540 this.trigger('done', 'AudioSegmentStream');
16541 };
16542
16543 this.reset = function () {
16544 trackDecodeInfo.clearDtsInfo(track);
16545 adtsFrames = [];
16546 this.trigger('reset');
16547 };
16548 };
16549
16550 AudioSegmentStream.prototype = new Stream();
16551 /**
16552 * Constructs a single-track, ISO BMFF media segment from H264 data
16553 * events. The output of this stream can be fed to a SourceBuffer
16554 * configured with a suitable initialization segment.
16555 * @param track {object} track metadata configuration
16556 * @param options {object} transmuxer options object
16557 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
16558 * gopsToAlignWith list when attempting to align gop pts
16559 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
16560 * in the source; false to adjust the first segment to start at 0.
16561 */
16562
16563 VideoSegmentStream = function (track, options) {
16564 var sequenceNumber,
16565 nalUnits = [],
16566 gopsToAlignWith = [],
16567 config,
16568 pps;
16569 options = options || {};
16570 sequenceNumber = options.firstSequenceNumber || 0;
16571 VideoSegmentStream.prototype.init.call(this);
16572 delete track.minPTS;
16573 this.gopCache_ = [];
16574 /**
 * Constructs an ISO BMFF segment given H264 nalUnits
16576 * @param {Object} nalUnit A data event representing a nalUnit
16577 * @param {String} nalUnit.nalUnitType
16578 * @param {Object} nalUnit.config Properties for a mp4 track
16579 * @param {Uint8Array} nalUnit.data The nalUnit bytes
16580 * @see lib/codecs/h264.js
16581 **/
16582
16583 this.push = function (nalUnit) {
16584 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
16585
16586 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
16587 config = nalUnit.config;
16588 track.sps = [nalUnit.data];
16589 VIDEO_PROPERTIES.forEach(function (prop) {
16590 track[prop] = config[prop];
16591 }, this);
16592 }
16593
16594 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
16595 pps = nalUnit.data;
16596 track.pps = [nalUnit.data];
16597 } // buffer video until flush() is called
16598
16599
16600 nalUnits.push(nalUnit);
16601 };
16602 /**
16603 * Pass constructed ISO BMFF track and boxes on to the
16604 * next stream in the pipeline
16605 **/
16606
16607
16608 this.flush = function () {
16609 var frames,
16610 gopForFusion,
16611 gops,
16612 moof,
16613 mdat,
16614 boxes,
16615 prependedContentDuration = 0,
16616 firstGop,
16617 lastGop; // Throw away nalUnits at the start of the byte stream until
16618 // we find the first AUD
16619
16620 while (nalUnits.length) {
16621 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
16622 break;
16623 }
16624
16625 nalUnits.shift();
16626 } // Return early if no video data has been observed
16627
16628
16629 if (nalUnits.length === 0) {
16630 this.resetStream_();
16631 this.trigger('done', 'VideoSegmentStream');
16632 return;
16633 } // Organize the raw nal-units into arrays that represent
16634 // higher-level constructs such as frames and gops
16635 // (group-of-pictures)
16636
16637
16638 frames = frameUtils.groupNalsIntoFrames(nalUnits);
16639 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
16640 // a problem since MSE (on Chrome) requires a leading keyframe.
16641 //
16642 // We have two approaches to repairing this situation:
16643 // 1) GOP-FUSION:
16644 // This is where we keep track of the GOPS (group-of-pictures)
16645 // from previous fragments and attempt to find one that we can
16646 // prepend to the current fragment in order to create a valid
16647 // fragment.
16648 // 2) KEYFRAME-PULLING:
16649 // Here we search for the first keyframe in the fragment and
16650 // throw away all the frames between the start of the fragment
16651 // and that keyframe. We then extend the duration and pull the
16652 // PTS of the keyframe forward so that it covers the time range
16653 // of the frames that were disposed of.
16654 //
// #1 is far preferable to #2, which can cause "stuttering" but
16656 // requires more things to be just right.
16657
16658 if (!gops[0][0].keyFrame) {
16659 // Search for a gop for fusion from our gopCache
16660 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
16661
16662 if (gopForFusion) {
16663 // in order to provide more accurate timing information about the segment, save
16664 // the number of seconds prepended to the original segment due to GOP fusion
16665 prependedContentDuration = gopForFusion.duration;
16666 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
16667 // new gop at the beginning
16668
16669 gops.byteLength += gopForFusion.byteLength;
16670 gops.nalCount += gopForFusion.nalCount;
16671 gops.pts = gopForFusion.pts;
16672 gops.dts = gopForFusion.dts;
16673 gops.duration += gopForFusion.duration;
16674 } else {
16675 // If we didn't find a candidate gop fall back to keyframe-pulling
16676 gops = frameUtils.extendFirstKeyFrame(gops);
16677 }
16678 } // Trim gops to align with gopsToAlignWith
16679
16680
16681 if (gopsToAlignWith.length) {
16682 var alignedGops;
16683
16684 if (options.alignGopsAtEnd) {
16685 alignedGops = this.alignGopsAtEnd_(gops);
16686 } else {
16687 alignedGops = this.alignGopsAtStart_(gops);
16688 }
16689
16690 if (!alignedGops) {
16691 // save all the nals in the last GOP into the gop cache
16692 this.gopCache_.unshift({
16693 gop: gops.pop(),
16694 pps: track.pps,
16695 sps: track.sps
16696 }); // Keep a maximum of 6 GOPs in the cache
16697
16698 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
16699
nalUnits = []; // return early; no gops can be aligned with the desired gopsToAlignWith
16701
16702 this.resetStream_();
16703 this.trigger('done', 'VideoSegmentStream');
16704 return;
} // Some gops were trimmed; clear dts info so minSegmentDts and pts are correct
16706 // when recalculated before sending off to CoalesceStream
16707
16708
16709 trackDecodeInfo.clearDtsInfo(track);
16710 gops = alignedGops;
16711 }
16712
16713 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
16714 // samples (that is, frames) in the video data
16715
16716 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
16717
16718 mdat = mp4.mdat(frameUtils.concatenateNalData(gops));
16719 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
16720 this.trigger('processedGopsInfo', gops.map(function (gop) {
16721 return {
16722 pts: gop.pts,
16723 dts: gop.dts,
16724 byteLength: gop.byteLength
16725 };
16726 }));
16727 firstGop = gops[0];
16728 lastGop = gops[gops.length - 1];
16729 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
16730 this.trigger('timingInfo', {
16731 start: gops[0].pts,
16732 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
16733 }); // save all the nals in the last GOP into the gop cache
16734
16735 this.gopCache_.unshift({
16736 gop: gops.pop(),
16737 pps: track.pps,
16738 sps: track.sps
16739 }); // Keep a maximum of 6 GOPs in the cache
16740
16741 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
16742
16743 nalUnits = [];
16744 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
16745 this.trigger('timelineStartInfo', track.timelineStartInfo);
16746 moof = mp4.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
16747 // throwing away hundreds of media segment fragments
16748
16749 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
16750
16751 sequenceNumber++;
16752 boxes.set(moof);
16753 boxes.set(mdat, moof.byteLength);
16754 this.trigger('data', {
16755 track: track,
16756 boxes: boxes
16757 });
16758 this.resetStream_(); // Continue with the flush process now
16759
16760 this.trigger('done', 'VideoSegmentStream');
16761 };
16762
16763 this.reset = function () {
16764 this.resetStream_();
16765 nalUnits = [];
16766 this.gopCache_.length = 0;
16767 gopsToAlignWith.length = 0;
16768 this.trigger('reset');
16769 };
16770
16771 this.resetStream_ = function () {
16772 trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
16773 // for instance, when we are rendition switching
16774
16775 config = undefined;
16776 pps = undefined;
16777 }; // Search for a candidate Gop for gop-fusion from the gop cache and
16778 // return it or return null if no good candidate was found
16779
16780
16781 this.getGopForFusion_ = function (nalUnit) {
16782 var halfSecond = 45000,
16783 // Half-a-second in a 90khz clock
16784 allowableOverlap = 10000,
16785 // About 3 frames @ 30fps
16786 nearestDistance = Infinity,
16787 dtsDistance,
16788 nearestGopObj,
16789 currentGop,
16790 currentGopObj,
16791 i; // Search for the GOP nearest to the beginning of this nal unit
16792
16793 for (i = 0; i < this.gopCache_.length; i++) {
16794 currentGopObj = this.gopCache_[i];
16795 currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
16796
16797 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
16798 continue;
16799 } // Reject Gops that would require a negative baseMediaDecodeTime
16800
16801
16802 if (currentGop.dts < track.timelineStartInfo.dts) {
16803 continue;
16804 } // The distance between the end of the gop and the start of the nalUnit
16805
16806
16807 dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
16808 // a half-second of the nal unit
16809
16810 if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
16811 // Always use the closest GOP we found if there is more than
16812 // one candidate
16813 if (!nearestGopObj || nearestDistance > dtsDistance) {
16814 nearestGopObj = currentGopObj;
16815 nearestDistance = dtsDistance;
16816 }
16817 }
16818 }
16819
16820 if (nearestGopObj) {
16821 return nearestGopObj.gop;
16822 }
16823
16824 return null;
16825 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
16826 // of gopsToAlignWith starting from the START of the list
16827
16828
16829 this.alignGopsAtStart_ = function (gops) {
16830 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
16831 byteLength = gops.byteLength;
16832 nalCount = gops.nalCount;
16833 duration = gops.duration;
16834 alignIndex = gopIndex = 0;
16835
16836 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
16837 align = gopsToAlignWith[alignIndex];
16838 gop = gops[gopIndex];
16839
16840 if (align.pts === gop.pts) {
16841 break;
16842 }
16843
16844 if (gop.pts > align.pts) {
// the current gop starts after the gop we want to align on, so increment
16846 // align index
16847 alignIndex++;
16848 continue;
16849 } // current gop starts before the current gop we want to align on. so increment gop
16850 // index
16851
16852
16853 gopIndex++;
16854 byteLength -= gop.byteLength;
16855 nalCount -= gop.nalCount;
16856 duration -= gop.duration;
16857 }
16858
16859 if (gopIndex === 0) {
16860 // no gops to trim
16861 return gops;
16862 }
16863
16864 if (gopIndex === gops.length) {
16865 // all gops trimmed, skip appending all gops
16866 return null;
16867 }
16868
16869 alignedGops = gops.slice(gopIndex);
16870 alignedGops.byteLength = byteLength;
16871 alignedGops.duration = duration;
16872 alignedGops.nalCount = nalCount;
16873 alignedGops.pts = alignedGops[0].pts;
16874 alignedGops.dts = alignedGops[0].dts;
16875 return alignedGops;
16876 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
16877 // of gopsToAlignWith starting from the END of the list
16878
16879
16880 this.alignGopsAtEnd_ = function (gops) {
16881 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
16882 alignIndex = gopsToAlignWith.length - 1;
16883 gopIndex = gops.length - 1;
16884 alignEndIndex = null;
16885 matchFound = false;
16886
16887 while (alignIndex >= 0 && gopIndex >= 0) {
16888 align = gopsToAlignWith[alignIndex];
16889 gop = gops[gopIndex];
16890
16891 if (align.pts === gop.pts) {
16892 matchFound = true;
16893 break;
16894 }
16895
16896 if (align.pts > gop.pts) {
16897 alignIndex--;
16898 continue;
16899 }
16900
16901 if (alignIndex === gopsToAlignWith.length - 1) {
16902 // gop.pts is greater than the last alignment candidate. If no match is found
16903 // by the end of this loop, we still want to append gops that come after this
16904 // point
16905 alignEndIndex = gopIndex;
16906 }
16907
16908 gopIndex--;
16909 }
16910
16911 if (!matchFound && alignEndIndex === null) {
16912 return null;
16913 }
16914
16915 var trimIndex;
16916
16917 if (matchFound) {
16918 trimIndex = gopIndex;
16919 } else {
16920 trimIndex = alignEndIndex;
16921 }
16922
16923 if (trimIndex === 0) {
16924 return gops;
16925 }
16926
16927 var alignedGops = gops.slice(trimIndex);
16928 var metadata = alignedGops.reduce(function (total, gop) {
16929 total.byteLength += gop.byteLength;
16930 total.duration += gop.duration;
16931 total.nalCount += gop.nalCount;
16932 return total;
16933 }, {
16934 byteLength: 0,
16935 duration: 0,
16936 nalCount: 0
16937 });
16938 alignedGops.byteLength = metadata.byteLength;
16939 alignedGops.duration = metadata.duration;
16940 alignedGops.nalCount = metadata.nalCount;
16941 alignedGops.pts = alignedGops[0].pts;
16942 alignedGops.dts = alignedGops[0].dts;
16943 return alignedGops;
16944 };
16945
16946 this.alignGopsWith = function (newGopsToAlignWith) {
16947 gopsToAlignWith = newGopsToAlignWith;
16948 };
16949 };
16950
16951 VideoSegmentStream.prototype = new Stream();
16952 /**
 * A Stream that can combine multiple streams (i.e. audio & video)
16954 * into a single output segment for MSE. Also supports audio-only
16955 * and video-only streams.
16956 * @param options {object} transmuxer options object
16957 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
16958 * in the source; false to adjust the first segment to start at media timeline start.
16959 */
16960
16961 CoalesceStream = function (options, metadataStream) {
16962 // Number of Tracks per output segment
16963 // If greater than 1, we combine multiple
16964 // tracks into a single segment
16965 this.numberOfTracks = 0;
16966 this.metadataStream = metadataStream;
16967 options = options || {};
16968
16969 if (typeof options.remux !== 'undefined') {
16970 this.remuxTracks = !!options.remux;
16971 } else {
16972 this.remuxTracks = true;
16973 }
16974
16975 if (typeof options.keepOriginalTimestamps === 'boolean') {
16976 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
16977 } else {
16978 this.keepOriginalTimestamps = false;
16979 }
16980
16981 this.pendingTracks = [];
16982 this.videoTrack = null;
16983 this.pendingBoxes = [];
16984 this.pendingCaptions = [];
16985 this.pendingMetadata = [];
16986 this.pendingBytes = 0;
16987 this.emittedTracks = 0;
16988 CoalesceStream.prototype.init.call(this); // Take output from multiple
16989
16990 this.push = function (output) {
16991 // buffer incoming captions until the associated video segment
16992 // finishes
16993 if (output.content || output.text) {
16994 return this.pendingCaptions.push(output);
16995 } // buffer incoming id3 tags until the final flush
16996
16997
16998 if (output.frames) {
16999 return this.pendingMetadata.push(output);
17000 } // Add this track to the list of pending tracks and store
17001 // important information required for the construction of
17002 // the final segment
17003
17004
17005 this.pendingTracks.push(output.track);
17006 this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
17007 // We unshift audio and push video because
17008 // as of Chrome 75 when switching from
17009 // one init segment to another if the video
17010 // mdat does not appear after the audio mdat
17011 // only audio will play for the duration of our transmux.
17012
17013 if (output.track.type === 'video') {
17014 this.videoTrack = output.track;
17015 this.pendingBoxes.push(output.boxes);
17016 }
17017
17018 if (output.track.type === 'audio') {
17019 this.audioTrack = output.track;
17020 this.pendingBoxes.unshift(output.boxes);
17021 }
17022 };
17023 };
17024
17025 CoalesceStream.prototype = new Stream();
17026
17027 CoalesceStream.prototype.flush = function (flushSource) {
17028 var offset = 0,
17029 event = {
17030 captions: [],
17031 captionStreams: {},
17032 metadata: [],
17033 info: {}
17034 },
17035 caption,
17036 id3,
17037 initSegment,
17038 timelineStartPts = 0,
17039 i;
17040
17041 if (this.pendingTracks.length < this.numberOfTracks) {
17042 if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
17043 // Return because we haven't received a flush from a data-generating
// portion of the segment (meaning that we have only received meta-data
17045 // or captions.)
17046 return;
17047 } else if (this.remuxTracks) {
17048 // Return until we have enough tracks from the pipeline to remux (if we
17049 // are remuxing audio and video into a single MP4)
17050 return;
17051 } else if (this.pendingTracks.length === 0) {
17052 // In the case where we receive a flush without any data having been
17053 // received we consider it an emitted track for the purposes of coalescing
17054 // `done` events.
17055 // We do this for the case where there is an audio and video track in the
17056 // segment but no audio data. (seen in several playlists with alternate
17057 // audio tracks and no audio present in the main TS segments.)
17058 this.emittedTracks++;
17059
17060 if (this.emittedTracks >= this.numberOfTracks) {
17061 this.trigger('done');
17062 this.emittedTracks = 0;
17063 }
17064
17065 return;
17066 }
17067 }
17068
17069 if (this.videoTrack) {
17070 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
17071 VIDEO_PROPERTIES.forEach(function (prop) {
17072 event.info[prop] = this.videoTrack[prop];
17073 }, this);
17074 } else if (this.audioTrack) {
17075 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
17076 AUDIO_PROPERTIES.forEach(function (prop) {
17077 event.info[prop] = this.audioTrack[prop];
17078 }, this);
17079 }
17080
17081 if (this.videoTrack || this.audioTrack) {
17082 if (this.pendingTracks.length === 1) {
17083 event.type = this.pendingTracks[0].type;
17084 } else {
17085 event.type = 'combined';
17086 }
17087
17088 this.emittedTracks += this.pendingTracks.length;
17089 initSegment = mp4.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
17090
17091 event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
17092 // and track definitions
17093
17094 event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
17095
17096 event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
17097
17098 for (i = 0; i < this.pendingBoxes.length; i++) {
17099 event.data.set(this.pendingBoxes[i], offset);
17100 offset += this.pendingBoxes[i].byteLength;
17101 } // Translate caption PTS times into second offsets to match the
17102 // video timeline for the segment, and add track info
17103
17104
17105 for (i = 0; i < this.pendingCaptions.length; i++) {
17106 caption = this.pendingCaptions[i];
17107 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
17108 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
17109 event.captionStreams[caption.stream] = true;
17110 event.captions.push(caption);
17111 } // Translate ID3 frame PTS times into second offsets to match the
17112 // video timeline for the segment
17113
17114
17115 for (i = 0; i < this.pendingMetadata.length; i++) {
17116 id3 = this.pendingMetadata[i];
17117 id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
17118 event.metadata.push(id3);
17119 } // We add this to every single emitted segment even though we only need
17120 // it for the first
17121
17122
17123 event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
17124
17125 this.pendingTracks.length = 0;
17126 this.videoTrack = null;
17127 this.pendingBoxes.length = 0;
17128 this.pendingCaptions.length = 0;
17129 this.pendingBytes = 0;
17130 this.pendingMetadata.length = 0; // Emit the built segment
17131 // We include captions and ID3 tags for backwards compatibility,
17132 // ideally we should send only video and audio in the data event
17133
17134 this.trigger('data', event); // Emit each caption to the outside world
17135 // Ideally, this would happen immediately on parsing captions,
17136 // but we need to ensure that video data is sent back first
17137 // so that caption timing can be adjusted to match video timing
17138
17139 for (i = 0; i < event.captions.length; i++) {
17140 caption = event.captions[i];
17141 this.trigger('caption', caption);
17142 } // Emit each id3 tag to the outside world
17143 // Ideally, this would happen immediately on parsing the tag,
17144 // but we need to ensure that video data is sent back first
17145 // so that ID3 frame timing can be adjusted to match video timing
17146
17147
17148 for (i = 0; i < event.metadata.length; i++) {
17149 id3 = event.metadata[i];
17150 this.trigger('id3Frame', id3);
17151 }
17152 } // Only emit `done` if all tracks have been flushed and emitted
17153
17154
17155 if (this.emittedTracks >= this.numberOfTracks) {
17156 this.trigger('done');
17157 this.emittedTracks = 0;
17158 }
17159 };
17160
17161 CoalesceStream.prototype.setRemux = function (val) {
17162 this.remuxTracks = val;
17163 };
17164 /**
17165 * A Stream that expects MP2T binary data as input and produces
17166 * corresponding media segments, suitable for use with Media Source
17167 * Extension (MSE) implementations that support the ISO BMFF byte
17168 * stream format, like Chrome.
17169 */
17170
17171
17172 Transmuxer = function (options) {
17173 var self = this,
17174 hasFlushed = true,
17175 videoTrack,
17176 audioTrack;
17177 Transmuxer.prototype.init.call(this);
17178 options = options || {};
17179 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
17180 this.transmuxPipeline_ = {};
17181
17182 this.setupAacPipeline = function () {
17183 var pipeline = {};
17184 this.transmuxPipeline_ = pipeline;
17185 pipeline.type = 'aac';
17186 pipeline.metadataStream = new m2ts.MetadataStream(); // set up the parsing pipeline
17187
17188 pipeline.aacStream = new AacStream();
17189 pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
17190 pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
17191 pipeline.adtsStream = new AdtsStream();
17192 pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
17193 pipeline.headOfPipeline = pipeline.aacStream;
17194 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
17195 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
17196 pipeline.metadataStream.on('timestamp', function (frame) {
17197 pipeline.aacStream.setTimestamp(frame.timeStamp);
17198 });
17199 pipeline.aacStream.on('data', function (data) {
17200 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
17201 return;
17202 }
17203
17204 audioTrack = audioTrack || {
17205 timelineStartInfo: {
17206 baseMediaDecodeTime: self.baseMediaDecodeTime
17207 },
17208 codec: 'adts',
17209 type: 'audio'
17210 }; // hook up the audio segment stream to the first track with aac data
17211
17212 pipeline.coalesceStream.numberOfTracks++;
17213 pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack, options);
17214 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
17215 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
17216
17217 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
17218
17219 self.trigger('trackinfo', {
17220 hasAudio: !!audioTrack,
17221 hasVideo: !!videoTrack
17222 });
17223 }); // Re-emit any data coming from the coalesce stream to the outside world
17224
17225 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
17226
17227 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
17228 addPipelineLogRetriggers(this, pipeline);
17229 };
17230
17231 this.setupTsPipeline = function () {
17232 var pipeline = {};
17233 this.transmuxPipeline_ = pipeline;
17234 pipeline.type = 'ts';
17235 pipeline.metadataStream = new m2ts.MetadataStream(); // set up the parsing pipeline
17236
17237 pipeline.packetStream = new m2ts.TransportPacketStream();
17238 pipeline.parseStream = new m2ts.TransportParseStream();
17239 pipeline.elementaryStream = new m2ts.ElementaryStream();
17240 pipeline.timestampRolloverStream = new m2ts.TimestampRolloverStream();
17241 pipeline.adtsStream = new AdtsStream();
17242 pipeline.h264Stream = new H264Stream();
17243 pipeline.captionStream = new m2ts.CaptionStream(options);
17244 pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
17245 pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
17246
17247 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
17248 // demux the streams
17249
17250 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
17251 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
17252 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
17253
17254 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
17255 pipeline.elementaryStream.on('data', function (data) {
17256 var i;
17257
17258 if (data.type === 'metadata') {
17259 i = data.tracks.length; // scan the tracks listed in the metadata
17260
17261 while (i--) {
17262 if (!videoTrack && data.tracks[i].type === 'video') {
17263 videoTrack = data.tracks[i];
17264 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
17265 } else if (!audioTrack && data.tracks[i].type === 'audio') {
17266 audioTrack = data.tracks[i];
17267 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
17268 }
17269 } // hook up the video segment stream to the first track with h264 data
17270
17271
17272 if (videoTrack && !pipeline.videoSegmentStream) {
17273 pipeline.coalesceStream.numberOfTracks++;
17274 pipeline.videoSegmentStream = new VideoSegmentStream(videoTrack, options);
17275 pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
17276 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
17277 // When video emits timelineStartInfo data after a flush, we forward that
17278 // info to the AudioSegmentStream, if it exists, because video timeline
17279 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
17280 // because this is a particularly subtle form of timestamp alteration.
17281 if (audioTrack && !options.keepOriginalTimestamps) {
17282 audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
17283 // very earliest DTS we have seen in video because Chrome will
17284 // interpret any video track with a baseMediaDecodeTime that is
17285 // non-zero as a gap.
17286
17287 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
17288 }
17289 });
17290 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
17291 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
17292 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
17293 if (audioTrack) {
17294 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
17295 }
17296 });
17297 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
17298
17299 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
17300 }
17301
17302 if (audioTrack && !pipeline.audioSegmentStream) {
17303 // hook up the audio segment stream to the first track with aac data
17304 pipeline.coalesceStream.numberOfTracks++;
17305 pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack, options);
17306 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
17307 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
17308 pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
17309
17310 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
17311 } // emit pmt info
17312
17313
17314 self.trigger('trackinfo', {
17315 hasAudio: !!audioTrack,
17316 hasVideo: !!videoTrack
17317 });
17318 }
17319 }); // Re-emit any data coming from the coalesce stream to the outside world
17320
17321 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
17322 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
17323 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
17324 self.trigger('id3Frame', id3Frame);
17325 });
17326 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
17327
17328 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
17329 addPipelineLogRetriggers(this, pipeline);
17330 }; // hook up the segment streams once track metadata is delivered
17331
17332
17333 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
17334 var pipeline = this.transmuxPipeline_;
17335
17336 if (!options.keepOriginalTimestamps) {
17337 this.baseMediaDecodeTime = baseMediaDecodeTime;
17338 }
17339
17340 if (audioTrack) {
17341 audioTrack.timelineStartInfo.dts = undefined;
17342 audioTrack.timelineStartInfo.pts = undefined;
17343 trackDecodeInfo.clearDtsInfo(audioTrack);
17344
17345 if (pipeline.audioTimestampRolloverStream) {
17346 pipeline.audioTimestampRolloverStream.discontinuity();
17347 }
17348 }
17349
17350 if (videoTrack) {
17351 if (pipeline.videoSegmentStream) {
17352 pipeline.videoSegmentStream.gopCache_ = [];
17353 }
17354
17355 videoTrack.timelineStartInfo.dts = undefined;
17356 videoTrack.timelineStartInfo.pts = undefined;
17357 trackDecodeInfo.clearDtsInfo(videoTrack);
17358 pipeline.captionStream.reset();
17359 }
17360
17361 if (pipeline.timestampRolloverStream) {
17362 pipeline.timestampRolloverStream.discontinuity();
17363 }
17364 };
17365
17366 this.setAudioAppendStart = function (timestamp) {
17367 if (audioTrack) {
17368 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
17369 }
17370 };
17371
17372 this.setRemux = function (val) {
17373 var pipeline = this.transmuxPipeline_;
17374 options.remux = val;
17375
17376 if (pipeline && pipeline.coalesceStream) {
17377 pipeline.coalesceStream.setRemux(val);
17378 }
17379 };
17380
17381 this.alignGopsWith = function (gopsToAlignWith) {
17382 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
17383 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
17384 }
17385 };
17386
17387 this.getLogTrigger_ = function (key) {
17388 var self = this;
17389 return function (event) {
17390 event.stream = key;
17391 self.trigger('log', event);
17392 };
17393 }; // feed incoming data to the front of the parsing pipeline
17394
17395
17396 this.push = function (data) {
17397 if (hasFlushed) {
17398 var isAac = isLikelyAacData(data);
17399
17400 if (isAac && this.transmuxPipeline_.type !== 'aac') {
17401 this.setupAacPipeline();
17402 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
17403 this.setupTsPipeline();
17404 }
17405
17406 hasFlushed = false;
17407 }
17408
17409 this.transmuxPipeline_.headOfPipeline.push(data);
17410 }; // flush any buffered data
17411
17412
17413 this.flush = function () {
17414 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
17415
17416 this.transmuxPipeline_.headOfPipeline.flush();
17417 };
17418
17419 this.endTimeline = function () {
17420 this.transmuxPipeline_.headOfPipeline.endTimeline();
17421 };
17422
17423 this.reset = function () {
17424 if (this.transmuxPipeline_.headOfPipeline) {
17425 this.transmuxPipeline_.headOfPipeline.reset();
17426 }
17427 }; // Caption data has to be reset when seeking outside buffered range
17428
17429
17430 this.resetCaptions = function () {
17431 if (this.transmuxPipeline_.captionStream) {
17432 this.transmuxPipeline_.captionStream.reset();
17433 }
17434 };
17435 };
17436
17437 Transmuxer.prototype = new Stream();
17438 var transmuxer = {
17439 Transmuxer: Transmuxer,
17440 VideoSegmentStream: VideoSegmentStream,
17441 AudioSegmentStream: AudioSegmentStream,
17442 AUDIO_PROPERTIES: AUDIO_PROPERTIES,
17443 VIDEO_PROPERTIES: VIDEO_PROPERTIES,
17444 // exported for testing
17445 generateSegmentTimingInfo: generateSegmentTimingInfo
17446 };
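  // Illustrative usage sketch (not part of the library): `tsBytes` is a
  // hypothetical Uint8Array containing an MPEG-TS segment. Transmuxer extends
  // Stream, so output is delivered via events:
  //
  //   var t = new transmuxer.Transmuxer({ remux: true });
  //   t.on('data', function (segment) { /* fMP4 output */ });
  //   t.on('done', function () { /* flush complete */ });
  //   t.push(tsBytes);
  //   t.flush();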
17447 /**
17448 * mux.js
17449 *
17450 * Copyright (c) Brightcove
17451 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
17452 */
17453
17454 var toUnsigned$3 = function (value) {
17455 return value >>> 0;
17456 };
17457
17458 var toHexString$1 = function (value) {
17459 return ('00' + value.toString(16)).slice(-2);
17460 };
17461
17462 var bin = {
17463 toUnsigned: toUnsigned$3,
17464 toHexString: toHexString$1
17465 };
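  // Illustrative sketch (not part of the library): toUnsigned reinterprets a
  // signed 32-bit result as unsigned and toHexString zero-pads a byte:
  //
  //   bin.toUnsigned(-1);    // => 4294967295
  //   bin.toHexString(0x0a); // => '0a'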
17466
17467 var parseType$3 = function (buffer) {
17468 var result = '';
17469 result += String.fromCharCode(buffer[0]);
17470 result += String.fromCharCode(buffer[1]);
17471 result += String.fromCharCode(buffer[2]);
17472 result += String.fromCharCode(buffer[3]);
17473 return result;
17474 };
17475
17476 var parseType_1 = parseType$3;
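  // Illustrative sketch (not part of the library): parseType_1 reads a box's
  // four-character type code from four bytes:
  //
  //   parseType_1(new Uint8Array([0x6d, 0x6f, 0x6f, 0x76])); // => 'moov'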
17477 var toUnsigned$2 = bin.toUnsigned;
17478 var parseType$2 = parseType_1;
17479
17480 var findBox$2 = function (data, path) {
17481 var results = [],
17482 i,
17483 size,
17484 type,
17485 end,
17486 subresults;
17487
17488 if (!path.length) {
17489 // short-circuit the search for empty paths
17490 return null;
17491 }
17492
17493 for (i = 0; i < data.byteLength;) {
17494 size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
17495 type = parseType$2(data.subarray(i + 4, i + 8));
17496 end = size > 1 ? i + size : data.byteLength;
17497
17498 if (type === path[0]) {
17499 if (path.length === 1) {
17500 // this is the end of the path and we've found the box we were
17501 // looking for
17502 results.push(data.subarray(i + 8, end));
17503 } else {
17504 // recursively search for the next box along the path
17505 subresults = findBox$2(data.subarray(i + 8, end), path.slice(1));
17506
17507 if (subresults.length) {
17508 results = results.concat(subresults);
17509 }
17510 }
17511 }
17512
17513 i = end;
17514 } // we've finished searching all of data
17515
17516
17517 return results;
17518 };
17519
17520 var findBox_1 = findBox$2;
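  // Illustrative sketch (not part of the library): findBox_1 walks nested box
  // paths in a single call. Assuming `segmentBytes` is a Uint8Array holding an
  // fMP4 segment (hypothetical name):
  //
  //   var trafs = findBox_1(segmentBytes, ['moof', 'traf']);
  //   var truns = findBox_1(segmentBytes, ['moof', 'traf', 'trun']);
  //
  // Each entry of the returned array is a Uint8Array of box payload bytes,
  // i.e. everything after the 8-byte size/type header.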
17521 var toUnsigned$1 = bin.toUnsigned;
17522 var getUint64$2 = numbers.getUint64;
17523
17524 var tfdt = function (data) {
17525 var result = {
17526 version: data[0],
17527 flags: new Uint8Array(data.subarray(1, 4))
17528 };
17529
17530 if (result.version === 1) {
17531 result.baseMediaDecodeTime = getUint64$2(data.subarray(4));
17532 } else {
17533 result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);
17534 }
17535
17536 return result;
17537 };
17538
17539 var parseTfdt$2 = tfdt;
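  // Illustrative sketch (not part of the library): a version-0 tfdt payload is
  // one version byte, three flag bytes, then a 32-bit decode time, so
  //
  //   parseTfdt$2(new Uint8Array([0, 0, 0, 0, 0x00, 0x00, 0x03, 0xe8]));
  //
  // yields a baseMediaDecodeTime of 1000.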
17540
17541 var parseSampleFlags$1 = function (flags) {
17542 return {
17543 isLeading: (flags[0] & 0x0c) >>> 2,
17544 dependsOn: flags[0] & 0x03,
17545 isDependedOn: (flags[1] & 0xc0) >>> 6,
17546 hasRedundancy: (flags[1] & 0x30) >>> 4,
17547 paddingValue: (flags[1] & 0x0e) >>> 1,
17548 isNonSyncSample: flags[1] & 0x01,
17549 degradationPriority: flags[2] << 8 | flags[3]
17550 };
17551 };
17552
17553 var parseSampleFlags_1 = parseSampleFlags$1;
17554 var parseSampleFlags = parseSampleFlags_1;
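  // Illustrative sketch (not part of the library): the four flag bytes pack
  // several small fields, e.g.
  //
  //   parseSampleFlags(new Uint8Array([0x02, 0x40, 0x00, 0x00]));
  //
  // decodes to dependsOn: 2 and isDependedOn: 1, with every other field 0.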
17555
17556 var trun = function (data) {
17557 var result = {
17558 version: data[0],
17559 flags: new Uint8Array(data.subarray(1, 4)),
17560 samples: []
17561 },
17562 view = new DataView(data.buffer, data.byteOffset, data.byteLength),
17563 // Flag interpretation
17564 dataOffsetPresent = result.flags[2] & 0x01,
17565 // compare with 2nd byte of 0x1
17566 firstSampleFlagsPresent = result.flags[2] & 0x04,
17567 // compare with 2nd byte of 0x4
17568 sampleDurationPresent = result.flags[1] & 0x01,
17569 // compare with 2nd byte of 0x100
17570 sampleSizePresent = result.flags[1] & 0x02,
17571 // compare with 2nd byte of 0x200
17572 sampleFlagsPresent = result.flags[1] & 0x04,
17573 // compare with 2nd byte of 0x400
17574 sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
17575 // compare with 2nd byte of 0x800
17576 sampleCount = view.getUint32(4),
17577 offset = 8,
17578 sample;
17579
17580 if (dataOffsetPresent) {
17581 // 32 bit signed integer
17582 result.dataOffset = view.getInt32(offset);
17583 offset += 4;
17584 } // Overrides the flags for the first sample only. The order of
17585 // optional values will be: duration, size, compositionTimeOffset
17586
17587
17588 if (firstSampleFlagsPresent && sampleCount) {
17589 sample = {
17590 flags: parseSampleFlags(data.subarray(offset, offset + 4))
17591 };
17592 offset += 4;
17593
17594 if (sampleDurationPresent) {
17595 sample.duration = view.getUint32(offset);
17596 offset += 4;
17597 }
17598
17599 if (sampleSizePresent) {
17600 sample.size = view.getUint32(offset);
17601 offset += 4;
17602 }
17603
17604 if (sampleCompositionTimeOffsetPresent) {
17605 if (result.version === 1) {
17606 sample.compositionTimeOffset = view.getInt32(offset);
17607 } else {
17608 sample.compositionTimeOffset = view.getUint32(offset);
17609 }
17610
17611 offset += 4;
17612 }
17613
17614 result.samples.push(sample);
17615 sampleCount--;
17616 }
17617
17618 while (sampleCount--) {
17619 sample = {};
17620
17621 if (sampleDurationPresent) {
17622 sample.duration = view.getUint32(offset);
17623 offset += 4;
17624 }
17625
17626 if (sampleSizePresent) {
17627 sample.size = view.getUint32(offset);
17628 offset += 4;
17629 }
17630
17631 if (sampleFlagsPresent) {
17632 sample.flags = parseSampleFlags(data.subarray(offset, offset + 4));
17633 offset += 4;
17634 }
17635
17636 if (sampleCompositionTimeOffsetPresent) {
17637 if (result.version === 1) {
17638 sample.compositionTimeOffset = view.getInt32(offset);
17639 } else {
17640 sample.compositionTimeOffset = view.getUint32(offset);
17641 }
17642
17643 offset += 4;
17644 }
17645
17646 result.samples.push(sample);
17647 }
17648
17649 return result;
17650 };
17651
17652 var parseTrun$2 = trun;
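  // Illustrative note (not part of the library): the trun flags decide which
  // optional per-sample fields are present. A trun with 24-bit flags 0x000301
  // (flags[1] = 0x03, flags[2] = 0x01) carries a data offset plus a duration
  // and size for every sample, so each loop iteration above advances `offset`
  // by 8 bytes.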
17653
17654 var tfhd = function (data) {
17655 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
17656 result = {
17657 version: data[0],
17658 flags: new Uint8Array(data.subarray(1, 4)),
17659 trackId: view.getUint32(4)
17660 },
17661 baseDataOffsetPresent = result.flags[2] & 0x01,
17662 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
17663 defaultSampleDurationPresent = result.flags[2] & 0x08,
17664 defaultSampleSizePresent = result.flags[2] & 0x10,
17665 defaultSampleFlagsPresent = result.flags[2] & 0x20,
 17666 durationIsEmpty = result.flags[0] & 0x01, // flags bit 0x010000 lives in the top flag byte
 17667 defaultBaseIsMoof = result.flags[0] & 0x02, // flags bit 0x020000 lives in the top flag byte
17668 i;
17669 i = 8;
17670
17671 if (baseDataOffsetPresent) {
17672 i += 4; // truncate top 4 bytes
17673 // FIXME: should we read the full 64 bits?
17674
17675 result.baseDataOffset = view.getUint32(12);
17676 i += 4;
17677 }
17678
17679 if (sampleDescriptionIndexPresent) {
17680 result.sampleDescriptionIndex = view.getUint32(i);
17681 i += 4;
17682 }
17683
17684 if (defaultSampleDurationPresent) {
17685 result.defaultSampleDuration = view.getUint32(i);
17686 i += 4;
17687 }
17688
17689 if (defaultSampleSizePresent) {
17690 result.defaultSampleSize = view.getUint32(i);
17691 i += 4;
17692 }
17693
17694 if (defaultSampleFlagsPresent) {
17695 result.defaultSampleFlags = view.getUint32(i);
17696 }
17697
17698 if (durationIsEmpty) {
17699 result.durationIsEmpty = true;
17700 }
17701
17702 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
17703 result.baseDataOffsetIsMoof = true;
17704 }
17705
17706 return result;
17707 };
17708
17709 var parseTfhd$2 = tfhd;
17710 var win;
17711
17712 if (typeof window !== "undefined") {
17713 win = window;
17714 } else if (typeof commonjsGlobal !== "undefined") {
17715 win = commonjsGlobal;
17716 } else if (typeof self !== "undefined") {
17717 win = self;
17718 } else {
17719 win = {};
17720 }
17721
17722 var window_1 = win;
17723 /**
17724 * mux.js
17725 *
17726 * Copyright (c) Brightcove
17727 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
17728 *
17729 * Reads in-band CEA-708 captions out of FMP4 segments.
17730 * @see https://en.wikipedia.org/wiki/CEA-708
17731 */
17732
17733 var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
17734 var CaptionStream = captionStream.CaptionStream;
17735 var findBox$1 = findBox_1;
17736 var parseTfdt$1 = parseTfdt$2;
17737 var parseTrun$1 = parseTrun$2;
17738 var parseTfhd$1 = parseTfhd$2;
17739 var window$2 = window_1;
17740 /**
 17741 * Maps an offset in the mdat to a sample based on the size of the samples.
17742 * Assumes that `parseSamples` has been called first.
17743 *
17744 * @param {Number} offset - The offset into the mdat
17745 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
17746 * @return {?Object} The matching sample, or null if no match was found.
17747 *
17748 * @see ISO-BMFF-12/2015, Section 8.8.8
17749 **/
17750
17751 var mapToSample = function (offset, samples) {
17752 var approximateOffset = offset;
17753
17754 for (var i = 0; i < samples.length; i++) {
17755 var sample = samples[i];
17756
17757 if (approximateOffset < sample.size) {
17758 return sample;
17759 }
17760
17761 approximateOffset -= sample.size;
17762 }
17763
17764 return null;
17765 };
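  // Illustrative sketch (not part of the library): offsets are matched by
  // walking the sample sizes in order, so
  //
  //   mapToSample(150, [{ size: 100 }, { size: 200 }]);
  //
  // returns the second sample, because offset 150 falls past the first
  // sample's 100 bytes.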
17766 /**
17767 * Finds SEI nal units contained in a Media Data Box.
17768 * Assumes that `parseSamples` has been called first.
17769 *
17770 * @param {Uint8Array} avcStream - The bytes of the mdat
17771 * @param {Object[]} samples - The samples parsed out by `parseSamples`
17772 * @param {Number} trackId - The trackId of this video track
17773 * @return {Object[]} seiNals - the parsed SEI NALUs found.
17774 * The contents of the seiNal should match what is expected by
17775 * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
17776 *
17777 * @see ISO-BMFF-12/2015, Section 8.1.1
17778 * @see Rec. ITU-T H.264, 7.3.2.3.1
17779 **/
17780
17781
17782 var findSeiNals = function (avcStream, samples, trackId) {
17783 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
17784 result = {
17785 logs: [],
17786 seiNals: []
17787 },
17788 seiNal,
17789 i,
17790 length,
17791 lastMatchedSample;
17792
17793 for (i = 0; i + 4 < avcStream.length; i += length) {
17794 length = avcView.getUint32(i);
17795 i += 4; // Bail if this doesn't appear to be an H264 stream
17796
17797 if (length <= 0) {
17798 continue;
17799 }
17800
17801 switch (avcStream[i] & 0x1F) {
17802 case 0x06:
17803 var data = avcStream.subarray(i + 1, i + 1 + length);
17804 var matchingSample = mapToSample(i, samples);
17805 seiNal = {
17806 nalUnitType: 'sei_rbsp',
17807 size: length,
17808 data: data,
17809 escapedRBSP: discardEmulationPreventionBytes(data),
17810 trackId: trackId
17811 };
17812
17813 if (matchingSample) {
17814 seiNal.pts = matchingSample.pts;
17815 seiNal.dts = matchingSample.dts;
17816 lastMatchedSample = matchingSample;
17817 } else if (lastMatchedSample) {
17818 // If a matching sample cannot be found, use the last
17819 // sample's values as they should be as close as possible
17820 seiNal.pts = lastMatchedSample.pts;
17821 seiNal.dts = lastMatchedSample.dts;
17822 } else {
17823 result.logs.push({
17824 level: 'warn',
17825 message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
17826 });
17827 break;
17828 }
17829
17830 result.seiNals.push(seiNal);
17831 break;
17832 }
17833 }
17834
17835 return result;
17836 };
17837 /**
17838 * Parses sample information out of Track Run Boxes and calculates
17839 * the absolute presentation and decode timestamps of each sample.
17840 *
 17841 * @param {Array<Uint8Array>} truns - The Track Run boxes to be parsed
 17842 * @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt
 17843 * @see ISO-BMFF-12/2015, Section 8.8.12
17844 * @param {Object} tfhd - The parsed Track Fragment Header
17845 * @see inspect.parseTfhd
17846 * @return {Object[]} the parsed samples
17847 *
17848 * @see ISO-BMFF-12/2015, Section 8.8.8
17849 **/
17850
17851
17852 var parseSamples = function (truns, baseMediaDecodeTime, tfhd) {
17853 var currentDts = baseMediaDecodeTime;
17854 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
17855 var defaultSampleSize = tfhd.defaultSampleSize || 0;
17856 var trackId = tfhd.trackId;
17857 var allSamples = [];
17858 truns.forEach(function (trun) {
17859 // Note: We currently do not parse the sample table as well
17860 // as the trun. It's possible some sources will require this.
17861 // moov > trak > mdia > minf > stbl
17862 var trackRun = parseTrun$1(trun);
17863 var samples = trackRun.samples;
17864 samples.forEach(function (sample) {
17865 if (sample.duration === undefined) {
17866 sample.duration = defaultSampleDuration;
17867 }
17868
17869 if (sample.size === undefined) {
17870 sample.size = defaultSampleSize;
17871 }
17872
17873 sample.trackId = trackId;
17874 sample.dts = currentDts;
17875
17876 if (sample.compositionTimeOffset === undefined) {
17877 sample.compositionTimeOffset = 0;
17878 }
17879
17880 if (typeof currentDts === 'bigint') {
17881 sample.pts = currentDts + window$2.BigInt(sample.compositionTimeOffset);
17882 currentDts += window$2.BigInt(sample.duration);
17883 } else {
17884 sample.pts = currentDts + sample.compositionTimeOffset;
17885 currentDts += sample.duration;
17886 }
17887 });
17888 allSamples = allSamples.concat(samples);
17889 });
17890 return allSamples;
17891 };
17892 /**
17893 * Parses out caption nals from an FMP4 segment's video tracks.
17894 *
17895 * @param {Uint8Array} segment - The bytes of a single segment
17896 * @param {Number} videoTrackId - The trackId of a video track in the segment
17897 * @return {Object.<Number, Object[]>} A mapping of video trackId to
17898 * a list of seiNals found in that track
17899 **/
17900
17901
17902 var parseCaptionNals = function (segment, videoTrackId) {
17903 // To get the samples
17904 var trafs = findBox$1(segment, ['moof', 'traf']); // To get SEI NAL units
17905
17906 var mdats = findBox$1(segment, ['mdat']);
17907 var captionNals = {};
17908 var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
17909
17910 mdats.forEach(function (mdat, index) {
17911 var matchingTraf = trafs[index];
17912 mdatTrafPairs.push({
17913 mdat: mdat,
17914 traf: matchingTraf
17915 });
17916 });
17917 mdatTrafPairs.forEach(function (pair) {
17918 var mdat = pair.mdat;
17919 var traf = pair.traf;
17920 var tfhd = findBox$1(traf, ['tfhd']); // Exactly 1 tfhd per traf
17921
17922 var headerInfo = parseTfhd$1(tfhd[0]);
17923 var trackId = headerInfo.trackId;
17924 var tfdt = findBox$1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
17925
17926 var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt$1(tfdt[0]).baseMediaDecodeTime : 0;
17927 var truns = findBox$1(traf, ['trun']);
17928 var samples;
17929 var result; // Only parse video data for the chosen video track
17930
17931 if (videoTrackId === trackId && truns.length > 0) {
17932 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
17933 result = findSeiNals(mdat, samples, trackId);
17934
17935 if (!captionNals[trackId]) {
17936 captionNals[trackId] = {
17937 seiNals: [],
17938 logs: []
17939 };
17940 }
17941
17942 captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
17943 captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
17944 }
17945 });
17946 return captionNals;
17947 };
17948 /**
17949 * Parses out inband captions from an MP4 container and returns
17950 * caption objects that can be used by WebVTT and the TextTrack API.
17951 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
17952 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
17953 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
17954 *
17955 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
17956 * @param {Number} trackId - The id of the video track to parse
17957 * @param {Number} timescale - The timescale for the video track from the init segment
17958 *
17959 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
17960 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
17961 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
17962 * @return {Object[]} parsedCaptions[].content - A list of individual caption segments
17963 * @return {String} parsedCaptions[].content.text - The visible content of the caption segment
17964 * @return {Number} parsedCaptions[].content.line - The line height from 1-15 for positioning of the caption segment
17965 * @return {Number} parsedCaptions[].content.position - The column indent percentage for cue positioning from 10-80
17966 **/
17967
17968
17969 var parseEmbeddedCaptions = function (segment, trackId, timescale) {
17970 var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
17971
17972 if (trackId === null) {
17973 return null;
17974 }
17975
17976 captionNals = parseCaptionNals(segment, trackId);
17977 var trackNals = captionNals[trackId] || {};
17978 return {
17979 seiNals: trackNals.seiNals,
17980 logs: trackNals.logs,
17981 timescale: timescale
17982 };
17983 };
17984 /**
17985 * Converts SEI NALUs into captions that can be used by video.js
17986 **/
17987
17988
17989 var CaptionParser = function () {
17990 var isInitialized = false;
17991 var captionStream; // Stores segments seen before trackId and timescale are set
17992
17993 var segmentCache; // Stores video track ID of the track being parsed
17994
17995 var trackId; // Stores the timescale of the track being parsed
17996
17997 var timescale; // Stores captions parsed so far
17998
17999 var parsedCaptions; // Stores whether we are receiving partial data or not
18000
18001 var parsingPartial;
18002 /**
 18003 * A method to indicate whether a CaptionParser has been initialized
18004 * @returns {Boolean}
18005 **/
18006
18007 this.isInitialized = function () {
18008 return isInitialized;
18009 };
18010 /**
18011 * Initializes the underlying CaptionStream, SEI NAL parsing
18012 * and management, and caption collection
18013 **/
18014
18015
18016 this.init = function (options) {
18017 captionStream = new CaptionStream();
18018 isInitialized = true;
18019 parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
18020
18021 captionStream.on('data', function (event) {
18022 // Convert to seconds in the source's timescale
18023 event.startTime = event.startPts / timescale;
18024 event.endTime = event.endPts / timescale;
18025 parsedCaptions.captions.push(event);
18026 parsedCaptions.captionStreams[event.stream] = true;
18027 });
18028 captionStream.on('log', function (log) {
18029 parsedCaptions.logs.push(log);
18030 });
18031 };
18032 /**
18033 * Determines if a new video track will be selected
18034 * or if the timescale changed
18035 * @return {Boolean}
18036 **/
18037
18038
18039 this.isNewInit = function (videoTrackIds, timescales) {
18040 if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
18041 return false;
18042 }
18043
18044 return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
18045 };
18046 /**
18047 * Parses out SEI captions and interacts with underlying
18048 * CaptionStream to return dispatched captions
18049 *
18050 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
18051 * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
18052 * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
18053 * @see parseEmbeddedCaptions
18054 * @see m2ts/caption-stream.js
18055 **/
18056
18057
18058 this.parse = function (segment, videoTrackIds, timescales) {
18059 var parsedData;
18060
18061 if (!this.isInitialized()) {
18062 return null; // This is not likely to be a video segment
18063 } else if (!videoTrackIds || !timescales) {
18064 return null;
18065 } else if (this.isNewInit(videoTrackIds, timescales)) {
18066 // Use the first video track only as there is no
18067 // mechanism to switch to other video tracks
18068 trackId = videoTrackIds[0];
18069 timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
18070 // data until we have one.
18071 // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
18072 } else if (trackId === null || !timescale) {
18073 segmentCache.push(segment);
18074 return null;
18075 } // Now that a timescale and trackId is set, parse cached segments
18076
18077
18078 while (segmentCache.length > 0) {
18079 var cachedSegment = segmentCache.shift();
18080 this.parse(cachedSegment, videoTrackIds, timescales);
18081 }
18082
18083 parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
18084
18085 if (parsedData && parsedData.logs) {
18086 parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
18087 }
18088
18089 if (parsedData === null || !parsedData.seiNals) {
18090 if (parsedCaptions.logs.length) {
18091 return {
18092 logs: parsedCaptions.logs,
18093 captions: [],
18094 captionStreams: []
18095 };
18096 }
18097
18098 return null;
18099 }
18100
18101 this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
18102
18103 this.flushStream();
18104 return parsedCaptions;
18105 };
18106 /**
18107 * Pushes SEI NALUs onto CaptionStream
18108 * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
18109 * Assumes that `parseCaptionNals` has been called first
18110 * @see m2ts/caption-stream.js
18111 **/
18112
18113
18114 this.pushNals = function (nals) {
18115 if (!this.isInitialized() || !nals || nals.length === 0) {
18116 return null;
18117 }
18118
18119 nals.forEach(function (nal) {
18120 captionStream.push(nal);
18121 });
18122 };
18123 /**
18124 * Flushes underlying CaptionStream to dispatch processed, displayable captions
18125 * @see m2ts/caption-stream.js
18126 **/
18127
18128
18129 this.flushStream = function () {
18130 if (!this.isInitialized()) {
18131 return null;
18132 }
18133
18134 if (!parsingPartial) {
18135 captionStream.flush();
18136 } else {
18137 captionStream.partialFlush();
18138 }
18139 };
18140 /**
18141 * Reset caption buckets for new data
18142 **/
18143
18144
18145 this.clearParsedCaptions = function () {
18146 parsedCaptions.captions = [];
18147 parsedCaptions.captionStreams = {};
18148 parsedCaptions.logs = [];
18149 };
18150 /**
18151 * Resets underlying CaptionStream
18152 * @see m2ts/caption-stream.js
18153 **/
18154
18155
18156 this.resetCaptionStream = function () {
18157 if (!this.isInitialized()) {
18158 return null;
18159 }
18160
18161 captionStream.reset();
18162 };
18163 /**
18164 * Convenience method to clear all captions flushed from the
18165 * CaptionStream and still being parsed
18166 * @see m2ts/caption-stream.js
18167 **/
18168
18169
18170 this.clearAllCaptions = function () {
18171 this.clearParsedCaptions();
18172 this.resetCaptionStream();
18173 };
18174 /**
18175 * Reset caption parser
18176 **/
18177
18178
18179 this.reset = function () {
18180 segmentCache = [];
18181 trackId = null;
18182 timescale = null;
18183
18184 if (!parsedCaptions) {
18185 parsedCaptions = {
18186 captions: [],
18187 // CC1, CC2, CC3, CC4
18188 captionStreams: {},
18189 logs: []
18190 };
18191 } else {
18192 this.clearParsedCaptions();
18193 }
18194
18195 this.resetCaptionStream();
18196 };
18197
18198 this.reset();
18199 };
18200
18201 var captionParser = CaptionParser;
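  // Illustrative usage sketch (not part of the library): `segmentBytes`, the
  // track id 1, and the 90kHz timescale below are hypothetical values taken
  // from an init segment:
  //
  //   var parser = new captionParser();
  //   parser.init();
  //   var parsed = parser.parse(segmentBytes, [1], { 1: 90000 });
  //   // parsed is null, or { captions, captionStreams, logs }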
18202 /**
18203 * Returns the first string in the data array ending with a null char '\0'
 18204 * @param {Uint8Array} data
 18205 * @returns {string} the string, including the terminating null char
18206 */
18207
18208 var uint8ToCString$1 = function (data) {
18209 var index = 0;
18210 var curChar = String.fromCharCode(data[index]);
18211 var retString = '';
18212
18213 while (curChar !== '\0') {
18214 retString += curChar;
18215 index++;
18216 curChar = String.fromCharCode(data[index]);
18217 } // Add nullChar
18218
18219
18220 retString += curChar;
18221 return retString;
18222 };
18223
18224 var string = {
18225 uint8ToCString: uint8ToCString$1
18226 };
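  // Illustrative sketch (not part of the library): the returned string keeps
  // its terminating null, which is why parseEmsgBox below can advance its
  // offset by the returned length directly:
  //
  //   string.uint8ToCString(new Uint8Array([0x61, 0x62, 0x63, 0x00]));
  //   // => 'abc\0' (length 4)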
18227 var uint8ToCString = string.uint8ToCString;
18228 var getUint64$1 = numbers.getUint64;
18229 /**
18230 * Based on: ISO/IEC 23009 Section: 5.10.3.3
18231 * References:
18232 * https://dashif-documents.azurewebsites.net/Events/master/event.html#emsg-format
18233 * https://aomediacodec.github.io/id3-emsg/
18234 *
 18235 * Takes emsg box data as a uint8 array and returns an emsg box object
 18236 * @param {Uint8Array} boxData data from emsg box
 18237 * @returns A parsed emsg box object, or undefined if the box is invalid
18238 */
18239
18240 var parseEmsgBox = function (boxData) {
18241 // version + flags
18242 var offset = 4;
18243 var version = boxData[0];
18244 var scheme_id_uri, value, timescale, presentation_time, presentation_time_delta, event_duration, id, message_data;
18245
18246 if (version === 0) {
18247 scheme_id_uri = uint8ToCString(boxData.subarray(offset));
18248 offset += scheme_id_uri.length;
18249 value = uint8ToCString(boxData.subarray(offset));
18250 offset += value.length;
18251 var dv = new DataView(boxData.buffer);
18252 timescale = dv.getUint32(offset);
18253 offset += 4;
18254 presentation_time_delta = dv.getUint32(offset);
18255 offset += 4;
18256 event_duration = dv.getUint32(offset);
18257 offset += 4;
18258 id = dv.getUint32(offset);
18259 offset += 4;
18260 } else if (version === 1) {
18261 var dv = new DataView(boxData.buffer);
18262 timescale = dv.getUint32(offset);
18263 offset += 4;
18264 presentation_time = getUint64$1(boxData.subarray(offset));
18265 offset += 8;
18266 event_duration = dv.getUint32(offset);
18267 offset += 4;
18268 id = dv.getUint32(offset);
18269 offset += 4;
18270 scheme_id_uri = uint8ToCString(boxData.subarray(offset));
18271 offset += scheme_id_uri.length;
18272 value = uint8ToCString(boxData.subarray(offset));
18273 offset += value.length;
18274 }
18275
18276 message_data = new Uint8Array(boxData.subarray(offset, boxData.byteLength));
18277 var emsgBox = {
18278 scheme_id_uri,
18279 value,
18280 // if timescale is undefined or 0 set to 1
18281 timescale: timescale ? timescale : 1,
18282 presentation_time,
18283 presentation_time_delta,
18284 event_duration,
18285 id,
18286 message_data
18287 };
18288 return isValidEmsgBox(version, emsgBox) ? emsgBox : undefined;
18289 };
18290 /**
 18291 * Scales a presentation time or time delta by the provided timescale, using the offset plus delta when no presentation time is given
18292 * @param {number} presentationTime
18293 * @param {number} timescale
18294 * @param {number} timeDelta
18295 * @param {number} offset
18296 * @returns the scaled time as a number
18297 */
18298
18299
18300 var scaleTime = function (presentationTime, timescale, timeDelta, offset) {
18301 return presentationTime || presentationTime === 0 ? presentationTime / timescale : offset + timeDelta / timescale;
18302 };
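  // Illustrative sketch (not part of the library): with a 90kHz timescale,
  //
  //   scaleTime(90000, 90000);                // => 1 (explicit presentation time)
  //   scaleTime(undefined, 90000, 45000, 10); // => 10.5 (offset + delta / timescale)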
18303 /**
18304 * Checks the emsg box data for validity based on the version
18305 * @param {number} version of the emsg box to validate
18306 * @param {Object} emsg the emsg data to validate
 18307 * @returns {boolean} whether the box is valid
18308 */
18309
18310
18311 var isValidEmsgBox = function (version, emsg) {
18312 var hasScheme = emsg.scheme_id_uri !== '\0';
18313 var isValidV0Box = version === 0 && isDefined(emsg.presentation_time_delta) && hasScheme;
18314 var isValidV1Box = version === 1 && isDefined(emsg.presentation_time) && hasScheme; // Only valid versions of emsg are 0 and 1
18315
18316 return !(version > 1) && isValidV0Box || isValidV1Box;
18317 }; // Utility function to check if an object is defined
18318
18319
18320 var isDefined = function (data) {
 18321 return data !== undefined && data !== null;
18322 };
18323
18324 var emsg$1 = {
18325 parseEmsgBox: parseEmsgBox,
18326 scaleTime: scaleTime
18327 };
18328 /**
18329 * mux.js
18330 *
18331 * Copyright (c) Brightcove
18332 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
18333 *
18334 * Utilities to detect basic properties and metadata about MP4s.
18335 */
18336
18337 var toUnsigned = bin.toUnsigned;
18338 var toHexString = bin.toHexString;
18339 var findBox = findBox_1;
18340 var parseType$1 = parseType_1;
18341 var emsg = emsg$1;
18342 var parseTfhd = parseTfhd$2;
18343 var parseTrun = parseTrun$2;
18344 var parseTfdt = parseTfdt$2;
18345 var getUint64 = numbers.getUint64;
18346 var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader, getEmsgID3;
18347 var window$1 = window_1;
18348 var parseId3Frames = parseId3.parseId3Frames;
18349 /**
18350 * Parses an MP4 initialization segment and extracts the timescale
18351 * values for any declared tracks. Timescale values indicate the
18352 * number of clock ticks per second to assume for time-based values
18353 * elsewhere in the MP4.
18354 *
18355 * To determine the start time of an MP4, you need two pieces of
18356 * information: the timescale unit and the earliest base media decode
18357 * time. Multiple timescales can be specified within an MP4 but the
18358 * base media decode time is always expressed in the timescale from
18359 * the media header box for the track:
18360 * ```
18361 * moov > trak > mdia > mdhd.timescale
18362 * ```
18363 * @param init {Uint8Array} the bytes of the init segment
18364 * @return {object} a hash of track ids to timescale values or null if
18365 * the init segment is malformed.
18366 */
18367
18368 timescale = function (init) {
18369 var result = {},
18370 traks = findBox(init, ['moov', 'trak']); // mdhd timescale
18371
18372 return traks.reduce(function (result, trak) {
18373 var tkhd, version, index, id, mdhd;
18374 tkhd = findBox(trak, ['tkhd'])[0];
18375
18376 if (!tkhd) {
18377 return null;
18378 }
18379
18380 version = tkhd[0];
18381 index = version === 0 ? 12 : 20;
18382 id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
18383 mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
18384
18385 if (!mdhd) {
18386 return null;
18387 }
18388
18389 version = mdhd[0];
18390 index = version === 0 ? 12 : 20;
18391 result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
18392 return result;
18393 }, result);
18394 };
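  // Illustrative sketch (not part of the library): for an init segment with a
  // 90kHz video track (id 1) and a 48kHz audio track (id 2), both hypothetical,
  //
  //   timescale(initBytes); // => { 1: 90000, 2: 48000 }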
18395 /**
18396 * Determine the base media decode start time, in seconds, for an MP4
18397 * fragment. If multiple fragments are specified, the earliest time is
18398 * returned.
18399 *
18400 * The base media decode time can be parsed from track fragment
18401 * metadata:
18402 * ```
18403 * moof > traf > tfdt.baseMediaDecodeTime
18404 * ```
18405 * It requires the timescale value from the mdhd to interpret.
18406 *
18407 * @param timescale {object} a hash of track ids to timescale values.
18408 * @return {number} the earliest base media decode start time for the
18409 * fragment, in seconds
18410 */
18411
18412
18413 startTime = function (timescale, fragment) {
 18414 var trafs; // we need info from two children of each track fragment box
18415
18416 trafs = findBox(fragment, ['moof', 'traf']); // determine the start times for each track
18417
18418 var lowestTime = trafs.reduce(function (acc, traf) {
18419 var tfhd = findBox(traf, ['tfhd'])[0]; // get the track id from the tfhd
18420
18421 var id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified
18422
18423 var scale = timescale[id] || 90e3; // get the base media decode time from the tfdt
18424
18425 var tfdt = findBox(traf, ['tfdt'])[0];
18426 var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);
18427 var baseTime; // version 1 is 64 bit
18428
18429 if (tfdt[0] === 1) {
18430 baseTime = getUint64(tfdt.subarray(4, 12));
18431 } else {
18432 baseTime = dv.getUint32(4);
18433 } // convert base time to seconds if it is a valid number.
18434
18435
18436 let seconds;
18437
18438 if (typeof baseTime === 'bigint') {
18439 seconds = baseTime / window$1.BigInt(scale);
18440 } else if (typeof baseTime === 'number' && !isNaN(baseTime)) {
18441 seconds = baseTime / scale;
18442 }
18443
18444 if (seconds < Number.MAX_SAFE_INTEGER) {
18445 seconds = Number(seconds);
18446 }
18447
18448 if (seconds < acc) {
18449 acc = seconds;
18450 }
18451
18452 return acc;
18453 }, Infinity);
18454 return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;
18455 };
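  // Illustrative note (not part of the library): a fragment whose earliest
  // tfdt.baseMediaDecodeTime is 900000 ticks on a 90kHz track starts at
  // 900000 / 90000 = 10 seconds.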
18456 /**
18457 * Determine the composition start, in seconds, for an MP4
18458 * fragment.
18459 *
18460 * The composition start time of a fragment can be calculated using the base
18461 * media decode time, composition time offset, and timescale, as follows:
18462 *
18463 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
18464 *
18465 * All of the aforementioned information is contained within a media fragment's
18466 * `traf` box, except for timescale info, which comes from the initialization
18467 * segment, so a track id (also contained within a `traf`) is also necessary to
18468 * associate it with a timescale
18469 *
18470 *
18471 * @param timescales {object} - a hash of track ids to timescale values.
 18472 * @param fragment {Uint8Array} - the bytes of a media segment
18473 * @return {number} the composition start time for the fragment, in seconds
18474 **/
18475
18476
18477 compositionStartTime = function (timescales, fragment) {
18478 var trafBoxes = findBox(fragment, ['moof', 'traf']);
18479 var baseMediaDecodeTime = 0;
18480 var compositionTimeOffset = 0;
18481 var trackId;
18482
18483 if (trafBoxes && trafBoxes.length) {
18484 // The spec states that track run samples contained within a `traf` box are contiguous, but
18485 // it does not explicitly state whether the `traf` boxes themselves are contiguous.
18486 // We will assume that they are, so we only need the first to calculate start time.
18487 var tfhd = findBox(trafBoxes[0], ['tfhd'])[0];
18488 var trun = findBox(trafBoxes[0], ['trun'])[0];
18489 var tfdt = findBox(trafBoxes[0], ['tfdt'])[0];
18490
18491 if (tfhd) {
18492 var parsedTfhd = parseTfhd(tfhd);
18493 trackId = parsedTfhd.trackId;
18494 }
18495
18496 if (tfdt) {
18497 var parsedTfdt = parseTfdt(tfdt);
18498 baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
18499 }
18500
18501 if (trun) {
18502 var parsedTrun = parseTrun(trun);
18503
18504 if (parsedTrun.samples && parsedTrun.samples.length) {
18505 compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
18506 }
18507 }
18508 } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
18509 // specified.
18510
18511
18512 var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds
18513
18514 if (typeof baseMediaDecodeTime === 'bigint') {
18515 compositionTimeOffset = window$1.BigInt(compositionTimeOffset);
18516 timescale = window$1.BigInt(timescale);
18517 }
18518
18519 var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;
18520
18521 if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {
18522 result = Number(result);
18523 }
18524
18525 return result;
18526 };
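  // Illustrative note (not part of the library): with a baseMediaDecodeTime of
  // 90000, a first-sample compositionTimeOffset of 3000, and a 90kHz
  // timescale, the composition start time is (90000 + 3000) / 90000 ≈ 1.033
  // seconds.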
18527 /**
18528 * Find the trackIds of the video tracks in this source.
18529 * Found by parsing the Handler Reference and Track Header Boxes:
18530 * moov > trak > mdia > hdlr
18531 * moov > trak > tkhd
18532 *
18533 * @param {Uint8Array} init - The bytes of the init segment for this source
18534 * @return {Number[]} A list of trackIds
18535 *
18536 * @see ISO-BMFF-12/2015, Section 8.4.3
18537 **/
18538
18539
18540 getVideoTrackIds = function (init) {
18541 var traks = findBox(init, ['moov', 'trak']);
18542 var videoTrackIds = [];
18543 traks.forEach(function (trak) {
18544 var hdlrs = findBox(trak, ['mdia', 'hdlr']);
18545 var tkhds = findBox(trak, ['tkhd']);
18546 hdlrs.forEach(function (hdlr, index) {
18547 var handlerType = parseType$1(hdlr.subarray(8, 12));
18548 var tkhd = tkhds[index];
18549 var view;
18550 var version;
18551 var trackId;
18552
18553 if (handlerType === 'vide') {
18554 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
18555 version = view.getUint8(0);
18556 trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
18557 videoTrackIds.push(trackId);
18558 }
18559 });
18560 });
18561 return videoTrackIds;
18562 };
18563
18564 getTimescaleFromMediaHeader = function (mdhd) {
18565 // mdhd is a FullBox, meaning it will have its own version as the first byte
18566 var version = mdhd[0];
18567 var index = version === 0 ? 12 : 20;
18568 return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
18569 };
18570 /**
18571 * Get all the video, audio, and hint tracks from a non fragmented
18572 * mp4 segment
18573 */
18574
18575
18576 getTracks = function (init) {
18577 var traks = findBox(init, ['moov', 'trak']);
18578 var tracks = [];
18579 traks.forEach(function (trak) {
18580 var track = {};
18581 var tkhd = findBox(trak, ['tkhd'])[0];
18582 var view, tkhdVersion; // id
18583
18584 if (tkhd) {
18585 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
18586 tkhdVersion = view.getUint8(0);
18587 track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
18588 }
18589
18590 var hdlr = findBox(trak, ['mdia', 'hdlr'])[0]; // type
18591
18592 if (hdlr) {
18593 var type = parseType$1(hdlr.subarray(8, 12));
18594
18595 if (type === 'vide') {
18596 track.type = 'video';
18597 } else if (type === 'soun') {
18598 track.type = 'audio';
18599 } else {
18600 track.type = type;
18601 }
18602 } // codec
18603
18604
18605 var stsd = findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
18606
18607 if (stsd) {
18608 var sampleDescriptions = stsd.subarray(8); // gives the codec type string
18609
18610 track.codec = parseType$1(sampleDescriptions.subarray(4, 8));
18611 var codecBox = findBox(sampleDescriptions, [track.codec])[0];
18612 var codecConfig, codecConfigType;
18613
18614 if (codecBox) {
18615 // https://tools.ietf.org/html/rfc6381#section-3.3
18616 if (/^[asm]vc[1-9]$/i.test(track.codec)) {
18617 // we don't need anything but the "config" parameter of the
18618 // avc1 codecBox
18619 codecConfig = codecBox.subarray(78);
18620 codecConfigType = parseType$1(codecConfig.subarray(4, 8));
18621
18622 if (codecConfigType === 'avcC' && codecConfig.length > 11) {
18623 track.codec += '.'; // left padded with zeroes for single digit hex
18624 // profile idc
18625
18626 track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags
18627
18628 track.codec += toHexString(codecConfig[10]); // level idc
18629
18630 track.codec += toHexString(codecConfig[11]);
18631 } else {
18632 // TODO: show a warning that we couldn't parse the codec
18633 // and are using the default
18634 track.codec = 'avc1.4d400d';
18635 }
18636 } else if (/^mp4[a,v]$/i.test(track.codec)) {
18637 // we do not need anything but the streamDescriptor of the mp4a codecBox
18638 codecConfig = codecBox.subarray(28);
18639 codecConfigType = parseType$1(codecConfig.subarray(4, 8));
18640
18641 if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
18642 track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit
18643
18644 track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
18645 } else {
18646 // TODO: show a warning that we couldn't parse the codec
18647 // and are using the default
18648 track.codec = 'mp4a.40.2';
18649 }
18650 } else {
18651 // flac, opus, etc
18652 track.codec = track.codec.toLowerCase();
18653 }
18654 }
18655 }
18656
18657 var mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
18658
18659 if (mdhd) {
18660 track.timescale = getTimescaleFromMediaHeader(mdhd);
18661 }
18662
18663 tracks.push(track);
18664 });
18665 return tracks;
18666 };
18667 /**
18668 * Returns an array of emsg ID3 data from the provided segmentData.
18669 * An offset can also be provided as the Latest Arrival Time to calculate
18670 * the Event Start Time of v0 EMSG boxes.
18671 * See: https://dashif-documents.azurewebsites.net/Events/master/event.html#Inband-event-timing
18672 *
18673 * @param {Uint8Array} segmentData the segment byte array.
18674 * @param {number} offset the segment start time or Latest Arrival Time,
18675 * @return {Object[]} an array of ID3 parsed from EMSG boxes
18676 */
18677
18678
18679 getEmsgID3 = function (segmentData, offset = 0) {
18680 var emsgBoxes = findBox(segmentData, ['emsg']);
18681 return emsgBoxes.map(data => {
18682 var parsedBox = emsg.parseEmsgBox(new Uint8Array(data));
18683 var parsedId3Frames = parseId3Frames(parsedBox.message_data);
18684 return {
18685 cueTime: emsg.scaleTime(parsedBox.presentation_time, parsedBox.timescale, parsedBox.presentation_time_delta, offset),
18686 duration: emsg.scaleTime(parsedBox.event_duration, parsedBox.timescale),
18687 frames: parsedId3Frames
18688 };
18689 });
18690 };
18691
18692 var probe$2 = {
18693 // export mp4 inspector's findBox and parseType for backwards compatibility
18694 findBox: findBox,
18695 parseType: parseType$1,
18696 timescale: timescale,
18697 startTime: startTime,
18698 compositionStartTime: compositionStartTime,
18699 videoTrackIds: getVideoTrackIds,
18700 tracks: getTracks,
18701 getTimescaleFromMediaHeader: getTimescaleFromMediaHeader,
18702 getEmsgID3: getEmsgID3
18703 };
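  // Illustrative usage sketch (not part of the library): `initBytes` and
  // `segmentBytes` are hypothetical Uint8Arrays of an init and a media segment:
  //
  //   var timescales = probe$2.timescale(initBytes);
  //   var start = probe$2.startTime(timescales, segmentBytes);
  //   var videoIds = probe$2.videoTrackIds(initBytes);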
18704 /**
18705 * mux.js
18706 *
18707 * Copyright (c) Brightcove
18708 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
18709 *
18710 * Utilities to detect basic properties and metadata about TS Segments.
18711 */
18712
18713 var StreamTypes$1 = streamTypes;
18714
18715 var parsePid = function (packet) {
18716 var pid = packet[1] & 0x1f;
18717 pid <<= 8;
18718 pid |= packet[2];
18719 return pid;
18720 };
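  // Illustrative note (not part of the library): the 13-bit PID spans the low
  // 5 bits of byte 1 and all of byte 2, so for packet[1] = 0x41 and
  // packet[2] = 0x00: (0x41 & 0x1f) << 8 | 0x00 = 0x100 = 256.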
18721
18722 var parsePayloadUnitStartIndicator = function (packet) {
18723 return !!(packet[1] & 0x40);
18724 };
18725
18726 var parseAdaptionField = function (packet) {
18727 var offset = 0; // if an adaption field is present, its length is specified by the
18728 // fifth byte of the TS packet header. The adaptation field is
18729 // used to add stuffing to PES packets that don't fill a complete
18730 // TS packet, and to specify some forms of timing and control data
18731 // that we do not currently use.
18732
18733 if ((packet[3] & 0x30) >>> 4 > 0x01) {
18734 offset += packet[4] + 1;
18735 }
18736
18737 return offset;
18738 };
18739
18740 var parseType = function (packet, pmtPid) {
18741 var pid = parsePid(packet);
18742
18743 if (pid === 0) {
18744 return 'pat';
18745 } else if (pid === pmtPid) {
18746 return 'pmt';
18747 } else if (pmtPid) {
18748 return 'pes';
18749 }
18750
18751 return null;
18752 };
18753
18754 var parsePat = function (packet) {
18755 var pusi = parsePayloadUnitStartIndicator(packet);
18756 var offset = 4 + parseAdaptionField(packet);
18757
18758 if (pusi) {
18759 offset += packet[offset] + 1;
18760 }
18761
18762 return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
18763 };
18764
18765 var parsePmt = function (packet) {
18766 var programMapTable = {};
18767 var pusi = parsePayloadUnitStartIndicator(packet);
18768 var payloadOffset = 4 + parseAdaptionField(packet);
18769
18770 if (pusi) {
18771 payloadOffset += packet[payloadOffset] + 1;
18772 } // PMTs can be sent ahead of the time when they should actually
18773 // take effect. We don't believe this should ever be the case
18774 // for HLS but we'll ignore "forward" PMT declarations if we see
18775 // them. Future PMT declarations have the current_next_indicator
18776 // set to zero.
18777
18778
18779 if (!(packet[payloadOffset + 5] & 0x01)) {
18780 return;
18781 }
18782
18783 var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section
18784
18785 sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
18786 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
18787 // long the program info descriptors are
18788
18789 programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table
18790
18791 var offset = 12 + programInfoLength;
18792
18793 while (offset < tableEnd) {
18794 var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type
18795
18796 programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
18797 // skip past the elementary stream descriptors, if present
18798
18799 offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
18800 }
18801
18802 return programMapTable;
18803 };
18804
18805 var parsePesType = function (packet, programMapTable) {
18806 var pid = parsePid(packet);
18807 var type = programMapTable[pid];
18808
18809 switch (type) {
18810 case StreamTypes$1.H264_STREAM_TYPE:
18811 return 'video';
18812
18813 case StreamTypes$1.ADTS_STREAM_TYPE:
18814 return 'audio';
18815
18816 case StreamTypes$1.METADATA_STREAM_TYPE:
18817 return 'timed-metadata';
18818
18819 default:
18820 return null;
18821 }
18822 };
18823
18824 var parsePesTime = function (packet) {
18825 var pusi = parsePayloadUnitStartIndicator(packet);
18826
18827 if (!pusi) {
18828 return null;
18829 }
18830
18831 var offset = 4 + parseAdaptionField(packet);
18832
18833 if (offset >= packet.byteLength) {
18834 // From the H 222.0 MPEG-TS spec
18835 // "For transport stream packets carrying PES packets, stuffing is needed when there
18836 // is insufficient PES packet data to completely fill the transport stream packet
18837 // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
18838 // the sum of the lengths of the data elements in it, so that the payload bytes
18839 // remaining after the adaptation field exactly accommodates the available PES packet
18840 // data."
18841 //
18842 // If the offset is >= the length of the packet, then the packet contains no data
18843 // and instead is just adaption field stuffing bytes
18844 return null;
18845 }
18846
18847 var pes = null;
18848 var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
18849 // and a DTS value. Determine what combination of values is
18850 // available to work with.
18851
18852 ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
18853 // performs all bitwise operations on 32-bit integers but javascript
18854 // supports a much greater range (52-bits) of integer using standard
18855 // mathematical operations.
18856 // We construct a 31-bit value using bitwise operators over the 31
18857 // most significant bits and then multiply by 4 (equal to a left-shift
18858 // of 2) before we add the final 2 least significant bits of the
18859 // timestamp (equal to an OR.)
18860
18861 if (ptsDtsFlags & 0xC0) {
18862 pes = {}; // the PTS and DTS are not written out directly. For information
18863 // on how they are encoded, see
18864 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
18865
18866 pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
18867 pes.pts *= 4; // Left shift by 2
18868
18869 pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
18870
18871 pes.dts = pes.pts;
18872
18873 if (ptsDtsFlags & 0x40) {
18874 pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
18875 pes.dts *= 4; // Left shift by 2
18876
18877 pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
18878 }
18879 }
18880
18881 return pes;
18882 };
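  // Illustrative note (not part of the library): the reconstructed pts/dts are
  // in 90kHz MPEG-TS clock ticks, so a pts of 900000 corresponds to
  // 900000 / 90000 = 10 seconds.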
18883
18884 var parseNalUnitType = function (type) {
18885 switch (type) {
18886 case 0x05:
18887 return 'slice_layer_without_partitioning_rbsp_idr';
18888
18889 case 0x06:
18890 return 'sei_rbsp';
18891
18892 case 0x07:
18893 return 'seq_parameter_set_rbsp';
18894
18895 case 0x08:
18896 return 'pic_parameter_set_rbsp';
18897
18898 case 0x09:
18899 return 'access_unit_delimiter_rbsp';
18900
18901 default:
18902 return null;
18903 }
18904 };
18905
18906 var videoPacketContainsKeyFrame = function (packet) {
18907 var offset = 4 + parseAdaptionField(packet);
18908 var frameBuffer = packet.subarray(offset);
18909 var frameI = 0;
18910 var frameSyncPoint = 0;
18911 var foundKeyFrame = false;
18912 var nalType; // advance the sync point to a NAL start, if necessary
18913
18914 for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
18915 if (frameBuffer[frameSyncPoint + 2] === 1) {
18916 // the sync point is properly aligned
18917 frameI = frameSyncPoint + 5;
18918 break;
18919 }
18920 }
18921
18922 while (frameI < frameBuffer.byteLength) {
18923 // look at the current byte to determine if we've hit the end of
18924 // a NAL unit boundary
18925 switch (frameBuffer[frameI]) {
18926 case 0:
18927 // skip past non-sync sequences
18928 if (frameBuffer[frameI - 1] !== 0) {
18929 frameI += 2;
18930 break;
18931 } else if (frameBuffer[frameI - 2] !== 0) {
18932 frameI++;
18933 break;
18934 }
18935
18936 if (frameSyncPoint + 3 !== frameI - 2) {
18937 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
18938
18939 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
18940 foundKeyFrame = true;
18941 }
18942 } // drop trailing zeroes
18943
18944
18945 do {
18946 frameI++;
18947 } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
18948
18949 frameSyncPoint = frameI - 2;
18950 frameI += 3;
18951 break;
18952
18953 case 1:
18954 // skip past non-sync sequences
18955 if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
18956 frameI += 3;
18957 break;
18958 }
18959
18960 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
18961
18962 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
18963 foundKeyFrame = true;
18964 }
18965
18966 frameSyncPoint = frameI - 2;
18967 frameI += 3;
18968 break;
18969
18970 default:
18971 // the current byte isn't a one or zero, so it cannot be part
18972 // of a sync sequence
18973 frameI += 3;
18974 break;
18975 }
18976 }
18977
18978 frameBuffer = frameBuffer.subarray(frameSyncPoint);
18979 frameI -= frameSyncPoint;
18980 frameSyncPoint = 0; // parse the final nal
18981
18982 if (frameBuffer && frameBuffer.byteLength > 3) {
18983 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
18984
18985 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
18986 foundKeyFrame = true;
18987 }
18988 }
18989
18990 return foundKeyFrame;
18991 };
18992
18993 var probe$1 = {
18994 parseType: parseType,
18995 parsePat: parsePat,
18996 parsePmt: parsePmt,
18997 parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
18998 parsePesType: parsePesType,
18999 parsePesTime: parsePesTime,
19000 videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
19001 };
19002 /**
19003 * mux.js
19004 *
19005 * Copyright (c) Brightcove
19006 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
19007 *
19008 * Parse mpeg2 transport stream packets to extract basic timing information
19009 */
19010
19011 var StreamTypes = streamTypes;
19012 var handleRollover = timestampRolloverStream.handleRollover;
19013 var probe = {};
19014 probe.ts = probe$1;
19015 probe.aac = utils;
19016 var ONE_SECOND_IN_TS = clock$2.ONE_SECOND_IN_TS;
19017 var MP2T_PACKET_LENGTH = 188,
19018 // bytes
19019 SYNC_BYTE = 0x47;
19020 /**
19021 * walks through segment data looking for pat and pmt packets to parse out
19022 * program map table information
19023 */
19024
19025 var parsePsi_ = function (bytes, pmt) {
19026 var startIndex = 0,
19027 endIndex = MP2T_PACKET_LENGTH,
19028 packet,
19029 type;
19030
19031 while (endIndex < bytes.byteLength) {
19032 // Look for a pair of start and end sync bytes in the data..
19033 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
19034 // We found a packet
19035 packet = bytes.subarray(startIndex, endIndex);
19036 type = probe.ts.parseType(packet, pmt.pid);
19037
19038 switch (type) {
19039 case 'pat':
19040 pmt.pid = probe.ts.parsePat(packet);
19041 break;
19042
19043 case 'pmt':
19044 var table = probe.ts.parsePmt(packet);
19045 pmt.table = pmt.table || {};
19046 Object.keys(table).forEach(function (key) {
19047 pmt.table[key] = table[key];
19048 });
19049 break;
19050 }
19051
19052 startIndex += MP2T_PACKET_LENGTH;
19053 endIndex += MP2T_PACKET_LENGTH;
19054 continue;
19055 } // If we get here, we have somehow become de-synchronized and we need to step
19056 // forward one byte at a time until we find a pair of sync bytes that denote
19057 // a packet
19058
19059
19060 startIndex++;
19061 endIndex++;
19062 }
19063 };
19064 /**
19065 * walks through the segment data from the start and end to get timing information
19066 * for the first and last audio pes packets
19067 */
19068
19069
19070 var parseAudioPes_ = function (bytes, pmt, result) {
19071 var startIndex = 0,
19072 endIndex = MP2T_PACKET_LENGTH,
19073 packet,
19074 type,
19075 pesType,
19076 pusi,
19077 parsed;
19078 var endLoop = false; // Start walking from start of segment to get first audio packet
19079
19080 while (endIndex <= bytes.byteLength) {
19081 // Look for a pair of start and end sync bytes in the data..
19082 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
19083 // We found a packet
19084 packet = bytes.subarray(startIndex, endIndex);
19085 type = probe.ts.parseType(packet, pmt.pid);
19086
19087 switch (type) {
19088 case 'pes':
19089 pesType = probe.ts.parsePesType(packet, pmt.table);
19090 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
19091
19092 if (pesType === 'audio' && pusi) {
19093 parsed = probe.ts.parsePesTime(packet);
19094
19095 if (parsed) {
19096 parsed.type = 'audio';
19097 result.audio.push(parsed);
19098 endLoop = true;
19099 }
19100 }
19101
19102 break;
19103 }
19104
19105 if (endLoop) {
19106 break;
19107 }
19108
19109 startIndex += MP2T_PACKET_LENGTH;
19110 endIndex += MP2T_PACKET_LENGTH;
19111 continue;
19112 } // If we get here, we have somehow become de-synchronized and we need to step
19113 // forward one byte at a time until we find a pair of sync bytes that denote
19114 // a packet
19115
19116
19117 startIndex++;
19118 endIndex++;
19119 } // Start walking from end of segment to get last audio packet
19120
19121
19122 endIndex = bytes.byteLength;
19123 startIndex = endIndex - MP2T_PACKET_LENGTH;
19124 endLoop = false;
19125
19126 while (startIndex >= 0) {
19127 // Look for a pair of start and end sync bytes in the data.
19128 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
19129 // We found a packet
19130 packet = bytes.subarray(startIndex, endIndex);
19131 type = probe.ts.parseType(packet, pmt.pid);
19132
19133 switch (type) {
19134 case 'pes':
19135 pesType = probe.ts.parsePesType(packet, pmt.table);
19136 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
19137
19138 if (pesType === 'audio' && pusi) {
19139 parsed = probe.ts.parsePesTime(packet);
19140
19141 if (parsed) {
19142 parsed.type = 'audio';
19143 result.audio.push(parsed);
19144 endLoop = true;
19145 }
19146 }
19147
19148 break;
19149 }
19150
19151 if (endLoop) {
19152 break;
19153 }
19154
19155 startIndex -= MP2T_PACKET_LENGTH;
19156 endIndex -= MP2T_PACKET_LENGTH;
19157 continue;
19158 } // If we get here, we have somehow become de-synchronized and we need to step
19159 // backward one byte at a time until we find a pair of sync bytes that denote
19160 // a packet
19161
19162
19163 startIndex--;
19164 endIndex--;
19165 }
19166 };
19167 /**
19168 * walks through the segment data from the start and end to get timing information
19169 * for the first and last video pes packets as well as timing information for the first
19170 * key frame.
19171 */
19172
19173
19174 var parseVideoPes_ = function (bytes, pmt, result) {
19175 var startIndex = 0,
19176 endIndex = MP2T_PACKET_LENGTH,
19177 packet,
19178 type,
19179 pesType,
19180 pusi,
19181 parsed,
19182 frame,
19183 i,
19184 pes;
19185 var endLoop = false;
19186 var currentFrame = {
19187 data: [],
19188 size: 0
19189 }; // Start walking from start of segment to get first video packet
19190
19191 while (endIndex < bytes.byteLength) {
19192 // Look for a pair of start and end sync bytes in the data.
19193 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
19194 // We found a packet
19195 packet = bytes.subarray(startIndex, endIndex);
19196 type = probe.ts.parseType(packet, pmt.pid);
19197
19198 switch (type) {
19199 case 'pes':
19200 pesType = probe.ts.parsePesType(packet, pmt.table);
19201 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
19202
19203 if (pesType === 'video') {
19204 if (pusi && !endLoop) {
19205 parsed = probe.ts.parsePesTime(packet);
19206
19207 if (parsed) {
19208 parsed.type = 'video';
19209 result.video.push(parsed);
19210 endLoop = true;
19211 }
19212 }
19213
19214 if (!result.firstKeyFrame) {
19215 if (pusi) {
19216 if (currentFrame.size !== 0) {
19217 frame = new Uint8Array(currentFrame.size);
19218 i = 0;
19219
19220 while (currentFrame.data.length) {
19221 pes = currentFrame.data.shift();
19222 frame.set(pes, i);
19223 i += pes.byteLength;
19224 }
19225
19226 if (probe.ts.videoPacketContainsKeyFrame(frame)) {
19227 var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
19228 // the keyframe seems to work fine with HLS playback
19229 // and is definitely preferable to crashing with a TypeError.
19230
19231 if (firstKeyFrame) {
19232 result.firstKeyFrame = firstKeyFrame;
19233 result.firstKeyFrame.type = 'video';
19234 } else {
19235 // eslint-disable-next-line
19236 console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
19237 }
19238 }
19239
19240 currentFrame.size = 0;
19241 }
19242 }
19243
19244 currentFrame.data.push(packet);
19245 currentFrame.size += packet.byteLength;
19246 }
19247 }
19248
19249 break;
19250 }
19251
19252 if (endLoop && result.firstKeyFrame) {
19253 break;
19254 }
19255
19256 startIndex += MP2T_PACKET_LENGTH;
19257 endIndex += MP2T_PACKET_LENGTH;
19258 continue;
19259 } // If we get here, we have somehow become de-synchronized and we need to step
19260 // forward one byte at a time until we find a pair of sync bytes that denote
19261 // a packet
19262
19263
19264 startIndex++;
19265 endIndex++;
19266 } // Start walking from end of segment to get last video packet
19267
19268
19269 endIndex = bytes.byteLength;
19270 startIndex = endIndex - MP2T_PACKET_LENGTH;
19271 endLoop = false;
19272
19273 while (startIndex >= 0) {
19274 // Look for a pair of start and end sync bytes in the data.
19275 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
19276 // We found a packet
19277 packet = bytes.subarray(startIndex, endIndex);
19278 type = probe.ts.parseType(packet, pmt.pid);
19279
19280 switch (type) {
19281 case 'pes':
19282 pesType = probe.ts.parsePesType(packet, pmt.table);
19283 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
19284
19285 if (pesType === 'video' && pusi) {
19286 parsed = probe.ts.parsePesTime(packet);
19287
19288 if (parsed) {
19289 parsed.type = 'video';
19290 result.video.push(parsed);
19291 endLoop = true;
19292 }
19293 }
19294
19295 break;
19296 }
19297
19298 if (endLoop) {
19299 break;
19300 }
19301
19302 startIndex -= MP2T_PACKET_LENGTH;
19303 endIndex -= MP2T_PACKET_LENGTH;
19304 continue;
19305 } // If we get here, we have somehow become de-synchronized and we need to step
19306 // backward one byte at a time until we find a pair of sync bytes that denote
19307 // a packet
19308
19309
19310 startIndex--;
19311 endIndex--;
19312 }
19313 };
19314 /**
19315 * Adjusts the timestamp information for the segment to account for
19316 * rollover and converts to seconds based on the PES packet timescale (90kHz clock)
19317 */
19318
19319
19320 var adjustTimestamp_ = function (segmentInfo, baseTimestamp) {
19321 if (segmentInfo.audio && segmentInfo.audio.length) {
19322 var audioBaseTimestamp = baseTimestamp;
19323
19324 if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
19325 audioBaseTimestamp = segmentInfo.audio[0].dts;
19326 }
19327
19328 segmentInfo.audio.forEach(function (info) {
19329 info.dts = handleRollover(info.dts, audioBaseTimestamp);
19330 info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
19331
19332 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
19333 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
19334 });
19335 }
19336
19337 if (segmentInfo.video && segmentInfo.video.length) {
19338 var videoBaseTimestamp = baseTimestamp;
19339
19340 if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
19341 videoBaseTimestamp = segmentInfo.video[0].dts;
19342 }
19343
19344 segmentInfo.video.forEach(function (info) {
19345 info.dts = handleRollover(info.dts, videoBaseTimestamp);
19346 info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
19347
19348 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
19349 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
19350 });
19351
19352 if (segmentInfo.firstKeyFrame) {
19353 var frame = segmentInfo.firstKeyFrame;
19354 frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
19355 frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
19356
19357 frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
19358 frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
19359 }
19360 }
19361 };
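  // Worked example: with the 90kHz PES clock, ONE_SECOND_IN_TS is 90000, so a
  // rolled-over pts of 900000 ticks becomes 900000 / 90000 = 10 seconds in the
  // ptsTime field above.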
19362 /**
19363 * inspects the aac data stream for start and end time information
19364 */
19365
19366
19367 var inspectAac_ = function (bytes) {
19368 var endLoop = false,
19369 audioCount = 0,
19370 sampleRate = null,
19371 timestamp = null,
19372 frameSize = 0,
19373 byteIndex = 0,
19374 packet;
19375
19376 while (bytes.length - byteIndex >= 3) {
19377 var type = probe.aac.parseType(bytes, byteIndex);
19378
19379 switch (type) {
19380 case 'timed-metadata':
19381 // Exit early because we don't have enough to parse
19382 // the ID3 tag header
19383 if (bytes.length - byteIndex < 10) {
19384 endLoop = true;
19385 break;
19386 }
19387
19388 frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
19389 // to emit a full packet
19390
19391 if (frameSize > bytes.length) {
19392 endLoop = true;
19393 break;
19394 }
19395
19396 if (timestamp === null) {
19397 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
19398 timestamp = probe.aac.parseAacTimestamp(packet);
19399 }
19400
19401 byteIndex += frameSize;
19402 break;
19403
19404 case 'audio':
19405 // Exit early because we don't have enough to parse
19406 // the ADTS frame header
19407 if (bytes.length - byteIndex < 7) {
19408 endLoop = true;
19409 break;
19410 }
19411
19412 frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
19413 // to emit a full packet
19414
19415 if (frameSize > bytes.length) {
19416 endLoop = true;
19417 break;
19418 }
19419
19420 if (sampleRate === null) {
19421 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
19422 sampleRate = probe.aac.parseSampleRate(packet);
19423 }
19424
19425 audioCount++;
19426 byteIndex += frameSize;
19427 break;
19428
19429 default:
19430 byteIndex++;
19431 break;
19432 }
19433
19434 if (endLoop) {
19435 return null;
19436 }
19437 }
19438
19439 if (sampleRate === null || timestamp === null) {
19440 return null;
19441 }
19442
19443 var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
19444 var result = {
19445 audio: [{
19446 type: 'audio',
19447 dts: timestamp,
19448 pts: timestamp
19449 }, {
19450 type: 'audio',
19451 dts: timestamp + audioCount * 1024 * audioTimescale,
19452 pts: timestamp + audioCount * 1024 * audioTimescale
19453 }]
19454 };
19455 return result;
19456 };
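  // Worked example: each ADTS frame carries 1024 samples, so at a 48000Hz
  // sample rate one frame spans 1024 * (90000 / 48000) = 1920 ticks of the
  // 90kHz clock. The end timestamp above is therefore the first timestamp
  // plus audioCount * 1024 * audioTimescale.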
19457 /**
19458 * inspects the transport stream segment data for start and end time information
19459 * of the audio and video tracks (when present) as well as the first key frame's
19460 * start time.
19461 */
19462
19463
19464 var inspectTs_ = function (bytes) {
19465 var pmt = {
19466 pid: null,
19467 table: null
19468 };
19469 var result = {};
19470 parsePsi_(bytes, pmt);
19471
19472 for (var pid in pmt.table) {
19473 if (pmt.table.hasOwnProperty(pid)) {
19474 var type = pmt.table[pid];
19475
19476 switch (type) {
19477 case StreamTypes.H264_STREAM_TYPE:
19478 result.video = [];
19479 parseVideoPes_(bytes, pmt, result);
19480
19481 if (result.video.length === 0) {
19482 delete result.video;
19483 }
19484
19485 break;
19486
19487 case StreamTypes.ADTS_STREAM_TYPE:
19488 result.audio = [];
19489 parseAudioPes_(bytes, pmt, result);
19490
19491 if (result.audio.length === 0) {
19492 delete result.audio;
19493 }
19494
19495 break;
19496 }
19497 }
19498 }
19499
19500 return result;
19501 };
19502 /**
19503 * Inspects segment byte data and returns an object with start and end timing information
19504 *
19505 * @param {Uint8Array} bytes The segment byte data
19506 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
19507 * timestamps for rollover. This value must be expressed in the 90kHz clock.
19508 * @return {Object} Object containing start and end frame timing info of segment.
19509 */
19510
19511
19512 var inspect = function (bytes, baseTimestamp) {
19513 var isAacData = probe.aac.isLikelyAacData(bytes);
19514 var result;
19515
19516 if (isAacData) {
19517 result = inspectAac_(bytes);
19518 } else {
19519 result = inspectTs_(bytes);
19520 }
19521
19522 if (!result || !result.audio && !result.video) {
19523 return null;
19524 }
19525
19526 adjustTimestamp_(result, baseTimestamp);
19527 return result;
19528 };
19529
19530 var tsInspector = {
19531 inspect: inspect,
19532 parseAudioPes_: parseAudioPes_
19533 };
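  // Usage sketch (hypothetical caller, not part of the bundle), where
  // `segmentBytes` is a Uint8Array of TS or AAC segment data and the base
  // timestamp is expressed in 90kHz ticks:
  //
  //   var timing = tsInspector.inspect(segmentBytes, 10 * 90000);
  //   if (timing && timing.video) {
  //     // timing.video[0].ptsTime is the first video PTS in seconds and
  //     // timing.video[1].ptsTime the last; timing.firstKeyFrame, when set,
  //     // carries the first keyframe's timing
  //   }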
19534 /* global self */
19535
19536 /**
19537 * Re-emits transmuxer events by converting them into messages to the
19538 * world outside the worker.
19539 *
19540 * @param {Object} transmuxer the transmuxer to wire events on
19541 * @private
19542 */
19543
19544 const wireTransmuxerEvents = function (self, transmuxer) {
19545 transmuxer.on('data', function (segment) {
19546 // transfer ownership of the underlying ArrayBuffer
19547 // instead of doing a copy to save memory
19548 // ArrayBuffers are transferable but generic TypedArrays are not
19549 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
19550 const initArray = segment.initSegment;
19551 segment.initSegment = {
19552 data: initArray.buffer,
19553 byteOffset: initArray.byteOffset,
19554 byteLength: initArray.byteLength
19555 };
19556 const typedArray = segment.data;
19557 segment.data = typedArray.buffer;
19558 self.postMessage({
19559 action: 'data',
19560 segment,
19561 byteOffset: typedArray.byteOffset,
19562 byteLength: typedArray.byteLength
19563 }, [segment.data]);
19564 });
19565 transmuxer.on('done', function (data) {
19566 self.postMessage({
19567 action: 'done'
19568 });
19569 });
19570 transmuxer.on('gopInfo', function (gopInfo) {
19571 self.postMessage({
19572 action: 'gopInfo',
19573 gopInfo
19574 });
19575 });
19576 transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
19577 const videoSegmentTimingInfo = {
19578 start: {
19579 decode: clock$2.videoTsToSeconds(timingInfo.start.dts),
19580 presentation: clock$2.videoTsToSeconds(timingInfo.start.pts)
19581 },
19582 end: {
19583 decode: clock$2.videoTsToSeconds(timingInfo.end.dts),
19584 presentation: clock$2.videoTsToSeconds(timingInfo.end.pts)
19585 },
19586 baseMediaDecodeTime: clock$2.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
19587 };
19588
19589 if (timingInfo.prependedContentDuration) {
19590 videoSegmentTimingInfo.prependedContentDuration = clock$2.videoTsToSeconds(timingInfo.prependedContentDuration);
19591 }
19592
19593 self.postMessage({
19594 action: 'videoSegmentTimingInfo',
19595 videoSegmentTimingInfo
19596 });
19597 });
19598 transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
19599 // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
19600 const audioSegmentTimingInfo = {
19601 start: {
19602 decode: clock$2.videoTsToSeconds(timingInfo.start.dts),
19603 presentation: clock$2.videoTsToSeconds(timingInfo.start.pts)
19604 },
19605 end: {
19606 decode: clock$2.videoTsToSeconds(timingInfo.end.dts),
19607 presentation: clock$2.videoTsToSeconds(timingInfo.end.pts)
19608 },
19609 baseMediaDecodeTime: clock$2.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
19610 };
19611
19612 if (timingInfo.prependedContentDuration) {
19613 audioSegmentTimingInfo.prependedContentDuration = clock$2.videoTsToSeconds(timingInfo.prependedContentDuration);
19614 }
19615
19616 self.postMessage({
19617 action: 'audioSegmentTimingInfo',
19618 audioSegmentTimingInfo
19619 });
19620 });
19621 transmuxer.on('id3Frame', function (id3Frame) {
19622 self.postMessage({
19623 action: 'id3Frame',
19624 id3Frame
19625 });
19626 });
19627 transmuxer.on('caption', function (caption) {
19628 self.postMessage({
19629 action: 'caption',
19630 caption
19631 });
19632 });
19633 transmuxer.on('trackinfo', function (trackInfo) {
19634 self.postMessage({
19635 action: 'trackinfo',
19636 trackInfo
19637 });
19638 });
19639 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
19640 // convert to video TS since we prioritize video time over audio
19641 self.postMessage({
19642 action: 'audioTimingInfo',
19643 audioTimingInfo: {
19644 start: clock$2.videoTsToSeconds(audioTimingInfo.start),
19645 end: clock$2.videoTsToSeconds(audioTimingInfo.end)
19646 }
19647 });
19648 });
19649 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
19650 self.postMessage({
19651 action: 'videoTimingInfo',
19652 videoTimingInfo: {
19653 start: clock$2.videoTsToSeconds(videoTimingInfo.start),
19654 end: clock$2.videoTsToSeconds(videoTimingInfo.end)
19655 }
19656 });
19657 });
19658 transmuxer.on('log', function (log) {
19659 self.postMessage({
19660 action: 'log',
19661 log
19662 });
19663 });
19664 };
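  // A minimal sketch of the transfer pattern used by the 'data' handler
  // above, assuming a worker scope `self` and a Uint8Array `segmentData`:
  //
  //   self.postMessage({
  //     action: 'data',
  //     data: segmentData.buffer,
  //     byteOffset: segmentData.byteOffset,
  //     byteLength: segmentData.byteLength
  //   }, [segmentData.buffer]); // the second argument lists transferables
  //
  // After the call the buffer is detached on this side; the receiver rebuilds
  // the view with new Uint8Array(data, byteOffset, byteLength).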
19665 /**
19666 * All incoming messages route through this hash. If no function exists
19667 * to handle an incoming message, then we ignore the message.
19668 *
19669 * @class MessageHandlers
19670 * @param {Object} options the options to initialize with
19671 */
19672
19673
19674 class MessageHandlers {
19675 constructor(self, options) {
19676 this.options = options || {};
19677 this.self = self;
19678 this.init();
19679 }
19680 /**
19681 * initialize our web worker and wire all the events.
19682 */
19683
19684
19685 init() {
19686 if (this.transmuxer) {
19687 this.transmuxer.dispose();
19688 }
19689
19690 this.transmuxer = new transmuxer.Transmuxer(this.options);
19691 wireTransmuxerEvents(this.self, this.transmuxer);
19692 }
19693
19694 pushMp4Captions(data) {
19695 if (!this.captionParser) {
19696 this.captionParser = new captionParser();
19697 this.captionParser.init();
19698 }
19699
19700 const segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
19701 const parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
19702 this.self.postMessage({
19703 action: 'mp4Captions',
19704 captions: parsed && parsed.captions || [],
19705 logs: parsed && parsed.logs || [],
19706 data: segment.buffer
19707 }, [segment.buffer]);
19708 }
19709
19710 probeMp4StartTime({
19711 timescales,
19712 data
19713 }) {
19714 const startTime = probe$2.startTime(timescales, data);
19715 this.self.postMessage({
19716 action: 'probeMp4StartTime',
19717 startTime,
19718 data
19719 }, [data.buffer]);
19720 }
19721
19722 probeMp4Tracks({
19723 data
19724 }) {
19725 const tracks = probe$2.tracks(data);
19726 this.self.postMessage({
19727 action: 'probeMp4Tracks',
19728 tracks,
19729 data
19730 }, [data.buffer]);
19731 }
19732 /**
19733 * Probes an mp4 segment for EMSG boxes containing ID3 data.
19734 * https://aomediacodec.github.io/id3-emsg/
19735 *
19736 * @param {Uint8Array} data segment data
19737 * @param {number} offset segment start time
19738 * @return {Object[]} an array of ID3 frames
19739 */
19740
19741
19742 probeEmsgID3({
19743 data,
19744 offset
19745 }) {
19746 const id3Frames = probe$2.getEmsgID3(data, offset);
19747 this.self.postMessage({
19748 action: 'probeEmsgID3',
19749 id3Frames,
19750 emsgData: data
19751 }, [data.buffer]);
19752 }
19753 /**
19754 * Probe an mpeg2-ts segment to determine the start time of the segment in its
19755 * internal "media time," as well as whether it contains video and/or audio.
19756 *
19757 * @private
19758 * @param {Uint8Array} bytes - segment bytes
19759 * @param {number} baseStartTime
19760 * Relative reference timestamp used when adjusting frame timestamps for rollover.
19761 * This value should be in seconds, as it's converted to a 90khz clock within the
19762 * function body.
19763 * @return {Object} The start time of the current segment in "media time" as well as
19764 * whether it contains video and/or audio
19765 */
19766
19767
19768 probeTs({
19769 data,
19770 baseStartTime
19771 }) {
19772 const tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock$2.ONE_SECOND_IN_TS : void 0;
19773 const timeInfo = tsInspector.inspect(data, tsStartTime);
19774 let result = null;
19775
19776 if (timeInfo) {
19777 result = {
19778 // each type's time info comes back as an array of 2 times, start and end
19779 hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
19780 hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
19781 };
19782
19783 if (result.hasVideo) {
19784 result.videoStart = timeInfo.video[0].ptsTime;
19785 }
19786
19787 if (result.hasAudio) {
19788 result.audioStart = timeInfo.audio[0].ptsTime;
19789 }
19790 }
19791
19792 this.self.postMessage({
19793 action: 'probeTs',
19794 result,
19795 data
19796 }, [data.buffer]);
19797 }
19798
19799 clearAllMp4Captions() {
19800 if (this.captionParser) {
19801 this.captionParser.clearAllCaptions();
19802 }
19803 }
19804
19805 clearParsedMp4Captions() {
19806 if (this.captionParser) {
19807 this.captionParser.clearParsedCaptions();
19808 }
19809 }
19810 /**
19811 * Adds data (a ts segment) to the start of the transmuxer pipeline for
19812 * processing.
19813 *
19814 * @param {ArrayBuffer} data data to push into the muxer
19815 */
19816
19817
19818 push(data) {
19819 // Cast array buffer to correct type for transmuxer
19820 const segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
19821 this.transmuxer.push(segment);
19822 }
19823 /**
19824 * Recreate the transmuxer so that the next segment added via `push`
19825 * starts with a fresh transmuxer.
19826 */
19827
19828
19829 reset() {
19830 this.transmuxer.reset();
19831 }
19832 /**
19833 * Set the value that will be used as the `baseMediaDecodeTime` time for the
19834 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
19835 * set relative to the first based on the PTS values.
19836 *
19837 * @param {Object} data used to set the timestamp offset in the muxer
19838 */
19839
19840
19841 setTimestampOffset(data) {
19842 const timestampOffset = data.timestampOffset || 0;
19843 this.transmuxer.setBaseMediaDecodeTime(Math.round(clock$2.secondsToVideoTs(timestampOffset)));
19844 }
19845
19846 setAudioAppendStart(data) {
19847 this.transmuxer.setAudioAppendStart(Math.ceil(clock$2.secondsToVideoTs(data.appendStart)));
19848 }
19849
19850 setRemux(data) {
19851 this.transmuxer.setRemux(data.remux);
19852 }
19853 /**
19854 * Forces the pipeline to finish processing the last segment and emit its
19855 * results.
19856 *
19857 * @param {Object} data event data, not really used
19858 */
19859
19860
19861 flush(data) {
19862 this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
19863
19864 self.postMessage({
19865 action: 'done',
19866 type: 'transmuxed'
19867 });
19868 }
19869
19870 endTimeline() {
19871 this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
19872 // timelines
19873
19874 self.postMessage({
19875 action: 'endedtimeline',
19876 type: 'transmuxed'
19877 });
19878 }
19879
19880 alignGopsWith(data) {
19881 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
19882 }
19883
19884 }
19885 /**
19886 * Our web worker interface so that things can talk to mux.js
19887 * that will be running in a web worker. The scope is passed to this by
19888 * webworkify.
19889 *
19890 * @param {Object} self the scope for the web worker
19891 */
19892
19893
19894 self.onmessage = function (event) {
19895 if (event.data.action === 'init' && event.data.options) {
19896 this.messageHandlers = new MessageHandlers(self, event.data.options);
19897 return;
19898 }
19899
19900 if (!this.messageHandlers) {
19901 this.messageHandlers = new MessageHandlers(self);
19902 }
19903
19904 if (event.data && event.data.action && event.data.action !== 'init') {
19905 if (this.messageHandlers[event.data.action]) {
19906 this.messageHandlers[event.data.action](event.data);
19907 }
19908 }
19909 };
19910 }));
19911 var TransmuxWorker = factory(workerCode$1);
19912 /* rollup-plugin-worker-factory end for worker!/home/runner/work/http-streaming/http-streaming/src/transmuxer-worker.js */
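  // Handshake sketch (hypothetical usage): a TransmuxWorker expects an 'init'
  // message first; every later message names a MessageHandlers method in its
  // `action` field and is ignored if no such handler exists:
  //
  //   const worker = new TransmuxWorker();
  //   worker.postMessage({ action: 'init', options: {} });
  //   worker.postMessage({ action: 'setRemux', remux: true });
  //   worker.postMessage({ action: 'flush' });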
19913
19914 const handleData_ = (event, transmuxedData, callback) => {
19915 const {
19916 type,
19917 initSegment,
19918 captions,
19919 captionStreams,
19920 metadata,
19921 videoFrameDtsTime,
19922 videoFramePtsTime
19923 } = event.data.segment;
19924 transmuxedData.buffer.push({
19925 captions,
19926 captionStreams,
19927 metadata
19928 });
19929 const boxes = event.data.segment.boxes || {
19930 data: event.data.segment.data
19931 };
19932 const result = {
19933 type,
19934 // cast ArrayBuffer to TypedArray
19935 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
19936 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
19937 };
19938
19939 if (typeof videoFrameDtsTime !== 'undefined') {
19940 result.videoFrameDtsTime = videoFrameDtsTime;
19941 }
19942
19943 if (typeof videoFramePtsTime !== 'undefined') {
19944 result.videoFramePtsTime = videoFramePtsTime;
19945 }
19946
19947 callback(result);
19948 };
19949 const handleDone_ = ({
19950 transmuxedData,
19951 callback
19952 }) => {
19953 // Previously we only returned data on data events,
19954 // not on done events. Clear out the buffer to keep that consistent.
19955 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
19956 // have received
19957
19958 callback(transmuxedData);
19959 };
19960 const handleGopInfo_ = (event, transmuxedData) => {
19961 transmuxedData.gopInfo = event.data.gopInfo;
19962 };
19963 const processTransmux = options => {
19964 const {
19965 transmuxer,
19966 bytes,
19967 audioAppendStart,
19968 gopsToAlignWith,
19969 remux,
19970 onData,
19971 onTrackInfo,
19972 onAudioTimingInfo,
19973 onVideoTimingInfo,
19974 onVideoSegmentTimingInfo,
19975 onAudioSegmentTimingInfo,
19976 onId3,
19977 onCaptions,
19978 onDone,
19979 onEndedTimeline,
19980 onTransmuxerLog,
19981 isEndOfTimeline
19982 } = options;
19983 const transmuxedData = {
19984 buffer: []
19985 };
19986 let waitForEndedTimelineEvent = isEndOfTimeline;
19987
19988 const handleMessage = event => {
19989 if (transmuxer.currentTransmux !== options) {
19990 // disposed
19991 return;
19992 }
19993
19994 if (event.data.action === 'data') {
19995 handleData_(event, transmuxedData, onData);
19996 }
19997
19998 if (event.data.action === 'trackinfo') {
19999 onTrackInfo(event.data.trackInfo);
20000 }
20001
20002 if (event.data.action === 'gopInfo') {
20003 handleGopInfo_(event, transmuxedData);
20004 }
20005
20006 if (event.data.action === 'audioTimingInfo') {
20007 onAudioTimingInfo(event.data.audioTimingInfo);
20008 }
20009
20010 if (event.data.action === 'videoTimingInfo') {
20011 onVideoTimingInfo(event.data.videoTimingInfo);
20012 }
20013
20014 if (event.data.action === 'videoSegmentTimingInfo') {
20015 onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
20016 }
20017
20018 if (event.data.action === 'audioSegmentTimingInfo') {
20019 onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
20020 }
20021
20022 if (event.data.action === 'id3Frame') {
20023 onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
20024 }
20025
20026 if (event.data.action === 'caption') {
20027 onCaptions(event.data.caption);
20028 }
20029
20030 if (event.data.action === 'endedtimeline') {
20031 waitForEndedTimelineEvent = false;
20032 onEndedTimeline();
20033 }
20034
20035 if (event.data.action === 'log') {
20036 onTransmuxerLog(event.data.log);
20037 } // wait for the transmuxed event since we may have audio and video
20038
20039
20040 if (event.data.type !== 'transmuxed') {
20041 return;
20042 } // If the "endedtimeline" event has not yet fired, and this segment represents the end
20043 // of a timeline, that means there may still be data events before the segment
20044 // processing can be considered complete. In that case, the final event should be
20045 // an "endedtimeline" event with the type "transmuxed."
20046
20047
20048 if (waitForEndedTimelineEvent) {
20049 return;
20050 }
20051
20052 transmuxer.onmessage = null;
20053 handleDone_({
20054 transmuxedData,
20055 callback: onDone
20056 });
20057 /* eslint-disable no-use-before-define */
20058
20059 dequeue(transmuxer);
20060 /* eslint-enable */
20061 };
20062
20063 transmuxer.onmessage = handleMessage;
20064
20065 if (audioAppendStart) {
20066 transmuxer.postMessage({
20067 action: 'setAudioAppendStart',
20068 appendStart: audioAppendStart
20069 });
20070 } // allow empty arrays to be passed to clear out GOPs
20071
20072
20073 if (Array.isArray(gopsToAlignWith)) {
20074 transmuxer.postMessage({
20075 action: 'alignGopsWith',
20076 gopsToAlignWith
20077 });
20078 }
20079
20080 if (typeof remux !== 'undefined') {
20081 transmuxer.postMessage({
20082 action: 'setRemux',
20083 remux
20084 });
20085 }
20086
20087 if (bytes.byteLength) {
20088 const buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
20089 const byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
20090 transmuxer.postMessage({
20091 action: 'push',
20092 // Send the typed-array of data as an ArrayBuffer so that
20093 // it can be sent as a "Transferable" and avoid the costly
20094 // memory copy
20095 data: buffer,
20096 // To recreate the original typed-array, we need information
20097 // about what portion of the ArrayBuffer it was a view into
20098 byteOffset,
20099 byteLength: bytes.byteLength
20100 }, [buffer]);
20101 }
20102
20103 if (isEndOfTimeline) {
20104 transmuxer.postMessage({
20105 action: 'endTimeline'
20106 });
20107 } // even if we didn't push any bytes, we have to make sure we flush in case we reached
20108 // the end of the segment
20109
20110
20111 transmuxer.postMessage({
20112 action: 'flush'
20113 });
20114 };
20115 const dequeue = transmuxer => {
20116 transmuxer.currentTransmux = null;
20117
20118 if (transmuxer.transmuxQueue.length) {
20119 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
20120
20121 if (typeof transmuxer.currentTransmux === 'function') {
20122 transmuxer.currentTransmux();
20123 } else {
20124 processTransmux(transmuxer.currentTransmux);
20125 }
20126 }
20127 };
20128 const processAction = (transmuxer, action) => {
20129 transmuxer.postMessage({
20130 action
20131 });
20132 dequeue(transmuxer);
20133 };
20134 const enqueueAction = (action, transmuxer) => {
20135 if (!transmuxer.currentTransmux) {
20136 transmuxer.currentTransmux = action;
20137 processAction(transmuxer, action);
20138 return;
20139 }
20140
20141 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
20142 };
20143 const reset = transmuxer => {
20144 enqueueAction('reset', transmuxer);
20145 };
20146 const endTimeline = transmuxer => {
20147 enqueueAction('endTimeline', transmuxer);
20148 };
20149 const transmux = options => {
20150 if (!options.transmuxer.currentTransmux) {
20151 options.transmuxer.currentTransmux = options;
20152 processTransmux(options);
20153 return;
20154 }
20155
20156 options.transmuxer.transmuxQueue.push(options);
20157 };
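  // Queueing note: only one transmux job runs at a time. While
  // `currentTransmux` is set, later jobs (and bound actions) wait in
  // `transmuxQueue`; dequeue() drains them once the running job posts its
  // final 'transmuxed' message. For example, with two hypothetical option
  // objects jobA and jobB:
  //
  //   transmux(jobA); // starts immediately
  //   transmux(jobB); // queued; runs after jobA completes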
20158 const createTransmuxer = options => {
20159 const transmuxer = new TransmuxWorker();
20160 transmuxer.currentTransmux = null;
20161 transmuxer.transmuxQueue = [];
20162 const term = transmuxer.terminate;
20163
20164 transmuxer.terminate = () => {
20165 transmuxer.currentTransmux = null;
20166 transmuxer.transmuxQueue.length = 0;
20167 return term.call(transmuxer);
20168 };
20169
20170 transmuxer.postMessage({
20171 action: 'init',
20172 options
20173 });
20174 return transmuxer;
20175 };
20176 var segmentTransmuxer = {
20177 reset,
20178 endTimeline,
20179 transmux,
20180 createTransmuxer
20181 };
20182
20183 const workerCallback = function (options) {
20184 const transmuxer = options.transmuxer;
20185 const endAction = options.endAction || options.action;
20186 const callback = options.callback;
20187
20188 const message = _extends({}, options, {
20189 endAction: null,
20190 transmuxer: null,
20191 callback: null
20192 });
20193
20194 const listenForEndEvent = event => {
20195 if (event.data.action !== endAction) {
20196 return;
20197 }
20198
20199 transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
20200
20201 if (event.data.data) {
20202 event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
20203
20204 if (options.data) {
20205 options.data = event.data.data;
20206 }
20207 }
20208
20209 callback(event.data);
20210 };
20211
20212 transmuxer.addEventListener('message', listenForEndEvent);
20213
20214 if (options.data) {
20215 const isArrayBuffer = options.data instanceof ArrayBuffer;
20216 message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
20217 message.byteLength = options.data.byteLength;
20218 const transfers = [isArrayBuffer ? options.data : options.data.buffer];
20219 transmuxer.postMessage(message, transfers);
20220 } else {
20221 transmuxer.postMessage(message);
20222 }
20223 };
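  // Usage sketch for workerCallback (hypothetical names): post a one-shot
  // action and receive the reply whose `action` matches endAction (or the
  // request's own action when endAction is omitted), with the bytes
  // transferred back:
  //
  //   workerCallback({
  //     action: 'probeMp4Tracks',
  //     data: initSegmentBytes, // Uint8Array; its buffer is transferred away
  //     transmuxer: transmuxerWorker,
  //     callback: function (reply) {
  //       // reply.tracks holds the parsed track list; reply.data hands the
  //       // bytes back as a Uint8Array
  //     }
  //   });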
20224
20225 const REQUEST_ERRORS = {
20226 FAILURE: 2,
20227 TIMEOUT: -101,
20228 ABORTED: -102
20229 };
20230 /**
20231 * Abort all requests
20232 *
20233 * @param {Object} activeXhrs - an object that tracks all XHR requests
20234 */
20235
20236 const abortAll = activeXhrs => {
20237 activeXhrs.forEach(xhr => {
20238 xhr.abort();
20239 });
20240 };
20241 /**
20242 * Gather important bandwidth stats once a request has completed
20243 *
20244 * @param {Object} request - the XHR request from which to gather stats
20245 */
20246
20247
20248 const getRequestStats = request => {
20249 return {
20250 bandwidth: request.bandwidth,
20251 bytesReceived: request.bytesReceived || 0,
20252 roundTripTime: request.roundTripTime || 0
20253 };
20254 };
20255 /**
20256 * If possible gather bandwidth stats as a request is in
20257 * progress
20258 *
20259 * @param {Event} progressEvent - an event object from an XHR's progress event
20260 */
20261
20262
20263 const getProgressStats = progressEvent => {
20264 const request = progressEvent.target;
20265 const roundTripTime = Date.now() - request.requestTime;
20266 const stats = {
20267 bandwidth: Infinity,
20268 bytesReceived: 0,
20269 roundTripTime: roundTripTime || 0
20270 };
20271 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
20272 // because we should only use bandwidth stats on progress to determine when to
20273 // abort a request early due to insufficient bandwidth
20274
20275 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
20276 return stats;
20277 };
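  // Worked example: if 250000 bytes have arrived 500ms after requestTime, the
  // estimate is Math.floor(250000 / 500 * 8 * 1000) = 4000000 bits per second
  // (4 Mbps): bytes per millisecond, times 8 bits per byte, times 1000
  // milliseconds per second.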
20278 /**
20279 * Handle all error conditions in one place and return an object
20280 * with all the information
20281 *
20282 * @param {Error|null} error - if non-null, signals an error occurred with the XHR
20283 * @param {Object} request - the XHR request that possibly generated the error
20284 */
20285
20286
20287 const handleErrors = (error, request) => {
20288 if (request.timedout) {
20289 return {
20290 status: request.status,
20291 message: 'HLS request timed-out at URL: ' + request.uri,
20292 code: REQUEST_ERRORS.TIMEOUT,
20293 xhr: request
20294 };
20295 }
20296
20297 if (request.aborted) {
20298 return {
20299 status: request.status,
20300 message: 'HLS request aborted at URL: ' + request.uri,
20301 code: REQUEST_ERRORS.ABORTED,
20302 xhr: request
20303 };
20304 }
20305
20306 if (error) {
20307 return {
20308 status: request.status,
20309 message: 'HLS request errored at URL: ' + request.uri,
20310 code: REQUEST_ERRORS.FAILURE,
20311 xhr: request
20312 };
20313 }
20314
20315 if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
20316 return {
20317 status: request.status,
20318 message: 'Empty HLS response at URL: ' + request.uri,
20319 code: REQUEST_ERRORS.FAILURE,
20320 xhr: request
20321 };
20322 }
20323
20324 return null;
20325 };
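  // Note on precedence: the checks above resolve in order, so a request that
  // both timed out and errored reports TIMEOUT (-101) rather than FAILURE (2);
  // ABORTED is -102, and an empty arraybuffer response is treated as a
  // FAILURE even though the XHR itself completed.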
20326 /**
20327 * Handle responses for key data and convert the key data to the correct format
20328 * for the decryption step later
20329 *
20330 * @param {Object} segment - a simplified copy of the segmentInfo object
20331 * from SegmentLoader
20332 * @param {Array} objects - objects to add the key bytes to.
20333 * @param {Function} finishProcessingFn - a callback to execute to continue processing
20334 * this request
20335 */
20336
20337
20338 const handleKeyResponse = (segment, objects, finishProcessingFn) => (error, request) => {
20339 const response = request.response;
20340 const errorObj = handleErrors(error, request);
20341
20342 if (errorObj) {
20343 return finishProcessingFn(errorObj, segment);
20344 }
20345
20346 if (response.byteLength !== 16) {
20347 return finishProcessingFn({
20348 status: request.status,
20349 message: 'Invalid HLS key at URL: ' + request.uri,
20350 code: REQUEST_ERRORS.FAILURE,
20351 xhr: request
20352 }, segment);
20353 }
20354
20355 const view = new DataView(response);
20356 const bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
20357
20358 for (let i = 0; i < objects.length; i++) {
20359 objects[i].bytes = bytes;
20360 }
20361
20362 return finishProcessingFn(null, segment);
20363 };
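  // For reference: an AES-128 HLS key is exactly 16 bytes, and
  // DataView#getUint32 reads big-endian by default, so key bytes
  // 00 01 02 03 ... repack as the 32-bit words 0x00010203, 0x04050607, and so
  // on, in the word layout the decryption step consumes.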
20364
20365 const parseInitSegment = (segment, callback) => {
20366 const type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
20367 // only know how to parse mp4 init segments at the moment
20368
20369 if (type !== 'mp4') {
20370 const uri = segment.map.resolvedUri || segment.map.uri;
20371 return callback({
20372 internal: true,
20373 message: `Found unsupported ${type || 'unknown'} container for initialization segment at URL: ${uri}`,
20374 code: REQUEST_ERRORS.FAILURE
20375 });
20376 }
20377
20378 workerCallback({
20379 action: 'probeMp4Tracks',
20380 data: segment.map.bytes,
20381 transmuxer: segment.transmuxer,
20382 callback: ({
20383 tracks,
20384 data
20385 }) => {
20386 // transfer bytes back to us
20387 segment.map.bytes = data;
20388 tracks.forEach(function (track) {
20389 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
20390
20391 if (segment.map.tracks[track.type]) {
20392 return;
20393 }
20394
20395 segment.map.tracks[track.type] = track;
20396
20397 if (typeof track.id === 'number' && track.timescale) {
20398 segment.map.timescales = segment.map.timescales || {};
20399 segment.map.timescales[track.id] = track.timescale;
20400 }
20401 });
20402 return callback(null);
20403 }
20404 });
20405 };
20406 /**
20407 * Handle init-segment responses
20408 *
20409 * @param {Object} segment - a simplified copy of the segmentInfo object
20410 * from SegmentLoader
20411 * @param {Function} finishProcessingFn - a callback to execute to continue processing
20412 * this request
20413 */
20414
20415
20416 const handleInitSegmentResponse = ({
20417 segment,
20418 finishProcessingFn
20419 }) => (error, request) => {
20420 const errorObj = handleErrors(error, request);
20421
20422 if (errorObj) {
20423 return finishProcessingFn(errorObj, segment);
20424 }
20425
20426 const bytes = new Uint8Array(request.response); // init segment is encrypted, so we will have to wait
20427 // until the key request is done to decrypt.
20428
20429 if (segment.map.key) {
20430 segment.map.encryptedBytes = bytes;
20431 return finishProcessingFn(null, segment);
20432 }
20433
20434 segment.map.bytes = bytes;
20435 parseInitSegment(segment, function (parseError) {
20436 if (parseError) {
20437 parseError.xhr = request;
20438 parseError.status = request.status;
20439 return finishProcessingFn(parseError, segment);
20440 }
20441
20442 finishProcessingFn(null, segment);
20443 });
20444 };
20445 /**
20446 * Response handler for segment requests, being sure to set the correct
20447 * property depending on whether the segment is encrypted or not.
20448 * Also records and keeps track of stats that are used for ABR purposes
20449 *
20450 * @param {Object} segment - a simplified copy of the segmentInfo object
20451 * from SegmentLoader
20452 * @param {Function} finishProcessingFn - a callback to execute to continue processing
20453 * this request
20454 */
20455
20456
20457 const handleSegmentResponse = ({
20458 segment,
20459 finishProcessingFn,
20460 responseType
20461 }) => (error, request) => {
20462 const errorObj = handleErrors(error, request);
20463
20464 if (errorObj) {
20465 return finishProcessingFn(errorObj, segment);
20466 }
20467
20468 const newBytes = // although responseText "should" exist, this guard serves to prevent an error being
20469 // thrown for two primary cases:
20470 // 1. the mime type override stops working, or is not implemented for a specific
20471 // browser
20472 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
20473 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
20474 segment.stats = getRequestStats(request);
20475
20476 if (segment.key) {
20477 segment.encryptedBytes = new Uint8Array(newBytes);
20478 } else {
20479 segment.bytes = new Uint8Array(newBytes);
20480 }
20481
20482 return finishProcessingFn(null, segment);
20483 };
20484
20485 const transmuxAndNotify = ({
20486 segment,
20487 bytes,
20488 trackInfoFn,
20489 timingInfoFn,
20490 videoSegmentTimingInfoFn,
20491 audioSegmentTimingInfoFn,
20492 id3Fn,
20493 captionsFn,
20494 isEndOfTimeline,
20495 endedTimelineFn,
20496 dataFn,
20497 doneFn,
20498 onTransmuxerLog
20499 }) => {
20500 const fmp4Tracks = segment.map && segment.map.tracks || {};
20501 const isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
20502 // One reason for this is that in the case of full segments, we want to trust start
20503 // times from the probe, rather than the transmuxer.
20504
20505 let audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
20506 const audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
20507 let videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
20508 const videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');
20509
20510 const finish = () => transmux({
20511 bytes,
20512 transmuxer: segment.transmuxer,
20513 audioAppendStart: segment.audioAppendStart,
20514 gopsToAlignWith: segment.gopsToAlignWith,
20515 remux: isMuxed,
20516 onData: result => {
20517 result.type = result.type === 'combined' ? 'video' : result.type;
20518 dataFn(segment, result);
20519 },
20520 onTrackInfo: trackInfo => {
20521 if (trackInfoFn) {
20522 if (isMuxed) {
20523 trackInfo.isMuxed = true;
20524 }
20525
20526 trackInfoFn(segment, trackInfo);
20527 }
20528 },
20529 onAudioTimingInfo: audioTimingInfo => {
20530 // we only want the first start value we encounter
20531 if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
20532 audioStartFn(audioTimingInfo.start);
20533 audioStartFn = null;
20534 } // we want to continually update the end time
20535
20536
20537 if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
20538 audioEndFn(audioTimingInfo.end);
20539 }
20540 },
20541 onVideoTimingInfo: videoTimingInfo => {
20542 // we only want the first start value we encounter
20543 if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
20544 videoStartFn(videoTimingInfo.start);
20545 videoStartFn = null;
20546 } // we want to continually update the end time
20547
20548
20549 if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
20550 videoEndFn(videoTimingInfo.end);
20551 }
20552 },
20553 onVideoSegmentTimingInfo: videoSegmentTimingInfo => {
20554 videoSegmentTimingInfoFn(videoSegmentTimingInfo);
20555 },
20556 onAudioSegmentTimingInfo: audioSegmentTimingInfo => {
20557 audioSegmentTimingInfoFn(audioSegmentTimingInfo);
20558 },
20559 onId3: (id3Frames, dispatchType) => {
20560 id3Fn(segment, id3Frames, dispatchType);
20561 },
20562 onCaptions: captions => {
20563 captionsFn(segment, [captions]);
20564 },
20565 isEndOfTimeline,
20566 onEndedTimeline: () => {
20567 endedTimelineFn();
20568 },
20569 onTransmuxerLog,
20570 onDone: result => {
20571 if (!doneFn) {
20572 return;
20573 }
20574
20575 result.type = result.type === 'combined' ? 'video' : result.type;
20576 doneFn(null, segment, result);
20577 }
20578 }); // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
20579 // Meaning cached frame data may corrupt our notion of where this segment
20580 // really starts. To get around this, probe for the info needed.
20581
20582
20583 workerCallback({
20584 action: 'probeTs',
20585 transmuxer: segment.transmuxer,
20586 data: bytes,
20587 baseStartTime: segment.baseStartTime,
20588 callback: data => {
20589 segment.bytes = bytes = data.data;
20590 const probeResult = data.result;
20591
20592 if (probeResult) {
20593 trackInfoFn(segment, {
20594 hasAudio: probeResult.hasAudio,
20595 hasVideo: probeResult.hasVideo,
20596 isMuxed
20597 });
20598 trackInfoFn = null;
20599 }
20600
20601 finish();
20602 }
20603 });
20604 };
20605
20606 const handleSegmentBytes = ({
20607 segment,
20608 bytes,
20609 trackInfoFn,
20610 timingInfoFn,
20611 videoSegmentTimingInfoFn,
20612 audioSegmentTimingInfoFn,
20613 id3Fn,
20614 captionsFn,
20615 isEndOfTimeline,
20616 endedTimelineFn,
20617 dataFn,
20618 doneFn,
20619 onTransmuxerLog
20620 }) => {
20621 let bytesAsUint8Array = new Uint8Array(bytes); // TODO:
20622 // We should have a handler that fetches the number of bytes required
20623 // to check if something is fmp4. This will allow us to save bandwidth
20624 // because we can only exclude a playlist and abort requests
20625 // by codec after trackinfo triggers.
20626
20627 if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
20628 segment.isFmp4 = true;
20629 const {
20630 tracks
20631 } = segment.map;
20632 const trackInfo = {
20633 isFmp4: true,
20634 hasVideo: !!tracks.video,
20635 hasAudio: !!tracks.audio
20636 }; // if we have an audio track with a codec that is not set to
20637 // encrypted audio
20638
20639 if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
20640 trackInfo.audioCodec = tracks.audio.codec;
20641 } // if we have a video track with a codec that is not set to
20642 // encrypted video
20643
20644
20645 if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
20646 trackInfo.videoCodec = tracks.video.codec;
20647 }
20648
20649 if (tracks.video && tracks.audio) {
20650 trackInfo.isMuxed = true;
20651 } // since we don't support appending fmp4 data on progress, we know we have the full
20652 // segment here
20653
20654
20655 trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
20656 // time. The end time can be roughly calculated by the receiver using the duration.
20657 //
20658 // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
20659 // that is the true start of the segment (where the playback engine should begin
20660 // decoding).
20661
20662 const finishLoading = (captions, id3Frames) => {
20663 // if the track still has audio at this point it is only possible
20664 // for it to be audio only. See `tracks.video && tracks.audio` if statement
20665 // above.
20666 // we make sure to use segment.bytes here as that is the decrypted fmp4 buffer.
20667 dataFn(segment, {
20668 data: bytesAsUint8Array,
20669 type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
20670 });
20671
20672 if (id3Frames && id3Frames.length) {
20673 id3Fn(segment, id3Frames);
20674 }
20675
20676 if (captions && captions.length) {
20677 captionsFn(segment, captions);
20678 }
20679
20680 doneFn(null, segment, {});
20681 };
20682
20683 workerCallback({
20684 action: 'probeMp4StartTime',
20685 timescales: segment.map.timescales,
20686 data: bytesAsUint8Array,
20687 transmuxer: segment.transmuxer,
20688 callback: ({
20689 data,
20690 startTime
20691 }) => {
20692 // transfer bytes back to us
20693 bytes = data.buffer;
20694 segment.bytes = bytesAsUint8Array = data;
20695
20696 if (trackInfo.hasAudio && !trackInfo.isMuxed) {
20697 timingInfoFn(segment, 'audio', 'start', startTime);
20698 }
20699
20700 if (trackInfo.hasVideo) {
20701 timingInfoFn(segment, 'video', 'start', startTime);
20702 }
20703
20704 workerCallback({
20705 action: 'probeEmsgID3',
20706 data: bytesAsUint8Array,
20707 transmuxer: segment.transmuxer,
20708 offset: startTime,
20709 callback: ({
20710 emsgData,
20711 id3Frames
20712 }) => {
20713 // transfer bytes back to us
20714 bytes = emsgData.buffer;
20715 segment.bytes = bytesAsUint8Array = emsgData; // Run through the CaptionParser in case there are captions.
20716 // Initialize CaptionParser if it hasn't been yet
20717
20718 if (!tracks.video || !emsgData.byteLength || !segment.transmuxer) {
20719 finishLoading(undefined, id3Frames);
20720 return;
20721 }
20722
20723 workerCallback({
20724 action: 'pushMp4Captions',
20725 endAction: 'mp4Captions',
20726 transmuxer: segment.transmuxer,
20727 data: bytesAsUint8Array,
20728 timescales: segment.map.timescales,
20729 trackIds: [tracks.video.id],
20730 callback: message => {
20731 // transfer bytes back to us
20732 bytes = message.data.buffer;
20733 segment.bytes = bytesAsUint8Array = message.data;
20734 message.logs.forEach(function (log) {
20735 onTransmuxerLog(merge$1(log, {
20736 stream: 'mp4CaptionParser'
20737 }));
20738 });
20739 finishLoading(message.captions, id3Frames);
20740 }
20741 });
20742 }
20743 });
20744 }
20745 });
20746 return;
20747 } // VTT or other segments that don't need processing
20748
20749
20750 if (!segment.transmuxer) {
20751 doneFn(null, segment, {});
20752 return;
20753 }
20754
20755 if (typeof segment.container === 'undefined') {
20756 segment.container = detectContainerForBytes(bytesAsUint8Array);
20757 }
20758
20759 if (segment.container !== 'ts' && segment.container !== 'aac') {
20760 trackInfoFn(segment, {
20761 hasAudio: false,
20762 hasVideo: false
20763 });
20764 doneFn(null, segment, {});
20765 return;
20766 } // ts or aac
20767
20768
20769 transmuxAndNotify({
20770 segment,
20771 bytes,
20772 trackInfoFn,
20773 timingInfoFn,
20774 videoSegmentTimingInfoFn,
20775 audioSegmentTimingInfoFn,
20776 id3Fn,
20777 captionsFn,
20778 isEndOfTimeline,
20779 endedTimelineFn,
20780 dataFn,
20781 doneFn,
20782 onTransmuxerLog
20783 });
20784 };
20785
20786 const decrypt = function ({
20787 id,
20788 key,
20789 encryptedBytes,
20790 decryptionWorker
20791 }, callback) {
20792 const decryptionHandler = event => {
20793 if (event.data.source === id) {
20794 decryptionWorker.removeEventListener('message', decryptionHandler);
20795 const decrypted = event.data.decrypted;
20796 callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
20797 }
20798 };
20799
20800 decryptionWorker.addEventListener('message', decryptionHandler);
20801 let keyBytes;
20802
20803 if (key.bytes.slice) {
20804 keyBytes = key.bytes.slice();
20805 } else {
20806 keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
20807 } // incrementally decrypt the bytes
20808
20809
20810 decryptionWorker.postMessage(createTransferableMessage({
20811 source: id,
20812 encrypted: encryptedBytes,
20813 key: keyBytes,
20814 iv: key.iv
20815 }), [encryptedBytes.buffer, keyBytes.buffer]);
20816 };
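  // Note: both the encrypted payload and the copied key are listed as
  // transferables above, so the caller must not touch encryptedBytes.buffer
  // after this call; the decrypted bytes come back in a 'message' event whose
  // `source` matches the supplied id.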
20817 /**
20818 * Decrypt the segment via the decryption web worker
20819 *
20820 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
20821 * routines
20822 * @param {Object} segment - a simplified copy of the segmentInfo object
20823 * from SegmentLoader
20824 * @param {Function} trackInfoFn - a callback that receives track info
20825 * @param {Function} timingInfoFn - a callback that receives timing info
20826 * @param {Function} videoSegmentTimingInfoFn
20827 * a callback that receives video timing info based on media times and
20828 * any adjustments made by the transmuxer
20829 * @param {Function} audioSegmentTimingInfoFn
20830 * a callback that receives audio timing info based on media times and
20831 * any adjustments made by the transmuxer
20832 * @param {boolean} isEndOfTimeline
20833 * true if this segment represents the last segment in a timeline
20834 * @param {Function} endedTimelineFn
20835 * a callback made when a timeline is ended, will only be called if
20836 * isEndOfTimeline is true
20837 * @param {Function} dataFn - a callback that is executed when segment bytes are available
20838 * and ready to use
20839 * @param {Function} doneFn - a callback that is executed after decryption has completed
20840 */
20841
20842
20843 const decryptSegment = ({
20844 decryptionWorker,
20845 segment,
20846 trackInfoFn,
20847 timingInfoFn,
20848 videoSegmentTimingInfoFn,
20849 audioSegmentTimingInfoFn,
20850 id3Fn,
20851 captionsFn,
20852 isEndOfTimeline,
20853 endedTimelineFn,
20854 dataFn,
20855 doneFn,
20856 onTransmuxerLog
20857 }) => {
20858 decrypt({
20859 id: segment.requestId,
20860 key: segment.key,
20861 encryptedBytes: segment.encryptedBytes,
20862 decryptionWorker
20863 }, decryptedBytes => {
20864 segment.bytes = decryptedBytes;
20865 handleSegmentBytes({
20866 segment,
20867 bytes: segment.bytes,
20868 trackInfoFn,
20869 timingInfoFn,
20870 videoSegmentTimingInfoFn,
20871 audioSegmentTimingInfoFn,
20872 id3Fn,
20873 captionsFn,
20874 isEndOfTimeline,
20875 endedTimelineFn,
20876 dataFn,
20877 doneFn,
20878 onTransmuxerLog
20879 });
20880 });
20881 };
20882 /**
20883 * This function waits for all XHRs to finish (with either success or failure)
20884 * before continuing processing via its callback. The function gathers errors
20885 * from each request into a single errors array so that the error status for
20886 * each request can be examined later.
20887 *
20888 * @param {Object} activeXhrs - an object that tracks all XHR requests
20889 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
20890 * routines
20891 * @param {Function} trackInfoFn - a callback that receives track info
20892 * @param {Function} timingInfoFn - a callback that receives timing info
20893 * @param {Function} videoSegmentTimingInfoFn
20894 * a callback that receives video timing info based on media times and
20895 * any adjustments made by the transmuxer
20896 * @param {Function} audioSegmentTimingInfoFn
20897 * a callback that receives audio timing info based on media times and
20898 * any adjustments made by the transmuxer
20899 * @param {Function} id3Fn - a callback that receives ID3 metadata
20900 * @param {Function} captionsFn - a callback that receives captions
20901 * @param {boolean} isEndOfTimeline
20902 * true if this segment represents the last segment in a timeline
20903 * @param {Function} endedTimelineFn
20904 * a callback made when a timeline is ended, will only be called if
20905 * isEndOfTimeline is true
20906 * @param {Function} dataFn - a callback that is executed when segment bytes are available
20907 * and ready to use
20908 * @param {Function} doneFn - a callback that is executed after all resources have been
20909 * downloaded and any decryption completed
20910 */
20911
20912
20913 const waitForCompletion = ({
20914 activeXhrs,
20915 decryptionWorker,
20916 trackInfoFn,
20917 timingInfoFn,
20918 videoSegmentTimingInfoFn,
20919 audioSegmentTimingInfoFn,
20920 id3Fn,
20921 captionsFn,
20922 isEndOfTimeline,
20923 endedTimelineFn,
20924 dataFn,
20925 doneFn,
20926 onTransmuxerLog
20927 }) => {
20928 let count = 0;
20929 let didError = false;
20930 return (error, segment) => {
20931 if (didError) {
20932 return;
20933 }
20934
20935 if (error) {
20936 didError = true; // If there are errors, we have to abort any outstanding requests
20937
20938 abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
20939 // handle the aborted events from those requests, there are some cases where we may
20940 // never get an aborted event. For instance, if the network connection is lost and
20941 // there were two requests, the first may have triggered an error immediately, while
20942 // the second request remains unsent. In that case, the aborted algorithm will not
20943 // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
20944 //
20945 // We also can't rely on the ready state of the XHR, since the request that
20946 // triggered the connection error may also show as a ready state of 0 (unsent).
20947 // Therefore, we have to finish this group of requests immediately after the first
20948 // seen error.
20949
20950 return doneFn(error, segment);
20951 }
20952
20953 count += 1;
20954
20955 if (count === activeXhrs.length) {
20956 const segmentFinish = function () {
20957 if (segment.encryptedBytes) {
20958 return decryptSegment({
20959 decryptionWorker,
20960 segment,
20961 trackInfoFn,
20962 timingInfoFn,
20963 videoSegmentTimingInfoFn,
20964 audioSegmentTimingInfoFn,
20965 id3Fn,
20966 captionsFn,
20967 isEndOfTimeline,
20968 endedTimelineFn,
20969 dataFn,
20970 doneFn,
20971 onTransmuxerLog
20972 });
20973 } // Otherwise, everything is ready, just continue
20974
20975
20976 handleSegmentBytes({
20977 segment,
20978 bytes: segment.bytes,
20979 trackInfoFn,
20980 timingInfoFn,
20981 videoSegmentTimingInfoFn,
20982 audioSegmentTimingInfoFn,
20983 id3Fn,
20984 captionsFn,
20985 isEndOfTimeline,
20986 endedTimelineFn,
20987 dataFn,
20988 doneFn,
20989 onTransmuxerLog
20990 });
20991 };
20992
20993
20994 segment.endOfAllRequests = Date.now(); // Keep track of when *all* of the requests have completed
20995
20996 if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
20997 return decrypt({
20998 decryptionWorker,
20999 // add -init to the "id" to differentiate between segment
21000 // and init segment decryption, just in case they happen
21001 // at the same time at some point in the future.
21002 id: segment.requestId + '-init',
21003 encryptedBytes: segment.map.encryptedBytes,
21004 key: segment.map.key
21005 }, decryptedBytes => {
21006 segment.map.bytes = decryptedBytes;
21007 parseInitSegment(segment, parseError => {
21008 if (parseError) {
21009 abortAll(activeXhrs);
21010 return doneFn(parseError, segment);
21011 }
21012
21013 segmentFinish();
21014 });
21015 });
21016 }
21017
21018 segmentFinish();
21019 }
21020 };
21021 };
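// Illustrative sketch: the callback returned by waitForCompletion is shared by
// every request in a segment's batch (names mirror the surrounding code,
// values are hypothetical):
//
//   const finishProcessingFn = waitForCompletion({ activeXhrs, decryptionWorker, doneFn /* , ... */ });
//   // each request (key, init segment, media segment) reports through it:
//   finishProcessingFn(null, segment);  // success: bumps the internal count
//   finishProcessingFn(error, segment); // failure: aborts the rest, calls doneFn once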
21022 /**
21023 * Calls the abort callback if any request within the batch was aborted. Will only call
21024 * the callback once per batch of requests, even if multiple were aborted.
21025 *
21026 * @param {Object} loadendState - state to check to see if the abort function was called
21027 * @param {Function} abortFn - callback to call for abort
21028 */
21029
21030
21031 const handleLoadEnd = ({
21032 loadendState,
21033 abortFn
21034 }) => event => {
21035 const request = event.target;
21036
21037 if (request.aborted && abortFn && !loadendState.calledAbortFn) {
21038 abortFn();
21039 loadendState.calledAbortFn = true;
21040 }
21041 };
21042 /**
21043 * Simple progress event callback handler that gathers some stats before
21044 * executing a provided callback with the `segment` object
21045 *
21046 * @param {Object} segment - a simplified copy of the segmentInfo object
21047 * from SegmentLoader
21048 * @param {Function} progressFn - a callback that is executed each time a progress event
21049 * is received
21050 * @param {Function} trackInfoFn - a callback that receives track info
21051 * @param {Function} timingInfoFn - a callback that receives timing info
21052 * @param {Function} videoSegmentTimingInfoFn
21053 * a callback that receives video timing info based on media times and
21054 * any adjustments made by the transmuxer
21055 * @param {Function} audioSegmentTimingInfoFn
21056 * a callback that receives audio timing info based on media times and
21057 * any adjustments made by the transmuxer
21058 * @param {boolean} isEndOfTimeline
21059 * true if this segment represents the last segment in a timeline
21060 * @param {Function} endedTimelineFn
21061 * a callback made when a timeline is ended, will only be called if
21062 * isEndOfTimeline is true
21063 * @param {Function} dataFn - a callback that is executed when segment bytes are available
21064 * and ready to use
21065 * @param {Event} event - the progress event object from XMLHttpRequest
21066 */
21067
21068
21069 const handleProgress = ({
21070 segment,
21071 progressFn,
21072 trackInfoFn,
21073 timingInfoFn,
21074 videoSegmentTimingInfoFn,
21075 audioSegmentTimingInfoFn,
21076 id3Fn,
21077 captionsFn,
21078 isEndOfTimeline,
21079 endedTimelineFn,
21080 dataFn
21081 }) => event => {
21082 const request = event.target;
21083
21084 if (request.aborted) {
21085 return;
21086 }
21087
21088 segment.stats = merge$1(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
21089
21090 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
21091 segment.stats.firstBytesReceivedAt = Date.now();
21092 }
21093
21094 return progressFn(event, segment);
21095 };
21096 /**
21097 * Loads all resources and does any processing necessary for a media-segment
21098 *
21099 * Features:
21100 * decrypts the media-segment if it has a key uri and an iv
21101 * aborts *all* requests if *any* one request fails
21102 *
21103 * The segment object, at minimum, has the following format:
21104 * {
21105 * resolvedUri: String,
21106 * [transmuxer]: Object,
21107 * [byterange]: {
21108 * offset: Number,
21109 * length: Number
21110 * },
21111 * [key]: {
21112 * resolvedUri: String
21113 * [byterange]: {
21114 * offset: Number,
21115 * length: Number
21116 * },
21117 * iv: {
21118 * bytes: Uint32Array
21119 * }
21120 * },
21121 * [map]: {
21122 * resolvedUri: String,
21123 * [byterange]: {
21124 * offset: Number,
21125 * length: Number
21126 * },
21127 * [bytes]: Uint8Array
21128 * }
21129 * }
21130 * ...where [name] denotes optional properties
21131 *
21132 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
21133 * @param {Object} xhrOptions - the base options to provide to all xhr requests
21134 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
21135 * decryption routines
21136 * @param {Object} segment - a simplified copy of the segmentInfo object
21137 * from SegmentLoader
21138 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
21139 * aborted
21140 * @param {Function} progressFn - a callback that receives progress events from the main
21141 * segment's xhr request
21142 * @param {Function} trackInfoFn - a callback that receives track info
21143 * @param {Function} timingInfoFn - a callback that receives timing info
21144 * @param {Function} videoSegmentTimingInfoFn
21145 * a callback that receives video timing info based on media times and
21146 * any adjustments made by the transmuxer
21147 * @param {Function} audioSegmentTimingInfoFn
21148 * a callback that receives audio timing info based on media times and
21149 * any adjustments made by the transmuxer
21150 * @param {Function} id3Fn - a callback that receives ID3 metadata
21151 * @param {Function} captionsFn - a callback that receives captions
21152 * @param {boolean} isEndOfTimeline
21153 * true if this segment represents the last segment in a timeline
21154 * @param {Function} endedTimelineFn
21155 * a callback made when a timeline is ended, will only be called if
21156 * isEndOfTimeline is true
21157 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
21158 * request, transmuxed if needed
21159 * @param {Function} doneFn - a callback that is executed only once all requests have
21160 * succeeded or failed
21161 * @return {Function} a function that, when invoked, immediately aborts all
21162 * outstanding requests
21163 */
21164
21165
21166 const mediaSegmentRequest = ({
21167 xhr,
21168 xhrOptions,
21169 decryptionWorker,
21170 segment,
21171 abortFn,
21172 progressFn,
21173 trackInfoFn,
21174 timingInfoFn,
21175 videoSegmentTimingInfoFn,
21176 audioSegmentTimingInfoFn,
21177 id3Fn,
21178 captionsFn,
21179 isEndOfTimeline,
21180 endedTimelineFn,
21181 dataFn,
21182 doneFn,
21183 onTransmuxerLog
21184 }) => {
21185 const activeXhrs = [];
21186 const finishProcessingFn = waitForCompletion({
21187 activeXhrs,
21188 decryptionWorker,
21189 trackInfoFn,
21190 timingInfoFn,
21191 videoSegmentTimingInfoFn,
21192 audioSegmentTimingInfoFn,
21193 id3Fn,
21194 captionsFn,
21195 isEndOfTimeline,
21196 endedTimelineFn,
21197 dataFn,
21198 doneFn,
21199 onTransmuxerLog
21200 }); // optionally, request the decryption key
21201
21202 if (segment.key && !segment.key.bytes) {
21203 const objects = [segment.key];
21204
21205 if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
21206 objects.push(segment.map.key);
21207 }
21208
21209 const keyRequestOptions = merge$1(xhrOptions, {
21210 uri: segment.key.resolvedUri,
21211 responseType: 'arraybuffer'
21212 });
21213 const keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
21214 const keyXhr = xhr(keyRequestOptions, keyRequestCallback);
21215 activeXhrs.push(keyXhr);
21216 } // optionally, request the associated media init segment
21217
21218
21219 if (segment.map && !segment.map.bytes) {
21220 const differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);
21221
21222 if (differentMapKey) {
21223 const mapKeyRequestOptions = merge$1(xhrOptions, {
21224 uri: segment.map.key.resolvedUri,
21225 responseType: 'arraybuffer'
21226 });
21227 const mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
21228 const mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
21229 activeXhrs.push(mapKeyXhr);
21230 }
21231
21232 const initSegmentOptions = merge$1(xhrOptions, {
21233 uri: segment.map.resolvedUri,
21234 responseType: 'arraybuffer',
21235 headers: segmentXhrHeaders(segment.map)
21236 });
21237 const initSegmentRequestCallback = handleInitSegmentResponse({
21238 segment,
21239 finishProcessingFn
21240 });
21241 const initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
21242 activeXhrs.push(initSegmentXhr);
21243 }
21244
21245 const segmentRequestOptions = merge$1(xhrOptions, {
21246 uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
21247 responseType: 'arraybuffer',
21248 headers: segmentXhrHeaders(segment)
21249 });
21250 const segmentRequestCallback = handleSegmentResponse({
21251 segment,
21252 finishProcessingFn,
21253 responseType: segmentRequestOptions.responseType
21254 });
21255 const segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
21256 segmentXhr.addEventListener('progress', handleProgress({
21257 segment,
21258 progressFn,
21259 trackInfoFn,
21260 timingInfoFn,
21261 videoSegmentTimingInfoFn,
21262 audioSegmentTimingInfoFn,
21263 id3Fn,
21264 captionsFn,
21265 isEndOfTimeline,
21266 endedTimelineFn,
21267 dataFn
21268 }));
21269 activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but callbacks should not
21270 // be made multiple times, provide a shared state object
21271
21272 const loadendState = {};
21273 activeXhrs.forEach(activeXhr => {
21274 activeXhr.addEventListener('loadend', handleLoadEnd({
21275 loadendState,
21276 abortFn
21277 }));
21278 });
21279 return () => abortAll(activeXhrs);
21280 };
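// Illustrative usage sketch (hypothetical values): mediaSegmentRequest kicks
// off up to three XHRs (key, init segment, media segment) and returns a
// function that aborts the whole group:
//
//   const abortSegmentRequest = mediaSegmentRequest({
//     xhr, xhrOptions, decryptionWorker, segment,
//     abortFn, progressFn, trackInfoFn, timingInfoFn,
//     videoSegmentTimingInfoFn, audioSegmentTimingInfoFn, id3Fn, captionsFn,
//     isEndOfTimeline, endedTimelineFn, dataFn, doneFn, onTransmuxerLog
//   });
//   // later, e.g. when switching renditions mid-download:
//   abortSegmentRequest();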
21281
21282 /**
21283 * @file - codecs.js - Handles tasks regarding codec strings, such as parsing
21284 * them from playlists or translating them into objects that can be examined.
21285 */
21286 const logFn$1 = logger('CodecUtils');
21287 /**
21288 * Returns the set of codec strings parsed from the playlist's CODECS
21289 * attribute, or undefined if no codecs were specified in the playlist
21290 *
21291 * @param {Playlist} media the current media playlist
21292 * @return {Object} an object with the video and audio codecs
21293 */
21294
21295 const getCodecs = function (media) {
21296 // if the codecs were explicitly specified, use them instead of the
21297 // defaults
21298 const mediaAttributes = media.attributes || {};
21299
21300 if (mediaAttributes.CODECS) {
21301 return parseCodecs(mediaAttributes.CODECS);
21302 }
21303 };
21304
21305 const isMaat = (main, media) => {
21306 const mediaAttributes = media.attributes || {};
21307 return main && main.mediaGroups && main.mediaGroups.AUDIO && mediaAttributes.AUDIO && main.mediaGroups.AUDIO[mediaAttributes.AUDIO];
21308 };
21309 const isMuxed = (main, media) => {
21310 if (!isMaat(main, media)) {
21311 return true;
21312 }
21313
21314 const mediaAttributes = media.attributes || {};
21315 const audioGroup = main.mediaGroups.AUDIO[mediaAttributes.AUDIO];
21316
21317 for (const groupId in audioGroup) {
21318 // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
21319 // or there are listed playlists (the case for DASH, as the manifest will have already
21320 // provided all of the details necessary to generate the audio playlist, as opposed to
21321 // HLS' externally requested playlists), then the content is demuxed.
21322 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
21323 return true;
21324 }
21325 }
21326
21327 return false;
21328 };
21329 const unwrapCodecList = function (codecList) {
21330 const codecs = {};
21331 codecList.forEach(({
21332 mediaType,
21333 type,
21334 details
21335 }) => {
21336 codecs[mediaType] = codecs[mediaType] || [];
21337 codecs[mediaType].push(translateLegacyCodec(`${type}${details}`));
21338 });
21339 Object.keys(codecs).forEach(function (mediaType) {
21340 if (codecs[mediaType].length > 1) {
21341 logFn$1(`multiple ${mediaType} codecs found as attributes: ${codecs[mediaType].join(', ')}. Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.`);
21342 codecs[mediaType] = null;
21343 return;
21344 }
21345
21346 codecs[mediaType] = codecs[mediaType][0];
21347 });
21348 return codecs;
21349 };
21350 const codecCount = function (codecObj) {
21351 let count = 0;
21352
21353 if (codecObj.audio) {
21354 count++;
21355 }
21356
21357 if (codecObj.video) {
21358 count++;
21359 }
21360
21361 return count;
21362 };
21363 /**
21364 * Calculates the codec strings for a working configuration of
21365 * SourceBuffers to play variant streams in a main playlist. If
21366 * there is no possible working configuration, an empty object will be
21367 * returned.
21368 *
21369 * @param {Object} main the m3u8 object for the main playlist
21370 * @param {Object} media the m3u8 object for the variant playlist
21371 * @return {Object} the codec strings.
21372 *
21373 * @private
21374 */
21375
21376 const codecsForPlaylist = function (main, media) {
21377 const mediaAttributes = media.attributes || {};
21378 const codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
21379 // Put another way, there is no way to have a video-only multiple-audio HLS!
21380
21381 if (isMaat(main, media) && !codecInfo.audio) {
21382 if (!isMuxed(main, media)) {
21383 // It is possible for codecs to be specified on the audio media group playlist but
21384 // not on the rendition playlist. This is mostly the case for DASH, where audio and
21385 // video are always separate (and separately specified).
21386 const defaultCodecs = unwrapCodecList(codecsFromDefault(main, mediaAttributes.AUDIO) || []);
21387
21388 if (defaultCodecs.audio) {
21389 codecInfo.audio = defaultCodecs.audio;
21390 }
21391 }
21392 }
21393
21394 return codecInfo;
21395 };
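// Illustrative sketch (hypothetical manifest objects): for a muxed rendition
// that declares CODECS="avc1.64001f,mp4a.40.2", codecsForPlaylist resolves
// both codec strings from the attribute:
//
//   const media = { attributes: { CODECS: 'avc1.64001f,mp4a.40.2' } };
//   const main = { playlists: [media], mediaGroups: {} };
//   codecsForPlaylist(main, media);
//   // => { video: 'avc1.64001f', audio: 'mp4a.40.2' }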
21396
21397 const logFn = logger('PlaylistSelector');
21398
21399 const representationToString = function (representation) {
21400 if (!representation || !representation.playlist) {
21401 return;
21402 }
21403
21404 const playlist = representation.playlist;
21405 return JSON.stringify({
21406 id: playlist.id,
21407 bandwidth: representation.bandwidth,
21408 width: representation.width,
21409 height: representation.height,
21410 codecs: playlist.attributes && playlist.attributes.CODECS || ''
21411 });
21412 }; // Utilities
21413
21414 /**
21415 * Returns the CSS value for the specified property on an element
21416 * using `getComputedStyle`. Firefox has a long-standing issue where
21417 * getComputedStyle() may return null when running in an iframe with
21418 * `display: none`.
21419 *
21420 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
21421 * @param {HTMLElement} el the HTMLElement to work on
21422 * @param {string} property the property to get the style for
21423 */
21424
21425
21426 const safeGetComputedStyle = function (el, property) {
21427 if (!el) {
21428 return '';
21429 }
21430
21431 const result = window.getComputedStyle(el);
21432
21433 if (!result) {
21434 return '';
21435 }
21436
21437 return result[property];
21438 };
21439 /**
21440 * Reusable stable sort function
21441 *
21442 * @param {Playlists} array
21443 * @param {Function} sortFn Different comparators
21444 * @function stableSort
21445 */
21446
21447
21448 const stableSort = function (array, sortFn) {
21449 const newArray = array.slice();
21450 array.sort(function (left, right) {
21451 const cmp = sortFn(left, right);
21452
21453 if (cmp === 0) {
21454 return newArray.indexOf(left) - newArray.indexOf(right);
21455 }
21456
21457 return cmp;
21458 });
21459 };
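// Minimal sketch of the stability guarantee: entries the comparator reports
// as equal keep their original relative order (hypothetical playlists):
//
//   const reps = [{ id: 'a', bandwidth: 2e6 }, { id: 'b', bandwidth: 1e6 }, { id: 'c', bandwidth: 1e6 }];
//   stableSort(reps, (left, right) => left.bandwidth - right.bandwidth);
//   // reps => b, c, a -- b stays ahead of c because it appeared first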
21460 /**
21461 * A comparator function to sort two playlist objects by bandwidth.
21462 *
21463 * @param {Object} left a media playlist object
21464 * @param {Object} right a media playlist object
21465 * @return {number} Greater than zero if the bandwidth attribute of
21466 * left is greater than the corresponding attribute of right. Less
21467 * than zero if the bandwidth of right is greater than left and
21468 * exactly zero if the two are equal.
21469 */
21470
21471
21472 const comparePlaylistBandwidth = function (left, right) {
21473 let leftBandwidth;
21474 let rightBandwidth;
21475
21476 if (left.attributes.BANDWIDTH) {
21477 leftBandwidth = left.attributes.BANDWIDTH;
21478 }
21479
21480 leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;
21481
21482 if (right.attributes.BANDWIDTH) {
21483 rightBandwidth = right.attributes.BANDWIDTH;
21484 }
21485
21486 rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;
21487 return leftBandwidth - rightBandwidth;
21488 };
21489 /**
21490 * A comparator function to sort two playlist objects by resolution (width).
21491 *
21492 * @param {Object} left a media playlist object
21493 * @param {Object} right a media playlist object
21494 * @return {number} Greater than zero if the resolution.width attribute of
21495 * left is greater than the corresponding attribute of right. Less
21496 * than zero if the resolution.width of right is greater than left and
21497 * exactly zero if the two are equal.
21498 */
21499
21500 const comparePlaylistResolution = function (left, right) {
21501 let leftWidth;
21502 let rightWidth;
21503
21504 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
21505 leftWidth = left.attributes.RESOLUTION.width;
21506 }
21507
21508 leftWidth = leftWidth || window.Number.MAX_VALUE;
21509
21510 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
21511 rightWidth = right.attributes.RESOLUTION.width;
21512 }
21513
21514 rightWidth = rightWidth || window.Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
21515 // have the same media dimensions/resolution
21516
21517 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
21518 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
21519 }
21520
21521 return leftWidth - rightWidth;
21522 };
21523 /**
21524 * Chooses the appropriate media playlist based on bandwidth and player size
21525 *
21526 * @param {Object} main
21527 * Object representation of the main manifest
21528 * @param {number} playerBandwidth
21529 * Current calculated bandwidth of the player
21530 * @param {number} playerWidth
21531 * Current width of the player element (should account for the device pixel ratio)
21532 * @param {number} playerHeight
21533 * Current height of the player element (should account for the device pixel ratio)
21534 * @param {boolean} limitRenditionByPlayerDimensions
21535 * True if the player width and height should be used during the selection, false otherwise
21536 * @param {Object} playlistController
21537 * the current playlistController object
21538 * @return {Playlist} the highest bitrate playlist less than the
21539 * currently detected bandwidth, accounting for some amount of
21540 * bandwidth variance
21541 */
21542
21543 let simpleSelector = function (main, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, playlistController) {
21544 // If we end up getting called before `main` is available, exit early
21545 if (!main) {
21546 return;
21547 }
21548
21549 const options = {
21550 bandwidth: playerBandwidth,
21551 width: playerWidth,
21552 height: playerHeight,
21553 limitRenditionByPlayerDimensions
21554 };
21555 let playlists = main.playlists; // if the playlist is audio only, select from the currently active audio group playlists.
21556
21557 if (Playlist.isAudioOnly(main)) {
21558 playlists = playlistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
21559 // at the bottom of this function for debugging.
21560
21561 options.audioOnly = true;
21562 } // convert the playlists to an intermediary representation to make comparisons easier
21563
21564
21565 let sortedPlaylistReps = playlists.map(playlist => {
21566 let bandwidth;
21567 const width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
21568 const height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
21569 bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
21570 bandwidth = bandwidth || window.Number.MAX_VALUE;
21571 return {
21572 bandwidth,
21573 width,
21574 height,
21575 playlist
21576 };
21577 });
21578 stableSort(sortedPlaylistReps, (left, right) => left.bandwidth - right.bandwidth); // filter out any playlists that have been excluded due to
21579 // incompatible configurations
21580
21581 sortedPlaylistReps = sortedPlaylistReps.filter(rep => !Playlist.isIncompatible(rep.playlist)); // filter out any playlists that have been disabled manually through the representations
21582 // api or excluded temporarily due to playback errors.
21583
21584 let enabledPlaylistReps = sortedPlaylistReps.filter(rep => Playlist.isEnabled(rep.playlist));
21585
21586 if (!enabledPlaylistReps.length) {
21587 // if there are no enabled playlists, then they have all been excluded or disabled
21588 // by the user through the representations api. In this case, ignore exclusion and
21589 // fallback to what the user wants by using playlists the user has not disabled.
21590 enabledPlaylistReps = sortedPlaylistReps.filter(rep => !Playlist.isDisabled(rep.playlist));
21591 } // filter out any variant that has greater effective bitrate
21592 // than the current estimated bandwidth
21593
21594
21595 const bandwidthPlaylistReps = enabledPlaylistReps.filter(rep => rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth);
21596 let highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
21597 // and then take the very first element
21598
21599 const bandwidthBestRep = bandwidthPlaylistReps.filter(rep => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0]; // if we're not going to limit renditions by player size, make an early decision.
21600
21601 if (limitRenditionByPlayerDimensions === false) {
21602 const chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
21603
21604 if (chosenRep && chosenRep.playlist) {
21605 let type = 'sortedPlaylistReps';
21606
21607 if (bandwidthBestRep) {
21608 type = 'bandwidthBestRep';
21609 }
21610
21611 if (enabledPlaylistReps[0]) {
21612 type = 'enabledPlaylistReps';
21613 }
21614
21615 logFn(`choosing ${representationToString(chosenRep)} using ${type} with options`, options);
21616 return chosenRep.playlist;
21617 }
21618
21619 logFn('could not choose a playlist with options', options);
21620 return null;
21621 } // filter out playlists without resolution information
21622
21623
21624 const haveResolution = bandwidthPlaylistReps.filter(rep => rep.width && rep.height); // sort variants by resolution
21625
21626 stableSort(haveResolution, (left, right) => left.width - right.width); // if we have the exact resolution as the player use it
21627
21628 const resolutionBestRepList = haveResolution.filter(rep => rep.width === playerWidth && rep.height === playerHeight);
21629 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
21630
21631 const resolutionBestRep = resolutionBestRepList.filter(rep => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];
21632 let resolutionPlusOneList;
21633 let resolutionPlusOneSmallest;
21634 let resolutionPlusOneRep; // find the smallest variant that is larger than the player
21635 // if there is no exact resolution match
21636
21637 if (!resolutionBestRep) {
21638 resolutionPlusOneList = haveResolution.filter(rep => rep.width > playerWidth || rep.height > playerHeight); // find all the variants that have the same smallest resolution
21639
21640 resolutionPlusOneSmallest = resolutionPlusOneList.filter(rep => rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height); // ensure that we also pick the highest bandwidth variant that
21641 // is just-larger-than the video player
21642
21643 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
21644 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(rep => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];
21645 }
21646
21647 let leastPixelDiffRep; // If this selector proves to be better than others,
21648 // resolutionPlusOneRep and resolutionBestRep and all
21649 // the code involving them should be removed.
21650
21651 if (playlistController.leastPixelDiffSelector) {
21652 // find the variant that is closest to the player's pixel size
21653 const leastPixelDiffList = haveResolution.map(rep => {
21654 rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
21655 return rep;
21656 }); // get the highest bandwidth, closest resolution playlist
21657
21658 stableSort(leastPixelDiffList, (left, right) => {
21659 // sort by highest bandwidth if pixelDiff is the same
21660 if (left.pixelDiff === right.pixelDiff) {
21661 return right.bandwidth - left.bandwidth;
21662 }
21663
21664 return left.pixelDiff - right.pixelDiff;
21665 });
21666 leastPixelDiffRep = leastPixelDiffList[0];
21667 } // fallback chain of variants
21668
21669
21670 const chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
21671
21672 if (chosenRep && chosenRep.playlist) {
21673 let type = 'sortedPlaylistReps';
21674
21675 if (leastPixelDiffRep) {
21676 type = 'leastPixelDiffRep';
21677 } else if (resolutionPlusOneRep) {
21678 type = 'resolutionPlusOneRep';
21679 } else if (resolutionBestRep) {
21680 type = 'resolutionBestRep';
21681 } else if (bandwidthBestRep) {
21682 type = 'bandwidthBestRep';
21683 } else if (enabledPlaylistReps[0]) {
21684 type = 'enabledPlaylistReps';
21685 }
21686
21687 logFn(`choosing ${representationToString(chosenRep)} using ${type} with options`, options);
21688 return chosenRep.playlist;
21689 }
21690
21691 logFn('could not choose a playlist with options', options);
21692 return null;
21693 };
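// Illustrative call (hypothetical numbers): choose a rendition for a 1280x720
// player with a 5 Mbps bandwidth estimate, honoring player dimensions:
//
//   const chosen = simpleSelector(main, 5e6, 1280, 720, true, playlistController);
//   // => the media playlist picked by the fallback chain above, or null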
21694
21695 /**
21696 * Chooses the appropriate media playlist based on the most recent
21697 * bandwidth estimate and the player size.
21698 *
21699 * Expects to be called within the context of an instance of VhsHandler
21700 *
21701 * @return {Playlist} the highest bitrate playlist less than the
21702 * currently detected bandwidth, accounting for some amount of
21703 * bandwidth variance
21704 */
21705
21706 const lastBandwidthSelector = function () {
21707 const pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
21708 return simpleSelector(this.playlists.main, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.playlistController_);
21709 };
21710 /**
21711 * Chooses the appropriate media playlist based on an
21712 * exponential-weighted moving average of the bandwidth after
21713 * filtering for player size.
21714 *
21715 * Expects to be called within the context of an instance of VhsHandler
21716 *
21717 * @param {number} decay - a number between 0 and 1. Higher values of
21718 * this parameter will cause previous bandwidth estimates to lose
21719 * significance more quickly.
21720 * @return {Function} a function which can be invoked to create a new
21721 * playlist selector function.
21722 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
21723 */
21724
21725 const movingAverageBandwidthSelector = function (decay) {
21726 let average = -1;
21727 let lastSystemBandwidth = -1;
21728
21729 if (decay < 0 || decay > 1) {
21730 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
21731 }
21732
21733 return function () {
21734 const pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
21735
21736 if (average < 0) {
21737 average = this.systemBandwidth;
21738 lastSystemBandwidth = this.systemBandwidth;
21739 } // stop the average value from decaying for every 250ms
21740 // when the systemBandwidth is constant
21741 // and
21742 // stop average from setting to a very low value when the
21743 // systemBandwidth becomes 0 in case of chunk cancellation
21744
21745
21746 if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
21747 average = decay * this.systemBandwidth + (1 - decay) * average;
21748 lastSystemBandwidth = this.systemBandwidth;
21749 }
21750
21751 return simpleSelector(this.playlists.main, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.playlistController_);
21752 };
21753 };
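// Worked example of the moving average (hypothetical samples): with
// decay = 0.5, a first estimate of 4e6 and a new systemBandwidth of 2e6, the
// next call computes 0.5 * 2e6 + (1 - 0.5) * 4e6 = 3e6. A selector built with
// movingAverageBandwidthSelector(0.5) therefore reacts to bandwidth swings
// but smooths out one-off spikes.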
21754 /**
21755 * Chooses the appropriate media playlist based on the potential to rebuffer
21756 *
21757 * @param {Object} settings
21758 * Object of information required to use this selector
21759 * @param {Object} settings.main
21760 * Object representation of the main manifest
21761 * @param {number} settings.currentTime
21762 * The current time of the player
21763 * @param {number} settings.bandwidth
21764 * Current measured bandwidth
21765 * @param {number} settings.duration
21766 * Duration of the media
21767 * @param {number} settings.segmentDuration
21768 * Segment duration to be used in round trip time calculations
21769 * @param {number} settings.timeUntilRebuffer
21770 * Time left in seconds until the player has to rebuffer
21771 * @param {number} settings.currentTimeline
21772 * The current timeline segments are being loaded from
21773 * @param {SyncController} settings.syncController
21774 * SyncController for determining if we have a sync point for a given playlist
21775 * @return {Object|null}
21776 * {Object} return.playlist
21777 * The highest bandwidth playlist with the least amount of rebuffering
21778 * {Number} return.rebufferingImpact
21779 * The amount of time in seconds switching to this playlist will rebuffer. A
21780 * negative value means that switching will cause zero rebuffering.
21781 */
21782
21783 const minRebufferMaxBandwidthSelector = function (settings) {
21784 const {
21785 main,
21786 currentTime,
21787 bandwidth,
21788 duration,
21789 segmentDuration,
21790 timeUntilRebuffer,
21791 currentTimeline,
21792 syncController
21793 } = settings; // filter out any playlists that have been excluded due to
21794 // incompatible configurations
21795
21796 const compatiblePlaylists = main.playlists.filter(playlist => !Playlist.isIncompatible(playlist)); // filter out any playlists that have been disabled manually through the representations
21797 // api or excluded temporarily due to playback errors.
21798
21799 let enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
21800
21801 if (!enabledPlaylists.length) {
21802 // if there are no enabled playlists, then they have all been excluded or disabled
21803 // by the user through the representations api. In this case, ignore exclusion and
21804 // fallback to what the user wants by using playlists the user has not disabled.
21805 enabledPlaylists = compatiblePlaylists.filter(playlist => !Playlist.isDisabled(playlist));
21806 }
21807
21808 const bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
21809 const rebufferingEstimates = bandwidthPlaylists.map(playlist => {
21810 const syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
21811 // sync request first. This will double the request time
21812
21813 const numRequests = syncPoint ? 1 : 2;
21814 const requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
21815 const rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
21816 return {
21817 playlist,
21818 rebufferingImpact
21819 };
21820 });
21821 const noRebufferingPlaylists = rebufferingEstimates.filter(estimate => estimate.rebufferingImpact <= 0); // Sort by bandwidth DESC
21822
21823 stableSort(noRebufferingPlaylists, (a, b) => comparePlaylistBandwidth(b.playlist, a.playlist));
21824
21825 if (noRebufferingPlaylists.length) {
21826 return noRebufferingPlaylists[0];
21827 }
21828
21829 stableSort(rebufferingEstimates, (a, b) => a.rebufferingImpact - b.rebufferingImpact);
21830 return rebufferingEstimates[0] || null;
21831 };
21832 /**
21833 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
21834 * one with video. If no renditions with video exist, return the lowest audio rendition.
21835 *
21836 * Expects to be called within the context of an instance of VhsHandler
21837 *
21838 * @return {Object|null}
21839 * {Object} return.playlist
21840 * The lowest bitrate playlist that contains a video codec. If no such rendition
21841 * exists pick the lowest audio rendition.
21842 */
21843
21844 const lowestBitrateCompatibleVariantSelector = function () {
21845 // filter out any playlists that have been excluded due to
21846 // incompatible configurations or playback errors
21847 const playlists = this.playlists.main.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
21848
21849 stableSort(playlists, (a, b) => comparePlaylistBandwidth(a, b)); // Parse and assume that playlists with no video codec have no video
21850 // (this is not necessarily true, although it is generally true).
21851 //
21852 // If an entire manifest has no valid videos everything will get filtered
21853 // out.
21854
21855 const playlistsWithVideo = playlists.filter(playlist => !!codecsForPlaylist(this.playlists.main, playlist).video);
21856 return playlistsWithVideo[0] || null;
21857 };
21858
21859 /**
21860 * Combine all segments into a single Uint8Array
21861 *
21862 * @param {Object} segmentObj
21863 * @return {Uint8Array} concatenated bytes
21864 * @private
21865 */
21866 const concatSegments = segmentObj => {
21867 let offset = 0;
21868 let tempBuffer;
21869
21870 if (segmentObj.bytes) {
21871 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
21872
21873 segmentObj.segments.forEach(segment => {
21874 tempBuffer.set(segment, offset);
21875 offset += segment.byteLength;
21876 });
21877 }
21878
21879 return tempBuffer;
21880 };
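// Minimal sketch (hypothetical values): `bytes` is the total byte length and
// `segments` the typed arrays to concatenate in order:
//
//   concatSegments({
//     bytes: 4,
//     segments: [new Uint8Array([1, 2]), new Uint8Array([3, 4])]
//   });
//   // => Uint8Array [1, 2, 3, 4]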
21881
21882 /**
21883 * @file text-tracks.js
21884 */
21885 /**
21886 * Create captions text tracks on video.js if they do not exist
21887 *
21888 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
21889 * @param {Object} tech the video.js tech
21890 * @param {Object} captionStream the caption stream to create
21891 * @private
21892 */
21893
21894 const createCaptionsTrackIfNotExists = function (inbandTextTracks, tech, captionStream) {
21895 if (!inbandTextTracks[captionStream]) {
21896 tech.trigger({
21897 type: 'usage',
21898 name: 'vhs-608'
21899 });
21900 let instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them
21901
21902 if (/^cc708_/.test(captionStream)) {
21903 instreamId = 'SERVICE' + captionStream.split('_')[1];
21904 }
21905
21906 const track = tech.textTracks().getTrackById(instreamId);
21907
21908 if (track) {
21909 // Reuse an existing track with a CC# id because this was
21910 // very likely created by videojs-contrib-hls from information
21911 // in the m3u8 for us to use
21912 inbandTextTracks[captionStream] = track;
21913 } else {
21914 // This section gets called when we have caption services that aren't specified in the manifest.
21915 // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
21916 const captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
21917 let label = captionStream;
21918 let language = captionStream;
21919 let def = false;
21920 const captionService = captionServices[instreamId];
21921
21922 if (captionService) {
21923 label = captionService.label;
21924 language = captionService.language;
21925 def = captionService.default;
21926 } // Otherwise, create a track with the default `CC#` label and
21927 // without a language
21928
21929
21930 inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
21931 kind: 'captions',
21932 id: instreamId,
21933 // TODO: investigate why this doesn't seem to turn the caption on by default
21934 default: def,
21935 label,
21936 language
21937 }, false).track;
21938 }
21939 }
21940 };
21941 /**
21942 * Add caption text track data to a source handler given an array of captions
21943 *
21944 * @param {Object}
21945 * @param {Object} inbandTextTracks the inband text tracks
21946 * @param {number} timestampOffset the timestamp offset of the source buffer
21947 * @param {Array} captionArray an array of caption data
21948 * @private
21949 */
21950
21951 const addCaptionData = function ({
21952 inbandTextTracks,
21953 captionArray,
21954 timestampOffset
21955 }) {
21956 if (!captionArray) {
21957 return;
21958 }
21959
21960 const Cue = window.WebKitDataCue || window.VTTCue;
21961 captionArray.forEach(caption => {
21962 const track = caption.stream; // in CEA 608 captions, video.js/mux.js sends a content array
21963 // with positioning data
21964
21965 if (caption.content) {
21966 caption.content.forEach(value => {
21967 const cue = new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, value.text);
21968 cue.line = value.line;
21969 cue.align = 'left';
21970 cue.position = value.position;
21971 cue.positionAlign = 'line-left';
21972 inbandTextTracks[track].addCue(cue);
21973 });
21974 } else {
21975 // otherwise, a text value with combined captions is sent
21976 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
21977 }
21978 });
21979 };
21980 /**
21981 * Define properties on a cue for backwards compatibility,
21982 * but warn the user that the way that they are using it
21983 * is deprecated and will be removed at a later date.
21984 *
21985 * @param {Cue} cue the cue to add the properties on
21986 * @private
21987 */
21988
21989 const deprecateOldCue = function (cue) {
21990 Object.defineProperties(cue.frame, {
21991 id: {
21992 get() {
21993 videojs__default["default"].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
21994 return cue.value.key;
21995 }
21996
21997 },
21998 value: {
21999 get() {
22000 videojs__default["default"].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
22001 return cue.value.data;
22002 }
22003
22004 },
22005 privateData: {
22006 get() {
22007 videojs__default["default"].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
22008 return cue.value.data;
22009 }
22010
22011 }
22012 });
22013 };
22014 /**
22015 * Add metadata text track data to a source handler given an array of metadata
22016 *
22017 * @param {Object}
22018 * @param {Object} inbandTextTracks the inband text tracks
22019 * @param {Array} metadataArray an array of meta data
22020 * @param {number} timestampOffset the timestamp offset of the source buffer
22021 * @param {number} videoDuration the duration of the video
22022 * @private
22023 */
22024
22025
22026 const addMetadata = ({
22027 inbandTextTracks,
22028 metadataArray,
22029 timestampOffset,
22030 videoDuration
22031 }) => {
22032 if (!metadataArray) {
22033 return;
22034 }
22035
22036 const Cue = window.WebKitDataCue || window.VTTCue;
22037 const metadataTrack = inbandTextTracks.metadataTrack_;
22038
22039 if (!metadataTrack) {
22040 return;
22041 }
22042
22043 metadataArray.forEach(metadata => {
22044 const time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
22045 // ignore this bit of metadata.
22046 // This likely occurs when you have a non-timed ID3 tag like TIT2,
22047 // which is the "Title/Songname/Content description" frame
22048
22049 if (typeof time !== 'number' || window.isNaN(time) || time < 0 || !(time < Infinity)) {
22050 return;
22051 } // If we have no frames, we can't create a cue.
22052
22053
22054 if (!metadata.frames || !metadata.frames.length) {
22055 return;
22056 }
22057
22058 metadata.frames.forEach(frame => {
22059 const cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
22060 cue.frame = frame;
22061 cue.value = frame;
22062 deprecateOldCue(cue);
22063 metadataTrack.addCue(cue);
22064 });
22065 });
22066
22067 if (!metadataTrack.cues || !metadataTrack.cues.length) {
22068 return;
22069 } // Updating the metadata cues so that
22070 // the endTime of each cue is the startTime of the next cue
22071 // the endTime of the last cue is the duration of the video
22072
22073
22074 const cues = metadataTrack.cues;
22075 const cuesArray = []; // Create a copy of the TextTrackCueList...
22076 // ...disregarding cues with a falsy value
22077
22078 for (let i = 0; i < cues.length; i++) {
22079 if (cues[i]) {
22080 cuesArray.push(cues[i]);
22081 }
22082 } // Group cues by their startTime value
22083
22084
22085 const cuesGroupedByStartTime = cuesArray.reduce((obj, cue) => {
22086 const timeSlot = obj[cue.startTime] || [];
22087 timeSlot.push(cue);
22088 obj[cue.startTime] = timeSlot;
22089 return obj;
22090 }, {}); // Sort startTimes by ascending order
22091
22092 const sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort((a, b) => Number(a) - Number(b)); // Map each cue group's endTime to the next group's startTime
22093
22094 sortedStartTimes.forEach((startTime, idx) => {
22095 const cueGroup = cuesGroupedByStartTime[startTime];
22096 const finiteDuration = isFinite(videoDuration) ? videoDuration : startTime;
22097 const nextTime = Number(sortedStartTimes[idx + 1]) || finiteDuration; // Map each cue's endTime to the next group's startTime
22098
22099 cueGroup.forEach(cue => {
22100 cue.endTime = nextTime;
22101 });
22102 });
22103 }; // object for mapping daterange attributes
22104
22105 const dateRangeAttr = {
22106 id: 'ID',
22107 class: 'CLASS',
22108 startDate: 'START-DATE',
22109 duration: 'DURATION',
22110 endDate: 'END-DATE',
22111 endOnNext: 'END-ON-NEXT',
22112 plannedDuration: 'PLANNED-DURATION',
22113 scte35Out: 'SCTE35-OUT',
22114 scte35In: 'SCTE35-IN'
22115 };
22116 const dateRangeKeysToOmit = new Set(['id', 'class', 'startDate', 'duration', 'endDate', 'endOnNext', 'startTime', 'endTime', 'processDateRange']);
22117 /**
22118 * Add DateRange metadata text track to a source handler given an array of metadata
22119 *
22120 * @param {Object}
22121 * @param {Object} inbandTextTracks the inband text tracks
22122 * @param {Array} dateRanges array of parsed date range objects from the media playlist
22123 * @private
22124 */
22125
22126 const addDateRangeMetadata = ({
22127 inbandTextTracks,
22128 dateRanges
22129 }) => {
22130 const metadataTrack = inbandTextTracks.metadataTrack_;
22131
22132 if (!metadataTrack) {
22133 return;
22134 }
22135
22136 const Cue = window.WebKitDataCue || window.VTTCue;
22137 dateRanges.forEach(dateRange => {
22138 // we generate multiple cues for each date range with different attributes
22139 for (const key of Object.keys(dateRange)) {
22140 if (dateRangeKeysToOmit.has(key)) {
22141 continue;
22142 }
22143
22144 const cue = new Cue(dateRange.startTime, dateRange.endTime, '');
22145 cue.id = dateRange.id;
22146 cue.type = 'com.apple.quicktime.HLS';
22147 cue.value = {
22148 key: dateRangeAttr[key],
22149 data: dateRange[key]
22150 };
22151
22152 if (key === 'scte35Out' || key === 'scte35In') {
22153 cue.value.data = new Uint8Array(cue.value.data.match(/[\da-f]{2}/gi)).buffer;
22154 }
22155
22156 metadataTrack.addCue(cue);
22157 }
22158
22159 dateRange.processDateRange();
22160 });
22161 };
22162 /**
22163 * Create metadata text track on video.js if it does not exist
22164 *
22165 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
22166 * @param {string} dispatchType the inband metadata track dispatch type
22167 * @param {Object} tech the video.js tech
22168 * @private
22169 */
22170
22171 const createMetadataTrackIfNotExists = (inbandTextTracks, dispatchType, tech) => {
22172 if (inbandTextTracks.metadataTrack_) {
22173 return;
22174 }
22175
22176 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
22177 kind: 'metadata',
22178 label: 'Timed Metadata'
22179 }, false).track;
22180
22181 if (!videojs__default["default"].browser.IS_ANY_SAFARI) {
22182 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
22183 }
22184 };
22185 /**
22186 * Remove cues from a track on video.js.
22187 *
22188 * @param {Double} start start of where we should remove the cue
22189 * @param {Double} end end of where we should remove the cue
22190 * @param {Object} track the text track to remove the cues from
22191 * @private
22192 */
22193
22194 const removeCuesFromTrack = function (start, end, track) {
22195 let i;
22196 let cue;
22197
22198 if (!track) {
22199 return;
22200 }
22201
22202 if (!track.cues) {
22203 return;
22204 }
22205
22206 i = track.cues.length;
22207
22208 while (i--) {
22209 cue = track.cues[i]; // Remove any cue within the provided start and end time
22210
22211 if (cue.startTime >= start && cue.endTime <= end) {
22212 track.removeCue(cue);
22213 }
22214 }
22215 };
22216 /**
22217 * Remove duplicate cues from a track on video.js (a cue is considered a
22218 * duplicate if it has the same time interval and text as another)
22219 *
22220 * @param {Object} track the text track to remove the duplicate cues from
22221 * @private
22222 */
22223
22224 const removeDuplicateCuesFromTrack = function (track) {
22225 const cues = track.cues;
22226
22227 if (!cues) {
22228 return;
22229 }
22230
22231 const uniqueCues = {};
22232
22233 for (let i = cues.length - 1; i >= 0; i--) {
22234 const cue = cues[i];
22235 const cueKey = `${cue.startTime}-${cue.endTime}-${cue.text}`;
22236
22237 if (uniqueCues[cueKey]) {
22238 track.removeCue(cue);
22239 } else {
22240 uniqueCues[cueKey] = cue;
22241 }
22242 }
22243 };
22244
22245 /**
22246 * mux.js
22247 *
22248 * Copyright (c) Brightcove
22249 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
22250 */
22251 var ONE_SECOND_IN_TS = 90000,
22252 // 90kHz clock
22253 secondsToVideoTs,
22254 secondsToAudioTs,
22255 videoTsToSeconds,
22256 audioTsToSeconds,
22257 audioTsToVideoTs,
22258 videoTsToAudioTs,
22259 metadataTsToSeconds;
22260
22261 secondsToVideoTs = function (seconds) {
22262 return seconds * ONE_SECOND_IN_TS;
22263 };
22264
22265 secondsToAudioTs = function (seconds, sampleRate) {
22266 return seconds * sampleRate;
22267 };
22268
22269 videoTsToSeconds = function (timestamp) {
22270 return timestamp / ONE_SECOND_IN_TS;
22271 };
22272
22273 audioTsToSeconds = function (timestamp, sampleRate) {
22274 return timestamp / sampleRate;
22275 };
22276
22277 audioTsToVideoTs = function (timestamp, sampleRate) {
22278 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
22279 };
22280
22281 videoTsToAudioTs = function (timestamp, sampleRate) {
22282 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
22283 };
22284 /**
22285 * Adjust ID3 tag or caption timing information by the timeline pts values
22286 * (if keepOriginalTimestamps is false) and convert to seconds
22287 */
22288
22289
22290 metadataTsToSeconds = function (timestamp, timelineStartPts, keepOriginalTimestamps) {
22291 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
22292 };
22293
22294 var clock = {
22295 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,
22296 secondsToVideoTs: secondsToVideoTs,
22297 secondsToAudioTs: secondsToAudioTs,
22298 videoTsToSeconds: videoTsToSeconds,
22299 audioTsToSeconds: audioTsToSeconds,
22300 audioTsToVideoTs: audioTsToVideoTs,
22301 videoTsToAudioTs: videoTsToAudioTs,
22302 metadataTsToSeconds: metadataTsToSeconds
22303 };
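// Quick sanity checks for the 90kHz clock helpers:
//
//   clock.secondsToVideoTs(2);            // => 180000 (2s on the 90kHz clock)
//   clock.videoTsToSeconds(45000);        // => 0.5
//   clock.audioTsToVideoTs(44100, 44100); // => 90000 (1s of 44.1kHz audio)
//   clock.metadataTsToSeconds(180000, 90000, false); // => 1 ((180000 - 90000) / 90000)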
22304
22305 /**
22306 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
22307 * front of the current time.
22308 *
22309 * @param {Array} buffer
22310 * The current buffer of gop information
22311 * @param {number} currentTime
22312 * The current time
22313 * @param {Double} mapping
22314 * Offset to map display time to stream presentation time
22315 * @return {Array}
22316 * List of gops considered safe to append over
22317 */
22318
22319 const gopsSafeToAlignWith = (buffer, currentTime, mapping) => {
22320 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
22321 return [];
22322 } // pts value for current time + 3 seconds to give a bit more wiggle room
22323
22324
22325 const currentTimePts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);
22326 let i;
22327
22328 for (i = 0; i < buffer.length; i++) {
22329 if (buffer[i].pts > currentTimePts) {
22330 break;
22331 }
22332 }
22333
22334 return buffer.slice(i);
22335 };
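// Worked example (hypothetical gop buffer, mapping of 0): with currentTime 10
// the cutoff is (10 - 0 + 3) * 90000 = 1170000 pts, so only gops past it survive:
//
//   gopsSafeToAlignWith([{ pts: 900000 }, { pts: 1260000 }], 10, 0);
//   // => [{ pts: 1260000 }]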
22336 /**
22337 * Appends gop information (timing and byteLength) received by the transmuxer for the
22338 * gops appended in the last call to appendBuffer
22339 *
22340 * @param {Array} buffer
22341 * The current buffer of gop information
22342 * @param {Array} gops
22343 * List of new gop information
22344 * @param {boolean} replace
22345 * If true, replace the buffer with the new gop information. If false, append the
22346 * new gop information to the buffer at the correct position in time.
22347 * @return {Array}
22348 * Updated list of gop information
22349 */
22350
22351 const updateGopBuffer = (buffer, gops, replace) => {
22352 if (!gops.length) {
22353 return buffer;
22354 }
22355
22356 if (replace) {
22357 // If we are in safe append mode, then completely overwrite the gop buffer
22358 // with the most recently appended data. This will make sure that when appending
22359 // future segments, we only try to align with gops that are both ahead of current
22360 // time and in the last segment appended.
22361 return gops.slice();
22362 }
22363
22364 const start = gops[0].pts;
22365 let i = 0;
22366
22367 for (i; i < buffer.length; i++) {
22368 if (buffer[i].pts >= start) {
22369 break;
22370 }
22371 }
22372
22373 return buffer.slice(0, i).concat(gops);
22374 };
22375 /**
22376 * Removes gop information in buffer that overlaps with provided start and end
22377 *
22378 * @param {Array} buffer
22379 * The current buffer of gop information
22380 * @param {Double} start
22381 * position to start the remove at
22382 * @param {Double} end
22383 * position to end the remove at
22384 * @param {Double} mapping
22385 * Offset to map display time to stream presentation time
22386 */
22387
22388 const removeGopBuffer = (buffer, start, end, mapping) => {
22389 const startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
22390 const endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
22391 const updatedBuffer = buffer.slice();
22392 let i = buffer.length;
22393
22394 while (i--) {
22395 if (buffer[i].pts <= endPts) {
22396 break;
22397 }
22398 }
22399
22400 if (i === -1) {
22401 // no removal because end of remove range is before start of buffer
22402 return updatedBuffer;
22403 }
22404
22405 let j = i + 1;
22406
22407 while (j--) {
22408 if (buffer[j].pts <= startPts) {
22409 break;
22410 }
22411 } // clamp remove range start to 0 index
22412
22413
22414 j = Math.max(j, 0);
22415 updatedBuffer.splice(j, i - j + 1);
22416 return updatedBuffer;
22417 };
22418
22419 const shallowEqual = function (a, b) {
22420 // if both are undefined
22421 // or one or the other is undefined
22422 // they are not equal
22423 if (!a && !b || !a && b || a && !b) {
22424 return false;
22425 } // they are the same object and thus, equal
22426
22427
22428 if (a === b) {
22429 return true;
22430 } // sort keys so we can make sure they have
22431 // all the same keys later.
22432
22433
22434 const akeys = Object.keys(a).sort();
22435 const bkeys = Object.keys(b).sort(); // different number of keys, not equal
22436
22437 if (akeys.length !== bkeys.length) {
22438 return false;
22439 }
22440
22441 for (let i = 0; i < akeys.length; i++) {
22442 const key = akeys[i]; // different sorted keys, not equal
22443
22444 if (key !== bkeys[i]) {
22445 return false;
22446 } // different values, not equal
22447
22448
22449 if (a[key] !== b[key]) {
22450 return false;
22451 }
22452 }
22453
22454 return true;
22455 };
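// Behavior sketch:
//
//   shallowEqual({ a: 1, b: 2 }, { b: 2, a: 1 }); // => true (same keys and values)
//   shallowEqual({ a: 1 }, { a: '1' });           // => false (strict comparison)
//   shallowEqual(undefined, undefined);           // => false (by design, see above)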
22456
22457 // https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
22458 const QUOTA_EXCEEDED_ERR = 22;
22459
22460 /**
22461 * The segment loader has no recourse except to fetch a segment in the
22462 * current playlist and use the internal timestamps in that segment to
22463 * generate a syncPoint. This function returns a good candidate index
22464 * for that process.
22465 *
22466 * @param {Array} segments - the segments array from a playlist.
22467 * @return {number} An index of a segment from the playlist to load
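 *
 * Illustrative call (hypothetical 6-second segments on timeline 1):
 *
 *   getSyncSegmentCandidate(1, [
 *     { timeline: 1, duration: 6 },
 *     { timeline: 1, duration: 6 },
 *     { timeline: 1, duration: 6 }
 *   ], 8);
 *   // => 1, since the cumulative duration first exceeds the 8s target at index 1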
22468 */
22469
22470 const getSyncSegmentCandidate = function (currentTimeline, segments, targetTime) {
22471 segments = segments || [];
22472 const timelineSegments = [];
22473 let time = 0;
22474
22475 for (let i = 0; i < segments.length; i++) {
22476 const segment = segments[i];
22477
22478 if (currentTimeline === segment.timeline) {
22479 timelineSegments.push(i);
22480 time += segment.duration;
22481
22482 if (time > targetTime) {
22483 return i;
22484 }
22485 }
22486 }
22487
22488 if (timelineSegments.length === 0) {
22489 return 0;
22490 } // default to the last timeline segment
22491
22492
22493 return timelineSegments[timelineSegments.length - 1];
22494 }; // In the event of a quota exceeded error, keep at least one second of back buffer. This
22495 // number was arbitrarily chosen and may be updated in the future, but seemed reasonable
22496 // as a start to prevent any potential issues with removing content too close to the
22497 // playhead.
22498
22499 const MIN_BACK_BUFFER = 1; // in seconds
22500 
22501 const CHECK_BUFFER_DELAY = 500; // in ms
22502
22503 const finite = num => typeof num === 'number' && isFinite(num); // With most content hovering around 30fps, if a segment has a duration less than a half
22504 // frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
22505 // not accurately reflect the rest of the content.
22506
22507
22508 const MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
22509 const illegalMediaSwitch = (loaderType, startingMedia, trackInfo) => {
22510 // Although these checks should most likely cover non 'main' types, for now it narrows
22511 // the scope of our checks.
22512 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
22513 return null;
22514 }
22515
22516 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
22517 return 'Neither audio nor video found in segment.';
22518 }
22519
22520 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
22521 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
22522 }
22523
22524 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
22525 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
22526 }
22527
22528 return null;
22529 };
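 // Illustrative sketch (not part of the library) of the guard above, using
 // hypothetical track-info objects:
 //
 //   illegalMediaSwitch(
 //     'main',
 //     {hasVideo: true, hasAudio: true},  // what playback started with
 //     {hasVideo: false, hasAudio: true}  // what the new segment contains
 //   );
 //   // => 'Only audio found in segment when we expected video. ...'
 //
 //   illegalMediaSwitch('audio', startingMedia, trackInfo);
 //   // => null, only the 'main' loader is checked for now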
22530 /**
22531 * Calculates a time value that is safe to remove from the back buffer without interrupting
22532 * playback.
22533 *
22534 * @param {TimeRange} seekable
22535 * The current seekable range
22536 * @param {number} currentTime
22537 * The current time of the player
22538 * @param {number} targetDuration
22539 * The target duration of the current playlist
22540 * @return {number}
22541 * Time that is safe to remove from the back buffer without interrupting playback
22542 */
22543
22544 const safeBackBufferTrimTime = (seekable, currentTime, targetDuration) => {
22545 // 30 seconds before the playhead provides a safe default for trimming.
22546 //
22547 // Choosing a reasonable default is particularly important for high bitrate content and
22548 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
22549 // throw an APPEND_BUFFER_ERR.
22550 let trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
22551
22552 if (seekable.length) {
22553 // Some live playlists may have a shorter window of content than the full allowed back
22554 // buffer. For these playlists, don't save content that's no longer within the window.
22555 trimTime = Math.max(trimTime, seekable.start(0));
22556 } // Don't remove within target duration of the current time to avoid the possibility of
22557 // removing the GOP currently being played, as removing it can cause playback stalls.
22558
22559
22560 const maxTrimTime = currentTime - targetDuration;
22561 return Math.min(maxTrimTime, trimTime);
22562 };
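 // Worked example (not from the library) of the trim calculation above,
 // assuming Config.BACK_BUFFER_LENGTH is 30 as the comment suggests:
 //
 //   currentTime = 70, targetDuration = 10, seekable = [50, 120]
 //   trimTime    = 70 - 30 = 40
 //   trimTime    = max(40, seekable.start(0) = 50) = 50
 //   maxTrimTime = 70 - 10 = 60
 //   result      = min(60, 50) = 50, so [0, 50) is safe to remove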
22563 const segmentInfoString = segmentInfo => {
22564 const {
22565 startOfSegment,
22566 duration,
22567 segment,
22568 part,
22569 playlist: {
22570 mediaSequence: seq,
22571 id,
22572 segments = []
22573 },
22574 mediaIndex: index,
22575 partIndex,
22576 timeline
22577 } = segmentInfo;
22578 const segmentLen = segments.length - 1;
22579 let selection = 'mediaIndex/partIndex increment';
22580
22581 if (segmentInfo.getMediaInfoForTime) {
22582 selection = `getMediaInfoForTime (${segmentInfo.getMediaInfoForTime})`;
22583 } else if (segmentInfo.isSyncRequest) {
22584 selection = 'getSyncSegmentCandidate (isSyncRequest)';
22585 }
22586
22587 if (segmentInfo.independent) {
22588 selection += ` with independent ${segmentInfo.independent}`;
22589 }
22590
22591 const hasPartIndex = typeof partIndex === 'number';
22592 const name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
22593 const zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
22594 preloadSegment: segment
22595 }) - 1 : 0;
22596 return `${name} [${seq + index}/${seq + segmentLen}]` + (hasPartIndex ? ` part [${partIndex}/${zeroBasedPartCount}]` : '') + ` segment start/end [${segment.start} => ${segment.end}]` + (hasPartIndex ? ` part start/end [${part.start} => ${part.end}]` : '') + ` startOfSegment [${startOfSegment}]` + ` duration [${duration}]` + ` timeline [${timeline}]` + ` selected by [${selection}]` + ` playlist [${id}]`;
22597 };
22598
22599 const timingInfoPropertyForMedia = mediaType => `${mediaType}TimingInfo`;
22600 /**
22601 * Returns the timestamp offset to use for the segment.
22602 *
22603 * @param {number} segmentTimeline
22604 * The timeline of the segment
22605 * @param {number} currentTimeline
22606 * The timeline currently being followed by the loader
22607 * @param {number} startOfSegment
22608 * The estimated segment start
22609 * @param {TimeRange[]} buffered
22610 * The loader's buffer
22611 * @param {boolean} overrideCheck
 22612 * If true, skip the checks for whether the timestamp offset should be set
 22613 * and set it directly to a value.
22614 *
22615 * @return {number|null}
22616 * Either a number representing a new timestamp offset, or null if the segment is
22617 * part of the same timeline
22618 */
22619
22620
22621 const timestampOffsetForSegment = ({
22622 segmentTimeline,
22623 currentTimeline,
22624 startOfSegment,
22625 buffered,
22626 overrideCheck
22627 }) => {
22628 // Check to see if we are crossing a discontinuity to see if we need to set the
22629 // timestamp offset on the transmuxer and source buffer.
22630 //
22631 // Previously, we changed the timestampOffset if the start of this segment was less than
22632 // the currently set timestampOffset, but this isn't desirable as it can produce bad
22633 // behavior, especially around long running live streams.
22634 if (!overrideCheck && segmentTimeline === currentTimeline) {
22635 return null;
22636 } // When changing renditions, it's possible to request a segment on an older timeline. For
22637 // instance, given two renditions with the following:
22638 //
22639 // #EXTINF:10
22640 // segment1
22641 // #EXT-X-DISCONTINUITY
22642 // #EXTINF:10
22643 // segment2
22644 // #EXTINF:10
22645 // segment3
22646 //
22647 // And the current player state:
22648 //
22649 // current time: 8
22650 // buffer: 0 => 20
22651 //
22652 // The next segment on the current rendition would be segment3, filling the buffer from
22653 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
22654 // then the next segment to be requested will be segment1 from the new rendition in
22655 // order to fill time 8 and onwards. Using the buffered end would result in repeated
22656 // content (since it would position segment1 of the new rendition starting at 20s). This
22657 // case can be identified when the new segment's timeline is a prior value. Instead of
22658 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
22659 // more accurate to the actual start time of the segment.
22660
22661
22662 if (segmentTimeline < currentTimeline) {
22663 return startOfSegment;
22664 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
22665 // value uses the end of the last segment if it is available. While this value
22666 // should often be correct, it's better to rely on the buffered end, as the new
22667 // content post discontinuity should line up with the buffered end as if it were
22668 // time 0 for the new content.
22669
22670
22671 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
22672 };
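 // Illustrative sketch (not part of the library) of how the branches above
 // resolve, assuming a buffered range of [0, 20]:
 //
 //   timestampOffsetForSegment({segmentTimeline: 1, currentTimeline: 1,
 //     startOfSegment: 20, buffered, overrideCheck: false});
 //   // => null, same timeline, nothing to change
 //
 //   timestampOffsetForSegment({segmentTimeline: 0, currentTimeline: 1,
 //     startOfSegment: 8, buffered, overrideCheck: false});
 //   // => 8, older timeline after a rendition switch, trust startOfSegment
 //
 //   timestampOffsetForSegment({segmentTimeline: 2, currentTimeline: 1,
 //     startOfSegment: 19.8, buffered, overrideCheck: false});
 //   // => 20, new timeline, line the new content up with buffered.end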
22673 /**
22674 * Returns whether or not the loader should wait for a timeline change from the timeline
22675 * change controller before processing the segment.
22676 *
22677 * Primary timing in VHS goes by video. This is different from most media players, as
22678 * audio is more often used as the primary timing source. For the foreseeable future, VHS
22679 * will continue to use video as the primary timing source, due to the current logic and
22680 * expectations built around it.
22681
22682 * Since the timing follows video, in order to maintain sync, the video loader is
22683 * responsible for setting both audio and video source buffer timestamp offsets.
22684 *
22685 * Setting different values for audio and video source buffers could lead to
22686 * desyncing. The following examples demonstrate some of the situations where this
22687 * distinction is important. Note that all of these cases involve demuxed content. When
22688 * content is muxed, the audio and video are packaged together, therefore syncing
22689 * separate media playlists is not an issue.
22690 *
22691 * CASE 1: Audio prepares to load a new timeline before video:
22692 *
22693 * Timeline: 0 1
22694 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
22695 * Audio Loader: ^
22696 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
22697 * Video Loader ^
22698 *
22699 * In the above example, the audio loader is preparing to load the 6th segment, the first
22700 * after a discontinuity, while the video loader is still loading the 5th segment, before
22701 * the discontinuity.
22702 *
22703 * If the audio loader goes ahead and loads and appends the 6th segment before the video
22704 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
22705 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
22706 * the audio loader must provide the audioAppendStart value to trim the content in the
22707 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
22708 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
22709 * segment until that value is provided.
22710 *
22711 * CASE 2: Video prepares to load a new timeline before audio:
22712 *
22713 * Timeline: 0 1
22714 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
22715 * Audio Loader: ^
22716 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
22717 * Video Loader ^
22718 *
22719 * In the above example, the video loader is preparing to load the 6th segment, the first
22720 * after a discontinuity, while the audio loader is still loading the 5th segment, before
22721 * the discontinuity.
22722 *
22723 * If the video loader goes ahead and loads and appends the 6th segment, then once the
22724 * segment is loaded and processed, both the video and audio timestamp offsets will be
22725 * set, since video is used as the primary timing source. This is to ensure content lines
22726 * up appropriately, as any modifications to the video timing are reflected by audio when
22727 * the video loader sets the audio and video timestamp offsets to the same value. However,
22728 * setting the timestamp offset for audio before audio has had a chance to change
22729 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
22730 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
22731 *
22732 * CASE 3: When seeking, audio prepares to load a new timeline before video
22733 *
22734 * Timeline: 0 1
22735 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
22736 * Audio Loader: ^
22737 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
22738 * Video Loader ^
22739 *
22740 * In the above example, both audio and video loaders are loading segments from timeline
22741 * 0, but imagine that the seek originated from timeline 1.
22742 *
22743 * When seeking to a new timeline, the timestamp offset will be set based on the expected
22744 * segment start of the loaded video segment. In order to maintain sync, the audio loader
22745 * must wait for the video loader to load its segment and update both the audio and video
22746 * timestamp offsets before it may load and append its own segment. This is the case
22747 * whether the seek results in a mismatched segment request (e.g., the audio loader
22748 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
22749 * loaders choose to load the same segment index from each playlist, as the segments may
22750 * not be aligned perfectly, even for matching segment indexes.
22751 *
22752 * @param {Object} timelinechangeController
22753 * @param {number} currentTimeline
22754 * The timeline currently being followed by the loader
22755 * @param {number} segmentTimeline
22756 * The timeline of the segment being loaded
22757 * @param {('main'|'audio')} loaderType
22758 * The loader type
22759 * @param {boolean} audioDisabled
22760 * Whether the audio is disabled for the loader. This should only be true when the
22761 * loader may have muxed audio in its segment, but should not append it, e.g., for
22762 * the main loader when an alternate audio playlist is active.
22763 *
22764 * @return {boolean}
22765 * Whether the loader should wait for a timeline change from the timeline change
22766 * controller before processing the segment
22767 */
22768
22769 const shouldWaitForTimelineChange = ({
22770 timelineChangeController,
22771 currentTimeline,
22772 segmentTimeline,
22773 loaderType,
22774 audioDisabled
22775 }) => {
22776 if (currentTimeline === segmentTimeline) {
22777 return false;
22778 }
22779
22780 if (loaderType === 'audio') {
22781 const lastMainTimelineChange = timelineChangeController.lastTimelineChange({
22782 type: 'main'
22783 }); // Audio loader should wait if:
22784 //
22785 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
22786 // * main hasn't yet changed to the timeline audio is looking to load
22787
22788 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
22789 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
22790 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
22791 // loader's segments (or the content is audio/video only and handled by the main
22792 // loader).
22793
22794
22795 if (loaderType === 'main' && audioDisabled) {
22796 const pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
22797 type: 'audio'
22798 }); // Main loader should wait for the audio loader if audio is not pending a timeline
22799 // change to the current timeline.
22800 //
22801 // Since the main loader is responsible for setting the timestamp offset for both
22802 // audio and video, the main loader must wait for audio to be about to change to its
22803 // timeline before setting the offset, otherwise, if audio is behind in loading,
22804 // segments from the previous timeline would be adjusted by the new timestamp offset.
22805 //
22806 // This requirement means that video will not cross a timeline until the audio is
22807 // about to cross to it, so that way audio and video will always cross the timeline
22808 // together.
22809 //
22810 // In addition to normal timeline changes, these rules also apply to the start of a
22811 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
22812 // that these rules apply to the first timeline change because if they did not, it's
22813 // possible that the main loader will cross two timelines before the audio loader has
22814 // crossed one. Logic may be implemented to handle the startup as a special case, but
22815 // it's easier to simply treat all timeline changes the same.
22816
22817 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
22818 return false;
22819 }
22820
22821 return true;
22822 }
22823
22824 return false;
22825 };
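 // Illustrative sketch (not part of the library) of the rules above, assuming
 // no main timeline change has been recorded yet:
 //
 //   shouldWaitForTimelineChange({timelineChangeController,
 //     currentTimeline: 0, segmentTimeline: 1, loaderType: 'audio'});
 //   // => true, audio must wait until main has changed to timeline 1
 //
 //   // once main records a change with to === 1, the same call returns false
 //   // and audio may cross the discontinuity too. For the main loader with
 //   // demuxed audio (audioDisabled: true), the mirror-image check runs
 //   // against audio's *pending* timeline change instead.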
22826 const mediaDuration = timingInfos => {
22827 let maxDuration = 0;
22828 ['video', 'audio'].forEach(function (type) {
22829 const typeTimingInfo = timingInfos[`${type}TimingInfo`];
22830
22831 if (!typeTimingInfo) {
22832 return;
22833 }
22834
22835 const {
22836 start,
22837 end
22838 } = typeTimingInfo;
22839 let duration;
22840
22841 if (typeof start === 'bigint' || typeof end === 'bigint') {
22842 duration = window.BigInt(end) - window.BigInt(start);
22843 } else if (typeof start === 'number' && typeof end === 'number') {
22844 duration = end - start;
22845 }
22846
22847 if (typeof duration !== 'undefined' && duration > maxDuration) {
22848 maxDuration = duration;
22849 }
22850 }); // convert back to a number if it is lower than MAX_SAFE_INTEGER
22851 // as we only need BigInt when we are above that.
22852
22853 if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
22854 maxDuration = Number(maxDuration);
22855 }
22856
22857 return maxDuration;
22858 };
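 // Illustrative sketch (not part of the library): the helper above keeps the
 // larger of the audio/video durations and only stays in BigInt when the
 // timing values demand it:
 //
 //   mediaDuration({
 //     videoTimingInfo: {start: 0, end: 10.01},
 //     audioTimingInfo: {start: 0, end: 9.98}
 //   }); // => 10.01, a plain number
 //
 //   // with bigint start/end values the subtraction stays in BigInt, then is
 //   // converted back to a number once below Number.MAX_SAFE_INTEGER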
22859 const segmentTooLong = ({
22860 segmentDuration,
22861 maxDuration
22862 }) => {
22863 // 0 duration segments are most likely due to metadata only segments or a lack of
22864 // information.
22865 if (!segmentDuration) {
22866 return false;
22867 } // For HLS:
22868 //
22869 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
22870 // The EXTINF duration of each Media Segment in the Playlist
22871 // file, when rounded to the nearest integer, MUST be less than or equal
22872 // to the target duration; longer segments can trigger playback stalls
22873 // or other errors.
22874 //
22875 // For DASH, the mpd-parser uses the largest reported segment duration as the target
22876 // duration. Although that reported duration is occasionally approximate (i.e., not
22877 // exact), a strict check may report that a segment is too long more often in DASH.
22878
22879
22880 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
22881 };
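 // Worked example (not from the library) of the rounding rule above, with
 // targetDuration = 10 and TIME_FUDGE_FACTOR being a small fraction of a
 // second:
 //
 //   segmentTooLong({segmentDuration: 10.4, maxDuration: 10});
 //   // => false, Math.round(10.4) = 10, not greater than 10 + fudge
 //   segmentTooLong({segmentDuration: 10.6, maxDuration: 10});
 //   // => true, Math.round(10.6) = 11 > 10 + fudge
 //   segmentTooLong({segmentDuration: 0, maxDuration: 10});
 //   // => false, zero duration is treated as a lack of information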
22882 const getTroublesomeSegmentDurationMessage = (segmentInfo, sourceType) => {
22883 // Right now we aren't following DASH's timing model exactly, so only perform
22884 // this check for HLS content.
22885 if (sourceType !== 'hls') {
22886 return null;
22887 }
22888
22889 const segmentDuration = mediaDuration({
22890 audioTimingInfo: segmentInfo.audioTimingInfo,
22891 videoTimingInfo: segmentInfo.videoTimingInfo
22892 }); // Don't report if we lack information.
22893 //
22894 // If the segment has a duration of 0 it is either a lack of information or a
22895 // metadata only segment and shouldn't be reported here.
22896
22897 if (!segmentDuration) {
22898 return null;
22899 }
22900
22901 const targetDuration = segmentInfo.playlist.targetDuration;
22902 const isSegmentWayTooLong = segmentTooLong({
22903 segmentDuration,
22904 maxDuration: targetDuration * 2
22905 });
22906 const isSegmentSlightlyTooLong = segmentTooLong({
22907 segmentDuration,
22908 maxDuration: targetDuration
22909 });
22910 const segmentTooLongMessage = `Segment with index ${segmentInfo.mediaIndex} ` + `from playlist ${segmentInfo.playlist.id} ` + `has a duration of ${segmentDuration} ` + `when the reported duration is ${segmentInfo.duration} ` + `and the target duration is ${targetDuration}. ` + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
22911
22912 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
22913 return {
22914 severity: isSegmentWayTooLong ? 'warn' : 'info',
22915 message: segmentTooLongMessage
22916 };
22917 }
22918
22919 return null;
22920 };
22921 /**
22922 * An object that manages segment loading and appending.
22923 *
22924 * @class SegmentLoader
22925 * @param {Object} options required and optional options
22926 * @extends videojs.EventTarget
22927 */
22928
22929 class SegmentLoader extends videojs__default["default"].EventTarget {
22930 constructor(settings, options = {}) {
22931 super(); // check pre-conditions
22932
22933 if (!settings) {
22934 throw new TypeError('Initialization settings are required');
22935 }
22936
22937 if (typeof settings.currentTime !== 'function') {
22938 throw new TypeError('No currentTime getter specified');
22939 }
22940
22941 if (!settings.mediaSource) {
22942 throw new TypeError('No MediaSource specified');
22943 } // public properties
22944
22945
22946 this.bandwidth = settings.bandwidth;
22947 this.throughput = {
22948 rate: 0,
22949 count: 0
22950 };
22951 this.roundTrip = NaN;
22952 this.resetStats_();
22953 this.mediaIndex = null;
22954 this.partIndex = null; // private settings
22955
22956 this.hasPlayed_ = settings.hasPlayed;
22957 this.currentTime_ = settings.currentTime;
22958 this.seekable_ = settings.seekable;
22959 this.seeking_ = settings.seeking;
22960 this.duration_ = settings.duration;
22961 this.mediaSource_ = settings.mediaSource;
22962 this.vhs_ = settings.vhs;
22963 this.loaderType_ = settings.loaderType;
22964 this.currentMediaInfo_ = void 0;
22965 this.startingMediaInfo_ = void 0;
22966 this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
22967 this.goalBufferLength_ = settings.goalBufferLength;
22968 this.sourceType_ = settings.sourceType;
22969 this.sourceUpdater_ = settings.sourceUpdater;
22970 this.inbandTextTracks_ = settings.inbandTextTracks;
22971 this.state_ = 'INIT';
22972 this.timelineChangeController_ = settings.timelineChangeController;
22973 this.shouldSaveSegmentTimingInfo_ = true;
22974 this.parse708captions_ = settings.parse708captions;
22975 this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
22976 this.captionServices_ = settings.captionServices;
22977 this.exactManifestTimings = settings.exactManifestTimings;
22978 this.addMetadataToTextTrack = settings.addMetadataToTextTrack; // private instance variables
22979
22980 this.checkBufferTimeout_ = null;
22981 this.error_ = void 0;
22982 this.currentTimeline_ = -1;
22983 this.shouldForceTimestampOffsetAfterResync_ = false;
22984 this.pendingSegment_ = null;
22985 this.xhrOptions_ = null;
22986 this.pendingSegments_ = [];
22987 this.audioDisabled_ = false;
22988 this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
22989
22990 this.gopBuffer_ = [];
22991 this.timeMapping_ = 0;
22992 this.safeAppend_ = false;
22993 this.appendInitSegment_ = {
22994 audio: true,
22995 video: true
22996 };
22997 this.playlistOfLastInitSegment_ = {
22998 audio: null,
22999 video: null
23000 };
23001 this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
23002 // information yet to start the loading process (e.g., if the audio loader wants to
23003 // load a segment from the next timeline but the main loader hasn't yet crossed that
23004 // timeline), then the load call will be added to the queue until it is ready to be
23005 // processed.
23006
23007 this.loadQueue_ = [];
23008 this.metadataQueue_ = {
23009 id3: [],
23010 caption: []
23011 };
23012 this.waitingOnRemove_ = false;
23013 this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
23014
23015 this.activeInitSegmentId_ = null;
23016 this.initSegments_ = {}; // HLSe playback
23017
23018 this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
23019 this.keyCache_ = {};
23020 this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
23021 // between a time in the display time and a segment index within
23022 // a playlist
23023
23024 this.syncController_ = settings.syncController;
23025 this.syncPoint_ = {
23026 segmentIndex: 0,
23027 time: 0
23028 };
23029 this.transmuxer_ = this.createTransmuxer_();
23030
23031 this.triggerSyncInfoUpdate_ = () => this.trigger('syncinfoupdate');
23032
23033 this.syncController_.on('syncinfoupdate', this.triggerSyncInfoUpdate_);
23034 this.mediaSource_.addEventListener('sourceopen', () => {
23035 if (!this.isEndOfStream_()) {
23036 this.ended_ = false;
23037 }
23038 }); // ...for determining the fetch location
23039
23040 this.fetchAtBuffer_ = false;
23041 this.logger_ = logger(`SegmentLoader[${this.loaderType_}]`);
23042 Object.defineProperty(this, 'state', {
23043 get() {
23044 return this.state_;
23045 },
23046
23047 set(newState) {
23048 if (newState !== this.state_) {
23049 this.logger_(`${this.state_} -> ${newState}`);
23050 this.state_ = newState;
23051 this.trigger('statechange');
23052 }
23053 }
23054
23055 });
23056 this.sourceUpdater_.on('ready', () => {
23057 if (this.hasEnoughInfoToAppend_()) {
23058 this.processCallQueue_();
23059 }
23060 }); // Only the main loader needs to listen for pending timeline changes, as the main
23061 // loader should wait for audio to be ready to change its timeline so that both main
23062 // and audio timelines change together. For more details, see the
23063 // shouldWaitForTimelineChange function.
23064
23065 if (this.loaderType_ === 'main') {
23066 this.timelineChangeController_.on('pendingtimelinechange', () => {
23067 if (this.hasEnoughInfoToAppend_()) {
23068 this.processCallQueue_();
23069 }
23070 });
23071 } // The main loader only listens on pending timeline changes, but the audio loader,
23072 // since its loads follow main, needs to listen on timeline changes. For more details,
23073 // see the shouldWaitForTimelineChange function.
23074
23075
23076 if (this.loaderType_ === 'audio') {
23077 this.timelineChangeController_.on('timelinechange', () => {
23078 if (this.hasEnoughInfoToLoad_()) {
23079 this.processLoadQueue_();
23080 }
23081
23082 if (this.hasEnoughInfoToAppend_()) {
23083 this.processCallQueue_();
23084 }
23085 });
23086 }
23087 }
23088
23089 createTransmuxer_() {
23090 return segmentTransmuxer.createTransmuxer({
23091 remux: false,
23092 alignGopsAtEnd: this.safeAppend_,
23093 keepOriginalTimestamps: true,
23094 parse708captions: this.parse708captions_,
23095 captionServices: this.captionServices_
23096 });
23097 }
23098 /**
23099 * reset all of our media stats
23100 *
23101 * @private
23102 */
23103
23104
23105 resetStats_() {
23106 this.mediaBytesTransferred = 0;
23107 this.mediaRequests = 0;
23108 this.mediaRequestsAborted = 0;
23109 this.mediaRequestsTimedout = 0;
23110 this.mediaRequestsErrored = 0;
23111 this.mediaTransferDuration = 0;
23112 this.mediaSecondsLoaded = 0;
23113 this.mediaAppends = 0;
23114 }
23115 /**
23116 * dispose of the SegmentLoader and reset to the default state
23117 */
23118
23119
23120 dispose() {
23121 this.trigger('dispose');
23122 this.state = 'DISPOSED';
23123 this.pause();
23124 this.abort_();
23125
23126 if (this.transmuxer_) {
23127 this.transmuxer_.terminate();
23128 }
23129
23130 this.resetStats_();
23131
23132 if (this.checkBufferTimeout_) {
23133 window.clearTimeout(this.checkBufferTimeout_);
23134 }
23135
23136 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
23137 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
23138 }
23139
23140 this.off();
23141 }
23142
23143 setAudio(enable) {
23144 this.audioDisabled_ = !enable;
23145
23146 if (enable) {
23147 this.appendInitSegment_.audio = true;
23148 } else {
23149 // remove current track audio if it gets disabled
23150 this.sourceUpdater_.removeAudio(0, this.duration_());
23151 }
23152 }
23153 /**
 23154 * abort anything that the SegmentLoader is currently doing
23155 * and reset to a default state
23156 */
23157
23158
23159 abort() {
23160 if (this.state !== 'WAITING') {
23161 if (this.pendingSegment_) {
23162 this.pendingSegment_ = null;
23163 }
23164
23165 return;
23166 }
23167
23168 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
23169 // since we are no longer "waiting" on any requests. XHR callback is not always run
23170 // when the request is aborted. This will prevent the loader from being stuck in the
23171 // WAITING state indefinitely.
23172
23173 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
23174 // next segment
23175
23176 if (!this.paused()) {
23177 this.monitorBuffer_();
23178 }
23179 }
23180 /**
 23181 * abort all pending xhr requests and null any pending segments
23182 *
23183 * @private
23184 */
23185
23186
23187 abort_() {
23188 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
23189 this.pendingSegment_.abortRequests();
23190 } // clear out the segment being processed
23191
23192
23193 this.pendingSegment_ = null;
23194 this.callQueue_ = [];
23195 this.loadQueue_ = [];
23196 this.metadataQueue_.id3 = [];
23197 this.metadataQueue_.caption = [];
23198 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
23199 this.waitingOnRemove_ = false;
23200 window.clearTimeout(this.quotaExceededErrorRetryTimeout_);
23201 this.quotaExceededErrorRetryTimeout_ = null;
23202 }
23203
23204 checkForAbort_(requestId) {
23205 // If the state is APPENDING, then aborts will not modify the state, meaning the first
23206 // callback that happens should reset the state to READY so that loading can continue.
23207 if (this.state === 'APPENDING' && !this.pendingSegment_) {
23208 this.state = 'READY';
23209 return true;
23210 }
23211
23212 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
23213 return true;
23214 }
23215
23216 return false;
23217 }
23218 /**
 23219 * set an error on the segment loader and null out any pending segments
23220 *
23221 * @param {Error} error the error to set on the SegmentLoader
23222 * @return {Error} the error that was set or that is currently set
23223 */
23224
23225
23226 error(error) {
23227 if (typeof error !== 'undefined') {
23228 this.logger_('error occurred:', error);
23229 this.error_ = error;
23230 }
23231
23232 this.pendingSegment_ = null;
23233 return this.error_;
23234 }
23235
23236 endOfStream() {
23237 this.ended_ = true;
23238
23239 if (this.transmuxer_) {
23240 // need to clear out any cached data to prepare for the new segment
23241 segmentTransmuxer.reset(this.transmuxer_);
23242 }
23243
23244 this.gopBuffer_.length = 0;
23245 this.pause();
23246 this.trigger('ended');
23247 }
23248 /**
23249 * Indicates which time ranges are buffered
23250 *
23251 * @return {TimeRange}
23252 * TimeRange object representing the current buffered ranges
23253 */
23254
23255
23256 buffered_() {
23257 const trackInfo = this.getMediaInfo_();
23258
23259 if (!this.sourceUpdater_ || !trackInfo) {
23260 return createTimeRanges();
23261 }
23262
23263 if (this.loaderType_ === 'main') {
23264 const {
23265 hasAudio,
23266 hasVideo,
23267 isMuxed
23268 } = trackInfo;
23269
23270 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
23271 return this.sourceUpdater_.buffered();
23272 }
23273
23274 if (hasVideo) {
23275 return this.sourceUpdater_.videoBuffered();
23276 }
23277 } // One case that can be ignored for now is audio only with alt audio,
23278 // as we don't yet have proper support for that.
23279
23280
23281 return this.sourceUpdater_.audioBuffered();
23282 }
23283 /**
23284 * Gets and sets init segment for the provided map
23285 *
23286 * @param {Object} map
23287 * The map object representing the init segment to get or set
23288 * @param {boolean=} set
23289 * If true, the init segment for the provided map should be saved
23290 * @return {Object}
23291 * map object for desired init segment
23292 */
23293
23294
23295 initSegmentForMap(map, set = false) {
23296 if (!map) {
23297 return null;
23298 }
23299
23300 const id = initSegmentId(map);
23301 let storedMap = this.initSegments_[id];
23302
23303 if (set && !storedMap && map.bytes) {
23304 this.initSegments_[id] = storedMap = {
23305 resolvedUri: map.resolvedUri,
23306 byterange: map.byterange,
23307 bytes: map.bytes,
23308 tracks: map.tracks,
23309 timescales: map.timescales
23310 };
23311 }
23312
23313 return storedMap || map;
23314 }
23315 /**
23316 * Gets and sets key for the provided key
23317 *
23318 * @param {Object} key
23319 * The key object representing the key to get or set
23320 * @param {boolean=} set
23321 * If true, the key for the provided key should be saved
23322 * @return {Object}
23323 * Key object for desired key
23324 */
23325
23326
23327 segmentKey(key, set = false) {
23328 if (!key) {
23329 return null;
23330 }
23331
23332 const id = segmentKeyId(key);
23333 let storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
23334 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
23335
23336 if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
23337 this.keyCache_[id] = storedKey = {
23338 resolvedUri: key.resolvedUri,
23339 bytes: key.bytes
23340 };
23341 }
23342
23343 const result = {
23344 resolvedUri: (storedKey || key).resolvedUri
23345 };
23346
23347 if (storedKey) {
23348 result.bytes = storedKey.bytes;
23349 }
23350
23351 return result;
23352 }
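 // Illustrative sketch (not part of the library) of the read-through caching
 // above when cacheEncryptionKeys_ is enabled, for a hypothetical loader:
 //
 //   loader.segmentKey({resolvedUri, bytes}, true); // caches the key bytes
 //   loader.segmentKey({resolvedUri});              // => {resolvedUri, bytes}
 //
 //   // with caching disabled only the resolvedUri round-trips, so the key
 //   // bytes must be re-fetched for each segment that needs them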
23353 /**
23354 * Returns true if all configuration required for loading is present, otherwise false.
23355 *
 23356 * @return {boolean} True if all the configuration is ready for loading
23357 * @private
23358 */
23359
23360
23361 couldBeginLoading_() {
23362 return this.playlist_ && !this.paused();
23363 }
23364 /**
23365 * load a playlist and start to fill the buffer
23366 */
23367
23368
23369 load() {
23370 // un-pause
23371 this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
23372 // specified
23373
23374 if (!this.playlist_) {
23375 return;
23376 } // if all the configuration is ready, initialize and begin loading
23377
23378
23379 if (this.state === 'INIT' && this.couldBeginLoading_()) {
23380 return this.init_();
23381 } // if we're in the middle of processing a segment already, don't
23382 // kick off an additional segment request
23383
23384
23385 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
23386 return;
23387 }
23388
23389 this.state = 'READY';
23390 }
23391 /**
23392 * Once all the starting parameters have been specified, begin
23393 * operation. This method should only be invoked from the INIT
23394 * state.
23395 *
23396 * @private
23397 */
23398
23399
23400 init_() {
23401 this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
23402 // audio data from the muxed content should be removed
23403
23404 this.resetEverything();
23405 return this.monitorBuffer_();
23406 }
23407 /**
23408 * set a playlist on the segment loader
23409 *
23410 * @param {PlaylistLoader} media the playlist to set on the segment loader
23411 */
23412
23413
23414 playlist(newPlaylist, options = {}) {
23415 if (!newPlaylist) {
23416 return;
23417 }
23418
23419 const oldPlaylist = this.playlist_;
23420 const segmentInfo = this.pendingSegment_;
23421 this.playlist_ = newPlaylist;
23422 this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
23423 // is always our zero-time so force a sync update each time the playlist
23424 // is refreshed from the server
23425 //
23426 // Use the INIT state to determine if playback has started, as the playlist sync info
23427 // should be fixed once requests begin (as sync points are generated based on sync
23428 // info), but not before then.
23429
23430 if (this.state === 'INIT') {
23431 newPlaylist.syncInfo = {
23432 mediaSequence: newPlaylist.mediaSequence,
23433 time: 0
23434 }; // Setting the date time mapping means mapping the program date time (if available)
23435 // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
23436 // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
23437 // be updated as the playlist is refreshed before the loader starts loading, the
23438 // program date time mapping needs to be updated as well.
23439 //
23440 // This mapping is only done for the main loader because a program date time should
23441 // map equivalently between playlists.
23442
23443 if (this.loaderType_ === 'main') {
23444 this.syncController_.setDateTimeMappingForStart(newPlaylist);
23445 }
23446 }
23447
23448 let oldId = null;
23449
23450 if (oldPlaylist) {
23451 if (oldPlaylist.id) {
23452 oldId = oldPlaylist.id;
23453 } else if (oldPlaylist.uri) {
23454 oldId = oldPlaylist.uri;
23455 }
23456 }
23457
23458 this.logger_(`playlist update [${oldId} => ${newPlaylist.id || newPlaylist.uri}]`);
23459 this.syncController_.updateMediaSequenceMap(newPlaylist, this.currentTime_(), this.loaderType_); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
23460 // in LIVE, we always want to update with new playlists (including refreshes)
23461
23462 this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
23463 // buffering now
23464
23465 if (this.state === 'INIT' && this.couldBeginLoading_()) {
23466 return this.init_();
23467 }
23468
23469 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
23470 if (this.mediaIndex !== null) {
23471 // we must reset/resync the segment loader when we switch renditions and
23472 // the segment loader is already synced to the previous rendition
23473 // We only want to reset the loader here for LLHLS playback, as resetLoader sets fetchAtBuffer_
23474 // to false, resulting in fetching segments at currentTime and causing repeated
23475 // same-segment requests on playlist change. This erroneously drives up the playback watcher
23476 // stalled segment count, as re-requesting segments at the currentTime or browser cached segments
23477 // will not change the buffer.
23478 // Reference for LLHLS fixes: https://github.com/videojs/http-streaming/pull/1201
23479 const isLLHLS = !newPlaylist.endList && typeof newPlaylist.partTargetDuration === 'number';
23480
23481 if (isLLHLS) {
23482 this.resetLoader();
23483 } else {
23484 this.resyncLoader();
23485 }
23486 }
23487
23488 this.currentMediaInfo_ = void 0;
23489 this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
23490
23491 return;
23492 } // we reloaded the same playlist so we are in a live scenario
23493 // and we will likely need to adjust the mediaIndex
23494
23495
23496 const mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
23497 this.logger_(`live window shift [${mediaSequenceDiff}]`); // update the mediaIndex on the SegmentLoader
23498 // this is important because we can abort a request and this value must be
23499 // equal to the last appended mediaIndex
23500
23501 if (this.mediaIndex !== null) {
23502 this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
23503 // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
23504 // new playlist was incremented by 1.
23505
23506 if (this.mediaIndex < 0) {
23507 this.mediaIndex = null;
23508 this.partIndex = null;
23509 } else {
23510 const segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
23511 // unless parts fell off of the playlist for this segment.
23512 // In that case we need to reset partIndex and resync
23513
23514 if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
23515 const mediaIndex = this.mediaIndex;
23516 this.logger_(`currently processing part (index ${this.partIndex}) no longer exists.`);
23517 this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
23518 // as the part was dropped from our current playlists segment.
23519 // The mediaIndex will still be valid so keep that around.
23520
23521 this.mediaIndex = mediaIndex;
23522 }
23523 }
23524 } // update the mediaIndex on the SegmentInfo object
23525 // this is important because we will update this.mediaIndex with this value
23526 // in `handleAppendsDone_` after the segment has been successfully appended
23527
23528
23529 if (segmentInfo) {
23530 segmentInfo.mediaIndex -= mediaSequenceDiff;
23531
23532 if (segmentInfo.mediaIndex < 0) {
23533 segmentInfo.mediaIndex = null;
23534 segmentInfo.partIndex = null;
23535 } else {
23536 // we need to update the referenced segment so that timing information is
23537 // saved for the new playlist's segment, however, if the segment fell off the
23538 // playlist, we can leave the old reference and just lose the timing info
23539 if (segmentInfo.mediaIndex >= 0) {
23540 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
23541 }
23542
23543 if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
23544 segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
23545 }
23546 }
23547 }
23548
23549 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
23550 }
23551 /**
23552 * Prevent the loader from fetching additional segments. If there
23553 * is a segment request outstanding, it will finish processing
23554 * before the loader halts. A segment loader can be unpaused by
23555 * calling load().
23556 */
23557
23558
23559 pause() {
23560 if (this.checkBufferTimeout_) {
23561 window.clearTimeout(this.checkBufferTimeout_);
23562 this.checkBufferTimeout_ = null;
23563 }
23564 }
23565 /**
23566 * Returns whether the segment loader is fetching additional
23567 * segments when given the opportunity. This property can be
23568 * modified through calls to pause() and load().
23569 */
23570
23571
23572 paused() {
23573 return this.checkBufferTimeout_ === null;
23574 }
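 // Illustrative sketch (not part of the library): pause() and paused() above
 // piggyback on the buffer-check timer rather than a separate flag:
 //
 //   loader.load();   // schedules monitorBuffer_, sets checkBufferTimeout_
 //   loader.paused(); // => false
 //   loader.pause();  // clears the timeout
 //   loader.paused(); // => true, a null timeout means "not fetching"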
23575 /**
23576 * Delete all the buffered data and reset the SegmentLoader
23577 *
23578 * @param {Function} [done] an optional callback to be executed when the remove
23579 * operation is complete
23580 */
23581
23582
23583 resetEverything(done) {
23584 this.ended_ = false;
23585 this.activeInitSegmentId_ = null;
23586 this.appendInitSegment_ = {
23587 audio: true,
23588 video: true
23589 };
23590 this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
23591 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
23592 // we then clamp the value to duration if necessary.
23593
23594 this.remove(0, Infinity, done); // clears fmp4 captions
23595
23596 if (this.transmuxer_) {
23597 this.transmuxer_.postMessage({
23598 action: 'clearAllMp4Captions'
23599 }); // reset the cache in the transmuxer
23600
23601 this.transmuxer_.postMessage({
23602 action: 'reset'
23603 });
23604 }
23605 }
23606 /**
23607 * Force the SegmentLoader to resync and start loading around the currentTime instead
23608 * of starting at the end of the buffer
23609 *
23610 * Useful for fast quality changes
23611 */
23612
23613
23614 resetLoader() {
23615 this.fetchAtBuffer_ = false;
23616 this.resyncLoader();
23617 }
23618 /**
23619 * Force the SegmentLoader to restart synchronization and make a conservative guess
23620 * before returning to the simple walk-forward method
23621 */
23622
23623
23624 resyncLoader() {
23625 if (this.transmuxer_) {
23626 // need to clear out any cached data to prepare for the new segment
23627 segmentTransmuxer.reset(this.transmuxer_);
23628 }
23629
23630 this.mediaIndex = null;
23631 this.partIndex = null;
23632 this.syncPoint_ = null;
23633 this.isPendingTimestampOffset_ = false;
23634 this.shouldForceTimestampOffsetAfterResync_ = true;
23635 this.callQueue_ = [];
23636 this.loadQueue_ = [];
23637 this.metadataQueue_.id3 = [];
23638 this.metadataQueue_.caption = [];
23639 this.abort();
23640
23641 if (this.transmuxer_) {
23642 this.transmuxer_.postMessage({
23643 action: 'clearParsedMp4Captions'
23644 });
23645 }
23646 }
23647 /**
23648 * Remove any data in the source buffer between start and end times
23649 *
23650 * @param {number} start - the start time of the region to remove from the buffer
23651 * @param {number} end - the end time of the region to remove from the buffer
 23652 * @param {Function} [done] - an optional callback to be executed when the remove
 23653 * operation is complete
 23654 * @param {boolean} force - force all remove operations to happen
23655 */
23656
23657
23658 remove(start, end, done = () => {}, force = false) {
23659 // clamp end to duration if we need to remove everything.
23660 // This is due to a browser bug that causes issues if we remove to Infinity.
23661 // videojs/videojs-contrib-hls#1225
23662 if (end === Infinity) {
23663 end = this.duration_();
23664 } // skip removes that would throw an error
23665 // commonly happens during a rendition switch at the start of a video
23666 // from start 0 to end 0
23667
23668
23669 if (end <= start) {
 23670 this.logger_(`skipping remove because end ${end} is <= start ${start}`);
23671 return;
23672 }
23673
23674 if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
23675 this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
23676
23677 return;
23678 } // set it to one to complete this function's removes
23679
23680
23681 let removesRemaining = 1;
23682
23683 const removeFinished = () => {
23684 removesRemaining--;
23685
23686 if (removesRemaining === 0) {
23687 done();
23688 }
23689 };
23690
23691 if (force || !this.audioDisabled_) {
23692 removesRemaining++;
23693 this.sourceUpdater_.removeAudio(start, end, removeFinished);
23694 } // While it would be better to only remove video if the main loader has video, this
23695 // should be safe with audio only as removeVideo will call back even if there's no
23696 // video buffer.
23697 //
23698 // In theory we can check to see if there's video before calling the remove, but in
23699 // the event that we're switching between renditions and from video to audio only
23700 // (when we add support for that), we may need to clear the video contents despite
23701 // what the new media will contain.
23702
23703
23704 if (force || this.loaderType_ === 'main') {
23705 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
23706 removesRemaining++;
23707 this.sourceUpdater_.removeVideo(start, end, removeFinished);
23708 } // remove any captions and ID3 tags
23709
23710
23711 for (const track in this.inbandTextTracks_) {
23712 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
23713 }
23714
23715 removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
23716
23717 removeFinished();
23718 }
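 // Note on the counting pattern above (illustrative, not part of the
 // library): removesRemaining starts at 1 so the trailing removeFinished()
 // accounts for this function's own synchronous pass; each asynchronous
 // sourceUpdater_ removal increments the counter first. For a main loader
 // with audio enabled:
 //
 //   removesRemaining = 1 (+1 audio) (+1 video) = 3
 //   // removeFinished() runs three times, and only the third calls done()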
23719 /**
23720 * (re-)schedule monitorBufferTick_ to run as soon as possible
23721 *
23722 * @private
23723 */
23724
23725
23726 monitorBuffer_() {
23727 if (this.checkBufferTimeout_) {
23728 window.clearTimeout(this.checkBufferTimeout_);
23729 }
23730
23731 this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), 1);
23732 }
23733 /**
23734 * As long as the SegmentLoader is in the READY state, periodically
23735 * invoke fillBuffer_().
23736 *
23737 * @private
23738 */
23739
23740
23741 monitorBufferTick_() {
23742 if (this.state === 'READY') {
23743 this.fillBuffer_();
23744 }
23745
23746 if (this.checkBufferTimeout_) {
23747 window.clearTimeout(this.checkBufferTimeout_);
23748 }
23749
23750 this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
23751 }
23752 /**
 23753 * fill the buffer with segments unless the sourceBuffers are
23754 * currently updating
23755 *
23756 * Note: this function should only ever be called by monitorBuffer_
23757 * and never directly
23758 *
23759 * @private
23760 */
23761
23762
23763 fillBuffer_() {
23764 // TODO since the source buffer maintains a queue, and we shouldn't call this function
23765 // except when we're ready for the next segment, this check can most likely be removed
23766 if (this.sourceUpdater_.updating()) {
23767 return;
23768 } // see if we need to begin loading immediately
23769
23770
23771 const segmentInfo = this.chooseNextRequest_();
23772
23773 if (!segmentInfo) {
23774 return;
23775 }
23776
23777 if (typeof segmentInfo.timestampOffset === 'number') {
23778 this.isPendingTimestampOffset_ = false;
23779 this.timelineChangeController_.pendingTimelineChange({
23780 type: this.loaderType_,
23781 from: this.currentTimeline_,
23782 to: segmentInfo.timeline
23783 });
23784 }
23785
23786 this.loadSegment_(segmentInfo);
23787 }
23788 /**
23789 * Determines if we should call endOfStream on the media source based
 23790 * on the state of the buffer or if the appended segment was the final
23791 * segment in the playlist.
23792 *
23793 * @param {number} [mediaIndex] the media index of segment we last appended
23794 * @param {Object} [playlist] a media playlist object
23795 * @return {boolean} do we need to call endOfStream on the MediaSource
23796 */
23797
23798
23799 isEndOfStream_(mediaIndex = this.mediaIndex, playlist = this.playlist_, partIndex = this.partIndex) {
23800 if (!playlist || !this.mediaSource_) {
23801 return false;
23802 }
23803
23804 const segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based
23805
23806 const appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.
23807
23808 const appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
23809 // so that MediaSources can trigger the `ended` event when it runs out of
 23810 // buffered data instead of waiting for more data to be appended
23811
23812 return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
23813 }
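 // Illustrative sketch (not part of the library) for a hypothetical
 // 5-segment VOD playlist whose last segment has no parts:
 //
 //   loader.isEndOfStream_(4, playlist, null);
 //   // => true when playlist.endList is set, mediaSource_.readyState is
 //   //    'open', and 4 + 1 === playlist.segments.length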
23814 /**
23815 * Determines what request should be made given current segment loader state.
23816 *
23817 * @return {Object} a request object that describes the segment/part to load
23818 */
23819
23820
23821 chooseNextRequest_() {
23822 const buffered = this.buffered_();
23823 const bufferedEnd = lastBufferedEnd(buffered) || 0;
23824 const bufferedTime = timeAheadOf(buffered, this.currentTime_());
23825 const preloaded = !this.hasPlayed_() && bufferedTime >= 1;
23826 const haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
23827 const segments = this.playlist_.segments; // return no segment if:
23828 // 1. we don't have segments
23829 // 2. The video has not yet played and we already downloaded a segment
23830 // 3. we already have enough buffered time
23831
23832 if (!segments.length || preloaded || haveEnoughBuffer) {
23833 return null;
23834 }
23835
23836 this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_(), this.loaderType_);
23837 const next = {
23838 partIndex: null,
23839 mediaIndex: null,
23840 startOfSegment: null,
23841 playlist: this.playlist_,
23842 isSyncRequest: Boolean(!this.syncPoint_)
23843 };
23844
23845 if (next.isSyncRequest) {
23846 next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
 23847 this.logger_(`choose next request. Cannot find sync point. Falling back to media index: ${next.mediaIndex}`);
23848 } else if (this.mediaIndex !== null) {
23849 const segment = segments[this.mediaIndex];
23850 const partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
23851 next.startOfSegment = segment.end ? segment.end : bufferedEnd;
23852
23853 if (segment.parts && segment.parts[partIndex + 1]) {
23854 next.mediaIndex = this.mediaIndex;
23855 next.partIndex = partIndex + 1;
23856 } else {
23857 next.mediaIndex = this.mediaIndex + 1;
23858 }
23859 } else {
23860 // Find the segment containing the end of the buffer or current time.
23861 const {
23862 segmentIndex,
23863 startTime,
23864 partIndex
23865 } = Playlist.getMediaInfoForTime({
23866 exactManifestTimings: this.exactManifestTimings,
23867 playlist: this.playlist_,
23868 currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
23869 startingPartIndex: this.syncPoint_.partIndex,
23870 startingSegmentIndex: this.syncPoint_.segmentIndex,
23871 startTime: this.syncPoint_.time
23872 });
23873 next.getMediaInfoForTime = this.fetchAtBuffer_ ? `bufferedEnd ${bufferedEnd}` : `currentTime ${this.currentTime_()}`;
23874 next.mediaIndex = segmentIndex;
23875 next.startOfSegment = startTime;
23876 next.partIndex = partIndex;
23877 this.logger_(`choose next request. Playlist switched and we have a sync point. Media Index: ${next.mediaIndex} `);
23878 }
23879
23880 const nextSegment = segments[next.mediaIndex];
23881 let nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
23882 // the next partIndex is invalid do not choose a next segment.
23883
23884 if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
23885 return null;
23886 } // if the next segment has parts, and we don't have a partIndex.
23887 // Set partIndex to 0
23888
23889
23890 if (typeof next.partIndex !== 'number' && nextSegment.parts) {
23891 next.partIndex = 0;
23892 nextPart = nextSegment.parts[0];
23893 } // independentSegments applies to every segment in a playlist. If independentSegments appears in a main playlist,
23894 // it applies to each segment in each media playlist.
23895 // https://datatracker.ietf.org/doc/html/draft-pantos-http-live-streaming-23#section-4.3.5.1
23896
23897
23898 const hasIndependentSegments = this.vhs_.playlists && this.vhs_.playlists.main && this.vhs_.playlists.main.independentSegments || this.playlist_.independentSegments; // if we have no buffered data then we need to make sure
23899 // that the next part we append is "independent" if possible.
23900 // So we check if the previous part is independent, and request
23901 // it if it is.
23902
23903 if (!bufferedTime && nextPart && !hasIndependentSegments && !nextPart.independent) {
23904 if (next.partIndex === 0) {
23905 const lastSegment = segments[next.mediaIndex - 1];
23906 const lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];
23907
23908 if (lastSegmentLastPart && lastSegmentLastPart.independent) {
23909 next.mediaIndex -= 1;
23910 next.partIndex = lastSegment.parts.length - 1;
23911 next.independent = 'previous segment';
23912 }
23913 } else if (nextSegment.parts[next.partIndex - 1].independent) {
23914 next.partIndex -= 1;
23915 next.independent = 'previous part';
23916 }
23917 }
23918
23919 const ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
23920 // 1. this is the last segment in the playlist
23921 // 2. end of stream has been called on the media source already
23922 // 3. the player is not seeking
23923
23924 if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
23925 return null;
23926 }
23927
23928 if (this.shouldForceTimestampOffsetAfterResync_) {
23929 this.shouldForceTimestampOffsetAfterResync_ = false;
23930 next.forceTimestampOffset = true;
23931 this.logger_('choose next request. Force timestamp offset after loader resync');
23932 }
23933
23934 return this.generateSegmentInfo_(next);
23935 }
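 // For reference, the decision order inside chooseNextRequest_ above,
 // condensed:
 //
 //   1. return null when there are no segments, enough buffer is queued, or
 //      a second of content is preloaded before playback has started;
 //   2. no sync point       -> getSyncSegmentCandidate (isSyncRequest);
 //   3. walking forward     -> next part/segment after this.mediaIndex;
 //   4. after a switch/seek -> Playlist.getMediaInfoForTime at bufferedEnd
 //                             or currentTime;
 //   then step back to the nearest independent part when starting from an
 //   empty buffer, and return null at end of stream unless seeking.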
23936
23937 generateSegmentInfo_(options) {
23938 const {
23939 independent,
23940 playlist,
23941 mediaIndex,
23942 startOfSegment,
23943 isSyncRequest,
23944 partIndex,
23945 forceTimestampOffset,
23946 getMediaInfoForTime
23947 } = options;
23948 const segment = playlist.segments[mediaIndex];
23949 const part = typeof partIndex === 'number' && segment.parts[partIndex];
23950 const segmentInfo = {
23951 requestId: 'segment-loader-' + Math.random(),
23952 // resolve the segment URL relative to the playlist
23953 uri: part && part.resolvedUri || segment.resolvedUri,
23954 // the segment's mediaIndex at the time it was requested
23955 mediaIndex,
23956 partIndex: part ? partIndex : null,
23957 // whether or not to update the SegmentLoader's state with this
23958 // segment's mediaIndex
23959 isSyncRequest,
23960 startOfSegment,
23961 // the segment's playlist
23962 playlist,
23963 // unencrypted bytes of the segment
23964 bytes: null,
23965 // when a key is defined for this segment, the encrypted bytes
23966 encryptedBytes: null,
23967 // The target timestampOffset for this segment when we append it
23968 // to the source buffer
23969 timestampOffset: null,
23970 // The timeline that the segment is in
23971 timeline: segment.timeline,
23972 // The expected duration of the segment in seconds
23973 duration: part && part.duration || segment.duration,
23974 // retain the segment in case the playlist updates while doing an async process
23975 segment,
23976 part,
23977 byteLength: 0,
23978 transmuxer: this.transmuxer_,
23979 // type of getMediaInfoForTime that was used to get this segment
23980 getMediaInfoForTime,
23981 independent
23982 };
23983 const overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
23984 segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
23985 segmentTimeline: segment.timeline,
23986 currentTimeline: this.currentTimeline_,
23987 startOfSegment,
23988 buffered: this.buffered_(),
23989 overrideCheck
23990 });
23991 const audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());
23992
23993 if (typeof audioBufferedEnd === 'number') {
23994 // since the transmuxer is using the actual timing values, but the buffer is
23995 // adjusted by the timestamp offset, we must adjust the value here
23996 segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
23997 }
23998
23999 if (this.sourceUpdater_.videoBuffered().length) {
24000 segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
 24001 // adjusted by the timestamp offset, we must adjust the value here
24002 this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
24003 }
24004
24005 return segmentInfo;
24006 } // get the timestampoffset for a segment,
24007 // added so that vtt segment loader can override and prevent
24008 // adding timestamp offsets.
24009
24010
24011 timestampOffsetForSegment_(options) {
24012 return timestampOffsetForSegment(options);
24013 }
24014 /**
24015 * Determines if the network has enough bandwidth to complete the current segment
24016 * request in a timely manner. If not, the request will be aborted early and bandwidth
24017 * updated to trigger a playlist switch.
24018 *
24019 * @param {Object} stats
24020 * Object containing stats about the request timing and size
24021 * @private
24022 */
24023
24024
24025 earlyAbortWhenNeeded_(stats) {
24026 if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
24027 // TODO: Replace using timeout with a boolean indicating whether this playlist is
24028 // the lowestEnabledRendition.
24029 !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
24030 !this.playlist_.attributes.BANDWIDTH) {
24031 return;
24032 } // Wait at least 1 second since the first byte of data has been received before
24033 // using the calculated bandwidth from the progress event to allow the bitrate
24034 // to stabilize
24035
24036
24037 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
24038 return;
24039 }
24040
24041 const currentTime = this.currentTime_();
24042 const measuredBandwidth = stats.bandwidth;
24043 const segmentDuration = this.pendingSegment_.duration;
24044 const requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
24045 // if we are only left with less than 1 second when the request completes.
24046 // A negative timeUntilRebuffer indicates we are already rebuffering
24047
24048 const timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
24049 // is larger than the estimated time until the player runs out of forward buffer
24050
24051 if (requestTimeRemaining <= timeUntilRebuffer$1) {
24052 return;
24053 }
24054
24055 const switchCandidate = minRebufferMaxBandwidthSelector({
24056 main: this.vhs_.playlists.main,
24057 currentTime,
24058 bandwidth: measuredBandwidth,
24059 duration: this.duration_(),
24060 segmentDuration,
24061 timeUntilRebuffer: timeUntilRebuffer$1,
24062 currentTimeline: this.currentTimeline_,
24063 syncController: this.syncController_
24064 });
24065
24066 if (!switchCandidate) {
24067 return;
24068 }
24069
24070 const rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
24071 const timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
24072 let minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
24073 // potential round trip time of the new request so that we are not too aggressive
24074 // with switching to a playlist that might save us a fraction of a second.
24075
24076 if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
24077 minimumTimeSaving = 1;
24078 }
24079
24080 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
24081 return;
24082 } // set the bandwidth to that of the desired playlist being sure to scale by
24083 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
24084 // don't trigger a bandwidthupdate as the bandwidth is artificial
24085
24086
24087 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
24088 this.trigger('earlyabort');
24089 }
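  // Worked example for the heuristic above (hypothetical numbers): with 5s of
  // forward buffer at playbackRate 1, timeUntilRebuffer = 5 - 1 = 4s. If the
  // in-flight download still needs an estimated requestTimeRemaining of 8s,
  // rebufferingImpact = 8 - 4 = 4s. A switch candidate whose own
  // rebufferingImpact is 1s saves 3s, which clears the 0.5s minimum, so the
  // request is aborted and 'earlyabort' is triggered.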
24090
24091 handleAbort_(segmentInfo) {
24092 this.logger_(`Aborting ${segmentInfoString(segmentInfo)}`);
24093 this.mediaRequestsAborted += 1;
24094 }
24095 /**
24096 * XHR `progress` event handler
24097 *
24098 * @param {Event} event
24099 * The XHR `progress` event
24100 * @param {Object} simpleSegment
24101 * A simplified segment object copy
24102 * @private
24103 */
24104
24105
24106 handleProgress_(event, simpleSegment) {
24107 this.earlyAbortWhenNeeded_(simpleSegment.stats);
24108
24109 if (this.checkForAbort_(simpleSegment.requestId)) {
24110 return;
24111 }
24112
24113 this.trigger('progress');
24114 }
24115
24116 handleTrackInfo_(simpleSegment, trackInfo) {
24117 this.earlyAbortWhenNeeded_(simpleSegment.stats);
24118
24119 if (this.checkForAbort_(simpleSegment.requestId)) {
24120 return;
24121 }
24122
24123 if (this.checkForIllegalMediaSwitch(trackInfo)) {
24124 return;
24125 }
24126
24127 trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
24128 // Guard against cases where we're not getting track info at all until we are
24129 // certain that all streams will provide it.
24130
24131 if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
24132 this.appendInitSegment_ = {
24133 audio: true,
24134 video: true
24135 };
24136 this.startingMediaInfo_ = trackInfo;
24137 this.currentMediaInfo_ = trackInfo;
24138 this.logger_('trackinfo update', trackInfo);
24139 this.trigger('trackinfo');
24140 } // trackinfo may cause an abort if the trackinfo
24141 // causes a codec change to an unsupported codec.
24142
24143
24144 if (this.checkForAbort_(simpleSegment.requestId)) {
24145 return;
24146 } // set trackinfo on the pending segment so that
24147 // it can append.
24148
24149
24150 this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
24151
24152 if (this.hasEnoughInfoToAppend_()) {
24153 this.processCallQueue_();
24154 }
24155 }
24156
24157 handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
24158 this.earlyAbortWhenNeeded_(simpleSegment.stats);
24159
24160 if (this.checkForAbort_(simpleSegment.requestId)) {
24161 return;
24162 }
24163
24164 const segmentInfo = this.pendingSegment_;
24165 const timingInfoProperty = timingInfoPropertyForMedia(mediaType);
24166 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
24167 segmentInfo[timingInfoProperty][timeType] = time;
24168 this.logger_(`timinginfo: ${mediaType} - ${timeType} - ${time}`); // check if any calls were waiting on the timing info
24169
24170 if (this.hasEnoughInfoToAppend_()) {
24171 this.processCallQueue_();
24172 }
24173 }
24174
24175 handleCaptions_(simpleSegment, captionData) {
24176 this.earlyAbortWhenNeeded_(simpleSegment.stats);
24177
24178 if (this.checkForAbort_(simpleSegment.requestId)) {
24179 return;
24180 } // This could only happen with fmp4 segments, and even then it
24181 // should not happen in general
24182
24183
24184 if (captionData.length === 0) {
24185 this.logger_('SegmentLoader received no captions from a caption event');
24186 return;
24187 }
24188
24189 const segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
24190 // can be adjusted by the timestamp offset
24191
24192 if (!segmentInfo.hasAppendedData_) {
24193 this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
24194 return;
24195 }
24196
24197 const timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
24198 const captionTracks = {}; // get total start/end and captions for each track/stream
24199
24200 captionData.forEach(caption => {
24201 // caption.stream is actually a track name...
24202 // set to the existing values in tracks or default values
24203 captionTracks[caption.stream] = captionTracks[caption.stream] || {
24204 // Infinity, as any other value will be less than this
24205 startTime: Infinity,
24206 captions: [],
24207 // 0, as any other value will be more than this
24208 endTime: 0
24209 };
24210 const captionTrack = captionTracks[caption.stream];
24211 captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
24212 captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
24213 captionTrack.captions.push(caption);
24214 });
24215 Object.keys(captionTracks).forEach(trackName => {
24216 const {
24217 startTime,
24218 endTime,
24219 captions
24220 } = captionTracks[trackName];
24221 const inbandTextTracks = this.inbandTextTracks_;
24222 this.logger_(`adding cues from ${startTime} -> ${endTime} for ${trackName}`);
24223 createCaptionsTrackIfNotExists(inbandTextTracks, this.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
24224 // We do this because a rendition change that also changes the timescale for captions
24225 // will result in captions being re-parsed for certain segments. If we add them again
24226 // without clearing we will have two of the same captions visible.
24227
24228 removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
24229 addCaptionData({
24230 captionArray: captions,
24231 inbandTextTracks,
24232 timestampOffset
24233 });
24234 }); // Reset stored captions since we added parsed
24235 // captions to a text track at this point
24236
24237 if (this.transmuxer_) {
24238 this.transmuxer_.postMessage({
24239 action: 'clearParsedMp4Captions'
24240 });
24241 }
24242 }
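  // Sketch of the per-track aggregation above (hypothetical cues): cues at
  // [2, 4] and [3, 6] on track 'CC1' with a timestampOffset of 10 collapse to
  //
  //   { startTime: 12, endTime: 16, captions: [cueA, cueB] }
  //
  // so removeCuesFromTrack(12, 16, track) clears that window once before
  // addCaptionData re-adds both parsed cues.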
24243
24244 handleId3_(simpleSegment, id3Frames, dispatchType) {
24245 this.earlyAbortWhenNeeded_(simpleSegment.stats);
24246
24247 if (this.checkForAbort_(simpleSegment.requestId)) {
24248 return;
24249 }
24250
24251 const segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
24252
24253 if (!segmentInfo.hasAppendedData_) {
24254 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
24255 return;
24256 }
24257
24258 this.addMetadataToTextTrack(dispatchType, id3Frames, this.duration_());
24259 }
24260
24261 processMetadataQueue_() {
24262 this.metadataQueue_.id3.forEach(fn => fn());
24263 this.metadataQueue_.caption.forEach(fn => fn());
24264 this.metadataQueue_.id3 = [];
24265 this.metadataQueue_.caption = [];
24266 }
24267
24268 processCallQueue_() {
24269 const callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
24270 // functions may check the length of the load queue and default to pushing themselves
24271 // back onto the queue.
24272
24273 this.callQueue_ = [];
24274 callQueue.forEach(fun => fun());
24275 }
24276
24277 processLoadQueue_() {
24278 const loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
24279 // functions may check the length of the load queue and default to pushing themselves
24280 // back onto the queue.
24281
24282 this.loadQueue_ = [];
24283 loadQueue.forEach(fun => fun());
24284 }
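  // Why both processors above snapshot and clear before running: a queued
  // function may check the queue's length to decide whether to defer itself
  // again. Clearing first means it sees an empty queue, and anything it
  // pushes back is kept for the next processing pass rather than being wiped
  // after the loop.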
24285 /**
24286 * Determines whether the loader has enough info to load the next segment.
24287 *
24288 * @return {boolean}
24289 * Whether or not the loader has enough info to load the next segment
24290 */
24291
24292
24293 hasEnoughInfoToLoad_() {
24294 // Since primary timing goes by video, only the audio loader potentially needs to wait
24295 // to load.
24296 if (this.loaderType_ !== 'audio') {
24297 return true;
24298 }
24299
24300 const segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
24301 // enough info to load.
24302
24303 if (!segmentInfo) {
24304 return false;
24305 } // The first segment can and should be loaded immediately so that source buffers are
24306 // created together (before appending). Source buffer creation uses the presence of
24307 // audio and video data to determine whether to create audio/video source buffers, and
24308 // uses processed (transmuxed or parsed) media to determine the types required.
24309
24310
24311 if (!this.getCurrentMediaInfo_()) {
24312 return true;
24313 }
24314
24315 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
24316 // can be requested and downloaded and only wait before it is transmuxed or parsed.
24317 // But in practice, there are a few reasons why it is better to wait until a loader
24318 // is ready to append that segment before requesting and downloading:
24319 //
24320 // 1. Because audio and main loaders cross discontinuities together, if this loader
24321 // is waiting for the other to catch up, then instead of requesting another
24322 // segment and using up more bandwidth, by not yet loading, more bandwidth is
24323 // allotted to the loader currently behind.
24324 // 2. media-segment-request doesn't have to have logic to consider whether a segment
24325 // is ready to be processed or not, isolating the queueing behavior to the loader.
24326 // 3. The audio loader bases some of its segment properties on timing information
24327 // provided by the main loader, meaning that, if the logic for waiting on
24328 // processing was in media-segment-request, then it would also need to know how
24329 // to re-generate the segment information after the main loader caught up.
24330 shouldWaitForTimelineChange({
24331 timelineChangeController: this.timelineChangeController_,
24332 currentTimeline: this.currentTimeline_,
24333 segmentTimeline: segmentInfo.timeline,
24334 loaderType: this.loaderType_,
24335 audioDisabled: this.audioDisabled_
24336 })) {
24337 return false;
24338 }
24339
24340 return true;
24341 }
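  // Decision summary for the checks above (hypothetical audio-loader states):
  //
  //   no pendingSegment_                          -> false (run fillBuffer first)
  //   pendingSegment_ set, no track info yet      -> true  (first segment)
  //   segment on a timeline main hasn't reached   -> false (wait, save bandwidth)
  //   segment on the current/shared timeline      -> true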
24342
24343 getCurrentMediaInfo_(segmentInfo = this.pendingSegment_) {
24344 return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
24345 }
24346
24347 getMediaInfo_(segmentInfo = this.pendingSegment_) {
24348 return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
24349 }
24350
24351 getPendingSegmentPlaylist() {
24352 return this.pendingSegment_ ? this.pendingSegment_.playlist : null;
24353 }
24354
24355 hasEnoughInfoToAppend_() {
24356 if (!this.sourceUpdater_.ready()) {
24357 return false;
24358 } // If content needs to be removed or the loader is waiting on an append reattempt,
24359 // then no additional content should be appended until the prior append is resolved.
24360
24361
24362 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
24363 return false;
24364 }
24365
24366 const segmentInfo = this.pendingSegment_;
24367 const trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
24368 // we do not have information on this specific
24369 // segment yet
24370
24371 if (!segmentInfo || !trackInfo) {
24372 return false;
24373 }
24374
24375 const {
24376 hasAudio,
24377 hasVideo,
24378 isMuxed
24379 } = trackInfo;
24380
24381 if (hasVideo && !segmentInfo.videoTimingInfo) {
24382 return false;
24383 } // muxed content only relies on video timing information for now.
24384
24385
24386 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
24387 return false;
24388 }
24389
24390 if (shouldWaitForTimelineChange({
24391 timelineChangeController: this.timelineChangeController_,
24392 currentTimeline: this.currentTimeline_,
24393 segmentTimeline: segmentInfo.timeline,
24394 loaderType: this.loaderType_,
24395 audioDisabled: this.audioDisabled_
24396 })) {
24397 return false;
24398 }
24399
24400 return true;
24401 }
24402
24403 handleData_(simpleSegment, result) {
24404 this.earlyAbortWhenNeeded_(simpleSegment.stats);
24405
24406 if (this.checkForAbort_(simpleSegment.requestId)) {
24407 return;
24408 } // If there's anything in the call queue, then this data came later and should be
24409 // executed after the calls currently queued.
24410
24411
24412 if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
24413 this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
24414 return;
24415 }
24416
24417 const segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
24418
24419 this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
24420
24421 this.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
24422 // logic may change behavior depending on the state, and changing state too early may
24423 // inflate our estimates of bandwidth. In the future this should be re-examined to
24424 // note more granular states.
24425 // don't process and append data if the mediaSource is closed
24426
24427 if (this.mediaSource_.readyState === 'closed') {
24428 return;
24429 } // if this request included an initialization segment, save that data
24430 // to the initSegment cache
24431
24432
24433 if (simpleSegment.map) {
24434 simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
24435
24436 segmentInfo.segment.map = simpleSegment.map;
24437 } // if this request included a segment key, save that data in the cache
24438
24439
24440 if (simpleSegment.key) {
24441 this.segmentKey(simpleSegment.key, true);
24442 }
24443
24444 segmentInfo.isFmp4 = simpleSegment.isFmp4;
24445 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
24446
24447 if (segmentInfo.isFmp4) {
24448 this.trigger('fmp4');
24449 segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
24450 } else {
24451 const trackInfo = this.getCurrentMediaInfo_();
24452 const useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
24453 let firstVideoFrameTimeForData;
24454
24455 if (useVideoTimingInfo) {
24456 firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
24457 } // Segment loader knows more about segment timing than the transmuxer (in certain
24458 // aspects), so make any changes required for a more accurate start time.
24459 // Don't set the end time yet, as the segment may not be finished processing.
24460
24461
24462 segmentInfo.timingInfo.start = this.trueSegmentStart_({
24463 currentStart: segmentInfo.timingInfo.start,
24464 playlist: segmentInfo.playlist,
24465 mediaIndex: segmentInfo.mediaIndex,
24466 currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
24467 useVideoTimingInfo,
24468 firstVideoFrameTimeForData,
24469 videoTimingInfo: segmentInfo.videoTimingInfo,
24470 audioTimingInfo: segmentInfo.audioTimingInfo
24471 });
24472 } // Init segments for audio and video only need to be appended in certain cases. Now
24473 // that data is about to be appended, we can check the final cases to determine
24474 // whether we should append an init segment.
24475
24476
24477 this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
24478 // as we use the start of the segment to offset the best guess (playlist provided)
24479 // timestamp offset.
24480
24481 this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
24482 // be appended or not.
24483
24484 if (segmentInfo.isSyncRequest) {
24485 // first save/update our timing info for this segment.
24486 // this is what allows us to choose an accurate segment
24487 // and the main reason we make a sync request.
24488 this.updateTimingInfoEnd_(segmentInfo);
24489 this.syncController_.saveSegmentTimingInfo({
24490 segmentInfo,
24491 shouldSaveTimelineMapping: this.loaderType_ === 'main'
24492 });
24493 const next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
24494 // after taking into account its timing info, do not append it.
24495
24496 if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
24497 this.logger_('sync segment was incorrect, not appending');
24498 return;
24499 } // otherwise append it like any other segment as our guess was correct.
24500
24501
24502 this.logger_('sync segment was correct, appending');
24503 } // Save some state so that in the future anything waiting on first append (and/or
24504 // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
24505 // we need some notion of whether the timestamp offset or other relevant information
24506 // has had a chance to be set.
24507
24508
24509 segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
24510
24511 this.processMetadataQueue_();
24512 this.appendData_(segmentInfo, result);
24513 }
24514
24515 updateAppendInitSegmentStatus(segmentInfo, type) {
24516 // alt audio doesn't manage timestamp offset
24517 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
24518 // segment for each chunk
24519 !segmentInfo.changedTimestampOffset) {
24520 // if the timestamp offset changed, the timeline may have changed, so we have to re-
24521 // append init segments
24522 this.appendInitSegment_ = {
24523 audio: true,
24524 video: true
24525 };
24526 }
24527
24528 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
24529 // make sure we append init segment on playlist changes, in case the media config
24530 // changed
24531 this.appendInitSegment_[type] = true;
24532 }
24533 }
24534
24535 getInitSegmentAndUpdateState_({
24536 type,
24537 initSegment,
24538 map,
24539 playlist
24540 }) {
24541 // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
24542 // (Section 3) required to parse the applicable Media Segments. It applies to every
24543 // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
24544 // or until the end of the playlist."
24545 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
24546 if (map) {
24547 const id = initSegmentId(map);
24548
24549 if (this.activeInitSegmentId_ === id) {
24550 // don't need to re-append the init segment if the ID matches
24551 return null;
24552 } // a map-specified init segment takes priority over any transmuxed (or otherwise
24553 // obtained) init segment
24554 //
24555 // this also caches the init segment for later use
24556
24557
24558 initSegment = this.initSegmentForMap(map, true).bytes;
24559 this.activeInitSegmentId_ = id;
24560 } // We used to always prepend init segments for video, however, that shouldn't be
24561 // necessary. Instead, we should only append on changes, similar to what we've always
24562 // done for audio. This is more important (though may not be that important) for
24563 // frame-by-frame appending for LHLS, simply because of the increased quantity of
24564 // appends.
24565
24566
24567 if (initSegment && this.appendInitSegment_[type]) {
24568 // Make sure we track the playlist that we last used for the init segment, so that
24569 // we can re-append the init segment in the event that we get data from a new
24570 // playlist. Discontinuities and track changes are handled in other sections.
24571 this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type. Until a change is necessary.
24572
24573 this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
24574 // we are appending the muxer init segment
24575
24576 this.activeInitSegmentId_ = null;
24577 return initSegment;
24578 }
24579
24580 return null;
24581 }
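  // Cache sketch for the EXT-X-MAP handling above (hypothetical calls,
  // assuming appendInitSegment_.video starts out true): two consecutive
  // segments sharing one map produce the same init segment id, so only the
  // first call yields bytes:
  //
  //   getInitSegmentAndUpdateState_({ type: 'video', map, playlist }) // bytes
  //   getInitSegmentAndUpdateState_({ type: 'video', map, playlist }) // null
  //
  // the repeat is suppressed by the appendInitSegment_ flag and the cached
  // activeInitSegmentId_.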
24582
24583 handleQuotaExceededError_({
24584 segmentInfo,
24585 type,
24586 bytes
24587 }, error) {
24588 const audioBuffered = this.sourceUpdater_.audioBuffered();
24589 const videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
24590 // should be cleared out during the buffer removals. However, log in case it helps
24591 // debug.
24592
24593 if (audioBuffered.length > 1) {
24594 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
24595 }
24596
24597 if (videoBuffered.length > 1) {
24598 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
24599 }
24600
24601 const audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
24602 const audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
24603 const videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
24604 const videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
24605
24606 if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
24607 // Can't remove enough buffer to make room for new segment (or the browser doesn't
24608 // allow for appends of segments this size). In the future, it may be possible to
24609 // split up the segment and append in pieces, but for now, error out this playlist
24610 // in an attempt to switch to a more manageable rendition.
24611 this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + `Appended byte length: ${bytes.byteLength}, ` + `audio buffer: ${timeRangesToArray(audioBuffered).join(', ')}, ` + `video buffer: ${timeRangesToArray(videoBuffered).join(', ')}, `);
24612 this.error({
24613 message: 'Quota exceeded error with append of a single segment of content',
24614 excludeUntil: Infinity
24615 });
24616 this.trigger('error');
24617 return;
24618 } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
24619 // that the segment-loader should block on future events until this one is handled, so
24620 // that it doesn't keep moving onto further segments. Adding the call to the call
24621 // queue will prevent further appends until waitingOnRemove_ and
24622 // quotaExceededErrorRetryTimeout_ are cleared.
24623 //
24624 // Note that this will only block the current loader. In the case of demuxed content,
24625 // the other load may keep filling as fast as possible. In practice, this should be
24626 // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
24627 // source buffer, or video fills without enough room for audio to append (and without
24628 // the availability of clearing out seconds of back buffer to make room for audio).
24629 // But it might still be good to handle this case in the future as a TODO.
24630
24631
24632 this.waitingOnRemove_ = true;
24633 this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
24634 segmentInfo,
24635 type,
24636 bytes
24637 }));
24638 const currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
24639 // before retrying.
24640
24641 const timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
24642 this.logger_(`On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to ${timeToRemoveUntil}`);
24643 this.remove(0, timeToRemoveUntil, () => {
24644 this.logger_(`On QUOTA_EXCEEDED_ERR, retrying append in ${MIN_BACK_BUFFER}s`);
24645 this.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
24646 // attempts (since we can't clear less than the minimum)
24647
24648 this.quotaExceededErrorRetryTimeout_ = window.setTimeout(() => {
24649 this.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
24650 this.quotaExceededErrorRetryTimeout_ = null;
24651 this.processCallQueue_();
24652 }, MIN_BACK_BUFFER * 1000);
24653 }, true);
24654 }
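  // Recovery flow for the non-fatal branch above: waitingOnRemove_ blocks new
  // appends, the back buffer is removed up to currentTime - MIN_BACK_BUFFER,
  // and only after a further MIN_BACK_BUFFER seconds does the timeout clear
  // quotaExceededErrorRetryTimeout_ and re-process the call queue, so the
  // retried append never races the in-flight removal.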
24655
24656 handleAppendError_({
24657 segmentInfo,
24658 type,
24659 bytes
24660 }, error) {
24661 // if there's no error, nothing to do
24662 if (!error) {
24663 return;
24664 }
24665
24666 if (error.code === QUOTA_EXCEEDED_ERR) {
24667 this.handleQuotaExceededError_({
24668 segmentInfo,
24669 type,
24670 bytes
24671 }); // A quota exceeded error should be recoverable with a future re-append, so no need
24672 // to trigger an append error.
24673
24674 return;
24675 }
24676
24677 this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
24678 this.error(`${type} append of ${bytes.length}b failed for segment ` + `#${segmentInfo.mediaIndex} in playlist ${segmentInfo.playlist.id}`); // If an append errors, we often can't recover.
24679 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
24680 //
24681 // Trigger a special error so that it can be handled separately from normal,
24682 // recoverable errors.
24683
24684 this.trigger('appenderror');
24685 }
24686
24687 appendToSourceBuffer_({
24688 segmentInfo,
24689 type,
24690 initSegment,
24691 data,
24692 bytes
24693 }) {
24694 // If this is a re-append, bytes were already created and don't need to be recreated
24695 if (!bytes) {
24696 const segments = [data];
24697 let byteLength = data.byteLength;
24698
24699 if (initSegment) {
24700 // if the media initialization segment is changing, append it before the content
24701 // segment
24702 segments.unshift(initSegment);
24703 byteLength += initSegment.byteLength;
24704 } // Technically we should be OK appending the init segment separately, however, we
24705 // haven't yet tested that, and prepending is how we have always done things.
24706
24707
24708 bytes = concatSegments({
24709 bytes: byteLength,
24710 segments
24711 });
24712 }
24713
24714 this.sourceUpdater_.appendBuffer({
24715 segmentInfo,
24716 type,
24717 bytes
24718 }, this.handleAppendError_.bind(this, {
24719 segmentInfo,
24720 type,
24721 bytes
24722 }));
24723 }
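  // Byte-layout sketch for the concatenation above (hypothetical sizes,
  // assuming concatSegments returns one combined Uint8Array): an 800-byte
  // init segment plus a 4000-byte media segment become a single 4800-byte
  // buffer with the init segment bytes first:
  //
  //   concatSegments({ bytes: 4800, segments: [initSegment, data] })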
24724
24725 handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
24726 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
24727 return;
24728 }
24729
24730 const segment = this.pendingSegment_.segment;
24731 const timingInfoProperty = `${type}TimingInfo`;
24732
24733 if (!segment[timingInfoProperty]) {
24734 segment[timingInfoProperty] = {};
24735 }
24736
24737 segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
24738 segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
24739 segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
24740 segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
24741 segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
24742
24743 segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
24744 }
24745
24746 appendData_(segmentInfo, result) {
24747 const {
24748 type,
24749 data
24750 } = result;
24751
24752 if (!data || !data.byteLength) {
24753 return;
24754 }
24755
24756 if (type === 'audio' && this.audioDisabled_) {
24757 return;
24758 }
24759
24760 const initSegment = this.getInitSegmentAndUpdateState_({
24761 type,
24762 initSegment: result.initSegment,
24763 playlist: segmentInfo.playlist,
24764 map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
24765 });
24766 this.appendToSourceBuffer_({
24767 segmentInfo,
24768 type,
24769 initSegment,
24770 data
24771 });
24772 }
24773 /**
24774 * load a specific segment from a request into the buffer
24775 *
24776 * @private
24777 */
24778
24779
24780 loadSegment_(segmentInfo) {
24781 this.state = 'WAITING';
24782 this.pendingSegment_ = segmentInfo;
24783 this.trimBackBuffer_(segmentInfo);
24784
24785 if (typeof segmentInfo.timestampOffset === 'number') {
24786 if (this.transmuxer_) {
24787 this.transmuxer_.postMessage({
24788 action: 'clearAllMp4Captions'
24789 });
24790 }
24791 }
24792
24793 if (!this.hasEnoughInfoToLoad_()) {
24794 this.loadQueue_.push(() => {
24795 // regenerate the audioAppendStart, timestampOffset, etc as they
24796 // may have changed since this function was added to the queue.
24797 const options = _extends({}, segmentInfo, {
24798 forceTimestampOffset: true
24799 });
24800
24801 _extends(segmentInfo, this.generateSegmentInfo_(options));
24802
24803 this.isPendingTimestampOffset_ = false;
24804 this.updateTransmuxerAndRequestSegment_(segmentInfo);
24805 });
24806 return;
24807 }
24808
24809 this.updateTransmuxerAndRequestSegment_(segmentInfo);
24810 }
24811
24812 updateTransmuxerAndRequestSegment_(segmentInfo) {
24813 // We'll update the source buffer's timestamp offset once we have transmuxed data, but
24814 // the transmuxer still needs to be updated before then.
24815 //
24816 // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
24817 // offset must be passed to the transmuxer for stream correcting adjustments.
24818 if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
24819 this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
24820
24821 segmentInfo.gopsToAlignWith = [];
24822 this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
24823
24824 this.transmuxer_.postMessage({
24825 action: 'reset'
24826 });
24827 this.transmuxer_.postMessage({
24828 action: 'setTimestampOffset',
24829 timestampOffset: segmentInfo.timestampOffset
24830 });
24831 }
24832
24833 const simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
24834 const isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
24835 const isWalkingForward = this.mediaIndex !== null;
24836 const isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
24837 // the first timeline
24838 segmentInfo.timeline > 0;
24839 const isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
24840 this.logger_(`Requesting ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
24841 // then this init segment has never been seen before and should be appended.
24842 //
24843 // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
24844 // both to true and defer the decision of whether to append the init segment until append time.
24845
24846 if (simpleSegment.map && !simpleSegment.map.bytes) {
24847 this.logger_('going to request init segment.');
24848 this.appendInitSegment_ = {
24849 video: true,
24850 audio: true
24851 };
24852 }
24853
24854 segmentInfo.abortRequests = mediaSegmentRequest({
24855 xhr: this.vhs_.xhr,
24856 xhrOptions: this.xhrOptions_,
24857 decryptionWorker: this.decrypter_,
24858 segment: simpleSegment,
24859 abortFn: this.handleAbort_.bind(this, segmentInfo),
24860 progressFn: this.handleProgress_.bind(this),
24861 trackInfoFn: this.handleTrackInfo_.bind(this),
24862 timingInfoFn: this.handleTimingInfo_.bind(this),
24863 videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
24864 audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
24865 captionsFn: this.handleCaptions_.bind(this),
24866 isEndOfTimeline,
24867 endedTimelineFn: () => {
24868 this.logger_('received endedtimeline callback');
24869 },
24870 id3Fn: this.handleId3_.bind(this),
24871 dataFn: this.handleData_.bind(this),
24872 doneFn: this.segmentRequestFinished_.bind(this),
24873 onTransmuxerLog: ({
24874 message,
24875 level,
24876 stream
24877 }) => {
24878 this.logger_(`${segmentInfoString(segmentInfo)} logged from transmuxer stream ${stream} as a ${level}: ${message}`);
24879 }
24880 });
24881 }
24882 /**
24883 * trim the back buffer so that we don't have too much data
24884 * in the source buffer
24885 *
24886 * @private
24887 *
24888 * @param {Object} segmentInfo - the current segment
24889 */
24890
24891
24892 trimBackBuffer_(segmentInfo) {
24893 const removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
24894 // buffer and a very conservative "garbage collector"
24895 // We manually clear out the old buffer to ensure
24896 // we don't trigger the QuotaExceeded error
24897 // on the source buffer during subsequent appends
24898
24899 if (removeToTime > 0) {
24900 this.remove(0, removeToTime);
24901 }
24902 }
24903 /**
24904 * create a simplified copy of the segment object with just the
24905 * information necessary to perform the XHR and decryption
24906 *
24907 * @private
24908 *
24909 * @param {Object} segmentInfo - the current segment
24910 * @return {Object} a simplified segment object copy
24911 */
24912
24913
24914 createSimplifiedSegmentObj_(segmentInfo) {
24915 const segment = segmentInfo.segment;
24916 const part = segmentInfo.part;
24917 const simpleSegment = {
24918 resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
24919 byterange: part ? part.byterange : segment.byterange,
24920 requestId: segmentInfo.requestId,
24921 transmuxer: segmentInfo.transmuxer,
24922 audioAppendStart: segmentInfo.audioAppendStart,
24923 gopsToAlignWith: segmentInfo.gopsToAlignWith,
24924 part: segmentInfo.part
24925 };
24926 const previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
24927
24928 if (previousSegment && previousSegment.timeline === segment.timeline) {
24929 // The baseStartTime of a segment is used to handle rollover when probing the TS
24930 // segment to retrieve timing information. Since the probe only looks at the media's
24931 // times (e.g., PTS and DTS values of the segment), and doesn't consider the
24932 // player's time (e.g., player.currentTime()), baseStartTime should reflect the
24933 // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
24934 // seconds of media time, so should be used here. The previous segment is used since
24935 // the end of the previous segment should represent the beginning of the current
24936 // segment, so long as they are on the same timeline.
24937 if (previousSegment.videoTimingInfo) {
24938 simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
24939 } else if (previousSegment.audioTimingInfo) {
24940 simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
24941 }
24942 }
24943
24944 if (segment.key) {
24945 // if the media sequence is greater than 2^32, the IV will be incorrect
24946 // assuming 10s segments, that would be about 1300 years
24947 const iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
24948 simpleSegment.key = this.segmentKey(segment.key);
24949 simpleSegment.key.iv = iv;
24950 }
24951
24952 if (segment.map) {
24953 simpleSegment.map = this.initSegmentForMap(segment.map);
24954 }
24955
24956 return simpleSegment;
24957 }
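  // IV sketch for the key handling above: with a playlist mediaSequence of
  // 100 and mediaIndex 3 (hypothetical values), a keyed segment without an
  // explicit IV gets the big-endian segment number as its IV:
  //
  //   new Uint32Array([0, 0, 0, 103])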
24958
24959 saveTransferStats_(stats) {
24960 // every request counts as a media request even if it has been aborted
24961 // or canceled due to a timeout
24962 this.mediaRequests += 1;
24963
24964 if (stats) {
24965 this.mediaBytesTransferred += stats.bytesReceived;
24966 this.mediaTransferDuration += stats.roundTripTime;
24967 }
24968 }
24969
24970 saveBandwidthRelatedStats_(duration, stats) {
24971 // byteLength will be used for throughput, and should be based on bytes received,
24972 // which we only know at the end of the request and should reflect total bytes
24973 // downloaded rather than just bytes processed from components of the segment
24974 this.pendingSegment_.byteLength = stats.bytesReceived;
24975
24976 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
24977 this.logger_(`Ignoring segment's bandwidth because its duration of ${duration}` + ` is less than the min to record ${MIN_SEGMENT_DURATION_TO_SAVE_STATS}`);
24978 return;
24979 }
24980
24981 this.bandwidth = stats.bandwidth;
24982 this.roundTrip = stats.roundTripTime;
24983 }
24984
24985 handleTimeout_() {
24986 // although the VTT segment loader bandwidth isn't really used, it's good to
24987 // maintain functionality between segment loaders
24988 this.mediaRequestsTimedout += 1;
24989 this.bandwidth = 1;
24990 this.roundTrip = NaN;
24991 this.trigger('bandwidthupdate');
24992 this.trigger('timeout');
24993 }
24994 /**
24995 * Handle the callback from the segmentRequest function and set the
24996 * associated SegmentLoader state and errors if necessary
24997 *
24998 * @private
24999 */
25000
25001
25002 segmentRequestFinished_(error, simpleSegment, result) {
25003 // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
25004 // check the call queue directly since this function doesn't need to deal with any
25005 // data, and can continue even if the source buffers are not set up and we didn't get
25006 // any data from the segment
25007 if (this.callQueue_.length) {
25008 this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
25009 return;
25010 }
25011
25012 this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
25013
25014 if (!this.pendingSegment_) {
25015 return;
25016 } // the request was aborted and the SegmentLoader has already started
25017 // another request. this can happen when the timeout for an aborted
25018 // request triggers due to a limitation in the XHR library
25019 // do not count this as any sort of request or we risk double-counting
25020
25021
25022 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
25023 return;
25024 } // an error occurred from the active pendingSegment_ so reset everything
25025
25026
25027 if (error) {
25028 this.pendingSegment_ = null;
25029 this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
25030
25031 if (error.code === REQUEST_ERRORS.ABORTED) {
25032 return;
25033 }
25034
25035 this.pause(); // the error is really just that at least one of the requests timed-out
25036 // set the bandwidth to a very low value and trigger an ABR switch to
25037 // take emergency action
25038
25039 if (error.code === REQUEST_ERRORS.TIMEOUT) {
25040 this.handleTimeout_();
25041 return;
25042 } // if control-flow has arrived here, then the error is real
25043 // emit an error event to exclude the current playlist
25044
25045
25046 this.mediaRequestsErrored += 1;
25047 this.error(error);
25048 this.trigger('error');
25049 return;
25050 }
25051
25052 const segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
25053 // generated for ABR purposes
25054
25055 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
25056 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
25057
25058 if (result.gopInfo) {
25059 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
25060 } // Although we may have already started appending on progress, we shouldn't switch the
25061 // state away from loading until we are officially done loading the segment data.
25062
25063
25064 this.state = 'APPENDING'; // used for testing
25065
25066 this.trigger('appending');
25067 this.waitForAppendsToComplete_(segmentInfo);
25068 }
25069
25070 setTimeMapping_(timeline) {
25071 const timelineMapping = this.syncController_.mappingForTimeline(timeline);
25072
25073 if (timelineMapping !== null) {
25074 this.timeMapping_ = timelineMapping;
25075 }
25076 }
25077
25078 updateMediaSecondsLoaded_(segment) {
25079 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
25080 this.mediaSecondsLoaded += segment.end - segment.start;
25081 } else {
25082 this.mediaSecondsLoaded += segment.duration;
25083 }
25084 }
25085
25086 shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
25087 if (timestampOffset === null) {
25088 return false;
25089 } // note that we're potentially using the same timestamp offset for both video and
25090 // audio
25091
25092
25093 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
25094 return true;
25095 }
25096
25097 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
25098 return true;
25099 }
25100
25101 return false;
25102 }
25103
25104 trueSegmentStart_({
25105 currentStart,
25106 playlist,
25107 mediaIndex,
25108 firstVideoFrameTimeForData,
25109 currentVideoTimestampOffset,
25110 useVideoTimingInfo,
25111 videoTimingInfo,
25112 audioTimingInfo
25113 }) {
25114 if (typeof currentStart !== 'undefined') {
25115 // if start was set once, keep using it
25116 return currentStart;
25117 }
25118
25119 if (!useVideoTimingInfo) {
25120 return audioTimingInfo.start;
25121 }
25122
25123 const previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
25124 // within that segment. Since the transmuxer maintains a cache of incomplete data
25125 // and/or the last frame it has seen, the start time may reflect a frame that starts
25126 // in the previous segment. Check for that case and ensure the start time is
25127 // accurate for the segment.
25128
25129 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
25130 return firstVideoFrameTimeForData;
25131 }
25132
25133 return videoTimingInfo.start;
25134 }
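  // Example for the boundary check above (hypothetical times, zero offset):
  // if the previous segment ends at exactly 10.0s and the first full video
  // frame of the new data also starts at 10.0s, the segments line up and the
  // transmuxer's videoTimingInfo.start is trusted; otherwise the first full
  // frame time (firstVideoFrameTimeForData) is used as the segment start.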
25135
25136 waitForAppendsToComplete_(segmentInfo) {
25137 const trackInfo = this.getCurrentMediaInfo_(segmentInfo);
25138
25139 if (!trackInfo) {
25140 this.error({
25141 message: 'No starting media returned, likely due to an unsupported media format.',
25142 playlistExclusionDuration: Infinity
25143 });
25144 this.trigger('error');
25145 return;
25146 } // Although transmuxing is done, appends may not yet be finished. Throw a marker
25147 // on each queue this loader is responsible for to ensure that the appends are
25148 // complete.
25149
25150
25151 const {
25152 hasAudio,
25153 hasVideo,
25154 isMuxed
25155 } = trackInfo;
25156 const waitForVideo = this.loaderType_ === 'main' && hasVideo;
25157 const waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
25158 segmentInfo.waitingOnAppends = 0; // segments with no data
25159
25160 if (!segmentInfo.hasAppendedData_) {
25161 if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
25162 // When there's no audio or video data in the segment, there's no audio or video
25163 // timing information.
25164 //
25165 // If there's no audio or video timing information, then the timestamp offset
25166 // can't be adjusted to the appropriate value for the transmuxer and source
25167 // buffers.
25168 //
25169 // Therefore, the next segment should be used to set the timestamp offset.
25170 this.isPendingTimestampOffset_ = true;
25171 } // override settings for metadata only segments
25172
25173
25174 segmentInfo.timingInfo = {
25175 start: 0
25176 };
25177 segmentInfo.waitingOnAppends++;
25178
25179 if (!this.isPendingTimestampOffset_) {
25180 // update the timestampoffset
25181 this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
25182 // no video/audio data.
25183
25184 this.processMetadataQueue_();
25185 } // append is "done" instantly with no data.
25186
25187
25188 this.checkAppendsDone_(segmentInfo);
25189 return;
25190 } // Since source updater could call back synchronously, do the increments first.
25191
25192
25193 if (waitForVideo) {
25194 segmentInfo.waitingOnAppends++;
25195 }
25196
25197 if (waitForAudio) {
25198 segmentInfo.waitingOnAppends++;
25199 }
25200
25201 if (waitForVideo) {
25202 this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
25203 }
25204
25205 if (waitForAudio) {
25206 this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
25207 }
25208 }
25209
25210 checkAppendsDone_(segmentInfo) {
25211 if (this.checkForAbort_(segmentInfo.requestId)) {
25212 return;
25213 }
25214
25215 segmentInfo.waitingOnAppends--;
25216
25217 if (segmentInfo.waitingOnAppends === 0) {
25218 this.handleAppendsDone_();
25219 }
25220 }
25221
25222 checkForIllegalMediaSwitch(trackInfo) {
25223 const illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
25224
25225 if (illegalMediaSwitchError) {
25226 this.error({
25227 message: illegalMediaSwitchError,
25228 playlistExclusionDuration: Infinity
25229 });
25230 this.trigger('error');
25231 return true;
25232 }
25233
25234 return false;
25235 }
25236
25237 updateSourceBufferTimestampOffset_(segmentInfo) {
25238 if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
25239 // priority, timing-wise, so we must wait
25240 typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
25241 segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
25242 this.loaderType_ !== 'main') {
25243 return;
25244 }
25245
25246 let didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
25247 // the timing info here comes from video. In the event that the audio is longer than
25248 // the video, this will trim the start of the audio.
25249 // This also trims any offset from 0 at the beginning of the media
25250
25251 segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
25252 videoTimingInfo: segmentInfo.segment.videoTimingInfo,
25253 audioTimingInfo: segmentInfo.segment.audioTimingInfo,
25254 timingInfo: segmentInfo.timingInfo
25255 }); // In the event that there are part segment downloads, each will try to update the
25256 // timestamp offset. Retaining this bit of state prevents us from updating in the
25257 // future (within the same segment), however, there may be a better way to handle it.
25258
25259 segmentInfo.changedTimestampOffset = true;
25260
25261 if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
25262 this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
25263 didChange = true;
25264 }
25265
25266 if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
25267 this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
25268 didChange = true;
25269 }
25270
25271 if (didChange) {
25272 this.trigger('timestampoffset');
25273 }
25274 }
25275
25276 getSegmentStartTimeForTimestampOffsetCalculation_({
25277 videoTimingInfo,
25278 audioTimingInfo,
25279 timingInfo
25280 }) {
25281 if (!this.useDtsForTimestampOffset_) {
25282 return timingInfo.start;
25283 }
25284
25285 if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {
25286 return videoTimingInfo.transmuxedDecodeStart;
25287 } // handle audio only
25288
25289
25290 if (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {
25291 return audioTimingInfo.transmuxedDecodeStart;
25292 } // handle content not transmuxed (e.g., MP4)
25293
25294
25295 return timingInfo.start;
25296 }
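  // Selection order in the helper above when useDtsForTimestampOffset_ is
  // set: video transmuxedDecodeStart first, then audio transmuxedDecodeStart
  // for audio-only content, then the presentation start for content that was
  // never transmuxed (e.g. fmp4). For a hypothetical TS segment with video
  // DTS 1.4s and PTS 1.5s, the helper returns 1.4 rather than 1.5.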
25297
25298 updateTimingInfoEnd_(segmentInfo) {
25299 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
25300 const trackInfo = this.getMediaInfo_();
25301 const useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
25302 const prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
25303
25304 if (!prioritizedTimingInfo) {
25305 return;
25306 }
25307
25308 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
25309 // current example is the case of fmp4), so use the rough duration to calculate an
25310 // end time.
25311 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
25312 }
25313 /**
25314 * callback to run when appendBuffer is finished. detects if we are
25315 * in a good state to do things with the data we got, or if we need
25316 * to wait for more
25317 *
25318 * @private
25319 */
25320
25321
25322 handleAppendsDone_() {
25323 // appendsdone can cause an abort
25324 if (this.pendingSegment_) {
25325 this.trigger('appendsdone');
25326 }
25327
25328 if (!this.pendingSegment_) {
25329 this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
25330 // all appending cases?
25331
25332 if (!this.paused()) {
25333 this.monitorBuffer_();
25334 }
25335
25336 return;
25337 }
25338
25339 const segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
25340 // best to wait until all appends are done so we're sure that the primary media is
25341 // finished (and we have its end time).
25342
25343 this.updateTimingInfoEnd_(segmentInfo);
25344
25345 if (this.shouldSaveSegmentTimingInfo_) {
25346 // Timeline mappings should only be saved for the main loader. This is for multiple
25347 // reasons:
25348 //
25349 // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
25350 // and the main loader try to save the timeline mapping, whichever comes later
25351 // will overwrite the first. In theory this is OK, as the mappings should be the
25352 // same, however, it breaks for (2)
25353 // 2) In the event of a live stream, the initial live point will make for a somewhat
25354 // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
25355 // the mapping will be off for one of the streams, dependent on which one was
25356 // first saved (see (1)).
25357 // 3) Primary timing goes by video in VHS, so the mapping should be video.
25358 //
25359 // Since the audio loader will wait for the main loader to load the first segment,
25360 // the main loader will save the first timeline mapping, and ensure that there won't
25361 // be a case where audio loads two segments without saving a mapping (thus leading
25362 // to missing segment timing info).
25363 this.syncController_.saveSegmentTimingInfo({
25364 segmentInfo,
25365 shouldSaveTimelineMapping: this.loaderType_ === 'main'
25366 });
25367 }
25368
25369 const segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
25370
25371 if (segmentDurationMessage) {
25372 if (segmentDurationMessage.severity === 'warn') {
25373 videojs__default["default"].log.warn(segmentDurationMessage.message);
25374 } else {
25375 this.logger_(segmentDurationMessage.message);
25376 }
25377 }
25378
25379 this.recordThroughput_(segmentInfo);
25380 this.pendingSegment_ = null;
25381 this.state = 'READY';
25382
25383 if (segmentInfo.isSyncRequest) {
25384 this.trigger('syncinfoupdate'); // if the sync request was not appended
25385 // then it was not the correct segment.
25386 // throw it away and use the data it gave us
25387 // to get the correct one.
25388
25389 if (!segmentInfo.hasAppendedData_) {
25390 this.logger_(`Throwing away un-appended sync request ${segmentInfoString(segmentInfo)}`);
25391 return;
25392 }
25393 }
25394
25395 this.logger_(`Appended ${segmentInfoString(segmentInfo)}`);
25396 this.addSegmentMetadataCue_(segmentInfo);
25397 this.fetchAtBuffer_ = true;
25398
25399 if (this.currentTimeline_ !== segmentInfo.timeline) {
25400 this.timelineChangeController_.lastTimelineChange({
25401 type: this.loaderType_,
25402 from: this.currentTimeline_,
25403 to: segmentInfo.timeline
25404 }); // If audio is not disabled, the main segment loader is responsible for updating
25405 // the audio timeline as well. If the content is video only, this won't have any
25406 // impact.
25407
25408 if (this.loaderType_ === 'main' && !this.audioDisabled_) {
25409 this.timelineChangeController_.lastTimelineChange({
25410 type: 'audio',
25411 from: this.currentTimeline_,
25412 to: segmentInfo.timeline
25413 });
25414 }
25415 }
25416
25417 this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
25418 // the following conditional otherwise it may consider this a bad "guess"
25419 // and attempt to resync when the post-update seekable window and live
25420 // point would mean that this was the perfect segment to fetch
25421
25422 this.trigger('syncinfoupdate');
25423 const segment = segmentInfo.segment;
25424 const part = segmentInfo.part;
25425 const badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;
25426 const badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before
25427 // the currentTime_, it means our guess was too conservative.
25428 // In that case, reset the loader state so that we try to use any information gained
25429 // from the previous request to create a new, more accurate, sync-point.
25430
25431 if (badSegmentGuess || badPartGuess) {
25432 this.logger_(`bad ${badSegmentGuess ? 'segment' : 'part'} ${segmentInfoString(segmentInfo)}`);
25433 this.resetEverything();
25434 return;
25435 }
25436
25437 const isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
25438 // and conservatively guess
25439
25440 if (isWalkingForward) {
25441 this.trigger('bandwidthupdate');
25442 }
25443
25444 this.trigger('progress');
25445 this.mediaIndex = segmentInfo.mediaIndex;
25446 this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
25447 // buffer, end the stream. this ensures the "ended" event will
25448 // fire if playback reaches that point.
25449
25450 if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
25451 this.endOfStream();
25452 } // used for testing
25453
25454
25455 this.trigger('appended');
25456
25457 if (segmentInfo.hasAppendedData_) {
25458 this.mediaAppends++;
25459 }
25460
25461 if (!this.paused()) {
25462 this.monitorBuffer_();
25463 }
25464 }
25465 /**
25466 * Records the current throughput of the decrypt, transmux, and append
25467      * portion of the segment pipeline. `throughput.rate` is the cumulative
25468 * moving average of the throughput. `throughput.count` is the number of
25469 * data points in the average.
25470 *
25471 * @private
25472 * @param {Object} segmentInfo the object returned by loadSegment
25473 */
25474
25475
25476 recordThroughput_(segmentInfo) {
25477 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
25478 this.logger_(`Ignoring segment's throughput because its duration of ${segmentInfo.duration}` + ` is less than the min to record ${MIN_SEGMENT_DURATION_TO_SAVE_STATS}`);
25479 return;
25480 }
25481
25482 const rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
25483 // by zero in the case where the throughput is ridiculously high
25484
25485 const segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
25486
25487 const segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
25488 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
25489
25490 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
25491 }
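    // A worked example of the cumulative moving average above (values are
    // illustrative, not taken from the library): with throughput = { rate: 1e6,
    // count: 4 } and a new sample of 2e6 bits/s, the update computes
    //   rate = 1e6 + (2e6 - 1e6) / (4 + 1) = 1.2e6 bits/s, and count becomes 5.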
25492 /**
25493 * Adds a cue to the segment-metadata track with some metadata information about the
25494 * segment
25495 *
25496 * @private
25497 * @param {Object} segmentInfo
25498 * the object returned by loadSegment
25499 * @method addSegmentMetadataCue_
25500 */
25501
25502
25503 addSegmentMetadataCue_(segmentInfo) {
25504 if (!this.segmentMetadataTrack_) {
25505 return;
25506 }
25507
25508 const segment = segmentInfo.segment;
25509 const start = segment.start;
25510 const end = segment.end; // Do not try adding the cue if the start and end times are invalid.
25511
25512 if (!finite(start) || !finite(end)) {
25513 return;
25514 }
25515
25516 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
25517 const Cue = window.WebKitDataCue || window.VTTCue;
25518 const value = {
25519 custom: segment.custom,
25520 dateTimeObject: segment.dateTimeObject,
25521 dateTimeString: segment.dateTimeString,
25522 programDateTime: segment.programDateTime,
25523 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
25524 resolution: segmentInfo.playlist.attributes.RESOLUTION,
25525 codecs: segmentInfo.playlist.attributes.CODECS,
25526 byteLength: segmentInfo.byteLength,
25527 uri: segmentInfo.uri,
25528 timeline: segmentInfo.timeline,
25529 playlist: segmentInfo.playlist.id,
25530 start,
25531 end
25532 };
25533 const data = JSON.stringify(value);
25534      const cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency
25535      // between WebKitDataCue in Safari and VTTCue in other browsers
25536
25537 cue.value = value;
25538 this.segmentMetadataTrack_.addCue(cue);
25539 }
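    // Consumer-side sketch (illustrative, assuming the track created by VHS is
    // labelled 'segment-metadata' as documented in the project README):
    //   const tracks = player.textTracks();
    //   const metadataTrack = Array.prototype.find.call(tracks, t => t.label === 'segment-metadata');
    //   const activeCue = metadataTrack && metadataTrack.activeCues && metadataTrack.activeCues[0];
    //   if (activeCue) {
    //     // activeCue.value carries the object built above (playlist id, codecs, resolution, ...)
    //     console.log(activeCue.value.playlist, activeCue.value.resolution);
    //   }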
25540
25541 }
25542
25543 function noop() {}
25544
25545 const toTitleCase = function (string) {
25546 if (typeof string !== 'string') {
25547 return string;
25548 }
25549
25550 return string.replace(/./, w => w.toUpperCase());
25551 };
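  // For example, toTitleCase('audio') returns 'Audio'; the source updater below
  // depends on this to address handler properties dynamically, e.g.
  // sourceUpdater['onAudioUpdateEnd_'] for the 'audio' buffer type.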
25552
25553 /**
25554 * @file source-updater.js
25555 */
25556 const bufferTypes = ['video', 'audio'];
25557
25558 const updating = (type, sourceUpdater) => {
25559 const sourceBuffer = sourceUpdater[`${type}Buffer`];
25560 return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
25561 };
25562
25563 const nextQueueIndexOfType = (type, queue) => {
25564 for (let i = 0; i < queue.length; i++) {
25565 const queueEntry = queue[i];
25566
25567 if (queueEntry.type === 'mediaSource') {
25568 // If the next entry is a media source entry (uses multiple source buffers), block
25569 // processing to allow it to go through first.
25570 return null;
25571 }
25572
25573 if (queueEntry.type === type) {
25574 return i;
25575 }
25576 }
25577
25578 return null;
25579 };
25580
25581 const shiftQueue = (type, sourceUpdater) => {
25582 if (sourceUpdater.queue.length === 0) {
25583 return;
25584 }
25585
25586 let queueIndex = 0;
25587 let queueEntry = sourceUpdater.queue[queueIndex];
25588
25589 if (queueEntry.type === 'mediaSource') {
25590 if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
25591 sourceUpdater.queue.shift();
25592 queueEntry.action(sourceUpdater);
25593
25594 if (queueEntry.doneFn) {
25595 queueEntry.doneFn();
25596 } // Only specific source buffer actions must wait for async updateend events. Media
25597 // Source actions process synchronously. Therefore, both audio and video source
25598 // buffers are now clear to process the next queue entries.
25599
25600
25601 shiftQueue('audio', sourceUpdater);
25602 shiftQueue('video', sourceUpdater);
25603 } // Media Source actions require both source buffers, so if the media source action
25604 // couldn't process yet (because one or both source buffers are busy), block other
25605 // queue actions until both are available and the media source action can process.
25606
25607
25608 return;
25609 }
25610
25611 if (type === 'mediaSource') {
25612 // If the queue was shifted by a media source action (this happens when pushing a
25613 // media source action onto the queue), then it wasn't from an updateend event from an
25614 // audio or video source buffer, so there's no change from previous state, and no
25615 // processing should be done.
25616 return;
25617 } // Media source queue entries don't need to consider whether the source updater is
25618 // started (i.e., source buffers are created) as they don't need the source buffers, but
25619 // source buffer queue entries do.
25620
25621
25622 if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || updating(type, sourceUpdater)) {
25623 return;
25624 }
25625
25626 if (queueEntry.type !== type) {
25627 queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
25628
25629 if (queueIndex === null) {
25630 // Either there's no queue entry that uses this source buffer type in the queue, or
25631 // there's a media source queue entry before the next entry of this type, in which
25632 // case wait for that action to process first.
25633 return;
25634 }
25635
25636 queueEntry = sourceUpdater.queue[queueIndex];
25637 }
25638
25639 sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
25640 //
25641 // The queue pending operation must be set before the action is performed in the event
25642 // that the action results in a synchronous event that is acted upon. For instance, if
25643 // an exception is thrown that can be handled, it's possible that new actions will be
25644 // appended to an empty queue and immediately executed, but would not have the correct
25645 // pending information if this property was set after the action was performed.
25646
25647 sourceUpdater.queuePending[type] = queueEntry;
25648 queueEntry.action(type, sourceUpdater);
25649
25650 if (!queueEntry.doneFn) {
25651 // synchronous operation, process next entry
25652 sourceUpdater.queuePending[type] = null;
25653 shiftQueue(type, sourceUpdater);
25654 return;
25655 }
25656 };
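  // A sketch of the queue this function drains (entry shapes mirror the
  // pushQueue calls below; the concrete values are illustrative only):
  //   sourceUpdater.queue = [
  //     { type: 'mediaSource', action: actions.addSourceBuffer('video', 'avc1.4d400d'), name: 'addSourceBuffer' },
  //     { type: 'video', action: actions.appendBuffer(bytes, segmentInfo, onError), doneFn, name: 'appendBuffer' },
  //     { type: 'audio', action: actions.remove(0, 10), doneFn: done, name: 'remove' }
  //   ];
  // 'mediaSource' entries gate both buffer types until they can run, while
  // 'audio' and 'video' entries drain independently, one per updateend event.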
25657
25658 const cleanupBuffer = (type, sourceUpdater) => {
25659 const buffer = sourceUpdater[`${type}Buffer`];
25660 const titleType = toTitleCase(type);
25661
25662 if (!buffer) {
25663 return;
25664 }
25665
25666 buffer.removeEventListener('updateend', sourceUpdater[`on${titleType}UpdateEnd_`]);
25667 buffer.removeEventListener('error', sourceUpdater[`on${titleType}Error_`]);
25668 sourceUpdater.codecs[type] = null;
25669 sourceUpdater[`${type}Buffer`] = null;
25670 };
25671
25672 const inSourceBuffers = (mediaSource, sourceBuffer) => mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
25673
25674 const actions = {
25675 appendBuffer: (bytes, segmentInfo, onError) => (type, sourceUpdater) => {
25676 const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null
25677 // or the media source does not contain this source buffer.
25678
25679 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
25680 return;
25681 }
25682
25683 sourceUpdater.logger_(`Appending segment ${segmentInfo.mediaIndex}'s ${bytes.length} bytes to ${type}Buffer`);
25684
25685 try {
25686 sourceBuffer.appendBuffer(bytes);
25687 } catch (e) {
25688 sourceUpdater.logger_(`Error with code ${e.code} ` + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + `when appending segment ${segmentInfo.mediaIndex} to ${type}Buffer`);
25689 sourceUpdater.queuePending[type] = null;
25690 onError(e);
25691 }
25692 },
25693 remove: (start, end) => (type, sourceUpdater) => {
25694 const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null
25695 // or the media source does not contain this source buffer.
25696
25697 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
25698 return;
25699 }
25700
25701 sourceUpdater.logger_(`Removing ${start} to ${end} from ${type}Buffer`);
25702
25703 try {
25704 sourceBuffer.remove(start, end);
25705 } catch (e) {
25706 sourceUpdater.logger_(`Remove ${start} to ${end} from ${type}Buffer failed`);
25707 }
25708 },
25709 timestampOffset: offset => (type, sourceUpdater) => {
25710 const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null
25711 // or the media source does not contain this source buffer.
25712
25713 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
25714 return;
25715 }
25716
25717      sourceUpdater.logger_(`Setting ${type} timestampOffset to ${offset}`);
25718 sourceBuffer.timestampOffset = offset;
25719 },
25720 callback: callback => (type, sourceUpdater) => {
25721 callback();
25722 },
25723 endOfStream: error => sourceUpdater => {
25724 if (sourceUpdater.mediaSource.readyState !== 'open') {
25725 return;
25726 }
25727
25728 sourceUpdater.logger_(`Calling mediaSource endOfStream(${error || ''})`);
25729
25730 try {
25731 sourceUpdater.mediaSource.endOfStream(error);
25732 } catch (e) {
25733 videojs__default["default"].log.warn('Failed to call media source endOfStream', e);
25734 }
25735 },
25736 duration: duration => sourceUpdater => {
25737 sourceUpdater.logger_(`Setting mediaSource duration to ${duration}`);
25738
25739 try {
25740 sourceUpdater.mediaSource.duration = duration;
25741 } catch (e) {
25742 videojs__default["default"].log.warn('Failed to set media source duration', e);
25743 }
25744 },
25745 abort: () => (type, sourceUpdater) => {
25746 if (sourceUpdater.mediaSource.readyState !== 'open') {
25747 return;
25748 }
25749
25750 const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null
25751 // or the media source does not contain this source buffer.
25752
25753 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
25754 return;
25755 }
25756
25757 sourceUpdater.logger_(`calling abort on ${type}Buffer`);
25758
25759 try {
25760 sourceBuffer.abort();
25761 } catch (e) {
25762 videojs__default["default"].log.warn(`Failed to abort on ${type}Buffer`, e);
25763 }
25764 },
25765 addSourceBuffer: (type, codec) => sourceUpdater => {
25766 const titleType = toTitleCase(type);
25767 const mime = getMimeForCodec(codec);
25768 sourceUpdater.logger_(`Adding ${type}Buffer with codec ${codec} to mediaSource`);
25769 const sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
25770 sourceBuffer.addEventListener('updateend', sourceUpdater[`on${titleType}UpdateEnd_`]);
25771 sourceBuffer.addEventListener('error', sourceUpdater[`on${titleType}Error_`]);
25772 sourceUpdater.codecs[type] = codec;
25773 sourceUpdater[`${type}Buffer`] = sourceBuffer;
25774 },
25775 removeSourceBuffer: type => sourceUpdater => {
25776 const sourceBuffer = sourceUpdater[`${type}Buffer`];
25777 cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
25778 // or the media source does not contain this source buffer.
25779
25780 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
25781 return;
25782 }
25783
25784 sourceUpdater.logger_(`Removing ${type}Buffer with codec ${sourceUpdater.codecs[type]} from mediaSource`);
25785
25786 try {
25787 sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
25788 } catch (e) {
25789 videojs__default["default"].log.warn(`Failed to removeSourceBuffer ${type}Buffer`, e);
25790 }
25791 },
25792 changeType: codec => (type, sourceUpdater) => {
25793 const sourceBuffer = sourceUpdater[`${type}Buffer`];
25794 const mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
25795 // or the media source does not contain this source buffer.
25796
25797 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
25798 return;
25799 } // do not update codec if we don't need to.
25800
25801
25802 if (sourceUpdater.codecs[type] === codec) {
25803 return;
25804 }
25805
25806 sourceUpdater.logger_(`changing ${type}Buffer codec from ${sourceUpdater.codecs[type]} to ${codec}`); // check if change to the provided type is supported
25807
25808 try {
25809 sourceBuffer.changeType(mime);
25810 sourceUpdater.codecs[type] = codec;
25811 } catch (e) {
25812 videojs__default["default"].log.warn(`Failed to changeType on ${type}Buffer`, e);
25813 }
25814 }
25815 };
25816
25817 const pushQueue = ({
25818 type,
25819 sourceUpdater,
25820 action,
25821 doneFn,
25822 name
25823 }) => {
25824 sourceUpdater.queue.push({
25825 type,
25826 action,
25827 doneFn,
25828 name
25829 });
25830 shiftQueue(type, sourceUpdater);
25831 };
25832
25833 const onUpdateend = (type, sourceUpdater) => e => {
25834    // Although there should, in theory, be a pending action for any updateend received,
25835 // there are some actions that may trigger updateend events without set definitions in
25836 // the w3c spec. For instance, setting the duration on the media source may trigger
25837 // updateend events on source buffers. This does not appear to be in the spec. As such,
25838 // if we encounter an updateend without a corresponding pending action from our queue
25839 // for that source buffer type, process the next action.
25840 if (sourceUpdater.queuePending[type]) {
25841 const doneFn = sourceUpdater.queuePending[type].doneFn;
25842 sourceUpdater.queuePending[type] = null;
25843
25844 if (doneFn) {
25845 // if there's an error, report it
25846 doneFn(sourceUpdater[`${type}Error_`]);
25847 }
25848 }
25849
25850 shiftQueue(type, sourceUpdater);
25851 };
25852 /**
25853 * A queue of callbacks to be serialized and applied when a
25854 * MediaSource and its associated SourceBuffers are not in the
25855 * updating state. It is used by the segment loader to update the
25856 * underlying SourceBuffers when new data is loaded, for instance.
25857 *
25858 * @class SourceUpdater
25859   * @param {MediaSource} mediaSource the MediaSource to create SourceBuffers from
25861 */
25862
25863
25864 class SourceUpdater extends videojs__default["default"].EventTarget {
25865 constructor(mediaSource) {
25866 super();
25867 this.mediaSource = mediaSource;
25868
25869 this.sourceopenListener_ = () => shiftQueue('mediaSource', this);
25870
25871 this.mediaSource.addEventListener('sourceopen', this.sourceopenListener_);
25872 this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
25873
25874 this.audioTimestampOffset_ = 0;
25875 this.videoTimestampOffset_ = 0;
25876 this.queue = [];
25877 this.queuePending = {
25878 audio: null,
25879 video: null
25880 };
25881 this.delayedAudioAppendQueue_ = [];
25882 this.videoAppendQueued_ = false;
25883 this.codecs = {};
25884 this.onVideoUpdateEnd_ = onUpdateend('video', this);
25885 this.onAudioUpdateEnd_ = onUpdateend('audio', this);
25886
25887 this.onVideoError_ = e => {
25888 // used for debugging
25889 this.videoError_ = e;
25890 };
25891
25892 this.onAudioError_ = e => {
25893 // used for debugging
25894 this.audioError_ = e;
25895 };
25896
25897 this.createdSourceBuffers_ = false;
25898 this.initializedEme_ = false;
25899 this.triggeredReady_ = false;
25900 }
25901
25902 initializedEme() {
25903 this.initializedEme_ = true;
25904 this.triggerReady();
25905 }
25906
25907 hasCreatedSourceBuffers() {
25908 // if false, likely waiting on one of the segment loaders to get enough data to create
25909 // source buffers
25910 return this.createdSourceBuffers_;
25911 }
25912
25913 hasInitializedAnyEme() {
25914 return this.initializedEme_;
25915 }
25916
25917 ready() {
25918 return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
25919 }
25920
25921 createSourceBuffers(codecs) {
25922 if (this.hasCreatedSourceBuffers()) {
25923 // already created them before
25924 return;
25925      } // the initial addOrChangeSourceBuffers call will always
25926      // add two source buffers.
25927
25928
25929 this.addOrChangeSourceBuffers(codecs);
25930 this.createdSourceBuffers_ = true;
25931 this.trigger('createdsourcebuffers');
25932 this.triggerReady();
25933 }
25934
25935 triggerReady() {
25936 // only allow ready to be triggered once, this prevents the case
25937 // where:
25938 // 1. we trigger createdsourcebuffers
25939      // 2. IE 11 synchronously initializes eme
25940 // 3. the synchronous initialization causes us to trigger ready
25941 // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
25942 if (this.ready() && !this.triggeredReady_) {
25943 this.triggeredReady_ = true;
25944 this.trigger('ready');
25945 }
25946 }
25947 /**
25948 * Add a type of source buffer to the media source.
25949 *
25950 * @param {string} type
25951 * The type of source buffer to add.
25952 *
25953 * @param {string} codec
25954 * The codec to add the source buffer with.
25955 */
25956
25957
25958 addSourceBuffer(type, codec) {
25959 pushQueue({
25960 type: 'mediaSource',
25961 sourceUpdater: this,
25962 action: actions.addSourceBuffer(type, codec),
25963 name: 'addSourceBuffer'
25964 });
25965 }
25966 /**
25967 * call abort on a source buffer.
25968 *
25969 * @param {string} type
25970 * The type of source buffer to call abort on.
25971 */
25972
25973
25974 abort(type) {
25975 pushQueue({
25976 type,
25977 sourceUpdater: this,
25978 action: actions.abort(type),
25979 name: 'abort'
25980 });
25981 }
25982 /**
25983 * Call removeSourceBuffer and remove a specific type
25984 * of source buffer on the mediaSource.
25985 *
25986 * @param {string} type
25987 * The type of source buffer to remove.
25988 */
25989
25990
25991 removeSourceBuffer(type) {
25992 if (!this.canRemoveSourceBuffer()) {
25993 videojs__default["default"].log.error('removeSourceBuffer is not supported!');
25994 return;
25995 }
25996
25997 pushQueue({
25998 type: 'mediaSource',
25999 sourceUpdater: this,
26000 action: actions.removeSourceBuffer(type),
26001 name: 'removeSourceBuffer'
26002 });
26003 }
26004 /**
26005 * Whether or not the removeSourceBuffer function is supported
26006 * on the mediaSource.
26007 *
26008 * @return {boolean}
26009 * if removeSourceBuffer can be called.
26010 */
26011
26012
26013 canRemoveSourceBuffer() {
26014      // As of Firefox 83, removeSourceBuffer
26015      // throws errors, so we report that it is not supported.
26016 return !videojs__default["default"].browser.IS_FIREFOX && window.MediaSource && window.MediaSource.prototype && typeof window.MediaSource.prototype.removeSourceBuffer === 'function';
26017 }
26018 /**
26019 * Whether or not the changeType function is supported
26020 * on our SourceBuffers.
26021 *
26022 * @return {boolean}
26023 * if changeType can be called.
26024 */
26025
26026
26027 static canChangeType() {
26028 return window.SourceBuffer && window.SourceBuffer.prototype && typeof window.SourceBuffer.prototype.changeType === 'function';
26029 }
26030 /**
26031 * Whether or not the changeType function is supported
26032 * on our SourceBuffers.
26033 *
26034 * @return {boolean}
26035 * if changeType can be called.
26036 */
26037
26038
26039 canChangeType() {
26040 return this.constructor.canChangeType();
26041 }
26042 /**
26043 * Call the changeType function on a source buffer, given the code and type.
26044 *
26045 * @param {string} type
26046 * The type of source buffer to call changeType on.
26047 *
26048 * @param {string} codec
26049 * The codec string to change type with on the source buffer.
26050 */
26051
26052
26053 changeType(type, codec) {
26054 if (!this.canChangeType()) {
26055 videojs__default["default"].log.error('changeType is not supported!');
26056 return;
26057 }
26058
26059 pushQueue({
26060 type,
26061 sourceUpdater: this,
26062 action: actions.changeType(codec),
26063 name: 'changeType'
26064 });
26065 }
26066 /**
26067 * Add source buffers with a codec or, if they are already created,
26068 * call changeType on source buffers using changeType.
26069 *
26070 * @param {Object} codecs
26071 * Codecs to switch to
26072 */
26073
26074
26075 addOrChangeSourceBuffers(codecs) {
26076 if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
26077 throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
26078 }
26079
26080 Object.keys(codecs).forEach(type => {
26081 const codec = codecs[type];
26082
26083 if (!this.hasCreatedSourceBuffers()) {
26084 return this.addSourceBuffer(type, codec);
26085 }
26086
26087 if (this.canChangeType()) {
26088 this.changeType(type, codec);
26089 }
26090 });
26091 }
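    // Usage sketch (codec strings are illustrative, not taken from this file):
    //   sourceUpdater.createSourceBuffers({ video: 'avc1.4d400d', audio: 'mp4a.40.2' });
    //   // later, e.g. after a rendition switch to a different video codec:
    //   sourceUpdater.addOrChangeSourceBuffers({ video: 'avc1.64001f', audio: 'mp4a.40.2' });
    // The first call adds both source buffers; once buffers exist, subsequent
    // calls route through changeType when the browser supports it.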
26092 /**
26093 * Queue an update to append an ArrayBuffer.
26094 *
26095     * @param {Object} options object containing type ('audio' or 'video'), bytes, and optional segmentInfo
26096     * @param {Function} doneFn the function to call when done
26097 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
26098 */
26099
26100
26101 appendBuffer(options, doneFn) {
26102 const {
26103 segmentInfo,
26104 type,
26105 bytes
26106 } = options;
26107 this.processedAppend_ = true;
26108
26109 if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
26110 this.delayedAudioAppendQueue_.push([options, doneFn]);
26111 this.logger_(`delayed audio append of ${bytes.length} until video append`);
26112 return;
26113 } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
26114 // not be fired. This means that the queue will be blocked until the next action
26115 // taken by the segment-loader. Provide a mechanism for segment-loader to handle
26116 // these errors by calling the doneFn with the specific error.
26117
26118
26119 const onError = doneFn;
26120 pushQueue({
26121 type,
26122 sourceUpdater: this,
26123 action: actions.appendBuffer(bytes, segmentInfo || {
26124 mediaIndex: -1
26125 }, onError),
26126 doneFn,
26127 name: 'appendBuffer'
26128 });
26129
26130 if (type === 'video') {
26131 this.videoAppendQueued_ = true;
26132
26133 if (!this.delayedAudioAppendQueue_.length) {
26134 return;
26135 }
26136
26137 const queue = this.delayedAudioAppendQueue_.slice();
26138 this.logger_(`queuing delayed audio ${queue.length} appendBuffers`);
26139 this.delayedAudioAppendQueue_.length = 0;
26140 queue.forEach(que => {
26141 this.appendBuffer.apply(this, que);
26142 });
26143 }
26144 }
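    // Ordering sketch (illustrative): for content with a video buffer, the first
    // audio append is parked until a video append has been queued, so a caller doing
    //   sourceUpdater.appendBuffer({ type: 'audio', bytes: audioBytes, segmentInfo }, doneFn);
    //   sourceUpdater.appendBuffer({ type: 'video', bytes: videoBytes, segmentInfo }, doneFn);
    // will see the audio append land in delayedAudioAppendQueue_ first, then get
    // replayed immediately after the video append is pushed onto the queue.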
26145 /**
26146 * Get the audio buffer's buffered timerange.
26147 *
26148 * @return {TimeRange}
26149 * The audio buffer's buffered time range
26150 */
26151
26152
26153 audioBuffered() {
26154      // no media source/source buffer or it isn't in the media source's
26155 // source buffer list
26156 if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
26157 return createTimeRanges();
26158 }
26159
26160 return this.audioBuffer.buffered ? this.audioBuffer.buffered : createTimeRanges();
26161 }
26162 /**
26163 * Get the video buffer's buffered timerange.
26164 *
26165 * @return {TimeRange}
26166 * The video buffer's buffered time range
26167 */
26168
26169
26170 videoBuffered() {
26171      // no media source/source buffer or it isn't in the media source's
26172 // source buffer list
26173 if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
26174 return createTimeRanges();
26175 }
26176
26177 return this.videoBuffer.buffered ? this.videoBuffer.buffered : createTimeRanges();
26178 }
26179 /**
26180 * Get a combined video/audio buffer's buffered timerange.
26181 *
26182 * @return {TimeRange}
26183 * the combined time range
26184 */
26185
26186
26187 buffered() {
26188 const video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
26189 const audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
26190
26191 if (audio && !video) {
26192 return this.audioBuffered();
26193 }
26194
26195 if (video && !audio) {
26196 return this.videoBuffered();
26197 }
26198
26199 return bufferIntersection(this.audioBuffered(), this.videoBuffered());
26200 }
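    // For example (illustrative ranges): with video buffered for [0, 10] and
    // audio buffered for [2, 12], bufferIntersection yields [2, 10], the only
    // span that both source buffers can actually play through.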
26201 /**
26202 * Add a callback to the queue that will set duration on the mediaSource.
26203 *
26204 * @param {number} duration
26205 * The duration to set
26206 *
26207 * @param {Function} [doneFn]
26208 * function to run after duration has been set.
26209 */
26210
26211
26212 setDuration(duration, doneFn = noop) {
26213 // In order to set the duration on the media source, it's necessary to wait for all
26214 // source buffers to no longer be updating. "If the updating attribute equals true on
26215 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
26216 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
26217 pushQueue({
26218 type: 'mediaSource',
26219 sourceUpdater: this,
26220 action: actions.duration(duration),
26221 name: 'duration',
26222 doneFn
26223 });
26224 }
26225 /**
26226 * Add a mediaSource endOfStream call to the queue
26227 *
26228 * @param {Error} [error]
26229 * Call endOfStream with an error
26230 *
26231 * @param {Function} [doneFn]
26232 * A function that should be called when the
26233 * endOfStream call has finished.
26234 */
26235
26236
26237 endOfStream(error = null, doneFn = noop) {
26238 if (typeof error !== 'string') {
26239 error = undefined;
26240 } // In order to set the duration on the media source, it's necessary to wait for all
26241 // source buffers to no longer be updating. "If the updating attribute equals true on
26242 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
26243 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
26244
26245
26246 pushQueue({
26247 type: 'mediaSource',
26248 sourceUpdater: this,
26249 action: actions.endOfStream(error),
26250 name: 'endOfStream',
26251 doneFn
26252 });
26253 }
26254 /**
26255 * Queue an update to remove a time range from the buffer.
26256 *
26257 * @param {number} start where to start the removal
26258 * @param {number} end where to end the removal
26259 * @param {Function} [done=noop] optional callback to be executed when the remove
26260 * operation is complete
26261 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
26262 */
26263
26264
26265 removeAudio(start, end, done = noop) {
26266 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
26267 done();
26268 return;
26269 }
26270
26271 pushQueue({
26272 type: 'audio',
26273 sourceUpdater: this,
26274 action: actions.remove(start, end),
26275 doneFn: done,
26276 name: 'remove'
26277 });
26278 }
26279 /**
26280 * Queue an update to remove a time range from the buffer.
26281 *
26282 * @param {number} start where to start the removal
26283 * @param {number} end where to end the removal
26284 * @param {Function} [done=noop] optional callback to be executed when the remove
26285 * operation is complete
26286 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
26287 */
26288
26289
26290 removeVideo(start, end, done = noop) {
26291 if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
26292 done();
26293 return;
26294 }
26295
26296 pushQueue({
26297 type: 'video',
26298 sourceUpdater: this,
26299 action: actions.remove(start, end),
26300 doneFn: done,
26301 name: 'remove'
26302 });
26303 }
26304 /**
26305 * Whether the underlying sourceBuffer is updating or not
26306 *
26307 * @return {boolean} the updating status of the SourceBuffer
26308 */
26309
26310
26311 updating() {
26312 // the audio/video source buffer is updating
26313 if (updating('audio', this) || updating('video', this)) {
26314 return true;
26315 }
26316
26317 return false;
26318 }
26319 /**
26320 * Set/get the timestampoffset on the audio SourceBuffer
26321 *
26322 * @return {number} the timestamp offset
26323 */
26324
26325
26326 audioTimestampOffset(offset) {
26327 if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
26328 this.audioTimestampOffset_ !== offset) {
26329 pushQueue({
26330 type: 'audio',
26331 sourceUpdater: this,
26332 action: actions.timestampOffset(offset),
26333 name: 'timestampOffset'
26334 });
26335 this.audioTimestampOffset_ = offset;
26336 }
26337
26338 return this.audioTimestampOffset_;
26339 }
26340 /**
26341 * Set/get the timestampoffset on the video SourceBuffer
26342 *
26343 * @return {number} the timestamp offset
26344 */
26345
26346
26347 videoTimestampOffset(offset) {
26348 if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
26349      this.videoTimestampOffset_ !== offset) {
26350 pushQueue({
26351 type: 'video',
26352 sourceUpdater: this,
26353 action: actions.timestampOffset(offset),
26354 name: 'timestampOffset'
26355 });
26356 this.videoTimestampOffset_ = offset;
26357 }
26358
26359 return this.videoTimestampOffset_;
26360 }
26361 /**
26362 * Add a function to the queue that will be called
26363 * when it is its turn to run in the audio queue.
26364 *
26365 * @param {Function} callback
26366 * The callback to queue.
26367 */
26368
26369
26370 audioQueueCallback(callback) {
26371 if (!this.audioBuffer) {
26372 return;
26373 }
26374
26375 pushQueue({
26376 type: 'audio',
26377 sourceUpdater: this,
26378 action: actions.callback(callback),
26379 name: 'callback'
26380 });
26381 }
26382 /**
26383 * Add a function to the queue that will be called
26384 * when it is its turn to run in the video queue.
26385 *
26386 * @param {Function} callback
26387 * The callback to queue.
26388 */
26389
26390
26391 videoQueueCallback(callback) {
26392 if (!this.videoBuffer) {
26393 return;
26394 }
26395
26396 pushQueue({
26397 type: 'video',
26398 sourceUpdater: this,
26399 action: actions.callback(callback),
26400 name: 'callback'
26401 });
26402 }
26403 /**
26404 * dispose of the source updater and the underlying sourceBuffer
26405 */
26406
26407
26408 dispose() {
26409 this.trigger('dispose');
26410 bufferTypes.forEach(type => {
26411 this.abort(type);
26412
26413 if (this.canRemoveSourceBuffer()) {
26414 this.removeSourceBuffer(type);
26415 } else {
26416 this[`${type}QueueCallback`](() => cleanupBuffer(type, this));
26417 }
26418 });
26419 this.videoAppendQueued_ = false;
26420 this.delayedAudioAppendQueue_.length = 0;
26421
26422 if (this.sourceopenListener_) {
26423 this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
26424 }
26425
26426 this.off();
26427 }
26428
26429 }
26430
26431 const uint8ToUtf8 = uintArray => decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
26432 const bufferToHexString = buffer => {
26433 const uInt8Buffer = new Uint8Array(buffer);
26434 return Array.from(uInt8Buffer).map(byte => byte.toString(16).padStart(2, '0')).join('');
26435 };
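  // For example, bufferToHexString(new Uint8Array([255, 0, 16]).buffer)
  // returns 'ff0010'.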
26436
26437 /**
26438 * @file vtt-segment-loader.js
26439 */
26440 const VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(char => char.charCodeAt(0)));
26441
26442 class NoVttJsError extends Error {
26443 constructor() {
26444 super('Trying to parse received VTT cues, but there is no WebVTT. Make sure vtt.js is loaded.');
26445 }
26446
26447 }
26448 /**
26449 * An object that manages segment loading and appending.
26450 *
26451 * @class VTTSegmentLoader
26452 * @param {Object} options required and optional options
26453 * @extends videojs.EventTarget
26454 */
26455
26456
26457 class VTTSegmentLoader extends SegmentLoader {
26458 constructor(settings, options = {}) {
26459 super(settings, options); // SegmentLoader requires a MediaSource be specified or it will throw an error;
26460 // however, VTTSegmentLoader has no need of a media source, so delete the reference
26461
26462 this.mediaSource_ = null;
26463 this.subtitlesTrack_ = null;
26464 this.loaderType_ = 'subtitle';
26465 this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
26466 this.loadVttJs = settings.loadVttJs; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
26467 // the sync controller leads to improper behavior.
26468
26469 this.shouldSaveSegmentTimingInfo_ = false;
26470 }
26471
26472 createTransmuxer_() {
26473 // don't need to transmux any subtitles
26474 return null;
26475 }
26476 /**
26477 * Indicates which time ranges are buffered
26478 *
26479 * @return {TimeRange}
26480 * TimeRange object representing the current buffered ranges
26481 */
26482
26483
26484 buffered_() {
26485 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
26486 return createTimeRanges();
26487 }
26488
26489 const cues = this.subtitlesTrack_.cues;
26490 const start = cues[0].startTime;
26491 const end = cues[cues.length - 1].startTime;
26492 return createTimeRanges([[start, end]]);
26493 }
26494 /**
26495 * Gets and sets init segment for the provided map
26496 *
26497 * @param {Object} map
26498 * The map object representing the init segment to get or set
26499 * @param {boolean=} set
26500 * If true, the init segment for the provided map should be saved
26501 * @return {Object}
26502 * map object for desired init segment
26503 */
26504
26505
26506 initSegmentForMap(map, set = false) {
26507 if (!map) {
26508 return null;
26509 }
26510
26511 const id = initSegmentId(map);
26512 let storedMap = this.initSegments_[id];
26513
26514 if (set && !storedMap && map.bytes) {
26515 // append WebVTT line terminators to the media initialization segment if it exists
26516 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
26517 // requires two or more WebVTT line terminators between the WebVTT header and the
26518 // rest of the file
26519 const combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
26520 const combinedSegment = new Uint8Array(combinedByteLength);
26521 combinedSegment.set(map.bytes);
26522 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
26523 this.initSegments_[id] = storedMap = {
26524 resolvedUri: map.resolvedUri,
26525 byterange: map.byterange,
26526 bytes: combinedSegment
26527 };
26528 }
26529
26530 return storedMap || map;
26531 }
26532 /**
26533 * Returns true if all configuration required for loading is present, otherwise false.
26534 *
26535 * @return {boolean} True if the all configuration is ready for loading
26536 * @private
26537 */
26538
26539
26540 couldBeginLoading_() {
26541 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
26542 }
26543 /**
26544 * Once all the starting parameters have been specified, begin
26545 * operation. This method should only be invoked from the INIT
26546 * state.
26547 *
26548 * @private
26549 */
26550
26551
26552 init_() {
26553 this.state = 'READY';
26554 this.resetEverything();
26555 return this.monitorBuffer_();
26556 }
26557 /**
26558 * Set a subtitle track on the segment loader to add subtitles to
26559 *
26560 * @param {TextTrack=} track
26561 * The text track to add loaded subtitles to
26562 * @return {TextTrack}
26563 * Returns the subtitles track
26564 */
26565
26566
26567 track(track) {
26568 if (typeof track === 'undefined') {
26569 return this.subtitlesTrack_;
26570 }
26571
26572 this.subtitlesTrack_ = track; // if we were unpaused but waiting for a sourceUpdater, start
26573 // buffering now
26574
26575 if (this.state === 'INIT' && this.couldBeginLoading_()) {
26576 this.init_();
26577 }
26578
26579 return this.subtitlesTrack_;
26580 }
26581 /**
26582 * Remove any data in the source buffer between start and end times
26583 *
26584 * @param {number} start - the start time of the region to remove from the buffer
26585 * @param {number} end - the end time of the region to remove from the buffer
26586 */
26587
26588
26589 remove(start, end) {
26590 removeCuesFromTrack(start, end, this.subtitlesTrack_);
26591 }
26592 /**
26593     * fill the buffer with segments unless the sourceBuffers are
26594 * currently updating
26595 *
26596 * Note: this function should only ever be called by monitorBuffer_
26597 * and never directly
26598 *
26599 * @private
26600 */
26601
26602
26603 fillBuffer_() {
26604 // see if we need to begin loading immediately
26605 const segmentInfo = this.chooseNextRequest_();
26606
26607 if (!segmentInfo) {
26608 return;
26609 }
26610
26611 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
26612 // We don't have the timestamp offset that we need to sync subtitles.
26613 // Rerun on a timestamp offset or user interaction.
26614 const checkTimestampOffset = () => {
26615 this.state = 'READY';
26616
26617 if (!this.paused()) {
26618 // if not paused, queue a buffer check as soon as possible
26619 this.monitorBuffer_();
26620 }
26621 };
26622
26623 this.syncController_.one('timestampoffset', checkTimestampOffset);
26624 this.state = 'WAITING_ON_TIMELINE';
26625 return;
26626 }
26627
26628 this.loadSegment_(segmentInfo);
26629 } // never set a timestamp offset for vtt segments.
26630
26631
26632 timestampOffsetForSegment_() {
26633 return null;
26634 }
26635
26636 chooseNextRequest_() {
26637 return this.skipEmptySegments_(super.chooseNextRequest_());
26638 }
26639 /**
26640     * Prevents the segment loader from requesting segments we know contain no subtitles
26641     * by walking forward until we find the next segment that is not known to be
26642     * empty.
26643 *
26644 * @param {Object} segmentInfo
26645 * a segment info object that describes the current segment
26646 * @return {Object}
26647 * a segment info object that describes the current segment
26648 */
26649
26650
26651 skipEmptySegments_(segmentInfo) {
26652 while (segmentInfo && segmentInfo.segment.empty) {
26653 // stop at the last possible segmentInfo
26654 if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
26655 segmentInfo = null;
26656 break;
26657 }
26658
26659 segmentInfo = this.generateSegmentInfo_({
26660 playlist: segmentInfo.playlist,
26661 mediaIndex: segmentInfo.mediaIndex + 1,
26662 startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
26663 isSyncRequest: segmentInfo.isSyncRequest
26664 });
26665 }
26666
26667 return segmentInfo;
26668 }
26669
26670 stopForError(error) {
26671 this.error(error);
26672 this.state = 'READY';
26673 this.pause();
26674 this.trigger('error');
26675 }
26676 /**
26677     * append a decrypted segment to the SourceBuffer through a SourceUpdater
26678 *
26679 * @private
26680 */
26681
26682
26683 segmentRequestFinished_(error, simpleSegment, result) {
26684 if (!this.subtitlesTrack_) {
26685 this.state = 'READY';
26686 return;
26687 }
26688
26689 this.saveTransferStats_(simpleSegment.stats); // the request was aborted
26690
26691 if (!this.pendingSegment_) {
26692 this.state = 'READY';
26693 this.mediaRequestsAborted += 1;
26694 return;
26695 }
26696
26697 if (error) {
26698 if (error.code === REQUEST_ERRORS.TIMEOUT) {
26699 this.handleTimeout_();
26700 }
26701
26702 if (error.code === REQUEST_ERRORS.ABORTED) {
26703 this.mediaRequestsAborted += 1;
26704 } else {
26705 this.mediaRequestsErrored += 1;
26706 }
26707
26708 this.stopForError(error);
26709 return;
26710 }
26711
26712 const segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
26713 // maintain functionality between segment loaders
26714
26715 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats); // if this request included a segment key, save that data in the cache
26716
26717 if (simpleSegment.key) {
26718 this.segmentKey(simpleSegment.key, true);
26719 }
26720
26721 this.state = 'APPENDING'; // used for tests
26722
26723 this.trigger('appending');
26724 const segment = segmentInfo.segment;
26725
26726 if (segment.map) {
26727 segment.map.bytes = simpleSegment.map.bytes;
26728 }
26729
26730      segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded; otherwise, load it and wait until it has finished loading
26731
26732 if (typeof window.WebVTT !== 'function' && typeof this.loadVttJs === 'function') {
26733 this.state = 'WAITING_ON_VTTJS'; // should be fine to call multiple times
26734 // script will be loaded once but multiple listeners will be added to the queue, which is expected.
26735
26736 this.loadVttJs().then(() => this.segmentRequestFinished_(error, simpleSegment, result), () => this.stopForError({
26737 message: 'Error loading vtt.js'
26738 }));
26739 return;
26740 }
26741
26742 segment.requested = true;
26743
26744 try {
26745 this.parseVTTCues_(segmentInfo);
26746 } catch (e) {
26747 this.stopForError({
26748 message: e.message
26749 });
26750 return;
26751 }
26752
26753 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
26754
26755 if (segmentInfo.cues.length) {
26756 segmentInfo.timingInfo = {
26757 start: segmentInfo.cues[0].startTime,
26758 end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
26759 };
26760 } else {
26761 segmentInfo.timingInfo = {
26762 start: segmentInfo.startOfSegment,
26763 end: segmentInfo.startOfSegment + segmentInfo.duration
26764 };
26765 }
26766
26767 if (segmentInfo.isSyncRequest) {
26768 this.trigger('syncinfoupdate');
26769 this.pendingSegment_ = null;
26770 this.state = 'READY';
26771 return;
26772 }
26773
26774 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
26775 this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
26776 // the subtitle track
26777
26778 segmentInfo.cues.forEach(cue => {
26779 this.subtitlesTrack_.addCue(this.featuresNativeTextTracks_ ? new window.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
26780 }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
26781 // cues to have identical time-intervals, but if the text is also identical
26782 // we can safely assume it is a duplicate that can be removed (ex. when a cue
26783 // "overlaps" VTT segments)
26784
26785 removeDuplicateCuesFromTrack(this.subtitlesTrack_);
26786 this.handleAppendsDone_();
26787 }
26788
26789 handleData_() {// noop as we shouldn't be getting video/audio data captions
26790 // that we do not support here.
26791 }
26792
26793 updateTimingInfoEnd_() {// noop
26794 }
26795 /**
26796 * Uses the WebVTT parser to parse the segment response
26797 *
26798 * @throws NoVttJsError
26799 *
26800 * @param {Object} segmentInfo
26801 * a segment info object that describes the current segment
26802 * @private
26803 */
26804
26805
26806 parseVTTCues_(segmentInfo) {
26807 let decoder;
26808 let decodeBytesToString = false;
26809
26810 if (typeof window.WebVTT !== 'function') {
26811 // caller is responsible for exception handling.
26812 throw new NoVttJsError();
26813 }
26814
26815 if (typeof window.TextDecoder === 'function') {
26816 decoder = new window.TextDecoder('utf8');
26817 } else {
26818 decoder = window.WebVTT.StringDecoder();
26819 decodeBytesToString = true;
26820 }
26821
26822 const parser = new window.WebVTT.Parser(window, window.vttjs, decoder);
26823 segmentInfo.cues = [];
26824 segmentInfo.timestampmap = {
26825 MPEGTS: 0,
26826 LOCAL: 0
26827 };
26828 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
26829
26830 parser.ontimestampmap = map => {
26831 segmentInfo.timestampmap = map;
26832 };
26833
26834 parser.onparsingerror = error => {
26835 videojs__default["default"].log.warn('Error encountered when parsing cues: ' + error.message);
26836 };
26837
26838 if (segmentInfo.segment.map) {
26839 let mapData = segmentInfo.segment.map.bytes;
26840
26841 if (decodeBytesToString) {
26842 mapData = uint8ToUtf8(mapData);
26843 }
26844
26845 parser.parse(mapData);
26846 }
26847
26848 let segmentData = segmentInfo.bytes;
26849
26850 if (decodeBytesToString) {
26851 segmentData = uint8ToUtf8(segmentData);
26852 }
26853
26854 parser.parse(segmentData);
26855 parser.flush();
26856 }
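    // For reference, an X-TIMESTAMP-MAP header in the segment drives the
    // ontimestampmap callback above. A WebVTT segment beginning with
    //   WEBVTT
    //   X-TIMESTAMP-MAP=MPEGTS:900000,LOCAL:00:00:00.000
    // would (assuming vtt.js parses it as documented) set
    // segmentInfo.timestampmap to { MPEGTS: 900000, LOCAL: 0 }.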
26857 /**
26858 * Updates the start and end times of any cues parsed by the WebVTT parser using
26859 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
26860 * from the SyncController
26861 *
26862 * @param {Object} segmentInfo
26863 * a segment info object that describes the current segment
26864 * @param {Object} mappingObj
26865 * object containing a mapping from TS to media time
26866 * @param {Object} playlist
26867 * the playlist object containing the segment
26868 * @private
26869 */
26870
26871
26872 updateTimeMapping_(segmentInfo, mappingObj, playlist) {
26873 const segment = segmentInfo.segment;
26874
26875 if (!mappingObj) {
26876 // If the sync controller does not have a mapping of TS to Media Time for the
26877 // timeline, then we don't have enough information to update the cue
26878 // start/end times
26879 return;
26880 }
26881
26882 if (!segmentInfo.cues.length) {
26883 // If there are no cues, we also do not have enough information to figure out
26884 // segment timing. Mark that the segment contains no cues so we don't re-request
26885 // an empty segment.
26886 segment.empty = true;
26887 return;
26888 }
26889
26890 const {
26891 MPEGTS,
26892 LOCAL
26893 } = segmentInfo.timestampmap;
26894 /**
26895 * From the spec:
26896 * The MPEGTS media timestamp MUST use a 90KHz timescale,
26897 * even when non-WebVTT Media Segments use a different timescale.
26898 */
26899
26900 const mpegTsInSeconds = MPEGTS / clock.ONE_SECOND_IN_TS;
26901 const diff = mpegTsInSeconds - LOCAL + mappingObj.mapping;
26902 segmentInfo.cues.forEach(cue => {
26903 const duration = cue.endTime - cue.startTime;
26904 const startTime = MPEGTS === 0 ? cue.startTime + diff : this.handleRollover_(cue.startTime + diff, mappingObj.time);
26905 cue.startTime = Math.max(startTime, 0);
26906 cue.endTime = Math.max(startTime + duration, 0);
26907 });
26908
26909 if (!playlist.syncInfo) {
26910 const firstStart = segmentInfo.cues[0].startTime;
26911 const lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
26912 playlist.syncInfo = {
26913 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
26914 time: Math.min(firstStart, lastStart - segment.duration)
26915 };
26916 }
26917 }
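    // A worked example of the shift above (illustrative numbers): with
    // MPEGTS = 900000 and the mandated 90kHz clock, mpegTsInSeconds is
    // 900000 / 90000 = 10. Given LOCAL = 0 and mappingObj.mapping = -10, diff is
    // 10 - 0 + (-10) = 0, so cue times pass through unchanged; any non-zero diff
    // shifts every cue in the segment by that same number of seconds.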
26918 /**
26919 * MPEG-TS PES timestamps are limited to 2^33.
26920 * Once they reach 2^33, they roll over to 0.
26921 * mux.js handles PES timestamp rollover for the following scenarios:
26922     * [forward rollover (right)] ->
26923     * PES timestamps monotonically increase, and once they reach 2^33, they roll over to 0.
26924     * [backward rollover (left)] ->
26925     * we seek back to a position before the rollover.
26926 *
26927 * According to the HLS SPEC:
26928 * When synchronizing WebVTT with PES timestamps, clients SHOULD account
26929 * for cases where the 33-bit PES timestamps have wrapped and the WebVTT
26930 * cue times have not. When the PES timestamp wraps, the WebVTT Segment
26931 * SHOULD have a X-TIMESTAMP-MAP header that maps the current WebVTT
26932 * time to the new (low valued) PES timestamp.
26933 *
26934 * So we want to handle rollover here and align VTT Cue start/end time to the player's time.
26935 */
26936
26937
26938 handleRollover_(value, reference) {
26939 if (reference === null) {
26940 return value;
26941 }
26942
26943 let valueIn90khz = value * clock.ONE_SECOND_IN_TS;
26944 const referenceIn90khz = reference * clock.ONE_SECOND_IN_TS;
26945 let offset;
26946
26947 if (referenceIn90khz < valueIn90khz) {
26948 // - 2^33
26949 offset = -8589934592;
26950 } else {
26951 // + 2^33
26952 offset = 8589934592;
26953 } // distance(value - reference) > 2^32
26954
26955
26956 while (Math.abs(valueIn90khz - referenceIn90khz) > 4294967296) {
26957 valueIn90khz += offset;
26958 }
26959
26960 return valueIn90khz / clock.ONE_SECOND_IN_TS;
26961 }
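    // Worked rollover example (illustrative): 2^33 ticks at 90kHz is
    // 8589934592 / 90000, roughly 95443.7 seconds (about 26.5 hours). If the
    // reference sits near the wrap point at 95000 seconds and the value has
    // wrapped around to 10 seconds, their distance in ticks exceeds 2^32, so one
    // iteration adds 2^33 ticks and the value maps back to roughly 95453.7
    // seconds of continuous media time.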
26962
26963 }
26964
26965 /**
26966 * @file ad-cue-tags.js
26967 */
26968
26969 /**
26970 * Searches for an ad cue that overlaps with the given mediaTime
26971 *
26972 * @param {Object} track
26973 * the track to find the cue for
26974 *
26975 * @param {number} mediaTime
26976 * the time to find the cue at
26977 *
26978 * @return {Object|null}
26979 * the found cue or null
26980 */
26981 const findAdCue = function (track, mediaTime) {
26982 const cues = track.cues;
26983
26984 for (let i = 0; i < cues.length; i++) {
26985 const cue = cues[i];
26986
26987 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
26988 return cue;
26989 }
26990 }
26991
26992 return null;
26993 };
26994 const updateAdCues = function (media, track, offset = 0) {
26995 if (!media.segments) {
26996 return;
26997 }
26998
26999 let mediaTime = offset;
27000 let cue;
27001
27002 for (let i = 0; i < media.segments.length; i++) {
27003 const segment = media.segments[i];
27004
27005 if (!cue) {
27006 // Since the cues will span for at least the segment duration, adding a fudge
27007 // factor of half segment duration will prevent duplicate cues from being
27008 // created when timing info is not exact (e.g. cue start time initialized
27009        // at 10.006677, but next call mediaTime is 10.003332)
27010 cue = findAdCue(track, mediaTime + segment.duration / 2);
27011 }
27012
27013 if (cue) {
27014 if ('cueIn' in segment) {
27015 // Found a CUE-IN so end the cue
27016 cue.endTime = mediaTime;
27017 cue.adEndTime = mediaTime;
27018 mediaTime += segment.duration;
27019 cue = null;
27020 continue;
27021 }
27022
27023 if (mediaTime < cue.endTime) {
27024 // Already processed this mediaTime for this cue
27025 mediaTime += segment.duration;
27026 continue;
27027 } // otherwise extend cue until a CUE-IN is found
27028
27029
27030 cue.endTime += segment.duration;
27031 } else {
27032 if ('cueOut' in segment) {
27033 cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
27034 cue.adStartTime = mediaTime; // Assumes tag format to be
27035 // #EXT-X-CUE-OUT:30
27036
27037 cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
27038 track.addCue(cue);
27039 }
27040
27041 if ('cueOutCont' in segment) {
27042 // Entered into the middle of an ad cue
27043        // Assumes tag format to be
27044 // #EXT-X-CUE-OUT-CONT:10/30
27045 const [adOffset, adTotal] = segment.cueOutCont.split('/').map(parseFloat);
27046 cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, '');
27047 cue.adStartTime = mediaTime - adOffset;
27048 cue.adEndTime = cue.adStartTime + adTotal;
27049 track.addCue(cue);
27050 }
27051 }
27052
27053 mediaTime += segment.duration;
27054 }
27055 };
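  // Walk-through (illustrative): a segment carrying cueOutCont '10/30' at
  // mediaTime 100 means playback joined ten seconds into a thirty second ad
  // break, so the cue is created with adStartTime = 100 - 10 = 90 and
  // adEndTime = 90 + 30 = 120. Subsequent segments extend cue.endTime until a
  // 'cueIn' segment closes the break.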
27056
27057 /**
27058 * @file sync-controller.js
27059 */
27060 // synchronize expired playlist segments.
27061 // the max media sequence diff is 48 hours of live stream
27062 // content with two second segments. Anything larger than that
27063 // will likely be invalid.
27064
27065 const MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
27066  const syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
27067 // the equivalence display-time 0 === segment-index 0
27068 {
27069 name: 'VOD',
27070 run: (syncController, playlist, duration, currentTimeline, currentTime) => {
27071 if (duration !== Infinity) {
27072 const syncPoint = {
27073 time: 0,
27074 segmentIndex: 0,
27075 partIndex: null
27076 };
27077 return syncPoint;
27078 }
27079
27080 return null;
27081 }
27082 }, {
27083 name: 'MediaSequence',
27084
27085 /**
27086 * run media sequence strategy
27087 *
27088 * @param {SyncController} syncController
27089 * @param {Object} playlist
27090 * @param {number} duration
27091 * @param {number} currentTimeline
27092 * @param {number} currentTime
27093 * @param {string} type
27094 */
27095 run: (syncController, playlist, duration, currentTimeline, currentTime, type) => {
27096 if (!type) {
27097 return null;
27098 }
27099
27100 const mediaSequenceMap = syncController.getMediaSequenceMap(type);
27101
27102 if (!mediaSequenceMap || mediaSequenceMap.size === 0) {
27103 return null;
27104 }
27105
27106 if (playlist.mediaSequence === undefined || !Array.isArray(playlist.segments) || !playlist.segments.length) {
27107 return null;
27108 }
27109
27110 let currentMediaSequence = playlist.mediaSequence;
27111 let segmentIndex = 0;
27112
27113 for (const segment of playlist.segments) {
27114 const range = mediaSequenceMap.get(currentMediaSequence);
27115
27116 if (!range) {
27117 // unexpected case
27118 // we expect this playlist to be the same playlist in the map
27119 // just break from the loop and move forward to the next strategy
27120 break;
27121 }
27122
27123 if (currentTime >= range.start && currentTime < range.end) {
27124          // we found the segment
27125 if (Array.isArray(segment.parts) && segment.parts.length) {
27126 let currentPartStart = range.start;
27127 let partIndex = 0;
27128
27129 for (const part of segment.parts) {
27130 const start = currentPartStart;
27131 const end = start + part.duration;
27132
27133 if (currentTime >= start && currentTime < end) {
27134 return {
27135 time: range.start,
27136 segmentIndex,
27137 partIndex
27138 };
27139 }
27140
27141 partIndex++;
27142 currentPartStart = end;
27143 }
27144 } // no parts found, return sync point for segment
27145
27146
27147 return {
27148 time: range.start,
27149 segmentIndex,
27150 partIndex: null
27151 };
27152 }
27153
27154 segmentIndex++;
27155 currentMediaSequence++;
27156      } // we didn't find any segments for the provided current time
27157
27158
27159 return null;
27160 }
27161  }, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
27162 {
27163 name: 'ProgramDateTime',
27164 run: (syncController, playlist, duration, currentTimeline, currentTime) => {
27165 if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
27166 return null;
27167 }
27168
27169 let syncPoint = null;
27170 let lastDistance = null;
27171 const partsAndSegments = getPartsAndSegments(playlist);
27172 currentTime = currentTime || 0;
27173
27174 for (let i = 0; i < partsAndSegments.length; i++) {
27175 // start from the end and loop backwards for live
27176 // or start from the front and loop forwards for non-live
27177 const index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
27178 const partAndSegment = partsAndSegments[index];
27179 const segment = partAndSegment.segment;
27180 const datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
27181
27182 if (!datetimeMapping || !segment.dateTimeObject) {
27183 continue;
27184 }
27185
27186 const segmentTime = segment.dateTimeObject.getTime() / 1000;
27187 let start = segmentTime + datetimeMapping; // take part duration into account.
27188
27189 if (segment.parts && typeof partAndSegment.partIndex === 'number') {
27190 for (let z = 0; z < partAndSegment.partIndex; z++) {
27191 start += segment.parts[z].duration;
27192 }
27193 }
27194
27195 const distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
27196 // currentTime and can stop looking for better candidates
27197
27198 if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
27199 break;
27200 }
27201
27202 lastDistance = distance;
27203 syncPoint = {
27204 time: start,
27205 segmentIndex: partAndSegment.segmentIndex,
27206 partIndex: partAndSegment.partIndex
27207 };
27208 }
27209
27210 return syncPoint;
27211 }
27212 }, // Strategy "Segment": We have a known time mapping for a timeline and a
27213 // segment in the current timeline with timing data
27214 {
27215 name: 'Segment',
27216 run: (syncController, playlist, duration, currentTimeline, currentTime) => {
27217 let syncPoint = null;
27218 let lastDistance = null;
27219 currentTime = currentTime || 0;
27220 const partsAndSegments = getPartsAndSegments(playlist);
27221
27222 for (let i = 0; i < partsAndSegments.length; i++) {
27223 // start from the end and loop backwards for live
27224 // or start from the front and loop forwards for non-live
27225 const index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
27226 const partAndSegment = partsAndSegments[index];
27227 const segment = partAndSegment.segment;
27228 const start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;
27229
27230 if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
27231 const distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
27232 // currentTime and can stop looking for better candidates
27233
27234 if (lastDistance !== null && lastDistance < distance) {
27235 break;
27236 }
27237
27238 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
27239 lastDistance = distance;
27240 syncPoint = {
27241 time: start,
27242 segmentIndex: partAndSegment.segmentIndex,
27243 partIndex: partAndSegment.partIndex
27244 };
27245 }
27246 }
27247 }
27248
27249 return syncPoint;
27250 }
27251 }, // Strategy "Discontinuity": We have a discontinuity with a known
27252 // display-time
27253 {
27254 name: 'Discontinuity',
27255 run: (syncController, playlist, duration, currentTimeline, currentTime) => {
27256 let syncPoint = null;
27257 currentTime = currentTime || 0;
27258
27259 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
27260 let lastDistance = null;
27261
27262 for (let i = 0; i < playlist.discontinuityStarts.length; i++) {
27263 const segmentIndex = playlist.discontinuityStarts[i];
27264 const discontinuity = playlist.discontinuitySequence + i + 1;
27265 const discontinuitySync = syncController.discontinuities[discontinuity];
27266
27267 if (discontinuitySync) {
27268 const distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
27269 // currentTime and can stop looking for better candidates
27270
27271 if (lastDistance !== null && lastDistance < distance) {
27272 break;
27273 }
27274
27275 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
27276 lastDistance = distance;
27277 syncPoint = {
27278 time: discontinuitySync.time,
27279 segmentIndex,
27280 partIndex: null
27281 };
27282 }
27283 }
27284 }
27285 }
27286
27287 return syncPoint;
27288 }
27289 }, // Strategy "Playlist": We have a playlist with a known mapping of
27290 // segment index to display time
27291 {
27292 name: 'Playlist',
27293 run: (syncController, playlist, duration, currentTimeline, currentTime) => {
27294 if (playlist.syncInfo) {
27295 const syncPoint = {
27296 time: playlist.syncInfo.time,
27297 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
27298 partIndex: null
27299 };
27300 return syncPoint;
27301 }
27302
27303 return null;
27304 }
27305 }];
27306 class SyncController extends videojs__default["default"].EventTarget {
27307 constructor(options = {}) {
27308 super(); // ...for syncing across variants
27309
27310 this.timelines = [];
27311 this.discontinuities = [];
27312 this.timelineToDatetimeMappings = {};
27313 /**
27314 * @type {Map<string, Map<number, { start: number, end: number }>>}
27315 * @private
27316 */
27317
27318 this.mediaSequenceStorage_ = new Map();
27319 this.logger_ = logger('SyncController');
27320 }
27321 /**
27322 * Get media sequence map by type
27323 *
27324 * @param {string} type - segment loader type
27325 * @return {Map<number, { start: number, end: number }> | undefined}
27326 */
27327
27328
27329 getMediaSequenceMap(type) {
27330 return this.mediaSequenceStorage_.get(type);
27331 }
27332 /**
27333 * Update Media Sequence Map -> <MediaSequence, Range>
27334 *
27335 * @param {Object} playlist - parsed playlist
27336 * @param {number} currentTime - current player's time
27337 * @param {string} type - segment loader type
27338 * @return {void}
27339 */
27340
27341
27342 updateMediaSequenceMap(playlist, currentTime, type) {
27343 // we should not process this playlist if it does not have mediaSequence or segments
27344 if (playlist.mediaSequence === undefined || !Array.isArray(playlist.segments) || !playlist.segments.length) {
27345 return;
27346 }
27347
27348 const currentMap = this.getMediaSequenceMap(type);
27349 const result = new Map();
27350 let currentMediaSequence = playlist.mediaSequence;
27351 let currentBaseTime;
27352
27353 if (!currentMap) {
27354 // first playlist setup:
27355 currentBaseTime = 0;
27356 } else if (currentMap.has(playlist.mediaSequence)) {
27357 // further playlists setup:
27358 currentBaseTime = currentMap.get(playlist.mediaSequence).start;
27359 } else {
27360 // it seems like we have a gap between playlists, use current time as a fallback:
27361 this.logger_(`MediaSequence sync for ${type} segment loader - received a gap between playlists.
27362Fallback base time to: ${currentTime}.
27363Received media sequence: ${currentMediaSequence}.
27364Current map: `, currentMap);
27365 currentBaseTime = currentTime;
27366 }
27367
27368 this.logger_(`MediaSequence sync for ${type} segment loader.
27369Received media sequence: ${currentMediaSequence}.
27370base time is ${currentBaseTime}
27371Current map: `, currentMap);
27372 playlist.segments.forEach(segment => {
27373 const start = currentBaseTime;
27374 const end = start + segment.duration;
27375 const range = {
27376 start,
27377 end
27378 };
27379 result.set(currentMediaSequence, range);
27380 currentMediaSequence++;
27381 currentBaseTime = end;
27382 });
27383 this.mediaSequenceStorage_.set(type, result);
27384 }
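  // Illustrative sketch (not part of the build): for a first playlist with
  // mediaSequence 100 and two 4-second segments, the map produced above is
  // 100 -> { start: 0, end: 4 }, 101 -> { start: 4, end: 8 }; a later
  // refresh starting at mediaSequence 101 would reuse 4 as its base time.
  //
  //   const sc = new SyncController();
  //   sc.updateMediaSequenceMap({
  //     mediaSequence: 100,
  //     segments: [{ duration: 4 }, { duration: 4 }]
  //   }, 0, 'main');
  //   sc.getMediaSequenceMap('main').get(101); // => { start: 4, end: 8 }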
27385 /**
27386 * Find a sync-point for the playlist specified
27387 *
27388 * A sync-point is defined as a known mapping from display-time to
27389 * a segment-index in the current playlist.
27390 *
27391 * @param {Playlist} playlist
27392 * The playlist that needs a sync-point
27393 * @param {number} duration
27394 * Duration of the MediaSource (Infinite if playing a live source)
27395 * @param {number} currentTimeline
27396 * The last timeline from which a segment was loaded
27397 * @param {number} currentTime
27398 * Current player's time
27399 * @param {string} type
27400 * Segment loader type
27401 * @return {Object}
27402 * A sync-point object
27403 */
27404
27405
27406 getSyncPoint(playlist, duration, currentTimeline, currentTime, type) {
27407 // Always use VOD sync point for VOD
27408 if (duration !== Infinity) {
27409 const vodSyncPointStrategy = syncPointStrategies.find(({
27410 name
27411 }) => name === 'VOD');
27412 return vodSyncPointStrategy.run(this, playlist, duration);
27413 }
27414
27415 const syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime, type);
27416
27417 if (!syncPoints.length) {
27418 // Signal that we need to attempt to get a sync-point manually
27419 // by fetching a segment in the playlist and constructing
27420 // a sync-point from that information
27421 return null;
27422 } // If we have an exact match, just return it instead of finding the nearest distance
27423
27424
27425 for (const syncPointInfo of syncPoints) {
27426 const {
27427 syncPoint,
27428 strategy
27429 } = syncPointInfo;
27430 const {
27431 segmentIndex,
27432 time
27433 } = syncPoint;
27434
27435 if (segmentIndex < 0) {
27436 continue;
27437 }
27438
27439 const selectedSegment = playlist.segments[segmentIndex];
27440 const start = time;
27441 const end = start + selectedSegment.duration;
27442 this.logger_(`Strategy: ${strategy}. Current time: ${currentTime}. Selected segment: ${segmentIndex}. Time: [${start} -> ${end}]`);
27443
27444 if (currentTime >= start && currentTime < end) {
27445 this.logger_('Found sync point with exact match: ', syncPoint);
27446 return syncPoint;
27447 }
27448 } // Now find the sync-point that is closest to the currentTime because
27449 // that should result in the most accurate guess about which segment
27450 // to fetch
27451
27452
27453 return this.selectSyncPoint_(syncPoints, {
27454 key: 'time',
27455 value: currentTime
27456 });
27457 }
27458 /**
27459 * Calculate the amount of time that has expired off the playlist during playback
27460 *
27461 * @param {Playlist} playlist
27462 * Playlist object to calculate expired from
27463 * @param {number} duration
27464 * Duration of the MediaSource (Infinity if playing a live source)
27465 * @return {number|null}
27466 * The amount of time that has expired off the playlist during playback. Null
27467 * if no sync-points for the playlist can be found.
27468 */
27469
27470
27471 getExpiredTime(playlist, duration) {
27472 if (!playlist || !playlist.segments) {
27473 return null;
27474 }
27475
27476 const syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0, 'main'); // Without sync-points, there is not enough information to determine the expired time
27477
27478 if (!syncPoints.length) {
27479 return null;
27480 }
27481
27482 const syncPoint = this.selectSyncPoint_(syncPoints, {
27483 key: 'segmentIndex',
27484 value: 0
27485 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
27486 // duration from index 0 to syncPoint.segmentIndex instead of adding.
27487
27488 if (syncPoint.segmentIndex > 0) {
27489 syncPoint.time *= -1;
27490 }
27491
27492 return Math.abs(syncPoint.time + sumDurations({
27493 defaultDuration: playlist.targetDuration,
27494 durationList: playlist.segments,
27495 startIndex: syncPoint.segmentIndex,
27496 endIndex: 0
27497 }));
27498 }
27499 /**
27500 * Runs each sync-point strategy and returns a list of sync-points returned by the
27501 * strategies
27502 *
27503 * @private
27504 * @param {Playlist} playlist
27505 * The playlist that needs a sync-point
27506 * @param {number} duration
27507 * Duration of the MediaSource (Infinity if playing a live source)
27508 * @param {number} currentTimeline
27509 * The last timeline from which a segment was loaded
27510 * @param {number} currentTime
27511 * Current player's time
27512 * @param {string} type
27513 * Segment loader type
27514 * @return {Array}
27515 * A list of sync-point objects
27516 */
27517
27518
27519 runStrategies_(playlist, duration, currentTimeline, currentTime, type) {
27520 const syncPoints = []; // Try to find a sync-point by utilizing various strategies...
27521
27522 for (let i = 0; i < syncPointStrategies.length; i++) {
27523 const strategy = syncPointStrategies[i];
27524 const syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime, type);
27525
27526 if (syncPoint) {
27527 syncPoint.strategy = strategy.name;
27528 syncPoints.push({
27529 strategy: strategy.name,
27530 syncPoint
27531 });
27532 }
27533 }
27534
27535 return syncPoints;
27536 }
27537 /**
27538 * Selects the sync-point nearest the specified target
27539 *
27540 * @private
27541 * @param {Array} syncPoints
27542 * List of sync-points to select from
27543 * @param {Object} target
27544 * Object specifying the property and value we are targeting
27545 * @param {string} target.key
27546 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
27547 * @param {number} target.value
27548 * The value to target for the specified key.
27549 * @return {Object}
27550 * The sync-point nearest the target
27551 */
27552
27553
27554 selectSyncPoint_(syncPoints, target) {
27555 let bestSyncPoint = syncPoints[0].syncPoint;
27556 let bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
27557 let bestStrategy = syncPoints[0].strategy;
27558
27559 for (let i = 1; i < syncPoints.length; i++) {
27560 const newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
27561
27562 if (newDistance < bestDistance) {
27563 bestDistance = newDistance;
27564 bestSyncPoint = syncPoints[i].syncPoint;
27565 bestStrategy = syncPoints[i].strategy;
27566 }
27567 }
27568
27569 this.logger_(`syncPoint for [${target.key}: ${target.value}] chosen with strategy` + ` [${bestStrategy}]: [time:${bestSyncPoint.time},` + ` segmentIndex:${bestSyncPoint.segmentIndex}` + (typeof bestSyncPoint.partIndex === 'number' ? `,partIndex:${bestSyncPoint.partIndex}` : '') + ']');
27570 return bestSyncPoint;
27571 }
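  // Illustrative sketch (not part of the build): given two candidates, the
  // one whose target key is nearest the target value wins.
  //
  //   selectSyncPoint_([
  //     { strategy: 'Segment', syncPoint: { time: 10, segmentIndex: 2 } },
  //     { strategy: 'Discontinuity', syncPoint: { time: 31, segmentIndex: 7 } }
  //   ], { key: 'time', value: 28 });
  //   // => { time: 31, segmentIndex: 7 } (distance 3 beats distance 18)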
27572 /**
27573 * Save any meta-data present on the segments when segments leave
27574 * the live window to the playlist to allow for synchronization at the
27575 * playlist level later.
27576 *
27577 * @param {Playlist} oldPlaylist - The previous active playlist
27578 * @param {Playlist} newPlaylist - The updated and most current playlist
27579 */
27580
27581
27582 saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
27583 const mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps
27584
27585 if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
27586 videojs__default["default"].log.warn(`Not saving expired segment info. Media sequence gap ${mediaSequenceDiff} is too large.`);
27587 return;
27588 } // When a segment expires from the playlist and it has a start time
27589 // save that information as a possible sync-point reference in future
27590
27591
27592 for (let i = mediaSequenceDiff - 1; i >= 0; i--) {
27593 const lastRemovedSegment = oldPlaylist.segments[i];
27594
27595 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
27596 newPlaylist.syncInfo = {
27597 mediaSequence: oldPlaylist.mediaSequence + i,
27598 time: lastRemovedSegment.start
27599 };
27600 this.logger_(`playlist refresh sync: [time:${newPlaylist.syncInfo.time},` + ` mediaSequence: ${newPlaylist.syncInfo.mediaSequence}]`);
27601 this.trigger('syncinfoupdate');
27602 break;
27603 }
27604 }
27605 }
27606 /**
27607 * Save the mapping from the playlist's ProgramDateTime to display time. This should only happen
27608 * before segments start to load.
27609 *
27610 * @param {Playlist} playlist - The currently active playlist
27611 */
27612
27613
27614 setDateTimeMappingForStart(playlist) {
27615 // It's possible for the playlist to be updated before playback starts, meaning time
27616 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
27617 // crossed, then the old time zero mapping (for the prior timeline) would be retained
27618 // unless the mappings are cleared.
27619 this.timelineToDatetimeMappings = {};
27620
27621 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
27622 const firstSegment = playlist.segments[0];
27623 const playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
27624 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
27625 }
27626 }
27627 /**
27628 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
27629 * based on the latest timing information.
27630 *
27631 * @param {Object} options
27632 * Options object
27633 * @param {SegmentInfo} options.segmentInfo
27634 * The current active request information
27635 * @param {boolean} options.shouldSaveTimelineMapping
27636 * If there's a timeline change, determines if the timeline mapping should be
27637 * saved for timeline mapping and program date time mappings.
27638 */
27639
27640
27641 saveSegmentTimingInfo({
27642 segmentInfo,
27643 shouldSaveTimelineMapping
27644 }) {
27645 const didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
27646 const segment = segmentInfo.segment;
27647
27648 if (didCalculateSegmentTimeMapping) {
27649 this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
27650 // now with segment timing information
27651
27652 if (!segmentInfo.playlist.syncInfo) {
27653 segmentInfo.playlist.syncInfo = {
27654 mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
27655 time: segment.start
27656 };
27657 }
27658 }
27659
27660 const dateTime = segment.dateTimeObject;
27661
27662 if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
27663 this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
27664 }
27665 }
27666
27667 timestampOffsetForTimeline(timeline) {
27668 if (typeof this.timelines[timeline] === 'undefined') {
27669 return null;
27670 }
27671
27672 return this.timelines[timeline].time;
27673 }
27674
27675 mappingForTimeline(timeline) {
27676 if (typeof this.timelines[timeline] === 'undefined') {
27677 return null;
27678 }
27679
27680 return this.timelines[timeline].mapping;
27681 }
27682 /**
27683 * Use the "media time" for a segment to generate a mapping to "display time" and
27684 * save that display time to the segment.
27685 *
27686 * @private
27687 * @param {SegmentInfo} segmentInfo
27688 * The current active request information
27689 * @param {Object} timingInfo
27690 * The start and end time of the current segment in "media time"
27691 * @param {boolean} shouldSaveTimelineMapping
27692 * If there's a timeline change, determines if the timeline mapping should be
27693 * saved in timelines.
27694 * @return {boolean}
27695 * Returns false if segment time mapping could not be calculated
27696 */
27697
27698
27699 calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
27700 // TODO: remove side effects
27701 const segment = segmentInfo.segment;
27702 const part = segmentInfo.part;
27703 let mappingObj = this.timelines[segmentInfo.timeline];
27704 let start;
27705 let end;
27706
27707 if (typeof segmentInfo.timestampOffset === 'number') {
27708 mappingObj = {
27709 time: segmentInfo.startOfSegment,
27710 mapping: segmentInfo.startOfSegment - timingInfo.start
27711 };
27712
27713 if (shouldSaveTimelineMapping) {
27714 this.timelines[segmentInfo.timeline] = mappingObj;
27715 this.trigger('timestampoffset');
27716 this.logger_(`time mapping for timeline ${segmentInfo.timeline}: ` + `[time: ${mappingObj.time}] [mapping: ${mappingObj.mapping}]`);
27717 }
27718
27719 start = segmentInfo.startOfSegment;
27720 end = timingInfo.end + mappingObj.mapping;
27721 } else if (mappingObj) {
27722 start = timingInfo.start + mappingObj.mapping;
27723 end = timingInfo.end + mappingObj.mapping;
27724 } else {
27725 return false;
27726 }
27727
27728 if (part) {
27729 part.start = start;
27730 part.end = end;
27731 } // If we don't have a segment start yet or the start value we got
27732 // is less than our current segment.start value, save a new start value.
27733 // We have to do this because parts will have segment timing info saved
27734 // multiple times and we want segment start to be the earliest part start
27735 // value for that segment.
27736
27737
27738 if (!segment.start || start < segment.start) {
27739 segment.start = start;
27740 }
27741
27742 segment.end = end;
27743 return true;
27744 }
27745 /**
27746 * Each time we have a discontinuity in the playlist, attempt to calculate the location
27747 * in display time of the start of the discontinuity and save that. We also save an accuracy
27748 * value so that we save the values with the most accuracy (closest to 0).
27749 *
27750 * @private
27751 * @param {SegmentInfo} segmentInfo - The current active request information
27752 */
27753
27754
27755 saveDiscontinuitySyncInfo_(segmentInfo) {
27756 const playlist = segmentInfo.playlist;
27757 const segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
27758 // the range starts and its accuracy is 0 (greater accuracy values
27759 // mean more approximation)
27760
27761 if (segment.discontinuity) {
27762 this.discontinuities[segment.timeline] = {
27763 time: segment.start,
27764 accuracy: 0
27765 };
27766 } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
27767 // Search for future discontinuities that we can provide better timing
27768 // information for and save that information for sync purposes
27769 for (let i = 0; i < playlist.discontinuityStarts.length; i++) {
27770 const segmentIndex = playlist.discontinuityStarts[i];
27771 const discontinuity = playlist.discontinuitySequence + i + 1;
27772 const mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
27773 const accuracy = Math.abs(mediaIndexDiff);
27774
27775 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
27776 let time;
27777
27778 if (mediaIndexDiff < 0) {
27779 time = segment.start - sumDurations({
27780 defaultDuration: playlist.targetDuration,
27781 durationList: playlist.segments,
27782 startIndex: segmentInfo.mediaIndex,
27783 endIndex: segmentIndex
27784 });
27785 } else {
27786 time = segment.end + sumDurations({
27787 defaultDuration: playlist.targetDuration,
27788 durationList: playlist.segments,
27789 startIndex: segmentInfo.mediaIndex + 1,
27790 endIndex: segmentIndex
27791 });
27792 }
27793
27794 this.discontinuities[discontinuity] = {
27795 time,
27796 accuracy
27797 };
27798 }
27799 }
27800 }
27801 }
27802
27803 dispose() {
27804 this.trigger('dispose');
27805 this.off();
27806 }
27807
27808 }
27809
27810 /**
27811 * The TimelineChangeController acts as a source for segment loaders to listen for and
27812 * keep track of the latest and pending timeline changes. This is useful to ensure proper
27813 * sync, as each loader may need to consider what timeline the other
27814 * loader is on before making changes that could impact the other loader's media.
27815 *
27816 * @class TimelineChangeController
27817 * @extends videojs.EventTarget
27818 */
27819
27820 class TimelineChangeController extends videojs__default["default"].EventTarget {
27821 constructor() {
27822 super();
27823 this.pendingTimelineChanges_ = {};
27824 this.lastTimelineChanges_ = {};
27825 }
27826
27827 clearPendingTimelineChange(type) {
27828 this.pendingTimelineChanges_[type] = null;
27829 this.trigger('pendingtimelinechange');
27830 }
27831
27832 pendingTimelineChange({
27833 type,
27834 from,
27835 to
27836 }) {
27837 if (typeof from === 'number' && typeof to === 'number') {
27838 this.pendingTimelineChanges_[type] = {
27839 type,
27840 from,
27841 to
27842 };
27843 this.trigger('pendingtimelinechange');
27844 }
27845
27846 return this.pendingTimelineChanges_[type];
27847 }
27848
27849 lastTimelineChange({
27850 type,
27851 from,
27852 to
27853 }) {
27854 if (typeof from === 'number' && typeof to === 'number') {
27855 this.lastTimelineChanges_[type] = {
27856 type,
27857 from,
27858 to
27859 };
27860 delete this.pendingTimelineChanges_[type];
27861 this.trigger('timelinechange');
27862 }
27863
27864 return this.lastTimelineChanges_[type];
27865 }
27866
27867 dispose() {
27868 this.trigger('dispose');
27869 this.pendingTimelineChanges_ = {};
27870 this.lastTimelineChanges_ = {};
27871 this.off();
27872 }
27873
27874 }
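  // Usage sketch (illustrative only): a loader announces an upcoming timeline
  // change, then commits it once the segment is appended.
  //
  //   const tcc = new TimelineChangeController();
  //   tcc.pendingTimelineChange({ type: 'main', from: 0, to: 1 });
  //   tcc.lastTimelineChange({ type: 'main', from: 0, to: 1 });
  //   // the pending entry for 'main' is cleared and 'timelinechange' fires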
27875
27876 /* rollup-plugin-worker-factory start for worker!/home/runner/work/http-streaming/http-streaming/src/decrypter-worker.js */
27877 const workerCode = transform(getWorkerString(function () {
27878 /**
27879 * @file stream.js
27880 */
27881
27882 /**
27883 * A lightweight readable stream implementation that handles event dispatching.
27884 *
27885 * @class Stream
27886 */
27887
27888 var Stream = /*#__PURE__*/function () {
27889 function Stream() {
27890 this.listeners = {};
27891 }
27892 /**
27893 * Add a listener for a specified event type.
27894 *
27895 * @param {string} type the event name
27896 * @param {Function} listener the callback to be invoked when an event of
27897 * the specified type occurs
27898 */
27899
27900
27901 var _proto = Stream.prototype;
27902
27903 _proto.on = function on(type, listener) {
27904 if (!this.listeners[type]) {
27905 this.listeners[type] = [];
27906 }
27907
27908 this.listeners[type].push(listener);
27909 }
27910 /**
27911 * Remove a listener for a specified event type.
27912 *
27913 * @param {string} type the event name
27914 * @param {Function} listener a function previously registered for this
27915 * type of event through `on`
27916 * @return {boolean} if we could turn it off or not
27917 */
27918 ;
27919
27920 _proto.off = function off(type, listener) {
27921 if (!this.listeners[type]) {
27922 return false;
27923 }
27924
27925 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
27926 // In Video.js we slice listener functions
27927 // on trigger so that it does not mess up the order
27928 // while we loop through.
27929 //
27930 // Here we slice on off so that the loop in trigger
27931 // can continue using its old reference to loop without
27932 // messing up the order.
27933
27934 this.listeners[type] = this.listeners[type].slice(0);
27935 this.listeners[type].splice(index, 1);
27936 return index > -1;
27937 }
27938 /**
27939 * Trigger an event of the specified type on this stream. Any additional
27940 * arguments to this function are passed as parameters to event listeners.
27941 *
27942 * @param {string} type the event name
27943 */
27944 ;
27945
27946 _proto.trigger = function trigger(type) {
27947 var callbacks = this.listeners[type];
27948
27949 if (!callbacks) {
27950 return;
27951 } // Slicing the arguments on every invocation of this method
27952 // can add a significant amount of overhead. Avoid the
27953 // intermediate object creation for the common case of a
27954 // single callback argument
27955
27956
27957 if (arguments.length === 2) {
27958 var length = callbacks.length;
27959
27960 for (var i = 0; i < length; ++i) {
27961 callbacks[i].call(this, arguments[1]);
27962 }
27963 } else {
27964 var args = Array.prototype.slice.call(arguments, 1);
27965 var _length = callbacks.length;
27966
27967 for (var _i = 0; _i < _length; ++_i) {
27968 callbacks[_i].apply(this, args);
27969 }
27970 }
27971 }
27972 /**
27973 * Destroys the stream and cleans up.
27974 */
27975 ;
27976
27977 _proto.dispose = function dispose() {
27978 this.listeners = {};
27979 }
27980 /**
27981 * Forwards all `data` events on this stream to the destination stream. The
27982 * destination stream should provide a method `push` to receive the data
27983 * events as they arrive.
27984 *
27985 * @param {Stream} destination the stream that will receive all `data` events
27986 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
27987 */
27988 ;
27989
27990 _proto.pipe = function pipe(destination) {
27991 this.on('data', function (data) {
27992 destination.push(data);
27993 });
27994 };
27995
27996 return Stream;
27997 }();
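  // Usage sketch for this worker-local Stream (illustrative; otherStream is
  // a hypothetical second Stream with a push() method):
  //
  //   var s = new Stream();
  //   s.on('data', function (d) { console.log('got', d); });
  //   s.trigger('data', 1); // logs: got 1
  //   s.pipe(otherStream);  // forwards future 'data' events via push()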
27998 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
27999
28000 /**
28001 * Returns the subarray of a Uint8Array without PKCS#7 padding.
28002 *
28003 * @param padded {Uint8Array} unencrypted bytes that have been padded
28004 * @return {Uint8Array} the unpadded bytes
28005 * @see http://tools.ietf.org/html/rfc5652
28006 */
28007
28008
28009 function unpad(padded) {
28010 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
28011 }
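  // Example (illustrative): PKCS#7 pads a block with N bytes of value N, so
  // the last byte tells us how much to strip.
  //
  //   unpad(new Uint8Array([0x68, 0x69, 6, 6, 6, 6, 6, 6]));
  //   // => Uint8Array [0x68, 0x69] ('hi')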
28012 /*! @name aes-decrypter @version 4.0.1 @license Apache-2.0 */
28013
28014 /**
28015 * @file aes.js
28016 *
28017 * This file contains an adaptation of the AES decryption algorithm
28018 * from the Stanford Javascript Cryptography Library. That work is
28019 * covered by the following copyright and permissions notice:
28020 *
28021 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
28022 * All rights reserved.
28023 *
28024 * Redistribution and use in source and binary forms, with or without
28025 * modification, are permitted provided that the following conditions are
28026 * met:
28027 *
28028 * 1. Redistributions of source code must retain the above copyright
28029 * notice, this list of conditions and the following disclaimer.
28030 *
28031 * 2. Redistributions in binary form must reproduce the above
28032 * copyright notice, this list of conditions and the following
28033 * disclaimer in the documentation and/or other materials provided
28034 * with the distribution.
28035 *
28036 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
28037 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
28038 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
28039 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
28040 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
28041 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
28042 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
28043 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
28044 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
28045 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
28046 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28047 *
28048 * The views and conclusions contained in the software and documentation
28049 * are those of the authors and should not be interpreted as representing
28050 * official policies, either expressed or implied, of the authors.
28051 */
28052
28053 /**
28054 * Expand the S-box tables.
28055 *
28056 * @private
28057 */
28058
28059
28060 const precompute = function () {
28061 const tables = [[[], [], [], [], []], [[], [], [], [], []]];
28062 const encTable = tables[0];
28063 const decTable = tables[1];
28064 const sbox = encTable[4];
28065 const sboxInv = decTable[4];
28066 let i;
28067 let x;
28068 let xInv;
28069 const d = [];
28070 const th = [];
28071 let x2;
28072 let x4;
28073 let x8;
28074 let s;
28075 let tEnc;
28076 let tDec; // Compute double and third tables
28077
28078 for (i = 0; i < 256; i++) {
28079 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
28080 }
28081
28082 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
28083 // Compute sbox
28084 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
28085 s = s >> 8 ^ s & 255 ^ 99;
28086 sbox[x] = s;
28087 sboxInv[s] = x; // Compute MixColumns
28088
28089 x8 = d[x4 = d[x2 = d[x]]];
28090 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
28091 tEnc = d[s] * 0x101 ^ s * 0x1010100;
28092
28093 for (i = 0; i < 4; i++) {
28094 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
28095 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
28096 }
28097 } // Compactify. Considerable speedup on Firefox.
28098
28099
28100 for (i = 0; i < 5; i++) {
28101 encTable[i] = encTable[i].slice(0);
28102 decTable[i] = decTable[i].slice(0);
28103 }
28104
28105 return tables;
28106 };
28107
28108 let aesTables = null;
28109 /**
28110 * Schedule out an AES key for both encryption and decryption. This
28111 * is a low-level class. Use a cipher mode to do bulk encryption.
28112 *
28113 * @class AES
28114 * @param key {Array} The key as an array of 4, 6 or 8 words.
28115 */
28116
28117 class AES {
28118 constructor(key) {
28119 /**
28120 * The expanded S-box and inverse S-box tables. These will be computed
28121 * on the client so that we don't have to send them down the wire.
28122 *
28123 * There are two tables, _tables[0] is for encryption and
28124 * _tables[1] is for decryption.
28125 *
28126 * The first 4 sub-tables are the expanded S-box with MixColumns. The
28127 * last (_tables[01][4]) is the S-box itself.
28128 *
28129 * @private
28130 */
28131 // if we have yet to precompute the S-box tables
28132 // do so now
28133 if (!aesTables) {
28134 aesTables = precompute();
28135 } // then make a copy of that object for use
28136
28137
28138 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
28139 let i;
28140 let j;
28141 let tmp;
28142 const sbox = this._tables[0][4];
28143 const decTable = this._tables[1];
28144 const keyLen = key.length;
28145 let rcon = 1;
28146
28147 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
28148 throw new Error('Invalid aes key size');
28149 }
28150
28151 const encKey = key.slice(0);
28152 const decKey = [];
28153 this._key = [encKey, decKey]; // schedule encryption keys
28154
28155 for (i = keyLen; i < 4 * keyLen + 28; i++) {
28156 tmp = encKey[i - 1]; // apply sbox
28157
28158 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
28159 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
28160
28161 if (i % keyLen === 0) {
28162 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
28163 rcon = rcon << 1 ^ (rcon >> 7) * 283;
28164 }
28165 }
28166
28167 encKey[i] = encKey[i - keyLen] ^ tmp;
28168 } // schedule decryption keys
28169
28170
28171 for (j = 0; i; j++, i--) {
28172 tmp = encKey[j & 3 ? i : i - 4];
28173
28174 if (i <= 4 || j < 4) {
28175 decKey[j] = tmp;
28176 } else {
28177 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
28178 }
28179 }
28180 }
28181 /**
28182 * Decrypt 16 bytes, specified as four 32-bit words.
28183 *
28184 * @param {number} encrypted0 the first word to decrypt
28185 * @param {number} encrypted1 the second word to decrypt
28186 * @param {number} encrypted2 the third word to decrypt
28187 * @param {number} encrypted3 the fourth word to decrypt
28188 * @param {Int32Array} out the array to write the decrypted words
28189 * into
28190 * @param {number} offset the offset into the output array to start
28191 * writing results
28192 * @return {Array} The plaintext.
28193 */
28194
28195
28196 decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
28197 const key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
28198
28199 let a = encrypted0 ^ key[0];
28200 let b = encrypted3 ^ key[1];
28201 let c = encrypted2 ^ key[2];
28202 let d = encrypted1 ^ key[3];
28203 let a2;
28204 let b2;
28205 let c2; // key.length === 2 ?
28206
28207 const nInnerRounds = key.length / 4 - 2;
28208 let i;
28209 let kIndex = 4;
28210 const table = this._tables[1]; // load up the tables
28211
28212 const table0 = table[0];
28213 const table1 = table[1];
28214 const table2 = table[2];
28215 const table3 = table[3];
28216 const sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
28217
28218 for (i = 0; i < nInnerRounds; i++) {
28219 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
28220 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
28221 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
28222 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
28223 kIndex += 4;
28224 a = a2;
28225 b = b2;
28226 c = c2;
28227 } // Last round.
28228
28229
28230 for (i = 0; i < 4; i++) {
28231 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
28232 a2 = a;
28233 a = b;
28234 b = c;
28235 c = d;
28236 d = a2;
28237 }
28238 }
28239
28240 }
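  // Usage shape (illustrative; k0..k3 and w0..w3 stand in for four 32-bit key
  // words and four big-endian words of one 16-byte ciphertext block, not real
  // test vectors):
  //
  //   var aes = new AES([k0, k1, k2, k3]); // 128-bit key as four 32-bit words
  //   var out = new Int32Array(4);
  //   aes.decrypt(w0, w1, w2, w3, out, 0); // writes one decrypted block at offset 0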
28241 /**
28242 * @file async-stream.js
28243 */
28244
28245 /**
28246 * A wrapper around the Stream class to use setTimeout
28247 * and run stream "jobs" Asynchronously
28248 *
28249 * @class AsyncStream
28250 * @extends Stream
28251 */
28252
28253
28254 class AsyncStream extends Stream {
28255 constructor() {
28256 super();
28257 this.jobs = [];
28258 this.delay = 1;
28259 this.timeout_ = null;
28260 }
28261 /**
28262 * process an async job
28263 *
28264 * @private
28265 */
28266
28267
28268 processJob_() {
28269 this.jobs.shift()();
28270
28271 if (this.jobs.length) {
28272 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
28273 } else {
28274 this.timeout_ = null;
28275 }
28276 }
28277 /**
28278 * push a job into the stream
28279 *
28280 * @param {Function} job the job to push into the stream
28281 */
28282
28283
28284 push(job) {
28285 this.jobs.push(job);
28286
28287 if (!this.timeout_) {
28288 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
28289 }
28290 }
28291
28292 }
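  // Usage sketch (illustrative): jobs run one per setTimeout tick so a large
  // decryption never blocks the worker for long.
  //
  //   var as = new AsyncStream();
  //   as.push(function () { /* decrypt chunk 1 */ });
  //   as.push(function () { /* decrypt chunk 2, runs ~1ms later */ });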
28293 /**
28294 * @file decrypter.js
28295 *
28296 * An asynchronous implementation of AES-128 CBC decryption with
28297 * PKCS#7 padding.
28298 */
28299
28300 /**
28301 * Convert network-order (big-endian) bytes into their little-endian
28302 * representation.
28303 */
28304
28305
28306 const ntoh = function (word) {
28307 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
28308 };
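  // Example (illustrative): ntoh reverses the byte order of a 32-bit word.
  //
  //   (ntoh(0x11223344) >>> 0).toString(16); // => '44332211'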
28309 /**
28310 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
28311 *
28312 * @param {Uint8Array} encrypted the encrypted bytes
28313 * @param {Uint32Array} key the bytes of the decryption key
28314 * @param {Uint32Array} initVector the initialization vector (IV) to
28315 * use for the first round of CBC.
28316 * @return {Uint8Array} the decrypted bytes
28317 *
28318 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
28319 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
28320 * @see https://tools.ietf.org/html/rfc2315
28321 */
28322
28323
28324 const decrypt = function (encrypted, key, initVector) {
28325 // word-level access to the encrypted bytes
28326 const encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
28327 const decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
28328
28329 const decrypted = new Uint8Array(encrypted.byteLength);
28330 const decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
28331 // decrypted data
28332
28333 let init0;
28334 let init1;
28335 let init2;
28336 let init3;
28337 let encrypted0;
28338 let encrypted1;
28339 let encrypted2;
28340 let encrypted3; // iteration variable
28341
28342 let wordIx; // pull out the words of the IV to ensure we don't modify the
28343 // passed-in reference and easier access
28344
28345 init0 = initVector[0];
28346 init1 = initVector[1];
28347 init2 = initVector[2];
28348 init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
28349 // to each decrypted block
28350
28351 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
28352 // convert big-endian (network order) words into little-endian
28353 // (javascript order)
28354 encrypted0 = ntoh(encrypted32[wordIx]);
28355 encrypted1 = ntoh(encrypted32[wordIx + 1]);
28356 encrypted2 = ntoh(encrypted32[wordIx + 2]);
28357 encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
28358
28359 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
28360 // plaintext
28361
28362 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
28363 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
28364 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
28365 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
28366
28367 init0 = encrypted0;
28368 init1 = encrypted1;
28369 init2 = encrypted2;
28370 init3 = encrypted3;
28371 }
28372
28373 return decrypted;
28374 };
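  // Usage shape (illustrative; key and IV values are placeholders):
  //
  //   var plain = decrypt(
  //     encryptedBytes,     // Uint8Array whose length is a multiple of 16
  //     new Uint32Array(4), // 128-bit key as four 32-bit words
  //     new Uint32Array(4)  // IV as four 32-bit words
  //   );
  //   // plain still carries its PKCS#7 padding; callers unpad() afterwards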
28375 /**
28376 * The `Decrypter` class that manages decryption of AES
28377 * data through `AsyncStream` objects and the `decrypt`
28378 * function
28379 *
28380 * @param {Uint8Array} encrypted the encrypted bytes
28381 * @param {Uint32Array} key the bytes of the decryption key
28382 * @param {Uint32Array} initVector the initialization vector (IV) to
28383 * @param {Function} done the function to run when done
28384 * @class Decrypter
28385 */
28386
28387
28388 class Decrypter {
28389 constructor(encrypted, key, initVector, done) {
28390 const step = Decrypter.STEP;
28391 const encrypted32 = new Int32Array(encrypted.buffer);
28392 const decrypted = new Uint8Array(encrypted.byteLength);
28393 let i = 0;
28394 this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously
28395
28396 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
28397
28398 for (i = step; i < encrypted32.length; i += step) {
28399 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
28400 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
28401 } // invoke the done() callback when everything is finished
28402
28403
28404 this.asyncStream_.push(function () {
28405 // remove pkcs#7 padding from the decrypted bytes
28406 done(null, unpad(decrypted));
28407 });
28408 }
28409 /**
28410 * A getter for STEP, the maximum number of bytes to process at one time
28411 *
28412 * @return {number} the value of STEP (32000)
28413 */
28414
28415
28416 static get STEP() {
28417 // 4 * 8000;
28418 return 32000;
28419 }
28420 /**
28421 * @private
28422 */
28423
28424
28425 decryptChunk_(encrypted, key, initVector, decrypted) {
28426 return function () {
28427 const bytes = decrypt(encrypted, key, initVector);
28428 decrypted.set(bytes, encrypted.byteOffset);
28429 };
28430 }
28431
28432 }
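  // Usage sketch (illustrative): decryption is chunked through AsyncStream,
  // and the callback receives the unpadded plaintext.
  //
  //   new Decrypter(encryptedBytes, keyWords, ivWords, function (err, bytes) {
  //     // err is always null here; bytes is a Uint8Array of plaintext
  //   });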
28433
28434 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
28435 var win;
28436
28437 if (typeof window !== "undefined") {
28438 win = window;
28439 } else if (typeof commonjsGlobal !== "undefined") {
28440 win = commonjsGlobal;
28441 } else if (typeof self !== "undefined") {
28442 win = self;
28443 } else {
28444 win = {};
28445 }
28446
28447 var window_1 = win;
28448
28449 var isArrayBufferView = function isArrayBufferView(obj) {
28450 if (typeof ArrayBuffer.isView === 'function') {
28451 return ArrayBuffer.isView(obj);
28452 }
28453
28454 return obj && obj.buffer instanceof ArrayBuffer;
28455 };
28456
28457 var BigInt = window_1.BigInt || Number;
28458 [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
28459
28460 (function () {
28461 var a = new Uint16Array([0xFFCC]);
28462 var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
28463
28464 if (b[0] === 0xFF) {
28465 return 'big';
28466 }
28467
28468 if (b[0] === 0xCC) {
28469 return 'little';
28470 }
28471
28472 return 'unknown';
28473 })();
28474 /**
28475 * Creates an object for sending to a web worker modifying properties that are TypedArrays
28476 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
28477 *
28478 * @param {Object} message
28479 * Object of properties and values to send to the web worker
28480 * @return {Object}
28481 * Modified message with TypedArray values expanded
28482 * @function createTransferableMessage
28483 */
28484
28485
28486 const createTransferableMessage = function (message) {
28487 const transferable = {};
28488 Object.keys(message).forEach(key => {
28489 const value = message[key];
28490
28491 if (isArrayBufferView(value)) {
28492 transferable[key] = {
28493 bytes: value.buffer,
28494 byteOffset: value.byteOffset,
28495 byteLength: value.byteLength
28496 };
28497 } else {
28498 transferable[key] = value;
28499 }
28500 });
28501 return transferable;
28502 };
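  // Example (illustrative): typed-array properties are flattened so their
  // underlying buffers can be listed as transferables in postMessage.
  //
  //   createTransferableMessage({ source: 1, encrypted: new Uint8Array(4) });
  //   // => { source: 1,
  //   //      encrypted: { bytes: ArrayBuffer(4), byteOffset: 0, byteLength: 4 } }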
28503 /* global self */
28504
28505 /**
28506 * Our web worker interface so that things can talk to aes-decrypter
28507 * that will be running in a web worker. the scope is passed to this by
28508 * webworkify.
28509 */
28510
28511
28512 self.onmessage = function (event) {
28513 const data = event.data;
28514 const encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
28515 const key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
28516 const iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
28517 /* eslint-disable no-new, handle-callback-err */
28518
28519 new Decrypter(encrypted, key, iv, function (err, bytes) {
28520 self.postMessage(createTransferableMessage({
28521 source: data.source,
28522 decrypted: bytes
28523 }), [bytes.buffer]);
28524 });
28525 /* eslint-enable */
28526 };
28527 }));
28528 var Decrypter = factory(workerCode);
28529 /* rollup-plugin-worker-factory end for worker!/home/runner/work/http-streaming/http-streaming/src/decrypter-worker.js */
28530
28531 /**
28532 * Convert the properties of an HLS track into an audioTrackKind.
28533 *
28534 * @private
28535 */
28536
28537 const audioTrackKind_ = properties => {
28538 let kind = properties.default ? 'main' : 'alternative';
28539
28540 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
28541 kind = 'main-desc';
28542 }
28543
28544 return kind;
28545 };
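  // Examples (illustrative):
  //
  //   audioTrackKind_({ default: true }); // => 'main'
  //   audioTrackKind_({
  //     default: false,
  //     characteristics: 'public.accessibility.describes-video'
  //   }); // => 'main-desc'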
28546 /**
28547 * Pause provided segment loader and playlist loader if active
28548 *
28549 * @param {SegmentLoader} segmentLoader
28550 * SegmentLoader to pause
28551 * @param {Object} mediaType
28552 * Active media type
28553 * @function stopLoaders
28554 */
28555
28556
28557 const stopLoaders = (segmentLoader, mediaType) => {
28558 segmentLoader.abort();
28559 segmentLoader.pause();
28560
28561 if (mediaType && mediaType.activePlaylistLoader) {
28562 mediaType.activePlaylistLoader.pause();
28563 mediaType.activePlaylistLoader = null;
28564 }
28565 };
28566 /**
28567 * Start loading provided segment loader and playlist loader
28568 *
28569 * @param {PlaylistLoader} playlistLoader
28570 * PlaylistLoader to start loading
28571 * @param {Object} mediaType
28572 * Active media type
28573 * @function startLoaders
28574 */
28575
28576 const startLoaders = (playlistLoader, mediaType) => {
28577 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
28578 // playlist loader
28579 mediaType.activePlaylistLoader = playlistLoader;
28580 playlistLoader.load();
28581 };
28582 /**
28583 * Returns a function to be called when the media group changes. It performs a
28584 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
28585 * change of group is merely a rendition switch of the same content at another encoding,
28586 * rather than a change of content, such as switching audio from English to Spanish.
28587 *
28588 * @param {string} type
28589 * MediaGroup type
28590 * @param {Object} settings
28591 * Object containing required information for media groups
28592 * @return {Function}
28593 * Handler for a non-destructive resync of SegmentLoader when the active media
28594 * group changes.
28595 * @function onGroupChanged
28596 */
28597
28598 const onGroupChanged = (type, settings) => () => {
28599 const {
28600 segmentLoaders: {
28601 [type]: segmentLoader,
28602 main: mainSegmentLoader
28603 },
28604 mediaTypes: {
28605 [type]: mediaType
28606 }
28607 } = settings;
28608 const activeTrack = mediaType.activeTrack();
28609 const activeGroup = mediaType.getActiveGroup();
28610 const previousActiveLoader = mediaType.activePlaylistLoader;
28611 const lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
28612
28613 if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
28614 return;
28615 }
28616
28617 mediaType.lastGroup_ = activeGroup;
28618 mediaType.lastTrack_ = activeTrack;
28619 stopLoaders(segmentLoader, mediaType);
28620
28621 if (!activeGroup || activeGroup.isMainPlaylist) {
28622 // there is no group active or active group is a main playlist and won't change
28623 return;
28624 }
28625
28626 if (!activeGroup.playlistLoader) {
28627 if (previousActiveLoader) {
28628 // The previous group had a playlist loader but the new active group does not
28629 // this means we are switching from demuxed to muxed audio. In this case we want to
28630 // do a destructive reset of the main segment loader and not restart the audio
28631 // loaders.
28632 mainSegmentLoader.resetEverything();
28633 }
28634
28635 return;
28636 } // Non-destructive resync
28637
28638
28639 segmentLoader.resyncLoader();
28640 startLoaders(activeGroup.playlistLoader, mediaType);
28641 };
28642 const onGroupChanging = (type, settings) => () => {
28643 const {
28644 segmentLoaders: {
28645 [type]: segmentLoader
28646 },
28647 mediaTypes: {
28648 [type]: mediaType
28649 }
28650 } = settings;
28651 mediaType.lastGroup_ = null;
28652 segmentLoader.abort();
28653 segmentLoader.pause();
28654 };
28655 /**
28656 * Returns a function to be called when the media track changes. It performs a
28657 * destructive reset of the SegmentLoader to ensure we start loading as close to
28658 * currentTime as possible.
28659 *
28660 * @param {string} type
28661 * MediaGroup type
28662 * @param {Object} settings
28663 * Object containing required information for media groups
28664 * @return {Function}
28665 * Handler for a destructive reset of SegmentLoader when the active media
28666 * track changes.
28667 * @function onTrackChanged
28668 */
28669
28670 const onTrackChanged = (type, settings) => () => {
28671 const {
28672 mainPlaylistLoader,
28673 segmentLoaders: {
28674 [type]: segmentLoader,
28675 main: mainSegmentLoader
28676 },
28677 mediaTypes: {
28678 [type]: mediaType
28679 }
28680 } = settings;
28681 const activeTrack = mediaType.activeTrack();
28682 const activeGroup = mediaType.getActiveGroup();
28683 const previousActiveLoader = mediaType.activePlaylistLoader;
28684 const lastTrack = mediaType.lastTrack_; // track did not change, do nothing
28685
28686 if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
28687 return;
28688 }
28689
28690 mediaType.lastGroup_ = activeGroup;
28691 mediaType.lastTrack_ = activeTrack;
28692 stopLoaders(segmentLoader, mediaType);
28693
28694 if (!activeGroup) {
28695 // there is no group active so we do not want to restart loaders
28696 return;
28697 }
28698
28699 if (activeGroup.isMainPlaylist) {
28700 // track did not change, do nothing
28701 if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
28702 return;
28703 }
28704
28705 const pc = settings.vhs.playlistController_;
28706 const newPlaylist = pc.selectPlaylist(); // media will not change, do nothing
28707
28708 if (pc.media() === newPlaylist) {
28709 return;
28710 }
28711
28712 mediaType.logger_(`track change. Switching main audio from ${lastTrack.id} to ${activeTrack.id}`);
28713 mainPlaylistLoader.pause();
28714 mainSegmentLoader.resetEverything();
28715 pc.fastQualityChange_(newPlaylist);
28716 return;
28717 }
28718
28719 if (type === 'AUDIO') {
28720 if (!activeGroup.playlistLoader) {
28721 // when switching from demuxed audio/video to muxed audio/video (noted by no
28722 // playlist loader for the audio group), we want to do a destructive reset of the
28723 // main segment loader and not restart the audio loaders
28724 mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
28725 // it should be stopped
28726
28727 mainSegmentLoader.resetEverything();
28728 return;
28729 } // although the segment loader is an audio segment loader, call the setAudio
28730 // function to ensure it is prepared to re-append the init segment (or handle other
28731 // config changes)
28732
28733
28734 segmentLoader.setAudio(true);
28735 mainSegmentLoader.setAudio(false);
28736 }
28737
28738 if (previousActiveLoader === activeGroup.playlistLoader) {
28739 // Nothing has actually changed. This can happen because track change events can fire
28740 // multiple times for a "single" change. One for enabling the new active track, and
28741 // one for disabling the track that was active
28742 startLoaders(activeGroup.playlistLoader, mediaType);
28743 return;
28744 }
28745
28746 if (segmentLoader.track) {
28747 // For WebVTT, set the new text track in the segmentloader
28748 segmentLoader.track(activeTrack);
28749 } // destructive reset
28750
28751
28752 segmentLoader.resetEverything();
28753 startLoaders(activeGroup.playlistLoader, mediaType);
28754 };
28755 const onError = {
28756 /**
28757 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
28758 * an error.
28759 *
28760 * @param {string} type
28761 * MediaGroup type
28762 * @param {Object} settings
28763 * Object containing required information for media groups
28764 * @return {Function}
28765 * Error handler. Logs warning (or error if the playlist is excluded) to
28766 * console and switches back to default audio track.
28767 * @function onError.AUDIO
28768 */
28769 AUDIO: (type, settings) => () => {
28770 const {
28771 mediaTypes: {
28772 [type]: mediaType
28773 },
28774 excludePlaylist
28775 } = settings; // switch back to default audio track
28776
28777 const activeTrack = mediaType.activeTrack();
28778 const activeGroup = mediaType.activeGroup();
28779 const id = (activeGroup.filter(group => group.default)[0] || activeGroup[0]).id;
28780 const defaultTrack = mediaType.tracks[id];
28781
28782 if (activeTrack === defaultTrack) {
28783 // Default track encountered an error. All we can do now is exclude the current
28784 // rendition and hope another will switch audio groups
28785 excludePlaylist({
28786 error: {
28787 message: 'Problem encountered loading the default audio track.'
28788 }
28789 });
28790 return;
28791 }
28792
28793 videojs__default["default"].log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
28794
28795 for (const trackId in mediaType.tracks) {
28796 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
28797 }
28798
28799 mediaType.onTrackChanged();
28800 },
28801
28802 /**
28803 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
28804 * an error.
28805 *
28806 * @param {string} type
28807 * MediaGroup type
28808 * @param {Object} settings
28809 * Object containing required information for media groups
28810 * @return {Function}
28811 * Error handler. Logs warning to console and disables the active subtitle track
28812 * @function onError.SUBTITLES
28813 */
28814 SUBTITLES: (type, settings) => () => {
28815 const {
28816 mediaTypes: {
28817 [type]: mediaType
28818 }
28819 } = settings;
28820 videojs__default["default"].log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
28821 const track = mediaType.activeTrack();
28822
28823 if (track) {
28824 track.mode = 'disabled';
28825 }
28826
28827 mediaType.onTrackChanged();
28828 }
28829 };
28830 const setupListeners = {
28831 /**
28832 * Setup event listeners for audio playlist loader
28833 *
28834 * @param {string} type
28835 * MediaGroup type
28836 * @param {PlaylistLoader|null} playlistLoader
28837 * PlaylistLoader to register listeners on
28838 * @param {Object} settings
28839 * Object containing required information for media groups
28840 * @function setupListeners.AUDIO
28841 */
28842 AUDIO: (type, playlistLoader, settings) => {
28843 if (!playlistLoader) {
28844 // no playlist loader means audio will be muxed with the video
28845 return;
28846 }
28847
28848 const {
28849 tech,
28850 requestOptions,
28851 segmentLoaders: {
28852 [type]: segmentLoader
28853 }
28854 } = settings;
28855 playlistLoader.on('loadedmetadata', () => {
28856 const media = playlistLoader.media();
28857 segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
28858 // permits, start downloading segments
28859
28860 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
28861 segmentLoader.load();
28862 }
28863 });
28864 playlistLoader.on('loadedplaylist', () => {
28865 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
28866
28867 if (!tech.paused()) {
28868 segmentLoader.load();
28869 }
28870 });
28871 playlistLoader.on('error', onError[type](type, settings));
28872 },
28873
28874 /**
28875 * Setup event listeners for subtitle playlist loader
28876 *
28877 * @param {string} type
28878 * MediaGroup type
28879 * @param {PlaylistLoader|null} playlistLoader
28880 * PlaylistLoader to register listeners on
28881 * @param {Object} settings
28882 * Object containing required information for media groups
28883 * @function setupListeners.SUBTITLES
28884 */
28885 SUBTITLES: (type, playlistLoader, settings) => {
28886 const {
28887 tech,
28888 requestOptions,
28889 segmentLoaders: {
28890 [type]: segmentLoader
28891 },
28892 mediaTypes: {
28893 [type]: mediaType
28894 }
28895 } = settings;
28896 playlistLoader.on('loadedmetadata', () => {
28897 const media = playlistLoader.media();
28898 segmentLoader.playlist(media, requestOptions);
28899 segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
28900 // permits, start downloading segments
28901
28902 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
28903 segmentLoader.load();
28904 }
28905 });
28906 playlistLoader.on('loadedplaylist', () => {
28907 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
28908
28909 if (!tech.paused()) {
28910 segmentLoader.load();
28911 }
28912 });
28913 playlistLoader.on('error', onError[type](type, settings));
28914 }
28915 };
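  // Editor's sketch: the "should we start loading segments?" predicate used in the
  // 'loadedmetadata' handlers above, written out as a standalone function.
  // Loading starts when playback is underway, or when the playlist is VOD
  // (endList) and preload permits:
  //
  //   const shouldStartLoading = (tech, media) =>
  //     !tech.paused() || (media.endList && tech.preload() !== 'none');
  //
  //   // e.g. paused VOD with preload="auto" -> true; paused live stream -> false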
28916 const initialize = {
28917 /**
28918 * Setup PlaylistLoaders and AudioTracks for the audio groups
28919 *
28920 * @param {string} type
28921 * MediaGroup type
28922 * @param {Object} settings
28923 * Object containing required information for media groups
28924 * @function initialize.AUDIO
28925 */
28926 'AUDIO': (type, settings) => {
28927 const {
28928 vhs,
28929 sourceType,
28930 segmentLoaders: {
28931 [type]: segmentLoader
28932 },
28933 requestOptions,
28934 main: {
28935 mediaGroups
28936 },
28937 mediaTypes: {
28938 [type]: {
28939 groups,
28940 tracks,
28941 logger_
28942 }
28943 },
28944 mainPlaylistLoader
28945 } = settings;
28946 const audioOnlyMain = isAudioOnly(mainPlaylistLoader.main); // force a default if we have none
28947
28948 if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
28949 mediaGroups[type] = {
28950 main: {
28951 default: {
28952 default: true
28953 }
28954 }
28955 };
28956
28957 if (audioOnlyMain) {
28958 mediaGroups[type].main.default.playlists = mainPlaylistLoader.main.playlists;
28959 }
28960 }
28961
28962 for (const groupId in mediaGroups[type]) {
28963 if (!groups[groupId]) {
28964 groups[groupId] = [];
28965 }
28966
28967 for (const variantLabel in mediaGroups[type][groupId]) {
28968 let properties = mediaGroups[type][groupId][variantLabel];
28969 let playlistLoader;
28970
28971 if (audioOnlyMain) {
28972 logger_(`AUDIO group '${groupId}' label '${variantLabel}' is a main playlist`);
28973 properties.isMainPlaylist = true;
28974 playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
28975 // use the resolved media playlist object
28976 } else if (sourceType === 'vhs-json' && properties.playlists) {
28977 playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
28978 } else if (properties.resolvedUri) {
28979 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
28980 // should we even have properties.playlists in this check.
28981 } else if (properties.playlists && sourceType === 'dash') {
28982 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, mainPlaylistLoader);
28983 } else {
28984 // no resolvedUri means the audio is muxed with the video when using this
28985 // audio track
28986 playlistLoader = null;
28987 }
28988
28989 properties = merge$1({
28990 id: variantLabel,
28991 playlistLoader
28992 }, properties);
28993 setupListeners[type](type, properties.playlistLoader, settings);
28994 groups[groupId].push(properties);
28995
28996 if (typeof tracks[variantLabel] === 'undefined') {
28997 const track = new videojs__default["default"].AudioTrack({
28998 id: variantLabel,
28999 kind: audioTrackKind_(properties),
29000 enabled: false,
29001 language: properties.language,
29002 default: properties.default,
29003 label: variantLabel
29004 });
29005 tracks[variantLabel] = track;
29006 }
29007 }
29008 } // setup single error event handler for the segment loader
29009
29010
29011 segmentLoader.on('error', onError[type](type, settings));
29012 },
29013
29014 /**
29015 * Setup PlaylistLoaders and TextTracks for the subtitle groups
29016 *
29017 * @param {string} type
29018 * MediaGroup type
29019 * @param {Object} settings
29020 * Object containing required information for media groups
29021 * @function initialize.SUBTITLES
29022 */
29023 'SUBTITLES': (type, settings) => {
29024 const {
29025 tech,
29026 vhs,
29027 sourceType,
29028 segmentLoaders: {
29029 [type]: segmentLoader
29030 },
29031 requestOptions,
29032 main: {
29033 mediaGroups
29034 },
29035 mediaTypes: {
29036 [type]: {
29037 groups,
29038 tracks
29039 }
29040 },
29041 mainPlaylistLoader
29042 } = settings;
29043
29044 for (const groupId in mediaGroups[type]) {
29045 if (!groups[groupId]) {
29046 groups[groupId] = [];
29047 }
29048
29049 for (const variantLabel in mediaGroups[type][groupId]) {
29050 if (!vhs.options_.useForcedSubtitles && mediaGroups[type][groupId][variantLabel].forced) {
29051 // Subtitle playlists with the forced attribute are not selectable in Safari.
29052 // According to Apple's HLS Authoring Specification:
29053 // If content has forced subtitles and regular subtitles in a given language,
29054 // the regular subtitles track in that language MUST contain both the forced
29055 // subtitles and the regular subtitles for that language.
29056 // Because of this requirement and that Safari does not add forced subtitles,
29057 // forced subtitles are skipped here to maintain consistent experience across
29058 // all platforms
29059 continue;
29060 }
29061
29062 let properties = mediaGroups[type][groupId][variantLabel];
29063 let playlistLoader;
29064
29065 if (sourceType === 'hls') {
29066 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
29067 } else if (sourceType === 'dash') {
29068 const playlists = properties.playlists.filter(p => p.excludeUntil !== Infinity);
29069
29070 if (!playlists.length) {
29071 return;
29072 }
29073
29074 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, mainPlaylistLoader);
29075 } else if (sourceType === 'vhs-json') {
29076 playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
29077 // as provided, otherwise use the resolved URI to load the playlist
29078 properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
29079 }
29080
29081 properties = merge$1({
29082 id: variantLabel,
29083 playlistLoader
29084 }, properties);
29085 setupListeners[type](type, properties.playlistLoader, settings);
29086 groups[groupId].push(properties);
29087
29088 if (typeof tracks[variantLabel] === 'undefined') {
29089 const track = tech.addRemoteTextTrack({
29090 id: variantLabel,
29091 kind: 'subtitles',
29092 default: properties.default && properties.autoselect,
29093 language: properties.language,
29094 label: variantLabel
29095 }, false).track;
29096 tracks[variantLabel] = track;
29097 }
29098 }
29099 } // setup single error event handler for the segment loader
29100
29101
29102 segmentLoader.on('error', onError[type](type, settings));
29103 },
29104
29105 /**
29106 * Setup TextTracks for the closed-caption groups
29107 *
29108 * @param {String} type
29109 * MediaGroup type
29110 * @param {Object} settings
29111 * Object containing required information for media groups
29112 * @function initialize['CLOSED-CAPTIONS']
29113 */
29114 'CLOSED-CAPTIONS': (type, settings) => {
29115 const {
29116 tech,
29117 main: {
29118 mediaGroups
29119 },
29120 mediaTypes: {
29121 [type]: {
29122 groups,
29123 tracks
29124 }
29125 }
29126 } = settings;
29127
29128 for (const groupId in mediaGroups[type]) {
29129 if (!groups[groupId]) {
29130 groups[groupId] = [];
29131 }
29132
29133 for (const variantLabel in mediaGroups[type][groupId]) {
29134 const properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services
29135
29136 if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
29137 continue;
29138 }
29139
29140 const captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
29141 let newProps = {
29142 label: variantLabel,
29143 language: properties.language,
29144 instreamId: properties.instreamId,
29145 default: properties.default && properties.autoselect
29146 };
29147
29148 if (captionServices[newProps.instreamId]) {
29149 newProps = merge$1(newProps, captionServices[newProps.instreamId]);
29150 }
29151
29152 if (newProps.default === undefined) {
29153 delete newProps.default;
29154 } // No PlaylistLoader is required for Closed-Captions because the captions are
29155 // embedded within the video stream
29156
29157
29158 groups[groupId].push(merge$1({
29159 id: variantLabel
29160 }, properties));
29161
29162 if (typeof tracks[variantLabel] === 'undefined') {
29163 const track = tech.addRemoteTextTrack({
29164 id: newProps.instreamId,
29165 kind: 'captions',
29166 default: newProps.default,
29167 language: newProps.language,
29168 label: newProps.label
29169 }, false).track;
29170 tracks[variantLabel] = track;
29171 }
29172 }
29173 }
29174 }
29175 };
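  // Editor's note: when a manifest declares no AUDIO media groups, initialize.AUDIO
  // above synthesizes one so the rest of the pipeline can assume a group exists:
  //
  //   mediaGroups.AUDIO = { main: { default: { default: true } } };
  //
  // and, for audio-only sources, the main playlists are attached to that synthetic
  // default entry so it can actually be loaded.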
29176
29177 const groupMatch = (list, media) => {
29178 for (let i = 0; i < list.length; i++) {
29179 if (playlistMatch(media, list[i])) {
29180 return true;
29181 }
29182
29183 if (list[i].playlists && groupMatch(list[i].playlists, media)) {
29184 return true;
29185 }
29186 }
29187
29188 return false;
29189 };
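  // Illustrative example (editor's note, assuming playlistMatch compares playlist
  // identity by id/uri): groupMatch recurses into nested `playlists` arrays, so a
  // media playlist matches whether it sits at the top level or inside a variant:
  //
  //   groupMatch([{ id: 'a' }, { id: 'b', playlists: [{ id: 'c' }] }], { id: 'c' }); // true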
29190 /**
29191 * Returns a function used to get the active group of the provided type
29192 *
29193 * @param {string} type
29194 * MediaGroup type
29195 * @param {Object} settings
29196 * Object containing required information for media groups
29197 * @return {Function}
29198 * Function that returns the active media group for the provided type. Takes an
29199 * optional parameter {TextTrack} track. If no track is provided, a list of all
29200 * variants in the group is returned; otherwise, the variant corresponding to the
29201 * provided track is returned.
29202 * @function activeGroup
29203 */
29204
29205
29206 const activeGroup = (type, settings) => track => {
29207 const {
29208 mainPlaylistLoader,
29209 mediaTypes: {
29210 [type]: {
29211 groups
29212 }
29213 }
29214 } = settings;
29215 const media = mainPlaylistLoader.media();
29216
29217 if (!media) {
29218 return null;
29219 }
29220
29221 let variants = null; // set variants to the main media's active group
29222
29223 if (media.attributes[type]) {
29224 variants = groups[media.attributes[type]];
29225 }
29226
29227 const groupKeys = Object.keys(groups);
29228
29229 if (!variants) {
29230 // find the mainPlaylistLoader media
29231 // that is in a media group if we are dealing
29232 // with audio only
29233 if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.main)) {
29234 for (let i = 0; i < groupKeys.length; i++) {
29235 const groupPropertyList = groups[groupKeys[i]];
29236
29237 if (groupMatch(groupPropertyList, media)) {
29238 variants = groupPropertyList;
29239 break;
29240 }
29241 } // use the main group if it exists
29242
29243 } else if (groups.main) {
29244 variants = groups.main; // only one group, use that one
29245 } else if (groupKeys.length === 1) {
29246 variants = groups[groupKeys[0]];
29247 }
29248 }
29249
29250 if (typeof track === 'undefined') {
29251 return variants;
29252 }
29253
29254 if (track === null || !variants) {
29255 // An active track was specified so a corresponding group is expected. track === null
29256 // means no track is currently active so there is no corresponding group
29257 return null;
29258 }
29259
29260 return variants.filter(props => props.id === track.id)[0] || null;
29261 };
29262 const activeTrack = {
29263 /**
29264 * Returns a function used to get the active track of type provided
29265 *
29266 * @param {string} type
29267 * MediaGroup type
29268 * @param {Object} settings
29269 * Object containing required information for media groups
29270 * @return {Function}
29271 * Function that returns the active media track for the provided type. Returns
29272 * null if no track is active
29273 * @function activeTrack.AUDIO
29274 */
29275 AUDIO: (type, settings) => () => {
29276 const {
29277 mediaTypes: {
29278 [type]: {
29279 tracks
29280 }
29281 }
29282 } = settings;
29283
29284 for (const id in tracks) {
29285 if (tracks[id].enabled) {
29286 return tracks[id];
29287 }
29288 }
29289
29290 return null;
29291 },
29292
29293 /**
29294 * Returns a function used to get the active track of type provided
29295 *
29296 * @param {string} type
29297 * MediaGroup type
29298 * @param {Object} settings
29299 * Object containing required information for media groups
29300 * @return {Function}
29301 * Function that returns the active media track for the provided type. Returns
29302 * null if no track is active
29303 * @function activeTrack.SUBTITLES
29304 */
29305 SUBTITLES: (type, settings) => () => {
29306 const {
29307 mediaTypes: {
29308 [type]: {
29309 tracks
29310 }
29311 }
29312 } = settings;
29313
29314 for (const id in tracks) {
29315 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
29316 return tracks[id];
29317 }
29318 }
29319
29320 return null;
29321 }
29322 };
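  // Editor's note: for SUBTITLES both 'showing' and 'hidden' count as active —
  // a 'hidden' text track still receives cues, it just isn't rendered. For example:
  //
  //   tracks = { en: { mode: 'disabled' }, de: { mode: 'hidden' } };
  //   // activeTrack.SUBTITLES(type, settings)() -> the 'de' track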
29323 const getActiveGroup = (type, {
29324 mediaTypes
29325 }) => () => {
29326 const activeTrack_ = mediaTypes[type].activeTrack();
29327
29328 if (!activeTrack_) {
29329 return null;
29330 }
29331
29332 return mediaTypes[type].activeGroup(activeTrack_);
29333 };
29334 /**
29335 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
29336 * Closed-Captions) specified in the main manifest.
29337 *
29338 * @param {Object} settings
29339 * Object containing required information for setting up the media groups
29340 * @param {Tech} settings.tech
29341 * The tech of the player
29342 * @param {Object} settings.requestOptions
29343 * XHR request options used by the segment loaders
29344 * @param {PlaylistLoader} settings.mainPlaylistLoader
29345 * PlaylistLoader for the main source
29346 * @param {VhsHandler} settings.vhs
29347 * VHS SourceHandler
29348 * @param {Object} settings.main
29349 * The parsed main manifest
29350 * @param {Object} settings.mediaTypes
29351 * Object to store the loaders, tracks, and utility methods for each media type
29352 * @param {Function} settings.excludePlaylist
29353 * Excludes the current rendition and forces a rendition switch.
29354 * @function setupMediaGroups
29355 */
29356
29357 const setupMediaGroups = settings => {
29358 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(type => {
29359 initialize[type](type, settings);
29360 });
29361 const {
29362 mediaTypes,
29363 mainPlaylistLoader,
29364 tech,
29365 vhs,
29366 segmentLoaders: {
29367 ['AUDIO']: audioSegmentLoader,
29368 main: mainSegmentLoader
29369 }
29370 } = settings; // setup active group and track getters and change event handlers
29371
29372 ['AUDIO', 'SUBTITLES'].forEach(type => {
29373 mediaTypes[type].activeGroup = activeGroup(type, settings);
29374 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
29375 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
29376 mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
29377 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
29378 mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
29379 }); // DO NOT enable the default subtitle or caption track.
29380 // DO enable the default audio track
29381
29382 const audioGroup = mediaTypes.AUDIO.activeGroup();
29383
29384 if (audioGroup) {
29385 const groupId = (audioGroup.filter(group => group.default)[0] || audioGroup[0]).id;
29386 mediaTypes.AUDIO.tracks[groupId].enabled = true;
29387 mediaTypes.AUDIO.onGroupChanged();
29388 mediaTypes.AUDIO.onTrackChanged();
29389 const activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
29390 // track is changed, but needs to be handled here since the track may not be considered
29391 // changed on the first call to onTrackChanged
29392
29393 if (!activeAudioGroup.playlistLoader) {
29394 // either audio is muxed with video or the stream is audio only
29395 mainSegmentLoader.setAudio(true);
29396 } else {
29397 // audio is demuxed
29398 mainSegmentLoader.setAudio(false);
29399 audioSegmentLoader.setAudio(true);
29400 }
29401 }
29402
29403 mainPlaylistLoader.on('mediachange', () => {
29404 ['AUDIO', 'SUBTITLES'].forEach(type => mediaTypes[type].onGroupChanged());
29405 });
29406 mainPlaylistLoader.on('mediachanging', () => {
29407 ['AUDIO', 'SUBTITLES'].forEach(type => mediaTypes[type].onGroupChanging());
29408 }); // custom audio track change event handler for usage event
29409
29410 const onAudioTrackChanged = () => {
29411 mediaTypes.AUDIO.onTrackChanged();
29412 tech.trigger({
29413 type: 'usage',
29414 name: 'vhs-audio-change'
29415 });
29416 };
29417
29418 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
29419 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
29420 vhs.on('dispose', () => {
29421 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
29422 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
29423 }); // clear existing audio tracks and add the ones we just created
29424
29425 tech.clearTracks('audio');
29426
29427 for (const id in mediaTypes.AUDIO.tracks) {
29428 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
29429 }
29430 };
29431 /**
29432 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
29433 * media type
29434 *
29435 * @return {Object}
29436 * Object to store the loaders, tracks, and utility methods for each media type
29437 * @function createMediaTypes
29438 */
29439
29440 const createMediaTypes = () => {
29441 const mediaTypes = {};
29442 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(type => {
29443 mediaTypes[type] = {
29444 groups: {},
29445 tracks: {},
29446 activePlaylistLoader: null,
29447 activeGroup: noop,
29448 activeTrack: noop,
29449 getActiveGroup: noop,
29450 onGroupChanged: noop,
29451 onTrackChanged: noop,
29452 lastTrack_: null,
29453 logger_: logger(`MediaGroups[${type}]`)
29454 };
29455 });
29456 return mediaTypes;
29457 };
29458
29459 /**
29460 * A utility class for setting properties and maintaining the state of the content steering manifest.
29461 *
29462 * Content Steering manifest format:
29463 * VERSION: number (required) currently only version 1 is supported.
29464 * TTL: number in seconds (optional) until the next content steering manifest reload.
29465 * RELOAD-URI: string (optional) uri to fetch the next content steering manifest.
29466 * SERVICE-LOCATION-PRIORITY or PATHWAY-PRIORITY: a non-empty array of unique string values (required for HLS, optional for DASH).
29467 * PATHWAY-CLONES: array (optional) (HLS only) pathway clone objects to copy from other playlists.
29468 */
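  // An illustrative version-1 steering manifest (editor's example; field names are
  // from the format notes above, values are hypothetical):
  //
  //   {
  //     "VERSION": 1,
  //     "TTL": 300,
  //     "RELOAD-URI": "https://steering.example.com/manifest.json",
  //     "PATHWAY-PRIORITY": ["CDN-A", "CDN-B"]
  //   }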
29469
29470 class SteeringManifest {
29471 constructor() {
29472 this.priority_ = [];
29473 this.pathwayClones_ = new Map();
29474 }
29475
29476 set version(number) {
29477 // Only version 1 is currently supported for both DASH and HLS.
29478 if (number === 1) {
29479 this.version_ = number;
29480 }
29481 }
29482
29483 set ttl(seconds) {
29484 // TTL = time-to-live, default = 300 seconds.
29485 this.ttl_ = seconds || 300;
29486 }
29487
29488 set reloadUri(uri) {
29489 if (uri) {
29490 // reload URI can be relative to the previous reloadUri.
29491 this.reloadUri_ = resolveUrl(this.reloadUri_, uri);
29492 }
29493 }
29494
29495 set priority(array) {
29496 // priority must be non-empty and unique values.
29497 if (array && array.length) {
29498 this.priority_ = array;
29499 }
29500 }
29501
29502 set pathwayClones(array) {
29503 // pathwayClones must be non-empty.
29504 if (array && array.length) {
29505 this.pathwayClones_ = new Map(array.map(clone => [clone.ID, clone]));
29506 }
29507 }
29508
29509 get version() {
29510 return this.version_;
29511 }
29512
29513 get ttl() {
29514 return this.ttl_;
29515 }
29516
29517 get reloadUri() {
29518 return this.reloadUri_;
29519 }
29520
29521 get priority() {
29522 return this.priority_;
29523 }
29524
29525 get pathwayClones() {
29526 return this.pathwayClones_;
29527 }
29528
29529 }
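  // Editor's sketch of the setters' validation behavior (hypothetical values):
  //
  //   const m = new SteeringManifest();
  //   m.version = 2;        // ignored — only version 1 is supported
  //   m.ttl = undefined;    // falls back to the 300 second default
  //   m.priority = [];      // ignored — priority must be non-empty
  //   m.pathwayClones = [{ ID: 'CDN-A-CLONE' }]; // stored in a Map keyed by ID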
29530 /**
29531 * This class represents a content steering manifest and associated state. See both HLS and DASH specifications.
29532 * HLS: https://developer.apple.com/streaming/HLSContentSteeringSpecification.pdf and
29533 * https://datatracker.ietf.org/doc/draft-pantos-hls-rfc8216bis/ section 4.4.6.6.
29534 * DASH: https://dashif.org/docs/DASH-IF-CTS-00XX-Content-Steering-Community-Review.pdf
29535 *
29536 * @param {function} xhr for making a network request from the browser.
29537 * @param {function} bandwidth for fetching the current bandwidth from the main segment loader.
29538 */
29539
29540
29541 class ContentSteeringController extends videojs__default["default"].EventTarget {
29542 constructor(xhr, bandwidth) {
29543 super();
29544 this.currentPathway = null;
29545 this.defaultPathway = null;
29546 this.queryBeforeStart = false;
29547 this.availablePathways_ = new Set();
29548 this.steeringManifest = new SteeringManifest();
29549 this.proxyServerUrl_ = null;
29550 this.manifestType_ = null;
29551 this.ttlTimeout_ = null;
29552 this.request_ = null;
29553 this.currentPathwayClones = new Map();
29554 this.nextPathwayClones = new Map();
29555 this.excludedSteeringManifestURLs = new Set();
29556 this.logger_ = logger('Content Steering');
29557 this.xhr_ = xhr;
29558 this.getBandwidth_ = bandwidth;
29559 }
29560 /**
29561 * Assigns the content steering tag properties to the steering controller
29562 *
29563 * @param {string} baseUrl the baseURL from the main manifest for resolving the steering manifest url
29564 * @param {Object} steeringTag the content steering tag from the main manifest
29565 */
29566
29567
29568 assignTagProperties(baseUrl, steeringTag) {
29569 this.manifestType_ = steeringTag.serverUri ? 'HLS' : 'DASH'; // serverUri is HLS; serverURL is DASH
29570
29571 const steeringUri = steeringTag.serverUri || steeringTag.serverURL;
29572
29573 if (!steeringUri) {
29574 this.logger_(`steering manifest URL is ${steeringUri}, cannot request steering manifest.`);
29575 this.trigger('error');
29576 return;
29577 } // Content steering manifests can be encoded as a data URI. We can decode, parse and return early if that's the case.
29578
29579
29580 if (steeringUri.startsWith('data:')) {
29581 this.decodeDataUriManifest_(steeringUri.substring(steeringUri.indexOf(',') + 1));
29582 return;
29583 } // reloadUri is the resolution of the main manifest URL and steering URL.
29584
29585
29586 this.steeringManifest.reloadUri = resolveUrl(baseUrl, steeringUri); // pathwayId is HLS; defaultServiceLocation is DASH
29587
29588 this.defaultPathway = steeringTag.pathwayId || steeringTag.defaultServiceLocation; // currently only DASH supports the following properties on <ContentSteering> tags.
29589
29590 this.queryBeforeStart = steeringTag.queryBeforeStart;
29591 this.proxyServerUrl_ = steeringTag.proxyServerURL; // trigger a steering event if we have a pathway from the content steering tag.
29592 // this tells VHS which segment pathway to start with.
29593 // If queryBeforeStart is true we need to wait for the steering manifest response.
29594
29595 if (this.defaultPathway && !this.queryBeforeStart) {
29596 this.trigger('content-steering');
29597 }
29598 }
29599 /**
29600 * Requests the content steering manifest and parses the response. This should only be called after
29601 * assignTagProperties was called with a content steering tag.
29602 *
29603 * @param {boolean} initial Whether this is the initial steering manifest request.
29604 * If true, the request is made with exactly the unmodified reload URI.
29605 * This scenario should only happen once on initialization.
29606 */
29607
29608
29609 requestSteeringManifest(initial) {
29610 const reloadUri = this.steeringManifest.reloadUri;
29611
29612 if (!reloadUri) {
29613 return;
29614 } // We currently don't support passing MPD query parameters directly to the content steering URL as this requires
29615 // ExtUrlQueryInfo tag support. See the DASH content steering spec section 8.1.
29616 // This request URI accounts for manifest URIs that have been excluded.
29617
29618
29619 const uri = initial ? reloadUri : this.getRequestURI(reloadUri); // If there are no valid manifest URIs, we should stop content steering.
29620
29621 if (!uri) {
29622 this.logger_('No valid content steering manifest URIs. Stopping content steering.');
29623 this.trigger('error');
29624 this.dispose();
29625 return;
29626 }
29627
29628 this.request_ = this.xhr_({
29629 uri
29630 }, (error, errorInfo) => {
29631 if (error) {
29632 // If the client receives HTTP 410 Gone in response to a manifest request,
29633 // it MUST NOT issue another request for that URI for the remainder of the
29634 // playback session. It MAY continue to use the most-recently obtained set
29635 // of Pathways.
29636 if (errorInfo.status === 410) {
29637 this.logger_(`manifest request 410 ${error}.`);
29638 this.logger_(`There will be no more content steering requests to ${uri} this session.`);
29639 this.excludedSteeringManifestURLs.add(uri);
29640 return;
29641 } // If the client receives HTTP 429 Too Many Requests with a Retry-After
29642 // header in response to a manifest request, it SHOULD wait until the time
29643 // specified by the Retry-After header to reissue the request.
29644
29645
29646 if (errorInfo.status === 429) {
29647 const retrySeconds = errorInfo.responseHeaders['retry-after'];
29648 this.logger_(`manifest request 429 ${error}.`);
29649 this.logger_(`content steering will retry in ${retrySeconds} seconds.`);
29650 this.startTTLTimeout_(parseInt(retrySeconds, 10));
29651 return;
29652 } // If the Steering Manifest cannot be loaded and parsed correctly, the
29653 // client SHOULD continue to use the previous values and attempt to reload
29654 // it after waiting for the previously-specified TTL (or 5 minutes if
29655 // none).
29656
29657
29658 this.logger_(`manifest failed to load ${error}.`);
29659 this.startTTLTimeout_();
29660 return;
29661 }
29662
29663 const steeringManifestJson = JSON.parse(this.request_.responseText);
29664 this.assignSteeringProperties_(steeringManifestJson);
29665 this.startTTLTimeout_();
29666 });
29667 }
29668 /**
29669 * Set the proxy server URL and add the steering manifest url as a URI encoded parameter.
29670 *
29671 * @param {string} steeringUrl the steering manifest url
29672 * @return {string} the proxy server url with the steering manifest url and steering parameters set
29673 */
29674
29675
29676 setProxyServerUrl_(steeringUrl) {
29677 const steeringUrlObject = new window.URL(steeringUrl);
29678 const proxyServerUrlObject = new window.URL(this.proxyServerUrl_);
29679 proxyServerUrlObject.searchParams.set('url', encodeURI(steeringUrlObject.toString()));
29680 return this.setSteeringParams_(proxyServerUrlObject.toString());
29681 }
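  // Illustrative result (editor's example, hypothetical URLs): with proxyServerUrl_
  // set to 'https://proxy.example.com/steer', a steering url of
  // 'https://cdn.example.com/manifest.json' becomes
  // 'https://proxy.example.com/steer?url=https%3A%2F%2Fcdn.example.com%2Fmanifest.json'
  // before setSteeringParams_ appends the pathway/throughput parameters.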
29682 /**
29683 * Decodes and parses the data uri encoded steering manifest
29684 *
29685 * @param {string} dataUri the data uri to be decoded and parsed.
29686 */
29687
29688
29689 decodeDataUriManifest_(dataUri) {
29690 const steeringManifestJson = JSON.parse(window.atob(dataUri));
29691 this.assignSteeringProperties_(steeringManifestJson);
29692 }
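  // Illustrative example (editor's note): a steering tag may carry the manifest
  // inline as a base64 data URI. Everything after the first comma is base64-decoded
  // and JSON-parsed:
  //
  //   serverUri: 'data:application/json;base64,eyJWRVJTSU9OIjoxfQ=='
  //   // atob('eyJWRVJTSU9OIjoxfQ==') -> '{"VERSION":1}'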
29693 /**
29694 * Set the HLS or DASH content steering manifest request query parameters. For example:
29695 * _HLS_pathway="<CURRENT-PATHWAY-ID>" and _HLS_throughput=<THROUGHPUT>
29696 * _DASH_pathway and _DASH_throughput
29697 *
29698 * @param {string} url the url to add content steering query parameters to.
29699 * @return {string} a new url with the added steering query parameters.
29700 */
29701
29702
29703 setSteeringParams_(url) {
29704 const urlObject = new window.URL(url);
29705 const path = this.getPathway();
29706 const networkThroughput = this.getBandwidth_();
29707
29708 if (path) {
29709 const pathwayKey = `_${this.manifestType_}_pathway`;
29710 urlObject.searchParams.set(pathwayKey, path);
29711 }
29712
29713 if (networkThroughput) {
29714 const throughputKey = `_${this.manifestType_}_throughput`;
29715 urlObject.searchParams.set(throughputKey, networkThroughput);
29716 }
29717
29718 return urlObject.toString();
29719 }
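  // Illustrative result (editor's example, hypothetical values): for an HLS source
  // on pathway 'CDN-A' with a measured bandwidth of 1000000, a reload URI of
  // 'https://steering.example.com/manifest.json' becomes
  // 'https://steering.example.com/manifest.json?_HLS_pathway=CDN-A&_HLS_throughput=1000000'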
29720 /**
29721 * Assigns the current steering manifest properties to the SteeringManifest object
29722 *
29723 * @param {Object} steeringJson the raw JSON steering manifest
29724 */
29725
29726
29727 assignSteeringProperties_(steeringJson) {
29728 this.steeringManifest.version = steeringJson.VERSION;
29729
29730 if (!this.steeringManifest.version) {
29731 this.logger_(`manifest version is ${steeringJson.VERSION}, which is not supported.`);
29732 this.trigger('error');
29733 return;
29734 }
29735
29736 this.steeringManifest.ttl = steeringJson.TTL;
29737 this.steeringManifest.reloadUri = steeringJson['RELOAD-URI']; // HLS = PATHWAY-PRIORITY required. DASH = SERVICE-LOCATION-PRIORITY optional
29738
29739 this.steeringManifest.priority = steeringJson['PATHWAY-PRIORITY'] || steeringJson['SERVICE-LOCATION-PRIORITY']; // Pathway clones to be created/updated in HLS.
29740 // See section 7.2 https://datatracker.ietf.org/doc/draft-pantos-hls-rfc8216bis/
29741
29742 this.steeringManifest.pathwayClones = steeringJson['PATHWAY-CLONES'];
29743 this.nextPathwayClones = this.steeringManifest.pathwayClones; // 1. apply first pathway from the array.
29744 // 2. if first pathway doesn't exist in manifest, try next pathway.
29745 // a. if all pathways are exhausted, ignore the steering manifest priority.
29746 // 3. if segments fail from an established pathway, try all variants/renditions, then exclude the failed pathway.
29747 // a. exclude a pathway for a minimum of the last TTL duration. Meaning, from the next steering response,
29748 // the excluded pathway will be ignored.
29749 // See excludePathway usage in excludePlaylist().
29750 // If there are no available pathways, we need to stop content steering.
29751
29752 if (!this.availablePathways_.size) {
29753 this.logger_('There are no available pathways for content steering. Ending content steering.');
29754 this.trigger('error');
29755 this.dispose();
29756 }
29757
29758 const chooseNextPathway = pathwaysByPriority => {
29759 for (const path of pathwaysByPriority) {
29760 if (this.availablePathways_.has(path)) {
29761 return path;
29762 }
29763 } // If no pathway matches, ignore the manifest and choose the first available.
29764
29765
29766 return [...this.availablePathways_][0];
29767 };
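  // e.g. (editor's example) a priority of ['CDN-B', 'CDN-A'] with only 'CDN-A'
  // available selects 'CDN-A'; if nothing in the priority list is available,
  // the first available pathway wins regardless of the manifest.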
29768
29769 const nextPathway = chooseNextPathway(this.steeringManifest.priority);
29770
29771 if (this.currentPathway !== nextPathway) {
29772 this.currentPathway = nextPathway;
29773 this.trigger('content-steering');
29774 }
29775 }
29776 /**
29777 * Returns the pathway to use for steering decisions
29778 *
29779 * @return {string} returns the current pathway or the default
29780 */
29781
29782
29783 getPathway() {
29784 return this.currentPathway || this.defaultPathway;
29785 }
29786 /**
29787 * Chooses the manifest request URI based on proxy URIs and server URLs.
29788 * Also accounts for exclusion on certain manifest URIs.
29789 *
29790 * @param {string} reloadUri the base uri before parameters
29791 *
29792 * @return {string} the final URI for the request to the manifest server.
29793 */
29794
29795
29796 getRequestURI(reloadUri) {
29797 if (!reloadUri) {
29798 return null;
29799 }
29800
29801 const isExcluded = uri => this.excludedSteeringManifestURLs.has(uri);
29802
29803 if (this.proxyServerUrl_) {
29804 const proxyURI = this.setProxyServerUrl_(reloadUri);
29805
29806 if (!isExcluded(proxyURI)) {
29807 return proxyURI;
29808 }
29809 }
29810
29811 const steeringURI = this.setSteeringParams_(reloadUri);
29812
29813 if (!isExcluded(steeringURI)) {
29814 return steeringURI;
29815 } // Return nothing if all valid manifest URIs are excluded.
29816
29817
29818 return null;
29819 }
29820 /**
29821 * Start the timeout for re-requesting the steering manifest at the TTL interval.
29822 *
29823 * @param {number} ttl time in seconds of the timeout. Defaults to the
29824 * ttl interval in the steering manifest
29825 */
29826
29827
29828 startTTLTimeout_(ttl = this.steeringManifest.ttl) {
29829 // 300 seconds (5 minutes) is the default TTL.
29830 const ttlMS = ttl * 1000;
29831 this.ttlTimeout_ = window.setTimeout(() => {
29832 this.requestSteeringManifest();
29833 }, ttlMS);
29834 }
29835 /**
29836 * Clear the TTL timeout if necessary.
29837 */
29838
29839
29840 clearTTLTimeout_() {
29841 window.clearTimeout(this.ttlTimeout_);
29842 this.ttlTimeout_ = null;
29843 }
29844 /**
29845 * aborts any current steering xhr and sets the current request object to null
29846 */
29847
29848
29849 abort() {
29850 if (this.request_) {
29851 this.request_.abort();
29852 }
29853
29854 this.request_ = null;
29855 }
29856 /**
29857 * aborts steering requests, clears the TTL timeout, and resets all properties.
29858 */
29859
29860
29861 dispose() {
29862 this.off('content-steering');
29863 this.off('error');
29864 this.abort();
29865 this.clearTTLTimeout_();
29866 this.currentPathway = null;
29867 this.defaultPathway = null;
29868 this.queryBeforeStart = null;
29869 this.proxyServerUrl_ = null;
29870 this.manifestType_ = null;
29871 this.ttlTimeout_ = null;
29872 this.request_ = null;
29873 this.excludedSteeringManifestURLs = new Set();
29874 this.availablePathways_ = new Set();
29875 this.steeringManifest = new SteeringManifest();
29876 }
29877 /**
29878 * adds a pathway to the available pathways set
29879 *
29880 * @param {string} pathway the pathway string to add
29881 */
29882
29883
29884 addAvailablePathway(pathway) {
29885 if (pathway) {
29886 this.availablePathways_.add(pathway);
29887 }
29888 }
29889 /**
29890 * Clears all pathways from the available pathways set
29891 */
29892
29893
29894 clearAvailablePathways() {
29895 this.availablePathways_.clear();
29896 }
29897 /**
29898 * Removes a pathway from the available pathways set.
29899 */
29900
29901
29902 excludePathway(pathway) {
29903 return this.availablePathways_.delete(pathway);
29904 }
29905 /**
29906 * Checks the refreshed DASH manifest content steering tag for changes.
29907 *
29908 * @param {string} baseURL new steering tag on DASH manifest refresh
29909 * @param {Object} newTag the new tag to check for changes
29910 * @return {boolean} true if the new tag has different values, false otherwise
29911 */
29912
29913
29914 didDASHTagChange(baseURL, newTag) {
29915 return !newTag && this.steeringManifest.reloadUri || newTag && (resolveUrl(baseURL, newTag.serverURL) !== this.steeringManifest.reloadUri || newTag.defaultServiceLocation !== this.defaultPathway || newTag.queryBeforeStart !== this.queryBeforeStart || newTag.proxyServerURL !== this.proxyServerUrl_);
29916 }
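  // Editor's note: the expression above reports a change when (a) the tag was
  // removed while a reloadUri is still set, or (b) a tag exists and any of its
  // resolved serverURL, defaultServiceLocation, queryBeforeStart or proxyServerURL
  // differs from the controller's current state.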
29917
29918 getAvailablePathways() {
29919 return this.availablePathways_;
29920 }
29921
29922 }
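  // Editor's sketch of a typical controller lifecycle, using only methods defined
  // above (the xhr, bandwidth getter, and steering tag are assumed to come from
  // the surrounding VHS setup):
  //
  //   const steering = new ContentSteeringController(xhr, getBandwidth);
  //   steering.on('content-steering', () => { /* switch to steering.getPathway() */ });
  //   steering.addAvailablePathway('CDN-A');
  //   steering.addAvailablePathway('CDN-B');
  //   steering.assignTagProperties(mainManifestUrl, steeringTag);
  //   steering.requestSteeringManifest(true); // initial request, unmodified URI
  //   // subsequent requests are re-scheduled internally via startTTLTimeout_()
  //   steering.dispose();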
29923
29924 /**
29925 * @file playlist-controller.js
29926 */
29927 const ABORT_EARLY_EXCLUSION_SECONDS = 10;
29928 let Vhs$1; // SegmentLoader stats that need to have each loader's
29929 // values summed to calculate the final value
29930
29931 const loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
29932
29933 const sumLoaderStat = function (stat) {
29934 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
29935 };
29936
29937 const shouldSwitchToMedia = function ({
29938 currentPlaylist,
29939 buffered,
29940 currentTime,
29941 nextPlaylist,
29942 bufferLowWaterLine,
29943 bufferHighWaterLine,
29944 duration,
29945 bufferBasedABR,
29946 log
29947 }) {
29948 // we have no other playlist to switch to
29949 if (!nextPlaylist) {
29950 videojs__default["default"].log.warn('We received no playlist to switch to. Please check your stream.');
29951 return false;
29952 }
29953
29954 const sharedLogLine = `allowing switch ${currentPlaylist && currentPlaylist.id || 'null'} -> ${nextPlaylist.id}`;
29955
29956 if (!currentPlaylist) {
29957 log(`${sharedLogLine} as current playlist is not set`);
29958 return true;
29959 } // no need to switch if playlist is the same
29960
29961
29962 if (nextPlaylist.id === currentPlaylist.id) {
29963 return false;
29964 } // determine if current time is in a buffered range.
29965
29966
29967 const isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, we don't want to take the low water line into account.
29968 // This is because in LIVE, the player plays 3 segments from the end of the
29969 // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
29970 // in those segments, a viewer will never experience a rendition upswitch.
29971
29972 if (!currentPlaylist.endList) {
29973 // For LLHLS live streams, don't switch renditions before playback has started, as it almost
29974 // doubles the time to first playback.
29975 if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {
29976 log(`not ${sharedLogLine} as current playlist is live llhls, but currentTime isn't in buffered.`);
29977 return false;
29978 }
29979
29980 log(`${sharedLogLine} as current playlist is live`);
29981 return true;
29982 }
29983
29984 const forwardBuffer = timeAheadOf(buffered, currentTime);
29985 const maxBufferLowWaterLine = bufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
29986 // duration is below the max potential low water line
29987
29988 if (duration < maxBufferLowWaterLine) {
29989 log(`${sharedLogLine} as duration < max low water line (${duration} < ${maxBufferLowWaterLine})`);
29990 return true;
29991 }
29992
29993 const nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
29994 const currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
29995 // we can switch down
29996
29997 if (nextBandwidth < currBandwidth && (!bufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
29998 let logLine = `${sharedLogLine} as next bandwidth < current bandwidth (${nextBandwidth} < ${currBandwidth})`;
29999
30000 if (bufferBasedABR) {
30001 logLine += ` and forwardBuffer < bufferHighWaterLine (${forwardBuffer} < ${bufferHighWaterLine})`;
30002 }
30003
30004 log(logLine);
30005 return true;
30006 } // and if our buffer is higher than the low water line,
30007 // we can switch up
30008
30009
30010 if ((!bufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
30011 let logLine = `${sharedLogLine} as forwardBuffer >= bufferLowWaterLine (${forwardBuffer} >= ${bufferLowWaterLine})`;
30012
30013 if (bufferBasedABR) {
30014 logLine += ` and next bandwidth > current bandwidth (${nextBandwidth} > ${currBandwidth})`;
30015 }
30016
30017 log(logLine);
30018 return true;
30019 }
30020
30021 log(`not ${sharedLogLine} as no switching criteria met`);
30022 return false;
30023 };
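  // Editor's worked example (hypothetical numbers; assumes VOD playlists whose
  // duration exceeds the max low water line, and bufferBasedABR disabled):
  // switching down in bandwidth is allowed immediately, while switching up also
  // requires forwardBuffer >= bufferLowWaterLine:
  //
  //   shouldSwitchToMedia({
  //     currentPlaylist,                // e.g. a 5 Mbps variant
  //     nextPlaylist,                   // e.g. an 8 Mbps variant
  //     buffered, currentTime,          // forward buffer is derived from these
  //     bufferLowWaterLine: 10,
  //     bufferHighWaterLine: 30,
  //     duration: 600,
  //     bufferBasedABR: false,
  //     log: console.log.bind(console)
  //   }); // -> true once timeAheadOf(buffered, currentTime) >= 10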
30024 /**
30025 * The main playlist controller controls all interactions
30026 * between playlists and segment loaders. At this time this mainly
30027 * involves a main playlist and a series of audio playlists
30028 * if they are available
30029 *
30030 * @class PlaylistController
30031 * @extends videojs.EventTarget
30032 */
30033
30034
30035 class PlaylistController extends videojs__default["default"].EventTarget {
30036 constructor(options) {
30037 super();
30038 const {
30039 src,
30040 withCredentials,
30041 tech,
30042 bandwidth,
30043 externVhs,
30044 useCueTags,
30045 playlistExclusionDuration,
30046 enableLowInitialPlaylist,
30047 sourceType,
30048 cacheEncryptionKeys,
30049 bufferBasedABR,
30050 leastPixelDiffSelector,
30051 captionServices
30052 } = options;
30053
30054 if (!src) {
30055 throw new Error('A non-empty playlist URL or JSON manifest string is required');
30056 }
30057
30058 let {
30059 maxPlaylistRetries
30060 } = options;
30061
30062 if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
30063 maxPlaylistRetries = Infinity;
30064 }
30065
30066 Vhs$1 = externVhs;
30067 this.bufferBasedABR = Boolean(bufferBasedABR);
30068 this.leastPixelDiffSelector = Boolean(leastPixelDiffSelector);
30069 this.withCredentials = withCredentials;
30070 this.tech_ = tech;
30071 this.vhs_ = tech.vhs;
30072 this.sourceType_ = sourceType;
30073 this.useCueTags_ = useCueTags;
30074 this.playlistExclusionDuration = playlistExclusionDuration;
30075 this.maxPlaylistRetries = maxPlaylistRetries;
30076 this.enableLowInitialPlaylist = enableLowInitialPlaylist;
30077
30078 if (this.useCueTags_) {
30079 this.cueTagsTrack_ = this.tech_.addTextTrack('metadata', 'ad-cues');
30080 this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
30081 }
30082
30083 this.requestOptions_ = {
30084 withCredentials,
30085 maxPlaylistRetries,
30086 timeout: null
30087 };
30088 this.on('error', this.pauseLoading);
30089 this.mediaTypes_ = createMediaTypes();
30090 this.mediaSource = new window.MediaSource();
30091 this.handleDurationChange_ = this.handleDurationChange_.bind(this);
30092 this.handleSourceOpen_ = this.handleSourceOpen_.bind(this);
30093 this.handleSourceEnded_ = this.handleSourceEnded_.bind(this);
30094 this.mediaSource.addEventListener('durationchange', this.handleDurationChange_); // load the media source into the player
30095
30096 this.mediaSource.addEventListener('sourceopen', this.handleSourceOpen_);
30097 this.mediaSource.addEventListener('sourceended', this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
30098 // everything, and the MediaSource should not be detached without a proper disposal
30099
30100 this.seekable_ = createTimeRanges();
30101 this.hasPlayed_ = false;
30102 this.syncController_ = new SyncController(options);
30103 this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
30104 kind: 'metadata',
30105 label: 'segment-metadata'
30106 }, false).track;
30107 this.decrypter_ = new Decrypter();
30108 this.sourceUpdater_ = new SourceUpdater(this.mediaSource);
30109 this.inbandTextTracks_ = {};
30110 this.timelineChangeController_ = new TimelineChangeController();
30111 this.keyStatusMap_ = new Map();
30112 const segmentLoaderSettings = {
30113 vhs: this.vhs_,
30114 parse708captions: options.parse708captions,
30115 useDtsForTimestampOffset: options.useDtsForTimestampOffset,
30116 captionServices,
30117 mediaSource: this.mediaSource,
30118 currentTime: this.tech_.currentTime.bind(this.tech_),
30119 seekable: () => this.seekable(),
30120 seeking: () => this.tech_.seeking(),
30121 duration: () => this.duration(),
30122 hasPlayed: () => this.hasPlayed_,
30123 goalBufferLength: () => this.goalBufferLength(),
30124 bandwidth,
30125 syncController: this.syncController_,
30126 decrypter: this.decrypter_,
30127 sourceType: this.sourceType_,
30128 inbandTextTracks: this.inbandTextTracks_,
30129 cacheEncryptionKeys,
30130 sourceUpdater: this.sourceUpdater_,
30131 timelineChangeController: this.timelineChangeController_,
30132 exactManifestTimings: options.exactManifestTimings,
30133 addMetadataToTextTrack: this.addMetadataToTextTrack.bind(this)
30134 }; // The source type check not only determines whether a special DASH playlist loader
30135 // should be used, but also covers the case where the provided src is a vhs-json
30136 // manifest object (instead of a URL). In the case of vhs-json, the default
30137 // PlaylistLoader should be used.
30138
30139 this.mainPlaylistLoader_ = this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, this.vhs_, merge$1(this.requestOptions_, {
30140 addMetadataToTextTrack: this.addMetadataToTextTrack.bind(this)
30141 })) : new PlaylistLoader(src, this.vhs_, merge$1(this.requestOptions_, {
30142 addDateRangesToTextTrack: this.addDateRangesToTextTrack_.bind(this)
30143 }));
30144 this.setupMainPlaylistLoaderListeners_(); // setup segment loaders
30145 // combined audio/video or just video when alternate audio track is selected
30146
30147 this.mainSegmentLoader_ = new SegmentLoader(merge$1(segmentLoaderSettings, {
30148 segmentMetadataTrack: this.segmentMetadataTrack_,
30149 loaderType: 'main'
30150 }), options); // alternate audio track
30151
30152 this.audioSegmentLoader_ = new SegmentLoader(merge$1(segmentLoaderSettings, {
30153 loaderType: 'audio'
30154 }), options);
30155 this.subtitleSegmentLoader_ = new VTTSegmentLoader(merge$1(segmentLoaderSettings, {
30156 loaderType: 'vtt',
30157 featuresNativeTextTracks: this.tech_.featuresNativeTextTracks,
30158 loadVttJs: () => new Promise((resolve, reject) => {
30159 function onLoad() {
30160 tech.off('vttjserror', onError);
30161 resolve();
30162 }
30163
30164 function onError() {
30165 tech.off('vttjsloaded', onLoad);
30166 reject();
30167 }
30168
30169 tech.one('vttjsloaded', onLoad);
30170 tech.one('vttjserror', onError); // safe to call multiple times, script will be loaded only once:
30171
30172 tech.addWebVttScript_();
30173 })
30174 }), options);
30175
30176 const getBandwidth = () => {
30177 return this.mainSegmentLoader_.bandwidth;
30178 };
30179
30180 this.contentSteeringController_ = new ContentSteeringController(this.vhs_.xhr, getBandwidth);
30181 this.setupSegmentLoaderListeners_();
30182
30183 if (this.bufferBasedABR) {
30184 this.mainPlaylistLoader_.one('loadedplaylist', () => this.startABRTimer_());
30185 this.tech_.on('pause', () => this.stopABRTimer_());
30186 this.tech_.on('play', () => this.startABRTimer_());
30187 } // Create SegmentLoader stat-getters
30188 // mediaRequests_
30189 // mediaRequestsAborted_
30190 // mediaRequestsTimedout_
30191 // mediaRequestsErrored_
30192 // mediaTransferDuration_
30193 // mediaBytesTransferred_
30194 // mediaAppends_
30195
30196
30197 loaderStats.forEach(stat => {
30198 this[stat + '_'] = sumLoaderStat.bind(this, stat);
30199 });
30200 this.logger_ = logger('pc');
30201 this.triggeredFmp4Usage = false;
30202
30203 if (this.tech_.preload() === 'none') {
30204 this.loadOnPlay_ = () => {
30205 this.loadOnPlay_ = null;
30206 this.mainPlaylistLoader_.load();
30207 };
30208
30209 this.tech_.one('play', this.loadOnPlay_);
30210 } else {
30211 this.mainPlaylistLoader_.load();
30212 }
30213
30214 this.timeToLoadedData__ = -1;
30215 this.mainAppendsToLoadedData__ = -1;
30216 this.audioAppendsToLoadedData__ = -1;
30217 const event = this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)
30218
30219 this.tech_.one(event, () => {
30220 const timeToLoadedDataStart = Date.now();
30221 this.tech_.one('loadeddata', () => {
30222 this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
30223 this.mainAppendsToLoadedData__ = this.mainSegmentLoader_.mediaAppends;
30224 this.audioAppendsToLoadedData__ = this.audioSegmentLoader_.mediaAppends;
30225 });
30226 });
30227 }
30228
30229 mainAppendsToLoadedData_() {
30230 return this.mainAppendsToLoadedData__;
30231 }
30232
30233 audioAppendsToLoadedData_() {
30234 return this.audioAppendsToLoadedData__;
30235 }
30236
30237 appendsToLoadedData_() {
30238 const main = this.mainAppendsToLoadedData_();
30239 const audio = this.audioAppendsToLoadedData_();
30240
30241 if (main === -1 || audio === -1) {
30242 return -1;
30243 }
30244
30245 return main + audio;
30246 }
30247
30248 timeToLoadedData_() {
30249 return this.timeToLoadedData__;
30250 }
30251 /**
30252 * Run selectPlaylist and switch to the new playlist if we should
30253 *
30254 * @param {string} [reason=abr] a reason for why the ABR check is made
30255 * @private
30256 */
30257
30258
30259 checkABR_(reason = 'abr') {
30260 const nextPlaylist = this.selectPlaylist();
30261
30262 if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
30263 this.switchMedia_(nextPlaylist, reason);
30264 }
30265 }
30266
30267 switchMedia_(playlist, cause, delay) {
30268 const oldMedia = this.media();
30269 const oldId = oldMedia && (oldMedia.id || oldMedia.uri);
30270 const newId = playlist && (playlist.id || playlist.uri);
30271
30272 if (oldId && oldId !== newId) {
30273 this.logger_(`switch media ${oldId} -> ${newId} from ${cause}`);
30274 this.tech_.trigger({
30275 type: 'usage',
30276 name: `vhs-rendition-change-${cause}`
30277 });
30278 }
30279
30280 this.mainPlaylistLoader_.media(playlist, delay);
30281 }
30282 /**
30283 * A function that ensures we switch our playlists inside of `mediaTypes`
30284 * to match the current `serviceLocation` provided by the contentSteering controller.
30285 * We want to check media types of `AUDIO`, `SUBTITLES`, and `CLOSED-CAPTIONS`.
30286 *
30287 * This should only be called on a DASH playback scenario while using content steering.
30288 * This is necessary due to differences in how media in HLS manifests are generally tied to
30289 * a video playlist, where in DASH that is not always the case.
30290 */
30291
30292
30293 switchMediaForDASHContentSteering_() {
30294 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(type => {
30295 const mediaType = this.mediaTypes_[type];
30296 const activeGroup = mediaType ? mediaType.activeGroup() : null;
30297 const pathway = this.contentSteeringController_.getPathway();
30298
30299 if (activeGroup && pathway) {
30300 // activeGroup can be an array or a single group
30301 const mediaPlaylists = activeGroup.length ? activeGroup[0].playlists : activeGroup.playlists;
30302 const dashMediaPlaylists = mediaPlaylists.filter(p => p.attributes.serviceLocation === pathway); // Switch the current active playlist to the correct CDN
30303
30304 if (dashMediaPlaylists.length) {
30305 this.mediaTypes_[type].activePlaylistLoader.media(dashMediaPlaylists[0]);
30306 }
30307 }
30308 });
30309 }
30310 /**
30311 * Start a timer that periodically calls checkABR_
30312 *
30313 * @private
30314 */
30315
30316
30317 startABRTimer_() {
30318 this.stopABRTimer_();
30319 this.abrTimer_ = window.setInterval(() => this.checkABR_(), 250);
30320 }
30321 /**
30322 * Stop the timer that periodically calls checkABR_
30323 *
30324 * @private
30325 */
30326
30327
30328 stopABRTimer_() {
30329 // if we're scrubbing, don't stop the ABR timer; the pause is transient.
30330 // This getter will be added to Video.js in version 7.11.
30331 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
30332 return;
30333 }
30334
30335 window.clearInterval(this.abrTimer_);
30336 this.abrTimer_ = null;
30337 }
30338 /**
30339 * Get a list of playlists for the currently selected audio playlist
30340 *
30341 * @return {Array} the array of audio playlists
30342 */
30343
30344
30345 getAudioTrackPlaylists_() {
30346 const main = this.main();
30347 const defaultPlaylists = main && main.playlists || []; // if we don't have any audio groups then we can only
30348 // assume that the audio tracks are contained in main
30349 // playlist array, use that or an empty array.
30350
30351 if (!main || !main.mediaGroups || !main.mediaGroups.AUDIO) {
30352 return defaultPlaylists;
30353 }
30354
30355 const AUDIO = main.mediaGroups.AUDIO;
30356 const groupKeys = Object.keys(AUDIO);
30357 let track; // get the current active track
30358
30359 if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
30360 track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from main if mediaTypes_ isn't setup yet
30361 } else {
30362 // default group is `main` or just the first group.
30363 const defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
30364
30365 for (const label in defaultGroup) {
30366 if (defaultGroup[label].default) {
30367 track = {
30368 label
30369 };
30370 break;
30371 }
30372 }
30373 } // no active track means no playlists.
30374
30375
30376 if (!track) {
30377 return defaultPlaylists;
30378 }
30379
30380 const playlists = []; // get all of the playlists that are possible for the
30381 // active track.
30382
30383 for (const group in AUDIO) {
30384 if (AUDIO[group][track.label]) {
30385 const properties = AUDIO[group][track.label];
30386
30387 if (properties.playlists && properties.playlists.length) {
30388 playlists.push.apply(playlists, properties.playlists);
30389 } else if (properties.uri) {
30390 playlists.push(properties);
30391 } else if (main.playlists.length) {
30392 // if an audio group does not have a uri
30393 // see if we have main playlists that use it as a group.
30394 // if we do then add those to the playlists list.
30395 for (let i = 0; i < main.playlists.length; i++) {
30396 const playlist = main.playlists[i];
30397
30398 if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
30399 playlists.push(playlist);
30400 }
30401 }
30402 }
30403 }
30404 }
30405
30406 if (!playlists.length) {
30407 return defaultPlaylists;
30408 }
30409
30410 return playlists;
30411 }
30412 /**
30413 * Register event handlers on the main playlist loader. A helper
30414 * function for construction time.
30415 *
30416 * @private
30417 */
30418
30419
30420 setupMainPlaylistLoaderListeners_() {
30421 this.mainPlaylistLoader_.on('loadedmetadata', () => {
30422 const media = this.mainPlaylistLoader_.media();
30423 const requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
30424 // timeout the request.
30425
30426 if (isLowestEnabledRendition(this.mainPlaylistLoader_.main, this.mainPlaylistLoader_.media())) {
30427 this.requestOptions_.timeout = 0;
30428 } else {
30429 this.requestOptions_.timeout = requestTimeout;
30430 } // if this isn't a live video and preload permits, start
30431 // downloading segments
30432
30433
30434 if (media.endList && this.tech_.preload() !== 'none') {
30435 this.mainSegmentLoader_.playlist(media, this.requestOptions_);
30436 this.mainSegmentLoader_.load();
30437 }
30438
30439 setupMediaGroups({
30440 sourceType: this.sourceType_,
30441 segmentLoaders: {
30442 AUDIO: this.audioSegmentLoader_,
30443 SUBTITLES: this.subtitleSegmentLoader_,
30444 main: this.mainSegmentLoader_
30445 },
30446 tech: this.tech_,
30447 requestOptions: this.requestOptions_,
30448 mainPlaylistLoader: this.mainPlaylistLoader_,
30449 vhs: this.vhs_,
30450 main: this.main(),
30451 mediaTypes: this.mediaTypes_,
30452 excludePlaylist: this.excludePlaylist.bind(this)
30453 });
30454 this.triggerPresenceUsage_(this.main(), media);
30455 this.setupFirstPlay();
30456
30457 if (!this.mediaTypes_.AUDIO.activePlaylistLoader || this.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
30458 this.trigger('selectedinitialmedia');
30459 } else {
30460 // We must wait for the active audio playlist loader to
30461 // finish setting up before triggering this event so the
30462 // representations API and EME setup is correct
30463 this.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', () => {
30464 this.trigger('selectedinitialmedia');
30465 });
30466 }
30467 });
30468 this.mainPlaylistLoader_.on('loadedplaylist', () => {
30469 if (this.loadOnPlay_) {
30470 this.tech_.off('play', this.loadOnPlay_);
30471 }
30472
30473 let updatedPlaylist = this.mainPlaylistLoader_.media();
30474
30475 if (!updatedPlaylist) {
30476 // Add content steering listeners on first load and init.
30477 this.attachContentSteeringListeners_();
30478 this.initContentSteeringController_(); // exclude any variants that are not supported by the browser before selecting
30479 // an initial media as the playlist selectors do not consider browser support
30480
30481 this.excludeUnsupportedVariants_();
30482 let selectedMedia;
30483
30484 if (this.enableLowInitialPlaylist) {
30485 selectedMedia = this.selectInitialPlaylist();
30486 }
30487
30488 if (!selectedMedia) {
30489 selectedMedia = this.selectPlaylist();
30490 }
30491
30492 if (!selectedMedia || !this.shouldSwitchToMedia_(selectedMedia)) {
30493 return;
30494 }
30495
30496 this.initialMedia_ = selectedMedia;
30497 this.switchMedia_(this.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
30498 // fire again since the playlist will be requested. In the case of vhs-json
30499 // (where the manifest object is provided as the source), when the media
30500 // playlist's `segments` list is already available, a media playlist won't be
30501 // requested, and loadedplaylist won't fire again, so the playlist handler must be
30502 // called on its own here.
30503
30504 const haveJsonSource = this.sourceType_ === 'vhs-json' && this.initialMedia_.segments;
30505
30506 if (!haveJsonSource) {
30507 return;
30508 }
30509
30510 updatedPlaylist = this.initialMedia_;
30511 }
30512
30513 this.handleUpdatedMediaPlaylist(updatedPlaylist);
30514 });
30515 this.mainPlaylistLoader_.on('error', () => {
30516 const error = this.mainPlaylistLoader_.error;
30517 this.excludePlaylist({
30518 playlistToExclude: error.playlist,
30519 error
30520 });
30521 });
30522 this.mainPlaylistLoader_.on('mediachanging', () => {
30523 this.mainSegmentLoader_.abort();
30524 this.mainSegmentLoader_.pause();
30525 });
30526 this.mainPlaylistLoader_.on('mediachange', () => {
30527 const media = this.mainPlaylistLoader_.media();
30528 const requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
30529 // time out the request.
30530
30531 if (isLowestEnabledRendition(this.mainPlaylistLoader_.main, this.mainPlaylistLoader_.media())) {
30532 this.requestOptions_.timeout = 0;
30533 } else {
30534 this.requestOptions_.timeout = requestTimeout;
30535 }
30536
30537 if (this.sourceType_ === 'dash') {
30538 // we don't want to re-request the same hls playlist right after it was changed
30539 this.mainPlaylistLoader_.load();
30540 } // TODO: Create a new event on the PlaylistLoader that signals
30541 // that the segments have changed in some way and use that to
30542 // update the SegmentLoader instead of doing it twice here and
30543 // on `loadedplaylist`
30544
30545
30546 this.mainSegmentLoader_.pause();
30547 this.mainSegmentLoader_.playlist(media, this.requestOptions_);
30548
30549 if (this.waitingForFastQualityPlaylistReceived_) {
30550 this.runFastQualitySwitch_();
30551 } else {
30552 this.mainSegmentLoader_.load();
30553 }
30554
30555 this.tech_.trigger({
30556 type: 'mediachange',
30557 bubbles: true
30558 });
30559 });
30560 this.mainPlaylistLoader_.on('playlistunchanged', () => {
30561 const updatedPlaylist = this.mainPlaylistLoader_.media(); // ignore unchanged playlists that have already been
30562 // excluded for not-changing. We likely just have a really slowly updating
30563 // playlist.
30564
30565 if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
30566 return;
30567 }
30568
30569 const playlistOutdated = this.stuckAtPlaylistEnd_(updatedPlaylist);
30570
30571 if (playlistOutdated) {
30572 // Playlist has stopped updating and we're stuck at its end. Try to
30573 // exclude it and switch to another playlist in the hope that that
30574 // one is updating (and give the player a chance to re-adjust to the
30575 // safe live point).
30576 this.excludePlaylist({
30577 error: {
30578 message: 'Playlist no longer updating.',
30579 reason: 'playlist-unchanged'
30580 }
30581 }); // useful for monitoring QoS
30582
30583 this.tech_.trigger('playliststuck');
30584 }
30585 });
30586 this.mainPlaylistLoader_.on('renditiondisabled', () => {
30587 this.tech_.trigger({
30588 type: 'usage',
30589 name: 'vhs-rendition-disabled'
30590 });
30591 });
30592 this.mainPlaylistLoader_.on('renditionenabled', () => {
30593 this.tech_.trigger({
30594 type: 'usage',
30595 name: 'vhs-rendition-enabled'
30596 });
30597 });
30598 }
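    /*
     * Illustrative note (not part of the library): the request timeout used in
     * the handlers above is derived from the playlist's target duration. With
     * a hypothetical 6-second target duration:
     *
     *   const media = { targetDuration: 6 };
     *   const requestTimeout = media.targetDuration * 1.5 * 1000; // => 9000 ms
     *
     * When only the lowest enabled rendition remains, the timeout is set to 0
     * (disabled) instead, since a timed-out request would have no lower
     * rendition to fall back to.
     */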
30599 /**
30600 * Given an updated media playlist (whether it was loaded for the first time, or
30601 * refreshed for live playlists), update any relevant properties and state to reflect
30602 * changes in the media that should be accounted for (e.g., cues and duration).
30603 *
30604 * @param {Object} updatedPlaylist the updated media playlist object
30605 *
30606 * @private
30607 */
30608
30609
30610 handleUpdatedMediaPlaylist(updatedPlaylist) {
30611 if (this.useCueTags_) {
30612 this.updateAdCues_(updatedPlaylist);
30613 } // TODO: Create a new event on the PlaylistLoader that signals
30614 // that the segments have changed in some way and use that to
30615 // update the SegmentLoader instead of doing it twice here and
30616 // on `mediachange`
30617
30618
30619 this.mainSegmentLoader_.pause();
30620 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
30621
30622 if (this.waitingForFastQualityPlaylistReceived_) {
30623 this.runFastQualitySwitch_();
30624 }
30625
30626 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
30627 // as it is possible that it was temporarily stopped while waiting for
30628 // a playlist (e.g., in case the playlist errored and we re-requested it).
30629
30630 if (!this.tech_.paused()) {
30631 this.mainSegmentLoader_.load();
30632
30633 if (this.audioSegmentLoader_) {
30634 this.audioSegmentLoader_.load();
30635 }
30636 }
30637 }
30638 /**
30639 * A helper function for triggering presence usage events once per source
30640 *
30641 * @private
30642 */
30643
30644
30645 triggerPresenceUsage_(main, media) {
30646 const mediaGroups = main.mediaGroups || {};
30647 let defaultDemuxed = true;
30648 const audioGroupKeys = Object.keys(mediaGroups.AUDIO);
30649
30650 for (const mediaGroup in mediaGroups.AUDIO) {
30651 for (const label in mediaGroups.AUDIO[mediaGroup]) {
30652 const properties = mediaGroups.AUDIO[mediaGroup][label];
30653
30654 if (!properties.uri) {
30655 defaultDemuxed = false;
30656 }
30657 }
30658 }
30659
30660 if (defaultDemuxed) {
30661 this.tech_.trigger({
30662 type: 'usage',
30663 name: 'vhs-demuxed'
30664 });
30665 }
30666
30667 if (Object.keys(mediaGroups.SUBTITLES).length) {
30668 this.tech_.trigger({
30669 type: 'usage',
30670 name: 'vhs-webvtt'
30671 });
30672 }
30673
30674 if (Vhs$1.Playlist.isAes(media)) {
30675 this.tech_.trigger({
30676 type: 'usage',
30677 name: 'vhs-aes'
30678 });
30679 }
30680
30681 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
30682 this.tech_.trigger({
30683 type: 'usage',
30684 name: 'vhs-alternate-audio'
30685 });
30686 }
30687
30688 if (this.useCueTags_) {
30689 this.tech_.trigger({
30690 type: 'usage',
30691 name: 'vhs-playlist-cue-tags'
30692 });
30693 }
30694 }
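    /*
     * Illustrative usage (an assumption, not part of this file): the `usage`
     * events triggered above can be observed from application code through the
     * tech. A minimal sketch, assuming a video.js player instance named
     * `player` (direct tech access may log a warning depending on the
     * video.js version):
     *
     *   player.tech(true).on('usage', (event) => {
     *     console.log('VHS feature in use:', event.name); // e.g. 'vhs-demuxed'
     *   });
     */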
30695
30696 shouldSwitchToMedia_(nextPlaylist) {
30697 const currentPlaylist = this.mainPlaylistLoader_.media() || this.mainPlaylistLoader_.pendingMedia_;
30698 const currentTime = this.tech_.currentTime();
30699 const bufferLowWaterLine = this.bufferLowWaterLine();
30700 const bufferHighWaterLine = this.bufferHighWaterLine();
30701 const buffered = this.tech_.buffered();
30702 return shouldSwitchToMedia({
30703 buffered,
30704 currentTime,
30705 currentPlaylist,
30706 nextPlaylist,
30707 bufferLowWaterLine,
30708 bufferHighWaterLine,
30709 duration: this.duration(),
30710 bufferBasedABR: this.bufferBasedABR,
30711 log: this.logger_
30712 });
30713 }
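    /*
     * Illustrative sketch (not the library's implementation): the external
     * shouldSwitchToMedia helper weighs the buffer ahead of the playhead
     * against the water lines gathered above. Very roughly, assuming
     * `forwardBuffer` is the number of seconds buffered past currentTime:
     *
     *   // upswitches wait until the forward buffer reaches the low water
     *   // line; downswitches are allowed immediately since they are cheap
     *   const roughUpswitchOk = (forwardBuffer, bufferLowWaterLine) =>
     *     forwardBuffer >= bufferLowWaterLine;
     *
     * The real helper, defined earlier in this bundle, also accounts for VOD
     * vs. live content, identical playlists and buffer-based ABR.
     */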
30714 /**
30715 * Register event handlers on the segment loaders. A helper function
30716 * for construction time.
30717 *
30718 * @private
30719 */
30720
30721
30722 setupSegmentLoaderListeners_() {
30723 this.mainSegmentLoader_.on('bandwidthupdate', () => {
30724 // Whether or not buffer based ABR or another ABR is used, on a bandwidth change it's
30725 // useful to check to see if a rendition switch should be made.
30726 this.checkABR_('bandwidthupdate');
30727 this.tech_.trigger('bandwidthupdate');
30728 });
30729 this.mainSegmentLoader_.on('timeout', () => {
30730 if (this.bufferBasedABR) {
30731 // If a rendition change is needed, then it would've been done on `bandwidthupdate`.
30732 // Here the only consideration is that for buffer based ABR there's no guarantee
30733 // of an immediate switch (since the bandwidth is averaged with a timeout
30734 // bandwidth value of 1), so force a load on the segment loader to keep it going.
30735 this.mainSegmentLoader_.load();
30736 }
30737 }); // `progress` events are not reliable enough of a bandwidth measure to trigger buffer
30738 // based ABR.
30739
30740 if (!this.bufferBasedABR) {
30741 this.mainSegmentLoader_.on('progress', () => {
30742 this.trigger('progress');
30743 });
30744 }
30745
30746 this.mainSegmentLoader_.on('error', () => {
30747 const error = this.mainSegmentLoader_.error();
30748 this.excludePlaylist({
30749 playlistToExclude: error.playlist,
30750 error
30751 });
30752 });
30753 this.mainSegmentLoader_.on('appenderror', () => {
30754 this.error = this.mainSegmentLoader_.error_;
30755 this.trigger('error');
30756 });
30757 this.mainSegmentLoader_.on('syncinfoupdate', () => {
30758 this.onSyncInfoUpdate_();
30759 });
30760 this.mainSegmentLoader_.on('timestampoffset', () => {
30761 this.tech_.trigger({
30762 type: 'usage',
30763 name: 'vhs-timestamp-offset'
30764 });
30765 });
30766 this.audioSegmentLoader_.on('syncinfoupdate', () => {
30767 this.onSyncInfoUpdate_();
30768 });
30769 this.audioSegmentLoader_.on('appenderror', () => {
30770 this.error = this.audioSegmentLoader_.error_;
30771 this.trigger('error');
30772 });
30773 this.mainSegmentLoader_.on('ended', () => {
30774 this.logger_('main segment loader ended');
30775 this.onEndOfStream();
30776 });
30777 this.mainSegmentLoader_.on('earlyabort', event => {
30778 // never try to early abort with the new ABR algorithm
30779 if (this.bufferBasedABR) {
30780 return;
30781 }
30782
30783 this.delegateLoaders_('all', ['abort']);
30784 this.excludePlaylist({
30785 error: {
30786 message: 'Aborted early because there isn\'t enough bandwidth to complete ' + 'the request without rebuffering.'
30787 },
30788 playlistExclusionDuration: ABORT_EARLY_EXCLUSION_SECONDS
30789 });
30790 });
30791
30792 const updateCodecs = () => {
30793 if (!this.sourceUpdater_.hasCreatedSourceBuffers()) {
30794 return this.tryToCreateSourceBuffers_();
30795 }
30796
30797 const codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
30798
30799 if (!codecs) {
30800 return;
30801 }
30802
30803 this.sourceUpdater_.addOrChangeSourceBuffers(codecs);
30804 };
30805
30806 this.mainSegmentLoader_.on('trackinfo', updateCodecs);
30807 this.audioSegmentLoader_.on('trackinfo', updateCodecs);
30808 this.mainSegmentLoader_.on('fmp4', () => {
30809 if (!this.triggeredFmp4Usage) {
30810 this.tech_.trigger({
30811 type: 'usage',
30812 name: 'vhs-fmp4'
30813 });
30814 this.triggeredFmp4Usage = true;
30815 }
30816 });
30817 this.audioSegmentLoader_.on('fmp4', () => {
30818 if (!this.triggeredFmp4Usage) {
30819 this.tech_.trigger({
30820 type: 'usage',
30821 name: 'vhs-fmp4'
30822 });
30823 this.triggeredFmp4Usage = true;
30824 }
30825 });
30826 this.audioSegmentLoader_.on('ended', () => {
30827 this.logger_('audioSegmentLoader ended');
30828 this.onEndOfStream();
30829 });
30830 }
30831
30832 mediaSecondsLoaded_() {
30833 return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded; // Math.max with a single argument was a no-op, so return the sum directly
30834 }
30835 /**
30836 * Call load on our SegmentLoaders
30837 */
30838
30839
30840 load() {
30841 this.mainSegmentLoader_.load();
30842
30843 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
30844 this.audioSegmentLoader_.load();
30845 }
30846
30847 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
30848 this.subtitleSegmentLoader_.load();
30849 }
30850 }
30851 /**
30852 * Re-tune playback quality level for the current player
30853 * conditions. This method will perform destructive actions like removing
30854 * already buffered content in order to readjust the currently active
30855 * playlist quickly. This is good for manual quality changes
30856 *
30857 * @private
30858 */
30859
30860
30861 fastQualityChange_(media = this.selectPlaylist()) {
30862 if (media && media === this.mainPlaylistLoader_.media()) {
30863 this.logger_('skipping fastQualityChange because new media is same as old');
30864 return;
30865 }
30866
30867 this.switchMedia_(media, 'fast-quality'); // to avoid a race condition, don't reset the loaders immediately:
30868 // doing so could restart loading from the previous rendition's segments because the new playlist has not been received yet
30869
30870 this.waitingForFastQualityPlaylistReceived_ = true;
30871 }
30872
30873 runFastQualitySwitch_() {
30874 this.waitingForFastQualityPlaylistReceived_ = false; // Delete all buffered data to allow an immediate quality switch, then seek to give
30875 // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
30876 // ahead was roughly the minimum that will accomplish this across a variety of content
30877 // in IE and Edge, but seeking in place is sufficient on all other browsers)
30878 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
30879 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
30880
30881 this.mainSegmentLoader_.pause();
30882 this.mainSegmentLoader_.resetEverything(() => {
30883 this.tech_.setCurrentTime(this.tech_.currentTime());
30884 }); // don't need to reset audio as it is reset when media changes
30885 }
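    /*
     * Illustrative note (not part of the library): the "seek in place" above
     * reduces to the following one-liner, assuming a tech instance:
     *
     *   const seekInPlace = (tech) => tech.setCurrentTime(tech.currentTime());
     *
     * Re-seeking to the current time forces the media element to discard
     * frames decoded from the previous rendition without visibly moving the
     * playhead.
     */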
30886 /**
30887 * Begin playback.
30888 */
30889
30890
30891 play() {
30892 if (this.setupFirstPlay()) {
30893 return;
30894 }
30895
30896 if (this.tech_.ended()) {
30897 this.tech_.setCurrentTime(0);
30898 }
30899
30900 if (this.hasPlayed_) {
30901 this.load();
30902 }
30903
30904 const seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
30905 // seek forward to the live point
30906
30907 if (this.tech_.duration() === Infinity) {
30908 if (this.tech_.currentTime() < seekable.start(0)) {
30909 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
30910 }
30911 }
30912 }
30913 /**
30914 * Seek to the latest media position if this is a live video and the
30915 * player and video are loaded and initialized.
30916 */
30917
30918
30919 setupFirstPlay() {
30920 const media = this.mainPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
30921 // If 1) there is no active media
30922 // 2) the player is paused
30923 // 3) the first play has already been setup
30924 // then exit early
30925
30926 if (!media || this.tech_.paused() || this.hasPlayed_) {
30927 return false;
30928 } // when the video is a live stream and/or has a start time
30929
30930
30931 if (!media.endList || media.start) {
30932 const seekable = this.seekable();
30933
30934 if (!seekable.length) {
30935 // without a seekable range, the player cannot seek to begin buffering at the
30936 // live or start point
30937 return false;
30938 }
30939
30940 const seekableEnd = seekable.end(0);
30941 let startPoint = seekableEnd;
30942
30943 if (media.start) {
30944 const offset = media.start.timeOffset;
30945
30946 if (offset < 0) {
30947 startPoint = Math.max(seekableEnd + offset, seekable.start(0));
30948 } else {
30949 startPoint = Math.min(seekableEnd, offset);
30950 }
30951 } // trigger firstplay to inform the source handler to ignore the next seek event
30952
30953
30954 this.trigger('firstplay'); // seek to the live point
30955
30956 this.tech_.setCurrentTime(startPoint);
30957 }
30958
30959 this.hasPlayed_ = true; // we can begin loading now that everything is ready
30960
30961 this.load();
30962 return true;
30963 }
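    /*
     * Illustrative arithmetic (hypothetical numbers): how the start point
     * above follows from EXT-X-START's TIME-OFFSET, assuming a seekable
     * window of [10, 110]:
     *
     *   // negative offsets are measured back from the live edge:
     *   Math.max(110 + (-30), 10); // => 80
     *   // positive offsets are absolute, clamped to the live edge:
     *   Math.min(110, 30);         // => 30
     */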
30964 /**
30965 * handle the sourceopen event on the MediaSource
30966 *
30967 * @private
30968 */
30969
30970
30971 handleSourceOpen_() {
30972 // Only attempt to create the source buffer if none already exist.
30973 // handleSourceOpen is also called when we are "re-opening" a source buffer
30974 // after `endOfStream` has been called (in response to a seek for instance)
30975 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
30976 // code in video.js but is required because play() must be invoked
30977 // *after* the media source has opened.
30978
30979 if (this.tech_.autoplay()) {
30980 const playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
30981 // on browsers which return a promise
30982
30983 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
30984 playPromise.then(null, e => {});
30985 }
30986 }
30987
30988 this.trigger('sourceopen');
30989 }
30990 /**
30991 * handle the sourceended event on the MediaSource
30992 *
30993 * @private
30994 */
30995
30996
30997 handleSourceEnded_() {
30998 if (!this.inbandTextTracks_.metadataTrack_) {
30999 return;
31000 }
31001
31002 const cues = this.inbandTextTracks_.metadataTrack_.cues;
31003
31004 if (!cues || !cues.length) {
31005 return;
31006 }
31007
31008 const duration = this.duration();
31009 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
31010 }
31011 /**
31012 * handle the durationchange event on the MediaSource
31013 *
31014 * @private
31015 */
31016
31017
31018 handleDurationChange_() {
31019 this.tech_.trigger('durationchange');
31020 }
31021 /**
31022 * Calls endOfStream on the media source when all active stream types have called
31023 * endOfStream
31024 *
31027 * @private
31028 */
31029
31030
31031 onEndOfStream() {
31032 let isEndOfStream = this.mainSegmentLoader_.ended_;
31033
31034 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
31035 const mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
31036
31037 if (!mainMediaInfo || mainMediaInfo.hasVideo) {
31038 // if we do not know if the main segment loader contains video yet or if we
31039 // definitively know the main segment loader contains video, then we need to wait
31040 // for both main and audio segment loaders to call endOfStream
31041 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
31042 } else {
31043 // otherwise just rely on the audio loader
31044 isEndOfStream = this.audioSegmentLoader_.ended_;
31045 }
31046 }
31047
31048 if (!isEndOfStream) {
31049 return;
31050 }
31051
31052 this.stopABRTimer_();
31053 this.sourceUpdater_.endOfStream();
31054 }
31055 /**
31056 * Check if a playlist has stopped being updated
31057 *
31058 * @param {Object} playlist the media playlist object
31059 * @return {boolean} whether the playlist has stopped being updated or not
31060 */
31061
31062
31063 stuckAtPlaylistEnd_(playlist) {
31064 const seekable = this.seekable();
31065
31066 if (!seekable.length) {
31067 // playlist doesn't have enough information to determine whether we are stuck
31068 return false;
31069 }
31070
31071 const expired = this.syncController_.getExpiredTime(playlist, this.duration());
31072
31073 if (expired === null) {
31074 return false;
31075 } // does not use the safe live end to calculate playlist end, since we
31076 // don't want to say we are stuck while there is still content
31077
31078
31079 const absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
31080 const currentTime = this.tech_.currentTime();
31081 const buffered = this.tech_.buffered();
31082
31083 if (!buffered.length) {
31084 // return true if the playhead reached the absolute end of the playlist
31085 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
31086 }
31087
31088 const bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
31089 // end of playlist
31090
31091 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
31092 }
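    /*
     * Illustrative example (hypothetical numbers, assuming SAFE_TIME_DELTA is
     * the small constant, on the order of a tenth of a second, defined earlier
     * in this bundle): with an absolute playlist end of 120, currentTime of
     * 119.9 and a buffered end of 119.95, both checks above pass:
     *
     *   119.95 - 119.9  <= SAFE_TIME_DELTA; // little buffer left ahead
     *   120    - 119.95 <= SAFE_TIME_DELTA; // buffer reaches the playlist end
     *
     * so the playlist is reported as stuck and the 'playlistunchanged' handler
     * above may exclude it.
     */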
31093 /**
31094 * Exclude a playlist for a set amount of time, making it unavailable for selection by
31095 * the rendition selection algorithm, then force a new playlist (rendition) selection.
31096 *
31097 * @param {Object=} playlistToExclude
31098 * the playlist to exclude, defaults to the currently selected playlist
31099 * @param {Object=} error
31100 * an optional error
31101 * @param {number=} playlistExclusionDuration
31102 * an optional number of seconds to exclude the playlist
31103 */
31104
31105
31106 excludePlaylist({
31107 playlistToExclude = this.mainPlaylistLoader_.media(),
31108 error = {},
31109 playlistExclusionDuration
31110 }) {
31111 // If the `error` was generated by the playlist loader, it will contain
31112 // the playlist we were trying to load (but failed) and that should be
31113 // excluded instead of the currently selected playlist which is likely
31114 // out-of-date in this scenario
31115 playlistToExclude = playlistToExclude || this.mainPlaylistLoader_.media();
31116 playlistExclusionDuration = playlistExclusionDuration || error.playlistExclusionDuration || this.playlistExclusionDuration; // If there is no current playlist, then an error occurred while we were
31117 // trying to load the main OR while we were disposing of the tech
31118
31119 if (!playlistToExclude) {
31120 this.error = error;
31121
31122 if (this.mediaSource.readyState !== 'open') {
31123 this.trigger('error');
31124 } else {
31125 this.sourceUpdater_.endOfStream('network');
31126 }
31127
31128 return;
31129 }
31130
31131 playlistToExclude.playlistErrors_++;
31132 const playlists = this.mainPlaylistLoader_.main.playlists;
31133 const enabledPlaylists = playlists.filter(isEnabled);
31134 const isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === playlistToExclude; // Don't exclude the only playlist unless it was excluded
31135 // forever
31136
31137 if (playlists.length === 1 && playlistExclusionDuration !== Infinity) {
31138 videojs__default["default"].log.warn(`Problem encountered with playlist ${playlistToExclude.id}. ` + 'Trying again since it is the only playlist.');
31139 this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
31140
31141 return this.mainPlaylistLoader_.load(isFinalRendition);
31142 }
31143
31144 if (isFinalRendition) {
31145 // If we're content steering, try other pathways.
31146 if (this.main().contentSteering) {
31147 const pathway = this.pathwayAttribute_(playlistToExclude); // Ignore at least 1 steering manifest refresh.
31148
31149 const reIncludeDelay = this.contentSteeringController_.steeringManifest.ttl * 1000;
31150 this.contentSteeringController_.excludePathway(pathway);
31151 this.excludeThenChangePathway_();
31152 setTimeout(() => {
31153 this.contentSteeringController_.addAvailablePathway(pathway);
31154 }, reIncludeDelay);
31155 return;
31156 } // Since we're on the final non-excluded playlist, and we're about to exclude
31157 // it, instead of erroring the player or retrying this playlist, clear out the current
31158 // exclusion list. This allows other playlists to be attempted in case any have been
31159 // fixed.
31160
31161
31162 let reincluded = false;
31163 playlists.forEach(playlist => {
31164 // skip current playlist which is about to be excluded
31165 if (playlist === playlistToExclude) {
31166 return;
31167 }
31168
31169 const excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
31170
31171 if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
31172 reincluded = true;
31173 delete playlist.excludeUntil;
31174 }
31175 });
31176
31177 if (reincluded) {
31178 videojs__default["default"].log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
31179 // playlist. This is needed for users relying on the retryplaylist event to catch a
31180 // case where the player might be stuck and looping through "dead" playlists.
31181
31182 this.tech_.trigger('retryplaylist');
31183 }
31184 } // Exclude this playlist
31185
31186
31187 let excludeUntil;
31188
31189 if (playlistToExclude.playlistErrors_ > this.maxPlaylistRetries) {
31190 excludeUntil = Infinity;
31191 } else {
31192 excludeUntil = Date.now() + playlistExclusionDuration * 1000;
31193 }
31194
31195 playlistToExclude.excludeUntil = excludeUntil;
31196
31197 if (error.reason) {
31198 playlistToExclude.lastExcludeReason_ = error.reason;
31199 }
31200
31201 this.tech_.trigger('excludeplaylist');
31202 this.tech_.trigger({
31203 type: 'usage',
31204 name: 'vhs-rendition-excluded'
31205 }); // TODO: only load a new playlist if we're excluding the current playlist
31206 // If this function was called with a playlist that's not the current active playlist
31207 // (e.g., media().id !== playlistToExclude.id),
31208 // then a new playlist should not be selected and loaded, as there's nothing wrong with the current playlist.
31209
31210 const nextPlaylist = this.selectPlaylist();
31211
31212 if (!nextPlaylist) {
31213 this.error = 'Playback cannot continue. No available working or supported playlists.';
31214 this.trigger('error');
31215 return;
31216 }
31217
31218 const logFn = error.internal ? this.logger_ : videojs__default["default"].log.warn;
31219 const errorMessage = error.message ? ' ' + error.message : '';
31220 logFn(`${error.internal ? 'Internal problem' : 'Problem'} encountered with playlist ${playlistToExclude.id}.` + `${errorMessage} Switching to playlist ${nextPlaylist.id}.`); // if audio group changed reset audio loaders
31221
31222 if (nextPlaylist.attributes.AUDIO !== playlistToExclude.attributes.AUDIO) {
31223 this.delegateLoaders_('audio', ['abort', 'pause']);
31224 } // if subtitle group changed reset subtitle loaders
31225
31226
31227 if (nextPlaylist.attributes.SUBTITLES !== playlistToExclude.attributes.SUBTITLES) {
31228 this.delegateLoaders_('subtitle', ['abort', 'pause']);
31229 }
31230
31231 this.delegateLoaders_('main', ['abort', 'pause']);
31232 const delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
31233 const shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last request was within half the target duration
31234
31235 return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
31236 }
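    /*
     * Illustrative arithmetic (hypothetical values): with a
     * playlistExclusionDuration of 60 seconds, the exclusion window above
     * works out to
     *
     *   playlistToExclude.excludeUntil = Date.now() + 60 * 1000;
     *
     * and once playlistErrors_ exceeds maxPlaylistRetries the playlist is
     * excluded permanently via excludeUntil = Infinity.
     */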
31237 /**
31238 * Pause all segment/playlist loaders
31239 */
31240
31241
31242 pauseLoading() {
31243 this.delegateLoaders_('all', ['abort', 'pause']);
31244 this.stopABRTimer_();
31245 }
31246 /**
31247 * Call a set of functions in order on playlist loaders, segment loaders,
31248 * or both types of loaders.
31249 *
31250 * @param {string} filter
31251 * Filter loaders that should call fnNames using a string. Can be:
31252 * * all - run on all loaders
31253 * * audio - run on all audio loaders
31254 * * subtitle - run on all subtitle loaders
31255 * * main - run on the main loaders
31256 *
31257 * @param {Array|string} fnNames
31258 * A string or array of function names to call.
31259 */
31260
31261
31262 delegateLoaders_(filter, fnNames) {
31263 const loaders = [];
31264 const dontFilterPlaylist = filter === 'all';
31265
31266 if (dontFilterPlaylist || filter === 'main') {
31267 loaders.push(this.mainPlaylistLoader_);
31268 }
31269
31270 const mediaTypes = [];
31271
31272 if (dontFilterPlaylist || filter === 'audio') {
31273 mediaTypes.push('AUDIO');
31274 }
31275
31276 if (dontFilterPlaylist || filter === 'subtitle') {
31277 mediaTypes.push('CLOSED-CAPTIONS');
31278 mediaTypes.push('SUBTITLES');
31279 }
31280
31281 mediaTypes.forEach(mediaType => {
31282 const loader = this.mediaTypes_[mediaType] && this.mediaTypes_[mediaType].activePlaylistLoader;
31283
31284 if (loader) {
31285 loaders.push(loader);
31286 }
31287 });
31288 ['main', 'audio', 'subtitle'].forEach(name => {
31289 const loader = this[`${name}SegmentLoader_`];
31290
31291 if (loader && (filter === name || filter === 'all')) {
31292 loaders.push(loader);
31293 }
31294 });
31295 loaders.forEach(loader => fnNames.forEach(fnName => {
31296 if (typeof loader[fnName] === 'function') {
31297 loader[fnName]();
31298 }
31299 }));
31300 }
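    /*
     * Usage examples, as seen elsewhere in this class:
     *
     *   this.delegateLoaders_('all', ['abort', 'pause']);   // halt everything
     *   this.delegateLoaders_('audio', ['abort', 'pause']); // audio loaders only
     *
     * Each function name is invoked only if it exists on a given loader, so a
     * method missing from one loader type is silently skipped.
     */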
31301 /**
31302 * set the current time on all segment loaders
31303 *
31304 * @param {number} currentTime the current time to set
31305 * @return {number} the current time
31306 */
31307
31308
31309 setCurrentTime(currentTime) {
31310 const buffered = findRange(this.tech_.buffered(), currentTime);
31311
31312 if (!(this.mainPlaylistLoader_ && this.mainPlaylistLoader_.media())) {
31313 // return immediately if the metadata is not ready yet
31314 return 0;
31315 } // it's clearly an edge-case but don't throw an error if asked to
31316 // seek within an empty playlist
31317
31318
31319 if (!this.mainPlaylistLoader_.media().segments) {
31320 return 0;
31321 } // if the seek location is already buffered, continue buffering as usual
31322
31323
31324 if (buffered && buffered.length) {
31325 return currentTime;
31326 } // cancel outstanding requests so we begin buffering at the new
31327 // location
31328
31329
31330 this.mainSegmentLoader_.pause();
31331 this.mainSegmentLoader_.resetEverything();
31332
31333 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
31334 this.audioSegmentLoader_.pause();
31335 this.audioSegmentLoader_.resetEverything();
31336 }
31337
31338 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
31339 this.subtitleSegmentLoader_.pause();
31340 this.subtitleSegmentLoader_.resetEverything();
31341 } // start segment loader loading in case they are paused
31342
31343
31344 this.load();
31345 }
31346 /**
31347 * get the current duration
31348 *
31349 * @return {number} the duration
31350 */
31351
31352
31353 duration() {
31354 if (!this.mainPlaylistLoader_) {
31355 return 0;
31356 }
31357
31358 const media = this.mainPlaylistLoader_.media();
31359
31360 if (!media) {
31361 // no playlists loaded yet, so can't determine a duration
31362 return 0;
31363 } // Don't rely on the media source for duration in the case of a live playlist since
31364 // setting the native MediaSource's duration to infinity ends up with consequences to
31365 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
31366 //
31367 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
31368 // however, few browsers have support for setLiveSeekableRange()
31369 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
31370 //
31371 // Until a time when the duration of the media source can be set to infinity, and a
31372 // seekable range specified across browsers, just return Infinity.
31373
31374
31375 if (!media.endList) {
31376 return Infinity;
31377 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
31378 // available). If it's not available, fall back to a playlist-calculated estimate.
31379
31380
31381 if (this.mediaSource) {
31382 return this.mediaSource.duration;
31383 }
31384
31385 return Vhs$1.Playlist.duration(media);
31386 }
31387 /**
31388 * check the seekable range
31389 *
31390 * @return {TimeRange} the seekable range
31391 */
31392
31393
31394 seekable() {
31395 return this.seekable_;
31396 }
31397
31398 onSyncInfoUpdate_() {
31399 let audioSeekable; // TODO check for creation of both source buffers before updating seekable
31400 //
31401 // A fix was made to this function where a check for
31402 // this.sourceUpdater_.hasCreatedSourceBuffers
31403 // was added to ensure that both source buffers were created before seekable was
31404 // updated. However, it originally had a bug where it was checking for a true and
31405 // returning early instead of checking for false. Setting it to check for false to
31406 // return early though created other issues. A call to play() would check for seekable
31407 // end without verifying that a seekable range was present. In addition, even checking
31408 // for that didn't solve some issues, as handleFirstPlay is sometimes worked around
31409 // due to a media update calling load on the segment loaders, skipping a seek to live,
31410 // thereby starting live streams at the beginning of the stream rather than at the end.
31411 //
31412 // This conditional should be fixed to wait for the creation of two source buffers at
31413 // the same time as the other sections of code are fixed to properly seek to live and
31414 // not throw an error due to checking for a seekable end when no seekable range exists.
31415 //
31416 // For now, fall back to the older behavior, with the understanding that the seekable
31417 // range may not be completely correct, leading to a suboptimal initial live point.
31418
31419 if (!this.mainPlaylistLoader_) {
31420 return;
31421 }
31422
31423 let media = this.mainPlaylistLoader_.media();
31424
31425 if (!media) {
31426 return;
31427 }
31428
31429 let expired = this.syncController_.getExpiredTime(media, this.duration());
31430
31431 if (expired === null) {
31432 // not enough information to update seekable
31433 return;
31434 }
31435
31436 const main = this.mainPlaylistLoader_.main;
31437 const mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(main, media));
31438
31439 if (mainSeekable.length === 0) {
31440 return;
31441 }
31442
31443 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
31444 media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
31445 expired = this.syncController_.getExpiredTime(media, this.duration());
31446
31447 if (expired === null) {
31448 return;
31449 }
31450
31451 audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(main, media));
31452
31453 if (audioSeekable.length === 0) {
31454 return;
31455 }
31456 }
31457
31458 let oldEnd;
31459 let oldStart;
31460
31461 if (this.seekable_ && this.seekable_.length) {
31462 oldEnd = this.seekable_.end(0);
31463 oldStart = this.seekable_.start(0);
31464 }
31465
31466 if (!audioSeekable) {
31467 // seekable has been calculated based on buffering video data so it
31468 // can be returned directly
31469 this.seekable_ = mainSeekable;
31470 } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
31471 // seekables are pretty far off, rely on main
31472 this.seekable_ = mainSeekable;
31473 } else {
31474 this.seekable_ = createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
31475 } // seekable is the same as last time
31476
31477
31478 if (this.seekable_ && this.seekable_.length) {
31479 if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
31480 return;
31481 }
31482 }
31483
31484 this.logger_(`seekable updated [${printableRange(this.seekable_)}]`);
31485 this.tech_.trigger('seekablechanged');
31486 }
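    /*
     * Illustrative sketch (not the library's implementation): the final
     * branch above intersects the main and audio seekable windows. With plain
     * numbers and single-range inputs:
     *
     *   const intersect = ([aStart, aEnd], [bStart, bEnd]) =>
     *     [Math.max(aStart, bStart), Math.min(aEnd, bEnd)];
     *
     *   intersect([5, 100], [10, 90]); // => [10, 90]
     *
     * Disjoint ranges fall back to the main seekable window instead.
     */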
31487 /**
31488 * Update the player duration
31489 */
31490
31491
31492 updateDuration(isLive) {
31493 if (this.updateDuration_) {
31494 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
31495 this.updateDuration_ = null;
31496 }
31497
31498 if (this.mediaSource.readyState !== 'open') {
31499 this.updateDuration_ = this.updateDuration.bind(this, isLive);
31500 this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
31501 return;
31502 }
31503
31504 if (isLive) {
31505 const seekable = this.seekable();
31506
31507 if (!seekable.length) {
31508 return;
31509 } // Even in the case of a live playlist, the native MediaSource's duration should not
31510 // be set to Infinity (even though this would be expected for a live playlist), since
31511 // setting the native MediaSource's duration to infinity ends up with consequences to
31512 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
31513 //
31514 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
31515 // however, few browsers have support for setLiveSeekableRange()
31516 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
31517 //
31518 // Until a time when the duration of the media source can be set to infinity, and a
31519 // seekable range specified across browsers, the duration should be greater than or
31520 // equal to the last possible seekable value.
31521 // MediaSource duration starts as NaN
31522 // It is possible (and probable) that this case will never be reached for many
31523 // sources, since the MediaSource reports duration as the highest value without
31524 // accounting for timestamp offset. For example, if the timestamp offset is -100 and
31525 // we buffered times 0 to 100 with real times of 100 to 200, even though current
31526 // time will be between 0 and 100, the native media source may report the duration
31527 // as 200. However, since we report duration separate from the media source (as
31528 // Infinity), and as long as the native media source duration value is greater than
31529 // our reported seekable range, seeks will work as expected. The large number as
31530 // duration for live is actually a strategy used by some players to work around the
31531 // issue of live seekable ranges cited above.
31532
31533
31534 if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
31535 this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
31536 }
31537
31538 return;
31539 }
31540
31541 const buffered = this.tech_.buffered();
31542 let duration = Vhs$1.Playlist.duration(this.mainPlaylistLoader_.media());
31543
31544 if (buffered.length > 0) {
31545 duration = Math.max(duration, buffered.end(buffered.length - 1));
31546 }
31547
31548 if (this.mediaSource.duration !== duration) {
31549 this.sourceUpdater_.setDuration(duration);
31550 }
31551 }
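    /*
     * Illustrative note (hypothetical values): MediaSource duration starts as
     * NaN, so the live branch above can only grow it:
     *
     *   isNaN(NaN) || NaN < 60; // => true, duration is set to the seekable end
     *   isNaN(90) || 90 < 60;   // => false, an already-larger duration is kept
     */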
31552 /**
31553 * dispose of the PlaylistController and everything
31554 * that it controls
31555 */
31556
31557
31558 dispose() {
31559 this.trigger('dispose');
31560 this.decrypter_.terminate();
31561 this.mainPlaylistLoader_.dispose();
31562 this.mainSegmentLoader_.dispose();
31563 this.contentSteeringController_.dispose();
31564 this.keyStatusMap_.clear();
31565
31566 if (this.loadOnPlay_) {
31567 this.tech_.off('play', this.loadOnPlay_);
31568 }
31569
31570 ['AUDIO', 'SUBTITLES'].forEach(type => {
31571 const groups = this.mediaTypes_[type].groups;
31572
31573 for (const id in groups) {
31574 groups[id].forEach(group => {
31575 if (group.playlistLoader) {
31576 group.playlistLoader.dispose();
31577 }
31578 });
31579 }
31580 });
31581 this.audioSegmentLoader_.dispose();
31582 this.subtitleSegmentLoader_.dispose();
31583 this.sourceUpdater_.dispose();
31584 this.timelineChangeController_.dispose();
31585 this.stopABRTimer_();
31586
31587 if (this.updateDuration_) {
31588 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
31589 }
31590
31591 this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_);
31592
31593 this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
31594 this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
31595 this.off();
31596 }
31597 /**
31598 * return the main playlist object if we have one
31599 *
31600 * @return {Object} the main playlist object that we parsed
31601 */
31602
31603
31604 main() {
31605 return this.mainPlaylistLoader_.main;
31606 }
31607 /**
31608 * return the currently selected playlist
31609 *
31610 * @return {Object} the currently selected playlist object that we parsed
31611 */
31612
31613
31614 media() {
31615 // playlist loader will not return media if it has not been fully loaded
31616 return this.mainPlaylistLoader_.media() || this.initialMedia_;
31617 }
31618
31619 areMediaTypesKnown_() {
31620 const usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
31621 const hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using an audio loader, then we have audio media info
31622 // otherwise check on the segment loader.
31623
31624 const hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders have not loaded sufficiently to get codecs
31625
31626 if (!hasMainMediaInfo || !hasAudioMediaInfo) {
31627 return false;
31628 }
31629
31630 return true;
31631 }
31632
31633 getCodecsOrExclude_() {
31634 const media = {
31635 main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
31636 audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
31637 };
31638 const playlist = this.mainSegmentLoader_.getPendingSegmentPlaylist() || this.media(); // set "main" media equal to video
31639
31640 media.video = media.main;
31641 const playlistCodecs = codecsForPlaylist(this.main(), playlist);
31642 const codecs = {};
31643 const usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
31644
31645 if (media.main.hasVideo) {
31646 codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
31647 }
31648
31649 if (media.main.isMuxed) {
31650 codecs.video += `,${playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC}`;
31651 }
31652
31653 if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
31654 codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
31655
31656 media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
31657 } // no codecs, no playback.
31658
31659
31660 if (!codecs.audio && !codecs.video) {
31661 this.excludePlaylist({
31662 playlistToExclude: playlist,
31663 error: {
31664 message: 'Could not determine codecs for playlist.'
31665 },
31666 playlistExclusionDuration: Infinity
31667 });
31668 return;
31669 } // fmp4 relies on browser support, while ts relies on muxer support
31670
31671
31672 const supportFunction = (isFmp4, codec) => isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
31673
31674 const unsupportedCodecs = {};
31675 let unsupportedAudio;
31676 ['video', 'audio'].forEach(function (type) {
31677 if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
31678 const supporter = media[type].isFmp4 ? 'browser' : 'muxer';
31679 unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
31680 unsupportedCodecs[supporter].push(codecs[type]);
31681
31682 if (type === 'audio') {
31683 unsupportedAudio = supporter;
31684 }
31685 }
31686 });
31687
31688 if (usingAudioLoader && unsupportedAudio && playlist.attributes.AUDIO) {
31689 const audioGroup = playlist.attributes.AUDIO;
31690 this.main().playlists.forEach(variant => {
31691 const variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
31692
31693 if (variantAudioGroup === audioGroup && variant !== playlist) {
31694 variant.excludeUntil = Infinity;
31695 }
31696 });
31697 this.logger_(`excluding audio group ${audioGroup} as ${unsupportedAudio} does not support codec(s): "${codecs.audio}"`);
31698 } // if we have any unsupported codecs exclude this playlist.
31699
31700
31701 if (Object.keys(unsupportedCodecs).length) {
31702 const message = Object.keys(unsupportedCodecs).reduce((acc, supporter) => {
31703 if (acc) {
31704 acc += ', ';
31705 }
31706
31707 acc += `${supporter} does not support codec(s): "${unsupportedCodecs[supporter].join(',')}"`;
31708 return acc;
31709 }, '') + '.';
31710 this.excludePlaylist({
31711 playlistToExclude: playlist,
31712 error: {
31713 internal: true,
31714 message
31715 },
31716 playlistExclusionDuration: Infinity
31717 });
31718 return;
31719 } // check if codec switching is happening
31720
31721
31722 if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
31723 const switchMessages = [];
31724 ['video', 'audio'].forEach(type => {
31725 const newCodec = (parseCodecs(this.sourceUpdater_.codecs[type] || '')[0] || {}).type;
31726 const oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;
31727
31728 if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
31729 switchMessages.push(`"${this.sourceUpdater_.codecs[type]}" -> "${codecs[type]}"`);
31730 }
31731 });
31732
31733 if (switchMessages.length) {
31734 this.excludePlaylist({
31735 playlistToExclude: playlist,
31736 error: {
31737 message: `Codec switching not supported: ${switchMessages.join(', ')}.`,
31738 internal: true
31739 },
31740 playlistExclusionDuration: Infinity
31741 });
31742 return;
31743 }
31744 } // TODO: when using the muxer shouldn't we just return
31745 // the codecs that the muxer outputs?
31746
31747
31748 return codecs;
31749 }
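    /*
     * Illustrative example (hypothetical values): a returned codecs object and
     * the SourceBuffer MIME types it roughly implies:
     *
     *   const codecs = { video: 'avc1.640028', audio: 'mp4a.40.2' };
     *   // => roughly 'video/mp4;codecs="avc1.640028"' and
     *   //    'audio/mp4;codecs="mp4a.40.2"'
     *
     * For muxed content the audio codec is appended to codecs.video instead,
     * e.g. 'avc1.640028,mp4a.40.2', and codecs.audio is typically left unset.
     */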
31750 /**
31751 * Create source buffers and exclude any incompatible renditions.
31752 *
31753 * @private
31754 */
31755
31756
31757 tryToCreateSourceBuffers_() {
31758 // media source is not ready yet or sourceBuffers are already
31759 // created.
31760 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
31761 return;
31762 }
31763
31764 if (!this.areMediaTypesKnown_()) {
31765 return;
31766 }
31767
31768 const codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
31769
31770 if (!codecs) {
31771 return;
31772 }
31773
31774 this.sourceUpdater_.createSourceBuffers(codecs);
31775 const codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
31776 this.excludeIncompatibleVariants_(codecString);
31777 }
31778 /**
31779 * Excludes playlists with codecs that are unsupported by the muxer and browser.
31780 */
31781
31782
31783 excludeUnsupportedVariants_() {
31784 const playlists = this.main().playlists;
31785 const ids = []; // TODO: why don't we have a property to loop through all
31786 // playlists? Why did we ever mix indexes and keys?
31787
31788 Object.keys(playlists).forEach(key => {
31789 const variant = playlists[key]; // check if we already processed this playlist.
31790
31791 if (ids.indexOf(variant.id) !== -1) {
31792 return;
31793 }
31794
31795 ids.push(variant.id);
31796 const codecs = codecsForPlaylist(this.main, variant);
31797 const unsupported = [];
31798
31799 if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
31800 unsupported.push(`audio codec ${codecs.audio}`);
31801 }
31802
31803 if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
31804 unsupported.push(`video codec ${codecs.video}`);
31805 }
31806
31807 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
31808 unsupported.push(`text codec ${codecs.text}`);
31809 }
31810
31811 if (unsupported.length) {
31812 variant.excludeUntil = Infinity;
31813 this.logger_(`excluding ${variant.id} for unsupported: ${unsupported.join(', ')}`);
31814 }
31815 });
31816 }
31817 /**
31818 * Exclude playlists that are known to be codec or
31819 * stream-incompatible with the SourceBuffer configuration. For
31820 * instance, Media Source Extensions would cause the video element to
31821 * stall waiting for video data if you switched from a variant with
31822 * video and audio to an audio-only one.
31823 *
31824 * @param {Object} media a media playlist compatible with the current
31825 * set of SourceBuffers. Variants in the current main playlist that
31826 * do not appear to have compatible codec or stream configurations
31827 * will be excluded from the default playlist selection algorithm
31828 * indefinitely.
31829 * @private
31830 */
31831
31832
31833 excludeIncompatibleVariants_(codecString) {
31834 const ids = [];
31835 const playlists = this.main().playlists;
31836 const codecs = unwrapCodecList(parseCodecs(codecString));
31837 const codecCount_ = codecCount(codecs);
31838 const videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
31839 const audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
31840 Object.keys(playlists).forEach(key => {
31841 const variant = playlists[key]; // check if we already processed this playlist.
31842 // or if it is already excluded forever.
31843
31844 if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
31845 return;
31846 }
31847
31848 ids.push(variant.id);
31849 const exclusionReasons = []; // get codecs from the playlist for this variant
31850
31851 const variantCodecs = codecsForPlaylist(this.mainPlaylistLoader_.main, variant);
31852 const variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
31853 // variant is incompatible. Wait for mux.js to probe
31854
31855 if (!variantCodecs.audio && !variantCodecs.video) {
31856 return;
31857 } // TODO: we can support this by removing the
31858 // old media source and creating a new one, but it will take some work.
31859 // The number of streams cannot change
31860
31861
31862 if (variantCodecCount !== codecCount_) {
31863 exclusionReasons.push(`codec count "${variantCodecCount}" !== "${codecCount_}"`);
31864 } // only exclude playlists by codec change, if codecs cannot switch
31865 // during playback.
31866
31867
31868 if (!this.sourceUpdater_.canChangeType()) {
31869 const variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
31870 const variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
31871
31872 if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
31873 exclusionReasons.push(`video codec "${variantVideoDetails.type}" !== "${videoDetails.type}"`);
31874 } // the audio codec cannot change
31875
31876
31877 if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
31878 exclusionReasons.push(`audio codec "${variantAudioDetails.type}" !== "${audioDetails.type}"`);
31879 }
31880 }
31881
31882 if (exclusionReasons.length) {
31883 variant.excludeUntil = Infinity;
31884 this.logger_(`excluding ${variant.id}: ${exclusionReasons.join(' && ')}`);
31885 }
31886 });
31887 }
31888
31889 updateAdCues_(media) {
31890 let offset = 0;
31891 const seekable = this.seekable();
31892
31893 if (seekable.length) {
31894 offset = seekable.start(0);
31895 }
31896
31897 updateAdCues(media, this.cueTagsTrack_, offset);
31898 }
31899 /**
31900 * Calculates the desired forward buffer length based on current time
31901 *
31902 * @return {number} Desired forward buffer length in seconds
31903 */
31904
31905
31906 goalBufferLength() {
31907 const currentTime = this.tech_.currentTime();
31908 const initial = Config.GOAL_BUFFER_LENGTH;
31909 const rate = Config.GOAL_BUFFER_LENGTH_RATE;
31910 const max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
31911 return Math.min(initial + currentTime * rate, max);
31912 }
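    /*
     * Illustrative arithmetic, assuming the defaults defined earlier in this
     * bundle (GOAL_BUFFER_LENGTH = 30, GOAL_BUFFER_LENGTH_RATE = 1,
     * MAX_GOAL_BUFFER_LENGTH = 60):
     *
     *   Math.min(30 + 0 * 1, 60);  // => 30 at currentTime 0
     *   Math.min(30 + 20 * 1, 60); // => 50 at currentTime 20
     *   Math.min(30 + 90 * 1, 60); // => 60, clamped from then on
     */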
31913 /**
31914 * Calculates the desired buffer low water line based on current time
31915 *
31916 * @return {number} Desired buffer low water line in seconds
31917 */
31918
31919
31920 bufferLowWaterLine() {
31921 const currentTime = this.tech_.currentTime();
31922 const initial = Config.BUFFER_LOW_WATER_LINE;
31923 const rate = Config.BUFFER_LOW_WATER_LINE_RATE;
31924 const max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
31925 const newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
31926 return Math.min(initial + currentTime * rate, this.bufferBasedABR ? newMax : max);
31927 }
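    /*
     * Illustrative arithmetic, assuming the defaults defined earlier in this
     * bundle (BUFFER_LOW_WATER_LINE = 0, BUFFER_LOW_WATER_LINE_RATE = 1,
     * MAX_BUFFER_LOW_WATER_LINE = 30, experimental maximum = 16):
     *
     *   Math.min(0 + 10 * 1, 30); // => 10 with the default ABR
     *   Math.min(0 + 20 * 1, 16); // => 16 with buffer-based ABR's lower clamp
     */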
31928
31929 bufferHighWaterLine() {
31930 return Config.BUFFER_HIGH_WATER_LINE;
31931 }
31932
31933 addDateRangesToTextTrack_(dateRanges) {
31934 createMetadataTrackIfNotExists(this.inbandTextTracks_, 'com.apple.streaming', this.tech_);
31935 addDateRangeMetadata({
31936 inbandTextTracks: this.inbandTextTracks_,
31937 dateRanges
31938 });
31939 }
31940
31941 addMetadataToTextTrack(dispatchType, metadataArray, videoDuration) {
31942 const timestampOffset = this.sourceUpdater_.videoBuffer ? this.sourceUpdater_.videoTimestampOffset() : this.sourceUpdater_.audioTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
31943 // audio/video source with a metadata track, and an alt audio with a metadata track.
31944 // However, this probably won't happen, and if it does it can be handled then.
31945
31946 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.tech_);
31947 addMetadata({
31948 inbandTextTracks: this.inbandTextTracks_,
31949 metadataArray,
31950 timestampOffset,
31951 videoDuration
31952 });
31953 }
31954 /**
31955 * Utility for getting the pathway or service location from an HLS or DASH playlist.
31956 *
31957 * @param {Object} playlist for getting pathway from.
31958 * @return the pathway attribute of a playlist
31959 */
31960
31961
31962 pathwayAttribute_(playlist) {
31963 return playlist.attributes['PATHWAY-ID'] || playlist.attributes.serviceLocation;
31964 }
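    /*
     * Illustrative example (hypothetical playlists): the same accessor covers
     * both manifest formats:
     *
     *   pathwayAttribute_({ attributes: { 'PATHWAY-ID': 'cdn-a' } });     // HLS => 'cdn-a'
     *   pathwayAttribute_({ attributes: { serviceLocation: 'cdn-b' } }); // DASH => 'cdn-b'
     */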
31965 /**
31966 * Initialize available pathways and apply the tag properties.
31967 */
31968
31969
31970 initContentSteeringController_() {
31971 const main = this.main();
31972
31973 if (!main.contentSteering) {
31974 return;
31975 }
31976
31977 for (const playlist of main.playlists) {
31978 this.contentSteeringController_.addAvailablePathway(this.pathwayAttribute_(playlist));
31979 }
31980
31981 this.contentSteeringController_.assignTagProperties(main.uri, main.contentSteering); // request the steering manifest immediately if queryBeforeStart is set.
31982
31983 if (this.contentSteeringController_.queryBeforeStart) {
31984 // When queryBeforeStart is true, initial request should omit steering parameters.
31985 this.contentSteeringController_.requestSteeringManifest(true);
31986 return;
31987 } // otherwise start content steering after playback starts
31988
31989
31990 this.tech_.one('canplay', () => {
31991 this.contentSteeringController_.requestSteeringManifest();
31992 });
31993 }
31994 /**
31995 * Reset the content steering controller and re-init.
31996 */
31997
31998
31999 resetContentSteeringController_() {
32000 this.contentSteeringController_.clearAvailablePathways();
32001 this.contentSteeringController_.dispose();
32002 this.initContentSteeringController_();
32003 }
32004 /**
32005 * Attaches the listeners for content steering.
32006 */
32007
32008
32009 attachContentSteeringListeners_() {
32010 this.contentSteeringController_.on('content-steering', this.excludeThenChangePathway_.bind(this));
32011
32012 if (this.sourceType_ === 'dash') {
32013 this.mainPlaylistLoader_.on('loadedplaylist', () => {
32014 const main = this.main(); // check if steering tag or pathways changed.
32015
32016 const didDashTagChange = this.contentSteeringController_.didDASHTagChange(main.uri, main.contentSteering);
32017
32018 const didPathwaysChange = () => {
32019 const availablePathways = this.contentSteeringController_.getAvailablePathways();
32020 const newPathways = [];
32021
32022 for (const playlist of main.playlists) {
32023 const serviceLocation = playlist.attributes.serviceLocation;
32024
32025 if (serviceLocation) {
32026 newPathways.push(serviceLocation);
32027
32028 if (!availablePathways.has(serviceLocation)) {
32029 return true;
32030 }
32031 }
32032 } // If we have no new serviceLocations and previously had availablePathways
32033
32034
32035 if (!newPathways.length && availablePathways.size) {
32036 return true;
32037 }
32038
32039 return false;
32040 };
32041
32042 if (didDashTagChange || didPathwaysChange()) {
32043 this.resetContentSteeringController_();
32044 }
32045 });
32046 }
32047 }
32048 /**
32049 * Simple exclude and change playlist logic for content steering.
32050 */
32051
32052
32053 excludeThenChangePathway_() {
32054 const currentPathway = this.contentSteeringController_.getPathway();
32055
32056 if (!currentPathway) {
32057 return;
32058 }
32059
32060 this.handlePathwayClones_();
32061 const main = this.main();
32062 const playlists = main.playlists;
32063 const ids = new Set();
32064 let didEnablePlaylists = false;
32065 Object.keys(playlists).forEach(key => {
32066 const variant = playlists[key];
32067 const pathwayId = this.pathwayAttribute_(variant);
32068 const differentPathwayId = pathwayId && currentPathway !== pathwayId;
32069 const steeringExclusion = variant.excludeUntil === Infinity && variant.lastExcludeReason_ === 'content-steering';
32070
32071 if (steeringExclusion && !differentPathwayId) {
32072 delete variant.excludeUntil;
32073 delete variant.lastExcludeReason_;
32074 didEnablePlaylists = true;
32075 }
32076
32077 const noExcludeUntil = !variant.excludeUntil && variant.excludeUntil !== Infinity;
32078 const shouldExclude = !ids.has(variant.id) && differentPathwayId && noExcludeUntil;
32079
32080 if (!shouldExclude) {
32081 return;
32082 }
32083
32084 ids.add(variant.id);
32085 variant.excludeUntil = Infinity;
32086 variant.lastExcludeReason_ = 'content-steering'; // TODO: kind of spammy, maybe move this.
32087
32088 this.logger_(`excluding ${variant.id} for ${variant.lastExcludeReason_}`);
32089 });
32090
32091 if (this.contentSteeringController_.manifestType_ === 'DASH') {
32092 Object.keys(this.mediaTypes_).forEach(key => {
32093 const type = this.mediaTypes_[key];
32094
32095 if (type.activePlaylistLoader) {
32096 const currentPlaylist = type.activePlaylistLoader.media_; // Check if the current media playlist matches the current CDN
32097
32098 if (currentPlaylist && currentPlaylist.attributes.serviceLocation !== currentPathway) {
32099 didEnablePlaylists = true;
32100 }
32101 }
32102 });
32103 }
32104
32105 if (didEnablePlaylists) {
32106 this.changeSegmentPathway_();
32107 }
32108 }
32109 /**
32110 * Add, update, or delete playlists and media groups for
32111 * the pathway clones for HLS Content Steering.
32112 *
32113 * See https://datatracker.ietf.org/doc/draft-pantos-hls-rfc8216bis/
32114 *
32115    * NOTE: Pathway cloning does not currently support the `PER_VARIANT_URIS` and
32116    * `PER_RENDITION_URIS` URI replacement values, as we do not handle the
32117    * `STABLE-VARIANT-ID` or `STABLE-RENDITION-ID` values.
32118 */
32119
32120
32121 handlePathwayClones_() {
32122 const main = this.main();
32123 const playlists = main.playlists;
32124 const currentPathwayClones = this.contentSteeringController_.currentPathwayClones;
32125 const nextPathwayClones = this.contentSteeringController_.nextPathwayClones;
32126 const hasClones = currentPathwayClones && currentPathwayClones.size || nextPathwayClones && nextPathwayClones.size;
32127
32128 if (!hasClones) {
32129 return;
32130 }
32131
32132 for (const [id, clone] of currentPathwayClones.entries()) {
32133 const newClone = nextPathwayClones.get(id); // Delete the old pathway clone.
32134
32135 if (!newClone) {
32136 this.mainPlaylistLoader_.updateOrDeleteClone(clone);
32137 this.contentSteeringController_.excludePathway(id);
32138 }
32139 }
32140
32141 for (const [id, clone] of nextPathwayClones.entries()) {
32142 const oldClone = currentPathwayClones.get(id); // Create a new pathway if it is a new pathway clone object.
32143
32144 if (!oldClone) {
32145 const playlistsToClone = playlists.filter(p => {
32146 return p.attributes['PATHWAY-ID'] === clone['BASE-ID'];
32147 });
32148 playlistsToClone.forEach(p => {
32149 this.mainPlaylistLoader_.addClonePathway(clone, p);
32150 });
32151 this.contentSteeringController_.addAvailablePathway(id);
32152 continue;
32153 } // There have not been changes to the pathway clone object, so skip.
32154
32155
32156 if (this.equalPathwayClones_(oldClone, clone)) {
32157 continue;
32158 } // Update a preexisting cloned pathway.
32159        } // Update a preexisting cloned pathway, passing true for the update flag.
32160
32161
32162 this.mainPlaylistLoader_.updateOrDeleteClone(clone, true);
32163 this.contentSteeringController_.addAvailablePathway(id);
32164 } // Deep copy contents of next to current pathways.
32165
32166
32167 this.contentSteeringController_.currentPathwayClones = new Map(JSON.parse(JSON.stringify([...nextPathwayClones])));
32168 }
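  /*
   * Illustrative sketch: the shape of a pathway clone object consumed above, as
   * delivered by a steering manifest. The field names mirror the ones this class
   * reads (BASE-ID, ID, URI-REPLACEMENT.HOST, URI-REPLACEMENT.PARAMS); the values
   * are hypothetical.
   *
   *   {
   *     "BASE-ID": "cdn-a",
   *     "ID": "cdn-a-clone",
   *     "URI-REPLACEMENT": {
   *       "HOST": "fallback.example.com",
   *       "PARAMS": { "token": "abc123" }
   *     }
   *   }
   */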
32169 /**
32170 * Determines whether two pathway clone objects are equivalent.
32171 *
32172 * @param {Object} a The first pathway clone object.
32173 * @param {Object} b The second pathway clone object.
32174 * @return {boolean} True if the pathway clone objects are equal, false otherwise.
32175 */
32176
32177
32178 equalPathwayClones_(a, b) {
32179 if (a['BASE-ID'] !== b['BASE-ID'] || a.ID !== b.ID || a['URI-REPLACEMENT'].HOST !== b['URI-REPLACEMENT'].HOST) {
32180 return false;
32181 }
32182
32183 const aParams = a['URI-REPLACEMENT'].PARAMS;
32184 const bParams = b['URI-REPLACEMENT'].PARAMS; // We need to iterate through both lists of params because one could be
32185 // missing a parameter that the other has.
32186
32187 for (const p in aParams) {
32188 if (aParams[p] !== bParams[p]) {
32189 return false;
32190 }
32191 }
32192
32193 for (const p in bParams) {
32194 if (aParams[p] !== bParams[p]) {
32195 return false;
32196 }
32197 }
32198
32199 return true;
32200 }
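  /*
   * Illustrative sketch: per the comparison above, two clones differing only in a
   * URI-REPLACEMENT parameter are not equal, so the clone would be updated:
   *
   *   const a = { 'BASE-ID': 'cdn-a', ID: 'clone-1', 'URI-REPLACEMENT': { HOST: 'x.example.com', PARAMS: { token: '1' } } };
   *   const b = { 'BASE-ID': 'cdn-a', ID: 'clone-1', 'URI-REPLACEMENT': { HOST: 'x.example.com', PARAMS: { token: '2' } } };
   *   this.equalPathwayClones_(a, b); // => false (PARAMS.token differs)
   */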
32201 /**
32202 * Changes the current playlists for audio, video and subtitles after a new pathway
32203 * is chosen from content steering.
32204 */
32205
32206
32207 changeSegmentPathway_() {
32208 const nextPlaylist = this.selectPlaylist();
32209 this.pauseLoading(); // Switch audio and text track playlists if necessary in DASH
32210
32211 if (this.contentSteeringController_.manifestType_ === 'DASH') {
32212 this.switchMediaForDASHContentSteering_();
32213 }
32214
32215 this.switchMedia_(nextPlaylist, 'content-steering');
32216 }
32217 /**
32218    * Iterates through the playlists, checks each one's keyId set against the
32219    * keyStatusMap, and only enables playlists that have a usable key. If a
32220    * playlist has no keyId, it is left enabled by default.
32221 */
32222
32223
32224 excludeNonUsablePlaylistsByKeyId_() {
32225 if (!this.mainPlaylistLoader_ || !this.mainPlaylistLoader_.main) {
32226 return;
32227 }
32228
32229 let nonUsableKeyStatusCount = 0;
32230 const NON_USABLE = 'non-usable';
32231 this.mainPlaylistLoader_.main.playlists.forEach(playlist => {
32232        const keyIdSet = this.mainPlaylistLoader_.getKeyIdSet(playlist); // If the playlist doesn't have key IDs, don't exclude it.
32233
32234 if (!keyIdSet || !keyIdSet.size) {
32235 return;
32236 }
32237
32238 keyIdSet.forEach(key => {
32239 const USABLE = 'usable';
32240 const hasUsableKeyStatus = this.keyStatusMap_.has(key) && this.keyStatusMap_.get(key) === USABLE;
32241 const nonUsableExclusion = playlist.lastExcludeReason_ === NON_USABLE && playlist.excludeUntil === Infinity;
32242
32243 if (!hasUsableKeyStatus) {
32244 // Only exclude playlists that haven't already been excluded as non-usable.
32245 if (playlist.excludeUntil !== Infinity && playlist.lastExcludeReason_ !== NON_USABLE) {
32246 playlist.excludeUntil = Infinity;
32247 playlist.lastExcludeReason_ = NON_USABLE;
32248 this.logger_(`excluding playlist ${playlist.id} because the key ID ${key} doesn't exist in the keyStatusMap or is not ${USABLE}`);
32249 } // count all nonUsableKeyStatus
32250
32251
32252 nonUsableKeyStatusCount++;
32253 } else if (hasUsableKeyStatus && nonUsableExclusion) {
32254 delete playlist.excludeUntil;
32255 delete playlist.lastExcludeReason_;
32256 this.logger_(`enabling playlist ${playlist.id} because key ID ${key} is ${USABLE}`);
32257 }
32258 });
32259      }); // If for whatever reason every playlist has a non-usable key status, try re-including the SD renditions as a failsafe.
32260
32261 if (nonUsableKeyStatusCount >= this.mainPlaylistLoader_.main.playlists.length) {
32262 this.mainPlaylistLoader_.main.playlists.forEach(playlist => {
32263 const isNonHD = playlist && playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height < 720;
32264 const excludedForNonUsableKey = playlist.excludeUntil === Infinity && playlist.lastExcludeReason_ === NON_USABLE;
32265
32266 if (isNonHD && excludedForNonUsableKey) {
32267 // Only delete the excludeUntil so we don't try and re-exclude these playlists.
32268 delete playlist.excludeUntil;
32269 videojs__default["default"].log.warn(`enabling non-HD playlist ${playlist.id} because all playlists were excluded due to ${NON_USABLE} key IDs`);
32270 }
32271 });
32272 }
32273 }
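  /*
   * Illustrative sketch (hypothetical key IDs): given
   *   keyStatusMap_ = { 'deadbeef…' => 'usable', 'cafebabe…' => 'output-restricted' }
   * a playlist whose key ID set contains only 'cafebabe…' is excluded with
   * excludeUntil = Infinity and lastExcludeReason_ = 'non-usable', while a playlist
   * with no key IDs is left untouched. If every playlist ends up excluded this way,
   * the sub-720p renditions are re-enabled as a failsafe.
   */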
32274 /**
32275    * Adds a key status to the key status map, converting the key ID to a hex string if necessary.
32276 *
32277 * @param {any} keyId the keyId to add a status for
32278 * @param {string} status the status of the keyId
32279 */
32280
32281
32282 addKeyStatus_(keyId, status) {
32283 const isString = typeof keyId === 'string';
32284 const keyIdHexString = isString ? keyId : bufferToHexString(keyId);
32285 const formattedKeyIdString = keyIdHexString.slice(0, 32).toLowerCase();
32286 this.logger_(`KeyStatus '${status}' with key ID ${formattedKeyIdString} added to the keyStatusMap`);
32287 this.keyStatusMap_.set(formattedKeyIdString, status);
32288 }
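  /*
   * Illustrative sketch: binary key IDs are hex-encoded via bufferToHexString and
   * normalized to a lowercase 32-character string, so both calls below (values
   * hypothetical) address the same keyStatusMap_ entry:
   *
   *   this.addKeyStatus_('00112233445566778899AABBCCDDEEFF', 'usable');
   *   this.addKeyStatus_(new Uint8Array([0x00, 0x11, 0x22, ...]), 'usable'); // same entry
   */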
32289 /**
32290 * Utility function for adding key status to the keyStatusMap and filtering usable encrypted playlists.
32291 *
32292 * @param {any} keyId the keyId from the keystatuschange event
32293 * @param {string} status the key status string
32294 */
32295
32296
32297 updatePlaylistByKeyStatus(keyId, status) {
32298 this.addKeyStatus_(keyId, status);
32299
32300      if (!this.waitingForFastQualityPlaylistReceived_) {
32301        this.excludeNonUsableThenChangePlaylist_();
32302      } // Listen to loadedplaylist with a single listener and check for new contentProtection elements when a playlist is updated.
32303      // NOTE: `.bind()` returns a new function on each call, so the bound listener is cached once on the
32304      // instance; otherwise `off()` could never remove the handler attached by `on()` and listeners would accumulate.
32305      this.boundExcludeNonUsableThenChangePlaylist_ = this.boundExcludeNonUsableThenChangePlaylist_ || this.excludeNonUsableThenChangePlaylist_.bind(this);
32306      this.mainPlaylistLoader_.off('loadedplaylist', this.boundExcludeNonUsableThenChangePlaylist_);
32306      this.mainPlaylistLoader_.on('loadedplaylist', this.boundExcludeNonUsableThenChangePlaylist_);
32307 }
32308
32309 excludeNonUsableThenChangePlaylist_() {
32310 this.excludeNonUsablePlaylistsByKeyId_();
32311 this.fastQualityChange_();
32312 }
32313
32314 }
32315
32316 /**
32317 * Returns a function that acts as the Enable/disable playlist function.
32318 *
32319 * @param {PlaylistLoader} loader - The main playlist loader
32320 * @param {string} playlistID - id of the playlist
32321 * @param {Function} changePlaylistFn - A function to be called after a
32322 * playlist's enabled-state has been changed. Will NOT be called if a
32323 * playlist's enabled-state is unchanged
32324 * @param {boolean=} enable - Value to set the playlist enabled-state to
32325 * or if undefined returns the current enabled-state for the playlist
32326 * @return {Function} Function for setting/getting enabled
32327 */
32328
32329 const enableFunction = (loader, playlistID, changePlaylistFn) => enable => {
32330 const playlist = loader.main.playlists[playlistID];
32331 const incompatible = isIncompatible(playlist);
32332 const currentlyEnabled = isEnabled(playlist);
32333
32334 if (typeof enable === 'undefined') {
32335 return currentlyEnabled;
32336 }
32337
32338 if (enable) {
32339 delete playlist.disabled;
32340 } else {
32341 playlist.disabled = true;
32342 }
32343
32344 if (enable !== currentlyEnabled && !incompatible) {
32345 // Ensure the outside world knows about our changes
32346 changePlaylistFn();
32347
32348 if (enable) {
32349 loader.trigger('renditionenabled');
32350 } else {
32351 loader.trigger('renditiondisabled');
32352 }
32353 }
32354
32355 return enable;
32356 };
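 /*
  * Illustrative sketch: enableFunction backs Representation#enabled below, giving
  * consumers a combined getter/setter (the access path through the player is an
  * assumption, not part of this bundle):
  *
  *   const rep = player.tech({ IWillNotUseThisInPlugins: true }).vhs.representations()[0];
  *   rep.enabled();      // read the current enabled-state
  *   rep.enabled(false); // disable; fires 'renditiondisabled' and re-runs playlist selection
  */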
32357 /**
32358 * The representation object encapsulates the publicly visible information
32359 * in a media playlist along with a setter/getter-type function (enabled)
32360 * for changing the enabled-state of a particular playlist entry
32361 *
32362 * @class Representation
32363 */
32364
32365
32366 class Representation {
32367 constructor(vhsHandler, playlist, id) {
32368 const {
32369 playlistController_: pc
32370 } = vhsHandler;
32371 const qualityChangeFunction = pc.fastQualityChange_.bind(pc); // some playlist attributes are optional
32372
32373 if (playlist.attributes) {
32374 const resolution = playlist.attributes.RESOLUTION;
32375 this.width = resolution && resolution.width;
32376 this.height = resolution && resolution.height;
32377 this.bandwidth = playlist.attributes.BANDWIDTH;
32378 this.frameRate = playlist.attributes['FRAME-RATE'];
32379 }
32380
32381 this.codecs = codecsForPlaylist(pc.main(), playlist);
32382 this.playlist = playlist; // The id is simply the ordinality of the media playlist
32383 // within the main playlist
32384
32385 this.id = id; // Partially-apply the enableFunction to create a playlist-
32386 // specific variant
32387
32388 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
32389 }
32390
32391 }
32392 /**
32393 * A mixin function that adds the `representations` api to an instance
32394 * of the VhsHandler class
32395 *
32396 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
32397 * representation API into
32398 */
32399
32400
32401 const renditionSelectionMixin = function (vhsHandler) {
32402 // Add a single API-specific function to the VhsHandler instance
32403 vhsHandler.representations = () => {
32404 const main = vhsHandler.playlistController_.main();
32405 const playlists = isAudioOnly(main) ? vhsHandler.playlistController_.getAudioTrackPlaylists_() : main.playlists;
32406
32407 if (!playlists) {
32408 return [];
32409 }
32410
32411      return playlists.filter(media => !isIncompatible(media)).map(media => new Representation(vhsHandler, media, media.id));
32412 };
32413 };
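 /*
  * Illustrative sketch: once the mixin is applied, each representation exposes the
  * fields assigned in the constructor above (ids and numbers hypothetical):
  *
  *   vhsHandler.representations().map(r => ({ id: r.id, width: r.width, height: r.height, bandwidth: r.bandwidth }));
  *   // => [{ id: '0-main.m3u8', width: 1280, height: 720, bandwidth: 2000000 }, ...]
  */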
32414
32415 /**
32416 * @file playback-watcher.js
32417 *
32418 * Playback starts, and now my watch begins. It shall not end until my death. I shall
32419 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
32420 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
32421 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
32422 * my life and honor to the Playback Watch, for this Player and all the Players to come.
32423 */
32424
32425 const timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
32426 /**
32427 * @class PlaybackWatcher
32428 */
32429
32430 class PlaybackWatcher {
32431 /**
32432     * Represents a PlaybackWatcher object.
32433 *
32434 * @class
32435 * @param {Object} options an object that includes the tech and settings
32436 */
32437 constructor(options) {
32438 this.playlistController_ = options.playlistController;
32439 this.tech_ = options.tech;
32440 this.seekable = options.seekable;
32441 this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
32442 this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
32443 this.media = options.media;
32444 this.consecutiveUpdates = 0;
32445 this.lastRecordedTime = null;
32446 this.checkCurrentTimeTimeout_ = null;
32447 this.logger_ = logger('PlaybackWatcher');
32448 this.logger_('initialize');
32449
32450 const playHandler = () => this.monitorCurrentTime_();
32451
32452 const canPlayHandler = () => this.monitorCurrentTime_();
32453
32454 const waitingHandler = () => this.techWaiting_();
32455
32456 const cancelTimerHandler = () => this.resetTimeUpdate_();
32457
32458 const pc = this.playlistController_;
32459 const loaderTypes = ['main', 'subtitle', 'audio'];
32460 const loaderChecks = {};
32461 loaderTypes.forEach(type => {
32462 loaderChecks[type] = {
32463 reset: () => this.resetSegmentDownloads_(type),
32464 updateend: () => this.checkSegmentDownloads_(type)
32465 };
32466 pc[`${type}SegmentLoader_`].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
32467        // isn't changing, we want to reset. We cannot assume that the new rendition
32468        // will also be stalled until we have seen new appends.
32469
32470 pc[`${type}SegmentLoader_`].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
32471 // This prevents one segment playlists (single vtt or single segment content)
32472 // from being detected as stalling. As the buffer will not change in those cases, since
32473 // the buffer is the entire video duration.
32474
32475 this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
32476 });
32477 /**
32478 * We check if a seek was into a gap through the following steps:
32479 * 1. We get a seeking event and we do not get a seeked event. This means that
32480 * a seek was attempted but not completed.
32481 * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
32482 * removed everything from our buffer and appended a segment, and should be ready
32483 * to check for gaps.
32484 */
32485
32486 const setSeekingHandlers = fn => {
32487 ['main', 'audio'].forEach(type => {
32488 pc[`${type}SegmentLoader_`][fn]('appended', this.seekingAppendCheck_);
32489 });
32490 };
32491
32492 this.seekingAppendCheck_ = () => {
32493 if (this.fixesBadSeeks_()) {
32494 this.consecutiveUpdates = 0;
32495 this.lastRecordedTime = this.tech_.currentTime();
32496 setSeekingHandlers('off');
32497 }
32498 };
32499
32500 this.clearSeekingAppendCheck_ = () => setSeekingHandlers('off');
32501
32502 this.watchForBadSeeking_ = () => {
32503 this.clearSeekingAppendCheck_();
32504 setSeekingHandlers('on');
32505 };
32506
32507 this.tech_.on('seeked', this.clearSeekingAppendCheck_);
32508 this.tech_.on('seeking', this.watchForBadSeeking_);
32509 this.tech_.on('waiting', waitingHandler);
32510 this.tech_.on(timerCancelEvents, cancelTimerHandler);
32511 this.tech_.on('canplay', canPlayHandler);
32512 /*
32513 An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
32514 is surfaced in one of two ways:
32515 1) The `waiting` event is fired before the player has buffered content, making it impossible
32516 to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
32517 we can check if playback is stalled due to a gap, and skip the gap if necessary.
32518       2) A source with a gap at the beginning of the stream is loaded programmatically while the player
32519       is in a playing state. To catch this case, it's important that our one-time play listener is set up
32520       even if the player is already in a playing state.
32521 */
32522
32523 this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
32524
32525 this.dispose = () => {
32526 this.clearSeekingAppendCheck_();
32527 this.logger_('dispose');
32528 this.tech_.off('waiting', waitingHandler);
32529 this.tech_.off(timerCancelEvents, cancelTimerHandler);
32530 this.tech_.off('canplay', canPlayHandler);
32531 this.tech_.off('play', playHandler);
32532 this.tech_.off('seeking', this.watchForBadSeeking_);
32533 this.tech_.off('seeked', this.clearSeekingAppendCheck_);
32534 loaderTypes.forEach(type => {
32535 pc[`${type}SegmentLoader_`].off('appendsdone', loaderChecks[type].updateend);
32536 pc[`${type}SegmentLoader_`].off('playlistupdate', loaderChecks[type].reset);
32537 this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
32538 });
32539
32540 if (this.checkCurrentTimeTimeout_) {
32541 window.clearTimeout(this.checkCurrentTimeTimeout_);
32542 }
32543
32544 this.resetTimeUpdate_();
32545 };
32546 }
32547 /**
32548 * Periodically check current time to see if playback stopped
32549 *
32550 * @private
32551 */
32552
32553
32554 monitorCurrentTime_() {
32555 this.checkCurrentTime_();
32556
32557 if (this.checkCurrentTimeTimeout_) {
32558 window.clearTimeout(this.checkCurrentTimeTimeout_);
32559      } // Poll every 250ms, which is what WebKit uses; Firefox uses 15ms and 42ms would correspond to 24fps.
32560
32561
32562 this.checkCurrentTimeTimeout_ = window.setTimeout(this.monitorCurrentTime_.bind(this), 250);
32563 }
32564 /**
32565 * Reset stalled download stats for a specific type of loader
32566 *
32567 * @param {string} type
32568 * The segment loader type to check.
32569 *
32570 * @listens SegmentLoader#playlistupdate
32571 * @listens Tech#seeking
32572 * @listens Tech#seeked
32573 */
32574
32575
32576 resetSegmentDownloads_(type) {
32577 const loader = this.playlistController_[`${type}SegmentLoader_`];
32578
32579 if (this[`${type}StalledDownloads_`] > 0) {
32580 this.logger_(`resetting possible stalled download count for ${type} loader`);
32581 }
32582
32583 this[`${type}StalledDownloads_`] = 0;
32584 this[`${type}Buffered_`] = loader.buffered_();
32585 }
32586 /**
32587 * Checks on every segment `appendsdone` to see
32588     * if segment appends are making progress. If they are not,
32589     * and we are still downloading bytes, we exclude the playlist.
32590 *
32591 * @param {string} type
32592 * The segment loader type to check.
32593 *
32594 * @listens SegmentLoader#appendsdone
32595 */
32596
32597
32598 checkSegmentDownloads_(type) {
32599 const pc = this.playlistController_;
32600 const loader = pc[`${type}SegmentLoader_`];
32601 const buffered = loader.buffered_();
32602 const isBufferedDifferent = isRangeDifferent(this[`${type}Buffered_`], buffered);
32603 this[`${type}Buffered_`] = buffered; // if another watcher is going to fix the issue or
32604 // the buffered value for this loader changed
32605 // appends are working
32606
32607 if (isBufferedDifferent) {
32608 this.resetSegmentDownloads_(type);
32609 return;
32610 }
32611
32612 this[`${type}StalledDownloads_`]++;
32613 this.logger_(`found #${this[`${type}StalledDownloads_`]} ${type} appends that did not increase buffer (possible stalled download)`, {
32614 playlistId: loader.playlist_ && loader.playlist_.id,
32615 buffered: timeRangesToArray(buffered)
32616 }); // after 10 possibly stalled appends with no reset, exclude
32617
32618 if (this[`${type}StalledDownloads_`] < 10) {
32619 return;
32620 }
32621
32622 this.logger_(`${type} loader stalled download exclusion`);
32623 this.resetSegmentDownloads_(type);
32624 this.tech_.trigger({
32625 type: 'usage',
32626 name: `vhs-${type}-download-exclusion`
32627 });
32628
32629 if (type === 'subtitle') {
32630 return;
32631 } // TODO: should we exclude audio tracks rather than main tracks
32632 // when type is audio?
32633
32634
32635 pc.excludePlaylist({
32636 error: {
32637 message: `Excessive ${type} segment downloading detected.`
32638 },
32639 playlistExclusionDuration: Infinity
32640 });
32641 }
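  /*
   * Illustrative timeline for the check above: ten consecutive 'appendsdone' events
   * with an unchanged buffered range increment the stalled-download counter from 0
   * to 10; on the tenth, a 'vhs-<type>-download-exclusion' usage event fires and
   * (for non-subtitle loaders) the playlist is excluded for Infinity. Any buffered
   * change, seek, or playlist update resets the counter to 0.
   */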
32642 /**
32643 * The purpose of this function is to emulate the "waiting" event on
32644 * browsers that do not emit it when they are waiting for more
32645 * data to continue playback
32646 *
32647 * @private
32648 */
32649
32650
32651 checkCurrentTime_() {
32652 if (this.tech_.paused() || this.tech_.seeking()) {
32653 return;
32654 }
32655
32656 const currentTime = this.tech_.currentTime();
32657 const buffered = this.tech_.buffered();
32658
32659 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
32660 // If current time is at the end of the final buffered region, then any playback
32661 // stall is most likely caused by buffering in a low bandwidth environment. The tech
32662        // should fire a `waiting` event in this scenario, but browsers and techs are
32663        // inconsistent about doing so. Calling `techWaiting_` here allows us to simulate
32664        // responding to a native `waiting` event when the tech fails to emit one.
32665 return this.techWaiting_();
32666 }
32667
32668 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
32669 this.consecutiveUpdates++;
32670 this.waiting_();
32671 } else if (currentTime === this.lastRecordedTime) {
32672 this.consecutiveUpdates++;
32673 } else {
32674 this.consecutiveUpdates = 0;
32675 this.lastRecordedTime = currentTime;
32676 }
32677 }
32678 /**
32679 * Resets the 'timeupdate' mechanism designed to detect that we are stalled
32680 *
32681 * @private
32682 */
32683
32684
32685 resetTimeUpdate_() {
32686 this.consecutiveUpdates = 0;
32687 }
32688 /**
32689 * Fixes situations where there's a bad seek
32690 *
32691 * @return {boolean} whether an action was taken to fix the seek
32692 * @private
32693 */
32694
32695
32696 fixesBadSeeks_() {
32697 const seeking = this.tech_.seeking();
32698
32699 if (!seeking) {
32700 return false;
32701 } // TODO: It's possible that these seekable checks should be moved out of this function
32702 // and into a function that runs on seekablechange. It's also possible that we only need
32703 // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
32704 // seekable range.
32705
32706
32707 const seekable = this.seekable();
32708 const currentTime = this.tech_.currentTime();
32709 const isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
32710 let seekTo;
32711
32712 if (isAfterSeekableRange) {
32713 const seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
32714
32715 seekTo = seekableEnd;
32716 }
32717
32718 if (this.beforeSeekableWindow_(seekable, currentTime)) {
32719 const seekableStart = seekable.start(0); // sync to the beginning of the live window
32720 // provide a buffer of .1 seconds to handle rounding/imprecise numbers
32721
32722 seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
32723 // happen in live with a 3 segment playlist), then don't use a time delta
32724 seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
32725 }
32726
32727 if (typeof seekTo !== 'undefined') {
32728 this.logger_(`Trying to seek outside of seekable at time ${currentTime} with ` + `seekable range ${printableRange(seekable)}. Seeking to ` + `${seekTo}.`);
32729 this.tech_.setCurrentTime(seekTo);
32730 return true;
32731 }
32732
32733 const sourceUpdater = this.playlistController_.sourceUpdater_;
32734 const buffered = this.tech_.buffered();
32735 const audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
32736 const videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
32737 const media = this.media(); // verify that at least two segment durations or one part duration have been
32738 // appended before checking for a gap.
32739
32740      const minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2;
32741
32742
32743 const bufferedToCheck = [audioBuffered, videoBuffered];
32744
32745 for (let i = 0; i < bufferedToCheck.length; i++) {
32746 // skip null buffered
32747 if (!bufferedToCheck[i]) {
32748 continue;
32749 }
32750
32751        const timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if less than two video/audio segment durations or one part
32752        // duration is buffered ahead, we haven't appended enough to call this a bad seek.
32753
32754 if (timeAhead < minAppendedDuration) {
32755 return false;
32756 }
32757 }
32758
32759 const nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
32760 // to seek over the gap
32761
32762 if (nextRange.length === 0) {
32763 return false;
32764 }
32765
32766 seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
32767      this.logger_(`Buffered region starts (${nextRange.start(0)}) ` + `just beyond seek point (${currentTime}). Seeking to ${seekTo}.`);
32768 this.tech_.setCurrentTime(seekTo);
32769 return true;
32770 }
32771 /**
32772 * Handler for situations when we determine the player is waiting.
32773 *
32774 * @private
32775 */
32776
32777
32778 waiting_() {
32779 if (this.techWaiting_()) {
32780 return;
32781 } // All tech waiting checks failed. Use last resort correction
32782
32783
32784 const currentTime = this.tech_.currentTime();
32785 const buffered = this.tech_.buffered();
32786 const currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
32787 // region with no indication that anything is amiss (seen in Firefox). Seeking to
32788 // currentTime is usually enough to kickstart the player. This checks that the player
32789 // is currently within a buffered region before attempting a corrective seek.
32790 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
32791 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
32792 // make sure there is ~3 seconds of forward buffer before taking any corrective action
32793 // to avoid triggering an `unknownwaiting` event when the network is slow.
32794
32795 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
32796 this.resetTimeUpdate_();
32797 this.tech_.setCurrentTime(currentTime);
32798 this.logger_(`Stopped at ${currentTime} while inside a buffered region ` + `[${currentRange.start(0)} -> ${currentRange.end(0)}]. Attempting to resume ` + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
32799
32800 this.tech_.trigger({
32801 type: 'usage',
32802 name: 'vhs-unknown-waiting'
32803 });
32804 return;
32805 }
32806 }
32807 /**
32808 * Handler for situations when the tech fires a `waiting` event
32809 *
32810 * @return {boolean}
32811     *         True if an action was taken (or none was needed) to correct the waiting.
32812     *         False if no checks passed.
32813 * @private
32814 */
32815
32816
32817 techWaiting_() {
32818 const seekable = this.seekable();
32819 const currentTime = this.tech_.currentTime();
32820
32821 if (this.tech_.seeking()) {
32822 // Tech is seeking or already waiting on another action, no action needed
32823 return true;
32824 }
32825
32826 if (this.beforeSeekableWindow_(seekable, currentTime)) {
32827 const livePoint = seekable.end(seekable.length - 1);
32828 this.logger_(`Fell out of live window at time ${currentTime}. Seeking to ` + `live point (seekable end) ${livePoint}`);
32829 this.resetTimeUpdate_();
32830 this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
32831
32832 this.tech_.trigger({
32833 type: 'usage',
32834 name: 'vhs-live-resync'
32835 });
32836 return true;
32837 }
32838
32839 const sourceUpdater = this.tech_.vhs.playlistController_.sourceUpdater_;
32840 const buffered = this.tech_.buffered();
32841 const videoUnderflow = this.videoUnderflow_({
32842 audioBuffered: sourceUpdater.audioBuffered(),
32843 videoBuffered: sourceUpdater.videoBuffered(),
32844 currentTime
32845 });
32846
32847 if (videoUnderflow) {
32848 // Even though the video underflowed and was stuck in a gap, the audio overplayed
32849 // the gap, leading currentTime into a buffered range. Seeking to currentTime
32850 // allows the video to catch up to the audio position without losing any audio
32851 // (only suffering ~3 seconds of frozen video and a pause in audio playback).
32852 this.resetTimeUpdate_();
32853 this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
32854
32855 this.tech_.trigger({
32856 type: 'usage',
32857 name: 'vhs-video-underflow'
32858 });
32859 return true;
32860 }
32861
32862 const nextRange = findNextRange(buffered, currentTime); // check for gap
32863
32864 if (nextRange.length > 0) {
32865 this.logger_(`Stopped at ${currentTime} and seeking to ${nextRange.start(0)}`);
32866 this.resetTimeUpdate_();
32867 this.skipTheGap_(currentTime);
32868 return true;
32869 } // All checks failed. Returning false to indicate failure to correct waiting
32870
32871
32872 return false;
32873 }
32874
32875 afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow = false) {
32876 if (!seekable.length) {
32877 // we can't make a solid case if there's no seekable, default to false
32878 return false;
32879 }
32880
32881 let allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
32882 const isLive = !playlist.endList;
32883 const isLLHLS = typeof playlist.partTargetDuration === 'number';
32884
32885 if (isLive && (isLLHLS || allowSeeksWithinUnsafeLiveWindow)) {
32886 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
32887 }
32888
32889 if (currentTime > allowedEnd) {
32890 return true;
32891 }
32892
32893 return false;
32894 }
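  /*
   * Illustrative numbers for the check above: with seekable = [0, 100] and a safe
   * time delta of ~0.1s, a currentTime of 100.05 is still inside the window while
   * 100.2 is after it. For live playlists that are LL-HLS (partTargetDuration set)
   * or that allow seeks within the unsafe live window, the allowed end extends to
   * 3 target durations past the seekable end instead.
   */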
32895
32896 beforeSeekableWindow_(seekable, currentTime) {
32897 if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
32898 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
32899 return true;
32900 }
32901
32902 return false;
32903 }
32904
32905 videoUnderflow_({
32906 videoBuffered,
32907 audioBuffered,
32908 currentTime
32909 }) {
32910 // audio only content will not have video underflow :)
32911 if (!videoBuffered) {
32912 return;
32913 }
32914
32915 let gap; // find a gap in demuxed content.
32916
32917 if (videoBuffered.length && audioBuffered.length) {
32918 // in Chrome audio will continue to play for ~3s when we run out of video
32919 // so we have to check that the video buffer did have some buffer in the
32920 // past.
32921 const lastVideoRange = findRange(videoBuffered, currentTime - 3);
32922 const videoRange = findRange(videoBuffered, currentTime);
32923 const audioRange = findRange(audioBuffered, currentTime);
32924
32925 if (audioRange.length && !videoRange.length && lastVideoRange.length) {
32926 gap = {
32927 start: lastVideoRange.end(0),
32928 end: audioRange.end(0)
32929 };
32930 } // find a gap in muxed content.
32931
32932 } else {
32933 const nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
32934 // stuck in a gap due to video underflow.
32935
32936 if (!nextRange.length) {
32937 gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
32938 }
32939 }
32940
32941 if (gap) {
32942 this.logger_(`Encountered a gap in video from ${gap.start} to ${gap.end}. ` + `Seeking to current time ${currentTime}`);
32943 return true;
32944 }
32945
32946 return false;
32947 }
32948 /**
32949 * Timer callback. If playback still has not proceeded, then we seek
32950 * to the start of the next buffered region.
32951 *
32952 * @private
32953 */
32954
32955
32956 skipTheGap_(scheduledCurrentTime) {
32957 const buffered = this.tech_.buffered();
32958 const currentTime = this.tech_.currentTime();
32959 const nextRange = findNextRange(buffered, currentTime);
32960 this.resetTimeUpdate_();
32961
32962 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
32963 return;
32964 }
32965
32966 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
32967
32968 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
32969 this.tech_.trigger({
32970 type: 'usage',
32971 name: 'vhs-gap-skip'
32972 });
32973 }
32974
32975 gapFromVideoUnderflow_(buffered, currentTime) {
32976 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
32977 // playing for ~3 seconds after the video gap starts. This is done to account for
32978 // video buffer underflow/underrun (note that this is not done when there is audio
32979 // buffer underflow/underrun -- in that case the video will stop as soon as it
32980 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
32981 // video stalls). The player's time will reflect the playthrough of audio, so the
32982 // time will appear as if we are in a buffered region, even if we are stuck in a
32983 // "gap."
32984 //
32985 // Example:
32986 // video buffer: 0 => 10.1, 10.2 => 20
32987 // audio buffer: 0 => 20
32988 // overall buffer: 0 => 10.1, 10.2 => 20
32989 // current time: 13
32990 //
32991 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
32992 // however, the audio continued playing until it reached ~3 seconds past the gap
32993 // (13 seconds), at which point it stops as well. Since current time is past the
32994 // gap, findNextRange will return no ranges.
32995 //
32996 // To check for this issue, we see if there is a gap that starts somewhere within
32997 // a 3 second range (3 seconds +/- 1 second) back from our current time.
32998 const gaps = findGaps(buffered);
32999
33000 for (let i = 0; i < gaps.length; i++) {
33001 const start = gaps.start(i);
33002        const end = gaps.end(i); // gap starts no more than 4 seconds back
33003
33004 if (currentTime - start < 4 && currentTime - start > 2) {
33005 return {
33006 start,
33007 end
33008 };
33009 }
33010 }
33011
33012 return null;
33013 }
33014
33015 }
33016
33017 const defaultOptions = {
33018 errorInterval: 30,
33019
33020 getSource(next) {
33021 const tech = this.tech({
33022 IWillNotUseThisInPlugins: true
33023 });
33024 const sourceObj = tech.currentSource_ || this.currentSource();
33025 return next(sourceObj);
33026 }
33027
33028 };
33029 /**
33030 * Main entry point for the plugin
33031 *
33032 * @param {Player} player a reference to a videojs Player instance
33033 * @param {Object} [options] an object with plugin options
33034 * @private
33035 */
33036
33037 const initPlugin = function (player, options) {
33038 let lastCalled = 0;
33039 let seekTo = 0;
33040 const localOptions = merge$1(defaultOptions, options);
33041 player.ready(() => {
33042 player.trigger({
33043 type: 'usage',
33044 name: 'vhs-error-reload-initialized'
33045 });
33046 });
33047 /**
33048 * Player modifications to perform that must wait until `loadedmetadata`
33049 * has been triggered
33050 *
33051 * @private
33052 */
33053
33054 const loadedMetadataHandler = function () {
33055 if (seekTo) {
33056 player.currentTime(seekTo);
33057 }
33058 };
33059 /**
33060 * Set the source on the player element, play, and seek if necessary
33061 *
33062 * @param {Object} sourceObj An object specifying the source url and mime-type to play
33063 * @private
33064 */
33065
33066
33067 const setSource = function (sourceObj) {
33068 if (sourceObj === null || sourceObj === undefined) {
33069 return;
33070 }
33071
33072 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
33073 player.one('loadedmetadata', loadedMetadataHandler);
33074 player.src(sourceObj);
33075 player.trigger({
33076 type: 'usage',
33077 name: 'vhs-error-reload'
33078 });
33079 player.play();
33080 };
33081 /**
33082 * Attempt to get a source from either the built-in getSource function
33083 * or a custom function provided via the options
33084 *
33085 * @private
33086 */
33087
33088
33089 const errorHandler = function () {
33090 // Do not attempt to reload the source if a source-reload occurred before
33091 // 'errorInterval' time has elapsed since the last source-reload
33092 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
33093 player.trigger({
33094 type: 'usage',
33095 name: 'vhs-error-reload-canceled'
33096 });
33097 return;
33098 }
33099
33100 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
33101 videojs__default["default"].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
33102 return;
33103 }
33104
33105 lastCalled = Date.now();
33106 return localOptions.getSource.call(player, setSource);
33107 };
33108 /**
33109 * Unbind any event handlers that were bound by the plugin
33110 *
33111 * @private
33112 */
33113
33114
33115 const cleanupEvents = function () {
33116 player.off('loadedmetadata', loadedMetadataHandler);
33117 player.off('error', errorHandler);
33118 player.off('dispose', cleanupEvents);
33119 };
33120 /**
33121 * Cleanup before re-initializing the plugin
33122 *
33123 * @param {Object} [newOptions] an object with plugin options
33124 * @private
33125 */
33126
33127
33128 const reinitPlugin = function (newOptions) {
33129 cleanupEvents();
33130 initPlugin(player, newOptions);
33131 };
33132
33133 player.on('error', errorHandler);
33134 player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
33135 // initializing the plugin
33136
33137 player.reloadSourceOnError = reinitPlugin;
33138 };
33139 /**
33140 * Reload the source when an error is detected as long as there
33141 * wasn't an error previously within the last 30 seconds
33142 *
33143 * @param {Object} [options] an object with plugin options
33144 */
33145
33146
33147 const reloadSourceOnError = function (options) {
33148 initPlugin(this, options);
33149 };
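 /*
  * Illustrative usage of the plugin defined above (source URL hypothetical):
  *
  *   player.reloadSourceOnError({
  *     // wait at least 10 seconds between reload attempts
  *     errorInterval: 10,
  *     // optionally hand a replacement source to `next` instead of the current one
  *     getSource(next) {
  *       next({ src: 'https://example.com/main.m3u8', type: 'application/x-mpegURL' });
  *     }
  *   });
  */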
33150
33151 var version$4 = "3.10.0";
33152
33153 var version$3 = "7.0.2";
33154
33155 var version$2 = "1.3.0";
33156
33157 var version$1 = "7.1.0";
33158
33159 var version = "4.0.1";
33160
33161 /**
33162 * @file videojs-http-streaming.js
33163 *
33164 * The main file for the VHS project.
33165 * License: https://github.com/videojs/videojs-http-streaming/blob/main/LICENSE
33166 */
33167 const Vhs = {
33168 PlaylistLoader,
33169 Playlist,
33170 utils,
33171 STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
33172 INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
33173 lastBandwidthSelector,
33174 movingAverageBandwidthSelector,
33175 comparePlaylistBandwidth,
33176 comparePlaylistResolution,
33177 xhr: xhrFactory()
33178 }; // Define getter/setters for config properties
33179
33180 Object.keys(Config).forEach(prop => {
33181 Object.defineProperty(Vhs, prop, {
33182 get() {
33183        videojs__default["default"].log.warn(`using Vhs.${prop} is UNSAFE; be sure you know what you are doing`);
33184 return Config[prop];
33185 },
33186
33187 set(value) {
33188        videojs__default["default"].log.warn(`using Vhs.${prop} is UNSAFE; be sure you know what you are doing`);
33189
33190 if (typeof value !== 'number' || value < 0) {
33191          videojs__default["default"].log.warn(`value of Vhs.${prop} must be a number greater than or equal to 0`);
33192 return;
33193 }
33194
33195 Config[prop] = value;
33196 }
33197
33198 });
33199 });
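 /*
  * Illustrative sketch: every Config property receives the warning getter/setter
  * above; assuming GOAL_BUFFER_LENGTH is one of Config's keys:
  *
  *   Vhs.GOAL_BUFFER_LENGTH;      // logs the UNSAFE warning and returns the value
  *   Vhs.GOAL_BUFFER_LENGTH = 60; // logs the warning, then updates Config
  *   Vhs.GOAL_BUFFER_LENGTH = -1; // rejected: values must be numbers >= 0
  */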
33200 const LOCAL_STORAGE_KEY = 'videojs-vhs';
33201 /**
33202 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
33203 *
33204 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
33205 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
33206 * @function handleVhsMediaChange
33207 */
33208
33209 const handleVhsMediaChange = function (qualityLevels, playlistLoader) {
33210 const newPlaylist = playlistLoader.media();
33211 let selectedIndex = -1;
33212
33213 for (let i = 0; i < qualityLevels.length; i++) {
33214 if (qualityLevels[i].id === newPlaylist.id) {
33215 selectedIndex = i;
33216 break;
33217 }
33218 }
33219
33220 qualityLevels.selectedIndex_ = selectedIndex;
33221 qualityLevels.trigger({
33222 selectedIndex,
33223 type: 'change'
33224 });
33225 };
33226 /**
33227 * Adds quality levels to list once playlist metadata is available
33228 *
33229 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
33230 * @param {Object} vhs Vhs object to listen to for media events.
33231 * @function handleVhsLoadedMetadata
33232 */
33233
33234
33235 const handleVhsLoadedMetadata = function (qualityLevels, vhs) {
33236 vhs.representations().forEach(rep => {
33237 qualityLevels.addQualityLevel(rep);
33238 });
33239 handleVhsMediaChange(qualityLevels, vhs.playlists);
33240 }; // VHS is a source handler, not a tech. Make sure attempts to use it
33241 // as one do not cause exceptions.
33242
33243
33244 Vhs.canPlaySource = function () {
33245 return videojs__default["default"].log.warn('VHS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
33246 };
33247
33248 const emeKeySystems = (keySystemOptions, mainPlaylist, audioPlaylist) => {
33249 if (!keySystemOptions) {
33250 return keySystemOptions;
33251 }
33252
33253 let codecs = {};
33254
33255 if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
33256 codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
33257 }
33258
33259 if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
33260 codecs.audio = audioPlaylist.attributes.CODECS;
33261 }
33262
33263 const videoContentType = getMimeForCodec(codecs.video);
33264 const audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
33265
33266 const keySystemContentTypes = {};
33267
33268 for (const keySystem in keySystemOptions) {
33269 keySystemContentTypes[keySystem] = {};
33270
33271 if (audioContentType) {
33272 keySystemContentTypes[keySystem].audioContentType = audioContentType;
33273 }
33274
33275 if (videoContentType) {
33276 keySystemContentTypes[keySystem].videoContentType = videoContentType;
33277 } // Default to using the video playlist's PSSH even though they may be different, as
33278 // videojs-contrib-eme will only accept one in the options.
33279 //
33280      // This shouldn't be an issue for most cases as early initialization will handle all
33281 // unique PSSH values, and if they aren't, then encrypted events should have the
33282 // specific information needed for the unique license.
33283
33284
33285 if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
33286 keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
33287 } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
33288 // so we need to prevent overwriting the URL entirely
33289
33290
33291 if (typeof keySystemOptions[keySystem] === 'string') {
33292 keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
33293 }
33294 }
33295
33296 return merge$1(keySystemOptions, keySystemContentTypes);
33297 };
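 /*
  * Illustrative sketch of the expansion performed above (license URL and codecs
  * hypothetical): a bare license-URL string is upgraded to an object carrying the
  * content types derived from the selected playlists, plus a pssh when available.
  *
  *   emeKeySystems({ 'com.widevine.alpha': 'https://license.example.com' }, mainPlaylist, audioPlaylist);
  *   // => {
  *   //   'com.widevine.alpha': {
  *   //     url: 'https://license.example.com',
  *   //     audioContentType: 'audio/mp4;codecs="mp4a.40.2"',
  *   //     videoContentType: 'video/mp4;codecs="avc1.4d401f"',
  *   //     pssh: <Uint8Array> // when present on the main playlist's contentProtection
  *   //   }
  *   // }
  */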
33298 /**
33299 * @typedef {Object} KeySystems
33300 *
33301 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
33302 * Note: not all options are listed here.
33303 *
33304 * @property {Uint8Array} [pssh]
33305 * Protection System Specific Header
33306 */
33307
33308 /**
33309 * Goes through all the playlists and collects an array of KeySystems options objects
33310 * containing each playlist's keySystems and their pssh values, if available.
33311 *
33312 * @param {Object[]} playlists
33313 * The playlists to look through
33314 * @param {string[]} keySystems
33315 * The keySystems to collect pssh values for
33316 *
33317 * @return {KeySystems[]}
33318 * An array of KeySystems objects containing available key systems and their
33319 * pssh values
33320 */
33321
33322
33323 const getAllPsshKeySystemsOptions = (playlists, keySystems) => {
33324 return playlists.reduce((keySystemsArr, playlist) => {
33325 if (!playlist.contentProtection) {
33326 return keySystemsArr;
33327 }
33328
33329 const keySystemsOptions = keySystems.reduce((keySystemsObj, keySystem) => {
33330 const keySystemOptions = playlist.contentProtection[keySystem];
33331
33332 if (keySystemOptions && keySystemOptions.pssh) {
33333 keySystemsObj[keySystem] = {
33334 pssh: keySystemOptions.pssh
33335 };
33336 }
33337
33338 return keySystemsObj;
33339 }, {});
33340
33341 if (Object.keys(keySystemsOptions).length) {
33342 keySystemsArr.push(keySystemsOptions);
33343 }
33344
33345 return keySystemsArr;
33346 }, []);
33347 };
33348 /**
33349 * Returns a promise that waits for the
33350 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
33351 *
33352 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
33353 * browsers.
33354 *
33355 * As per the above ticket, this is particularly important for Chrome, where, if
33356 * unencrypted content is appended before encrypted content and the key session has not
33357 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
33358 * during playback.
33359 *
33360 * @param {Object} player
33361 * The player instance
33362 * @param {Object[]} sourceKeySystems
33363 * The key systems options from the player source
33364 * @param {Object} [audioMedia]
33365 * The active audio media playlist (optional)
33366 * @param {Object[]} mainPlaylists
33367 * The playlists found on the main playlist object
33368 *
33369 * @return {Object}
33370 * Promise that resolves when the key session has been created
33371 */
33372
33373
33374 const waitForKeySessionCreation = ({
33375 player,
33376 sourceKeySystems,
33377 audioMedia,
33378 mainPlaylists
33379 }) => {
33380 if (!player.eme.initializeMediaKeys) {
33381 return Promise.resolve();
33382 } // TODO should all audio PSSH values be initialized for DRM?
33383 //
33384 // All unique video rendition pssh values are initialized for DRM, but here only
33385 // the initial audio playlist license is initialized. In theory, an encrypted
33386 // event should be fired if the user switches to an alternative audio playlist
33387 // where a license is required, but this case hasn't yet been tested. In addition, there
33388 // may be many alternate audio playlists unlikely to be used (e.g., multiple different
33389 // languages).
33390
33391
33392 const playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
33393 const keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
33394 const initializationFinishedPromises = [];
33395 const keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
33396 // only place where it should not be deduped is for ms-prefixed APIs, but
33397 // the existence of modern EME APIs in addition to
33398 // ms-prefixed APIs on Edge should prevent this from being a concern.
33399 // initializeMediaKeys also won't use the webkit-prefixed APIs.
33400
33401 keySystemsOptionsArr.forEach(keySystemsOptions => {
33402 keySessionCreatedPromises.push(new Promise((resolve, reject) => {
33403 player.tech_.one('keysessioncreated', resolve);
33404 }));
33405 initializationFinishedPromises.push(new Promise((resolve, reject) => {
33406 player.eme.initializeMediaKeys({
33407 keySystems: keySystemsOptions
33408 }, err => {
33409 if (err) {
33410 reject(err);
33411 return;
33412 }
33413
33414 resolve();
33415 });
33416 }));
33417 }); // The reasons Promise.race is chosen over Promise.any:
33418 //
33419 // * Promise.any is only available in Safari 14+.
33420 // * None of these promises are expected to reject. If they do reject, it might be
33421 // better here for the race to surface the rejection, rather than mask it by using
33422 // Promise.any.
33423
33424 return Promise.race([// If a session was previously created, these will all finish resolving without
33425 // creating a new session, otherwise it will take until the end of all license
33426 // requests, which is why the key session check is used (to make setup much faster).
33427 Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
33428 Promise.race(keySessionCreatedPromises)]);
33429 };
33430 /**
33431 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
33432 * there are keySystems on the source, sets up source options to prepare the source for
33433 * eme.
33434 *
33435 * @param {Object} player
33436 * The player instance
33437 * @param {Object[]} sourceKeySystems
33438 * The key systems options from the player source
33439 * @param {Object} media
33440 * The active media playlist
33441 * @param {Object} [audioMedia]
33442 * The active audio media playlist (optional)
33443 *
33444 * @return {boolean}
33445 * Whether or not options were configured and EME is available
33446 */
33447
33448 const setupEmeOptions = ({
33449 player,
33450 sourceKeySystems,
33451 media,
33452 audioMedia
33453 }) => {
33454 const sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
33455
33456 if (!sourceOptions) {
33457 return false;
33458 }
33459
33460 player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
33461 // do nothing.
33462
33463 if (sourceOptions && !player.eme) {
33464 videojs__default["default"].log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
33465 return false;
33466 }
33467
33468 return true;
33469 };
33470
33471 const getVhsLocalStorage = () => {
33472 if (!window.localStorage) {
33473 return null;
33474 }
33475
33476 const storedObject = window.localStorage.getItem(LOCAL_STORAGE_KEY);
33477
33478 if (!storedObject) {
33479 return null;
33480 }
33481
33482 try {
33483 return JSON.parse(storedObject);
33484 } catch (e) {
33485 // someone may have tampered with the value
33486 return null;
33487 }
33488 };
33489
33490 const updateVhsLocalStorage = options => {
33491 if (!window.localStorage) {
33492 return false;
33493 }
33494
33495 let objectToStore = getVhsLocalStorage();
33496 objectToStore = objectToStore ? merge$1(objectToStore, options) : options;
33497
33498 try {
33499 window.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
33500 } catch (e) {
33501 // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
33502 // storage is set to 0).
33503 // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
33504 // No need to perform any operation.
33505 return false;
33506 }
33507
33508 return objectToStore;
33509 };
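 /*
  * Illustrative sketch: the 'videojs-vhs' localStorage entry is a small JSON object
  * merged on each update; VHS typically persists measured bandwidth and throughput
  * here (numbers hypothetical):
  *
  *   updateVhsLocalStorage({ bandwidth: 4194304, throughput: 5242880 });
  *   window.localStorage.getItem('videojs-vhs'); // => '{"bandwidth":4194304,"throughput":5242880}'
  */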
33510 /**
33511 * Parses VHS-supported media types from data URIs. See
33512 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
33513 * for information on data URIs.
33514 *
33515 * @param {string} dataUri
33516 * The data URI
33517 *
33518 * @return {string|Object}
33519 * The parsed object/string, or the original string if no supported media type
33520 * was found
33521 */
33522
33523
33524 const expandDataUri = dataUri => {
33525 if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
33526 return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
33527 } // no known case for this data URI, return the string as-is
33528
33529
33530 return dataUri;
33531 };
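 /*
  * Illustrative sketch: only the VHS JSON media type is expanded; any other string
  * passes through unchanged:
  *
  *   expandDataUri('data:application/vnd.videojs.vhs+json,{"bandwidth":1}'); // => { bandwidth: 1 }
  *   expandDataUri('https://example.com/main.m3u8'); // => 'https://example.com/main.m3u8'
  */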
33532 /**
33533 * Adds a request hook to an xhr object
33534 *
33535 * @param {Object} xhr object to add the onRequest hook to
33536 * @param {function} callback hook function for an xhr request
33537 */
33538
33539
33540 const addOnRequestHook = (xhr, callback) => {
33541 if (!xhr._requestCallbackSet) {
33542 xhr._requestCallbackSet = new Set();
33543 }
33544
33545 xhr._requestCallbackSet.add(callback);
33546 };
33547 /**
33548 * Adds a response hook to an xhr object
33549 *
33550 * @param {Object} xhr object to add the onResponse hook to
33551 * @param {function} callback hook function for an xhr response
33552 */
33553
33554
33555 const addOnResponseHook = (xhr, callback) => {
33556 if (!xhr._responseCallbackSet) {
33557 xhr._responseCallbackSet = new Set();
33558 }
33559
33560 xhr._responseCallbackSet.add(callback);
33561 };
33562 /**
33563 * Removes a request hook on an xhr object, deletes the onRequest set if empty.
33564 *
33565 * @param {Object} xhr object to remove the onRequest hook from
33566 * @param {function} callback hook function to remove
33567 */
33568
33569
33570 const removeOnRequestHook = (xhr, callback) => {
33571 if (!xhr._requestCallbackSet) {
33572 return;
33573 }
33574
33575 xhr._requestCallbackSet.delete(callback);
33576
33577 if (!xhr._requestCallbackSet.size) {
33578 delete xhr._requestCallbackSet;
33579 }
33580 };
33581 /**
33582 * Removes a response hook on an xhr object, deletes the onResponse set if empty.
33583 *
33584 * @param {Object} xhr object to remove the onResponse hook from
33585 * @param {function} callback hook function to remove
33586 */
33587
33588
33589 const removeOnResponseHook = (xhr, callback) => {
33590 if (!xhr._responseCallbackSet) {
33591 return;
33592 }
33593
33594 xhr._responseCallbackSet.delete(callback);
33595
33596 if (!xhr._responseCallbackSet.size) {
33597 delete xhr._responseCallbackSet;
33598 }
33599 };
33600 /**
33601 * Whether the browser has built-in HLS support.
33602 */
33603
33604
33605 Vhs.supportsNativeHls = function () {
33606 if (!document || !document.createElement) {
33607 return false;
33608 }
33609
33610 const video = document.createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
33611
33612 if (!videojs__default["default"].getTech('Html5').isSupported()) {
33613 return false;
33614 } // HLS manifests can go by many mime-types
33615
33616
33617      const canPlay = [// Apple sanctioned
33618 'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
33619 'audio/mpegurl', // Very common
33620 'audio/x-mpegurl', // Very common
33621 'application/x-mpegurl', // Included for completeness
33622 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
33623 return canPlay.some(function (canItPlay) {
33624 return /maybe|probably/i.test(video.canPlayType(canItPlay));
33625 });
33626 }();
33627
33628 Vhs.supportsNativeDash = function () {
33629 if (!document || !document.createElement || !videojs__default["default"].getTech('Html5').isSupported()) {
33630 return false;
33631 }
33632
33633 return /maybe|probably/i.test(document.createElement('video').canPlayType('application/dash+xml'));
33634 }();
33635
33636 Vhs.supportsTypeNatively = type => {
33637 if (type === 'hls') {
33638 return Vhs.supportsNativeHls;
33639 }
33640
33641 if (type === 'dash') {
33642 return Vhs.supportsNativeDash;
33643 }
33644
33645 return false;
33646 };
33647 /**
33648 * VHS is a source handler, not a tech. Make sure attempts to use it
33649 * as one do not cause exceptions.
33650 */
33651
33652
33653 Vhs.isSupported = function () {
33654 return videojs__default["default"].log.warn('VHS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
33655 };
33656 /**
33657 * A global function for setting an onRequest hook
33658 *
33659 * @param {function} callback for request modification
33660 */
33661
33662
33663 Vhs.xhr.onRequest = function (callback) {
33664 addOnRequestHook(Vhs.xhr, callback);
33665 };
33666 /**
33667 * A global function for setting an onResponse hook
33668 *
33669 * @param {function} callback for response data retrieval
33670 */
33671
33672
33673 Vhs.xhr.onResponse = function (callback) {
33674 addOnResponseHook(Vhs.xhr, callback);
33675 };
33676 /**
33677 * Deletes a global onRequest callback if it exists
33678 *
33679 * @param {function} callback to delete from the global set
33680 */
33681
33682
33683 Vhs.xhr.offRequest = function (callback) {
33684 removeOnRequestHook(Vhs.xhr, callback);
33685 };
33686 /**
33687 * Deletes a global onResponse callback if it exists
33688 *
33689 * @param {function} callback to delete from the global set
33690 */
33691
33692
33693 Vhs.xhr.offResponse = function (callback) {
33694 removeOnResponseHook(Vhs.xhr, callback);
33695 };
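  // Usage sketch for the global hooks above (per the VHS documentation): an
  // onRequest hook receives the xhr options object and must return it, while an
  // onResponse hook receives the request, error, and response objects.
  //
  //   const globalRequestHook = (options) => {
  //     options.uri += '?token=1234'; // hypothetical query parameter
  //     return options;
  //   };
  //   videojs.Vhs.xhr.onRequest(globalRequestHook);
  //
  //   const globalResponseHook = (request, error, response) => {
  //     const flag = response.headers['x-custom-flag']; // hypothetical header
  //   };
  //   videojs.Vhs.xhr.onResponse(globalResponseHook);
  //
  //   // remove them when no longer needed
  //   videojs.Vhs.xhr.offRequest(globalRequestHook);
  //   videojs.Vhs.xhr.offResponse(globalResponseHook);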
33696
33697 const Component = videojs__default["default"].getComponent('Component');
33698 /**
33699 * The Vhs Handler object, where we orchestrate all of the parts
33700 * of VHS to interact with video.js
33701 *
33702 * @class VhsHandler
33703 * @extends videojs.Component
33704 * @param {Object} source the source object
33705 * @param {Tech} tech the parent tech object
33706 * @param {Object} options optional and required options
33707 */
33708
33709 class VhsHandler extends Component {
33710 constructor(source, tech, options) {
33711 super(tech, options.vhs); // if a tech level `initialBandwidth` option was passed
33712 // use that over the VHS level `bandwidth` option
33713
33714 if (typeof options.initialBandwidth === 'number') {
33715 this.options_.bandwidth = options.initialBandwidth;
33716 }
33717
33718 this.logger_ = logger('VhsHandler'); // we need access to the player in some cases,
33719 // so, get it from Video.js via the `playerId`
33720
33721 if (tech.options_ && tech.options_.playerId) {
33722 const _player = videojs__default["default"].getPlayer(tech.options_.playerId);
33723
33724 this.player_ = _player;
33725 }
33726
33727 this.tech_ = tech;
33728 this.source_ = source;
33729 this.stats = {};
33730 this.ignoreNextSeekingEvent_ = false;
33731 this.setOptions_();
33732
33733 if (this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
33734 tech.overrideNativeAudioTracks(true);
33735 tech.overrideNativeVideoTracks(true);
33736 } else if (this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
33737 // overriding native VHS only works if audio tracks have been emulated
33738 // error early if we're misconfigured
33739 throw new Error('Overriding native VHS requires emulated tracks. ' + 'See https://git.io/vMpjB');
33740 } // listen for fullscreenchange events for this player so that we
33741 // can adjust our quality selection quickly
33742
33743
33744 this.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], event => {
33745 const fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;
33746
33747 if (fullscreenElement && fullscreenElement.contains(this.tech_.el())) {
33748 this.playlistController_.fastQualityChange_();
33749 } else {
33750 // When leaving fullscreen, since the in-page pixel dimensions should be smaller
33751 // than full screen, see if there should be a rendition switch down to preserve
33752 // bandwidth.
33753 this.playlistController_.checkABR_();
33754 }
33755 });
33756 this.on(this.tech_, 'seeking', function () {
33757 if (this.ignoreNextSeekingEvent_) {
33758 this.ignoreNextSeekingEvent_ = false;
33759 return;
33760 }
33761
33762 this.setCurrentTime(this.tech_.currentTime());
33763 });
33764 this.on(this.tech_, 'error', function () {
33765 // verify that the error was real and we are loaded
33766 // enough to have the playlist controller loaded.
33767 if (this.tech_.error() && this.playlistController_) {
33768 this.playlistController_.pauseLoading();
33769 }
33770 });
33771 this.on(this.tech_, 'play', this.play);
33772 }
33773 /**
33774 * Set VHS options based on options from configuration, as well as partial
33775 * options to be passed at a later time.
33776 *
33777 * @param {Object} options A partial chunk of config options
33778 */
33779
33780
33781 setOptions_(options = {}) {
33782 this.options_ = merge$1(this.options_, options); // defaults
33783
33784 this.options_.withCredentials = this.options_.withCredentials || false;
33785 this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions !== false;
33786 this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
33787 this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
33788 this.options_.useForcedSubtitles = this.options_.useForcedSubtitles || false;
33789 this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
33790 this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
33791 this.options_.customTagParsers = this.options_.customTagParsers || [];
33792 this.options_.customTagMappers = this.options_.customTagMappers || [];
33793 this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
33794 this.options_.llhls = this.options_.llhls !== false;
33795 this.options_.bufferBasedABR = this.options_.bufferBasedABR || false;
33796
33797 if (typeof this.options_.playlistExclusionDuration !== 'number') {
33798 this.options_.playlistExclusionDuration = 60;
33799 }
33800
33801 if (typeof this.options_.bandwidth !== 'number') {
33802 if (this.options_.useBandwidthFromLocalStorage) {
33803 const storedObject = getVhsLocalStorage();
33804
33805 if (storedObject && storedObject.bandwidth) {
33806 this.options_.bandwidth = storedObject.bandwidth;
33807 this.tech_.trigger({
33808 type: 'usage',
33809 name: 'vhs-bandwidth-from-local-storage'
33810 });
33811 }
33812
33813 if (storedObject && storedObject.throughput) {
33814 this.options_.throughput = storedObject.throughput;
33815 this.tech_.trigger({
33816 type: 'usage',
33817 name: 'vhs-throughput-from-local-storage'
33818 });
33819 }
33820 }
33821 } // if bandwidth was not set by options or pulled from local storage, start playlist
33822 // selection at a reasonable bandwidth
33823
33824
33825 if (typeof this.options_.bandwidth !== 'number') {
33826 this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
33827 } // If the bandwidth number is unchanged from the initial setting
33828 // then this takes precedence over the enableLowInitialPlaylist option
33829
33830
33831 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
33832
33833 ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'customTagParsers', 'customTagMappers', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'bufferBasedABR', 'liveRangeSafeTimeDelta', 'llhls', 'useForcedSubtitles', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'exactManifestTimings', 'leastPixelDiffSelector'].forEach(option => {
33834 if (typeof this.source_[option] !== 'undefined') {
33835 this.options_[option] = this.source_[option];
33836 }
33837 });
33838 this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
33839 this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
33840 } // alias for public method to set options
33841
33842
33843 setOptions(options = {}) {
33844 this.setOptions_(options);
33845 }
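    // Configuration sketch: the options normalized by setOptions_ can be
    // supplied at player setup under html5.vhs, or on the source object itself,
    // which wins for the source-level keys copied in setOptions_ above. The
    // player id and values here are illustrative.
    //
    //   const player = videojs('my-player', {
    //     html5: {
    //       vhs: {
    //         withCredentials: true,
    //         useBandwidthFromLocalStorage: true,
    //         playlistExclusionDuration: 30
    //       }
    //     }
    //   });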
33846 /**
33847 * called when player.src gets called, handle a new source
33848 *
33849 * @param {Object} src the source object to handle
33850 */
33851
33852
33853 src(src, type) {
33854 // do nothing if the src is falsey
33855 if (!src) {
33856 return;
33857 }
33858
33859 this.setOptions_(); // add main playlist controller options
33860
33861 this.options_.src = expandDataUri(this.source_.src);
33862 this.options_.tech = this.tech_;
33863 this.options_.externVhs = Vhs;
33864 this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
33865
33866 this.options_.seekTo = time => {
33867 this.tech_.setCurrentTime(time);
33868 };
33869
33870 this.playlistController_ = new PlaylistController(this.options_);
33871 const playbackWatcherOptions = merge$1({
33872 liveRangeSafeTimeDelta: SAFE_TIME_DELTA
33873 }, this.options_, {
33874 seekable: () => this.seekable(),
33875 media: () => this.playlistController_.media(),
33876 playlistController: this.playlistController_
33877 });
33878 this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
33879 this.playlistController_.on('error', () => {
33880 const player = videojs__default["default"].players[this.tech_.options_.playerId];
33881 let error = this.playlistController_.error;
33882
33883 if (typeof error === 'object' && !error.code) {
33884 error.code = 3;
33885 } else if (typeof error === 'string') {
33886 error = {
33887 message: error,
33888 code: 3
33889 };
33890 }
33891
33892 player.error(error);
33893 });
33894 const defaultSelector = this.options_.bufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
33895 // compatibility with < v2
33896
33897 this.playlistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
33898 this.playlistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
33899
33900 this.playlists = this.playlistController_.mainPlaylistLoader_;
33901 this.mediaSource = this.playlistController_.mediaSource; // Proxy assignment of some properties to the main playlist
33902 // controller. Using a custom property for backwards compatibility
33903 // with < v2
33904
33905 Object.defineProperties(this, {
33906 selectPlaylist: {
33907 get() {
33908 return this.playlistController_.selectPlaylist;
33909 },
33910
33911 set(selectPlaylist) {
33912 this.playlistController_.selectPlaylist = selectPlaylist.bind(this);
33913 }
33914
33915 },
33916 throughput: {
33917 get() {
33918 return this.playlistController_.mainSegmentLoader_.throughput.rate;
33919 },
33920
33921 set(throughput) {
33922 this.playlistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
33923 // for the cumulative average
33924
33925 this.playlistController_.mainSegmentLoader_.throughput.count = 1;
33926 }
33927
33928 },
33929 bandwidth: {
33930 get() {
33931 let playerBandwidthEst = this.playlistController_.mainSegmentLoader_.bandwidth;
33932 const networkInformation = window.navigator.connection || window.navigator.mozConnection || window.navigator.webkitConnection;
33933 const tenMbpsAsBitsPerSecond = 10e6;
33934
33935 if (this.options_.useNetworkInformationApi && networkInformation) {
33936 // downlink returns Mbps
33937 // https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
33938 const networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
33939 // estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
33940 // high quality streams are not filtered out.
33941
33942 if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
33943 playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
33944 } else {
33945 playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
33946 }
33947 }
33948
33949 return playerBandwidthEst;
33950 },
33951
33952 set(bandwidth) {
33953 this.playlistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
33954 // `count` is set to zero so that the current value of `rate` isn't included
33955 // in the cumulative average
33956
33957 this.playlistController_.mainSegmentLoader_.throughput = {
33958 rate: 0,
33959 count: 0
33960 };
33961 }
33962
33963 },
33964
33965 /**
33966 * `systemBandwidth` is a combination of two serial processes' bit-rates. The first
33967 * is the network bitrate provided by `bandwidth` and the second is the bitrate of
33968 * the entire process after that - decryption, transmuxing, and appending - provided
33969 * by `throughput`.
33970 *
33971 * Since the two processes are serial, the overall system bandwidth is given by:
33972 * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
33973 */
33974 systemBandwidth: {
33975 get() {
33976 const invBandwidth = 1 / (this.bandwidth || 1);
33977 let invThroughput;
33978
33979 if (this.throughput > 0) {
33980 invThroughput = 1 / this.throughput;
33981 } else {
33982 invThroughput = 0;
33983 }
33984
33985 const systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
33986 return systemBitrate;
33987 },
33988
33989 set() {
33990 videojs__default["default"].log.error('The "systemBandwidth" property is read-only');
33991 }
33992
33993 }
33994 });
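      // Worked example for the systemBandwidth formula above: with a measured
      // network bandwidth of 5,000,000 bits/s and a post-download throughput of
      // 20,000,000 bits/s, the serial combination is
      // 1 / (1 / 5e6 + 1 / 20e6) = 1 / 2.5e-7 = 4,000,000 bits/s, which is
      // always lower than either stage on its own.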
33995
33996 if (this.options_.bandwidth) {
33997 this.bandwidth = this.options_.bandwidth;
33998 }
33999
34000 if (this.options_.throughput) {
34001 this.throughput = this.options_.throughput;
34002 }
34003
34004 Object.defineProperties(this.stats, {
34005 bandwidth: {
34006 get: () => this.bandwidth || 0,
34007 enumerable: true
34008 },
34009 mediaRequests: {
34010 get: () => this.playlistController_.mediaRequests_() || 0,
34011 enumerable: true
34012 },
34013 mediaRequestsAborted: {
34014 get: () => this.playlistController_.mediaRequestsAborted_() || 0,
34015 enumerable: true
34016 },
34017 mediaRequestsTimedout: {
34018 get: () => this.playlistController_.mediaRequestsTimedout_() || 0,
34019 enumerable: true
34020 },
34021 mediaRequestsErrored: {
34022 get: () => this.playlistController_.mediaRequestsErrored_() || 0,
34023 enumerable: true
34024 },
34025 mediaTransferDuration: {
34026 get: () => this.playlistController_.mediaTransferDuration_() || 0,
34027 enumerable: true
34028 },
34029 mediaBytesTransferred: {
34030 get: () => this.playlistController_.mediaBytesTransferred_() || 0,
34031 enumerable: true
34032 },
34033 mediaSecondsLoaded: {
34034 get: () => this.playlistController_.mediaSecondsLoaded_() || 0,
34035 enumerable: true
34036 },
34037 mediaAppends: {
34038 get: () => this.playlistController_.mediaAppends_() || 0,
34039 enumerable: true
34040 },
34041 mainAppendsToLoadedData: {
34042 get: () => this.playlistController_.mainAppendsToLoadedData_() || 0,
34043 enumerable: true
34044 },
34045 audioAppendsToLoadedData: {
34046 get: () => this.playlistController_.audioAppendsToLoadedData_() || 0,
34047 enumerable: true
34048 },
34049 appendsToLoadedData: {
34050 get: () => this.playlistController_.appendsToLoadedData_() || 0,
34051 enumerable: true
34052 },
34053 timeToLoadedData: {
34054 get: () => this.playlistController_.timeToLoadedData_() || 0,
34055 enumerable: true
34056 },
34057 buffered: {
34058 get: () => timeRangesToArray(this.tech_.buffered()),
34059 enumerable: true
34060 },
34061 currentTime: {
34062 get: () => this.tech_.currentTime(),
34063 enumerable: true
34064 },
34065 currentSource: {
34066 get: () => this.tech_.currentSource_,
34067 enumerable: true
34068 },
34069 currentTech: {
34070 get: () => this.tech_.name_,
34071 enumerable: true
34072 },
34073 duration: {
34074 get: () => this.tech_.duration(),
34075 enumerable: true
34076 },
34077 main: {
34078 get: () => this.playlists.main,
34079 enumerable: true
34080 },
34081 playerDimensions: {
34082 get: () => this.tech_.currentDimensions(),
34083 enumerable: true
34084 },
34085 seekable: {
34086 get: () => timeRangesToArray(this.tech_.seekable()),
34087 enumerable: true
34088 },
34089 timestamp: {
34090 get: () => Date.now(),
34091 enumerable: true
34092 },
34093 videoPlaybackQuality: {
34094 get: () => this.tech_.getVideoPlaybackQuality(),
34095 enumerable: true
34096 }
34097 });
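      // Reading these stats from application code is the documented pattern,
      // e.g. (after the source has been set up):
      //
      //   const vhs = player.tech().vhs;
      //   console.log(vhs.stats.bandwidth, vhs.stats.mediaBytesTransferred);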
34098 this.tech_.one('canplay', this.playlistController_.setupFirstPlay.bind(this.playlistController_));
34099 this.tech_.on('bandwidthupdate', () => {
34100 if (this.options_.useBandwidthFromLocalStorage) {
34101 updateVhsLocalStorage({
34102 bandwidth: this.bandwidth,
34103 throughput: Math.round(this.throughput)
34104 });
34105 }
34106 });
34107 this.playlistController_.on('selectedinitialmedia', () => {
34108 // Add the manual rendition mix-in to VhsHandler
34109 renditionSelectionMixin(this);
34110 });
34111 this.playlistController_.sourceUpdater_.on('createdsourcebuffers', () => {
34112 this.setupEme_();
34113 }); // the bandwidth of the primary segment loader is our best
34114 // estimate of overall bandwidth
34115
34116 this.on(this.playlistController_, 'progress', function () {
34117 this.tech_.trigger('progress');
34118 }); // In the live case, we need to ignore the very first `seeking` event since
34119 // that will be the result of the seek-to-live behavior
34120
34121 this.on(this.playlistController_, 'firstplay', function () {
34122 this.ignoreNextSeekingEvent_ = true;
34123 });
34124 this.setupQualityLevels_(); // do nothing if the tech has been disposed already
34125 // this can occur if someone sets the src in player.ready(), for instance
34126
34127 if (!this.tech_.el()) {
34128 return;
34129 }
34130
34131 this.mediaSourceUrl_ = window.URL.createObjectURL(this.playlistController_.mediaSource);
34132 this.tech_.src(this.mediaSourceUrl_);
34133 }
34134
34135 createKeySessions_() {
34136 const audioPlaylistLoader = this.playlistController_.mediaTypes_.AUDIO.activePlaylistLoader;
34137 this.logger_('waiting for EME key session creation');
34138 waitForKeySessionCreation({
34139 player: this.player_,
34140 sourceKeySystems: this.source_.keySystems,
34141 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
34142 mainPlaylists: this.playlists.main.playlists
34143 }).then(() => {
34144 this.logger_('created EME key session');
34145 this.playlistController_.sourceUpdater_.initializedEme();
34146 }).catch(err => {
34147 this.logger_('error while creating EME key session', err);
34148 this.player_.error({
34149 message: 'Failed to initialize media keys for EME',
34150 code: 3
34151 });
34152 });
34153 }
34154
34155 handleWaitingForKey_() {
34156 // If waitingforkey is fired, it's possible that the data that's necessary to retrieve
34157 // the key is in the manifest. While this should've happened on initial source load, it
34158 // may happen again in live streams where the keys change, and the manifest info
34159 // reflects the update.
34160 //
34161 // Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's
34162 // already requested keys for, we don't have to worry about this generating extraneous
34163 // requests.
34164 this.logger_('waitingforkey fired, attempting to create any new key sessions');
34165 this.createKeySessions_();
34166 }
34167 /**
34168 * If necessary and EME is available, sets up EME options and waits for key session
34169 * creation.
34170 *
34171 * This function also updates the source updater so that it can be used, as for some
34172 * browsers, EME must be configured before content is appended (if appending unencrypted
34173 * content before encrypted content).
34174 */
34175
34176
34177 setupEme_() {
34178 const audioPlaylistLoader = this.playlistController_.mediaTypes_.AUDIO.activePlaylistLoader;
34179 const didSetupEmeOptions = setupEmeOptions({
34180 player: this.player_,
34181 sourceKeySystems: this.source_.keySystems,
34182 media: this.playlists.media(),
34183 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
34184 });
34185 this.player_.tech_.on('keystatuschange', e => {
34186 this.playlistController_.updatePlaylistByKeyStatus(e.keyId, e.status);
34187 });
34188 this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
34189 this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_);
34190
34191 if (!didSetupEmeOptions) {
34192 // If EME options were not set up, we've done all we could to initialize EME.
34193 this.playlistController_.sourceUpdater_.initializedEme();
34194 return;
34195 }
34196
34197 this.createKeySessions_();
34198 }
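    // Sketch of the source shape that drives setupEme_: key systems are
    // declared on the source object (this.source_.keySystems above) and handled
    // by videojs-contrib-eme. The URLs here are placeholders.
    //
    //   player.src({
    //     src: 'https://example.com/manifest.mpd',
    //     type: 'application/dash+xml',
    //     keySystems: {
    //       'com.widevine.alpha': '<your-license-server-url>'
    //     }
    //   });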
34199 /**
34200 * Initializes the quality levels and sets listeners to update them.
34201 *
34202 * @method setupQualityLevels_
34203 * @private
34204 */
34205
34206
34207 setupQualityLevels_() {
34208 const player = videojs__default["default"].players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
34209 // or qualityLevels_ listeners have already been setup, do nothing.
34210
34211 if (!player || !player.qualityLevels || this.qualityLevels_) {
34212 return;
34213 }
34214
34215 this.qualityLevels_ = player.qualityLevels();
34216 this.playlistController_.on('selectedinitialmedia', () => {
34217 handleVhsLoadedMetadata(this.qualityLevels_, this);
34218 });
34219 this.playlists.on('mediachange', () => {
34220 handleVhsMediaChange(this.qualityLevels_, this.playlists);
34221 });
34222 }
34223 /**
34224 * return the version
34225 */
34226
34227
34228 static version() {
34229 return {
34230 '@videojs/http-streaming': version$4,
34231 'mux.js': version$3,
34232 'mpd-parser': version$2,
34233 'm3u8-parser': version$1,
34234 'aes-decrypter': version
34235 };
34236 }
34237 /**
34238 * return the version
34239 */
34240
34241
34242 version() {
34243 return this.constructor.version();
34244 }
34245
34246 canChangeType() {
34247 return SourceUpdater.canChangeType();
34248 }
34249 /**
34250 * Begin playing the video.
34251 */
34252
34253
34254 play() {
34255 this.playlistController_.play();
34256 }
34257 /**
34258 * a wrapper around the function in PlaylistController
34259 */
34260
34261
34262 setCurrentTime(currentTime) {
34263 this.playlistController_.setCurrentTime(currentTime);
34264 }
34265 /**
34266 * a wrapper around the function in PlaylistController
34267 */
34268
34269
34270 duration() {
34271 return this.playlistController_.duration();
34272 }
34273 /**
34274 * a wrapper around the function in PlaylistController
34275 */
34276
34277
34278 seekable() {
34279 return this.playlistController_.seekable();
34280 }
34281 /**
34282 * Abort all outstanding work and cleanup.
34283 */
34284
34285
34286 dispose() {
34287 if (this.playbackWatcher_) {
34288 this.playbackWatcher_.dispose();
34289 }
34290
34291 if (this.playlistController_) {
34292 this.playlistController_.dispose();
34293 }
34294
34295 if (this.qualityLevels_) {
34296 this.qualityLevels_.dispose();
34297 }
34298
34299 if (this.tech_ && this.tech_.vhs) {
34300 delete this.tech_.vhs;
34301 }
34302
34303 if (this.mediaSourceUrl_ && window.URL.revokeObjectURL) {
34304 window.URL.revokeObjectURL(this.mediaSourceUrl_);
34305 this.mediaSourceUrl_ = null;
34306 }
34307
34308 if (this.tech_) {
34309 this.tech_.off('waitingforkey', this.handleWaitingForKey_);
34310 }
34311
34312 super.dispose();
34313 }
34314
34315 convertToProgramTime(time, callback) {
34316 return getProgramTime({
34317 playlist: this.playlistController_.media(),
34318 time,
34319 callback
34320 });
34321 } // the player must be playing before calling this
34322
34323
34324 seekToProgramTime(programTime, callback, pauseAfterSeek = true, retryCount = 2) {
34325 return seekToProgramTime({
34326 programTime,
34327 playlist: this.playlistController_.media(),
34328 retryCount,
34329 pauseAfterSeek,
34330 seekTo: this.options_.seekTo,
34331 tech: this.options_.tech,
34332 callback
34333 });
34334 }
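    // Usage sketch for the program-time helpers above; the callback shapes
    // follow my reading of the VHS docs (error-first, with the converted or
    // seeked-to time on success) and are illustrative. The player must be
    // playing before calling seekToProgramTime.
    //
    //   player.tech().vhs.convertToProgramTime(3, (err, programTime) => {
    //     if (!err) console.log(programTime.mediaSeconds);
    //   });
    //
    //   player.tech().vhs.seekToProgramTime('2018-10-12T22:33:49.037+00:00', (err, newTime) => {
    //     if (!err) console.log('seeked to', newTime);
    //   });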
34335 /**
34336 * Adds the onRequest, onResponse, offRequest and offResponse functions
34337 * to the VhsHandler xhr Object.
34338 */
34339
34340
34341 setupXhrHooks_() {
34342 /**
34343 * A player function for setting an onRequest hook
34344 *
34345 * @param {function} callback for request modification
34346 */
34347 this.xhr.onRequest = callback => {
34348 addOnRequestHook(this.xhr, callback);
34349 };
34350 /**
34351 * A player function for setting an onResponse hook
34352 *
34353 * @param {function} callback for response data retrieval
34354 */
34355
34356
34357 this.xhr.onResponse = callback => {
34358 addOnResponseHook(this.xhr, callback);
34359 };
34360 /**
34361 * Deletes a player onRequest callback if it exists
34362 *
34363 * @param {function} callback to delete from the player set
34364 */
34365
34366
34367 this.xhr.offRequest = callback => {
34368 removeOnRequestHook(this.xhr, callback);
34369 };
34370 /**
34371 * Deletes a player onResponse callback if it exists
34372 *
34373 * @param {function} callback to delete from the player set
34374 */
34375
34376
34377 this.xhr.offResponse = callback => {
34378 removeOnResponseHook(this.xhr, callback);
34379 }; // Trigger an event on the player to notify the user that vhs is ready to set xhr hooks.
34380 // This allows hooks to be set before the source is set to vhs when handleSource is called.
34381
34382
34383 this.player_.trigger('xhr-hooks-ready');
34384 }
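    // Usage sketch for the per-player hooks set up above: wait for the
    // 'xhr-hooks-ready' event triggered at the end of setupXhrHooks_, then
    // register hooks scoped to this player rather than globally. The header
    // name and value are hypothetical.
    //
    //   player.on('xhr-hooks-ready', () => {
    //     const playerRequestHook = (options) => {
    //       options.beforeSend = (xhr) => {
    //         xhr.setRequestHeader('X-Player-Token', 'abc');
    //       };
    //       return options;
    //     };
    //     player.tech().vhs.xhr.onRequest(playerRequestHook);
    //   });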
34385
34386 }
34387 /**
34388 * The Source Handler object, which informs video.js what additional
34389 * MIME types are supported and sets up playback. It is registered
34390 * automatically to the appropriate tech based on the capabilities of
34391 * the browser it is running in. It is not necessary to use or modify
34392 * this object in normal usage.
34393 */
34394
34395
34396 const VhsSourceHandler = {
34397 name: 'videojs-http-streaming',
34398 VERSION: version$4,
34399
34400 canHandleSource(srcObj, options = {}) {
34401 const localOptions = merge$1(videojs__default["default"].options, options);
34402 return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
34403 },
34404
34405 handleSource(source, tech, options = {}) {
34406 const localOptions = merge$1(videojs__default["default"].options, options);
34407 tech.vhs = new VhsHandler(source, tech, localOptions);
34408 tech.vhs.xhr = xhrFactory();
34409 tech.vhs.setupXhrHooks_();
34410 tech.vhs.src(source.src, source.type);
34411 return tech.vhs;
34412 },
34413
34414 canPlayType(type, options) {
34415 const simpleType = simpleTypeFromSourceType(type);
34416
34417 if (!simpleType) {
34418 return '';
34419 }
34420
34421 const overrideNative = VhsSourceHandler.getOverrideNative(options);
34422 const supportsTypeNatively = Vhs.supportsTypeNatively(simpleType);
34423 const canUseMsePlayback = !supportsTypeNatively || overrideNative;
34424 return canUseMsePlayback ? 'maybe' : '';
34425 },
34426
34427 getOverrideNative(options = {}) {
34428 const {
34429 vhs = {}
34430 } = options;
34431 const defaultOverrideNative = !(videojs__default["default"].browser.IS_ANY_SAFARI || videojs__default["default"].browser.IS_IOS);
34432 const {
34433 overrideNative = defaultOverrideNative
34434 } = vhs;
34435 return overrideNative;
34436 }
34437
34438 };
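  // Configuration sketch for getOverrideNative above: overrideNative defaults
  // to true everywhere except Safari and iOS, and overriding only works when
  // native track emulation is enabled on the tech (see the VhsHandler
  // constructor's emulated-tracks check). The player id is illustrative.
  //
  //   videojs('my-player', {
  //     html5: {
  //       vhs: { overrideNative: !videojs.browser.IS_ANY_SAFARI },
  //       nativeAudioTracks: false,
  //       nativeVideoTracks: false
  //     }
  //   });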
34439 /**
34440 * Check to see if the native MediaSource object exists and supports
34441 * an MP4 container with both H.264 video and AAC-LC audio.
34442 *
34443 * @return {boolean} if native media sources are supported
34444 */
34445
34446 const supportsNativeMediaSources = () => {
34447 return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
34448 }; // register source handlers with the appropriate techs
34449
34450
34451 if (supportsNativeMediaSources()) {
34452 videojs__default["default"].getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
34453 }
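  // The codec string above is H.264 (Main profile) with AAC-LC audio;
  // browserSupportsCodec reduces to a check along these lines (a simplified
  // sketch, not the exact vhs-utils implementation):
  //
  //   const supported = window.MediaSource &&
  //     MediaSource.isTypeSupported('video/mp4; codecs="avc1.4d400d,mp4a.40.2"');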
34454
34455 videojs__default["default"].VhsHandler = VhsHandler;
34456 videojs__default["default"].VhsSourceHandler = VhsSourceHandler;
34457 videojs__default["default"].Vhs = Vhs;
34458
34459 if (!videojs__default["default"].use) {
34460 videojs__default["default"].registerComponent('Vhs', Vhs);
34461 }
34462
34463 videojs__default["default"].options.vhs = videojs__default["default"].options.vhs || {};
34464
34465 if (!videojs__default["default"].getPlugin || !videojs__default["default"].getPlugin('reloadSourceOnError')) {
34466 videojs__default["default"].registerPlugin('reloadSourceOnError', reloadSourceOnError);
34467 }
34468
34469 exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
34470 exports.Vhs = Vhs;
34471 exports.VhsHandler = VhsHandler;
34472 exports.VhsSourceHandler = VhsSourceHandler;
34473 exports.emeKeySystems = emeKeySystems;
34474 exports.expandDataUri = expandDataUri;
34475 exports.getAllPsshKeySystemsOptions = getAllPsshKeySystemsOptions;
34476 exports.setupEmeOptions = setupEmeOptions;
34477 exports.simpleTypeFromSourceType = simpleTypeFromSourceType;
34478 exports.waitForKeySessionCreation = waitForKeySessionCreation;
34479
34480 Object.defineProperty(exports, '__esModule', { value: true });
34481
34482}));