UNPKG

961 kB · JavaScript · View Raw
1/**
2 * @videojs/http-streaming
3 * @version 1.13.2
4 * @copyright 2020 Brightcove, Inc
5 * @license Apache-2.0
6 */
7(function (global, factory) {
8 typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('video.js')) :
9 typeof define === 'function' && define.amd ? define(['exports', 'video.js'], factory) :
10 (factory((global.videojsHttpStreaming = {}),global.videojs));
11}(this, (function (exports,videojs) { 'use strict';
12
// unwrap an ES-module default export of video.js, if that is how it was provided
videojs = videojs && videojs.hasOwnProperty('default') ? videojs['default'] : videojs;

// best-available global object for this runtime (browser, Node, worker, or a bare fallback)
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
16
/**
 * Execute a CommonJS-style module factory and return its exports.
 *
 * @param {Function} fn factory invoked as `fn(module, module.exports)`
 * @param {Object} [module] ignored; a fresh module object is always created
 * @return {*} the populated `module.exports`
 */
function createCommonjsModule(fn, module) {
  module = { exports: {} };
  fn(module, module.exports);
  return module.exports;
}
20
// stub stand-in used when no real `document` exists (e.g. Node.js)
var minDoc = {};

// object on which the document shim is cached so that every bundled copy
// of this module shares a single instance
var topLevel = typeof commonjsGlobal !== 'undefined' ? commonjsGlobal : typeof window !== 'undefined' ? window : {};

var doccy;

if (typeof document !== 'undefined') {
  doccy = document;
} else {
  // no real document: look for a previously cached shim first
  doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'];

  if (!doccy) {
    doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'] = minDoc;
  }
}

// the real `document` in browsers, otherwise the cached `minDoc` stub
var document_1 = doccy;
38
// Bundled copy of the url-toolkit package: RFC 1808 relative-URL resolution.
var urlToolkit = createCommonjsModule(function (module, exports) {
  // see https://tools.ietf.org/html/rfc1808

  /* jshint ignore:start */
  (function (root) {
    /* jshint ignore:end */

    // Captures, in order: [1] scheme ("http:"), [2] netLoc ("//host"),
    // [3] path, [4] params (";..."), [5] query ("?..."), [6] fragment ("#...")
    var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/\?#]*\/)*.*?)??(;.*?)?(\?.*?)?(#.*?)?$/;
    // Splits a path into its first segment and the remainder.
    var FIRST_SEGMENT_REGEX = /^([^\/?#]*)(.*)$/;
    // NOTE: both of the following are applied to a REVERSED copy of the
    // path (see normalizePath), which lets lookahead stand in for lookbehind.
    var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
    var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/).*?(?=\/)/g;

    var URLToolkit = { // jshint ignore:line
      // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
      // E.g
      // With opts.alwaysNormalize = false (default, spec compliant)
      // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
      // With opts.alwaysNormalize = true (not spec compliant)
      // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
      buildAbsoluteURL: function buildAbsoluteURL(baseURL, relativeURL, opts) {
        opts = opts || {};
        // remove any remaining space and CRLF
        baseURL = baseURL.trim();
        relativeURL = relativeURL.trim();
        if (!relativeURL) {
          // 2a) If the embedded URL is entirely empty, it inherits the
          // entire base URL (i.e., is set equal to the base URL)
          // and we are done.
          if (!opts.alwaysNormalize) {
            return baseURL;
          }
          var basePartsForNormalise = URLToolkit.parseURL(baseURL);
          if (!basePartsForNormalise) {
            throw new Error('Error trying to parse base URL.');
          }
          basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
          return URLToolkit.buildURLFromParts(basePartsForNormalise);
        }
        var relativeParts = URLToolkit.parseURL(relativeURL);
        if (!relativeParts) {
          throw new Error('Error trying to parse relative URL.');
        }
        if (relativeParts.scheme) {
          // 2b) If the embedded URL starts with a scheme name, it is
          // interpreted as an absolute URL and we are done.
          if (!opts.alwaysNormalize) {
            return relativeURL;
          }
          relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
          return URLToolkit.buildURLFromParts(relativeParts);
        }
        var baseParts = URLToolkit.parseURL(baseURL);
        if (!baseParts) {
          throw new Error('Error trying to parse base URL.');
        }
        if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
          // If netLoc missing and path doesn't start with '/', assume everthing before the first '/' is the netLoc
          // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
          var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
          baseParts.netLoc = pathParts[1];
          baseParts.path = pathParts[2];
        }
        if (baseParts.netLoc && !baseParts.path) {
          baseParts.path = '/';
        }
        var builtParts = {
          // 2c) Otherwise, the embedded URL inherits the scheme of
          // the base URL.
          scheme: baseParts.scheme,
          netLoc: relativeParts.netLoc,
          path: null,
          params: relativeParts.params,
          query: relativeParts.query,
          fragment: relativeParts.fragment
        };
        if (!relativeParts.netLoc) {
          // 3) If the embedded URL's <net_loc> is non-empty, we skip to
          // Step 7.  Otherwise, the embedded URL inherits the <net_loc>
          // (if any) of the base URL.
          builtParts.netLoc = baseParts.netLoc;
          // 4) If the embedded URL path is preceded by a slash "/", the
          // path is not relative and we skip to Step 7.
          if (relativeParts.path[0] !== '/') {
            if (!relativeParts.path) {
              // 5) If the embedded URL path is empty (and not preceded by a
              // slash), then the embedded URL inherits the base URL path
              builtParts.path = baseParts.path;
              // 5a) if the embedded URL's <params> is non-empty, we skip to
              // step 7; otherwise, it inherits the <params> of the base
              // URL (if any) and
              if (!relativeParts.params) {
                builtParts.params = baseParts.params;
                // 5b) if the embedded URL's <query> is non-empty, we skip to
                // step 7; otherwise, it inherits the <query> of the base
                // URL (if any) and we skip to step 7.
                if (!relativeParts.query) {
                  builtParts.query = baseParts.query;
                }
              }
            } else {
              // 6) The last segment of the base URL's path (anything
              // following the rightmost slash "/", or the entire path if no
              // slash is present) is removed and the embedded URL's path is
              // appended in its place.
              var baseURLPath = baseParts.path;
              var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
              builtParts.path = URLToolkit.normalizePath(newPath);
            }
          }
        }
        if (builtParts.path === null) {
          builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
        }
        return URLToolkit.buildURLFromParts(builtParts);
      },
      // Decompose a URL string into its six RFC 1808 components; returns
      // null when the (very permissive) URL_REGEX fails to match.
      parseURL: function parseURL(url) {
        var parts = URL_REGEX.exec(url);
        if (!parts) {
          return null;
        }
        return {
          scheme: parts[1] || '',
          netLoc: parts[2] || '',
          path: parts[3] || '',
          params: parts[4] || '',
          query: parts[5] || '',
          fragment: parts[6] || ''
        };
      },
      normalizePath: function normalizePath(path) {
        // The following operations are
        // then applied, in order, to the new path:
        // 6a) All occurrences of "./", where "." is a complete path
        // segment, are removed.
        // 6b) If the path ends with "." as a complete path segment,
        // that "." is removed.
        // (the path is reversed so the regexes can use lookahead in place
        // of lookbehind; it is reversed back before returning)
        path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, '');
        // 6c) All occurrences of "<segment>/../", where <segment> is a
        // complete path segment not equal to "..", are removed.
        // Removal of these path segments is performed iteratively,
        // removing the leftmost matching pattern on each iteration,
        // until no matching pattern remains.
        // 6d) If the path ends with "<segment>/..", where <segment> is a
        // complete path segment not equal to "..", that
        // "<segment>/.." is removed.
        while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {} // jshint ignore:line
        return path.split('').reverse().join('');
      },
      // Reassemble a parts object; each component already carries its own
      // delimiter (":", "//", ";", "?", "#"), so plain concatenation works.
      buildURLFromParts: function buildURLFromParts(parts) {
        return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
      }
    };

    /* jshint ignore:start */
    module.exports = URLToolkit;
  })(commonjsGlobal);
  /* jshint ignore:end */
});
197
// Pick the most appropriate "window-like" global for the current runtime:
// a real browser window, the CommonJS/Node global, a worker's `self`,
// or an empty object as a last resort.
var win;

if (typeof window !== "undefined") {
  win = window;
} else if (typeof commonjsGlobal !== "undefined") {
  win = commonjsGlobal;
} else if (typeof self !== "undefined") {
  win = self;
} else {
  win = {};
}

// window shim used throughout the rest of the bundle
var window_1 = win;
211
212 /**
213 * @file resolve-url.js - Handling how URLs are resolved and manipulated
214 */
215
/**
 * Resolve a possibly-relative URL against a base URL.
 *
 * @param {string} baseURL the URL to resolve against (may itself be relative,
 * in which case it is first anchored to the current page location)
 * @param {string} relativeURL the URL to resolve
 * @return {string} an absolute URL
 */
var resolveUrl = function resolveUrl(baseURL, relativeURL) {
  // a URL that already carries a scheme is absolute — nothing to resolve
  var hasScheme = /^[a-z]+:/i.test(relativeURL);

  if (hasScheme) {
    return relativeURL;
  }

  // a base without "//" is itself relative; anchor it to the page first
  var absoluteBase = /\/\//i.test(baseURL) ? baseURL : urlToolkit.buildAbsoluteURL(window_1.location.href, baseURL);

  return urlToolkit.buildAbsoluteURL(absoluteBase, relativeURL);
};
229
230 /**
231 * Checks whether xhr request was redirected and returns correct url depending
232 * on `handleManifestRedirects` option
233 *
234 * @api private
235 *
236 * @param {String} url - an url being requested
237 * @param {XMLHttpRequest} req - xhr request result
238 *
239 * @return {String}
240 */
/**
 * Checks whether an xhr request was redirected and returns the correct url
 * depending on the `handleManifestRedirects` option.
 *
 * @api private
 *
 * @param {boolean} handleManifestRedirect whether redirects should be followed
 * @param {string} url the url that was requested
 * @param {XMLHttpRequest} req the xhr request result
 * @return {string} the redirected url when applicable, otherwise `url`
 */
var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  // responseURL reflects the final URL after any redirects; see:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  var wasRedirected = handleManifestRedirect && req.responseURL && url !== req.responseURL;

  return wasRedirected ? req.responseURL : url;
};
251
252 /*! @name m3u8-parser @version 4.4.0 @license Apache-2.0 */
253
/**
 * Babel `_extends` helper: shallow-merge own enumerable properties of each
 * source object into `target` (later sources win) and return `target`.
 * Lazily picks `Object.assign` (or a polyfill) on first call, then rebinds
 * itself so subsequent calls skip the check.
 */
function _extends() {
  _extends = Object.assign || function (target) {
    var sources = Array.prototype.slice.call(arguments, 1);

    for (var i = 0; i < sources.length; i++) {
      var source = sources[i];

      for (var key in source) {
        // copy own properties only, never inherited ones
        if (Object.prototype.hasOwnProperty.call(source, key)) {
          target[key] = source[key];
        }
      }
    }

    return target;
  };

  return _extends.apply(this, arguments);
}
271
/**
 * Babel loose-mode inheritance helper: wire `subClass` to inherit from
 * `superClass` both on instances (prototype chain) and statics (__proto__).
 */
function _inheritsLoose(subClass, superClass) {
  var proto = Object.create(superClass.prototype);
  proto.constructor = subClass;
  subClass.prototype = proto;
  subClass.__proto__ = superClass;
}
277
/**
 * Babel helper: guard that `this` was initialised (i.e. `super()` ran)
 * before a derived constructor uses it. Any value other than `undefined`
 * — including `null` — passes through untouched.
 *
 * @throws {ReferenceError} when `self` is `undefined`
 */
function _assertThisInitialized(self) {
  if (self !== void 0) {
    return self;
  }

  throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
}
285
286 /**
287 * @file stream.js
288 */
289
290 /**
291 * A lightweight readable stream implementation that handles event dispatching.
292 *
293 * @class Stream
294 */
/**
 * A lightweight readable stream implementation that handles event dispatching.
 *
 * @class Stream
 */
var Stream =
/*#__PURE__*/
function () {
  function Stream() {
    // map of event type -> array of listener functions
    this.listeners = {};
  }
  /**
   * Add a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener the callback to be invoked when an event of
   * the specified type occurs
   */

  var _proto = Stream.prototype;

  _proto.on = function on(type, listener) {
    if (!this.listeners[type]) {
      this.listeners[type] = [];
    }

    this.listeners[type].push(listener);
  }
  /**
   * Remove a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener a function previously registered for this
   * type of event through `on`
   * @return {boolean} if we could turn it off or not
   */
  ;

  _proto.off = function off(type, listener) {
    if (!this.listeners[type]) {
      return false;
    }

    var index = this.listeners[type].indexOf(listener);

    // Only splice when the listener was actually found. The previous
    // unconditional `splice(index, 1)` removed the LAST registered
    // listener whenever the lookup failed, because `indexOf` returns -1
    // and `splice(-1, 1)` deletes the final element.
    if (index > -1) {
      this.listeners[type].splice(index, 1);
    }

    return index > -1;
  }
  /**
   * Trigger an event of the specified type on this stream. Any additional
   * arguments to this function are passed as parameters to event listeners.
   *
   * @param {string} type the event name
   */
  ;

  _proto.trigger = function trigger(type) {
    var callbacks = this.listeners[type];
    var i;
    var length;
    var args;

    if (!callbacks) {
      return;
    } // Slicing the arguments on every invocation of this method
    // can add a significant amount of overhead. Avoid the
    // intermediate object creation for the common case of a
    // single callback argument


    if (arguments.length === 2) {
      length = callbacks.length;

      for (i = 0; i < length; ++i) {
        callbacks[i].call(this, arguments[1]);
      }
    } else {
      args = Array.prototype.slice.call(arguments, 1);
      length = callbacks.length;

      for (i = 0; i < length; ++i) {
        callbacks[i].apply(this, args);
      }
    }
  }
  /**
   * Destroys the stream and cleans up by dropping all registered listeners.
   */
  ;

  _proto.dispose = function dispose() {
    this.listeners = {};
  }
  /**
   * Forwards all `data` events on this stream to the destination stream. The
   * destination stream should provide a method `push` to receive the data
   * events as they arrive.
   *
   * @param {Stream} destination the stream that will receive all `data` events
   * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
   */
  ;

  _proto.pipe = function pipe(destination) {
    this.on('data', function (data) {
      destination.push(data);
    });
  };

  return Stream;
}();
400
401 /**
402 * A stream that buffers string input and generates a `data` event for each
403 * line.
404 *
405 * @class LineStream
406 * @extends Stream
407 */
408
/**
 * A stream that buffers string input and generates a `data` event for each
 * complete line (newline-terminated; the trailing partial line stays
 * buffered until more data arrives).
 *
 * @class LineStream
 * @extends Stream
 */

var LineStream =
/*#__PURE__*/
function (_Stream) {
  _inheritsLoose(LineStream, _Stream);

  function LineStream() {
    var _this = _Stream.call(this) || this;

    // holds input not yet terminated by a newline
    _this.buffer = '';
    return _this;
  }
  /**
   * Add new data to be parsed.
   *
   * @param {string} data the text to process
   */

  LineStream.prototype.push = function push(data) {
    this.buffer += data;

    var newlineAt = this.buffer.indexOf('\n');

    while (newlineAt > -1) {
      // emit everything up to (but not including) the newline, then
      // drop the emitted line plus its newline from the buffer
      this.trigger('data', this.buffer.substring(0, newlineAt));
      this.buffer = this.buffer.substring(newlineAt + 1);
      newlineAt = this.buffer.indexOf('\n');
    }
  };

  return LineStream;
}(Stream);
442
443 /**
444 * "forgiving" attribute list psuedo-grammar:
445 * attributes -> keyvalue (',' keyvalue)*
446 * keyvalue -> key '=' value
447 * key -> [^=]*
448 * value -> '"' [^"]* '"' | [^,]*
449 */
450
/**
 * Build a fresh RegExp that matches one `key=value` pair of a "forgiving"
 * M3U8 attribute list (values may be quoted to contain commas). A new
 * instance is returned on every call so no `lastIndex` state is shared.
 *
 * @return {RegExp} the key/value separator pattern
 */
var attributeSeparator = function attributeSeparator() {
  var keyPattern = '[^=]*';
  var valuePattern = '"[^"]*"|[^,]*';
  var pairPattern = '(?:' + keyPattern + ')=(?:' + valuePattern + ')';

  return new RegExp('(?:^|,)(' + pairPattern + ')');
};
457 /**
458 * Parse attributes from a line given the separator
459 *
460 * @param {string} attributes the attribute line to parse
461 */
462
/**
 * Parse an M3U8 attribute list into a plain object.
 *
 * @param {string} attributes the attribute line to parse
 * @return {Object} map of attribute name to (unquoted, trimmed) value
 */
var parseAttributes = function parseAttributes(attributes) {
  // split the string using the key=value pattern as the separator; the
  // capture group keeps each matched pair in the resulting array
  var chunks = attributes.split(attributeSeparator());
  var result = {};

  for (var i = chunks.length - 1; i >= 0; i--) {
    // empty strings are the unmatched portions between pairs
    if (chunks[i] === '') {
      continue;
    }

    // split into key and value at the first '='
    var pair = /([^=]*)=(.*)/.exec(chunks[i]).slice(1);

    // trim whitespace from both sides of key and value
    var key = pair[0].replace(/^\s+|\s+$/g, '');
    var value = pair[1].replace(/^\s+|\s+$/g, '');

    // strip optional surrounding quotes from the value
    value = value.replace(/^['"](.*)['"]$/g, '$1');
    result[key] = value;
  }

  return result;
};
487 /**
488 * A line-level M3U8 parser event stream. It expects to receive input one
489 * line at a time and performs a context-free parse of its contents. A stream
490 * interpretation of a manifest can be useful if the manifest is expected to
491 * be too large to fit comfortably into memory or the entirety of the input
492 * is not immediately available. Otherwise, it's probably much easier to work
493 * with a regular `Parser` object.
494 *
495 * Produces `data` events with an object that captures the parser's
496 * interpretation of the input. That object has a property `tag` that is one
497 * of `uri`, `comment`, or `tag`. URIs only have a single additional
498 * property, `line`, which captures the entirety of the input without
499 * interpretation. Comments similarly have a single additional property
500 * `text` which is the input without the leading `#`.
501 *
502 * Tags always have a property `tagType` which is the lower-cased version of
503 * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
504 * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
505 * tags are given the tag type `unknown` and a single additional property
506 * `data` with the remainder of the input.
507 *
508 * @class ParseStream
509 * @extends Stream
510 */
511
var ParseStream =
/*#__PURE__*/
function (_Stream) {
  _inheritsLoose(ParseStream, _Stream);

  function ParseStream() {
    var _this;

    _this = _Stream.call(this) || this;
    // parsers registered through addParser(); each gets first crack at a line
    _this.customParsers = [];
    // line rewriters registered through addTagMapper(); run before parsing
    _this.tagMappers = [];
    return _this;
  }
  /**
   * Parses an additional line of input.
   *
   * Emits a `data` event describing the line as a `uri`, `comment`, or
   * `tag` object (see the class-level documentation for the shape of each).
   *
   * @param {string} line a single line of an M3U8 file to parse
   */

  var _proto = ParseStream.prototype;

  _proto.push = function push(line) {
    var _this2 = this;

    var match;
    var event; // strip whitespace

    line = line.trim();

    if (line.length === 0) {
      // ignore empty lines
      return;
    } // URIs


    if (line[0] !== '#') {
      // any non-comment line is a URI (segment or playlist reference)
      this.trigger('data', {
        type: 'uri',
        uri: line
      });
      return;
    } // map tags


    // run every registered tag mapper; each mapper that changes the line
    // contributes an additional translated line, and the original line is
    // always processed as well
    var newLines = this.tagMappers.reduce(function (acc, mapper) {
      var mappedLine = mapper(line); // skip if unchanged

      if (mappedLine === line) {
        return acc;
      }

      return acc.concat([mappedLine]);
    }, [line]);
    newLines.forEach(function (newLine) {
      // custom parsers take priority; the first one to claim the line
      // (by returning truthy) consumes it
      for (var i = 0; i < _this2.customParsers.length; i++) {
        if (_this2.customParsers[i].call(_this2, newLine)) {
          return;
        }
      } // Comments


      if (newLine.indexOf('#EXT') !== 0) {
        // a '#' line that is not an EXT tag is a plain comment
        _this2.trigger('data', {
          type: 'comment',
          text: newLine.slice(1)
        });

        return;
      } // strip off any carriage returns here so the regex matching
      // doesn't have to account for them.


      newLine = newLine.replace('\r', ''); // Tags
      // each tag below is tried in order; the first regex that matches
      // emits its event and returns

      match = /^#EXTM3U/.exec(newLine);

      if (match) {
        _this2.trigger('data', {
          type: 'tag',
          tagType: 'm3u'
        });

        return;
      }

      match = /^#EXTINF:?([0-9\.]*)?,?(.*)?$/.exec(newLine);

      if (match) {
        // segment duration (float, seconds) and optional title
        event = {
          type: 'tag',
          tagType: 'inf'
        };

        if (match[1]) {
          event.duration = parseFloat(match[1]);
        }

        if (match[2]) {
          event.title = match[2];
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-TARGETDURATION:?([0-9.]*)?/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'targetduration'
        };

        if (match[1]) {
          event.duration = parseInt(match[1], 10);
        }

        _this2.trigger('data', event);

        return;
      }

      // non-standard tag used by some encoders to declare total duration
      match = /^#ZEN-TOTAL-DURATION:?([0-9.]*)?/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'totalduration'
        };

        if (match[1]) {
          event.duration = parseInt(match[1], 10);
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'version'
        };

        if (match[1]) {
          event.version = parseInt(match[1], 10);
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-MEDIA-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'media-sequence'
        };

        if (match[1]) {
          event.number = parseInt(match[1], 10);
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-DISCONTINUITY-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'discontinuity-sequence'
        };

        if (match[1]) {
          event.number = parseInt(match[1], 10);
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-PLAYLIST-TYPE:?(.*)?$/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'playlist-type'
        };

        if (match[1]) {
          event.playlistType = match[1];
        }

        _this2.trigger('data', event);

        return;
      }

      // byterange is "<length>[@<offset>]", both in bytes
      match = /^#EXT-X-BYTERANGE:?([0-9.]*)?@?([0-9.]*)?/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'byterange'
        };

        if (match[1]) {
          event.length = parseInt(match[1], 10);
        }

        if (match[2]) {
          event.offset = parseInt(match[2], 10);
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-ALLOW-CACHE:?(YES|NO)?/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'allow-cache'
        };

        if (match[1]) {
          event.allowed = !/NO/.test(match[1]);
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-MAP:?(.*)$/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'map'
        };

        if (match[1]) {
          var attributes = parseAttributes(match[1]);

          if (attributes.URI) {
            event.uri = attributes.URI;
          }

          if (attributes.BYTERANGE) {
            // BYTERANGE attribute is "<length>@<offset>"
            var _attributes$BYTERANGE = attributes.BYTERANGE.split('@'),
                length = _attributes$BYTERANGE[0],
                offset = _attributes$BYTERANGE[1];

            event.byterange = {};

            if (length) {
              event.byterange.length = parseInt(length, 10);
            }

            if (offset) {
              event.byterange.offset = parseInt(offset, 10);
            }
          }
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-STREAM-INF:?(.*)$/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'stream-inf'
        };

        if (match[1]) {
          event.attributes = parseAttributes(match[1]);

          // RESOLUTION is "<width>x<height>"; convert to an object of ints
          if (event.attributes.RESOLUTION) {
            var split = event.attributes.RESOLUTION.split('x');
            var resolution = {};

            if (split[0]) {
              resolution.width = parseInt(split[0], 10);
            }

            if (split[1]) {
              resolution.height = parseInt(split[1], 10);
            }

            event.attributes.RESOLUTION = resolution;
          }

          if (event.attributes.BANDWIDTH) {
            event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
          }

          if (event.attributes['PROGRAM-ID']) {
            event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
          }
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-MEDIA:?(.*)$/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'media'
        };

        if (match[1]) {
          event.attributes = parseAttributes(match[1]);
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-ENDLIST/.exec(newLine);

      if (match) {
        _this2.trigger('data', {
          type: 'tag',
          tagType: 'endlist'
        });

        return;
      }

      match = /^#EXT-X-DISCONTINUITY/.exec(newLine);

      if (match) {
        _this2.trigger('data', {
          type: 'tag',
          tagType: 'discontinuity'
        });

        return;
      }

      match = /^#EXT-X-PROGRAM-DATE-TIME:?(.*)$/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'program-date-time'
        };

        if (match[1]) {
          // keep both the raw string and a parsed Date
          event.dateTimeString = match[1];
          event.dateTimeObject = new Date(match[1]);
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-KEY:?(.*)$/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'key'
        };

        if (match[1]) {
          event.attributes = parseAttributes(match[1]); // parse the IV string into a Uint32Array

          if (event.attributes.IV) {
            // strip an optional leading "0x"/"0X"
            if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
              event.attributes.IV = event.attributes.IV.substring(2);
            }

            // split the hex string into four 8-hex-digit (32-bit) words
            event.attributes.IV = event.attributes.IV.match(/.{8}/g);
            event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
            event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
            event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
            event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
            event.attributes.IV = new Uint32Array(event.attributes.IV);
          }
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-START:?(.*)$/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'start'
        };

        if (match[1]) {
          event.attributes = parseAttributes(match[1]);
          event.attributes['TIME-OFFSET'] = parseFloat(event.attributes['TIME-OFFSET']);
          event.attributes.PRECISE = /YES/.test(event.attributes.PRECISE);
        }

        _this2.trigger('data', event);

        return;
      }

      // NOTE: CUE-OUT-CONT must be tested before CUE-OUT — the CUE-OUT
      // pattern would otherwise also match CUE-OUT-CONT lines
      match = /^#EXT-X-CUE-OUT-CONT:?(.*)?$/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'cue-out-cont'
        };

        if (match[1]) {
          event.data = match[1];
        } else {
          event.data = '';
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-CUE-OUT:?(.*)?$/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'cue-out'
        };

        if (match[1]) {
          event.data = match[1];
        } else {
          event.data = '';
        }

        _this2.trigger('data', event);

        return;
      }

      match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(newLine);

      if (match) {
        event = {
          type: 'tag',
          tagType: 'cue-in'
        };

        if (match[1]) {
          event.data = match[1];
        } else {
          event.data = '';
        }

        _this2.trigger('data', event);

        return;
      } // unknown tag type


      // fall-through: an EXT tag nothing above recognised; `data` is the
      // line with the leading "#EXT" removed
      _this2.trigger('data', {
        type: 'tag',
        data: newLine.slice(4)
      });
    });
  }
  /**
   * Add a parser for custom headers
   *
   * @param {Object} options a map of options for the added parser
   * @param {RegExp} options.expression a regular expression to match the custom header
   * @param {string} options.customType the custom type to register to the output
   * @param {Function} [options.dataParser] function to parse the line into an object
   * @param {boolean} [options.segment] should tag data be attached to the segment object
   */
  ;

  _proto.addParser = function addParser(_ref) {
    var _this3 = this;

    var expression = _ref.expression,
        customType = _ref.customType,
        dataParser = _ref.dataParser,
        segment = _ref.segment;

    // default to passing the raw line through unparsed
    if (typeof dataParser !== 'function') {
      dataParser = function dataParser(line) {
        return line;
      };
    }

    this.customParsers.push(function (line) {
      var match = expression.exec(line);

      if (match) {
        _this3.trigger('data', {
          type: 'custom',
          data: dataParser(line),
          customType: customType,
          segment: segment
        });

        // truthy return tells push() the line has been consumed
        return true;
      }
    });
  }
  /**
   * Add a custom header mapper
   *
   * @param {Object} options
   * @param {RegExp} options.expression a regular expression to match the custom header
   * @param {Function} options.map function to translate tag into a different tag
   */
  ;

  _proto.addTagMapper = function addTagMapper(_ref2) {
    var expression = _ref2.expression,
        map = _ref2.map;

    // non-matching lines pass through unchanged (push() skips duplicates)
    var mapFn = function mapFn(line) {
      if (expression.test(line)) {
        return map(line);
      }

      return line;
    };

    this.tagMappers.push(mapFn);
  };

  return ParseStream;
}(Stream);
1071
/**
 * Decode a base64 string into a Uint8Array of its raw bytes.
 *
 * @param {string} b64Text base64-encoded input (falsy values decode to empty)
 * @return {Uint8Array} the decoded bytes
 */
function decodeB64ToUint8Array(b64Text) {
  // atob yields a "binary string": one char code (0-255) per byte
  var binaryString = window_1.atob(b64Text || '');
  var byteCount = binaryString.length;
  var bytes = new Uint8Array(byteCount);

  for (var i = 0; i < byteCount; ++i) {
    bytes[i] = binaryString.charCodeAt(i);
  }

  return bytes;
}
1082
1083 /**
1084 * A parser for M3U8 files. The current interpretation of the input is
1085 * exposed as a property `manifest` on parser objects. It's just two lines to
1086 * create and parse a manifest once you have the contents available as a string:
1087 *
1088 * ```js
1089 * var parser = new m3u8.Parser();
1090 * parser.push(xhr.responseText);
1091 * ```
1092 *
1093 * New input can later be applied to update the manifest object by calling
1094 * `push` again.
1095 *
1096 * The parser attempts to create a usable manifest object even if the
1097 * underlying input is somewhat nonsensical. It emits `info` and `warning`
1098 * events during the parse if it encounters input that seems invalid or
1099 * requires some property of the manifest object to be defaulted.
1100 *
1101 * @class Parser
1102 * @extends Stream
1103 */
1104
1105 var Parser =
1106 /*#__PURE__*/
1107 function (_Stream) {
1108 _inheritsLoose(Parser, _Stream);
1109
1110 function Parser() {
1111 var _this;
1112
1113 _this = _Stream.call(this) || this;
1114 _this.lineStream = new LineStream();
1115 _this.parseStream = new ParseStream();
1116
1117 _this.lineStream.pipe(_this.parseStream);
1118 /* eslint-disable consistent-this */
1119
1120 var self = _assertThisInitialized(_this);
1121 /* eslint-enable consistent-this */
1122
1123 var uris = [];
1124 var currentUri = {}; // if specified, the active EXT-X-MAP definition
1125
1126 var currentMap; // if specified, the active decryption key
1127
1128 var _key;
1129
1130 var noop = function noop() {};
1131
1132 var defaultMediaGroups = {
1133 'AUDIO': {},
1134 'VIDEO': {},
1135 'CLOSED-CAPTIONS': {},
1136 'SUBTITLES': {}
1137 }; // This is the Widevine UUID from DASH IF IOP. The same exact string is
1138 // used in MPDs with Widevine encrypted streams.
1139
1140 var widevineUuid = 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'; // group segments into numbered timelines delineated by discontinuities
1141
1142 var currentTimeline = 0; // the manifest is empty until the parse stream begins delivering data
1143
1144 _this.manifest = {
1145 allowCache: true,
1146 discontinuityStarts: [],
1147 segments: []
1148 }; // update the manifest with the m3u8 entry from the parse stream
1149
1150 _this.parseStream.on('data', function (entry) {
1151 var mediaGroup;
1152 var rendition;
1153 ({
1154 tag: function tag() {
1155 // switch based on the tag type
1156 (({
1157 'allow-cache': function allowCache() {
1158 this.manifest.allowCache = entry.allowed;
1159
1160 if (!('allowed' in entry)) {
1161 this.trigger('info', {
1162 message: 'defaulting allowCache to YES'
1163 });
1164 this.manifest.allowCache = true;
1165 }
1166 },
1167 byterange: function byterange() {
1168 var byterange = {};
1169
1170 if ('length' in entry) {
1171 currentUri.byterange = byterange;
1172 byterange.length = entry.length;
1173
1174 if (!('offset' in entry)) {
1175 this.trigger('info', {
1176 message: 'defaulting offset to zero'
1177 });
1178 entry.offset = 0;
1179 }
1180 }
1181
1182 if ('offset' in entry) {
1183 currentUri.byterange = byterange;
1184 byterange.offset = entry.offset;
1185 }
1186 },
1187 endlist: function endlist() {
1188 this.manifest.endList = true;
1189 },
1190 inf: function inf() {
1191 if (!('mediaSequence' in this.manifest)) {
1192 this.manifest.mediaSequence = 0;
1193 this.trigger('info', {
1194 message: 'defaulting media sequence to zero'
1195 });
1196 }
1197
1198 if (!('discontinuitySequence' in this.manifest)) {
1199 this.manifest.discontinuitySequence = 0;
1200 this.trigger('info', {
1201 message: 'defaulting discontinuity sequence to zero'
1202 });
1203 }
1204
1205 if (entry.duration > 0) {
1206 currentUri.duration = entry.duration;
1207 }
1208
1209 if (entry.duration === 0) {
1210 currentUri.duration = 0.01;
1211 this.trigger('info', {
1212 message: 'updating zero segment duration to a small value'
1213 });
1214 }
1215
1216 this.manifest.segments = uris;
1217 },
1218 key: function key() {
1219 if (!entry.attributes) {
1220 this.trigger('warn', {
1221 message: 'ignoring key declaration without attribute list'
1222 });
1223 return;
1224 } // clear the active encryption key
1225
1226
1227 if (entry.attributes.METHOD === 'NONE') {
1228 _key = null;
1229 return;
1230 }
1231
1232 if (!entry.attributes.URI) {
1233 this.trigger('warn', {
1234 message: 'ignoring key declaration without URI'
1235 });
1236 return;
1237 } // check if the content is encrypted for Widevine
1238 // Widevine/HLS spec: https://storage.googleapis.com/wvdocs/Widevine_DRM_HLS.pdf
1239
1240
1241 if (entry.attributes.KEYFORMAT === widevineUuid) {
1242 var VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR', 'SAMPLE-AES-CENC'];
1243
1244 if (VALID_METHODS.indexOf(entry.attributes.METHOD) === -1) {
1245 this.trigger('warn', {
1246 message: 'invalid key method provided for Widevine'
1247 });
1248 return;
1249 }
1250
1251 if (entry.attributes.METHOD === 'SAMPLE-AES-CENC') {
1252 this.trigger('warn', {
1253 message: 'SAMPLE-AES-CENC is deprecated, please use SAMPLE-AES-CTR instead'
1254 });
1255 }
1256
1257 if (entry.attributes.URI.substring(0, 23) !== 'data:text/plain;base64,') {
1258 this.trigger('warn', {
1259 message: 'invalid key URI provided for Widevine'
1260 });
1261 return;
1262 }
1263
1264 if (!(entry.attributes.KEYID && entry.attributes.KEYID.substring(0, 2) === '0x')) {
1265 this.trigger('warn', {
1266 message: 'invalid key ID provided for Widevine'
1267 });
1268 return;
1269 } // if Widevine key attributes are valid, store them as `contentProtection`
1270 // on the manifest to emulate Widevine tag structure in a DASH mpd
1271
1272
1273 this.manifest.contentProtection = {
1274 'com.widevine.alpha': {
1275 attributes: {
1276 schemeIdUri: entry.attributes.KEYFORMAT,
1277 // remove '0x' from the key id string
1278 keyId: entry.attributes.KEYID.substring(2)
1279 },
1280 // decode the base64-encoded PSSH box
1281 pssh: decodeB64ToUint8Array(entry.attributes.URI.split(',')[1])
1282 }
1283 };
1284 return;
1285 }
1286
1287 if (!entry.attributes.METHOD) {
1288 this.trigger('warn', {
1289 message: 'defaulting key method to AES-128'
1290 });
1291 } // setup an encryption key for upcoming segments
1292
1293
1294 _key = {
1295 method: entry.attributes.METHOD || 'AES-128',
1296 uri: entry.attributes.URI
1297 };
1298
1299 if (typeof entry.attributes.IV !== 'undefined') {
1300 _key.iv = entry.attributes.IV;
1301 }
1302 },
1303 'media-sequence': function mediaSequence() {
1304 if (!isFinite(entry.number)) {
1305 this.trigger('warn', {
1306 message: 'ignoring invalid media sequence: ' + entry.number
1307 });
1308 return;
1309 }
1310
1311 this.manifest.mediaSequence = entry.number;
1312 },
1313 'discontinuity-sequence': function discontinuitySequence() {
1314 if (!isFinite(entry.number)) {
1315 this.trigger('warn', {
1316 message: 'ignoring invalid discontinuity sequence: ' + entry.number
1317 });
1318 return;
1319 }
1320
1321 this.manifest.discontinuitySequence = entry.number;
1322 currentTimeline = entry.number;
1323 },
1324 'playlist-type': function playlistType() {
1325 if (!/VOD|EVENT/.test(entry.playlistType)) {
1326 this.trigger('warn', {
1327 message: 'ignoring unknown playlist type: ' + entry.playlist
1328 });
1329 return;
1330 }
1331
1332 this.manifest.playlistType = entry.playlistType;
1333 },
1334 map: function map() {
1335 currentMap = {};
1336
1337 if (entry.uri) {
1338 currentMap.uri = entry.uri;
1339 }
1340
1341 if (entry.byterange) {
1342 currentMap.byterange = entry.byterange;
1343 }
1344 },
1345 'stream-inf': function streamInf() {
1346 this.manifest.playlists = uris;
1347 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
1348
1349 if (!entry.attributes) {
1350 this.trigger('warn', {
1351 message: 'ignoring empty stream-inf attributes'
1352 });
1353 return;
1354 }
1355
1356 if (!currentUri.attributes) {
1357 currentUri.attributes = {};
1358 }
1359
1360 _extends(currentUri.attributes, entry.attributes);
1361 },
1362 media: function media() {
1363 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
1364
1365 if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
1366 this.trigger('warn', {
1367 message: 'ignoring incomplete or missing media group'
1368 });
1369 return;
1370 } // find the media group, creating defaults as necessary
1371
1372
1373 var mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
1374 mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
1375 mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']]; // collect the rendition metadata
1376
1377 rendition = {
1378 default: /yes/i.test(entry.attributes.DEFAULT)
1379 };
1380
1381 if (rendition.default) {
1382 rendition.autoselect = true;
1383 } else {
1384 rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
1385 }
1386
1387 if (entry.attributes.LANGUAGE) {
1388 rendition.language = entry.attributes.LANGUAGE;
1389 }
1390
1391 if (entry.attributes.URI) {
1392 rendition.uri = entry.attributes.URI;
1393 }
1394
1395 if (entry.attributes['INSTREAM-ID']) {
1396 rendition.instreamId = entry.attributes['INSTREAM-ID'];
1397 }
1398
1399 if (entry.attributes.CHARACTERISTICS) {
1400 rendition.characteristics = entry.attributes.CHARACTERISTICS;
1401 }
1402
1403 if (entry.attributes.FORCED) {
1404 rendition.forced = /yes/i.test(entry.attributes.FORCED);
1405 } // insert the new rendition
1406
1407
1408 mediaGroup[entry.attributes.NAME] = rendition;
1409 },
1410 discontinuity: function discontinuity() {
1411 currentTimeline += 1;
1412 currentUri.discontinuity = true;
1413 this.manifest.discontinuityStarts.push(uris.length);
1414 },
1415 'program-date-time': function programDateTime() {
1416 if (typeof this.manifest.dateTimeString === 'undefined') {
1417 // PROGRAM-DATE-TIME is a media-segment tag, but for backwards
1418 // compatibility, we add the first occurence of the PROGRAM-DATE-TIME tag
1419 // to the manifest object
1420 // TODO: Consider removing this in future major version
1421 this.manifest.dateTimeString = entry.dateTimeString;
1422 this.manifest.dateTimeObject = entry.dateTimeObject;
1423 }
1424
1425 currentUri.dateTimeString = entry.dateTimeString;
1426 currentUri.dateTimeObject = entry.dateTimeObject;
1427 },
1428 targetduration: function targetduration() {
1429 if (!isFinite(entry.duration) || entry.duration < 0) {
1430 this.trigger('warn', {
1431 message: 'ignoring invalid target duration: ' + entry.duration
1432 });
1433 return;
1434 }
1435
1436 this.manifest.targetDuration = entry.duration;
1437 },
1438 totalduration: function totalduration() {
1439 if (!isFinite(entry.duration) || entry.duration < 0) {
1440 this.trigger('warn', {
1441 message: 'ignoring invalid total duration: ' + entry.duration
1442 });
1443 return;
1444 }
1445
1446 this.manifest.totalDuration = entry.duration;
1447 },
1448 start: function start() {
1449 if (!entry.attributes || isNaN(entry.attributes['TIME-OFFSET'])) {
1450 this.trigger('warn', {
1451 message: 'ignoring start declaration without appropriate attribute list'
1452 });
1453 return;
1454 }
1455
1456 this.manifest.start = {
1457 timeOffset: entry.attributes['TIME-OFFSET'],
1458 precise: entry.attributes.PRECISE
1459 };
1460 },
1461 'cue-out': function cueOut() {
1462 currentUri.cueOut = entry.data;
1463 },
1464 'cue-out-cont': function cueOutCont() {
1465 currentUri.cueOutCont = entry.data;
1466 },
1467 'cue-in': function cueIn() {
1468 currentUri.cueIn = entry.data;
1469 }
1470 })[entry.tagType] || noop).call(self);
1471 },
// A non-tag line is a segment URI: finalize the segment object that the
// preceding tags have been building up and append it to the segment list.
uri: function uri() {
  currentUri.uri = entry.uri;
  uris.push(currentUri);

  // if no explicit duration was declared, fall back to the target duration
  if (this.manifest.targetDuration && !('duration' in currentUri)) {
    this.trigger('warn', {
      message: 'defaulting segment duration to the target duration'
    });
    currentUri.duration = this.manifest.targetDuration;
  }

  // annotate with the active encryption key (set by EXT-X-KEY), if any
  if (_key) {
    currentUri.key = _key;
  }

  // record which discontinuity timeline this segment belongs to
  currentUri.timeline = currentTimeline;

  // annotate with initialization segment information (EXT-X-MAP), if any
  if (currentMap) {
    currentUri.map = currentMap;
  }

  // start a fresh accumulator for the next segment's tags
  currentUri = {};
},
1497 comment: function comment() {// comments are not important for playback
1498 },
1499 custom: function custom() {
1500 // if this is segment-level data attach the output to the segment
1501 if (entry.segment) {
1502 currentUri.custom = currentUri.custom || {};
1503 currentUri.custom[entry.customType] = entry.data; // if this is manifest-level data attach to the top level manifest object
1504 } else {
1505 this.manifest.custom = this.manifest.custom || {};
1506 this.manifest.custom[entry.customType] = entry.data;
1507 }
1508 }
1509 })[entry.type].call(self);
1510 });
1511
1512 return _this;
1513 }
1514 /**
1515 * Parse the input string and update the manifest object.
1516 *
1517 * @param {string} chunk a potentially incomplete portion of the manifest
1518 */
1519
1520 var _proto = Parser.prototype;
1521
_proto.push = function push(chunk) {
  // feed the chunk into the line stream; complete lines flow through the
  // parse stream, whose 'data' handler updates `this.manifest`
  this.lineStream.push(chunk);
}
1525 /**
1526 * Flush any remaining input. This can be handy if the last line of an M3U8
1527 * manifest did not contain a trailing newline but the file has been
1528 * completely received.
1529 */
1530 ;
1531
_proto.end = function end() {
  // flush any buffered input by forcing a trailing newline through the
  // line stream, so a manifest whose final line lacks a newline still parses
  this.lineStream.push('\n');
}
1536 /**
1537 * Add an additional parser for non-standard tags
1538 *
1539 * @param {Object} options a map of options for the added parser
1540 * @param {RegExp} options.expression a regular expression to match the custom header
1541 * @param {string} options.type the type to register to the output
1542 * @param {Function} [options.dataParser] function to parse the line into an object
1543 * @param {boolean} [options.segment] should tag data be attached to the segment object
1544 */
1545 ;
1546
_proto.addParser = function addParser(options) {
  // delegate registration of non-standard tag parsers to the parse stream
  this.parseStream.addParser(options);
}
1550 /**
1551 * Add a custom header mapper
1552 *
1553 * @param {Object} options
1554 * @param {RegExp} options.expression a regular expression to match the custom header
1555 * @param {Function} options.map function to translate tag into a different tag
1556 */
1557 ;
1558
_proto.addTagMapper = function addTagMapper(options) {
  // delegate registration of custom tag-to-tag mappers to the parse stream
  this.parseStream.addTagMapper(options);
};
1562
1563 return Parser;
1564 }(Stream);
1565
// Babel helper: guard against a class constructor being invoked without `new`.
var classCallCheck = function (instance, Constructor) {
  if (instance instanceof Constructor) {
    return;
  }
  throw new TypeError("Cannot call a class as a function");
};
1571
// Babel helper: install prototype and static members on a constructor as
// non-enumerable, configurable, writable properties.
var createClass = function () {
  function defineProperties(target, props) {
    props.forEach(function (descriptor) {
      // normalize the descriptor before defining the property
      descriptor.enumerable = descriptor.enumerable || false;
      descriptor.configurable = true;
      if ("value" in descriptor) descriptor.writable = true;
      Object.defineProperty(target, descriptor.key, descriptor);
    });
  }

  return function (Constructor, protoProps, staticProps) {
    if (protoProps) defineProperties(Constructor.prototype, protoProps);
    if (staticProps) defineProperties(Constructor, staticProps);
    return Constructor;
  };
}();
1589
// Babel helper backing `super.prop` reads: look up `property` starting at
// `object`, walking the prototype chain, invoking getters with `receiver`
// as `this`.
var get = function get(object, property, receiver) {
  if (object === null) object = Function.prototype;
  var desc = Object.getOwnPropertyDescriptor(object, property);

  if (desc === undefined) {
    // not an own property: recurse up the prototype chain
    var parent = Object.getPrototypeOf(object);
    return parent === null ? undefined : get(parent, property, receiver);
  }

  if ("value" in desc) {
    // plain data property
    return desc.value;
  }

  // accessor property: call its getter (if any) against the receiver
  var getter = desc.get;
  return getter === undefined ? undefined : getter.call(receiver);
};
1614
// Babel helper: wire up prototype-chain inheritance between two constructors,
// including static inheritance via the constructors' own [[Prototype]].
var inherits = function (subClass, superClass) {
  var validSuper = typeof superClass === "function" || superClass === null;

  if (!validSuper) {
    throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
  }

  // instances of subClass delegate to superClass.prototype
  subClass.prototype = Object.create(superClass && superClass.prototype, {
    constructor: {
      value: subClass,
      enumerable: false,
      writable: true,
      configurable: true
    }
  });

  // statics: subClass delegates to superClass itself
  if (superClass) {
    if (Object.setPrototypeOf) {
      Object.setPrototypeOf(subClass, superClass);
    } else {
      subClass.__proto__ = superClass;
    }
  }
};
1630
// Babel helper: implements the "constructor may return an object" rule.
// If the super call produced an object (or function), use it as the
// instance; otherwise fall back to `self`. Throws if super() never ran.
var possibleConstructorReturn = function (self, call) {
  if (!self) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }

  var callIsObjectLike = typeof call === "object" || typeof call === "function";

  return call && callIsObjectLike ? call : self;
};
1638
// Babel helper backing array destructuring: converts `arr` (an array or any
// iterable) into an array, taking at most `i` elements when `i` is given.
var slicedToArray = function () {
  // drain up to `i` values from an iterator, closing the iterator
  // correctly if iteration stops early or throws
  function sliceIterator(arr, i) {
    var _arr = [];
    var _n = true; // true when the last .next() reported done (normal completion)
    var _d = false; // true when the loop body threw
    var _e = undefined; // the caught error, rethrown after iterator cleanup

    try {
      for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) {
        _arr.push(_s.value);

        if (i && _arr.length === i) break;
      }
    } catch (err) {
      _d = true;
      _e = err;
    } finally {
      try {
        // if we exited mid-iteration (break or throw), invoke the
        // iterator's return() per the iterator protocol so it can
        // release any resources it holds
        if (!_n && _i["return"]) _i["return"]();
      } finally {
        // the original error (if any) takes precedence
        if (_d) throw _e;
      }
    }

    return _arr;
  }

  return function (arr, i) {
    if (Array.isArray(arr)) {
      return arr;
    } else if (Symbol.iterator in Object(arr)) {
      return sliceIterator(arr, i);
    } else {
      throw new TypeError("Invalid attempt to destructure non-iterable instance");
    }
  };
}();
1676
1677 /**
1678 * @file playlist-loader.js
1679 *
1680 * A state machine that manages the loading, caching, and updating of
1681 * M3U8 playlists.
1682 *
1683 */
1684
1685 var mergeOptions = videojs.mergeOptions,
1686 EventTarget = videojs.EventTarget,
1687 log = videojs.log;
1688
1689 /**
1690 * Loops through all supported media groups in master and calls the provided
1691 * callback for each group
1692 *
1693 * @param {Object} master
1694 * The parsed master manifest object
1695 * @param {Function} callback
1696 * Callback to call for each media group
1697 */
1698
/**
 * Invokes `callback` once per rendition in every supported media group
 * (AUDIO and SUBTITLES) of the parsed master manifest.
 *
 * @param {Object} master the parsed master manifest object
 * @param {Function} callback called with (properties, mediaType, groupKey, labelKey)
 */
var forEachMediaGroup = function forEachMediaGroup(master, callback) {
  var supportedTypes = ['AUDIO', 'SUBTITLES'];

  supportedTypes.forEach(function (mediaType) {
    var groups = master.mediaGroups[mediaType];

    for (var groupKey in groups) {
      var labels = groups[groupKey];

      for (var labelKey in labels) {
        callback(labels[labelKey], mediaType, groupKey, labelKey);
      }
    }
  });
};
1710
1711 /**
1712 * Returns a new array of segments that is the result of merging
1713 * properties from an older list of segments onto an updated
1714 * list. No properties on the updated playlist will be overridden.
1715 *
1716 * @param {Array} original the outdated list of segments
1717 * @param {Array} update the updated list of segments
1718 * @param {Number=} offset the index of the first update
1719 * segment in the original segment list. For non-live playlists,
1720 * this should always be zero and does not need to be
1721 * specified. For live playlists, it should be the difference
1722 * between the media sequence numbers in the original and updated
1723 * playlists.
1724 * @return a list of merged segment objects
1725 */
var updateSegments = function updateSegments(original, update, offset) {
  offset = offset || 0;

  // start from a copy of the updated list; updated properties always win
  var result = update.slice();
  // only the overlapping window of the two lists can be merged
  var stop = Math.min(original.length, update.length + offset);

  for (var i = offset; i < stop; i++) {
    var target = i - offset;

    // fold older segment properties into the updated segment without
    // overriding anything the update already declares
    result[target] = mergeOptions(original[i], result[target]);
  }
  return result;
};
1737
// Resolve the segment URI, its key URI and its init-segment (map) URI
// against `baseUri`, computing each resolved value at most once.
var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
  if (!segment.resolvedUri) {
    segment.resolvedUri = resolveUrl(baseUri, segment.uri);
  }

  var key = segment.key;

  if (key && !key.resolvedUri) {
    key.resolvedUri = resolveUrl(baseUri, key.uri);
  }

  var map = segment.map;

  if (map && !map.resolvedUri) {
    map.resolvedUri = resolveUrl(baseUri, map.uri);
  }
};
1749
1750 /**
1751 * Returns a new master playlist that is the result of merging an
1752 * updated media playlist into the original version. If the
1753 * updated media playlist does not match any of the playlist
1754 * entries in the original master playlist, null is returned.
1755 *
1756 * @param {Object} master a parsed master M3U8 object
1757 * @param {Object} media a parsed media M3U8 object
1758 * @return {Object} a new object that represents the original
1759 * master playlist with the updated media playlist merged in, or
1760 * null if the merge produced no change.
1761 */
var updateMaster = function updateMaster(master, media) {
  // work on a deep copy so callers holding the old master are unaffected
  var result = mergeOptions(master, {});
  // the media playlist must already be known to the master (by id)
  var playlist = result.playlists[media.id];

  if (!playlist) {
    return null;
  }

  // consider the playlist unchanged if the number of segments is equal, the media
  // sequence number is unchanged, and this playlist hasn't become the end of the playlist;
  // null signals "no change" to the caller
  if (playlist.segments && media.segments && playlist.segments.length === media.segments.length && playlist.endList === media.endList && playlist.mediaSequence === media.mediaSequence) {
    return null;
  }

  var mergedPlaylist = mergeOptions(playlist, media);

  // if the update could overlap existing segment information, merge the two segment lists,
  // offset by how far the media sequence has advanced
  if (playlist.segments) {
    mergedPlaylist.segments = updateSegments(playlist.segments, media.segments, media.mediaSequence - playlist.mediaSequence);
  }

  // resolve any segment URIs to prevent us from having to do it later
  mergedPlaylist.segments.forEach(function (segment) {
    resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
  });

  // TODO Right now in the playlists array there are two references to each playlist, one
  // that is referenced by index, and one by URI. The index reference may no longer be
  // necessary.
  for (var i = 0; i < result.playlists.length; i++) {
    if (result.playlists[i].id === media.id) {
      result.playlists[i] = mergedPlaylist;
    }
  }
  // keep the by-id lookup in sync with the indexed entry
  result.playlists[media.id] = mergedPlaylist;
  // URI reference added for backwards compatibility
  result.playlists[media.uri] = mergedPlaylist;

  return result;
};
1802
// Build the canonical playlist id, e.g. `0-media.m3u8`.
var createPlaylistID = function createPlaylistID(index, uri) {
  return [index, uri].join('-');
};
1806
// Set up by-id and by-URI lookups for each media playlist and resolve
// every playlist URI against the master playlist URI.
var setupMediaPlaylists = function setupMediaPlaylists(master) {
  // walk the playlists from last to first (matches the original while(i--) order)
  for (var i = master.playlists.length - 1; i >= 0; i--) {
    var playlist = master.playlists[i];

    playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
    playlist.id = createPlaylistID(i, playlist.uri);

    master.playlists[playlist.id] = playlist;
    // URI reference added for backwards compatibility
    master.playlists[playlist.uri] = playlist;

    if (playlist.attributes) {
      continue;
    }

    // Although the spec states an #EXT-X-STREAM-INF tag MUST have a
    // BANDWIDTH attribute, we can play the stream without it. A poorly
    // formatted master playlist may omit the attribute list entirely, so
    // attach an empty one to prevent undefined references downstream.
    playlist.attributes = {};
    log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
  }
};
1833
// Resolve each media-group rendition URI against the master playlist URI.
var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
  forEachMediaGroup(master, function (properties) {
    if (!properties.uri) {
      return;
    }
    properties.resolvedUri = resolveUrl(master.uri, properties.uri);
  });
};
1841
1842 /**
1843 * Calculates the time to wait before refreshing a live playlist
1844 *
1845 * @param {Object} media
1846 * The current media
1847 * @param {Boolean} update
1848 * True if there were any updates from the last refresh, false otherwise
1849 * @return {Number}
1850 * The time in ms to wait before refreshing the live playlist
1851 */
/**
 * Calculates the time to wait before refreshing a live playlist.
 *
 * @param {Object} media the current media playlist
 * @param {Boolean} update true if the last refresh produced any changes
 * @return {Number} the time in ms to wait before refreshing the playlist
 */
var refreshDelay = function refreshDelay(media, update) {
  var segments = media.segments;
  var finalSegment = segments[segments.length - 1];

  // the playlist changed: poll again after roughly one segment's worth
  // of playback time
  if (update && finalSegment && finalSegment.duration) {
    return finalSegment.duration * 1000;
  }

  // if the playlist is unchanged since the last reload or the last segment
  // duration cannot be determined, try again after half the target duration
  return (media.targetDuration || 10) * 500;
};
1865
1866 /**
1867 * Load a playlist from a remote location
1868 *
1869 * @class PlaylistLoader
1870 * @extends Stream
1871 * @param {String} srcUrl the url to start with
1872 * @param {Boolean} withCredentials the withCredentials xhr option
1873 * @constructor
1874 */
1875
1876 var PlaylistLoader = function (_EventTarget) {
1877 inherits(PlaylistLoader, _EventTarget);
1878
/**
 * @param {String} srcUrl the url of the playlist to start loading
 * @param {Object} hls the hls tech object (provides xhr and options_)
 * @param {Object} [options] withCredentials / handleManifestRedirects flags
 * @throws {Error} when srcUrl is empty
 */
function PlaylistLoader(srcUrl, hls) {
  // transpiled optional third parameter: options defaults to {}
  var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
  classCallCheck(this, PlaylistLoader);

  // transpiled super() call into EventTarget
  var _this = possibleConstructorReturn(this, (PlaylistLoader.__proto__ || Object.getPrototypeOf(PlaylistLoader)).call(this));

  // transpiled destructuring of options with defaults of false
  var _options$withCredenti = options.withCredentials,
      withCredentials = _options$withCredenti === undefined ? false : _options$withCredenti,
      _options$handleManife = options.handleManifestRedirects,
      handleManifestRedirects = _options$handleManife === undefined ? false : _options$handleManife;


  _this.srcUrl = srcUrl;
  _this.hls_ = hls;
  _this.withCredentials = withCredentials;
  _this.handleManifestRedirects = handleManifestRedirects;

  var hlsOptions = hls.options_;

  // custom tag handling supplied through the tech's options, if any
  _this.customTagParsers = hlsOptions && hlsOptions.customTagParsers || [];
  _this.customTagMappers = hlsOptions && hlsOptions.customTagMappers || [];

  if (!_this.srcUrl) {
    throw new Error('A non-empty playlist URL is required');
  }

  // initialize the loader state
  _this.state = 'HAVE_NOTHING';

  // live playlist staleness timeout: re-request the active media playlist
  // when the refresh timer (set in haveMetadata) fires
  _this.on('mediaupdatetimeout', function () {
    if (_this.state !== 'HAVE_METADATA') {
      // only refresh the media playlist if no other activity is going on
      return;
    }

    _this.state = 'HAVE_CURRENT_METADATA';

    _this.request = _this.hls_.xhr({
      uri: resolveUrl(_this.master.uri, _this.media().uri),
      withCredentials: _this.withCredentials
    }, function (error, req) {
      // the loader was disposed while the request was in flight
      if (!_this.request) {
        return;
      }

      if (error) {
        return _this.playlistRequestError(_this.request, _this.media(), 'HAVE_METADATA');
      }

      _this.haveMetadata(_this.request, _this.media().uri, _this.media().id);
    });
  });
  return _this;
}
1935
1936 createClass(PlaylistLoader, [{
1937 key: 'playlistRequestError',
1938 value: function playlistRequestError(xhr, playlist, startingState) {
1939 var uri = playlist.uri,
1940 id = playlist.id;
1941
1942 // any in-flight request is now finished
1943
1944 this.request = null;
1945
1946 if (startingState) {
1947 this.state = startingState;
1948 }
1949
1950 this.error = {
1951 playlist: this.master.playlists[id],
1952 status: xhr.status,
1953 message: 'HLS playlist request error at URL: ' + uri + '.',
1954 responseText: xhr.responseText,
1955 code: xhr.status >= 500 ? 4 : 2
1956 };
1957
1958 this.trigger('error');
1959 }
1960
1961 // update the playlist loader's state in response to a new or
1962 // updated playlist.
1963
1964 }, {
1965 key: 'haveMetadata',
// Parse a freshly downloaded media playlist, merge it into the master,
// and (for live playlists) schedule the next refresh.
value: function haveMetadata(xhr, url, id) {
  var _this2 = this;

  // any in-flight request is now finished
  this.request = null;
  this.state = 'HAVE_METADATA';

  var parser = new Parser();

  // adding custom tag parsers
  this.customTagParsers.forEach(function (customParser) {
    return parser.addParser(customParser);
  });

  // adding custom tag mappers
  this.customTagMappers.forEach(function (mapper) {
    return parser.addTagMapper(mapper);
  });

  parser.push(xhr.responseText);
  parser.end();
  parser.manifest.uri = url;
  parser.manifest.id = id;
  // m3u8-parser does not attach an attributes property to media playlists so make
  // sure that the property is attached to avoid undefined reference errors
  parser.manifest.attributes = parser.manifest.attributes || {};

  // merge this playlist into the master; updateMaster returns null when
  // nothing changed since the last refresh
  var update = updateMaster(this.master, parser.manifest);

  this.targetDuration = parser.manifest.targetDuration;

  if (update) {
    this.master = update;
    this.media_ = this.master.playlists[id];
  } else {
    this.trigger('playlistunchanged');
  }

  // refresh live playlists after a target duration passes; the delay is
  // computed by refreshDelay based on whether this refresh changed anything
  if (!this.media().endList) {
    window_1.clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = window_1.setTimeout(function () {
      _this2.trigger('mediaupdatetimeout');
    }, refreshDelay(this.media(), !!update));
  }

  this.trigger('loadedplaylist');
}
2015
2016 /**
2017 * Abort any outstanding work and clean up.
2018 */
2019
2020 }, {
2021 key: 'dispose',
value: function dispose() {
  // fire 'dispose' first so listeners can clean up before they are removed
  this.trigger('dispose');
  this.stopRequest();
  // cancel any pending live refresh or final-rendition retry timers
  window_1.clearTimeout(this.mediaUpdateTimeout);
  window_1.clearTimeout(this.finalRenditionTimeout);
  // remove all event listeners
  this.off();
}
2029 }, {
2030 key: 'stopRequest',
2031 value: function stopRequest() {
2032 if (this.request) {
2033 var oldRequest = this.request;
2034
2035 this.request = null;
2036 oldRequest.onreadystatechange = null;
2037 oldRequest.abort();
2038 }
2039 }
2040
2041 /**
2042 * When called without any arguments, returns the currently
2043 * active media playlist. When called with a single argument,
2044 * triggers the playlist loader to asynchronously switch to the
2045 * specified media playlist. Calling this method while the
2046 * loader is in the HAVE_NOTHING causes an error to be emitted
2047 * but otherwise has no effect.
2048 *
2049 * @param {Object=} playlist the parsed media playlist
2050 * object to switch to
2051 * @param {Boolean=} is this the last available playlist
2052 *
2053 * @return {Playlist} the current loaded media
2054 */
2055
2056 }, {
2057 key: 'media',
// Getter/setter for the active media playlist; see the doc comment above
// for the full contract.
value: function media(playlist, isFinalRendition) {
  var _this3 = this;

  // getter
  if (!playlist) {
    return this.media_;
  }

  // setter: switching is only legal once the loader has data
  if (this.state === 'HAVE_NOTHING') {
    throw new Error('Cannot switch media playlist from ' + this.state);
  }

  // find the playlist object if the target playlist has been
  // specified by URI
  if (typeof playlist === 'string') {
    if (!this.master.playlists[playlist]) {
      throw new Error('Unknown playlist URI: ' + playlist);
    }
    playlist = this.master.playlists[playlist];
  }

  window_1.clearTimeout(this.finalRenditionTimeout);

  // the last available rendition: delay the switch by half a target
  // duration (or 5s) to give the current rendition a chance to recover
  if (isFinalRendition) {
    var delay = playlist.targetDuration / 2 * 1000 || 5 * 1000;

    this.finalRenditionTimeout = window_1.setTimeout(this.media.bind(this, playlist, false), delay);
    return;
  }

  var startingState = this.state;
  var mediaChange = !this.media_ || playlist.id !== this.media_.id;

  // switch to fully loaded playlists immediately
  if (this.master.playlists[playlist.id].endList) {
    // abort outstanding playlist requests
    if (this.request) {
      this.request.onreadystatechange = null;
      this.request.abort();
      this.request = null;
    }
    this.state = 'HAVE_METADATA';
    this.media_ = playlist;

    // trigger media change if the active media has been updated
    if (mediaChange) {
      this.trigger('mediachanging');
      this.trigger('mediachange');
    }
    return;
  }

  // switching to the active playlist is a no-op
  if (!mediaChange) {
    return;
  }

  this.state = 'SWITCHING_MEDIA';

  // there is already an outstanding playlist request
  if (this.request) {
    if (playlist.resolvedUri === this.request.url) {
      // requesting to switch to the same playlist multiple times
      // has no effect after the first
      return;
    }
    this.request.onreadystatechange = null;
    this.request.abort();
    this.request = null;
  }

  // request the new playlist
  if (this.media_) {
    this.trigger('mediachanging');
  }

  this.request = this.hls_.xhr({
    uri: playlist.resolvedUri,
    withCredentials: this.withCredentials
  }, function (error, req) {
    // the loader was disposed while the request was in flight
    if (!_this3.request) {
      return;
    }

    // track the final URI in case the server redirected the manifest request
    playlist.resolvedUri = resolveManifestRedirect(_this3.handleManifestRedirects, playlist.resolvedUri, req);

    if (error) {
      return _this3.playlistRequestError(_this3.request, playlist, startingState);
    }

    _this3.haveMetadata(req, playlist.uri, playlist.id);

    // fire loadedmetadata the first time a media playlist is loaded
    if (startingState === 'HAVE_MASTER') {
      _this3.trigger('loadedmetadata');
    } else {
      _this3.trigger('mediachange');
    }
  });
}
2160
2161 /**
2162 * pause loading of the playlist
2163 */
2164
2165 }, {
2166 key: 'pause',
// Stop all network activity and rewind the loader state to a stable value.
value: function pause() {
  this.stopRequest();
  window_1.clearTimeout(this.mediaUpdateTimeout);
  if (this.state === 'HAVE_NOTHING') {
    // If we pause the loader before any data has been retrieved, it's as if we never
    // started, so reset to an unstarted state.
    this.started = false;
  }
  // Need to restore state now that no activity is happening
  if (this.state === 'SWITCHING_MEDIA') {
    // if the loader was in the process of switching media, it should either return to
    // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
    // playlist yet. This is determined by the existence of loader.media_
    if (this.media_) {
      this.state = 'HAVE_METADATA';
    } else {
      this.state = 'HAVE_MASTER';
    }
  } else if (this.state === 'HAVE_CURRENT_METADATA') {
    // a refresh request was in flight; fall back to the steady state
    this.state = 'HAVE_METADATA';
  }
}
2189
2190 /**
2191 * start loading of the playlist
2192 */
2193
2194 }, {
2195 key: 'load',
2196 value: function load(isFinalRendition) {
2197 var _this4 = this;
2198
2199 window_1.clearTimeout(this.mediaUpdateTimeout);
2200
2201 var media = this.media();
2202
2203 if (isFinalRendition) {
2204 var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
2205
2206 this.mediaUpdateTimeout = window_1.setTimeout(function () {
2207 return _this4.load();
2208 }, delay);
2209 return;
2210 }
2211
2212 if (!this.started) {
2213 this.start();
2214 return;
2215 }
2216
2217 if (media && !media.endList) {
2218 this.trigger('mediaupdatetimeout');
2219 } else {
2220 this.trigger('loadedplaylist');
2221 }
2222 }
2223
2224 /**
2225 * start loading of the playlist
2226 */
2227
2228 }, {
2229 key: 'start',
    value: function start() {
      var _this5 = this;

      this.started = true;

      // request the specified URL
      this.request = this.hls_.xhr({
        uri: this.srcUrl,
        withCredentials: this.withCredentials
      }, function (error, req) {
        // disposed while the request was in flight; drop the response
        if (!_this5.request) {
          return;
        }

        // clear the loader's request reference
        _this5.request = null;

        if (error) {
          // surface a MEDIA_ERR_NETWORK-style error object on the loader
          _this5.error = {
            status: req.status,
            message: 'HLS playlist request error at URL: ' + _this5.srcUrl + '.',
            responseText: req.responseText,
            // MEDIA_ERR_NETWORK
            code: 2
          };
          // nothing was loaded yet, so allow start() to be retried
          if (_this5.state === 'HAVE_NOTHING') {
            _this5.started = false;
          }
          return _this5.trigger('error');
        }

        var parser = new Parser();

        // adding custom tag parsers
        _this5.customTagParsers.forEach(function (customParser) {
          return parser.addParser(customParser);
        });

        // adding custom tag mappers
        _this5.customTagMappers.forEach(function (mapper) {
          return parser.addTagMapper(mapper);
        });

        // parse the full manifest body
        parser.push(req.responseText);
        parser.end();

        _this5.state = 'HAVE_MASTER';

        // follow HTTP redirects when configured to do so
        _this5.srcUrl = resolveManifestRedirect(_this5.handleManifestRedirects, _this5.srcUrl, req);

        parser.manifest.uri = _this5.srcUrl;

        // loaded a master playlist
        if (parser.manifest.playlists) {
          _this5.master = parser.manifest;

          setupMediaPlaylists(_this5.master);
          resolveMediaGroupUris(_this5.master);

          _this5.trigger('loadedplaylist');
          if (!_this5.request) {
            // no media playlist was specifically selected so start
            // from the first listed one
            _this5.media(parser.manifest.playlists[0]);
          }
          return;
        }

        var id = createPlaylistID(0, _this5.srcUrl);

        // loaded a media playlist
        // infer a master playlist if none was previously requested
        _this5.master = {
          mediaGroups: {
            'AUDIO': {},
            'VIDEO': {},
            'CLOSED-CAPTIONS': {},
            'SUBTITLES': {}
          },
          uri: window_1.location.href,
          playlists: [{
            uri: _this5.srcUrl,
            id: id,
            resolvedUri: _this5.srcUrl,
            // m3u8-parser does not attach an attributes property to media playlists so make
            // sure that the property is attached to avoid undefined reference errors
            attributes: {}
          }]
        };
        // index the single playlist by id as well as by position
        _this5.master.playlists[id] = _this5.master.playlists[0];
        // URI reference added for backwards compatibility
        _this5.master.playlists[_this5.srcUrl] = _this5.master.playlists[0];

        _this5.haveMetadata(req, _this5.srcUrl, id);
        return _this5.trigger('loadedmetadata');
      });
    }
2328 }]);
2329 return PlaylistLoader;
2330 }(EventTarget);
2331
2332 /**
2333 * @file playlist.js
2334 *
2335 * Playlist related utilities.
2336 */
2337
  // videojs helper used to construct TimeRanges objects (see seekable below)
  var createTimeRange = videojs.createTimeRange;
2339
2340 /**
2341 * walk backward until we find a duration we can use
2342 * or return a failure
2343 *
2344 * @param {Playlist} playlist the playlist to walk through
2345 * @param {Number} endSequence the mediaSequence to stop walking on
2346 */
2347
2348 var backwardDuration = function backwardDuration(playlist, endSequence) {
2349 var result = 0;
2350 var i = endSequence - playlist.mediaSequence;
2351 // if a start time is available for segment immediately following
2352 // the interval, use it
2353 var segment = playlist.segments[i];
2354
2355 // Walk backward until we find the latest segment with timeline
2356 // information that is earlier than endSequence
2357 if (segment) {
2358 if (typeof segment.start !== 'undefined') {
2359 return { result: segment.start, precise: true };
2360 }
2361 if (typeof segment.end !== 'undefined') {
2362 return {
2363 result: segment.end - segment.duration,
2364 precise: true
2365 };
2366 }
2367 }
2368 while (i--) {
2369 segment = playlist.segments[i];
2370 if (typeof segment.end !== 'undefined') {
2371 return { result: result + segment.end, precise: true };
2372 }
2373
2374 result += segment.duration;
2375
2376 if (typeof segment.start !== 'undefined') {
2377 return { result: result + segment.start, precise: true };
2378 }
2379 }
2380 return { result: result, precise: false };
2381 };
2382
2383 /**
2384 * walk forward until we find a duration we can use
2385 * or return a failure
2386 *
2387 * @param {Playlist} playlist the playlist to walk through
2388 * @param {Number} endSequence the mediaSequence to stop walking on
2389 */
2390 var forwardDuration = function forwardDuration(playlist, endSequence) {
2391 var result = 0;
2392 var segment = void 0;
2393 var i = endSequence - playlist.mediaSequence;
2394 // Walk forward until we find the earliest segment with timeline
2395 // information
2396
2397 for (; i < playlist.segments.length; i++) {
2398 segment = playlist.segments[i];
2399 if (typeof segment.start !== 'undefined') {
2400 return {
2401 result: segment.start - result,
2402 precise: true
2403 };
2404 }
2405
2406 result += segment.duration;
2407
2408 if (typeof segment.end !== 'undefined') {
2409 return {
2410 result: segment.end - result,
2411 precise: true
2412 };
2413 }
2414 }
2415 // indicate we didn't find a useful duration estimate
2416 return { result: -1, precise: false };
2417 };
2418
2419 /**
2420 * Calculate the media duration from the segments associated with a
2421 * playlist. The duration of a subinterval of the available segments
2422 * may be calculated by specifying an end index.
2423 *
2424 * @param {Object} playlist a media playlist object
2425 * @param {Number=} endSequence an exclusive upper boundary
2426 * for the playlist. Defaults to playlist length.
2427 * @param {Number} expired the amount of time that has dropped
2428 * off the front of the playlist in a live scenario
2429 * @return {Number} the duration between the first available segment
2430 * and end index.
2431 */
2432 var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
2433 var backward = void 0;
2434 var forward = void 0;
2435
2436 if (typeof endSequence === 'undefined') {
2437 endSequence = playlist.mediaSequence + playlist.segments.length;
2438 }
2439
2440 if (endSequence < playlist.mediaSequence) {
2441 return 0;
2442 }
2443
2444 // do a backward walk to estimate the duration
2445 backward = backwardDuration(playlist, endSequence);
2446 if (backward.precise) {
2447 // if we were able to base our duration estimate on timing
2448 // information provided directly from the Media Source, return
2449 // it
2450 return backward.result;
2451 }
2452
2453 // walk forward to see if a precise duration estimate can be made
2454 // that way
2455 forward = forwardDuration(playlist, endSequence);
2456 if (forward.precise) {
2457 // we found a segment that has been buffered and so it's
2458 // position is known precisely
2459 return forward.result;
2460 }
2461
2462 // return the less-precise, playlist-based duration estimate
2463 return backward.result + expired;
2464 };
2465
2466 /**
2467 * Calculates the duration of a playlist. If a start and end index
2468 * are specified, the duration will be for the subset of the media
2469 * timeline between those two indices. The total duration for live
2470 * playlists is always Infinity.
2471 *
2472 * @param {Object} playlist a media playlist object
2473 * @param {Number=} endSequence an exclusive upper
2474 * boundary for the playlist. Defaults to the playlist media
2475 * sequence number plus its length.
2476 * @param {Number=} expired the amount of time that has
2477 * dropped off the front of the playlist in a live scenario
2478 * @return {Number} the duration between the start index and end
2479 * index.
2480 */
2481 var duration = function duration(playlist, endSequence, expired) {
2482 if (!playlist) {
2483 return 0;
2484 }
2485
2486 if (typeof expired !== 'number') {
2487 expired = 0;
2488 }
2489
2490 // if a slice of the total duration is not requested, use
2491 // playlist-level duration indicators when they're present
2492 if (typeof endSequence === 'undefined') {
2493 // if present, use the duration specified in the playlist
2494 if (playlist.totalDuration) {
2495 return playlist.totalDuration;
2496 }
2497
2498 // duration should be Infinity for live playlists
2499 if (!playlist.endList) {
2500 return window_1.Infinity;
2501 }
2502 }
2503
2504 // calculate the total duration based on the segment durations
2505 return intervalDuration(playlist, endSequence, expired);
2506 };
2507
2508 /**
2509 * Calculate the time between two indexes in the current playlist
   * neither the start- nor the end-index needs to be within the current
2511 * playlist in which case, the targetDuration of the playlist is used
2512 * to approximate the durations of the segments
2513 *
2514 * @param {Object} playlist a media playlist object
2515 * @param {Number} startIndex
2516 * @param {Number} endIndex
2517 * @return {Number} the number of seconds between startIndex and endIndex
2518 */
2519 var sumDurations = function sumDurations(playlist, startIndex, endIndex) {
2520 var durations = 0;
2521
2522 if (startIndex > endIndex) {
2523 var _ref = [endIndex, startIndex];
2524 startIndex = _ref[0];
2525 endIndex = _ref[1];
2526 }
2527
2528 if (startIndex < 0) {
2529 for (var i = startIndex; i < Math.min(0, endIndex); i++) {
2530 durations += playlist.targetDuration;
2531 }
2532 startIndex = 0;
2533 }
2534
2535 for (var _i = startIndex; _i < endIndex; _i++) {
2536 durations += playlist.segments[_i].duration;
2537 }
2538
2539 return durations;
2540 };
2541
2542 /**
2543 * Determines the media index of the segment corresponding to the safe edge of the live
2544 * window which is the duration of the last segment plus 2 target durations from the end
2545 * of the playlist.
2546 *
2547 * A liveEdgePadding can be provided which will be used instead of calculating the safe live edge.
2548 * This corresponds to suggestedPresentationDelay in DASH manifests.
2549 *
2550 * @param {Object} playlist
2551 * a media playlist object
2552 * @param {Number} [liveEdgePadding]
2553 * A number in seconds indicating how far from the end we want to be.
2554 * If provided, this value is used instead of calculating the safe live index from the target durations.
2555 * Corresponds to suggestedPresentationDelay in DASH manifests.
2556 * @return {Number}
2557 * The media index of the segment at the safe live point. 0 if there is no "safe"
2558 * point.
2559 * @function safeLiveIndex
2560 */
2561 var safeLiveIndex = function safeLiveIndex(playlist, liveEdgePadding) {
2562 if (!playlist.segments.length) {
2563 return 0;
2564 }
2565
2566 var i = playlist.segments.length;
2567 var lastSegmentDuration = playlist.segments[i - 1].duration || playlist.targetDuration;
2568 var safeDistance = typeof liveEdgePadding === 'number' ? liveEdgePadding : lastSegmentDuration + playlist.targetDuration * 2;
2569
2570 if (safeDistance === 0) {
2571 return i;
2572 }
2573
2574 var distanceFromEnd = 0;
2575
2576 while (i--) {
2577 distanceFromEnd += playlist.segments[i].duration;
2578
2579 if (distanceFromEnd >= safeDistance) {
2580 break;
2581 }
2582 }
2583
2584 return Math.max(0, i);
2585 };
2586
2587 /**
2588 * Calculates the playlist end time
2589 *
2590 * @param {Object} playlist a media playlist object
2591 * @param {Number=} expired the amount of time that has
2592 * dropped off the front of the playlist in a live scenario
2593 * @param {Boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
2594 * playlist end calculation should consider the safe live end
2595 * (truncate the playlist end by three segments). This is normally
2596 * used for calculating the end of the playlist's seekable range.
2597 * This takes into account the value of liveEdgePadding.
2598 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
2599 * @param {Number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
2600 * If this is provided, it is used in the safe live end calculation.
2601 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
2602 * Corresponds to suggestedPresentationDelay in DASH manifests.
2603 * @returns {Number} the end time of playlist
2604 * @function playlistEnd
2605 */
2606 var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
2607 if (!playlist || !playlist.segments) {
2608 return null;
2609 }
2610 if (playlist.endList) {
2611 return duration(playlist);
2612 }
2613
2614 if (expired === null) {
2615 return null;
2616 }
2617
2618 expired = expired || 0;
2619
2620 var endSequence = useSafeLiveEnd ? safeLiveIndex(playlist, liveEdgePadding) : playlist.segments.length;
2621
2622 return intervalDuration(playlist, playlist.mediaSequence + endSequence, expired);
2623 };
2624
2625 /**
2626 * Calculates the interval of time that is currently seekable in a
2627 * playlist. The returned time ranges are relative to the earliest
2628 * moment in the specified playlist that is still available. A full
2629 * seekable implementation for live streams would need to offset
2630 * these values by the duration of content that has expired from the
2631 * stream.
2632 *
   * @param {Object} playlist a media playlist object
2635 * @param {Number=} expired the amount of time that has
2636 * dropped off the front of the playlist in a live scenario
2637 * @param {Number} liveEdgePadding how far from the end of the playlist we should be in seconds.
2638 * Corresponds to suggestedPresentationDelay in DASH manifests.
2639 * @return {TimeRanges} the periods of time that are valid targets
2640 * for seeking
2641 */
2642 var seekable = function seekable(playlist, expired, liveEdgePadding) {
2643 var useSafeLiveEnd = true;
2644 var seekableStart = expired || 0;
2645 var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
2646
2647 if (seekableEnd === null) {
2648 return createTimeRange();
2649 }
2650 return createTimeRange(seekableStart, seekableEnd);
2651 };
2652
2653 var isWholeNumber = function isWholeNumber(num) {
2654 return num - Math.floor(num) === 0;
2655 };
2656
2657 var roundSignificantDigit = function roundSignificantDigit(increment, num) {
2658 // If we have a whole number, just add 1 to it
2659 if (isWholeNumber(num)) {
2660 return num + increment * 0.1;
2661 }
2662
2663 var numDecimalDigits = num.toString().split('.')[1].length;
2664
2665 for (var i = 1; i <= numDecimalDigits; i++) {
2666 var scale = Math.pow(10, i);
2667 var temp = num * scale;
2668
2669 if (isWholeNumber(temp) || i === numDecimalDigits) {
2670 return (temp + increment) / scale;
2671 }
2672 }
2673 };
2674
  // Bump the least significant decimal digit up (ceil) or down (floor).
  // Used below to make conservative estimates of segment durations.
  var ceilLeastSignificantDigit = roundSignificantDigit.bind(null, 1);
  var floorLeastSignificantDigit = roundSignificantDigit.bind(null, -1);
2677
2678 /**
2679 * Determine the index and estimated starting time of the segment that
2680 * contains a specified playback position in a media playlist.
2681 *
2682 * @param {Object} playlist the media playlist to query
2683 * @param {Number} currentTime The number of seconds since the earliest
2684 * possible position to determine the containing segment for
2685 * @param {Number} startIndex
2686 * @param {Number} startTime
2687 * @return {Object}
2688 */
  var getMediaInfoForTime = function getMediaInfoForTime(playlist, currentTime, startIndex, startTime) {
    var i = void 0;
    var segment = void 0;
    var numSegments = playlist.segments.length;

    // offset of currentTime relative to the known start of segment startIndex
    var time = currentTime - startTime;

    if (time < 0) {
      // Walk backward from startIndex in the playlist, adding durations
      // until we find a segment that contains `time` and return it.
      // floorLeastSignificantDigit makes the walk conservative against
      // floating point rounding in declared durations.
      if (startIndex > 0) {
        for (i = startIndex - 1; i >= 0; i--) {
          segment = playlist.segments[i];
          time += floorLeastSignificantDigit(segment.duration);
          if (time > 0) {
            return {
              mediaIndex: i,
              startTime: startTime - sumDurations(playlist, startIndex, i)
            };
          }
        }
      }
      // We were unable to find a good segment within the playlist
      // so select the first segment
      return {
        mediaIndex: 0,
        startTime: currentTime
      };
    }

    // When startIndex is negative, we first walk forward to first segment
    // adding target durations. If we "run out of time" before getting to
    // the first segment, return the first segment
    if (startIndex < 0) {
      for (i = startIndex; i < 0; i++) {
        time -= playlist.targetDuration;
        if (time < 0) {
          return {
            mediaIndex: 0,
            startTime: currentTime
          };
        }
      }
      startIndex = 0;
    }

    // Walk forward from startIndex in the playlist, subtracting durations
    // until we find a segment that contains `time` and return it.
    // ceilLeastSignificantDigit biases toward the earlier segment here.
    for (i = startIndex; i < numSegments; i++) {
      segment = playlist.segments[i];
      time -= ceilLeastSignificantDigit(segment.duration);
      if (time < 0) {
        return {
          mediaIndex: i,
          startTime: startTime + sumDurations(playlist, startIndex, i)
        };
      }
    }

    // We are out of possible candidates so load the last one...
    return {
      mediaIndex: numSegments - 1,
      startTime: currentTime
    };
  };
2754
2755 /**
2756 * Check whether the playlist is blacklisted or not.
2757 *
2758 * @param {Object} playlist the media playlist object
2759 * @return {boolean} whether the playlist is blacklisted or not
2760 * @function isBlacklisted
2761 */
2762 var isBlacklisted = function isBlacklisted(playlist) {
2763 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
2764 };
2765
2766 /**
2767 * Check whether the playlist is compatible with current playback configuration or has
2768 * been blacklisted permanently for being incompatible.
2769 *
2770 * @param {Object} playlist the media playlist object
2771 * @return {boolean} whether the playlist is incompatible or not
2772 * @function isIncompatible
2773 */
2774 var isIncompatible = function isIncompatible(playlist) {
2775 return playlist.excludeUntil && playlist.excludeUntil === Infinity;
2776 };
2777
2778 /**
2779 * Check whether the playlist is enabled or not.
2780 *
2781 * @param {Object} playlist the media playlist object
2782 * @return {boolean} whether the playlist is enabled or not
2783 * @function isEnabled
2784 */
2785 var isEnabled = function isEnabled(playlist) {
2786 var blacklisted = isBlacklisted(playlist);
2787
2788 return !playlist.disabled && !blacklisted;
2789 };
2790
2791 /**
2792 * Check whether the playlist has been manually disabled through the representations api.
2793 *
2794 * @param {Object} playlist the media playlist object
2795 * @return {boolean} whether the playlist is disabled manually or not
2796 * @function isDisabled
2797 */
  var isDisabled = function isDisabled(playlist) {
    // `disabled` is set via the representations API; this is distinct
    // from blacklisting (see isBlacklisted above)
    return playlist.disabled;
  };
2801
2802 /**
2803 * Returns whether the current playlist is an AES encrypted HLS stream
2804 *
2805 * @return {Boolean} true if it's an AES encrypted HLS stream
2806 */
2807 var isAes = function isAes(media) {
2808 for (var i = 0; i < media.segments.length; i++) {
2809 if (media.segments[i].key) {
2810 return true;
2811 }
2812 }
2813 return false;
2814 };
2815
2816 /**
2817 * Returns whether the current playlist contains fMP4
2818 *
2819 * @return {Boolean} true if the playlist contains fMP4
2820 */
2821 var isFmp4 = function isFmp4(media) {
2822 for (var i = 0; i < media.segments.length; i++) {
2823 if (media.segments[i].map) {
2824 return true;
2825 }
2826 }
2827 return false;
2828 };
2829
2830 /**
2831 * Checks if the playlist has a value for the specified attribute
2832 *
2833 * @param {String} attr
2834 * Attribute to check for
2835 * @param {Object} playlist
2836 * The media playlist object
2837 * @return {Boolean}
2838 * Whether the playlist contains a value for the attribute or not
2839 * @function hasAttribute
2840 */
2841 var hasAttribute = function hasAttribute(attr, playlist) {
2842 return playlist.attributes && playlist.attributes[attr];
2843 };
2844
2845 /**
2846 * Estimates the time required to complete a segment download from the specified playlist
2847 *
2848 * @param {Number} segmentDuration
2849 * Duration of requested segment
2850 * @param {Number} bandwidth
2851 * Current measured bandwidth of the player
2852 * @param {Object} playlist
2853 * The media playlist object
2854 * @param {Number=} bytesReceived
2855 * Number of bytes already received for the request. Defaults to 0
2856 * @return {Number|NaN}
2857 * The estimated time to request the segment. NaN if bandwidth information for
2858 * the given playlist is unavailable
2859 * @function estimateSegmentRequestTime
2860 */
2861 var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist) {
2862 var bytesReceived = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 0;
2863
2864 if (!hasAttribute('BANDWIDTH', playlist)) {
2865 return NaN;
2866 }
2867
2868 var size = segmentDuration * playlist.attributes.BANDWIDTH;
2869
2870 return (size - bytesReceived * 8) / bandwidth;
2871 };
2872
2873 /*
2874 * Returns whether the current playlist is the lowest rendition
2875 *
2876 * @return {Boolean} true if on lowest rendition
2877 */
2878 var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
2879 if (master.playlists.length === 1) {
2880 return true;
2881 }
2882
2883 var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
2884
2885 return master.playlists.filter(function (playlist) {
2886 if (!isEnabled(playlist)) {
2887 return false;
2888 }
2889
2890 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
2891 }).length === 0;
2892 };
2893
  // exports: single facade object collecting all playlist utilities
  var Playlist = {
    duration: duration,
    seekable: seekable,
    safeLiveIndex: safeLiveIndex,
    getMediaInfoForTime: getMediaInfoForTime,
    isEnabled: isEnabled,
    isDisabled: isDisabled,
    isBlacklisted: isBlacklisted,
    isIncompatible: isIncompatible,
    playlistEnd: playlistEnd,
    isAes: isAes,
    isFmp4: isFmp4,
    hasAttribute: hasAttribute,
    estimateSegmentRequestTime: estimateSegmentRequestTime,
    isLowestEnabledRendition: isLowestEnabledRendition
  };
2911
2912 /**
2913 * @file xhr.js
2914 */
2915
  // videojs utilities: the underlying XHR implementation and option merging
  var videojsXHR = videojs.xhr,
      mergeOptions$1 = videojs.mergeOptions;
2918
2919
  /**
   * Creates the XHR wrapper used for all HLS requests. The returned function
   * delegates to videojs.xhr, adding a default timeout, an optional
   * user-supplied beforeRequest hook, bandwidth/timing bookkeeping on the
   * request object, and legacy error semantics for non-2xx statuses.
   *
   * @return {Function} an xhr-like function (options, callback) -> request
   */
  var xhrFactory = function xhrFactory() {
    var xhr = function XhrFunction(options, callback) {
      // Add a default timeout for all hls requests
      options = mergeOptions$1({
        timeout: 45e3
      }, options);

      // Allow an optional user-specified function to modify the option
      // object before we construct the xhr request
      var beforeRequest = XhrFunction.beforeRequest || videojs.Hls.xhr.beforeRequest;

      if (beforeRequest && typeof beforeRequest === 'function') {
        var newOptions = beforeRequest(options);

        // only replace options if the hook actually returned something
        if (newOptions) {
          options = newOptions;
        }
      }

      var request = videojsXHR(options, function (error, response) {
        var reqResponse = request.response;

        // record timing and derive a bandwidth estimate (bits per second)
        if (!error && reqResponse) {
          request.responseTime = Date.now();
          request.roundTripTime = request.responseTime - request.requestTime;
          request.bytesReceived = reqResponse.byteLength || reqResponse.length;
          if (!request.bandwidth) {
            request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
          }
        }

        if (response.headers) {
          request.responseHeaders = response.headers;
        }

        // videojs.xhr now uses a specific code on the error
        // object to signal that a request has timed out instead
        // of setting a boolean on the request object
        if (error && error.code === 'ETIMEDOUT') {
          request.timedout = true;
        }

        // videojs.xhr no longer considers status codes outside of 200 and 0
        // (for file uris) to be errors, but the old XHR did, so emulate that
        // behavior. Status 206 may be used in response to byterange requests.
        if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
          error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
        }

        callback(error, request);
      });
      var originalAbort = request.abort;

      // wrap abort so the callback above can detect aborted requests
      request.abort = function () {
        request.aborted = true;
        return originalAbort.apply(request, arguments);
      };
      request.uri = options.uri;
      request.requestTime = Date.now();
      return request;
    };

    return xhr;
  };
2984
2985 /**
2986 * Turns segment byterange into a string suitable for use in
2987 * HTTP Range requests
2988 *
2989 * @param {Object} byterange - an object with two values defining the start and end
2990 * of a byte-range
2991 */
2992 var byterangeStr = function byterangeStr(byterange) {
2993 var byterangeStart = void 0;
2994 var byterangeEnd = void 0;
2995
2996 // `byterangeEnd` is one less than `offset + length` because the HTTP range
2997 // header uses inclusive ranges
2998 byterangeEnd = byterange.offset + byterange.length - 1;
2999 byterangeStart = byterange.offset;
3000 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
3001 };
3002
3003 /**
3004 * Defines headers for use in the xhr request for a particular segment.
3005 *
3006 * @param {Object} segment - a simplified copy of the segmentInfo object
3007 * from SegmentLoader
3008 */
3009 var segmentXhrHeaders = function segmentXhrHeaders(segment) {
3010 var headers = {};
3011
3012 if (segment.byterange) {
3013 headers.Range = byterangeStr(segment.byterange);
3014 }
3015 return headers;
3016 };
3017
3018 /*
3019 * pkcs7.pad
3020 * https://github.com/brightcove/pkcs7
3021 *
3022 * Copyright (c) 2014 Brightcove
3023 * Licensed under the apache2 license.
3024 */
3025
3026 /**
3027 * Returns the subarray of a Uint8Array without PKCS#7 padding.
3028 * @param padded {Uint8Array} unencrypted bytes that have been padded
3029 * @return {Uint8Array} the unpadded bytes
3030 * @see http://tools.ietf.org/html/rfc5652
3031 */
3032 function unpad(padded) {
3033 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
3034 }
3035
3036 var classCallCheck$1 = function classCallCheck(instance, Constructor) {
3037 if (!(instance instanceof Constructor)) {
3038 throw new TypeError("Cannot call a class as a function");
3039 }
3040 };
3041
3042 var createClass$1 = function () {
3043 function defineProperties(target, props) {
3044 for (var i = 0; i < props.length; i++) {
3045 var descriptor = props[i];
3046 descriptor.enumerable = descriptor.enumerable || false;
3047 descriptor.configurable = true;
3048 if ("value" in descriptor) descriptor.writable = true;
3049 Object.defineProperty(target, descriptor.key, descriptor);
3050 }
3051 }
3052
3053 return function (Constructor, protoProps, staticProps) {
3054 if (protoProps) defineProperties(Constructor.prototype, protoProps);
3055 if (staticProps) defineProperties(Constructor, staticProps);
3056 return Constructor;
3057 };
3058 }();
3059
  /**
   * Babel helper: wire up prototypal inheritance between a subclass and its
   * superclass (or null, for a base class with a null prototype chain).
   *
   * @param {Function} subClass the constructor being defined
   * @param {Function|null} superClass the constructor being extended
   * @throws {TypeError} when superClass is neither a function nor null
   */
  var inherits$1 = function inherits(subClass, superClass) {
    if (typeof superClass !== "function" && superClass !== null) {
      throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
    }

    // build the subclass prototype with a non-enumerable constructor ref
    subClass.prototype = Object.create(superClass && superClass.prototype, {
      constructor: {
        value: subClass,
        enumerable: false,
        writable: true,
        configurable: true
      }
    });
    // link static members; fall back to __proto__ where setPrototypeOf
    // is unavailable (legacy environments)
    if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;
  };
3075
3076 var possibleConstructorReturn$1 = function possibleConstructorReturn(self, call) {
3077 if (!self) {
3078 throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
3079 }
3080
3081 return call && (typeof call === "object" || typeof call === "function") ? call : self;
3082 };
3083
3084 /**
3085 * @file aes.js
3086 *
3087 * This file contains an adaptation of the AES decryption algorithm
3088 * from the Standford Javascript Cryptography Library. That work is
3089 * covered by the following copyright and permissions notice:
3090 *
3091 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
3092 * All rights reserved.
3093 *
3094 * Redistribution and use in source and binary forms, with or without
3095 * modification, are permitted provided that the following conditions are
3096 * met:
3097 *
3098 * 1. Redistributions of source code must retain the above copyright
3099 * notice, this list of conditions and the following disclaimer.
3100 *
3101 * 2. Redistributions in binary form must reproduce the above
3102 * copyright notice, this list of conditions and the following
3103 * disclaimer in the documentation and/or other materials provided
3104 * with the distribution.
3105 *
3106 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
3107 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
3108 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
3109 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
3110 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
3111 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
3112 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
3113 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
3114 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
3115 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
3116 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
3117 *
3118 * The views and conclusions contained in the software and documentation
3119 * are those of the authors and should not be interpreted as representing
3120 * official policies, either expressed or implied, of the authors.
3121 */
3122
3123 /**
3124 * Expand the S-box tables.
3125 *
3126 * @private
3127 */
  /**
   * Expand the AES S-box tables.
   *
   * Builds [encryption, decryption] table sets; in each set the first four
   * sub-tables combine the S-box with MixColumns (one rotation per table)
   * and the fifth is the plain S-box (or inverse S-box).
   *
   * @private
   * @return {Array} the two table sets
   */
  var precompute = function precompute() {
    var tables = [[[], [], [], [], []], [[], [], [], [], []]];
    var encTable = tables[0];
    var decTable = tables[1];
    var sbox = encTable[4];
    var sboxInv = decTable[4];
    var i = void 0;
    var x = void 0;
    var xInv = void 0;
    var d = [];
    var th = [];
    var x2 = void 0;
    var x4 = void 0;
    var x8 = void 0;
    var s = void 0;
    var tEnc = void 0;
    var tDec = void 0;

    // Compute double and third tables
    // d[i] doubles i in GF(2^8) (the AES "xtime" operation); th inverts
    // the i -> d[i]^i mapping so xInv can step through multiplicative order
    for (i = 0; i < 256; i++) {
      th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
    }

    // iterate until every sbox entry is filled in
    for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
      // Compute sbox
      s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
      s = s >> 8 ^ s & 255 ^ 99;
      sbox[x] = s;
      sboxInv[s] = x;

      // Compute MixColumns
      x8 = d[x4 = d[x2 = d[x]]];
      tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
      tEnc = d[s] * 0x101 ^ s * 0x1010100;

      // each of the four tables is the previous one rotated by a byte
      for (i = 0; i < 4; i++) {
        encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
        decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
      }
    }

    // Compactify. Considerable speedup on Firefox.
    for (i = 0; i < 5; i++) {
      encTable[i] = encTable[i].slice(0);
      decTable[i] = decTable[i].slice(0);
    }
    return tables;
  };
  // lazily initialized, module-wide cache of the precomputed AES tables
  var aesTables = null;
3177
3178 /**
3179 * Schedule out an AES key for both encryption and decryption. This
3180 * is a low-level class. Use a cipher mode to do bulk encryption.
3181 *
3182 * @class AES
3183 * @param key {Array} The key as an array of 4, 6 or 8 words.
3184 */
3185
  var AES = function () {
    function AES(key) {
      classCallCheck$1(this, AES);

      /**
       * The expanded S-box and inverse S-box tables. These will be computed
       * on the client so that we don't have to send them down the wire.
       *
       * There are two tables, _tables[0] is for encryption and
       * _tables[1] is for decryption.
       *
       * The first 4 sub-tables are the expanded S-box with MixColumns. The
       * last (_tables[01][4]) is the S-box itself.
       *
       * @private
       */
      // if we have yet to precompute the S-box tables
      // do so now
      if (!aesTables) {
        aesTables = precompute();
      }
      // then make a copy of that object for use
      this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
      var i = void 0;
      var j = void 0;
      var tmp = void 0;
      var encKey = void 0;
      var decKey = void 0;
      var sbox = this._tables[0][4];
      var decTable = this._tables[1];
      var keyLen = key.length;
      var rcon = 1;

      // only 128-, 192- and 256-bit keys (4, 6 or 8 words) are valid
      if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
        throw new Error('Invalid aes key size');
      }

      encKey = key.slice(0);
      decKey = [];
      this._key = [encKey, decKey];

      // schedule encryption keys
      // (expand the key into one round key per round; each new word is
      // derived from the word keyLen positions back, XORed with a
      // transformed copy of the previous word)
      for (i = keyLen; i < 4 * keyLen + 28; i++) {
        tmp = encKey[i - 1];

        // apply sbox
        if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
          tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255];

          // shift rows and add rcon
          if (i % keyLen === 0) {
            tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
            rcon = rcon << 1 ^ (rcon >> 7) * 283;
          }
        }

        encKey[i] = encKey[i - keyLen] ^ tmp;
      }

      // schedule decryption keys
      // (walk the encryption schedule backwards; middle-round words are
      // passed through the inverse MixColumns tables)
      for (j = 0; i; j++, i--) {
        tmp = encKey[j & 3 ? i : i - 4];
        if (i <= 4 || j < 4) {
          decKey[j] = tmp;
        } else {
          decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
        }
      }
    }

    /**
     * Decrypt 16 bytes, specified as four 32-bit words.
     *
     * @param {Number} encrypted0 the first word to decrypt
     * @param {Number} encrypted1 the second word to decrypt
     * @param {Number} encrypted2 the third word to decrypt
     * @param {Number} encrypted3 the fourth word to decrypt
     * @param {Int32Array} out the array to write the decrypted words
     * into
     * @param {Number} offset the offset into the output array to start
     * writing results
     * @return {Array} The plaintext.
     */

    AES.prototype.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
      var key = this._key[1];
      // state variables a,b,c,d are loaded with pre-whitened data
      var a = encrypted0 ^ key[0];
      var b = encrypted3 ^ key[1];
      var c = encrypted2 ^ key[2];
      var d = encrypted1 ^ key[3];
      var a2 = void 0;
      var b2 = void 0;
      var c2 = void 0;

      // key.length === 2 ?
      var nInnerRounds = key.length / 4 - 2;
      var i = void 0;
      var kIndex = 4;
      var table = this._tables[1];

      // load up the tables
      var table0 = table[0];
      var table1 = table[1];
      var table2 = table[2];
      var table3 = table[3];
      var sbox = table[4];

      // Inner rounds. Cribbed from OpenSSL.
      for (i = 0; i < nInnerRounds; i++) {
        a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
        b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
        c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
        d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
        kIndex += 4;
        a = a2;b = b2;c = c2;
      }

      // Last round. Applies only the S-box (via table[4]) rather than the
      // combined lookup tables, and writes the words out in reversed order
      // (the (3 & -i) index) to undo the input word ordering above.
      for (i = 0; i < 4; i++) {
        out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
        a2 = a;a = b;b = c;c = d;d = a2;
      }
    };

    return AES;
  }();
3313
3314 /**
3315 * @file stream.js
3316 */
3317 /**
 * A lightweight readable stream implementation that handles event dispatching.
3319 *
3320 * @class Stream
3321 */
3322 var Stream$1 = function () {
3323 function Stream() {
3324 classCallCheck$1(this, Stream);
3325
3326 this.listeners = {};
3327 }
3328
3329 /**
3330 * Add a listener for a specified event type.
3331 *
3332 * @param {String} type the event name
3333 * @param {Function} listener the callback to be invoked when an event of
3334 * the specified type occurs
3335 */
3336
3337 Stream.prototype.on = function on(type, listener) {
3338 if (!this.listeners[type]) {
3339 this.listeners[type] = [];
3340 }
3341 this.listeners[type].push(listener);
3342 };
3343
3344 /**
3345 * Remove a listener for a specified event type.
3346 *
3347 * @param {String} type the event name
3348 * @param {Function} listener a function previously registered for this
3349 * type of event through `on`
3350 * @return {Boolean} if we could turn it off or not
3351 */
3352
3353 Stream.prototype.off = function off(type, listener) {
3354 if (!this.listeners[type]) {
3355 return false;
3356 }
3357
3358 var index = this.listeners[type].indexOf(listener);
3359
3360 this.listeners[type].splice(index, 1);
3361 return index > -1;
3362 };
3363
3364 /**
3365 * Trigger an event of the specified type on this stream. Any additional
3366 * arguments to this function are passed as parameters to event listeners.
3367 *
3368 * @param {String} type the event name
3369 */
3370
3371 Stream.prototype.trigger = function trigger(type) {
3372 var callbacks = this.listeners[type];
3373
3374 if (!callbacks) {
3375 return;
3376 }
3377
3378 // Slicing the arguments on every invocation of this method
3379 // can add a significant amount of overhead. Avoid the
3380 // intermediate object creation for the common case of a
3381 // single callback argument
3382 if (arguments.length === 2) {
3383 var length = callbacks.length;
3384
3385 for (var i = 0; i < length; ++i) {
3386 callbacks[i].call(this, arguments[1]);
3387 }
3388 } else {
3389 var args = Array.prototype.slice.call(arguments, 1);
3390 var _length = callbacks.length;
3391
3392 for (var _i = 0; _i < _length; ++_i) {
3393 callbacks[_i].apply(this, args);
3394 }
3395 }
3396 };
3397
3398 /**
3399 * Destroys the stream and cleans up.
3400 */
3401
3402 Stream.prototype.dispose = function dispose() {
3403 this.listeners = {};
3404 };
3405 /**
3406 * Forwards all `data` events on this stream to the destination stream. The
3407 * destination stream should provide a method `push` to receive the data
3408 * events as they arrive.
3409 *
3410 * @param {Stream} destination the stream that will receive all `data` events
3411 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
3412 */
3413
3414 Stream.prototype.pipe = function pipe(destination) {
3415 this.on('data', function (data) {
3416 destination.push(data);
3417 });
3418 };
3419
3420 return Stream;
3421 }();
3422
  /**
   * @file async-stream.js
   */
  /**
   * A wrapper around the Stream class to use setTimeout
   * and run stream "jobs" asynchronously
   *
   * @class AsyncStream
   * @extends Stream
   */

  var AsyncStream = function (_Stream) {
    inherits$1(AsyncStream, _Stream);

    function AsyncStream() {
      classCallCheck$1(this, AsyncStream);

      var _this = possibleConstructorReturn$1(this, _Stream.call(this, Stream$1));

      // queue of pending job functions, run one per timer tick
      _this.jobs = [];
      // milliseconds to wait between jobs
      _this.delay = 1;
      // handle of the currently scheduled setTimeout, or null when idle
      _this.timeout_ = null;
      return _this;
    }

    /**
     * process an async job
     *
     * @private
     */

    AsyncStream.prototype.processJob_ = function processJob_() {
      // run the oldest queued job, then schedule the next one (if any)
      this.jobs.shift()();
      if (this.jobs.length) {
        this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
      } else {
        this.timeout_ = null;
      }
    };

    /**
     * push a job into the stream
     *
     * @param {Function} job the job to push into the stream
     */

    AsyncStream.prototype.push = function push(job) {
      this.jobs.push(job);
      // kick off processing unless a job is already scheduled
      if (!this.timeout_) {
        this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
      }
    };

    return AsyncStream;
  }(Stream$1);
3478
3479 /**
3480 * @file decrypter.js
3481 *
3482 * An asynchronous implementation of AES-128 CBC decryption with
3483 * PKCS#7 padding.
3484 */
3485
3486 /**
3487 * Convert network-order (big-endian) bytes into their little-endian
3488 * representation.
3489 */
3490 var ntoh = function ntoh(word) {
3491 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
3492 };
3493
3494 /**
3495 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
3496 *
3497 * @param {Uint8Array} encrypted the encrypted bytes
3498 * @param {Uint32Array} key the bytes of the decryption key
3499 * @param {Uint32Array} initVector the initialization vector (IV) to
3500 * use for the first round of CBC.
3501 * @return {Uint8Array} the decrypted bytes
3502 *
3503 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
3504 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
3505 * @see https://tools.ietf.org/html/rfc2315
3506 */
3507 var decrypt = function decrypt(encrypted, key, initVector) {
3508 // word-level access to the encrypted bytes
3509 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
3510
3511 var decipher = new AES(Array.prototype.slice.call(key));
3512
3513 // byte and word-level access for the decrypted output
3514 var decrypted = new Uint8Array(encrypted.byteLength);
3515 var decrypted32 = new Int32Array(decrypted.buffer);
3516
3517 // temporary variables for working with the IV, encrypted, and
3518 // decrypted data
3519 var init0 = void 0;
3520 var init1 = void 0;
3521 var init2 = void 0;
3522 var init3 = void 0;
3523 var encrypted0 = void 0;
3524 var encrypted1 = void 0;
3525 var encrypted2 = void 0;
3526 var encrypted3 = void 0;
3527
3528 // iteration variable
3529 var wordIx = void 0;
3530
3531 // pull out the words of the IV to ensure we don't modify the
3532 // passed-in reference and easier access
3533 init0 = initVector[0];
3534 init1 = initVector[1];
3535 init2 = initVector[2];
3536 init3 = initVector[3];
3537
3538 // decrypt four word sequences, applying cipher-block chaining (CBC)
3539 // to each decrypted block
3540 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
3541 // convert big-endian (network order) words into little-endian
3542 // (javascript order)
3543 encrypted0 = ntoh(encrypted32[wordIx]);
3544 encrypted1 = ntoh(encrypted32[wordIx + 1]);
3545 encrypted2 = ntoh(encrypted32[wordIx + 2]);
3546 encrypted3 = ntoh(encrypted32[wordIx + 3]);
3547
3548 // decrypt the block
3549 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx);
3550
3551 // XOR with the IV, and restore network byte-order to obtain the
3552 // plaintext
3553 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
3554 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
3555 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
3556 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3);
3557
3558 // setup the IV for the next round
3559 init0 = encrypted0;
3560 init1 = encrypted1;
3561 init2 = encrypted2;
3562 init3 = encrypted3;
3563 }
3564
3565 return decrypted;
3566 };
3567
  /**
   * The `Decrypter` class that manages decryption of AES
   * data through `AsyncStream` objects and the `decrypt`
   * function
   *
   * @param {Uint8Array} encrypted the encrypted bytes
   * @param {Uint32Array} key the bytes of the decryption key
   * @param {Uint32Array} initVector the initialization vector (IV) to
   * @param {Function} done the function to run when done
   * @class Decrypter
   */

  var Decrypter = function () {
    function Decrypter(encrypted, key, initVector, done) {
      classCallCheck$1(this, Decrypter);

      var step = Decrypter.STEP;
      var encrypted32 = new Int32Array(encrypted.buffer);
      var decrypted = new Uint8Array(encrypted.byteLength);
      var i = 0;

      this.asyncStream_ = new AsyncStream();

      // split up the encryption job and do the individual chunks asynchronously
      this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
      for (i = step; i < encrypted32.length; i += step) {
        // CBC chains across chunks: the IV for this chunk is the last
        // 4 ciphertext words (16 bytes) of the previous chunk
        initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
        this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
      }
      // invoke the done() callback when everything is finished
      this.asyncStream_.push(function () {
        // remove pkcs#7 padding from the decrypted bytes
        done(null, unpad(decrypted));
      });
    }

    /**
     * a getter for step the maximum number of bytes to process at one time
     *
     * @return {Number} the value of step 32000
     */

    /**
     * Returns a job function that decrypts its chunk and writes the
     * plaintext into the shared output buffer at the chunk's byte offset.
     *
     * @private
     */
    Decrypter.prototype.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
      return function () {
        var bytes = decrypt(encrypted, key, initVector);

        decrypted.set(bytes, encrypted.byteOffset);
      };
    };

    createClass$1(Decrypter, null, [{
      key: 'STEP',
      get: function get$$1() {
        // 4 * 8000;
        return 32000;
      }
    }]);
    return Decrypter;
  }();
3630
3631 /**
3632 * @file bin-utils.js
3633 */
3634
3635 /**
3636 * convert a TimeRange to text
3637 *
3638 * @param {TimeRange} range the timerange to use for conversion
3639 * @param {Number} i the iterator on the range to convert
3640 */
3641 var textRange = function textRange(range, i) {
3642 return range.start(i) + '-' + range.end(i);
3643 };
3644
3645 /**
3646 * format a number as hex string
3647 *
3648 * @param {Number} e The number
3649 * @param {Number} i the iterator
3650 */
3651 var formatHexString = function formatHexString(e, i) {
3652 var value = e.toString(16);
3653
3654 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
3655 };
3656 var formatAsciiString = function formatAsciiString(e) {
3657 if (e >= 0x20 && e < 0x7e) {
3658 return String.fromCharCode(e);
3659 }
3660 return '.';
3661 };
3662
3663 /**
3664 * Creates an object for sending to a web worker modifying properties that are TypedArrays
3665 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
3666 *
3667 * @param {Object} message
3668 * Object of properties and values to send to the web worker
3669 * @return {Object}
3670 * Modified message with TypedArray values expanded
3671 * @function createTransferableMessage
3672 */
3673 var createTransferableMessage = function createTransferableMessage(message) {
3674 var transferable = {};
3675
3676 Object.keys(message).forEach(function (key) {
3677 var value = message[key];
3678
3679 if (ArrayBuffer.isView(value)) {
3680 transferable[key] = {
3681 bytes: value.buffer,
3682 byteOffset: value.byteOffset,
3683 byteLength: value.byteLength
3684 };
3685 } else {
3686 transferable[key] = value;
3687 }
3688 });
3689
3690 return transferable;
3691 };
3692
3693 /**
3694 * Returns a unique string identifier for a media initialization
3695 * segment.
3696 */
3697 var initSegmentId = function initSegmentId(initSegment) {
3698 var byterange = initSegment.byterange || {
3699 length: Infinity,
3700 offset: 0
3701 };
3702
3703 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
3704 };
3705
3706 /**
3707 * Returns a unique string identifier for a media segment key.
3708 */
3709 var segmentKeyId = function segmentKeyId(key) {
3710 return key.resolvedUri;
3711 };
3712
3713 /**
3714 * utils to help dump binary data to the console
3715 */
3716 var hexDump = function hexDump(data) {
3717 var bytes = Array.prototype.slice.call(data);
3718 var step = 16;
3719 var result = '';
3720 var hex = void 0;
3721 var ascii = void 0;
3722
3723 for (var j = 0; j < bytes.length / step; j++) {
3724 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
3725 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
3726 result += hex + ' ' + ascii + '\n';
3727 }
3728
3729 return result;
3730 };
3731
3732 var tagDump = function tagDump(_ref) {
3733 var bytes = _ref.bytes;
3734 return hexDump(bytes);
3735 };
3736
3737 var textRanges = function textRanges(ranges) {
3738 var result = '';
3739 var i = void 0;
3740
3741 for (i = 0; i < ranges.length; i++) {
3742 result += textRange(ranges, i) + ' ';
3743 }
3744 return result;
3745 };
3746
  // Frozen, grouped export of the binary/segment helper utilities above.
  var utils = /*#__PURE__*/Object.freeze({
    createTransferableMessage: createTransferableMessage,
    initSegmentId: initSegmentId,
    segmentKeyId: segmentKeyId,
    hexDump: hexDump,
    tagDump: tagDump,
    textRanges: textRanges
  });
3755
  // TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux

  // Add 25% to the segment duration to account for small discrepancies in segment timing.
  // 25% was arbitrarily chosen, and may need to be refined over time.
  var SEGMENT_END_FUDGE_PERCENT = 0.25;
3761
3762 /**
3763 * Converts a player time (any time that can be gotten/set from player.currentTime(),
3764 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
3765 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
3766 *
3767 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
3768 * point" (a point where we have a mapping from program time to player time, with player
3769 * time being the post transmux start of the segment).
3770 *
3771 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
3772 *
3773 * @param {Number} playerTime the player time
3774 * @param {Object} segment the segment which contains the player time
3775 * @return {Date} program time
3776 */
3777 var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
3778 if (!segment.dateTimeObject) {
3779 // Can't convert without an "anchor point" for the program time (i.e., a time that can
3780 // be used to map the start of a segment with a real world time).
3781 return null;
3782 }
3783
3784 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
3785 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart;
3786
3787 // get the start of the content from before old content is prepended
3788 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
3789 var offsetFromSegmentStart = playerTime - startOfSegment;
3790
3791 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
3792 };
3793
3794 var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
3795 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
3796 };
3797
3798 /**
3799 * Finds a segment that contains the time requested given as an ISO-8601 string. The
3800 * returned segment might be an estimate or an accurate match.
3801 *
3802 * @param {String} programTime The ISO-8601 programTime to find a match for
3803 * @param {Object} playlist A playlist object to search within
3804 */
3805 var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
3806 // Assumptions:
3807 // - verifyProgramDateTimeTags has already been run
3808 // - live streams have been started
3809
3810 var dateTimeObject = void 0;
3811
3812 try {
3813 dateTimeObject = new Date(programTime);
3814 } catch (e) {
3815 return null;
3816 }
3817
3818 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
3819 return null;
3820 }
3821
3822 var segment = playlist.segments[0];
3823
3824 if (dateTimeObject < segment.dateTimeObject) {
3825 // Requested time is before stream start.
3826 return null;
3827 }
3828
3829 for (var i = 0; i < playlist.segments.length - 1; i++) {
3830 segment = playlist.segments[i];
3831
3832 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
3833
3834 if (dateTimeObject < nextSegmentStart) {
3835 break;
3836 }
3837 }
3838
3839 var lastSegment = playlist.segments[playlist.segments.length - 1];
3840 var lastSegmentStart = lastSegment.dateTimeObject;
3841 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
3842 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
3843
3844 if (dateTimeObject > lastSegmentEnd) {
3845 // Beyond the end of the stream, or our best guess of the end of the stream.
3846 return null;
3847 }
3848
3849 if (dateTimeObject > lastSegmentStart) {
3850 segment = lastSegment;
3851 }
3852
3853 return {
3854 segment: segment,
3855 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
3856 // Although, given that all segments have accurate date time objects, the segment
3857 // selected should be accurate, unless the video has been transmuxed at some point
3858 // (determined by the presence of the videoTimingInfo object), the segment's "player
3859 // time" (the start time in the player) can't be considered accurate.
3860 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
3861 };
3862 };
3863
3864 /**
3865 * Finds a segment that contains the given player time(in seconds).
3866 *
3867 * @param {Number} time The player time to find a match for
3868 * @param {Object} playlist A playlist object to search within
3869 */
3870 var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
3871 // Assumptions:
3872 // - there will always be a segment.duration
3873 // - we can start from zero
3874 // - segments are in time order
3875
3876 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
3877 return null;
3878 }
3879
3880 var segmentEnd = 0;
3881 var segment = void 0;
3882
3883 for (var i = 0; i < playlist.segments.length; i++) {
3884 segment = playlist.segments[i];
3885
3886 // videoTimingInfo is set after the segment is downloaded and transmuxed, and
3887 // should contain the most accurate values we have for the segment's player times.
3888 //
3889 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
3890 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
3891 // calculate an end value.
3892 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
3893
3894 if (time <= segmentEnd) {
3895 break;
3896 }
3897 }
3898
3899 var lastSegment = playlist.segments[playlist.segments.length - 1];
3900
3901 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
3902 // The time requested is beyond the stream end.
3903 return null;
3904 }
3905
3906 if (time > segmentEnd) {
3907 // The time is within or beyond the last segment.
3908 //
3909 // Check to see if the time is beyond a reasonable guess of the end of the stream.
3910 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
3911 // Technically, because the duration value is only an estimate, the time may still
3912 // exist in the last segment, however, there isn't enough information to make even
3913 // a reasonable estimate.
3914 return null;
3915 }
3916
3917 segment = lastSegment;
3918 }
3919
3920 return {
3921 segment: segment,
3922 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
3923 // Because videoTimingInfo is only set after transmux, it is the only way to get
3924 // accurate timing values.
3925 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
3926 };
3927 };
3928
3929 /**
3930 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
3931 * If the offset returned is positive, the programTime occurs after the
3932 * comparisonTimestamp.
3933 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
3934 *
3935 * @param {String} comparisonTimeStamp An ISO-8601 timestamp to compare against
3936 * @param {String} programTime The programTime as an ISO-8601 string
3937 * @return {Number} offset
3938 */
3939 var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
3940 var segmentDateTime = void 0;
3941 var programDateTime = void 0;
3942
3943 try {
3944 segmentDateTime = new Date(comparisonTimeStamp);
3945 programDateTime = new Date(programTime);
3946 } catch (e) {
3947 // TODO handle error
3948 }
3949
3950 var segmentTimeEpoch = segmentDateTime.getTime();
3951 var programTimeEpoch = programDateTime.getTime();
3952
3953 return (programTimeEpoch - segmentTimeEpoch) / 1000;
3954 };
3955
3956 /**
3957 * Checks that all segments in this playlist have programDateTime tags.
3958 *
3959 * @param {Object} playlist A playlist object
3960 */
3961 var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
3962 if (!playlist.segments || playlist.segments.length === 0) {
3963 return false;
3964 }
3965
3966 for (var i = 0; i < playlist.segments.length; i++) {
3967 var segment = playlist.segments[i];
3968
3969 if (!segment.dateTimeObject) {
3970 return false;
3971 }
3972 }
3973
3974 return true;
3975 };
3976
3977 /**
3978 * Returns the programTime of the media given a playlist and a playerTime.
3979 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
3980 * If the segments containing the time requested have not been buffered yet, an estimate
3981 * may be returned to the callback.
3982 *
3983 * @param {Object} args
3984 * @param {Object} args.playlist A playlist object to search within
3985 * @param {Number} time A playerTime in seconds
3986 * @param {Function} callback(err, programTime)
3987 * @returns {String} err.message A detailed error message
3988 * @returns {Object} programTime
3989 * @returns {Number} programTime.mediaSeconds The streamTime in seconds
3990 * @returns {String} programTime.programDateTime The programTime as an ISO-8601 String
3991 */
3992 var getProgramTime = function getProgramTime(_ref) {
3993 var playlist = _ref.playlist,
3994 _ref$time = _ref.time,
3995 time = _ref$time === undefined ? undefined : _ref$time,
3996 callback = _ref.callback;
3997
3998
3999 if (!callback) {
4000 throw new Error('getProgramTime: callback must be provided');
4001 }
4002
4003 if (!playlist || time === undefined) {
4004 return callback({
4005 message: 'getProgramTime: playlist and time must be provided'
4006 });
4007 }
4008
4009 var matchedSegment = findSegmentForPlayerTime(time, playlist);
4010
4011 if (!matchedSegment) {
4012 return callback({
4013 message: 'valid programTime was not found'
4014 });
4015 }
4016
4017 if (matchedSegment.type === 'estimate') {
4018 return callback({
4019 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
4020 seekTime: matchedSegment.estimatedStart
4021 });
4022 }
4023
4024 var programTimeObject = {
4025 mediaSeconds: time
4026 };
4027 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
4028
4029 if (programTime) {
4030 programTimeObject.programDateTime = programTime.toISOString();
4031 }
4032
4033 return callback(null, programTimeObject);
4034 };
4035
  /**
   * Seeks in the player to a time that matches the given programTime ISO-8601 string.
   *
   * @param {Object} args
   * @param {String} args.programTime A programTime to seek to as an ISO-8601 String
   * @param {Object} args.playlist A playlist to look within
   * @param {Number} args.retryCount The number of times to try for an accurate seek. Default is 2.
   * @param {Function} args.seekTo A method to perform a seek
   * @param {Boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
   * @param {Object} args.tech The tech to seek on
   * @param {Function} args.callback(err, newTime) A callback to return the new time to
   * @returns {String} err.message A detailed error message
   * @returns {Number} newTime The exact time that was seeked to in seconds
   */
  var seekToProgramTime = function seekToProgramTime(_ref2) {
    var programTime = _ref2.programTime,
        playlist = _ref2.playlist,
        _ref2$retryCount = _ref2.retryCount,
        retryCount = _ref2$retryCount === undefined ? 2 : _ref2$retryCount,
        seekTo = _ref2.seekTo,
        _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
        pauseAfterSeek = _ref2$pauseAfterSeek === undefined ? true : _ref2$pauseAfterSeek,
        tech = _ref2.tech,
        callback = _ref2.callback;


    if (!callback) {
      throw new Error('seekToProgramTime: callback must be provided');
    }

    // required arguments; everything else has a usable default
    if (typeof programTime === 'undefined' || !playlist || !seekTo) {
      return callback({
        message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
      });
    }

    if (!playlist.endList && !tech.hasStarted_) {
      return callback({
        message: 'player must be playing a live stream to start buffering'
      });
    }

    // the mapping from program time to player time requires every segment
    // to carry a programDateTime tag
    if (!verifyProgramDateTimeTags(playlist)) {
      return callback({
        message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
      });
    }

    var matchedSegment = findSegmentForProgramTime(programTime, playlist);

    // no match
    if (!matchedSegment) {
      return callback({
        message: programTime + ' was not found in the stream'
      });
    }

    var segment = matchedSegment.segment;
    // seconds between the segment's programDateTime anchor and the
    // requested program time
    var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);

    if (matchedSegment.type === 'estimate') {
      // we've run out of retries
      if (retryCount === 0) {
        return callback({
          message: programTime + ' is not buffered yet. Try again'
        });
      }

      seekTo(matchedSegment.estimatedStart + mediaOffset);

      // after the seek completes the segment should be buffered and
      // transmuxed, so retry with one fewer attempt remaining
      tech.one('seeked', function () {
        seekToProgramTime({
          programTime: programTime,
          playlist: playlist,
          retryCount: retryCount - 1,
          seekTo: seekTo,
          pauseAfterSeek: pauseAfterSeek,
          tech: tech,
          callback: callback
        });
      });

      return;
    }

    // Since the segment.start value is determined from the buffered end or ending time
    // of the prior segment, the seekToTime doesn't need to account for any transmuxer
    // modifications.
    var seekToTime = segment.start + mediaOffset;
    var seekedCallback = function seekedCallback() {
      return callback(null, tech.currentTime());
    };

    // listen for seeked event
    tech.one('seeked', seekedCallback);
    // pause before seeking as video.js will restore this state
    if (pauseAfterSeek) {
      tech.pause();
    }
    seekTo(seekToTime);
  };
4137
  /**
   * ranges
   *
   * Utilities for working with TimeRanges.
   *
   */

  // Fudge factor to account for TimeRanges rounding
  // (one thirtieth of a second, i.e. the duration of one frame at 30fps)
  var TIME_FUDGE_FACTOR = 1 / 30;
  // Comparisons between time values such as current time and the end of the buffered range
  // can be misleading because of precision differences or when the current media has poorly
  // aligned audio and video, which can cause values to be slightly off from what you would
  // expect. This value is what we consider to be safe to use in such comparisons to account
  // for these scenarios.
  var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
/**
 * Build a new TimeRanges object containing only the ranges from `timeRanges`
 * for which `predicate(start, end)` returns a truthy value.
 *
 * @param {TimeRanges} timeRanges - the ranges to filter (may be null/empty)
 * @param {Function} predicate - receives (start, end) for each range
 * @returns {TimeRanges} a new TimeRanges object with the matching ranges
 */
var filterRanges = function filterRanges(timeRanges, predicate) {
  var matching = [];

  if (timeRanges && timeRanges.length) {
    // collect every [start, end] pair that satisfies the predicate
    for (var idx = 0; idx < timeRanges.length; idx++) {
      var rangeStart = timeRanges.start(idx);
      var rangeEnd = timeRanges.end(idx);

      if (predicate(rangeStart, rangeEnd)) {
        matching.push([rangeStart, rangeEnd]);
      }
    }
  }

  return videojs.createTimeRanges(matching);
};
4168
4169 /**
4170 * Attempts to find the buffered TimeRange that contains the specified
4171 * time.
4172 * @param {TimeRanges} buffered - the TimeRanges object to query
4173 * @param {number} time - the time to filter on.
4174 * @returns {TimeRanges} a new TimeRanges object
4175 */
4176 var findRange = function findRange(buffered, time) {
4177 return filterRanges(buffered, function (start, end) {
4178 return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
4179 });
4180 };
4181
4182 /**
4183 * Returns the TimeRanges that begin later than the specified time.
4184 * @param {TimeRanges} timeRanges - the TimeRanges object to query
4185 * @param {number} time - the time to filter on.
4186 * @returns {TimeRanges} a new TimeRanges object.
4187 */
4188 var findNextRange = function findNextRange(timeRanges, time) {
4189 return filterRanges(timeRanges, function (start) {
4190 return start - TIME_FUDGE_FACTOR >= time;
4191 });
4192 };
4193
4194 /**
4195 * Returns gaps within a list of TimeRanges
4196 * @param {TimeRanges} buffered - the TimeRanges object
4197 * @return {TimeRanges} a TimeRanges object of gaps
4198 */
4199 var findGaps = function findGaps(buffered) {
4200 if (buffered.length < 2) {
4201 return videojs.createTimeRanges();
4202 }
4203
4204 var ranges = [];
4205
4206 for (var i = 1; i < buffered.length; i++) {
4207 var start = buffered.end(i - 1);
4208 var end = buffered.start(i);
4209
4210 ranges.push([start, end]);
4211 }
4212
4213 return videojs.createTimeRanges(ranges);
4214 };
4215
4216 /**
4217 * Gets a human readable string for a TimeRange
4218 *
4219 * @param {TimeRange} range
4220 * @returns {String} a human readable string
4221 */
4222 var printableRange = function printableRange(range) {
4223 var strArr = [];
4224
4225 if (!range || !range.length) {
4226 return '';
4227 }
4228
4229 for (var i = 0; i < range.length; i++) {
4230 strArr.push(range.start(i) + ' => ' + range.end(i));
4231 }
4232
4233 return strArr.join(', ');
4234 };
4235
4236 /**
4237 * Calculates the amount of time left in seconds until the player hits the end of the
4238 * buffer and causes a rebuffer
4239 *
4240 * @param {TimeRange} buffered
4241 * The state of the buffer
4242 * @param {Numnber} currentTime
4243 * The current time of the player
4244 * @param {Number} playbackRate
4245 * The current playback rate of the player. Defaults to 1.
4246 * @return {Number}
4247 * Time until the player has to start rebuffering in seconds.
4248 * @function timeUntilRebuffer
4249 */
4250 var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime) {
4251 var playbackRate = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 1;
4252
4253 var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
4254
4255 return (bufferedEnd - currentTime) / playbackRate;
4256 };
4257
4258 /**
4259 * Converts a TimeRanges object into an array representation
4260 * @param {TimeRanges} timeRanges
4261 * @returns {Array}
4262 */
4263 var timeRangesToArray = function timeRangesToArray(timeRanges) {
4264 var timeRangesList = [];
4265
4266 for (var i = 0; i < timeRanges.length; i++) {
4267 timeRangesList.push({
4268 start: timeRanges.start(i),
4269 end: timeRanges.end(i)
4270 });
4271 }
4272
4273 return timeRangesList;
4274 };
4275
4276 /**
4277 * @file create-text-tracks-if-necessary.js
4278 */
4279
4280 /**
4281 * Create text tracks on video.js if they exist on a segment.
4282 *
4283 * @param {Object} sourceBuffer the VSB or FSB
4284 * @param {Object} mediaSource the HTML media source
4285 * @param {Object} segment the segment that may contain the text track
4286 * @private
4287 */
4288 var createTextTracksIfNecessary = function createTextTracksIfNecessary(sourceBuffer, mediaSource, segment) {
4289 var player = mediaSource.player_;
4290
4291 // create an in-band caption track if one is present in the segment
4292 if (segment.captions && segment.captions.length) {
4293 if (!sourceBuffer.inbandTextTracks_) {
4294 sourceBuffer.inbandTextTracks_ = {};
4295 }
4296
4297 for (var trackId in segment.captionStreams) {
4298 if (!sourceBuffer.inbandTextTracks_[trackId]) {
4299 player.tech_.trigger({ type: 'usage', name: 'hls-608' });
4300 var track = player.textTracks().getTrackById(trackId);
4301
4302 if (track) {
4303 // Resuse an existing track with a CC# id because this was
4304 // very likely created by videojs-contrib-hls from information
4305 // in the m3u8 for us to use
4306 sourceBuffer.inbandTextTracks_[trackId] = track;
4307 } else {
4308 // Otherwise, create a track with the default `CC#` label and
4309 // without a language
4310 sourceBuffer.inbandTextTracks_[trackId] = player.addRemoteTextTrack({
4311 kind: 'captions',
4312 id: trackId,
4313 label: trackId
4314 }, false).track;
4315 }
4316 }
4317 }
4318 }
4319
4320 if (segment.metadata && segment.metadata.length && !sourceBuffer.metadataTrack_) {
4321 sourceBuffer.metadataTrack_ = player.addRemoteTextTrack({
4322 kind: 'metadata',
4323 label: 'Timed Metadata'
4324 }, false).track;
4325 sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType = segment.metadata.dispatchType;
4326 }
4327 };
4328
4329 /**
4330 * @file remove-cues-from-track.js
4331 */
4332
4333 /**
4334 * Remove cues from a track on video.js.
4335 *
4336 * @param {Double} start start of where we should remove the cue
4337 * @param {Double} end end of where the we should remove the cue
4338 * @param {Object} track the text track to remove the cues from
4339 * @private
4340 */
4341 var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
4342 var i = void 0;
4343 var cue = void 0;
4344
4345 if (!track) {
4346 return;
4347 }
4348
4349 if (!track.cues) {
4350 return;
4351 }
4352
4353 i = track.cues.length;
4354
4355 while (i--) {
4356 cue = track.cues[i];
4357
4358 // Remove any overlapping cue
4359 if (cue.startTime <= end && cue.endTime >= start) {
4360 track.removeCue(cue);
4361 }
4362 }
4363 };
4364
4365 /**
4366 * @file add-text-track-data.js
4367 */
4368 /**
4369 * Define properties on a cue for backwards compatability,
4370 * but warn the user that the way that they are using it
4371 * is depricated and will be removed at a later date.
4372 *
4373 * @param {Cue} cue the cue to add the properties on
4374 * @private
4375 */
4376 var deprecateOldCue = function deprecateOldCue(cue) {
4377 Object.defineProperties(cue.frame, {
4378 id: {
4379 get: function get() {
4380 videojs.log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
4381 return cue.value.key;
4382 }
4383 },
4384 value: {
4385 get: function get() {
4386 videojs.log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
4387 return cue.value.data;
4388 }
4389 },
4390 privateData: {
4391 get: function get() {
4392 videojs.log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
4393 return cue.value.data;
4394 }
4395 }
4396 });
4397 };
4398
/**
 * Clamp a video duration to a usable number: NaN and +/-Infinity map to
 * Number.MAX_VALUE; any other value passes through unchanged.
 */
var durationOfVideo = function durationOfVideo(duration) {
  if (isNaN(duration) || Math.abs(duration) === Infinity) {
    return Number.MAX_VALUE;
  }

  return duration;
};
4409 /**
4410 * Add text track data to a source handler given the captions and
4411 * metadata from the buffer.
4412 *
4413 * @param {Object} sourceHandler the virtual source buffer
4414 * @param {Array} captionArray an array of caption data
4415 * @param {Array} metadataArray an array of meta data
4416 * @private
4417 */
4418 var addTextTrackData = function addTextTrackData(sourceHandler, captionArray, metadataArray) {
4419 var Cue = window_1.WebKitDataCue || window_1.VTTCue;
4420
4421 if (captionArray) {
4422 captionArray.forEach(function (caption) {
4423 var track = caption.stream;
4424
4425 this.inbandTextTracks_[track].addCue(new Cue(caption.startTime + this.timestampOffset, caption.endTime + this.timestampOffset, caption.text));
4426 }, sourceHandler);
4427 }
4428
4429 if (metadataArray) {
4430 var videoDuration = durationOfVideo(sourceHandler.mediaSource_.duration);
4431
4432 metadataArray.forEach(function (metadata) {
4433 var time = metadata.cueTime + this.timestampOffset;
4434
4435 // if time isn't a finite number between 0 and Infinity, like NaN,
4436 // ignore this bit of metadata.
4437 // This likely occurs when you have an non-timed ID3 tag like TIT2,
4438 // which is the "Title/Songname/Content description" frame
4439 if (typeof time !== 'number' || window_1.isNaN(time) || time < 0 || !(time < Infinity)) {
4440 return;
4441 }
4442
4443 metadata.frames.forEach(function (frame) {
4444 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
4445
4446 cue.frame = frame;
4447 cue.value = frame;
4448 deprecateOldCue(cue);
4449
4450 this.metadataTrack_.addCue(cue);
4451 }, this);
4452 }, sourceHandler);
4453
4454 // Updating the metadeta cues so that
4455 // the endTime of each cue is the startTime of the next cue
4456 // the endTime of last cue is the duration of the video
4457 if (sourceHandler.metadataTrack_ && sourceHandler.metadataTrack_.cues && sourceHandler.metadataTrack_.cues.length) {
4458 var cues = sourceHandler.metadataTrack_.cues;
4459 var cuesArray = [];
4460
4461 // Create a copy of the TextTrackCueList...
4462 // ...disregarding cues with a falsey value
4463 for (var i = 0; i < cues.length; i++) {
4464 if (cues[i]) {
4465 cuesArray.push(cues[i]);
4466 }
4467 }
4468
4469 // Group cues by their startTime value
4470 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
4471 var timeSlot = obj[cue.startTime] || [];
4472
4473 timeSlot.push(cue);
4474 obj[cue.startTime] = timeSlot;
4475
4476 return obj;
4477 }, {});
4478
4479 // Sort startTimes by ascending order
4480 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
4481 return Number(a) - Number(b);
4482 });
4483
4484 // Map each cue group's endTime to the next group's startTime
4485 sortedStartTimes.forEach(function (startTime, idx) {
4486 var cueGroup = cuesGroupedByStartTime[startTime];
4487 var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration;
4488
4489 // Map each cue's endTime the next group's startTime
4490 cueGroup.forEach(function (cue) {
4491 cue.endTime = nextTime;
4492 });
4493 });
4494 }
4495 }
4496 };
4497
// Environment feature detection for the inline web-worker shim below.
var win$1 = typeof window !== 'undefined' ? window : {},
    TARGET = typeof Symbol === 'undefined' ? '__target' : Symbol(),
    SCRIPT_TYPE = 'application/javascript',
    BlobBuilder = win$1.BlobBuilder || win$1.WebKitBlobBuilder || win$1.MozBlobBuilder || win$1.MSBlobBuilder,
    // NOTE(review): `URL && URL.msURL` reads the hoisted, still-undefined
    // local `URL` binding from this same var statement, so the msURL
    // fallback can never apply — quirk inherited from the upstream shim
    URL = win$1.URL || win$1.webkitURL || URL && URL.msURL,
    Worker = win$1.Worker;
4504
4505 /**
4506 * Returns a wrapper around Web Worker code that is constructible.
4507 *
4508 * @function shimWorker
4509 *
4510 * @param { String } filename The name of the file
4511 * @param { Function } fn Function wrapping the code of the worker
4512 */
4513 function shimWorker(filename, fn) {
4514 return function ShimWorker(forceFallback) {
4515 var o = this;
4516
4517 if (!fn) {
4518 return new Worker(filename);
4519 } else if (Worker && !forceFallback) {
4520 // Convert the function's inner code to a string to construct the worker
4521 var source = fn.toString().replace(/^function.+?{/, '').slice(0, -1),
4522 objURL = createSourceObject(source);
4523
4524 this[TARGET] = new Worker(objURL);
4525 wrapTerminate(this[TARGET], objURL);
4526 return this[TARGET];
4527 } else {
4528 var selfShim = {
4529 postMessage: function postMessage(m) {
4530 if (o.onmessage) {
4531 setTimeout(function () {
4532 o.onmessage({ data: m, target: selfShim });
4533 });
4534 }
4535 }
4536 };
4537
4538 fn.call(selfShim);
4539 this.postMessage = function (m) {
4540 setTimeout(function () {
4541 selfShim.onmessage({ data: m, target: o });
4542 });
4543 };
4544 this.isThisThread = true;
4545 }
4546 };
4547 }
// Test Worker capabilities: construct a worker from a blob URL and post a
// transferable to it. If either step throws, null out Worker so the
// main-thread fallback path in ShimWorker is used instead.
if (Worker) {
  var testWorker,
      objURL = createSourceObject('self.onmessage = function () {}'),
      testArray = new Uint8Array(1);

  try {
    testWorker = new Worker(objURL);

    // Native browser on some Samsung devices throws for transferables, let's detect it
    testWorker.postMessage(testArray, [testArray.buffer]);
  } catch (e) {
    Worker = null;
  } finally {
    // always release the probe's blob URL and worker
    URL.revokeObjectURL(objURL);
    if (testWorker) {
      testWorker.terminate();
    }
  }
}
4568
/**
 * Create an object URL for a string of worker source code.
 *
 * Tries the standard Blob constructor first and falls back to the legacy
 * BlobBuilder API for older browsers.
 *
 * @param {String} str the worker source code
 * @returns {String} an object URL for the source
 */
function createSourceObject(str) {
  try {
    return URL.createObjectURL(new Blob([str], { type: SCRIPT_TYPE }));
  } catch (e) {
    // Legacy fallback for browsers without the Blob constructor.
    // BUGFIX: the original passed the undefined identifier `type` to
    // getBlob(), so this path always threw a ReferenceError; use the same
    // SCRIPT_TYPE MIME type as the primary path.
    var blob = new BlobBuilder();
    blob.append(str);
    return URL.createObjectURL(blob.getBlob(SCRIPT_TYPE));
  }
}
4578
/**
 * Wrap a worker's terminate() so that the blob URL it was constructed from
 * is revoked when the worker is terminated.
 *
 * @param {Worker} worker the worker to wrap
 * @param {String} objURL the object URL the worker was created from
 */
function wrapTerminate(worker, objURL) {
  // nothing to wrap without both a worker and a URL to revoke
  if (!worker || !objURL) {
    return;
  }

  var nativeTerminate = worker.terminate;

  worker.objURL = objURL;
  worker.terminate = function () {
    if (worker.objURL) {
      URL.revokeObjectURL(worker.objURL);
    }
    nativeTerminate.call(worker);
  };
}
4588
4589 var TransmuxWorker = new shimWorker("./transmuxer-worker.worker.js", function (window, document) {
4590 var self = this;
4591 var transmuxerWorker = function () {
4592
4593 /**
4594 * mux.js
4595 *
4596 * Copyright (c) Brightcove
4597 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4598 *
4599 * A lightweight readable stream implemention that handles event dispatching.
4600 * Objects that inherit from streams should call init in their constructors.
4601 */
4602
var Stream = function Stream() {
  this.init = function () {
    var listeners = {};
    /**
     * Add a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} the callback to be invoked when an event of
     * the specified type occurs
     */
    this.on = function (type, listener) {
      if (!listeners[type]) {
        listeners[type] = [];
      }
      listeners[type] = listeners[type].concat(listener);
    };
    /**
     * Remove a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} a function previously registered for this
     * type of event through `on`
     */
    this.off = function (type, listener) {
      if (!listeners[type]) {
        return false;
      }

      var position = listeners[type].indexOf(listener);

      // copy-on-write so a handler list currently being dispatched is
      // never mutated out from under the dispatch loop
      listeners[type] = listeners[type].slice();
      // NOTE(review): when the listener was never registered, position is -1
      // and splice(-1, 1) removes the *last* listener instead — quirk
      // preserved from the original implementation
      listeners[type].splice(position, 1);
      return position > -1;
    };
    /**
     * Trigger an event of the specified type on this stream. Any additional
     * arguments to this function are passed as parameters to event listeners.
     * @param type {string} the event name
     */
    this.trigger = function (type) {
      var callbacks = listeners[type];

      if (!callbacks) {
        return;
      }

      if (arguments.length === 2) {
        // fast path: a single payload argument avoids building an args array
        var total = callbacks.length;

        for (var j = 0; j < total; ++j) {
          callbacks[j].call(this, arguments[1]);
        }
      } else {
        var extraArgs = [];

        for (var k = 1; k < arguments.length; ++k) {
          extraArgs.push(arguments[k]);
        }

        var count = callbacks.length;

        for (var m = 0; m < count; ++m) {
          callbacks[m].apply(this, extraArgs);
        }
      }
    };
    /**
     * Destroys the stream and cleans up.
     */
    this.dispose = function () {
      listeners = {};
    };
  };
};

/**
 * Forwards all `data` events on this stream to the destination stream. The
 * destination stream should provide a method `push` to receive the data
 * events as they arrive.
 * @param destination {stream} the stream that will receive all `data` events
 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
 */
Stream.prototype.pipe = function (destination) {
  // event name on this stream -> method to invoke on the destination
  var forwarding = {
    data: 'push',
    done: 'flush',
    partialdone: 'partialFlush',
    endedtimeline: 'endTimeline',
    reset: 'reset'
  };

  Object.keys(forwarding).forEach(function (event) {
    this.on(event, function (payload) {
      destination[forwarding[event]](payload);
    });
  }, this);

  return destination;
};

// Default stream functions that are expected to be overridden to perform
// actual work. These are provided by the prototype as a sort of no-op
// implementation so that we don't have to check for their existence in the
// `pipe` function above. Each default simply re-emits its matching event.
[['push', 'data'], ['flush', 'done'], ['partialFlush', 'partialdone'], ['endTimeline', 'endedtimeline'], ['reset', 'reset']].forEach(function (pair) {
  Stream.prototype[pair[0]] = function (flushSource) {
    this.trigger(pair[1], flushSource);
  };
});

var stream = Stream;
4733
4734 /**
4735 * mux.js
4736 *
4737 * Copyright (c) Brightcove
4738 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4739 *
4740 * Functions that generate fragmented MP4s suitable for use with Media
4741 * Source Extensions.
4742 */
4743
4744 var UINT32_MAX = Math.pow(2, 32) - 1;
4745
4746 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS;
4747
// pre-calculate constants: fourcc byte sequences for every box type and the
// fixed-content boxes that never change between fragments
(function () {
  var i;
  types = {
    avc1: [], // codingname
    avcC: [],
    btrt: [],
    dinf: [],
    dref: [],
    esds: [],
    ftyp: [],
    hdlr: [],
    mdat: [],
    mdhd: [],
    mdia: [],
    mfhd: [],
    minf: [],
    moof: [],
    moov: [],
    mp4a: [], // codingname
    mvex: [],
    mvhd: [],
    pasp: [],
    sdtp: [],
    smhd: [],
    stbl: [],
    stco: [],
    stsc: [],
    stsd: [],
    stsz: [],
    stts: [],
    styp: [],
    tfdt: [],
    tfhd: [],
    traf: [],
    trak: [],
    trun: [],
    trex: [],
    tkhd: [],
    vmhd: []
  };

  // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
  // don't throw an error
  if (typeof Uint8Array === 'undefined') {
    return;
  }

  // replace each placeholder with the 4 ASCII char codes of its box name
  for (i in types) {
    if (types.hasOwnProperty(i)) {
      types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
    }
  }

  MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
  AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
  MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
  VIDEO_HDLR = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00, // pre_defined
  0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
  ]);
  AUDIO_HDLR = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00, // pre_defined
  0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
  ]);
  HDLR_TYPES = {
    video: VIDEO_HDLR,
    audio: AUDIO_HDLR
  };
  DREF = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x01, // entry_count
  0x00, 0x00, 0x00, 0x0c, // entry_size
  0x75, 0x72, 0x6c, 0x20, // 'url' type
  0x00, // version 0
  0x00, 0x00, 0x01 // entry_flags
  ]);
  SMHD = new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, // balance, 0 means centered
  0x00, 0x00 // reserved
  ]);
  STCO = new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00 // entry_count
  ]);
  // stsc/stts share stco's shape: full box header plus a zero entry_count
  STSC = STCO;
  STSZ = new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00, // sample_size
  0x00, 0x00, 0x00, 0x00 // sample_count
  ]);
  STTS = STCO;
  VMHD = new Uint8Array([0x00, // version
  0x00, 0x00, 0x01, // flags
  0x00, 0x00, // graphicsmode
  0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
  ]);
})();
4857
/**
 * Serialize an MP4 box: a 4-byte big-endian size, the 4-byte type, then the
 * concatenation of every payload argument.
 *
 * @param {Array} type 4 byte-values identifying the box (from `types`)
 * @param {...Uint8Array} payload zero or more byte arrays to embed
 * @returns {Uint8Array} the completed box
 */
box = function box(type) {
  var contents = [];
  var idx;

  // gather every payload argument after the type
  for (idx = 1; idx < arguments.length; idx++) {
    contents.push(arguments[idx]);
  }

  // total size: 8 header bytes (size + type) plus every payload
  var totalSize = 8;

  for (idx = 0; idx < contents.length; idx++) {
    totalSize += contents[idx].byteLength;
  }

  var result = new Uint8Array(totalSize);
  var view = new DataView(result.buffer, result.byteOffset, result.byteLength);

  view.setUint32(0, result.byteLength);
  result.set(type, 4);

  // copy the payloads into the result, back to back, after the header
  var offset = 8;

  for (idx = 0; idx < contents.length; idx++) {
    result.set(contents[idx], offset);
    offset += contents[idx].byteLength;
  }

  return result;
};
4887
// data information (dinf) box wrapping the constant data reference (dref) entry
dinf = function dinf() {
  return box(types.dinf, box(types.dref, DREF));
};
4891
// elementary stream descriptor (esds) box carrying the AAC decoder
// configuration (AudioSpecificConfig) for an audio track
esds = function esds(track) {
  return box(types.esds, new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags

  // ES_Descriptor
  0x03, // tag, ES_DescrTag
  0x19, // length
  0x00, 0x00, // ES_ID
  0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority

  // DecoderConfigDescriptor
  0x04, // tag, DecoderConfigDescrTag
  0x11, // length
  0x40, // object type
  0x15, // streamType
  0x00, 0x06, 0x00, // bufferSizeDB
  0x00, 0x00, 0xda, 0xc0, // maxBitrate
  0x00, 0x00, 0xda, 0xc0, // avgBitrate

  // DecoderSpecificInfo
  0x05, // tag, DecoderSpecificInfoTag
  0x02, // length
  // ISO/IEC 14496-3, AudioSpecificConfig
  // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
  track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
  ]));
};
4919
// file type (ftyp) box: 'isom' major brand with 'avc1' compatibility
ftyp = function ftyp() {
  return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
};

// handler reference (hdlr) box for a track type ('video' or 'audio')
hdlr = function hdlr(type) {
  return box(types.hdlr, HDLR_TYPES[type]);
};
// media data (mdat) box wrapping the raw sample bytes
mdat = function mdat(data) {
  return box(types.mdat, data);
};
// media header (mdhd) box: timescale, duration and language for one track
mdhd = function mdhd(track) {
  var result = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x02, // creation_time
  0x00, 0x00, 0x00, 0x03, // modification_time
  0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second

  track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
  0x55, 0xc4, // 'und' language (undetermined)
  0x00, 0x00]);

  // Use the sample rate from the track metadata, when it is
  // defined. The sample rate can be parsed out of an ADTS header, for
  // instance. (Overwrites the timescale bytes at offsets 12-15.)
  if (track.samplerate) {
    result[12] = track.samplerate >>> 24 & 0xFF;
    result[13] = track.samplerate >>> 16 & 0xFF;
    result[14] = track.samplerate >>> 8 & 0xFF;
    result[15] = track.samplerate & 0xFF;
  }

  return box(types.mdhd, result);
};
// media (mdia) box: media header, handler reference and media information
mdia = function mdia(track) {
  return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
};
// movie fragment header (mfhd) box carrying the fragment sequence number
mfhd = function mfhd(sequenceNumber) {
  return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
  (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
  ]));
};
// media information (minf) box: vmhd for video / smhd for audio, plus the
// data information and sample table boxes
minf = function minf(track) {
  return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
};
// movie fragment (moof) box: fragment header followed by one traf per track
moof = function moof(sequenceNumber, tracks) {
  var trackFragments = [],
      i = tracks.length;
  // build traf boxes for each track fragment
  while (i--) {
    trackFragments[i] = traf(tracks[i]);
  }
  return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
};
4973 /**
4974 * Returns a movie box.
4975 * @param tracks {array} the tracks associated with this movie
4976 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
4977 */
4978 moov = function moov(tracks) {
4979 var i = tracks.length,
4980 boxes = [];
4981
4982 while (i--) {
4983 boxes[i] = trak(tracks[i]);
4984 }
4985
4986 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
4987 };
4988 mvex = function mvex(tracks) {
4989 var i = tracks.length,
4990 boxes = [];
4991
4992 while (i--) {
4993 boxes[i] = trex(tracks[i]);
4994 }
4995 return box.apply(null, [types.mvex].concat(boxes));
4996 };
4997 mvhd = function mvhd(duration) {
4998 var bytes = new Uint8Array([0x00, // version 0
4999 0x00, 0x00, 0x00, // flags
5000 0x00, 0x00, 0x00, 0x01, // creation_time
5001 0x00, 0x00, 0x00, 0x02, // modification_time
5002 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
5003 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
5004 0x00, 0x01, 0x00, 0x00, // 1.0 rate
5005 0x01, 0x00, // 1.0 volume
5006 0x00, 0x00, // reserved
5007 0x00, 0x00, 0x00, 0x00, // reserved
5008 0x00, 0x00, 0x00, 0x00, // reserved
5009 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
5010 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
5011 0xff, 0xff, 0xff, 0xff // next_track_ID
5012 ]);
5013 return box(types.mvhd, bytes);
5014 };
5015
// independent and disposable samples (sdtp) box: one byte of dependency
// flags per sample in the track
sdtp = function sdtp(track) {
  var samples = track.samples || [],
      bytes = new Uint8Array(4 + samples.length),
      flags,
      i;

  // leave the full box header (4 bytes) all zero

  // write the sample table
  for (i = 0; i < samples.length; i++) {
    flags = samples[i].flags;

    bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
  }

  return box(types.sdtp, bytes);
};

// sample table (stbl) box: sample description plus empty stts/stsc/stsz/stco
// tables (sample data lives in movie fragments instead)
stbl = function stbl(track) {
  return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
};
5037
// sample description (stsd) box and its per-codec sample entries
(function () {
  var videoSample, audioSample;

  // stsd wraps a single sample entry chosen by the track type
  stsd = function stsd(track) {

    return box(types.stsd, new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
  };

  // avc1 visual sample entry: dimensions, avcC decoder configuration
  // (with the track's SPS/PPS), bitrate info and optional pixel aspect ratio
  videoSample = function videoSample(track) {
    var sps = track.sps || [],
        pps = track.pps || [],
        sequenceParameterSets = [],
        pictureParameterSets = [],
        i,
        avc1Box;

    // assemble the SPSs
    for (i = 0; i < sps.length; i++) {
      sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
      sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
      sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
    }

    // assemble the PPSs
    for (i = 0; i < pps.length; i++) {
      pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
      pictureParameterSets.push(pps[i].byteLength & 0xFF);
      pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
    }

    avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x01, // data_reference_index
    0x00, 0x00, // pre_defined
    0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
    (track.width & 0xff00) >> 8, track.width & 0xff, // width
    (track.height & 0xff00) >> 8, track.height & 0xff, // height
    0x00, 0x48, 0x00, 0x00, // horizresolution
    0x00, 0x48, 0x00, 0x00, // vertresolution
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x01, // frame_count
    0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
    0x00, 0x18, // depth = 24
    0x11, 0x11 // pre_defined = -1
    ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
    track.profileIdc, // AVCProfileIndication
    track.profileCompatibility, // profile_compatibility
    track.levelIdc, // AVCLevelIndication
    0xff // lengthSizeMinusOne, hard-coded to 4 bytes
    ].concat([sps.length], // numOfSequenceParameterSets
    sequenceParameterSets, // "SPS"
    [pps.length], // numOfPictureParameterSets
    pictureParameterSets // "PPS"
    ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
    0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
    0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
    ]))];

    // append a pasp box when the track declares a sample aspect ratio
    if (track.sarRatio) {
      var hSpacing = track.sarRatio[0],
          vSpacing = track.sarRatio[1];

      avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
    }

    return box.apply(null, avc1Box);
  };

  // mp4a audio sample entry: channel count, sample size/rate plus the esds
  // decoder configuration
  audioSample = function audioSample(track) {
    return box(types.mp4a, new Uint8Array([

    // SampleEntry, ISO/IEC 14496-12
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x01, // data_reference_index

    // AudioSampleEntry, ISO/IEC 14496-12
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount

    (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
    0x00, 0x00, // pre_defined
    0x00, 0x00, // reserved

    (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16

    // MP4AudioSampleEntry, ISO/IEC 14496-14
    ]), esds(track));
  };
})();
5130
// track header (tkhd) box: track id, duration and presentation dimensions
tkhd = function tkhd(track) {
  var result = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x07, // flags
  0x00, 0x00, 0x00, 0x00, // creation_time
  0x00, 0x00, 0x00, 0x00, // modification_time
  (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
  0x00, 0x00, 0x00, 0x00, // reserved
  (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, // layer
  0x00, 0x00, // alternate_group
  0x01, 0x00, // non-audio track volume
  0x00, 0x00, // reserved
  0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
  (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
  (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
  ]);

  return box(types.tkhd, result);
};
5151
5152 /**
5153 * Generate a track fragment (traf) box. A traf box collects metadata
5154 * about tracks in a movie fragment (moof) box.
5155 */
  traf = function traf(track) {
    var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;

    trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x3a, // flags
    (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
    0x00, 0x00, 0x00, 0x01, // sample_description_index
    0x00, 0x00, 0x00, 0x00, // default_sample_duration
    0x00, 0x00, 0x00, 0x00, // default_sample_size
    0x00, 0x00, 0x00, 0x00 // default_sample_flags
    ]));

    // split the 64-bit baseMediaDecodeTime into two 32-bit words so it can
    // be written into the version-1 (64-bit) tfdt field below
    upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
    lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));

    trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
    0x00, 0x00, 0x00, // flags
    // baseMediaDecodeTime
    upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF]));

    // the data offset specifies the number of bytes from the start of
    // the containing moof to the first payload byte of the associated
    // mdat
    dataOffset = 32 + // tfhd
    20 + // tfdt
    8 + // traf header
    16 + // mfhd
    8 + // moof header
    8; // mdat header

    // audio tracks require less metadata
    if (track.type === 'audio') {
      trackFragmentRun = trun(track, dataOffset);
      return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
    }

    // video tracks should contain an independent and disposable samples
    // box (sdtp)
    // generate one and adjust offsets to match
    sampleDependencyTable = sdtp(track);
    trackFragmentRun = trun(track, sampleDependencyTable.length + dataOffset);
    return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
  };
5199
5200 /**
5201 * Generate a track box.
5202 * @param track {object} a track definition
5203 * @return {Uint8Array} the track box
5204 */
5205 trak = function trak(track) {
5206 track.duration = track.duration || 0xffffffff;
5207 return box(types.trak, tkhd(track), mdia(track));
5208 };
5209
  /**
   * Generate a track extends (trex) box carrying the default sample
   * values for the given track.
   */
  trex = function trex(track) {
    var result = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
    0x00, 0x00, 0x00, 0x01, // default_sample_description_index
    0x00, 0x00, 0x00, 0x00, // default_sample_duration
    0x00, 0x00, 0x00, 0x00, // default_sample_size
    0x00, 0x01, 0x00, 0x01 // default_sample_flags
    ]);
    // the last two bytes of default_sample_flags is the sample
    // degradation priority, a hint about the importance of this sample
    // relative to others. Lower the degradation priority for all sample
    // types other than video.
    if (track.type !== 'video') {
      result[result.length - 1] = 0x00;
    }

    return box(types.trex, result);
  };
5229
5230 (function () {
5231 var audioTrun, videoTrun, trunHeader;
5232
5233 // This method assumes all samples are uniform. That is, if a
5234 // duration is present for the first sample, it will be present for
5235 // all subsequent samples.
5236 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
5237 trunHeader = function trunHeader(samples, offset) {
5238 var durationPresent = 0,
5239 sizePresent = 0,
5240 flagsPresent = 0,
5241 compositionTimeOffset = 0;
5242
5243 // trun flag constants
5244 if (samples.length) {
5245 if (samples[0].duration !== undefined) {
5246 durationPresent = 0x1;
5247 }
5248 if (samples[0].size !== undefined) {
5249 sizePresent = 0x2;
5250 }
5251 if (samples[0].flags !== undefined) {
5252 flagsPresent = 0x4;
5253 }
5254 if (samples[0].compositionTimeOffset !== undefined) {
5255 compositionTimeOffset = 0x8;
5256 }
5257 }
5258
5259 return [0x00, // version 0
5260 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
5261 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
5262 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
5263 ];
5264 };
5265
  /**
   * Generate a track run (trun) box for a video track: the trun header
   * followed by 16 bytes (duration, size, flags, composition time offset)
   * per sample.
   */
  videoTrun = function videoTrun(track, offset) {
    var bytes, samples, sample, i;

    samples = track.samples || [];
    // the data offset must skip the trun atom itself: 8 byte box header +
    // 12 byte trun header + 16 bytes per sample
    offset += 8 + 12 + 16 * samples.length;

    bytes = trunHeader(samples, offset);

    for (i = 0; i < samples.length; i++) {
      sample = samples[i];
      // NOTE(review): `degradationPriority & 0xF0 << 8` parses as
      // `& 0xF000` (shift binds tighter than &), and Uint8Array keeps only
      // the low 8 bits, so this byte is always 0 -- the high bits of the
      // degradation priority are effectively dropped. This matches
      // upstream mux.js; confirm intent before changing the emitted bytes.
      bytes = bytes.concat([(sample.duration & 0xFF000000) >>> 24, (sample.duration & 0xFF0000) >>> 16, (sample.duration & 0xFF00) >>> 8, sample.duration & 0xFF, // sample_duration
      (sample.size & 0xFF000000) >>> 24, (sample.size & 0xFF0000) >>> 16, (sample.size & 0xFF00) >>> 8, sample.size & 0xFF, // sample_size
      sample.flags.isLeading << 2 | sample.flags.dependsOn, sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample, sample.flags.degradationPriority & 0xF0 << 8, sample.flags.degradationPriority & 0x0F, // sample_flags
      (sample.compositionTimeOffset & 0xFF000000) >>> 24, (sample.compositionTimeOffset & 0xFF0000) >>> 16, (sample.compositionTimeOffset & 0xFF00) >>> 8, sample.compositionTimeOffset & 0xFF // sample_composition_time_offset
      ]);
    }
    return box(types.trun, new Uint8Array(bytes));
  };
5284
5285 audioTrun = function audioTrun(track, offset) {
5286 var bytes, samples, sample, i;
5287
5288 samples = track.samples || [];
5289 offset += 8 + 12 + 8 * samples.length;
5290
5291 bytes = trunHeader(samples, offset);
5292
5293 for (i = 0; i < samples.length; i++) {
5294 sample = samples[i];
5295 bytes = bytes.concat([(sample.duration & 0xFF000000) >>> 24, (sample.duration & 0xFF0000) >>> 16, (sample.duration & 0xFF00) >>> 8, sample.duration & 0xFF, // sample_duration
5296 (sample.size & 0xFF000000) >>> 24, (sample.size & 0xFF0000) >>> 16, (sample.size & 0xFF00) >>> 8, sample.size & 0xFF]); // sample_size
5297 }
5298
5299 return box(types.trun, new Uint8Array(bytes));
5300 };
5301
5302 trun = function trun(track, offset) {
5303 if (track.type === 'audio') {
5304 return audioTrun(track, offset);
5305 }
5306
5307 return videoTrun(track, offset);
5308 };
5309 })();
5310
  // Public surface of the mp4 generator: the box builders plus a helper
  // that assembles a complete initialization segment.
  var mp4Generator = {
    ftyp: ftyp,
    mdat: mdat,
    moof: moof,
    moov: moov,
    /**
     * Build an initialization segment (ftyp followed by moov) for the
     * given tracks.
     * @param tracks {array} track definitions, passed through to moov()
     * @return {Uint8Array} the concatenated init segment bytes
     */
    initSegment: function initSegment(tracks) {
      var fileType = ftyp(),
          movie = moov(tracks),
          result;

      result = new Uint8Array(fileType.byteLength + movie.byteLength);
      result.set(fileType);
      result.set(movie, fileType.byteLength);
      return result;
    }
  };
5327
5328 /**
5329 * mux.js
5330 *
5331 * Copyright (c) Brightcove
5332 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5333 */
5334 // Convert an array of nal units into an array of frames with each frame being
5335 // composed of the nal units that make up that frame
5336 // Also keep track of cummulative data about the frame from the nal units such
5337 // as the frame duration, starting pts, etc.
5338 var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
5339 var i,
5340 currentNal,
5341 currentFrame = [],
5342 frames = [];
5343
5344 // TODO added for LHLS, make sure this is OK
5345 frames.byteLength = 0;
5346 frames.nalCount = 0;
5347 frames.duration = 0;
5348
5349 currentFrame.byteLength = 0;
5350
5351 for (i = 0; i < nalUnits.length; i++) {
5352 currentNal = nalUnits[i];
5353
5354 // Split on 'aud'-type nal units
5355 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
5356 // Since the very first nal unit is expected to be an AUD
5357 // only push to the frames array when currentFrame is not empty
5358 if (currentFrame.length) {
5359 currentFrame.duration = currentNal.dts - currentFrame.dts;
5360 // TODO added for LHLS, make sure this is OK
5361 frames.byteLength += currentFrame.byteLength;
5362 frames.nalCount += currentFrame.length;
5363 frames.duration += currentFrame.duration;
5364 frames.push(currentFrame);
5365 }
5366 currentFrame = [currentNal];
5367 currentFrame.byteLength = currentNal.data.byteLength;
5368 currentFrame.pts = currentNal.pts;
5369 currentFrame.dts = currentNal.dts;
5370 } else {
5371 // Specifically flag key frames for ease of use later
5372 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
5373 currentFrame.keyFrame = true;
5374 }
5375 currentFrame.duration = currentNal.dts - currentFrame.dts;
5376 currentFrame.byteLength += currentNal.data.byteLength;
5377 currentFrame.push(currentNal);
5378 }
5379 }
5380
5381 // For the last frame, use the duration of the previous frame if we
5382 // have nothing better to go on
5383 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
5384 currentFrame.duration = frames[frames.length - 1].duration;
5385 }
5386
5387 // Push the final frame
5388 // TODO added for LHLS, make sure this is OK
5389 frames.byteLength += currentFrame.byteLength;
5390 frames.nalCount += currentFrame.length;
5391 frames.duration += currentFrame.duration;
5392
5393 frames.push(currentFrame);
5394 return frames;
5395 };
5396
5397 // Convert an array of frames into an array of Gop with each Gop being composed
5398 // of the frames that make up that Gop
5399 // Also keep track of cummulative data about the Gop from the frames such as the
5400 // Gop duration, starting pts, etc.
5401 var groupFramesIntoGops = function groupFramesIntoGops(frames) {
5402 var i,
5403 currentFrame,
5404 currentGop = [],
5405 gops = [];
5406
5407 // We must pre-set some of the values on the Gop since we
5408 // keep running totals of these values
5409 currentGop.byteLength = 0;
5410 currentGop.nalCount = 0;
5411 currentGop.duration = 0;
5412 currentGop.pts = frames[0].pts;
5413 currentGop.dts = frames[0].dts;
5414
5415 // store some metadata about all the Gops
5416 gops.byteLength = 0;
5417 gops.nalCount = 0;
5418 gops.duration = 0;
5419 gops.pts = frames[0].pts;
5420 gops.dts = frames[0].dts;
5421
5422 for (i = 0; i < frames.length; i++) {
5423 currentFrame = frames[i];
5424
5425 if (currentFrame.keyFrame) {
5426 // Since the very first frame is expected to be an keyframe
5427 // only push to the gops array when currentGop is not empty
5428 if (currentGop.length) {
5429 gops.push(currentGop);
5430 gops.byteLength += currentGop.byteLength;
5431 gops.nalCount += currentGop.nalCount;
5432 gops.duration += currentGop.duration;
5433 }
5434
5435 currentGop = [currentFrame];
5436 currentGop.nalCount = currentFrame.length;
5437 currentGop.byteLength = currentFrame.byteLength;
5438 currentGop.pts = currentFrame.pts;
5439 currentGop.dts = currentFrame.dts;
5440 currentGop.duration = currentFrame.duration;
5441 } else {
5442 currentGop.duration += currentFrame.duration;
5443 currentGop.nalCount += currentFrame.length;
5444 currentGop.byteLength += currentFrame.byteLength;
5445 currentGop.push(currentFrame);
5446 }
5447 }
5448
5449 if (gops.length && currentGop.duration <= 0) {
5450 currentGop.duration = gops[gops.length - 1].duration;
5451 }
5452 gops.byteLength += currentGop.byteLength;
5453 gops.nalCount += currentGop.nalCount;
5454 gops.duration += currentGop.duration;
5455
5456 // push the final Gop
5457 gops.push(currentGop);
5458 return gops;
5459 };
5460
5461 /*
5462 * Search for the first keyframe in the GOPs and throw away all frames
5463 * until that keyframe. Then extend the duration of the pulled keyframe
5464 * and pull the PTS and DTS of the keyframe so that it covers the time
5465 * range of the frames that were disposed.
5466 *
5467 * @param {Array} gops video GOPs
5468 * @returns {Array} modified video GOPs
5469 */
5470 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
5471 var currentGop;
5472
5473 if (!gops[0][0].keyFrame && gops.length > 1) {
5474 // Remove the first GOP
5475 currentGop = gops.shift();
5476
5477 gops.byteLength -= currentGop.byteLength;
5478 gops.nalCount -= currentGop.nalCount;
5479
5480 // Extend the first frame of what is now the
5481 // first gop to cover the time period of the
5482 // frames we just removed
5483 gops[0][0].dts = currentGop.dts;
5484 gops[0][0].pts = currentGop.pts;
5485 gops[0][0].duration += currentGop.duration;
5486 }
5487
5488 return gops;
5489 };
5490
5491 /**
5492 * Default sample object
5493 * see ISO/IEC 14496-12:2012, section 8.6.4.3
5494 */
5495 var createDefaultSample = function createDefaultSample() {
5496 return {
5497 size: 0,
5498 flags: {
5499 isLeading: 0,
5500 dependsOn: 1,
5501 isDependedOn: 0,
5502 hasRedundancy: 0,
5503 degradationPriority: 0,
5504 isNonSyncSample: 1
5505 }
5506 };
5507 };
5508
5509 /*
5510 * Collates information from a video frame into an object for eventual
5511 * entry into an MP4 sample table.
5512 *
5513 * @param {Object} frame the video frame
5514 * @param {Number} dataOffset the byte offset to position the sample
5515 * @return {Object} object containing sample table info for a frame
5516 */
5517 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
5518 var sample = createDefaultSample();
5519
5520 sample.dataOffset = dataOffset;
5521 sample.compositionTimeOffset = frame.pts - frame.dts;
5522 sample.duration = frame.duration;
5523 sample.size = 4 * frame.length; // Space for nal unit size
5524 sample.size += frame.byteLength;
5525
5526 if (frame.keyFrame) {
5527 sample.flags.dependsOn = 2;
5528 sample.flags.isNonSyncSample = 0;
5529 }
5530
5531 return sample;
5532 };
5533
5534 // generate the track's sample table from an array of gops
5535 var generateSampleTable = function generateSampleTable(gops, baseDataOffset) {
5536 var h,
5537 i,
5538 sample,
5539 currentGop,
5540 currentFrame,
5541 dataOffset = baseDataOffset || 0,
5542 samples = [];
5543
5544 for (h = 0; h < gops.length; h++) {
5545 currentGop = gops[h];
5546
5547 for (i = 0; i < currentGop.length; i++) {
5548 currentFrame = currentGop[i];
5549
5550 sample = sampleForFrame(currentFrame, dataOffset);
5551
5552 dataOffset += sample.size;
5553
5554 samples.push(sample);
5555 }
5556 }
5557 return samples;
5558 };
5559
5560 // generate the track's raw mdat data from an array of gops
5561 var concatenateNalData = function concatenateNalData(gops) {
5562 var h,
5563 i,
5564 j,
5565 currentGop,
5566 currentFrame,
5567 currentNal,
5568 dataOffset = 0,
5569 nalsByteLength = gops.byteLength,
5570 numberOfNals = gops.nalCount,
5571 totalByteLength = nalsByteLength + 4 * numberOfNals,
5572 data = new Uint8Array(totalByteLength),
5573 view = new DataView(data.buffer);
5574
5575 // For each Gop..
5576 for (h = 0; h < gops.length; h++) {
5577 currentGop = gops[h];
5578
5579 // For each Frame..
5580 for (i = 0; i < currentGop.length; i++) {
5581 currentFrame = currentGop[i];
5582
5583 // For each NAL..
5584 for (j = 0; j < currentFrame.length; j++) {
5585 currentNal = currentFrame[j];
5586
5587 view.setUint32(dataOffset, currentNal.data.byteLength);
5588 dataOffset += 4;
5589 data.set(currentNal.data, dataOffset);
5590 dataOffset += currentNal.data.byteLength;
5591 }
5592 }
5593 }
5594 return data;
5595 };
5596
5597 // generate the track's sample table from a frame
5598 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
5599 var sample,
5600 dataOffset = baseDataOffset || 0,
5601 samples = [];
5602
5603 sample = sampleForFrame(frame, dataOffset);
5604 samples.push(sample);
5605
5606 return samples;
5607 };
5608
5609 // generate the track's raw mdat data from a frame
5610 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
5611 var i,
5612 currentNal,
5613 dataOffset = 0,
5614 nalsByteLength = frame.byteLength,
5615 numberOfNals = frame.length,
5616 totalByteLength = nalsByteLength + 4 * numberOfNals,
5617 data = new Uint8Array(totalByteLength),
5618 view = new DataView(data.buffer);
5619
5620 // For each NAL..
5621 for (i = 0; i < frame.length; i++) {
5622 currentNal = frame[i];
5623
5624 view.setUint32(dataOffset, currentNal.data.byteLength);
5625 dataOffset += 4;
5626 data.set(currentNal.data, dataOffset);
5627 dataOffset += currentNal.data.byteLength;
5628 }
5629
5630 return data;
5631 };
5632
  // Aggregate export of the h264 frame/gop helpers defined above.
  var frameUtils = {
    groupNalsIntoFrames: groupNalsIntoFrames,
    groupFramesIntoGops: groupFramesIntoGops,
    extendFirstKeyFrame: extendFirstKeyFrame,
    generateSampleTable: generateSampleTable,
    concatenateNalData: concatenateNalData,
    generateSampleTableForFrame: generateSampleTableForFrame,
    concatenateNalDataForFrame: concatenateNalDataForFrame
  };
5642
  /**
   * mux.js
   *
   * Copyright (c) Brightcove
   * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
   */
  // Byte prefixes shared by the pregenerated silent AAC frames below.
  // NOTE(review): presumably the fixed header bytes for the high- and
  // low-sample-rate frame families -- confirm against mux.js upstream.
  var highPrefix = [33, 16, 5, 32, 164, 27];
  var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
5651 var zeroFill = function zeroFill(count) {
5652 var a = [];
5653 while (count--) {
5654 a.push(0);
5655 }
5656 return a;
5657 };
5658
5659 var makeTable = function makeTable(metaTable) {
5660 return Object.keys(metaTable).reduce(function (obj, key) {
5661 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
5662 return arr.concat(part);
5663 }, []));
5664 return obj;
5665 }, {});
5666 };
5667
  // Frames-of-silence to use for filling in missing AAC frames. Keys are
  // sample rates in Hz; each value is a list of byte chunks that
  // makeTable() flattens into one complete silent frame.
  var coneOfSilence = {
    96000: [highPrefix, [227, 64], zeroFill(154), [56]],
    88200: [highPrefix, [231], zeroFill(170), [56]],
    64000: [highPrefix, [248, 192], zeroFill(240), [56]],
    48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
    44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
    32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
    24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
    16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
    12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
    11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
    8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
  };

  // sample rate -> Uint8Array holding one silent AAC frame
  var silence = makeTable(coneOfSilence);
5684
5685 /**
5686 * mux.js
5687 *
5688 * Copyright (c) Brightcove
5689 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5690 */
5691 var ONE_SECOND_IN_TS = 90000,
5692
5693 // 90kHz clock
5694 secondsToVideoTs,
5695 secondsToAudioTs,
5696 videoTsToSeconds,
5697 audioTsToSeconds,
5698 audioTsToVideoTs,
5699 videoTsToAudioTs,
5700 metadataTsToSeconds;
5701
5702 secondsToVideoTs = function secondsToVideoTs(seconds) {
5703 return seconds * ONE_SECOND_IN_TS;
5704 };
5705
5706 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
5707 return seconds * sampleRate;
5708 };
5709
5710 videoTsToSeconds = function videoTsToSeconds(timestamp) {
5711 return timestamp / ONE_SECOND_IN_TS;
5712 };
5713
5714 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
5715 return timestamp / sampleRate;
5716 };
5717
5718 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
5719 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
5720 };
5721
5722 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
5723 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
5724 };
5725
5726 /**
5727 * Adjust ID3 tag or caption timing information by the timeline pts values
5728 * (if keepOriginalTimestamps is false) and convert to seconds
5729 */
5730 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
5731 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
5732 };
5733
5734 var clock = {
5735 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,
5736 secondsToVideoTs: secondsToVideoTs,
5737 secondsToAudioTs: secondsToAudioTs,
5738 videoTsToSeconds: videoTsToSeconds,
5739 audioTsToSeconds: audioTsToSeconds,
5740 audioTsToVideoTs: audioTsToVideoTs,
5741 videoTsToAudioTs: videoTsToAudioTs,
5742 metadataTsToSeconds: metadataTsToSeconds
5743 };
5744
5745 /**
5746 * mux.js
5747 *
5748 * Copyright (c) Brightcove
5749 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5750 */
5751
5752 /**
5753 * Sum the `byteLength` properties of the data in each AAC frame
5754 */
5755 var sumFrameByteLengths = function sumFrameByteLengths(array) {
5756 var i,
5757 currentObj,
5758 sum = 0;
5759
5760 // sum the byteLength's all each nal unit in the frame
5761 for (i = 0; i < array.length; i++) {
5762 currentObj = array[i];
5763 sum += currentObj.data.byteLength;
5764 }
5765
5766 return sum;
5767 };
5768
5769 // Possibly pad (prefix) the audio track with silence if appending this track
5770 // would lead to the introduction of a gap in the audio buffer
  var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
    var baseMediaDecodeTimeTs,
        frameDuration = 0,
        audioGapDuration = 0,
        audioFillFrameCount = 0,
        audioFillDuration = 0,
        silentFrame,
        i,
        firstFrame;

    // nothing to pad
    if (!frames.length) {
      return;
    }

    // track.baseMediaDecodeTime is in audio-clock (samplerate) units;
    // convert to the shared 90kHz clock for the gap arithmetic below
    baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate);
    // determine frame clock duration based on sample rate, round up to avoid overfills
    frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));

    if (audioAppendStartTs && videoBaseMediaDecodeTime) {
      // insert the shortest possible amount (audio gap or audio to video gap)
      audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime);
      // number of full frames in the audio gap
      audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
      audioFillDuration = audioFillFrameCount * frameDuration;
    }

    // don't attempt to fill gaps smaller than a single frame or larger
    // than a half second
    if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
      return;
    }

    silentFrame = silence[track.samplerate];

    if (!silentFrame) {
      // we don't have a silent frame pregenerated for the sample rate, so use a frame
      // from the content instead
      silentFrame = frames[0].data;
    }

    // prepend one silent frame per missing frame, each positioned one
    // frame duration before the current head of the list
    for (i = 0; i < audioFillFrameCount; i++) {
      firstFrame = frames[0];

      frames.splice(0, 0, {
        data: silentFrame,
        dts: firstFrame.dts - frameDuration,
        pts: firstFrame.pts - frameDuration
      });
    }

    // move the track start back so the inserted silence is reflected in
    // the audio-clock baseMediaDecodeTime
    track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
  };
5823
5824 // If the audio segment extends before the earliest allowed dts
5825 // value, remove AAC frames until starts at or after the earliest
5826 // allowed DTS so that we don't end up with a negative baseMedia-
5827 // DecodeTime for the audio track
5828 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
5829 if (track.minSegmentDts >= earliestAllowedDts) {
5830 return adtsFrames;
5831 }
5832
5833 // We will need to recalculate the earliest segment Dts
5834 track.minSegmentDts = Infinity;
5835
5836 return adtsFrames.filter(function (currentFrame) {
5837 // If this is an allowed frame, keep it and record it's Dts
5838 if (currentFrame.dts >= earliestAllowedDts) {
5839 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
5840 track.minSegmentPts = track.minSegmentDts;
5841 return true;
5842 }
5843 // Otherwise, discard it
5844 return false;
5845 });
5846 };
5847
5848 // generate the track's raw mdat data from an array of frames
5849 var generateSampleTable$1 = function generateSampleTable(frames) {
5850 var i,
5851 currentFrame,
5852 samples = [];
5853
5854 for (i = 0; i < frames.length; i++) {
5855 currentFrame = frames[i];
5856 samples.push({
5857 size: currentFrame.data.byteLength,
5858 duration: 1024 // For AAC audio, all samples contain 1024 samples
5859 });
5860 }
5861 return samples;
5862 };
5863
5864 // generate the track's sample table from an array of frames
5865 var concatenateFrameData = function concatenateFrameData(frames) {
5866 var i,
5867 currentFrame,
5868 dataOffset = 0,
5869 data = new Uint8Array(sumFrameByteLengths(frames));
5870
5871 for (i = 0; i < frames.length; i++) {
5872 currentFrame = frames[i];
5873
5874 data.set(currentFrame.data, dataOffset);
5875 dataOffset += currentFrame.data.byteLength;
5876 }
5877 return data;
5878 };
5879
  // Aggregate export of the AAC frame helpers defined above.
  var audioFrameUtils = {
    prefixWithSilence: prefixWithSilence,
    trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
    generateSampleTable: generateSampleTable$1,
    concatenateFrameData: concatenateFrameData
  };
5886
  /**
   * mux.js
   *
   * Copyright (c) Brightcove
   * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
   */
  // local alias for the shared 90kHz clock rate
  var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS;
5894
5895 /**
5896 * Store information about the start and end of the track and the
5897 * duration for each frame/sample we process in order to calculate
5898 * the baseMediaDecodeTime
5899 */
5900 var collectDtsInfo = function collectDtsInfo(track, data) {
5901 if (typeof data.pts === 'number') {
5902 if (track.timelineStartInfo.pts === undefined) {
5903 track.timelineStartInfo.pts = data.pts;
5904 }
5905
5906 if (track.minSegmentPts === undefined) {
5907 track.minSegmentPts = data.pts;
5908 } else {
5909 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
5910 }
5911
5912 if (track.maxSegmentPts === undefined) {
5913 track.maxSegmentPts = data.pts;
5914 } else {
5915 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
5916 }
5917 }
5918
5919 if (typeof data.dts === 'number') {
5920 if (track.timelineStartInfo.dts === undefined) {
5921 track.timelineStartInfo.dts = data.dts;
5922 }
5923
5924 if (track.minSegmentDts === undefined) {
5925 track.minSegmentDts = data.dts;
5926 } else {
5927 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
5928 }
5929
5930 if (track.maxSegmentDts === undefined) {
5931 track.maxSegmentDts = data.dts;
5932 } else {
5933 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
5934 }
5935 }
5936 };
5937
5938 /**
5939 * Clear values used to calculate the baseMediaDecodeTime between
5940 * tracks
5941 */
5942 var clearDtsInfo = function clearDtsInfo(track) {
5943 delete track.minSegmentDts;
5944 delete track.maxSegmentDts;
5945 delete track.minSegmentPts;
5946 delete track.maxSegmentPts;
5947 };
5948
5949 /**
5950 * Calculate the track's baseMediaDecodeTime based on the earliest
5951 * DTS the transmuxer has ever seen and the minimum DTS for the
5952 * current track
5953 * @param track {object} track metadata configuration
5954 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
5955 * in the source; false to adjust the first segment to start at 0.
5956 */
5957 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
5958 var baseMediaDecodeTime,
5959 scale,
5960 minSegmentDts = track.minSegmentDts;
5961
5962 // Optionally adjust the time so the first segment starts at zero.
5963 if (!keepOriginalTimestamps) {
5964 minSegmentDts -= track.timelineStartInfo.dts;
5965 }
5966
5967 // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
5968 // we want the start of the first segment to be placed
5969 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime;
5970
5971 // Add to that the distance this segment is from the very first
5972 baseMediaDecodeTime += minSegmentDts;
5973
5974 // baseMediaDecodeTime must not become negative
5975 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
5976
5977 if (track.type === 'audio') {
5978 // Audio has a different clock equal to the sampling_rate so we need to
5979 // scale the PTS values into the clock rate of the track
5980 scale = track.samplerate / ONE_SECOND_IN_TS$1;
5981 baseMediaDecodeTime *= scale;
5982 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
5983 }
5984
5985 return baseMediaDecodeTime;
5986 };
5987
  // Aggregate export of the dts/pts bookkeeping helpers defined above.
  var trackDecodeInfo = {
    clearDtsInfo: clearDtsInfo,
    calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
    collectDtsInfo: collectDtsInfo
  };
5993
5994 /**
5995 * mux.js
5996 *
5997 * Copyright (c) Brightcove
5998 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5999 *
6000 * Reads in-band caption information from a video elementary
6001 * stream. Captions must follow the CEA-708 standard for injection
6002 * into an MPEG-2 transport streams.
6003 * @see https://en.wikipedia.org/wiki/CEA-708
6004 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
6005 */
6006
  // Supplemental enhancement information (SEI) NAL units have a
  // payload type field to indicate how they are to be
  // interpreted. CEAS-708 caption content is always transmitted with
  // payload type 0x04.

  var USER_DATA_REGISTERED_ITU_T_T35 = 4, // sei payload type carrying captions
      RBSP_TRAILING_BITS = 128; // 0x80 marks the end of the sei_rbsp
6014
6015 /**
6016 * Parse a supplemental enhancement information (SEI) NAL unit.
6017 * Stops parsing once a message of type ITU T T35 has been found.
6018 *
6019 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
6020 * @return {object} the parsed SEI payload
6021 * @see Rec. ITU-T H.264, 7.3.2.3.1
6022 */
6023 var parseSei = function parseSei(bytes) {
6024 var i = 0,
6025 result = {
6026 payloadType: -1,
6027 payloadSize: 0
6028 },
6029 payloadType = 0,
6030 payloadSize = 0;
6031
6032 // go through the sei_rbsp parsing each each individual sei_message
6033 while (i < bytes.byteLength) {
6034 // stop once we have hit the end of the sei_rbsp
6035 if (bytes[i] === RBSP_TRAILING_BITS) {
6036 break;
6037 }
6038
6039 // Parse payload type
6040 while (bytes[i] === 0xFF) {
6041 payloadType += 255;
6042 i++;
6043 }
6044 payloadType += bytes[i++];
6045
6046 // Parse payload size
6047 while (bytes[i] === 0xFF) {
6048 payloadSize += 255;
6049 i++;
6050 }
6051 payloadSize += bytes[i++];
6052
6053 // this sei_message is a 608/708 caption so save it and break
6054 // there can only ever be one caption message in a frame's sei
6055 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
6056 result.payloadType = payloadType;
6057 result.payloadSize = payloadSize;
6058 result.payload = bytes.subarray(i, i + payloadSize);
6059 break;
6060 }
6061
6062 // skip the payload and parse the next message
6063 i += payloadSize;
6064 payloadType = 0;
6065 payloadSize = 0;
6066 }
6067
6068 return result;
6069 };
6070
6071 // see ANSI/SCTE 128-1 (2013), section 8.1
6072 var parseUserData = function parseUserData(sei) {
6073 // itu_t_t35_contry_code must be 181 (United States) for
6074 // captions
6075 if (sei.payload[0] !== 181) {
6076 return null;
6077 }
6078
6079 // itu_t_t35_provider_code should be 49 (ATSC) for captions
6080 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
6081 return null;
6082 }
6083
6084 // the user_identifier should be "GA94" to indicate ATSC1 data
6085 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
6086 return null;
6087 }
6088
6089 // finally, user_data_type_code should be 0x03 for caption data
6090 if (sei.payload[7] !== 0x03) {
6091 return null;
6092 }
6093
6094 // return the user_data_type_structure and strip the trailing
6095 // marker bits
6096 return sei.payload.subarray(8, sei.payload.length - 1);
6097 };
6098
6099 // see CEA-708-D, section 4.4
6100 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
6101 var results = [],
6102 i,
6103 count,
6104 offset,
6105 data;
6106
6107 // if this is just filler, return immediately
6108 if (!(userData[0] & 0x40)) {
6109 return results;
6110 }
6111
6112 // parse out the cc_data_1 and cc_data_2 fields
6113 count = userData[0] & 0x1f;
6114 for (i = 0; i < count; i++) {
6115 offset = i * 3;
6116 data = {
6117 type: userData[offset + 2] & 0x03,
6118 pts: pts
6119 };
6120
6121 // capture cc data when cc_valid is 1
6122 if (userData[offset + 2] & 0x04) {
6123 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
6124 results.push(data);
6125 }
6126 }
6127 return results;
6128 };
6129
6130 var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
6131 var length = data.byteLength,
6132 emulationPreventionBytesPositions = [],
6133 i = 1,
6134 newLength,
6135 newData;
6136
6137 // Find all `Emulation Prevention Bytes`
6138 while (i < length - 2) {
6139 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
6140 emulationPreventionBytesPositions.push(i + 2);
6141 i += 2;
6142 } else {
6143 i++;
6144 }
6145 }
6146
6147 // If no Emulation Prevention Bytes were found just return the original
6148 // array
6149 if (emulationPreventionBytesPositions.length === 0) {
6150 return data;
6151 }
6152
6153 // Create a new array to hold the NAL unit data
6154 newLength = length - emulationPreventionBytesPositions.length;
6155 newData = new Uint8Array(newLength);
6156 var sourceIndex = 0;
6157
6158 for (i = 0; i < newLength; sourceIndex++, i++) {
6159 if (sourceIndex === emulationPreventionBytesPositions[0]) {
6160 // Skip this byte
6161 sourceIndex++;
6162 // Remove this position index
6163 emulationPreventionBytesPositions.shift();
6164 }
6165 newData[i] = data[sourceIndex];
6166 }
6167
6168 return newData;
6169 };
6170
6171 // exports
6172 var captionPacketParser = {
6173 parseSei: parseSei,
6174 parseUserData: parseUserData,
6175 parseCaptionPackets: parseCaptionPackets,
6176 discardEmulationPreventionBytes: discardEmulationPreventionBytes,
6177 USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
6178 };
6179
6180 // -----------------
6181 // Link To Transport
6182 // -----------------
6183
6184
6185 var CaptionStream = function CaptionStream() {
6186
6187 CaptionStream.prototype.init.call(this);
6188
6189 this.captionPackets_ = [];
6190
6191 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
6192 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
6193 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
6194 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
6195 ];
6196
6197 this.reset();
6198
6199 // forward data and done events from CCs to this CaptionStream
6200 this.ccStreams_.forEach(function (cc) {
6201 cc.on('data', this.trigger.bind(this, 'data'));
6202 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
6203 cc.on('done', this.trigger.bind(this, 'done'));
6204 }, this);
6205 };
6206
6207 CaptionStream.prototype = new stream();
6208 CaptionStream.prototype.push = function (event) {
6209 var sei, userData, newCaptionPackets;
6210
6211 // only examine SEI NALs
6212 if (event.nalUnitType !== 'sei_rbsp') {
6213 return;
6214 }
6215
6216 // parse the sei
6217 sei = captionPacketParser.parseSei(event.escapedRBSP);
6218
6219 // ignore everything but user_data_registered_itu_t_t35
6220 if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
6221 return;
6222 }
6223
6224 // parse out the user data payload
6225 userData = captionPacketParser.parseUserData(sei);
6226
6227 // ignore unrecognized userData
6228 if (!userData) {
6229 return;
6230 }
6231
6232 // Sometimes, the same segment # will be downloaded twice. To stop the
6233 // caption data from being processed twice, we track the latest dts we've
6234 // received and ignore everything with a dts before that. However, since
6235 // data for a specific dts can be split across packets on either side of
6236 // a segment boundary, we need to make sure we *don't* ignore the packets
6237 // from the *next* segment that have dts === this.latestDts_. By constantly
6238 // tracking the number of packets received with dts === this.latestDts_, we
6239 // know how many should be ignored once we start receiving duplicates.
6240 if (event.dts < this.latestDts_) {
6241 // We've started getting older data, so set the flag.
6242 this.ignoreNextEqualDts_ = true;
6243 return;
6244 } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
6245 this.numSameDts_--;
6246 if (!this.numSameDts_) {
6247 // We've received the last duplicate packet, time to start processing again
6248 this.ignoreNextEqualDts_ = false;
6249 }
6250 return;
6251 }
6252
6253 // parse out CC data packets and save them for later
6254 newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
6255 this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
6256 if (this.latestDts_ !== event.dts) {
6257 this.numSameDts_ = 0;
6258 }
6259 this.numSameDts_++;
6260 this.latestDts_ = event.dts;
6261 };
6262
6263 CaptionStream.prototype.flushCCStreams = function (flushType) {
6264 this.ccStreams_.forEach(function (cc) {
6265 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
6266 }, this);
6267 };
6268
6269 CaptionStream.prototype.flushStream = function (flushType) {
6270 // make sure we actually parsed captions before proceeding
6271 if (!this.captionPackets_.length) {
6272 this.flushCCStreams(flushType);
6273 return;
6274 }
6275
6276 // In Chrome, the Array#sort function is not stable so add a
6277 // presortIndex that we can use to ensure we get a stable-sort
6278 this.captionPackets_.forEach(function (elem, idx) {
6279 elem.presortIndex = idx;
6280 });
6281
6282 // sort caption byte-pairs based on their PTS values
6283 this.captionPackets_.sort(function (a, b) {
6284 if (a.pts === b.pts) {
6285 return a.presortIndex - b.presortIndex;
6286 }
6287 return a.pts - b.pts;
6288 });
6289
6290 this.captionPackets_.forEach(function (packet) {
6291 if (packet.type < 2) {
6292 // Dispatch packet to the right Cea608Stream
6293 this.dispatchCea608Packet(packet);
6294 }
6295 // this is where an 'else' would go for a dispatching packets
6296 // to a theoretical Cea708Stream that handles SERVICEn data
6297 }, this);
6298
6299 this.captionPackets_.length = 0;
6300 this.flushCCStreams(flushType);
6301 };
6302
6303 CaptionStream.prototype.flush = function () {
6304 return this.flushStream('flush');
6305 };
6306
6307 // Only called if handling partial data
6308 CaptionStream.prototype.partialFlush = function () {
6309 return this.flushStream('partialFlush');
6310 };
6311
6312 CaptionStream.prototype.reset = function () {
6313 this.latestDts_ = null;
6314 this.ignoreNextEqualDts_ = false;
6315 this.numSameDts_ = 0;
6316 this.activeCea608Channel_ = [null, null];
6317 this.ccStreams_.forEach(function (ccStream) {
6318 ccStream.reset();
6319 });
6320 };
6321
6322 // From the CEA-608 spec:
6323 /*
6324 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
6325 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
6326 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
6327 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
6328 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
6329 * to switch to captioning or Text.
6330 */
6331 // With that in mind, we ignore any data between an XDS control code and a
6332 // subsequent closed-captioning control code.
6333 CaptionStream.prototype.dispatchCea608Packet = function (packet) {
6334 // NOTE: packet.type is the CEA608 field
6335 if (this.setsTextOrXDSActive(packet)) {
6336 this.activeCea608Channel_[packet.type] = null;
6337 } else if (this.setsChannel1Active(packet)) {
6338 this.activeCea608Channel_[packet.type] = 0;
6339 } else if (this.setsChannel2Active(packet)) {
6340 this.activeCea608Channel_[packet.type] = 1;
6341 }
6342 if (this.activeCea608Channel_[packet.type] === null) {
6343 // If we haven't received anything to set the active channel, or the
6344 // packets are Text/XDS data, discard the data; we don't want jumbled
6345 // captions
6346 return;
6347 }
6348 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
6349 };
6350
6351 CaptionStream.prototype.setsChannel1Active = function (packet) {
6352 return (packet.ccData & 0x7800) === 0x1000;
6353 };
6354 CaptionStream.prototype.setsChannel2Active = function (packet) {
6355 return (packet.ccData & 0x7800) === 0x1800;
6356 };
6357 CaptionStream.prototype.setsTextOrXDSActive = function (packet) {
6358 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
6359 };
6360
  // ----------------------
  // Session to Application
  // ----------------------

  // This hash maps non-ASCII, special, and extended character codes to their
  // proper Unicode equivalent. The first keys that are only a single byte
  // are the non-standard ASCII characters, which simply map the CEA608 byte
  // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
  // character codes, but have their MSB bitmasked with 0x03 so that a lookup
  // can be performed regardless of the field and data channel on which the
  // character code was received.
  var CHARACTER_TRANSLATION = {
    // single-byte replacements for the non-standard ASCII codes
    0x2a: 0xe1, // á
    0x5c: 0xe9, // é
    0x5e: 0xed, // í
    0x5f: 0xf3, // ó
    0x60: 0xfa, // ú
    0x7b: 0xe7, // ç
    0x7c: 0xf7, // ÷
    0x7d: 0xd1, // Ñ
    0x7e: 0xf1, // ñ
    0x7f: 0x2588, // █
    // special characters (char0 masked to 0x01)
    0x0130: 0xae, // ®
    0x0131: 0xb0, // °
    0x0132: 0xbd, // ½
    0x0133: 0xbf, // ¿
    0x0134: 0x2122, // ™
    0x0135: 0xa2, // ¢
    0x0136: 0xa3, // £
    0x0137: 0x266a, // ♪
    0x0138: 0xe0, // à
    0x0139: 0xa0, //
    0x013a: 0xe8, // è
    0x013b: 0xe2, // â
    0x013c: 0xea, // ê
    0x013d: 0xee, // î
    0x013e: 0xf4, // ô
    0x013f: 0xfb, // û
    // extended characters (char0 masked to 0x02)
    0x0220: 0xc1, // Á
    0x0221: 0xc9, // É
    0x0222: 0xd3, // Ó
    0x0223: 0xda, // Ú
    0x0224: 0xdc, // Ü
    0x0225: 0xfc, // ü
    0x0226: 0x2018, // ‘
    0x0227: 0xa1, // ¡
    0x0228: 0x2a, // *
    0x0229: 0x27, // '
    0x022a: 0x2014, // —
    0x022b: 0xa9, // ©
    0x022c: 0x2120, // ℠
    0x022d: 0x2022, // •
    0x022e: 0x201c, // “
    0x022f: 0x201d, // ”
    0x0230: 0xc0, // À
    0x0231: 0xc2, // Â
    0x0232: 0xc7, // Ç
    0x0233: 0xc8, // È
    0x0234: 0xca, // Ê
    0x0235: 0xcb, // Ë
    0x0236: 0xeb, // ë
    0x0237: 0xce, // Î
    0x0238: 0xcf, // Ï
    0x0239: 0xef, // ï
    0x023a: 0xd4, // Ô
    0x023b: 0xd9, // Ù
    0x023c: 0xf9, // ù
    0x023d: 0xdb, // Û
    0x023e: 0xab, // «
    0x023f: 0xbb, // »
    // extended characters (char0 masked to 0x03)
    0x0320: 0xc3, // Ã
    0x0321: 0xe3, // ã
    0x0322: 0xcd, // Í
    0x0323: 0xcc, // Ì
    0x0324: 0xec, // ì
    0x0325: 0xd2, // Ò
    0x0326: 0xf2, // ò
    0x0327: 0xd5, // Õ
    0x0328: 0xf5, // õ
    0x0329: 0x7b, // {
    0x032a: 0x7d, // }
    0x032b: 0x5c, // \
    0x032c: 0x5e, // ^
    0x032d: 0x5f, // _
    0x032e: 0x7c, // |
    0x032f: 0x7e, // ~
    0x0330: 0xc4, // Ä
    0x0331: 0xe4, // ä
    0x0332: 0xd6, // Ö
    0x0333: 0xf6, // ö
    0x0334: 0xdf, // ß
    0x0335: 0xa5, // ¥
    0x0336: 0xa4, // ¤
    0x0337: 0x2502, // │
    0x0338: 0xc5, // Å
    0x0339: 0xe5, // å
    0x033a: 0xd8, // Ø
    0x033b: 0xf8, // ø
    0x033c: 0x250c, // ┌
    0x033d: 0x2510, // ┐
    0x033e: 0x2514, // └
    0x033f: 0x2518 // ┘
  };
6464
6465 var getCharFromCode = function getCharFromCode(code) {
6466 if (code === null) {
6467 return '';
6468 }
6469 code = CHARACTER_TRANSLATION[code] || code;
6470 return String.fromCharCode(code);
6471 };
6472
6473 // the index of the last row in a CEA-608 display buffer
6474 var BOTTOM_ROW = 14;
6475
6476 // This array is used for mapping PACs -> row #, since there's no way of
6477 // getting it through bit logic.
6478 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420];
6479
6480 // CEA-608 captions are rendered onto a 34x15 matrix of character
6481 // cells. The "bottom" row is the last element in the outer array.
6482 var createDisplayBuffer = function createDisplayBuffer() {
6483 var result = [],
6484 i = BOTTOM_ROW + 1;
6485 while (i--) {
6486 result.push('');
6487 }
6488 return result;
6489 };
6490
  /**
   * A stream that decodes CEA-608 byte pairs for one field/data-channel
   * combination (CC1-CC4) into caption text, handling pop-on, roll-up,
   * and paint-on caption modes.
   *
   * @param {Integer} field the CEA-608 field (0 or 1) this stream decodes
   * @param {Integer} dataChannel the data channel (0 or 1) within the field
   */
  var Cea608Stream = function Cea608Stream(field, dataChannel) {
    Cea608Stream.prototype.init.call(this);

    this.field_ = field || 0;
    this.dataChannel_ = dataChannel || 0;

    // e.g. field 0 / channel 0 -> 'CC1', field 1 / channel 1 -> 'CC4'
    this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);

    this.setConstants();
    this.reset();

    // Decode a single caption packet: either a control code or one or two
    // characters to append in the current caption mode.
    this.push = function (packet) {
      var data, swap, char0, char1, text;
      // remove the parity bits
      data = packet.ccData & 0x7f7f;

      // ignore duplicate control codes; the spec demands they're sent twice
      if (data === this.lastControlCode_) {
        this.lastControlCode_ = null;
        return;
      }

      // Store control codes
      if ((data & 0xf000) === 0x1000) {
        this.lastControlCode_ = data;
      } else if (data !== this.PADDING_) {
        this.lastControlCode_ = null;
      }

      char0 = data >>> 8;
      char1 = data & 0xff;

      if (data === this.PADDING_) {
        return;
      } else if (data === this.RESUME_CAPTION_LOADING_) {
        this.mode_ = 'popOn';
      } else if (data === this.END_OF_CAPTION_) {
        // If an EOC is received while in paint-on mode, the displayed caption
        // text should be swapped to non-displayed memory as if it was a pop-on
        // caption. Because of that, we should explicitly switch back to pop-on
        // mode
        this.mode_ = 'popOn';
        this.clearFormatting(packet.pts);
        // if a caption was being displayed, it's gone now
        this.flushDisplayed(packet.pts);

        // flip memory
        swap = this.displayed_;
        this.displayed_ = this.nonDisplayed_;
        this.nonDisplayed_ = swap;

        // start measuring the time to display the caption
        this.startPts_ = packet.pts;
      } else if (data === this.ROLL_UP_2_ROWS_) {
        this.rollUpRows_ = 2;
        this.setRollUp(packet.pts);
      } else if (data === this.ROLL_UP_3_ROWS_) {
        this.rollUpRows_ = 3;
        this.setRollUp(packet.pts);
      } else if (data === this.ROLL_UP_4_ROWS_) {
        this.rollUpRows_ = 4;
        this.setRollUp(packet.pts);
      } else if (data === this.CARRIAGE_RETURN_) {
        this.clearFormatting(packet.pts);
        this.flushDisplayed(packet.pts);
        this.shiftRowsUp_();
        this.startPts_ = packet.pts;
      } else if (data === this.BACKSPACE_) {
        if (this.mode_ === 'popOn') {
          this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
        } else {
          this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
        }
      } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
        this.flushDisplayed(packet.pts);
        this.displayed_ = createDisplayBuffer();
      } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
        this.nonDisplayed_ = createDisplayBuffer();
      } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
        if (this.mode_ !== 'paintOn') {
          // NOTE: This should be removed when proper caption positioning is
          // implemented
          this.flushDisplayed(packet.pts);
          this.displayed_ = createDisplayBuffer();
        }
        this.mode_ = 'paintOn';
        this.startPts_ = packet.pts;

        // Append special characters to caption text
      } else if (this.isSpecialCharacter(char0, char1)) {
        // Bitmask char0 so that we can apply character transformations
        // regardless of field and data channel.
        // Then byte-shift to the left and OR with char1 so we can pass the
        // entire character code to `getCharFromCode`.
        char0 = (char0 & 0x03) << 8;
        text = getCharFromCode(char0 | char1);
        this[this.mode_](packet.pts, text);
        this.column_++;

        // Append extended characters to caption text
      } else if (this.isExtCharacter(char0, char1)) {
        // Extended characters always follow their "non-extended" equivalents.
        // IE if a "è" is desired, you'll always receive "eè"; non-compliant
        // decoders are supposed to drop the "è", while compliant decoders
        // backspace the "e" and insert "è".

        // Delete the previous character
        if (this.mode_ === 'popOn') {
          this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
        } else {
          this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
        }

        // Bitmask char0 so that we can apply character transformations
        // regardless of field and data channel.
        // Then byte-shift to the left and OR with char1 so we can pass the
        // entire character code to `getCharFromCode`.
        char0 = (char0 & 0x03) << 8;
        text = getCharFromCode(char0 | char1);
        this[this.mode_](packet.pts, text);
        this.column_++;

        // Process mid-row codes
      } else if (this.isMidRowCode(char0, char1)) {
        // Attributes are not additive, so clear all formatting
        this.clearFormatting(packet.pts);

        // According to the standard, mid-row codes
        // should be replaced with spaces, so add one now
        this[this.mode_](packet.pts, ' ');
        this.column_++;

        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }

        if ((char1 & 0x1) === 0x1) {
          this.addFormatting(packet.pts, ['u']);
        }

        // Detect offset control codes and adjust cursor
      } else if (this.isOffsetControlCode(char0, char1)) {
        // Cursor position is set by indent PAC (see below) in 4-column
        // increments, with an additional offset code of 1-3 to reach any
        // of the 32 columns specified by CEA-608. So all we need to do
        // here is increment the column cursor by the given offset.
        this.column_ += char1 & 0x03;

        // Detect PACs (Preamble Address Codes)
      } else if (this.isPAC(char0, char1)) {

        // There's no logic for PAC -> row mapping, so we have to just
        // find the row code in an array and use its index :(
        var row = ROWS.indexOf(data & 0x1f20);

        // Configure the caption window if we're in roll-up mode
        if (this.mode_ === 'rollUp') {
          // This implies that the base row is incorrectly set.
          // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
          // of roll-up rows set.
          if (row - this.rollUpRows_ + 1 < 0) {
            row = this.rollUpRows_ - 1;
          }

          this.setRollUp(packet.pts, row);
        }

        if (row !== this.row_) {
          // formatting is only persistent for current row
          this.clearFormatting(packet.pts);
          this.row_ = row;
        }
        // All PACs can apply underline, so detect and apply
        // (All odd-numbered second bytes set underline)
        if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
          this.addFormatting(packet.pts, ['u']);
        }

        if ((data & 0x10) === 0x10) {
          // We've got an indent level code. Each successive even number
          // increments the column cursor by 4, so we can get the desired
          // column position by bit-shifting to the right (to get n/2)
          // and multiplying by 4.
          this.column_ = ((data & 0xe) >> 1) * 4;
        }

        if (this.isColorPAC(char1)) {
          // it's a color code, though we only support white, which
          // can be either normal or italicized. white italics can be
          // either 0x4e or 0x6e depending on the row, so we just
          // bitwise-and with 0xe to see if italics should be turned on
          if ((char1 & 0xe) === 0xe) {
            this.addFormatting(packet.pts, ['i']);
          }
        }

        // We have a normal character in char0, and possibly one in char1
      } else if (this.isNormalChar(char0)) {
        if (char1 === 0x00) {
          char1 = null;
        }
        text = getCharFromCode(char0);
        text += getCharFromCode(char1);
        this[this.mode_](packet.pts, text);
        this.column_ += text.length;
      } // finish data processing
    };
  };
6699 Cea608Stream.prototype = new stream();
6700 // Trigger a cue point that captures the current state of the
6701 // display buffer
6702 Cea608Stream.prototype.flushDisplayed = function (pts) {
6703 var content = this.displayed_
6704 // remove spaces from the start and end of the string
6705 .map(function (row) {
6706 try {
6707 return row.trim();
6708 } catch (e) {
6709 // Ordinarily, this shouldn't happen. However, caption
6710 // parsing errors should not throw exceptions and
6711 // break playback.
6712 // eslint-disable-next-line no-console
6713 console.error('Skipping malformed caption.');
6714 return '';
6715 }
6716 })
6717 // combine all text rows to display in one cue
6718 .join('\n')
6719 // and remove blank rows from the start and end, but not the middle
6720 .replace(/^\n+|\n+$/g, '');
6721
6722 if (content.length) {
6723 this.trigger('data', {
6724 startPts: this.startPts_,
6725 endPts: pts,
6726 text: content,
6727 stream: this.name_
6728 });
6729 }
6730 };
6731
6732 /**
6733 * Zero out the data, used for startup and on seek
6734 */
6735 Cea608Stream.prototype.reset = function () {
6736 this.mode_ = 'popOn';
6737 // When in roll-up mode, the index of the last row that will
6738 // actually display captions. If a caption is shifted to a row
6739 // with a lower index than this, it is cleared from the display
6740 // buffer
6741 this.topRow_ = 0;
6742 this.startPts_ = 0;
6743 this.displayed_ = createDisplayBuffer();
6744 this.nonDisplayed_ = createDisplayBuffer();
6745 this.lastControlCode_ = null;
6746
6747 // Track row and column for proper line-breaking and spacing
6748 this.column_ = 0;
6749 this.row_ = BOTTOM_ROW;
6750 this.rollUpRows_ = 2;
6751
6752 // This variable holds currently-applied formatting
6753 this.formatting_ = [];
6754 };
6755
6756 /**
6757 * Sets up control code and related constants for this instance
6758 */
6759 Cea608Stream.prototype.setConstants = function () {
6760 // The following attributes have these uses:
6761 // ext_ : char0 for mid-row codes, and the base for extended
6762 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
6763 // extended codes)
6764 // control_: char0 for control codes, except byte-shifted to the
6765 // left so that we can do this.control_ | CONTROL_CODE
6766 // offset_: char0 for tab offset codes
6767 //
6768 // It's also worth noting that control codes, and _only_ control codes,
6769 // differ between field 1 and field2. Field 2 control codes are always
6770 // their field 1 value plus 1. That's why there's the "| field" on the
6771 // control value.
6772 if (this.dataChannel_ === 0) {
6773 this.BASE_ = 0x10;
6774 this.EXT_ = 0x11;
6775 this.CONTROL_ = (0x14 | this.field_) << 8;
6776 this.OFFSET_ = 0x17;
6777 } else if (this.dataChannel_ === 1) {
6778 this.BASE_ = 0x18;
6779 this.EXT_ = 0x19;
6780 this.CONTROL_ = (0x1c | this.field_) << 8;
6781 this.OFFSET_ = 0x1f;
6782 }
6783
6784 // Constants for the LSByte command codes recognized by Cea608Stream. This
6785 // list is not exhaustive. For a more comprehensive listing and semantics see
6786 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
6787 // Padding
6788 this.PADDING_ = 0x0000;
6789 // Pop-on Mode
6790 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
6791 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f;
6792 // Roll-up Mode
6793 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
6794 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
6795 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
6796 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d;
6797 // paint-on mode
6798 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29;
6799 // Erasure
6800 this.BACKSPACE_ = this.CONTROL_ | 0x21;
6801 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
6802 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
6803 };
6804
6805 /**
6806 * Detects if the 2-byte packet data is a special character
6807 *
6808 * Special characters have a second byte in the range 0x30 to 0x3f,
6809 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
6810 * data channel 2).
6811 *
6812 * @param {Integer} char0 The first byte
6813 * @param {Integer} char1 The second byte
6814 * @return {Boolean} Whether the 2 bytes are an special character
6815 */
6816 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
6817 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
6818 };
6819
6820 /**
6821 * Detects if the 2-byte packet data is an extended character
6822 *
6823 * Extended characters have a second byte in the range 0x20 to 0x3f,
6824 * with the first byte being 0x12 or 0x13 (for data channel 1) or
6825 * 0x1a or 0x1b (for data channel 2).
6826 *
6827 * @param {Integer} char0 The first byte
6828 * @param {Integer} char1 The second byte
6829 * @return {Boolean} Whether the 2 bytes are an extended character
6830 */
6831 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
6832 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
6833 };
6834
6835 /**
6836 * Detects if the 2-byte packet is a mid-row code
6837 *
6838 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
6839 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
6840 * channel 2).
6841 *
6842 * @param {Integer} char0 The first byte
6843 * @param {Integer} char1 The second byte
6844 * @return {Boolean} Whether the 2 bytes are a mid-row code
6845 */
6846 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
6847 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
6848 };
6849
6850 /**
6851 * Detects if the 2-byte packet is an offset control code
6852 *
6853 * Offset control codes have a second byte in the range 0x21 to 0x23,
6854 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
6855 * data channel 2).
6856 *
6857 * @param {Integer} char0 The first byte
6858 * @param {Integer} char1 The second byte
6859 * @return {Boolean} Whether the 2 bytes are an offset control code
6860 */
6861 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
6862 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
6863 };
6864
6865 /**
6866 * Detects if the 2-byte packet is a Preamble Address Code
6867 *
6868 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
6869 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
6870 * range 0x40 to 0x7f.
6871 *
6872 * @param {Integer} char0 The first byte
6873 * @param {Integer} char1 The second byte
6874 * @return {Boolean} Whether the 2 bytes are a PAC
6875 */
6876 Cea608Stream.prototype.isPAC = function (char0, char1) {
6877 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
6878 };
6879
6880 /**
6881 * Detects if a packet's second byte is in the range of a PAC color code
6882 *
6883 * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
6884 * 0x60 to 0x6f.
6885 *
6886 * @param {Integer} char1 The second byte
6887 * @return {Boolean} Whether the byte is a color PAC
6888 */
6889 Cea608Stream.prototype.isColorPAC = function (char1) {
6890 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
6891 };
6892
6893 /**
6894 * Detects if a single byte is in the range of a normal character
6895 *
6896 * Normal text bytes are in the range 0x20 to 0x7f.
6897 *
6898 * @param {Integer} char The byte
6899 * @return {Boolean} Whether the byte is a normal character
6900 */
6901 Cea608Stream.prototype.isNormalChar = function (char) {
6902 return char >= 0x20 && char <= 0x7f;
6903 };
6904
6905 /**
6906 * Configures roll-up
6907 *
6908 * @param {Integer} pts Current PTS
6909 * @param {Integer} newBaseRow Used by PACs to slide the current window to
6910 * a new position
6911 */
6912 Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
6913 // Reset the base row to the bottom row when switching modes
6914 if (this.mode_ !== 'rollUp') {
6915 this.row_ = BOTTOM_ROW;
6916 this.mode_ = 'rollUp';
6917 // Spec says to wipe memories when switching to roll-up
6918 this.flushDisplayed(pts);
6919 this.nonDisplayed_ = createDisplayBuffer();
6920 this.displayed_ = createDisplayBuffer();
6921 }
6922
6923 if (newBaseRow !== undefined && newBaseRow !== this.row_) {
6924 // move currently displayed captions (up or down) to the new base row
6925 for (var i = 0; i < this.rollUpRows_; i++) {
6926 this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
6927 this.displayed_[this.row_ - i] = '';
6928 }
6929 }
6930
6931 if (newBaseRow === undefined) {
6932 newBaseRow = this.row_;
6933 }
6934
6935 this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
6936 };
6937
6938 // Adds the opening HTML tag for the passed character to the caption text,
6939 // and keeps track of it for later closing
6940 Cea608Stream.prototype.addFormatting = function (pts, format) {
6941 this.formatting_ = this.formatting_.concat(format);
6942 var text = format.reduce(function (text, format) {
6943 return text + '<' + format + '>';
6944 }, '');
6945 this[this.mode_](pts, text);
6946 };
6947
6948 // Adds HTML closing tags for current formatting to caption text and
6949 // clears remembered formatting
6950 Cea608Stream.prototype.clearFormatting = function (pts) {
6951 if (!this.formatting_.length) {
6952 return;
6953 }
6954 var text = this.formatting_.reverse().reduce(function (text, format) {
6955 return text + '</' + format + '>';
6956 }, '');
6957 this.formatting_ = [];
6958 this[this.mode_](pts, text);
6959 };
6960
6961 // Mode Implementations
6962 Cea608Stream.prototype.popOn = function (pts, text) {
6963 var baseRow = this.nonDisplayed_[this.row_];
6964
6965 // buffer characters
6966 baseRow += text;
6967 this.nonDisplayed_[this.row_] = baseRow;
6968 };
6969
// Roll-up captions append directly to the on-screen base row.
Cea608Stream.prototype.rollUp = function (pts, text) {
  this.displayed_[this.row_] = this.displayed_[this.row_] + text;
};
6976
// Scrolls the roll-up window: every row inside the window moves up one
// position, everything outside the window is blanked, and the base row is
// left empty for incoming text.
Cea608Stream.prototype.shiftRowsUp_ = function () {
  var row;
  // blank everything above the roll-up window
  for (row = 0; row < this.topRow_; row++) {
    this.displayed_[row] = '';
  }
  // blank everything below the base row
  for (row = this.row_ + 1; row <= BOTTOM_ROW; row++) {
    this.displayed_[row] = '';
  }
  // scroll the window contents up one row
  for (row = this.topRow_; row < this.row_; row++) {
    this.displayed_[row] = this.displayed_[row + 1];
  }
  // the base row is now free for new text
  this.displayed_[this.row_] = '';
};
6993
// Paint-on captions write straight into the display memory at the current row.
Cea608Stream.prototype.paintOn = function (pts, text) {
  this.displayed_[this.row_] = this.displayed_[this.row_] + text;
};
7000
// exports
// public surface of the caption module: the pipeline entry point
// (CaptionStream) and the per-channel CEA-608 decoder (Cea608Stream)
var captionStream = {
  CaptionStream: CaptionStream,
  Cea608Stream: Cea608Stream
};
7006
7007 /**
7008 * mux.js
7009 *
7010 * Copyright (c) Brightcove
7011 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
7012 */
7013
// MP2T stream_type values recognized by this demuxer (ISO/IEC 13818-1)
var streamTypes = {
  H264_STREAM_TYPE: 0x1B,
  ADTS_STREAM_TYPE: 0x0F,
  METADATA_STREAM_TYPE: 0x15
};

// MPEG-2 timestamps are 33 bits wide, so they wrap at 2^33
var MAX_TS = 8589934592;

// 2^32: differences larger than this are treated as rollovers, not seeks
var RO_THRESH = 4294967296;

// type label used by rollover streams that accept muxed audio and video
var TYPE_SHARED = 'shared';

/**
 * Adjust a 33-bit MPEG-2 timestamp so that it is continuous with a
 * reference timestamp, unwrapping any rollovers between the two.
 *
 * @param {number} value the timestamp to adjust
 * @param {number} reference the timestamp the result should be continuous with
 * @return {number} the adjusted timestamp
 */
var handleRollover = function handleRollover(value, reference) {
  // If the current timestamp value is greater than our reference timestamp and we detect a
  // timestamp rollover, this means the roll over is happening in the opposite direction.
  // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
  // point will be set to a small number, e.g. 1. The user then seeks backwards over the
  // rollover point. In loading this segment, the timestamp values will be very large,
  // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
  // the time stamp to be `value - 2^33`.
  var step = value > reference ? -MAX_TS : MAX_TS;

  // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
  // cause an incorrect adjustment.
  while (Math.abs(reference - value) > RO_THRESH) {
    value += step;
  }

  return value;
};
7048
/**
 * Rewrites the pts/dts values of packets flowing through the pipeline so
 * they remain continuous across 33-bit MPEG-2 timestamp rollovers. The
 * rollover window is anchored on the first dts seen (or the last dts seen
 * before a flush).
 *
 * @param {string} [type] elementary stream type this instance accepts;
 *        defaults to the "shared" type, which accepts everything
 */
var TimestampRolloverStream = function TimestampRolloverStream(type) {
  var referenceDTS;
  var lastDTS;

  TimestampRolloverStream.prototype.init.call(this);

  // The "shared" type is used in cases where a stream will contain muxed
  // video and audio. We could use `undefined` here, but having a string
  // makes debugging a little clearer.
  this.type_ = type || TYPE_SHARED;

  this.push = function (data) {
    // typed rollover streams only accept data that matches their own
    // type; "shared" streams accept everything
    var accepted = this.type_ === TYPE_SHARED || data.type === this.type_;

    if (!accepted) {
      return;
    }

    // anchor the rollover window on the first timestamp seen
    if (referenceDTS === undefined) {
      referenceDTS = data.dts;
    }

    data.dts = handleRollover(data.dts, referenceDTS);
    data.pts = handleRollover(data.pts, referenceDTS);

    lastDTS = data.dts;

    this.trigger('data', data);
  };

  this.flush = function () {
    // carry the last timestamp forward as the next segment's reference
    referenceDTS = lastDTS;
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  this.discontinuity = function () {
    referenceDTS = void 0;
    lastDTS = void 0;
  };

  this.reset = function () {
    this.discontinuity();
    this.trigger('reset');
  };
};

TimestampRolloverStream.prototype = new stream();

var timestampRolloverStream = {
  TimestampRolloverStream: TimestampRolloverStream,
  handleRollover: handleRollover
};
7106
// Builds a percent-encoded string ('%xx%yy...') for the byte range
// [start, end) so it can be decoded by the URI helpers below.
var percentEncode = function percentEncode(bytes, start, end) {
  var encoded = '';
  for (var i = start; i < end; i++) {
    encoded += '%' + ('00' + bytes[i].toString(16)).slice(-2);
  }
  return encoded;
};

// return the string representation of the specified byte range,
// interpreted as UTF-8.
var parseUtf8 = function parseUtf8(bytes, start, end) {
  return decodeURIComponent(percentEncode(bytes, start, end));
};

// return the string representation of the specified byte range,
// interpreted as ISO-8859-1.
var parseIso88591 = function parseIso88591(bytes, start, end) {
  return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
};

// ID3v2 "syncsafe" integers carry 7 bits per byte (the high bit of each
// byte is always zero); reassemble the four bytes into a 28-bit integer.
var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
  return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
};

// frame-specific ID3 parsers, keyed by four-character frame id
var tagParsers = {
  // user-defined text information frame
  TXXX: function TXXX(tag) {
    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings
      return;
    }

    for (var i = 1; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the text fields on either side of the null separator
        tag.description = parseUtf8(tag.data, 1, i);
        // do not include the null terminator in the tag value
        tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
        break;
      }
    }
    tag.data = tag.value;
  },
  // user-defined URL link frame
  WXXX: function WXXX(tag) {
    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings
      return;
    }

    for (var i = 1; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the description and URL fields
        tag.description = parseUtf8(tag.data, 1, i);
        tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
        break;
      }
    }
  },
  // "private" frame: a null-terminated ISO-8859-1 owner identifier
  // followed by opaque binary data
  PRIV: function PRIV(tag) {
    var i;

    for (i = 0; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        tag.owner = parseIso88591(tag.data, 0, i);
        break;
      }
    }
    tag.privateData = tag.data.subarray(i + 1);
    tag.data = tag.privateData;
  }
};

// assigned below: the timed-metadata pipeline stage
var _MetadataStream;
7182
// Reassembles ID3 tags from 'timed-metadata' PES packets and emits each
// complete tag as a 'data' event. Also emits a 'timestamp' event for the
// Apple transportStreamTimestamp PRIV frame used with raw AAC streams.
// options.debug enables logging of unrecognized packets; options.descriptor
// supplies the MP2T program-level descriptor bytes mixed into dispatchType.
_MetadataStream = function MetadataStream(options) {
  var settings = {
    debug: !!(options && options.debug),

    // the bytes of the program-level descriptor field in MP2T
    // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
    // program element descriptors"
    descriptor: options && options.descriptor
  },


  // the total size in bytes of the ID3 tag being parsed
  tagSize = 0,


  // tag data that is not complete enough to be parsed
  buffer = [],


  // the total number of bytes currently in the buffer
  bufferSize = 0,
      i;

  _MetadataStream.prototype.init.call(this);

  // calculate the text track in-band metadata track dispatch type
  // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
  // (the METADATA_STREAM_TYPE in hex, followed by the descriptor bytes)
  this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
  if (settings.descriptor) {
    for (i = 0; i < settings.descriptor.length; i++) {
      this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
    }
  }

  // Buffer 'timed-metadata' chunks until a whole ID3 tag has arrived,
  // then parse its frames and emit the tag as a 'data' event.
  this.push = function (chunk) {
    var tag, frameStart, frameSize, frame, i, frameHeader;
    if (chunk.type !== 'timed-metadata') {
      return;
    }

    // if data_alignment_indicator is set in the PES header,
    // we must have the start of a new ID3 tag. Assume anything
    // remaining in the buffer was malformed and throw it out
    if (chunk.dataAlignmentIndicator) {
      bufferSize = 0;
      buffer.length = 0;
    }

    // ignore events that don't look like ID3 data
    // (a fresh tag must start with the 10-byte "ID3" header)
    if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
      if (settings.debug) {
        // eslint-disable-next-line no-console
        console.log('Skipping unrecognized metadata packet');
      }
      return;
    }

    // add this chunk to the data we've collected so far

    buffer.push(chunk);
    bufferSize += chunk.data.byteLength;

    // grab the size of the entire frame from the ID3 header
    if (buffer.length === 1) {
      // the frame size is transmitted as a 28-bit integer in the
      // last four bytes of the ID3 header.
      // The most significant bit of each byte is dropped and the
      // results concatenated to recover the actual value.
      tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));

      // ID3 reports the tag size excluding the header but it's more
      // convenient for our comparisons to include it
      tagSize += 10;
    }

    // if the entire frame has not arrived, wait for more data
    if (bufferSize < tagSize) {
      return;
    }

    // collect the entire frame so it can be parsed
    // (pts/dts are taken from the first buffered chunk)
    tag = {
      data: new Uint8Array(tagSize),
      frames: [],
      pts: buffer[0].pts,
      dts: buffer[0].dts
    };
    // drain buffered chunks into the contiguous tag.data array
    for (i = 0; i < tagSize;) {
      tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
      i += buffer[0].data.byteLength;
      bufferSize -= buffer[0].data.byteLength;
      buffer.shift();
    }

    // find the start of the first frame and the end of the tag
    frameStart = 10;
    if (tag.data[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field
      frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));

      // clip any padding off the end
      tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
    }

    // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview
    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
      if (frameSize < 1) {
        // eslint-disable-next-line no-console
        return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
      }
      // the frame id is the first four bytes of the frame header
      frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);

      frame = {
        id: frameHeader,
        data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
      };
      frame.key = frame.id;
      if (tagParsers[frame.id]) {
        tagParsers[frame.id](frame);

        // handle the special PRIV frame used to indicate the start
        // time for raw AAC data
        if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
          // reassemble the 33-bit timestamp: build the top 31 bits with
          // bitwise ops, then multiply by 4 and add the low 2 bits to
          // stay within JavaScript's 32-bit bitwise-operator limit
          var d = frame.data,
              size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;

          size *= 4;
          size += d[7] & 0x03;
          frame.timeStamp = size;
          // in raw AAC, all subsequent data will be timestamped based
          // on the value of this frame
          // we couldn't have known the appropriate pts and dts before
          // parsing this ID3 tag so set those values now
          if (tag.pts === undefined && tag.dts === undefined) {
            tag.pts = frame.timeStamp;
            tag.dts = frame.timeStamp;
          }
          this.trigger('timestamp', frame);
        }
      }
      tag.frames.push(frame);

      frameStart += 10; // advance past the frame header
      frameStart += frameSize; // advance past the frame body
    } while (frameStart < tagSize);
    this.trigger('data', tag);
  };
};
_MetadataStream.prototype = new stream();

var metadataStream = _MetadataStream;
7338
var TimestampRolloverStream$1 = timestampRolloverStream.TimestampRolloverStream;

// object types (assigned below)
var _TransportPacketStream, _TransportParseStream, _ElementaryStream;

// constants
var MP2T_PACKET_LENGTH = 188,

// bytes -- every MP2T packet is exactly 188 bytes long
SYNC_BYTE = 0x47; // 'G', the first byte of every MP2T packet
7349
/**
 * Splits an incoming stream of binary data into MPEG-2 Transport
 * Stream packets.
 */
_TransportPacketStream = function TransportPacketStream() {
  // bytes carried over from the previous push, awaiting the rest of a packet
  var leftover = new Uint8Array(MP2T_PACKET_LENGTH);
  var leftoverBytes = 0;

  _TransportPacketStream.prototype.init.call(this);

  /**
   * Split a stream of data into M2TS packets
   **/
  this.push = function (bytes) {
    var combined;
    var startIndex = 0;
    var endIndex = MP2T_PACKET_LENGTH;

    // prepend any bytes carried over from the previous segment so packets
    // that straddle a push boundary can still be reassembled
    if (leftoverBytes) {
      combined = new Uint8Array(bytes.byteLength + leftoverBytes);
      combined.set(leftover.subarray(0, leftoverBytes));
      combined.set(bytes, leftoverBytes);
      leftoverBytes = 0;
    } else {
      combined = bytes;
    }

    // walk the data while at least one whole packet remains
    while (endIndex < combined.byteLength) {
      if (combined[startIndex] === SYNC_BYTE && combined[endIndex] === SYNC_BYTE) {
        // sync bytes at both packet boundaries: emit this packet and jump
        // one whole packet forward in the stream
        this.trigger('data', combined.subarray(startIndex, endIndex));
        startIndex += MP2T_PACKET_LENGTH;
        endIndex += MP2T_PACKET_LENGTH;
      } else {
        // de-synchronized: step forward one byte at a time until a pair
        // of sync bytes exactly one packet apart is found
        startIndex++;
        endIndex++;
      }
    }

    // stash any trailing partial packet; it may be completed by the data
    // in the next segment
    if (startIndex < combined.byteLength) {
      leftover.set(combined.subarray(startIndex), 0);
      leftoverBytes = combined.byteLength - startIndex;
    }
  };

  /**
   * Passes identified M2TS packets to the TransportParseStream to be parsed
   **/
  this.flush = function () {
    // If the buffer contains a whole packet when we are being flushed, emit it
    // and empty the buffer. Otherwise hold onto the data because it may be
    // important for decoding the next segment
    if (leftoverBytes === MP2T_PACKET_LENGTH && leftover[0] === SYNC_BYTE) {
      this.trigger('data', leftover);
      leftoverBytes = 0;
    }
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  this.reset = function () {
    leftoverBytes = 0;
    this.trigger('reset');
  };
};
_TransportPacketStream.prototype = new stream();
7433
/**
 * Accepts an MP2T TransportPacketStream and emits data events with parsed
 * forms of the individual transport stream packets.
 */
_TransportParseStream = function TransportParseStream() {
  var parsePsi, parsePat, parsePmt, self;
  _TransportParseStream.prototype.init.call(this);
  self = this;

  // PES packets seen before the first PMT arrives; replayed once it does
  this.packetsWaitingForPmt = [];
  this.programMapTable = undefined;

  // Dispatch a program-specific-information payload to the PAT or PMT
  // parser, skipping any pointer field first.
  parsePsi = function parsePsi(payload, psi) {
    var offset = 0;

    // PSI packets may be split into multiple sections and those
    // sections may be split into multiple packets. If a PSI
    // section starts in this packet, the payload_unit_start_indicator
    // will be true and the first byte of the payload will indicate
    // the offset from the current position to the start of the
    // section.
    if (psi.payloadUnitStartIndicator) {
      offset += payload[offset] + 1;
    }

    if (psi.type === 'pat') {
      parsePat(payload.subarray(offset), psi);
    } else {
      parsePmt(payload.subarray(offset), psi);
    }
  };

  // Extract the PMT pid from a Program Association Table section.
  parsePat = function parsePat(payload, pat) {
    pat.section_number = payload[7]; // eslint-disable-line camelcase
    pat.last_section_number = payload[8]; // eslint-disable-line camelcase

    // skip the PSI header and parse the first PMT entry
    self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
    pat.pmtPid = self.pmtPid;
  };

  /**
   * Parse out the relevant fields of a Program Map Table (PMT).
   * @param payload {Uint8Array} the PMT-specific portion of an MP2T
   * packet. The first byte in this array should be the table_id
   * field.
   * @param pmt {object} the object that should be decorated with
   * fields parsed from the PMT.
   */
  parsePmt = function parsePmt(payload, pmt) {
    var sectionLength, tableEnd, programInfoLength, offset;

    // PMTs can be sent ahead of the time when they should actually
    // take effect. We don't believe this should ever be the case
    // for HLS but we'll ignore "forward" PMT declarations if we see
    // them. Future PMT declarations have the current_next_indicator
    // set to zero.
    if (!(payload[5] & 0x01)) {
      return;
    }

    // overwrite any existing program map table
    self.programMapTable = {
      video: null,
      audio: null,
      'timed-metadata': {}
    };

    // the mapping table ends at the end of the current section
    sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
    tableEnd = 3 + sectionLength - 4;

    // to determine where the table is, we have to figure out how
    // long the program info descriptors are
    programInfoLength = (payload[10] & 0x0f) << 8 | payload[11];

    // advance the offset to the first entry in the mapping table
    offset = 12 + programInfoLength;
    while (offset < tableEnd) {
      var streamType = payload[offset];
      var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2];

      // only map a single elementary_pid for audio and video stream types
      // TODO: should this be done for metadata too? for now maintain behavior of
      // multiple metadata streams
      if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
        self.programMapTable.video = pid;
      } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
        self.programMapTable.audio = pid;
      } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
        // map pid to stream type for metadata streams
        self.programMapTable['timed-metadata'][pid] = streamType;
      }

      // move to the next table entry
      // skip past the elementary stream descriptors, if present
      offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
    }

    // record the map on the packet as well
    pmt.programMapTable = self.programMapTable;
  };

  /**
   * Deliver a new MP2T packet to the next stream in the pipeline.
   */
  this.push = function (packet) {
    var result = {},
        offset = 4;

    result.payloadUnitStartIndicator = !!(packet[1] & 0x40);

    // pid is a 13-bit field starting at the last bit of packet[1]
    result.pid = packet[1] & 0x1f;
    result.pid <<= 8;
    result.pid |= packet[2];

    // if an adaption field is present, its length is specified by the
    // fifth byte of the TS packet header. The adaptation field is
    // used to add stuffing to PES packets that don't fill a complete
    // TS packet, and to specify some forms of timing and control data
    // that we do not currently use.
    if ((packet[3] & 0x30) >>> 4 > 0x01) {
      offset += packet[offset] + 1;
    }

    // parse the rest of the packet based on the type
    // (pid 0 is always the Program Association Table)
    if (result.pid === 0) {
      result.type = 'pat';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);
    } else if (result.pid === this.pmtPid) {
      result.type = 'pmt';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);

      // if there are any packets waiting for a PMT to be found, process them now
      while (this.packetsWaitingForPmt.length) {
        this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
      }
    } else if (this.programMapTable === undefined) {
      // When we have not seen a PMT yet, defer further processing of
      // PES packets until one has been parsed
      this.packetsWaitingForPmt.push([packet, offset, result]);
    } else {
      this.processPes_(packet, offset, result);
    }
  };

  // Label a PES packet with the stream type its pid maps to in the PMT
  // and emit it downstream.
  this.processPes_ = function (packet, offset, result) {
    // set the appropriate stream type
    if (result.pid === this.programMapTable.video) {
      result.streamType = streamTypes.H264_STREAM_TYPE;
    } else if (result.pid === this.programMapTable.audio) {
      result.streamType = streamTypes.ADTS_STREAM_TYPE;
    } else {
      // if not video or audio, it is timed-metadata or unknown
      // if unknown, streamType will be undefined
      result.streamType = this.programMapTable['timed-metadata'][result.pid];
    }

    result.type = 'pes';
    result.data = packet.subarray(offset);
    this.trigger('data', result);
  };
};
_TransportParseStream.prototype = new stream();
_TransportParseStream.STREAM_TYPES = {
  h264: 0x1b,
  adts: 0x0f
};
7605
/**
 * Reconstitutes program elementary stream (PES) packets from parsed
 * transport stream packets. That is, if you pipe an
 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
 * events will be events which capture the bytes for individual PES
 * packets plus relevant metadata that has been extracted from the
 * container.
 */
_ElementaryStream = function ElementaryStream() {
  var self = this,

      // PES packet fragments, one accumulator per elementary stream type
      video = {
        data: [],
        size: 0
      },
      audio = {
        data: [],
        size: 0
      },
      timedMetadata = {
        data: [],
        size: 0
      },
      programMapTable,

      // Parse the header of an assembled PES packet, decorating `pes`
      // with packetLength, dataAlignmentIndicator, pts/dts (when present)
      // and the payload (`data`).
      parsePes = function parsePes(payload, pes) {
        var ptsDtsFlags;

        // get the packet length, this will be 0 for video
        pes.packetLength = 6 + (payload[4] << 8 | payload[5]);

        // find out if this packets starts a new keyframe
        pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0;
        // PES packets may be annotated with a PTS value, or a PTS value
        // and a DTS value. Determine what combination of values is
        // available to work with.
        ptsDtsFlags = payload[7];

        // PTS and DTS are normally stored as a 33-bit number. Javascript
        // performs all bitwise operations on 32-bit integers but javascript
        // supports a much greater range (52-bits) of integer using standard
        // mathematical operations.
        // We construct a 31-bit value using bitwise operators over the 31
        // most significant bits and then multiply by 4 (equal to a left-shift
        // of 2) before we add the final 2 least significant bits of the
        // timestamp (equal to an OR.)
        if (ptsDtsFlags & 0xC0) {
          // the PTS and DTS are not written out directly. For information
          // on how they are encoded, see
          // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
          pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
          pes.pts *= 4; // Left shift by 2
          pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
          pes.dts = pes.pts;
          if (ptsDtsFlags & 0x40) {
            pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
            pes.dts *= 4; // Left shift by 2
            pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
          }
        }
        // the data section starts immediately after the PES header.
        // pes_header_data_length specifies the number of header bytes
        // that follow the last byte of the field.
        pes.data = payload.subarray(9 + payload[8]);
      },

      /**
       * Pass completely parsed PES packets to the next stream in the pipeline
       **/
      flushStream = function flushStream(stream$$1, type, forceFlush) {
        var packetData = new Uint8Array(stream$$1.size),
            event = {
              type: type
            },
            i = 0,
            offset = 0,
            packetFlushable = false,
            fragment;

        // do nothing if there is not enough buffered data for a complete
        // PES header
        if (!stream$$1.data.length || stream$$1.size < 9) {
          return;
        }
        event.trackId = stream$$1.data[0].pid;

        // reassemble the packet
        for (i = 0; i < stream$$1.data.length; i++) {
          fragment = stream$$1.data[i];

          packetData.set(fragment.data, offset);
          offset += fragment.data.byteLength;
        }

        // parse assembled packet's PES header
        parsePes(packetData, event);

        // non-video PES packets MUST have a non-zero PES_packet_length
        // check that there is enough stream data to fill the packet
        packetFlushable = type === 'video' || event.packetLength <= stream$$1.size;

        // flush pending packets if the conditions are right
        if (forceFlush || packetFlushable) {
          stream$$1.size = 0;
          stream$$1.data.length = 0;
        }

        // only emit packets that are complete. this is to avoid assembling
        // incomplete PES packets due to poor segmentation
        if (packetFlushable) {
          self.trigger('data', event);
        }
      };

  _ElementaryStream.prototype.init.call(this);

  /**
   * Identifies M2TS packet types and parses PES packets using metadata
   * parsed from the PMT
   **/
  this.push = function (data) {
    ({
      pat: function pat() {
        // we have to wait for the PMT to arrive as well before we
        // have any meaningful metadata
      },
      pes: function pes() {
        var stream$$1, streamType;

        switch (data.streamType) {
          case streamTypes.H264_STREAM_TYPE:
            stream$$1 = video;
            streamType = 'video';
            break;
          case streamTypes.ADTS_STREAM_TYPE:
            stream$$1 = audio;
            streamType = 'audio';
            break;
          case streamTypes.METADATA_STREAM_TYPE:
            stream$$1 = timedMetadata;
            streamType = 'timed-metadata';
            break;
          default:
            // ignore unknown stream types
            return;
        }

        // if a new packet is starting, we can flush the completed
        // packet
        if (data.payloadUnitStartIndicator) {
          flushStream(stream$$1, streamType, true);
        }

        // buffer this fragment until we are sure we've received the
        // complete payload
        stream$$1.data.push(data);
        stream$$1.size += data.data.byteLength;
      },
      pmt: function pmt() {
        var event = {
          type: 'metadata',
          tracks: []
        };

        programMapTable = data.programMapTable;

        // translate audio and video streams to tracks
        if (programMapTable.video !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.video,
            codec: 'avc',
            type: 'video'
          });
        }
        if (programMapTable.audio !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.audio,
            codec: 'adts',
            type: 'audio'
          });
        }

        self.trigger('data', event);
      }
    })[data.type]();
  };

  this.reset = function () {
    video.size = 0;
    video.data.length = 0;
    audio.size = 0;
    audio.data.length = 0;
    // NOTE(review): timedMetadata buffers are not cleared here — confirm
    // whether that is intentional before changing it
    this.trigger('reset');
  };

  /**
   * Flush any remaining input. Video PES packets may be of variable
   * length. Normally, the start of a new video packet can trigger the
   * finalization of the previous packet. That is not possible if no
   * more video is forthcoming, however. In that case, some other
   * mechanism (like the end of the file) has to be employed. When it is
   * clear that no additional data is forthcoming, calling this method
   * will flush the buffered packets.
   */
  this.flushStreams_ = function () {
    // !!THIS ORDER IS IMPORTANT!!
    // video first then audio
    flushStream(video, 'video');
    flushStream(audio, 'audio');
    flushStream(timedMetadata, 'timed-metadata');
  };

  this.flush = function () {
    this.flushStreams_();
    this.trigger('done');
  };
};
_ElementaryStream.prototype = new stream();
7833
// public namespace for the MP2T demuxing pipeline stages
var m2ts = {
  PAT_PID: 0x0000,
  MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
  TransportPacketStream: _TransportPacketStream,
  TransportParseStream: _TransportParseStream,
  ElementaryStream: _ElementaryStream,
  TimestampRolloverStream: TimestampRolloverStream$1,
  CaptionStream: captionStream.CaptionStream,
  Cea608Stream: captionStream.Cea608Stream,
  MetadataStream: metadataStream
};

// re-export the stream_type constants (H264_STREAM_TYPE, etc.) on the
// m2ts namespace for convenience
for (var type in streamTypes) {
  if (streamTypes.hasOwnProperty(type)) {
    m2ts[type] = streamTypes[type];
  }
}

var m2ts_1 = m2ts;
7853
// one second expressed in transport-stream clock ticks -- presumably 90000
// (the 90kHz MPEG-2 clock); defined by the shared clock module, confirm there
var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;

var _AdtsStream;

// sampling rates in Hz, indexed by the ADTS header's 4-bit
// sampling_frequency_index field
var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
7859
7860 /*
7861 * Accepts a ElementaryStream and emits data events with parsed
7862 * AAC Audio Frames of the individual packets. Input audio in ADTS
7863 * format is unpacked and re-emitted as AAC frames.
7864 *
7865 * @see http://wiki.multimedia.cx/index.php?title=ADTS
7866 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
7867 */
7868 _AdtsStream = function AdtsStream(handlePartialSegments) {
7869 var buffer,
7870 frameNum = 0;
7871
7872 _AdtsStream.prototype.init.call(this);
7873
7874 this.push = function (packet) {
7875 var i = 0,
7876 frameLength,
7877 protectionSkipBytes,
7878 frameEnd,
7879 oldBuffer,
7880 sampleCount,
7881 adtsFrameDuration;
7882
7883 if (!handlePartialSegments) {
7884 frameNum = 0;
7885 }
7886
7887 if (packet.type !== 'audio') {
7888 // ignore non-audio data
7889 return;
7890 }
7891
7892 // Prepend any data in the buffer to the input data so that we can parse
7893 // aac frames the cross a PES packet boundary
7894 if (buffer) {
7895 oldBuffer = buffer;
7896 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
7897 buffer.set(oldBuffer);
7898 buffer.set(packet.data, oldBuffer.byteLength);
7899 } else {
7900 buffer = packet.data;
7901 }
7902
7903 // unpack any ADTS frames which have been fully received
7904 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
7905 while (i + 5 < buffer.length) {
7906
7907 // Look for the start of an ADTS header..
7908 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
7909 // If a valid header was not found, jump one forward and attempt to
7910 // find a valid ADTS header starting at the next byte
7911 i++;
7912 continue;
7913 }
7914
7915 // The protection skip bit tells us if we have 2 bytes of CRC data at the
7916 // end of the ADTS header
7917 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2;
7918
7919 // Frame length is a 13 bit integer starting 16 bits from the
7920 // end of the sync sequence
7921 frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
7922
7923 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
7924 adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2];
7925
7926 frameEnd = i + frameLength;
7927
7928 // If we don't have enough data to actually finish this ADTS frame, return
7929 // and wait for more data
7930 if (buffer.byteLength < frameEnd) {
7931 return;
7932 }
7933
7934 // Otherwise, deliver the complete AAC frame
7935 this.trigger('data', {
7936 pts: packet.pts + frameNum * adtsFrameDuration,
7937 dts: packet.dts + frameNum * adtsFrameDuration,
7938 sampleCount: sampleCount,
7939 audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
7940 channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
7941 samplerate: ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2],
7942 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
7943 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
7944 samplesize: 16,
7945 data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
7946 });
7947
7948 frameNum++;
7949
7950 // If the buffer is empty, clear it and return
7951 if (buffer.byteLength === frameEnd) {
7952 buffer = undefined;
7953 return;
7954 }
7955
7956 // Remove the finished frame from the buffer and start the process again
7957 buffer = buffer.subarray(frameEnd);
7958 }
7959 };
7960
    // Reset the frame counter used to extrapolate timestamps and signal that
    // processing is complete. Note: any buffered partial ADTS frame bytes are
    // intentionally left in `buffer` (only `frameNum` is reset here).
    this.flush = function () {
      frameNum = 0;
      this.trigger('done');
    };

    // Discard any buffered partial ADTS frame and notify listeners.
    this.reset = function () {
      buffer = void 0;
      this.trigger('reset');
    };

    // Discard buffered bytes at the end of a timeline; a frame must not span
    // timelines, so the partial frame is dropped rather than completed.
    this.endTimeline = function () {
      buffer = void 0;
      this.trigger('endedtimeline');
    };
7975 };
7976
  // inherit the base Stream facilities (init/trigger used above)
  _AdtsStream.prototype = new stream();

  // module export alias
  var adts = _AdtsStream;
7980
7981 /**
7982 * mux.js
7983 *
7984 * Copyright (c) Brightcove
7985 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
7986 */
7987
7988 var ExpGolomb;
7989
7990 /**
7991 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
7992 * scheme used by h264.
7993 */
7994 ExpGolomb = function ExpGolomb(workingData) {
7995 var
7996 // the number of bytes left to examine in workingData
7997 workingBytesAvailable = workingData.byteLength,
7998
7999
8000 // the current word being examined
8001 workingWord = 0,
8002
8003 // :uint
8004
8005 // the number of bits left to examine in the current word
8006 workingBitsAvailable = 0; // :uint;
8007
8008 // ():uint
8009 this.length = function () {
8010 return 8 * workingBytesAvailable;
8011 };
8012
8013 // ():uint
8014 this.bitsAvailable = function () {
8015 return 8 * workingBytesAvailable + workingBitsAvailable;
8016 };
8017
8018 // ():void
8019 this.loadWord = function () {
8020 var position = workingData.byteLength - workingBytesAvailable,
8021 workingBytes = new Uint8Array(4),
8022 availableBytes = Math.min(4, workingBytesAvailable);
8023
8024 if (availableBytes === 0) {
8025 throw new Error('no bytes available');
8026 }
8027
8028 workingBytes.set(workingData.subarray(position, position + availableBytes));
8029 workingWord = new DataView(workingBytes.buffer).getUint32(0);
8030
8031 // track the amount of workingData that has been processed
8032 workingBitsAvailable = availableBytes * 8;
8033 workingBytesAvailable -= availableBytes;
8034 };
8035
8036 // (count:int):void
8037 this.skipBits = function (count) {
8038 var skipBytes; // :int
8039 if (workingBitsAvailable > count) {
8040 workingWord <<= count;
8041 workingBitsAvailable -= count;
8042 } else {
8043 count -= workingBitsAvailable;
8044 skipBytes = Math.floor(count / 8);
8045
8046 count -= skipBytes * 8;
8047 workingBytesAvailable -= skipBytes;
8048
8049 this.loadWord();
8050
8051 workingWord <<= count;
8052 workingBitsAvailable -= count;
8053 }
8054 };
8055
8056 // (size:int):uint
8057 this.readBits = function (size) {
8058 var bits = Math.min(workingBitsAvailable, size),
8059
8060 // :uint
8061 valu = workingWord >>> 32 - bits; // :uint
8062 // if size > 31, handle error
8063 workingBitsAvailable -= bits;
8064 if (workingBitsAvailable > 0) {
8065 workingWord <<= bits;
8066 } else if (workingBytesAvailable > 0) {
8067 this.loadWord();
8068 }
8069
8070 bits = size - bits;
8071 if (bits > 0) {
8072 return valu << bits | this.readBits(bits);
8073 }
8074 return valu;
8075 };
8076
8077 // ():uint
8078 this.skipLeadingZeros = function () {
8079 var leadingZeroCount; // :uint
8080 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
8081 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
8082 // the first bit of working word is 1
8083 workingWord <<= leadingZeroCount;
8084 workingBitsAvailable -= leadingZeroCount;
8085 return leadingZeroCount;
8086 }
8087 }
8088
8089 // we exhausted workingWord and still have not found a 1
8090 this.loadWord();
8091 return leadingZeroCount + this.skipLeadingZeros();
8092 };
8093
8094 // ():void
8095 this.skipUnsignedExpGolomb = function () {
8096 this.skipBits(1 + this.skipLeadingZeros());
8097 };
8098
8099 // ():void
8100 this.skipExpGolomb = function () {
8101 this.skipBits(1 + this.skipLeadingZeros());
8102 };
8103
8104 // ():uint
8105 this.readUnsignedExpGolomb = function () {
8106 var clz = this.skipLeadingZeros(); // :uint
8107 return this.readBits(clz + 1) - 1;
8108 };
8109
8110 // ():int
8111 this.readExpGolomb = function () {
8112 var valu = this.readUnsignedExpGolomb(); // :int
8113 if (0x01 & valu) {
8114 // the number is odd if the low order bit is set
8115 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
8116 }
8117 return -1 * (valu >>> 1); // divide by two then make it negative
8118 };
8119
8120 // Some convenience functions
8121 // :Boolean
8122 this.readBoolean = function () {
8123 return this.readBits(1) === 1;
8124 };
8125
8126 // ():int
8127 this.readUnsignedByte = function () {
8128 return this.readBits(8);
8129 };
8130
8131 this.loadWord();
8132 };
8133
8134 var expGolomb = ExpGolomb;
8135
  var _H264Stream, _NalByteStream;
  var PROFILES_WITH_OPTIONAL_SPS_DATA;

  /**
   * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
   * Buffers partial input across pushes so that NAL units split over
   * packet boundaries are still delivered whole.
   */
  _NalByteStream = function NalByteStream() {
    var syncPoint = 0,
        i,
        buffer;
    _NalByteStream.prototype.init.call(this);

    /*
     * Scans a byte stream and triggers a data event with the NAL units found.
     * @param {Object} data Event received from H264Stream
     * @param {Uint8Array} data.data The h264 byte stream to be scanned
     *
     * @see H264Stream.push
     */
    this.push = function (data) {
      var swapBuffer;

      // append the new bytes onto any unprocessed remainder from earlier pushes
      if (!buffer) {
        buffer = data.data;
      } else {
        swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
        swapBuffer.set(buffer);
        swapBuffer.set(data.data, buffer.byteLength);
        buffer = swapBuffer;
      }
      var len = buffer.byteLength;

      // Rec. ITU-T H.264, Annex B
      // scan for NAL unit boundaries

      // a match looks like this:
      // 0 0 1 .. NAL .. 0 0 1
      // ^ sync point        ^ i
      // or this:
      // 0 0 1 .. NAL .. 0 0 0
      // ^ sync point        ^ i

      // advance the sync point to a NAL start, if necessary
      for (; syncPoint < len - 3; syncPoint++) {
        if (buffer[syncPoint + 2] === 1) {
          // the sync point is properly aligned
          i = syncPoint + 5;
          break;
        }
      }
      // NOTE(review): if no start code is found above, `i` keeps its value
      // from the previous push (initially undefined) and the loop below is
      // skipped — the bytes stay buffered until a start code arrives.

      while (i < len) {
        // look at the current byte to determine if we've hit the end of
        // a NAL unit boundary
        switch (buffer[i]) {
          case 0:
            // skip past non-sync sequences
            if (buffer[i - 1] !== 0) {
              i += 2;
              break;
            } else if (buffer[i - 2] !== 0) {
              i++;
              break;
            }

            // deliver the NAL unit if it isn't empty
            if (syncPoint + 3 !== i - 2) {
              this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
            }

            // drop trailing zeroes
            do {
              i++;
            } while (buffer[i] !== 1 && i < len);
            syncPoint = i - 2;
            i += 3;
            break;
          case 1:
            // skip past non-sync sequences
            if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
              i += 3;
              break;
            }

            // deliver the NAL unit
            this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
            syncPoint = i - 2;
            i += 3;
            break;
          default:
            // the current byte isn't a one or zero, so it cannot be part
            // of a sync sequence
            i += 3;
            break;
        }
      }
      // filter out the NAL units that were delivered
      buffer = buffer.subarray(syncPoint);
      i -= syncPoint;
      syncPoint = 0;
    };

    this.reset = function () {
      buffer = null;
      syncPoint = 0;
      this.trigger('reset');
    };

    this.flush = function () {
      // deliver the last buffered NAL unit
      if (buffer && buffer.byteLength > 3) {
        this.trigger('data', buffer.subarray(syncPoint + 3));
      }
      // reset the stream state
      buffer = null;
      syncPoint = 0;
      this.trigger('done');
    };

    this.endTimeline = function () {
      this.flush();
      this.trigger('endedtimeline');
    };
  };
  _NalByteStream.prototype = new stream();
8261
  // values of profile_idc that indicate additional fields are included in the SPS
  // see Recommendation ITU-T H.264 (4/2013),
  // 7.3.2.1.1 Sequence parameter set data syntax
  // (profile names per ITU-T H.264 Annex A; verify against the spec edition in use)
  PROFILES_WITH_OPTIONAL_SPS_DATA = {
    100: true, // High
    110: true, // High 10
    122: true, // High 4:2:2
    244: true, // High 4:4:4 Predictive
    44: true, // CAVLC 4:4:4 Intra
    83: true, // Scalable Baseline
    86: true, // Scalable High
    118: true, // Multiview High
    128: true, // Stereo High
    138: true, // Multiview Depth High
    139: true,
    134: true
  };
8279
  /**
   * Accepts input from a ElementaryStream and produces H.264 NAL unit data
   * events. Each 'data' event carries the NAL payload plus the trackId and
   * pts/dts of the PES packet it was extracted from; SEI and SPS NAL units
   * additionally get an unescaped RBSP (and, for SPS, a parsed config).
   */
  _H264Stream = function H264Stream() {
    var nalByteStream = new _NalByteStream(),
        self,
        trackId,
        currentPts,
        currentDts,
        discardEmulationPreventionBytes,
        readSequenceParameterSet,
        skipScalingList;

    _H264Stream.prototype.init.call(this);
    self = this;

    /*
     * Pushes a packet from a stream onto the NalByteStream
     *
     * @param {Object} packet - A packet received from a stream
     * @param {Uint8Array} packet.data - The raw bytes of the packet
     * @param {Number} packet.dts - Decode timestamp of the packet
     * @param {Number} packet.pts - Presentation timestamp of the packet
     * @param {Number} packet.trackId - The id of the h264 track this packet came from
     * @param {('video'|'audio')} packet.type - The type of packet
     *
     */
    this.push = function (packet) {
      if (packet.type !== 'video') {
        return;
      }
      // remember the timing of the most recent packet; it is attached to every
      // NAL unit extracted from it by the byte stream below
      trackId = packet.trackId;
      currentPts = packet.pts;
      currentDts = packet.dts;

      nalByteStream.push(packet);
    };

    /*
     * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
     * for the NALUs to the next stream component.
     * Also, preprocess caption and sequence parameter NALUs.
     *
     * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
     * @see NalByteStream.push
     */
    nalByteStream.on('data', function (data) {
      var event = {
        trackId: trackId,
        pts: currentPts,
        dts: currentDts,
        data: data
      };

      // the low 5 bits of the first byte are nal_unit_type; types other than
      // those below pass through with event.nalUnitType left undefined
      switch (data[0] & 0x1f) {
        case 0x05:
          event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
          break;
        case 0x06:
          event.nalUnitType = 'sei_rbsp';
          event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
          break;
        case 0x07:
          event.nalUnitType = 'seq_parameter_set_rbsp';
          event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
          event.config = readSequenceParameterSet(event.escapedRBSP);
          break;
        case 0x08:
          event.nalUnitType = 'pic_parameter_set_rbsp';
          break;
        case 0x09:
          event.nalUnitType = 'access_unit_delimiter_rbsp';
          break;

        default:
          break;
      }
      // This triggers data on the H264Stream
      self.trigger('data', event);
    });
    nalByteStream.on('done', function () {
      self.trigger('done');
    });
    nalByteStream.on('partialdone', function () {
      self.trigger('partialdone');
    });
    nalByteStream.on('reset', function () {
      self.trigger('reset');
    });
    nalByteStream.on('endedtimeline', function () {
      self.trigger('endedtimeline');
    });

    this.flush = function () {
      nalByteStream.flush();
    };

    // NOTE(review): the _NalByteStream defined above does not define a
    // partialFlush method, so calling this would throw — presumably only the
    // partial-transmux pipeline's byte stream provides it; confirm before use.
    this.partialFlush = function () {
      nalByteStream.partialFlush();
    };

    this.reset = function () {
      nalByteStream.reset();
    };

    this.endTimeline = function () {
      nalByteStream.endTimeline();
    };

    /**
     * Advance the ExpGolomb decoder past a scaling list. The scaling
     * list is optionally transmitted as part of a sequence parameter
     * set and is not relevant to transmuxing.
     * @param count {number} the number of entries in this scaling list
     * @param expGolombDecoder {object} an ExpGolomb pointed to the
     * start of a scaling list
     * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
     */
    skipScalingList = function skipScalingList(count, expGolombDecoder) {
      var lastScale = 8,
          nextScale = 8,
          j,
          deltaScale;

      for (j = 0; j < count; j++) {
        if (nextScale !== 0) {
          deltaScale = expGolombDecoder.readExpGolomb();
          nextScale = (lastScale + deltaScale + 256) % 256;
        }

        lastScale = nextScale === 0 ? lastScale : nextScale;
      }
    };

    /**
     * Expunge any "Emulation Prevention" bytes from a "Raw Byte
     * Sequence Payload"
     * @param data {Uint8Array} the bytes of a RBSP from a NAL
     * unit
     * @return {Uint8Array} the RBSP without any Emulation
     * Prevention Bytes
     */
    discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
      var length = data.byteLength,
          emulationPreventionBytesPositions = [],
          i = 1,
          newLength,
          newData;

      // Find all `Emulation Prevention Bytes` (the 0x03 in a 00 00 03 run)
      while (i < length - 2) {
        if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
          emulationPreventionBytesPositions.push(i + 2);
          i += 2;
        } else {
          i++;
        }
      }

      // If no Emulation Prevention Bytes were found just return the original
      // array
      if (emulationPreventionBytesPositions.length === 0) {
        return data;
      }

      // Create a new array to hold the NAL unit data
      newLength = length - emulationPreventionBytesPositions.length;
      newData = new Uint8Array(newLength);
      var sourceIndex = 0;

      // copy everything except the recorded 0x03 positions
      for (i = 0; i < newLength; sourceIndex++, i++) {
        if (sourceIndex === emulationPreventionBytesPositions[0]) {
          // Skip this byte
          sourceIndex++;
          // Remove this position index
          emulationPreventionBytesPositions.shift();
        }
        newData[i] = data[sourceIndex];
      }

      return newData;
    };

    /**
     * Read a sequence parameter set and return some interesting video
     * properties. A sequence parameter set is the H264 metadata that
     * describes the properties of upcoming video frames.
     * @param data {Uint8Array} the bytes of a sequence parameter set
     * @return {object} an object with configuration parsed from the
     * sequence parameter set, including the dimensions of the
     * associated video frames.
     */
    readSequenceParameterSet = function readSequenceParameterSet(data) {
      var frameCropLeftOffset = 0,
          frameCropRightOffset = 0,
          frameCropTopOffset = 0,
          frameCropBottomOffset = 0,
          sarScale = 1,
          expGolombDecoder,
          profileIdc,
          levelIdc,
          profileCompatibility,
          chromaFormatIdc,
          picOrderCntType,
          numRefFramesInPicOrderCntCycle,
          picWidthInMbsMinus1,
          picHeightInMapUnitsMinus1,
          frameMbsOnlyFlag,
          scalingListCount,
          sarRatio,
          aspectRatioIdc,
          i;

      expGolombDecoder = new expGolomb(data);
      profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
      profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
      levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
      expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id

      // some profiles have more optional data we don't need
      if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
        chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
        if (chromaFormatIdc === 3) {
          expGolombDecoder.skipBits(1); // separate_colour_plane_flag
        }
        expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
        expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
        expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
        if (expGolombDecoder.readBoolean()) {
          // seq_scaling_matrix_present_flag
          scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
          for (i = 0; i < scalingListCount; i++) {
            if (expGolombDecoder.readBoolean()) {
              // seq_scaling_list_present_flag[ i ]
              if (i < 6) {
                skipScalingList(16, expGolombDecoder);
              } else {
                skipScalingList(64, expGolombDecoder);
              }
            }
          }
        }
      }

      expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
      picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();

      if (picOrderCntType === 0) {
        expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
      } else if (picOrderCntType === 1) {
        expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
        expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
        expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
        numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
        for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
          expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
        }
      }

      expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
      expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag

      picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
      picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();

      frameMbsOnlyFlag = expGolombDecoder.readBits(1);
      if (frameMbsOnlyFlag === 0) {
        expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
      }

      expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
      if (expGolombDecoder.readBoolean()) {
        // frame_cropping_flag
        frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
        frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
        frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
        frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
      }
      if (expGolombDecoder.readBoolean()) {
        // vui_parameters_present_flag
        if (expGolombDecoder.readBoolean()) {
          // aspect_ratio_info_present_flag
          aspectRatioIdc = expGolombDecoder.readUnsignedByte();
          switch (aspectRatioIdc) {
            case 1:
              sarRatio = [1, 1];break;
            case 2:
              sarRatio = [12, 11];break;
            case 3:
              sarRatio = [10, 11];break;
            case 4:
              sarRatio = [16, 11];break;
            case 5:
              sarRatio = [40, 33];break;
            case 6:
              sarRatio = [24, 11];break;
            case 7:
              sarRatio = [20, 11];break;
            case 8:
              sarRatio = [32, 11];break;
            case 9:
              sarRatio = [80, 33];break;
            case 10:
              sarRatio = [18, 11];break;
            case 11:
              sarRatio = [15, 11];break;
            case 12:
              sarRatio = [64, 33];break;
            case 13:
              sarRatio = [160, 99];break;
            case 14:
              sarRatio = [4, 3];break;
            case 15:
              sarRatio = [3, 2];break;
            case 16:
              sarRatio = [2, 1];break;
            case 255:
              {
                // Extended_SAR: explicit 16-bit width and height
                sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
                break;
              }
          }
          // unknown aspectRatioIdc values leave sarRatio undefined and
          // sarScale at 1
          if (sarRatio) {
            sarScale = sarRatio[0] / sarRatio[1];
          }
        }
      }
      // the sample-aspect-ratio scale is applied to the width only
      return {
        profileIdc: profileIdc,
        levelIdc: levelIdc,
        profileCompatibility: profileCompatibility,
        width: Math.ceil(((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
        height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
        sarRatio: sarRatio
      };
    };
  };
  _H264Stream.prototype = new stream();

  // module exports
  var h264 = {
    H264Stream: _H264Stream,
    NalByteStream: _NalByteStream
  };
8624
8625 /**
8626 * mux.js
8627 *
8628 * Copyright (c) Brightcove
8629 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
8630 *
8631 * Utilities to detect basic properties and metadata about Aac data.
8632 */
8633
  // ADTS sampling_frequency_index -> sample rate in Hz (ISO/IEC 14496-3)
  var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
8635
8636 var isLikelyAacData = function isLikelyAacData(data) {
8637 if (data[0] === 'I'.charCodeAt(0) && data[1] === 'D'.charCodeAt(0) && data[2] === '3'.charCodeAt(0)) {
8638 return true;
8639 }
8640 return false;
8641 };
8642
8643 var parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
8644 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
8645 };
8646
8647 // return a percent-encoded representation of the specified byte range
8648 // @see http://en.wikipedia.org/wiki/Percent-encoding
8649 var percentEncode$1 = function percentEncode(bytes, start, end) {
8650 var i,
8651 result = '';
8652 for (i = start; i < end; i++) {
8653 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
8654 }
8655 return result;
8656 };
8657
8658 // return the string representation of the specified byte range,
8659 // interpreted as ISO-8859-1.
8660 var parseIso88591$1 = function parseIso88591(bytes, start, end) {
8661 return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
8662 };
8663
8664 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
8665 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
8666 flags = header[byteIndex + 5],
8667 footerPresent = (flags & 16) >> 4;
8668
8669 if (footerPresent) {
8670 return returnSize + 20;
8671 }
8672 return returnSize + 10;
8673 };
8674
8675 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
8676 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
8677 middle = header[byteIndex + 4] << 3,
8678 highTwo = header[byteIndex + 3] & 0x3 << 11;
8679
8680 return highTwo | middle | lowThree;
8681 };
8682
8683 var parseType = function parseType(header, byteIndex) {
8684 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
8685 return 'timed-metadata';
8686 } else if (header[byteIndex] & 0xff === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
8687 return 'audio';
8688 }
8689 return null;
8690 };
8691
8692 var parseSampleRate = function parseSampleRate(packet) {
8693 var i = 0;
8694
8695 while (i + 5 < packet.length) {
8696 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
8697 // If a valid header was not found, jump one forward and attempt to
8698 // find a valid ADTS header starting at the next byte
8699 i++;
8700 continue;
8701 }
8702 return ADTS_SAMPLING_FREQUENCIES$1[(packet[i + 2] & 0x3c) >>> 2];
8703 }
8704
8705 return null;
8706 };
8707
  // Walk the ID3v2 tag at the start of `packet` looking for an Apple PRIV
  // frame carrying the MPEG-TS presentation timestamp of the audio that
  // follows; returns the timestamp or null if none is found.
  var parseAacTimestamp = function parseAacTimestamp(packet) {
    var frameStart, frameSize, frame, frameHeader;

    // find the start of the first frame and the end of the tag
    frameStart = 10;
    if (packet[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field
      frameStart += parseSyncSafeInteger$1(packet.subarray(10, 14));
    }

    // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview
    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger$1(packet.subarray(frameStart + 4, frameStart + 8));
      if (frameSize < 1) {
        return null;
      }
      frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);

      if (frameHeader === 'PRIV') {
        frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);

        // the PRIV payload is a NUL-terminated owner string followed by data
        for (var i = 0; i < frame.byteLength; i++) {
          if (frame[i] === 0) {
            var owner = parseIso88591$1(frame, 0, i);
            if (owner === 'com.apple.streaming.transportStreamTimestamp') {
              // reassemble the 33-bit timestamp: 1 bit from d[3], then the
              // following bytes; the *4 shift plus the low 2 bits of d[7]
              // restore the bottom of the value without 32-bit overflow
              var d = frame.subarray(i + 1);
              var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
              size *= 4;
              size += d[7] & 0x03;

              return size;
            }
            break;
          }
        }
      }

      frameStart += 10; // advance past the frame header
      frameStart += frameSize; // advance past the frame body
    } while (frameStart < packet.byteLength);
    return null;
  };
8753
  // AAC/ID3 probing helpers shared by the AAC pipeline below
  var utils = {
    isLikelyAacData: isLikelyAacData,
    parseId3TagSize: parseId3TagSize,
    parseAdtsSize: parseAdtsSize,
    parseType: parseType,
    parseSampleRate: parseSampleRate,
    parseAacTimestamp: parseAacTimestamp
  };
8762
8763 // Constants
8764 var _AacStream;
8765
8766 /**
8767 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
8768 */
8769
8770 _AacStream = function AacStream() {
8771 var everything = new Uint8Array(),
8772 timeStamp = 0;
8773
8774 _AacStream.prototype.init.call(this);
8775
8776 this.setTimestamp = function (timestamp) {
8777 timeStamp = timestamp;
8778 };
8779
8780 this.push = function (bytes) {
8781 var frameSize = 0,
8782 byteIndex = 0,
8783 bytesLeft,
8784 chunk,
8785 packet,
8786 tempLength;
8787
8788 // If there are bytes remaining from the last segment, prepend them to the
8789 // bytes that were pushed in
8790 if (everything.length) {
8791 tempLength = everything.length;
8792 everything = new Uint8Array(bytes.byteLength + tempLength);
8793 everything.set(everything.subarray(0, tempLength));
8794 everything.set(bytes, tempLength);
8795 } else {
8796 everything = bytes;
8797 }
8798
8799 while (everything.length - byteIndex >= 3) {
8800 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
8801
8802 // Exit early because we don't have enough to parse
8803 // the ID3 tag header
8804 if (everything.length - byteIndex < 10) {
8805 break;
8806 }
8807
8808 // check framesize
8809 frameSize = utils.parseId3TagSize(everything, byteIndex);
8810
8811 // Exit early if we don't have enough in the buffer
8812 // to emit a full packet
8813 // Add to byteIndex to support multiple ID3 tags in sequence
8814 if (byteIndex + frameSize > everything.length) {
8815 break;
8816 }
8817 chunk = {
8818 type: 'timed-metadata',
8819 data: everything.subarray(byteIndex, byteIndex + frameSize)
8820 };
8821 this.trigger('data', chunk);
8822 byteIndex += frameSize;
8823 continue;
8824 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
8825
8826 // Exit early because we don't have enough to parse
8827 // the ADTS frame header
8828 if (everything.length - byteIndex < 7) {
8829 break;
8830 }
8831
8832 frameSize = utils.parseAdtsSize(everything, byteIndex);
8833
8834 // Exit early if we don't have enough in the buffer
8835 // to emit a full packet
8836 if (byteIndex + frameSize > everything.length) {
8837 break;
8838 }
8839
8840 packet = {
8841 type: 'audio',
8842 data: everything.subarray(byteIndex, byteIndex + frameSize),
8843 pts: timeStamp,
8844 dts: timeStamp
8845 };
8846 this.trigger('data', packet);
8847 byteIndex += frameSize;
8848 continue;
8849 }
8850 byteIndex++;
8851 }
8852 bytesLeft = everything.length - byteIndex;
8853
8854 if (bytesLeft > 0) {
8855 everything = everything.subarray(byteIndex);
8856 } else {
8857 everything = new Uint8Array();
8858 }
8859 };
8860
8861 this.reset = function () {
8862 everything = new Uint8Array();
8863 this.trigger('reset');
8864 };
8865
8866 this.endTimeline = function () {
8867 everything = new Uint8Array();
8868 this.trigger('endedtimeline');
8869 };
8870 };
8871
  // inherit the base Stream facilities (init/trigger used above)
  _AacStream.prototype = new stream();

  // module export alias
  var aac = _AacStream;
8875
  var H264Stream = h264.H264Stream;

  var isLikelyAacData$1 = utils.isLikelyAacData;
  // ticks of the MPEG-TS clock per second (from the shared clock utilities)
  var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;

  // constants
  // track fields copied verbatim from parsed ADTS frames / SPS configs onto
  // the track objects by the segment streams below
  var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];

  var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];

  // object types
  var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
8888
8889 /**
8890 * Compare two arrays (even typed) for same-ness
8891 */
8892 var arrayEquals = function arrayEquals(a, b) {
8893 var i;
8894
8895 if (a.length !== b.length) {
8896 return false;
8897 }
8898
8899 // compare the value of each element in the array
8900 for (i = 0; i < a.length; i++) {
8901 if (a[i] !== b[i]) {
8902 return false;
8903 }
8904 }
8905
8906 return true;
8907 };
8908
8909 var generateVideoSegmentTimingInfo = function generateVideoSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
8910 var ptsOffsetFromDts = startPts - startDts,
8911 decodeDuration = endDts - startDts,
8912 presentationDuration = endPts - startPts;
8913
8914 // The PTS and DTS values are based on the actual stream times from the segment,
8915 // however, the player time values will reflect a start from the baseMediaDecodeTime.
8916 // In order to provide relevant values for the player times, base timing info on the
8917 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
8918 return {
8919 start: {
8920 dts: baseMediaDecodeTime,
8921 pts: baseMediaDecodeTime + ptsOffsetFromDts
8922 },
8923 end: {
8924 dts: baseMediaDecodeTime + decodeDuration,
8925 pts: baseMediaDecodeTime + presentationDuration
8926 },
8927 prependedContentDuration: prependedContentDuration,
8928 baseMediaDecodeTime: baseMediaDecodeTime
8929 };
8930 };
8931
  /**
   * Constructs a single-track, ISO BMFF media segment from AAC data
   * events. The output of this stream can be fed to a SourceBuffer
   * configured with a suitable initialization segment.
   * @param track {object} track metadata configuration
   * @param options {object} transmuxer options object
   * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
   * in the source; false to adjust the first segment to start at 0.
   */
  _AudioSegmentStream = function AudioSegmentStream(track, options) {
    var adtsFrames = [],
        sequenceNumber = 0,
        earliestAllowedDts = 0,
        audioAppendStartTs = 0,
        videoBaseMediaDecodeTime = Infinity;

    options = options || {};

    _AudioSegmentStream.prototype.init.call(this);

    // Accumulate one parsed ADTS frame, copying its codec properties
    // (AUDIO_PROPERTIES) onto the track object.
    this.push = function (data) {
      trackDecodeInfo.collectDtsInfo(track, data);

      if (track) {
        AUDIO_PROPERTIES.forEach(function (prop) {
          track[prop] = data[prop];
        });
      }

      // buffer audio data until end() is called
      adtsFrames.push(data);
    };

    // frames with a dts before this (timeline-relative) value are trimmed
    // during flush
    this.setEarliestDts = function (earliestDts) {
      earliestAllowedDts = earliestDts - track.timelineStartInfo.baseMediaDecodeTime;
    };

    this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
      videoBaseMediaDecodeTime = baseMediaDecodeTime;
    };

    this.setAudioAppendStart = function (timestamp) {
      audioAppendStartTs = timestamp;
    };

    // Trim, silence-prefix, and box the buffered frames into a moof+mdat
    // pair, emitting 'timingInfo', 'data', and 'done' events.
    this.flush = function () {
      var frames, moof, mdat, boxes, frameDuration;

      // return early if no audio data has been observed
      if (adtsFrames.length === 0) {
        this.trigger('done', 'AudioSegmentStream');
        return;
      }

      frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
      track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);

      // fill any gap before the audio with silent frames so audio and video
      // start together
      audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime);

      // we have to build the index from byte locations to
      // samples (that is, adts frames) in the audio data
      track.samples = audioFrameUtils.generateSampleTable(frames);

      // concatenate the audio data to construct the mdat
      mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));

      adtsFrames = [];

      moof = mp4Generator.moof(sequenceNumber, [track]);
      boxes = new Uint8Array(moof.byteLength + mdat.byteLength);

      // bump the sequence number for next time
      sequenceNumber++;

      boxes.set(moof);
      boxes.set(mdat, moof.byteLength);

      trackDecodeInfo.clearDtsInfo(track);

      // each AAC frame carries 1024 samples, so this is the per-frame
      // duration in clock ticks
      frameDuration = Math.ceil(ONE_SECOND_IN_TS$3 * 1024 / track.samplerate);

      // TODO this check was added to maintain backwards compatibility (particularly with
      // tests) on adding the timingInfo event. However, it seems unlikely that there's a
      // valid use-case where an init segment/data should be triggered without associated
      // frames. Leaving for now, but should be looked into.
      if (frames.length) {
        this.trigger('timingInfo', {
          start: frames[0].pts,
          end: frames[0].pts + frames.length * frameDuration
        });
      }
      this.trigger('data', { track: track, boxes: boxes });
      this.trigger('done', 'AudioSegmentStream');
    };

    // drop all buffered frames and collected dts info
    this.reset = function () {
      trackDecodeInfo.clearDtsInfo(track);
      adtsFrames = [];
      this.trigger('reset');
    };
  };

  _AudioSegmentStream.prototype = new stream();
9035
9036 /**
9037 * Constructs a single-track, ISO BMFF media segment from H264 data
9038 * events. The output of this stream can be fed to a SourceBuffer
9039 * configured with a suitable initialization segment.
9040 * @param track {object} track metadata configuration
9041 * @param options {object} transmuxer options object
9042 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
9043 * gopsToAlignWith list when attempting to align gop pts
9044 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9045 * in the source; false to adjust the first segment to start at 0.
9046 */
  _VideoSegmentStream = function VideoSegmentStream(track, options) {
    var sequenceNumber = 0,
        nalUnits = [],
        gopsToAlignWith = [],
        config,
        pps;

    options = options || {};

    _VideoSegmentStream.prototype.init.call(this);

    delete track.minPTS;

    this.gopCache_ = [];

    /**
     * Constructs a ISO BMFF segment given H264 nalUnits
     * @param {Object} nalUnit A data event representing a nalUnit
     * @param {String} nalUnit.nalUnitType
     * @param {Object} nalUnit.config Properties for a mp4 track
     * @param {Uint8Array} nalUnit.data The nalUnit bytes
     * @see lib/codecs/h264.js
     **/
    this.push = function (nalUnit) {
      trackDecodeInfo.collectDtsInfo(track, nalUnit);

      // record the track config
      if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
        config = nalUnit.config;
        track.sps = [nalUnit.data];

        VIDEO_PROPERTIES.forEach(function (prop) {
          track[prop] = config[prop];
        }, this);
      }

      if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
        pps = nalUnit.data;
        track.pps = [nalUnit.data];
      }

      // buffer video until flush() is called
      nalUnits.push(nalUnit);
    };

    /**
     * Pass constructed ISO BMFF track and boxes on to the
     * next stream in the pipeline
     **/
    this.flush = function () {
      var frames,
          gopForFusion,
          gops,
          moof,
          mdat,
          boxes,
          prependedContentDuration = 0,
          firstGop,
          lastGop;

      // Throw away nalUnits at the start of the byte stream until
      // we find the first AUD
      while (nalUnits.length) {
        if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
          break;
        }
        nalUnits.shift();
      }

      // Return early if no video data has been observed
      if (nalUnits.length === 0) {
        this.resetStream_();
        this.trigger('done', 'VideoSegmentStream');
        return;
      }

      // Organize the raw nal-units into arrays that represent
      // higher-level constructs such as frames and gops
      // (group-of-pictures)
      frames = frameUtils.groupNalsIntoFrames(nalUnits);
      gops = frameUtils.groupFramesIntoGops(frames);

      // If the first frame of this fragment is not a keyframe we have
      // a problem since MSE (on Chrome) requires a leading keyframe.
      //
      // We have two approaches to repairing this situation:
      // 1) GOP-FUSION:
      //    This is where we keep track of the GOPS (group-of-pictures)
      //    from previous fragments and attempt to find one that we can
      //    prepend to the current fragment in order to create a valid
      //    fragment.
      // 2) KEYFRAME-PULLING:
      //    Here we search for the first keyframe in the fragment and
      //    throw away all the frames between the start of the fragment
      //    and that keyframe. We then extend the duration and pull the
      //    PTS of the keyframe forward so that it covers the time range
      //    of the frames that were disposed of.
      //
      // #1 is far preferable over #2 which can cause "stuttering" but
      // requires more things to be just right.
      if (!gops[0][0].keyFrame) {
        // Search for a gop for fusion from our gopCache
        gopForFusion = this.getGopForFusion_(nalUnits[0], track);

        if (gopForFusion) {
          // in order to provide more accurate timing information about the segment, save
          // the number of seconds prepended to the original segment due to GOP fusion
          prependedContentDuration = gopForFusion.duration;

          gops.unshift(gopForFusion);
          // Adjust Gops' metadata to account for the inclusion of the
          // new gop at the beginning
          gops.byteLength += gopForFusion.byteLength;
          gops.nalCount += gopForFusion.nalCount;
          gops.pts = gopForFusion.pts;
          gops.dts = gopForFusion.dts;
          gops.duration += gopForFusion.duration;
        } else {
          // If we didn't find a candidate gop fall back to keyframe-pulling
          gops = frameUtils.extendFirstKeyFrame(gops);
        }
      }

      // Trim gops to align with gopsToAlignWith
      if (gopsToAlignWith.length) {
        var alignedGops;

        if (options.alignGopsAtEnd) {
          alignedGops = this.alignGopsAtEnd_(gops);
        } else {
          alignedGops = this.alignGopsAtStart_(gops);
        }

        if (!alignedGops) {
          // save all the nals in the last GOP into the gop cache
          this.gopCache_.unshift({
            gop: gops.pop(),
            pps: track.pps,
            sps: track.sps
          });

          // Keep a maximum of 6 GOPs in the cache
          this.gopCache_.length = Math.min(6, this.gopCache_.length);

          // Clear nalUnits
          nalUnits = [];

          // return early no gops can be aligned with desired gopsToAlignWith
          this.resetStream_();
          this.trigger('done', 'VideoSegmentStream');
          return;
        }

        // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
        // when recalculated before sending off to CoalesceStream
        trackDecodeInfo.clearDtsInfo(track);

        gops = alignedGops;
      }

      trackDecodeInfo.collectDtsInfo(track, gops);

      // First, we have to build the index from byte locations to
      // samples (that is, frames) in the video data
      track.samples = frameUtils.generateSampleTable(gops);

      // Concatenate the video data and construct the mdat
      mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));

      track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);

      this.trigger('processedGopsInfo', gops.map(function (gop) {
        return {
          pts: gop.pts,
          dts: gop.dts,
          byteLength: gop.byteLength
        };
      }));

      firstGop = gops[0];
      lastGop = gops[gops.length - 1];

      this.trigger('segmentTimingInfo', generateVideoSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));

      this.trigger('timingInfo', {
        start: gops[0].pts,
        end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
      });

      // save all the nals in the last GOP into the gop cache
      this.gopCache_.unshift({
        gop: gops.pop(),
        pps: track.pps,
        sps: track.sps
      });

      // Keep a maximum of 6 GOPs in the cache
      this.gopCache_.length = Math.min(6, this.gopCache_.length);

      // Clear nalUnits
      nalUnits = [];

      this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
      this.trigger('timelineStartInfo', track.timelineStartInfo);

      moof = mp4Generator.moof(sequenceNumber, [track]);

      // it would be great to allocate this array up front instead of
      // throwing away hundreds of media segment fragments
      boxes = new Uint8Array(moof.byteLength + mdat.byteLength);

      // Bump the sequence number for next time
      sequenceNumber++;

      boxes.set(moof);
      boxes.set(mdat, moof.byteLength);

      this.trigger('data', { track: track, boxes: boxes });

      this.resetStream_();

      // Continue with the flush process now
      this.trigger('done', 'VideoSegmentStream');
    };

    /**
     * Discard buffered nal units, the GOP cache, and alignment state.
     */
    this.reset = function () {
      this.resetStream_();
      nalUnits = [];
      this.gopCache_.length = 0;
      gopsToAlignWith.length = 0;
      this.trigger('reset');
    };

    /**
     * Clear per-track DTS bookkeeping and the cached SPS/PPS config.
     */
    this.resetStream_ = function () {
      trackDecodeInfo.clearDtsInfo(track);

      // reset config and pps because they may differ across segments
      // for instance, when we are rendition switching
      config = undefined;
      pps = undefined;
    };

    // Search for a candidate Gop for gop-fusion from the gop cache and
    // return it or return null if no good candidate was found
    this.getGopForFusion_ = function (nalUnit) {
      var halfSecond = 45000,
          // Half-a-second in a 90khz clock
          allowableOverlap = 10000,
          // About 3 frames @ 30fps
          nearestDistance = Infinity,
          dtsDistance,
          nearestGopObj,
          currentGop,
          currentGopObj,
          i;

      // Search for the GOP nearest to the beginning of this nal unit
      for (i = 0; i < this.gopCache_.length; i++) {
        currentGopObj = this.gopCache_[i];
        currentGop = currentGopObj.gop;

        // Reject Gops with different SPS or PPS
        if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
          continue;
        }

        // Reject Gops that would require a negative baseMediaDecodeTime
        if (currentGop.dts < track.timelineStartInfo.dts) {
          continue;
        }

        // The distance between the end of the gop and the start of the nalUnit
        dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration;

        // Only consider GOPS that start before the nal unit and end within
        // a half-second of the nal unit
        if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {

          // Always use the closest GOP we found if there is more than
          // one candidate
          if (!nearestGopObj || nearestDistance > dtsDistance) {
            nearestGopObj = currentGopObj;
            nearestDistance = dtsDistance;
          }
        }
      }

      if (nearestGopObj) {
        return nearestGopObj.gop;
      }
      return null;
    };

    // trim gop list to the first gop found that has a matching pts with a gop in the list
    // of gopsToAlignWith starting from the START of the list
    this.alignGopsAtStart_ = function (gops) {
      var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;

      byteLength = gops.byteLength;
      nalCount = gops.nalCount;
      duration = gops.duration;
      alignIndex = gopIndex = 0;

      while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
        align = gopsToAlignWith[alignIndex];
        gop = gops[gopIndex];

        if (align.pts === gop.pts) {
          break;
        }

        if (gop.pts > align.pts) {
          // this current gop starts after the current gop we want to align on, so increment
          // align index
          alignIndex++;
          continue;
        }

        // current gop starts before the current gop we want to align on. so increment gop
        // index
        gopIndex++;
        byteLength -= gop.byteLength;
        nalCount -= gop.nalCount;
        duration -= gop.duration;
      }

      if (gopIndex === 0) {
        // no gops to trim
        return gops;
      }

      if (gopIndex === gops.length) {
        // all gops trimmed, skip appending all gops
        return null;
      }

      alignedGops = gops.slice(gopIndex);
      alignedGops.byteLength = byteLength;
      alignedGops.duration = duration;
      alignedGops.nalCount = nalCount;
      alignedGops.pts = alignedGops[0].pts;
      alignedGops.dts = alignedGops[0].dts;

      return alignedGops;
    };

    // trim gop list to the first gop found that has a matching pts with a gop in the list
    // of gopsToAlignWith starting from the END of the list
    this.alignGopsAtEnd_ = function (gops) {
      var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;

      alignIndex = gopsToAlignWith.length - 1;
      gopIndex = gops.length - 1;
      alignEndIndex = null;
      matchFound = false;

      while (alignIndex >= 0 && gopIndex >= 0) {
        align = gopsToAlignWith[alignIndex];
        gop = gops[gopIndex];

        if (align.pts === gop.pts) {
          matchFound = true;
          break;
        }

        if (align.pts > gop.pts) {
          alignIndex--;
          continue;
        }

        if (alignIndex === gopsToAlignWith.length - 1) {
          // gop.pts is greater than the last alignment candidate. If no match is found
          // by the end of this loop, we still want to append gops that come after this
          // point
          alignEndIndex = gopIndex;
        }

        gopIndex--;
      }

      if (!matchFound && alignEndIndex === null) {
        return null;
      }

      var trimIndex;

      if (matchFound) {
        trimIndex = gopIndex;
      } else {
        trimIndex = alignEndIndex;
      }

      if (trimIndex === 0) {
        return gops;
      }

      var alignedGops = gops.slice(trimIndex);
      var metadata = alignedGops.reduce(function (total, gop) {
        total.byteLength += gop.byteLength;
        total.duration += gop.duration;
        total.nalCount += gop.nalCount;
        return total;
      }, { byteLength: 0, duration: 0, nalCount: 0 });

      alignedGops.byteLength = metadata.byteLength;
      alignedGops.duration = metadata.duration;
      alignedGops.nalCount = metadata.nalCount;
      alignedGops.pts = alignedGops[0].pts;
      alignedGops.dts = alignedGops[0].dts;

      return alignedGops;
    };

    /**
     * Provide the gop list (from a previous transmux of overlapping content)
     * that subsequent flushes should trim against.
     * @param {Array} newGopsToAlignWith
     */
    this.alignGopsWith = function (newGopsToAlignWith) {
      gopsToAlignWith = newGopsToAlignWith;
    };
  };

  _VideoSegmentStream.prototype = new stream();
9468
9469 /**
9470 * A Stream that can combine multiple streams (ie. audio & video)
9471 * into a single output segment for MSE. Also supports audio-only
9472 * and video-only streams.
9473 * @param options {object} transmuxer options object
9474 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9475 * in the source; false to adjust the first segment to start at media timeline start.
9476 */
9477 _CoalesceStream = function CoalesceStream(options, metadataStream) {
9478 // Number of Tracks per output segment
9479 // If greater than 1, we combine multiple
9480 // tracks into a single segment
9481 this.numberOfTracks = 0;
9482 this.metadataStream = metadataStream;
9483
9484 options = options || {};
9485
9486 if (typeof options.remux !== 'undefined') {
9487 this.remuxTracks = !!options.remux;
9488 } else {
9489 this.remuxTracks = true;
9490 }
9491
9492 if (typeof options.keepOriginalTimestamps === 'boolean') {
9493 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
9494 } else {
9495 this.keepOriginalTimestamps = false;
9496 }
9497
9498 this.pendingTracks = [];
9499 this.videoTrack = null;
9500 this.pendingBoxes = [];
9501 this.pendingCaptions = [];
9502 this.pendingMetadata = [];
9503 this.pendingBytes = 0;
9504 this.emittedTracks = 0;
9505
9506 _CoalesceStream.prototype.init.call(this);
9507
9508 // Take output from multiple
9509 this.push = function (output) {
9510 // buffer incoming captions until the associated video segment
9511 // finishes
9512 if (output.text) {
9513 return this.pendingCaptions.push(output);
9514 }
9515 // buffer incoming id3 tags until the final flush
9516 if (output.frames) {
9517 return this.pendingMetadata.push(output);
9518 }
9519
9520 // Add this track to the list of pending tracks and store
9521 // important information required for the construction of
9522 // the final segment
9523 this.pendingTracks.push(output.track);
9524 this.pendingBytes += output.boxes.byteLength;
9525
9526 // TODO: is there an issue for this against chrome?
9527 // We unshift audio and push video because
9528 // as of Chrome 75 when switching from
9529 // one init segment to another if the video
9530 // mdat does not appear after the audio mdat
9531 // only audio will play for the duration of our transmux.
9532 if (output.track.type === 'video') {
9533 this.videoTrack = output.track;
9534 this.pendingBoxes.push(output.boxes);
9535 }
9536 if (output.track.type === 'audio') {
9537 this.audioTrack = output.track;
9538 this.pendingBoxes.unshift(output.boxes);
9539 }
9540 };
9541 };
9542
9543 _CoalesceStream.prototype = new stream();
  /**
   * Combine the pending per-track segments, captions, and id3 metadata into a
   * single 'data' event once every expected track has flushed. Caption and
   * id3 PTS values are converted to second offsets on the segment timeline
   * before being re-emitted individually.
   * @param flushSource {string} name of the upstream stream that triggered
   * this flush (e.g. 'VideoSegmentStream' or 'AudioSegmentStream')
   */
  _CoalesceStream.prototype.flush = function (flushSource) {
    var offset = 0,
        event = {
          captions: [],
          captionStreams: {},
          metadata: [],
          info: {}
        },
        caption,
        id3,
        initSegment,
        timelineStartPts = 0,
        i;

    if (this.pendingTracks.length < this.numberOfTracks) {
      if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
        // Return because we haven't received a flush from a data-generating
        // portion of the segment (meaning that we have only received meta-data
        // or captions.)
        return;
      } else if (this.remuxTracks) {
        // Return until we have enough tracks from the pipeline to remux (if we
        // are remuxing audio and video into a single MP4)
        return;
      } else if (this.pendingTracks.length === 0) {
        // In the case where we receive a flush without any data having been
        // received we consider it an emitted track for the purposes of coalescing
        // `done` events.
        // We do this for the case where there is an audio and video track in the
        // segment but no audio data. (seen in several playlists with alternate
        // audio tracks and no audio present in the main TS segments.)
        this.emittedTracks++;

        if (this.emittedTracks >= this.numberOfTracks) {
          this.trigger('done');
          this.emittedTracks = 0;
        }
        return;
      }
    }

    // Use the video track's start pts (falling back to audio) as the origin
    // for caption/id3 timing, and copy the track properties onto event.info
    if (this.videoTrack) {
      timelineStartPts = this.videoTrack.timelineStartInfo.pts;
      VIDEO_PROPERTIES.forEach(function (prop) {
        event.info[prop] = this.videoTrack[prop];
      }, this);
    } else if (this.audioTrack) {
      timelineStartPts = this.audioTrack.timelineStartInfo.pts;
      AUDIO_PROPERTIES.forEach(function (prop) {
        event.info[prop] = this.audioTrack[prop];
      }, this);
    }

    if (this.videoTrack || this.audioTrack) {
      if (this.pendingTracks.length === 1) {
        event.type = this.pendingTracks[0].type;
      } else {
        event.type = 'combined';
      }

      this.emittedTracks += this.pendingTracks.length;

      initSegment = mp4Generator.initSegment(this.pendingTracks);

      // Create a new typed array to hold the init segment
      event.initSegment = new Uint8Array(initSegment.byteLength);

      // Create an init segment containing a moov
      // and track definitions
      event.initSegment.set(initSegment);

      // Create a new typed array to hold the moof+mdats
      event.data = new Uint8Array(this.pendingBytes);

      // Append each moof+mdat (one per track) together
      for (i = 0; i < this.pendingBoxes.length; i++) {
        event.data.set(this.pendingBoxes[i], offset);
        offset += this.pendingBoxes[i].byteLength;
      }

      // Translate caption PTS times into second offsets to match the
      // video timeline for the segment, and add track info
      for (i = 0; i < this.pendingCaptions.length; i++) {
        caption = this.pendingCaptions[i];
        caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
        caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);

        event.captionStreams[caption.stream] = true;
        event.captions.push(caption);
      }

      // Translate ID3 frame PTS times into second offsets to match the
      // video timeline for the segment
      for (i = 0; i < this.pendingMetadata.length; i++) {
        id3 = this.pendingMetadata[i];
        id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);

        event.metadata.push(id3);
      }

      // We add this to every single emitted segment even though we only need
      // it for the first
      event.metadata.dispatchType = this.metadataStream.dispatchType;

      // Reset stream state
      this.pendingTracks.length = 0;
      this.videoTrack = null;
      this.pendingBoxes.length = 0;
      this.pendingCaptions.length = 0;
      this.pendingBytes = 0;
      this.pendingMetadata.length = 0;

      // Emit the built segment
      // We include captions and ID3 tags for backwards compatibility,
      // ideally we should send only video and audio in the data event
      this.trigger('data', event);
      // Emit each caption to the outside world
      // Ideally, this would happen immediately on parsing captions,
      // but we need to ensure that video data is sent back first
      // so that caption timing can be adjusted to match video timing
      for (i = 0; i < event.captions.length; i++) {
        caption = event.captions[i];

        this.trigger('caption', caption);
      }
      // Emit each id3 tag to the outside world
      // Ideally, this would happen immediately on parsing the tag,
      // but we need to ensure that video data is sent back first
      // so that ID3 frame timing can be adjusted to match video timing
      for (i = 0; i < event.metadata.length; i++) {
        id3 = event.metadata[i];

        this.trigger('id3Frame', id3);
      }
    }

    // Only emit `done` if all tracks have been flushed and emitted
    if (this.emittedTracks >= this.numberOfTracks) {
      this.trigger('done');
      this.emittedTracks = 0;
    }
  };
9686
  /**
   * Enable or disable combining ("remuxing") audio and video tracks into a
   * single output segment.
   * @param val {boolean} true to combine tracks into one segment
   */
  _CoalesceStream.prototype.setRemux = function (val) {
    this.remuxTracks = val;
  };
9690 /**
9691 * A Stream that expects MP2T binary data as input and produces
9692 * corresponding media segments, suitable for use with Media Source
9693 * Extension (MSE) implementations that support the ISO BMFF byte
9694 * stream format, like Chrome.
9695 */
9696 _Transmuxer = function Transmuxer(options) {
9697 var self = this,
9698 hasFlushed = true,
9699 videoTrack,
9700 audioTrack;
9701
9702 _Transmuxer.prototype.init.call(this);
9703
9704 options = options || {};
9705 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
9706 this.transmuxPipeline_ = {};
9707
9708 this.setupAacPipeline = function () {
9709 var pipeline = {};
9710 this.transmuxPipeline_ = pipeline;
9711
9712 pipeline.type = 'aac';
9713 pipeline.metadataStream = new m2ts_1.MetadataStream();
9714
9715 // set up the parsing pipeline
9716 pipeline.aacStream = new aac();
9717 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
9718 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
9719 pipeline.adtsStream = new adts();
9720 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
9721 pipeline.headOfPipeline = pipeline.aacStream;
9722
9723 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
9724 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
9725
9726 pipeline.metadataStream.on('timestamp', function (frame) {
9727 pipeline.aacStream.setTimestamp(frame.timeStamp);
9728 });
9729
9730 pipeline.aacStream.on('data', function (data) {
9731 if (data.type === 'timed-metadata' && !pipeline.audioSegmentStream) {
9732 audioTrack = audioTrack || {
9733 timelineStartInfo: {
9734 baseMediaDecodeTime: self.baseMediaDecodeTime
9735 },
9736 codec: 'adts',
9737 type: 'audio'
9738 };
9739 // hook up the audio segment stream to the first track with aac data
9740 pipeline.coalesceStream.numberOfTracks++;
9741 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
9742
9743 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
9744
9745 // Set up the final part of the audio pipeline
9746 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
9747 }
9748
9749 // emit pmt info
9750 self.trigger('trackinfo', {
9751 hasAudio: !!audioTrack,
9752 hasVideo: !!videoTrack
9753 });
9754 });
9755
9756 // Re-emit any data coming from the coalesce stream to the outside world
9757 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
9758 // Let the consumer know we have finished flushing the entire pipeline
9759 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
9760 };
9761
9762 this.setupTsPipeline = function () {
9763 var pipeline = {};
9764 this.transmuxPipeline_ = pipeline;
9765
9766 pipeline.type = 'ts';
9767 pipeline.metadataStream = new m2ts_1.MetadataStream();
9768
9769 // set up the parsing pipeline
9770 pipeline.packetStream = new m2ts_1.TransportPacketStream();
9771 pipeline.parseStream = new m2ts_1.TransportParseStream();
9772 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
9773 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
9774 pipeline.adtsStream = new adts();
9775 pipeline.h264Stream = new H264Stream();
9776 pipeline.captionStream = new m2ts_1.CaptionStream();
9777 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
9778 pipeline.headOfPipeline = pipeline.packetStream;
9779
9780 // disassemble MPEG2-TS packets into elementary streams
9781 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream);
9782
9783 // !!THIS ORDER IS IMPORTANT!!
9784 // demux the streams
9785 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
9786
9787 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
9788
9789 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
9790
9791 // Hook up CEA-608/708 caption stream
9792 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
9793
9794 pipeline.elementaryStream.on('data', function (data) {
9795 var i;
9796
9797 if (data.type === 'metadata') {
9798 i = data.tracks.length;
9799
9800 // scan the tracks listed in the metadata
9801 while (i--) {
9802 if (!videoTrack && data.tracks[i].type === 'video') {
9803 videoTrack = data.tracks[i];
9804 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
9805 } else if (!audioTrack && data.tracks[i].type === 'audio') {
9806 audioTrack = data.tracks[i];
9807 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
9808 }
9809 }
9810
9811 // hook up the video segment stream to the first track with h264 data
9812 if (videoTrack && !pipeline.videoSegmentStream) {
9813 pipeline.coalesceStream.numberOfTracks++;
9814 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
9815
9816 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
9817 // When video emits timelineStartInfo data after a flush, we forward that
9818 // info to the AudioSegmentStream, if it exists, because video timeline
9819 // data takes precedence.
9820 if (audioTrack) {
9821 audioTrack.timelineStartInfo = timelineStartInfo;
9822 // On the first segment we trim AAC frames that exist before the
9823 // very earliest DTS we have seen in video because Chrome will
9824 // interpret any video track with a baseMediaDecodeTime that is
9825 // non-zero as a gap.
9826 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts);
9827 }
9828 });
9829
9830 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
9831 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
9832
9833 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
9834 if (audioTrack) {
9835 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
9836 }
9837 });
9838
9839 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo'));
9840
9841 // Set up the final part of the video pipeline
9842 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
9843 }
9844
9845 if (audioTrack && !pipeline.audioSegmentStream) {
9846 // hook up the audio segment stream to the first track with aac data
9847 pipeline.coalesceStream.numberOfTracks++;
9848 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
9849
9850 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
9851
9852 // Set up the final part of the audio pipeline
9853 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
9854 }
9855
9856 // emit pmt info
9857 self.trigger('trackinfo', {
9858 hasAudio: !!audioTrack,
9859 hasVideo: !!videoTrack
9860 });
9861 }
9862 });
9863
9864 // Re-emit any data coming from the coalesce stream to the outside world
9865 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
9866 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
9867 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
9868
9869 self.trigger('id3Frame', id3Frame);
9870 });
9871 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption'));
9872 // Let the consumer know we have finished flushing the entire pipeline
9873 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
9874 };
9875
9876 // hook up the segment streams once track metadata is delivered
9877 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
9878 var pipeline = this.transmuxPipeline_;
9879
9880 if (!options.keepOriginalTimestamps) {
9881 this.baseMediaDecodeTime = baseMediaDecodeTime;
9882 }
9883
9884 if (audioTrack) {
9885 audioTrack.timelineStartInfo.dts = undefined;
9886 audioTrack.timelineStartInfo.pts = undefined;
9887 trackDecodeInfo.clearDtsInfo(audioTrack);
9888 if (!options.keepOriginalTimestamps) {
9889 audioTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
9890 }
9891 if (pipeline.audioTimestampRolloverStream) {
9892 pipeline.audioTimestampRolloverStream.discontinuity();
9893 }
9894 }
9895 if (videoTrack) {
9896 if (pipeline.videoSegmentStream) {
9897 pipeline.videoSegmentStream.gopCache_ = [];
9898 }
9899 videoTrack.timelineStartInfo.dts = undefined;
9900 videoTrack.timelineStartInfo.pts = undefined;
9901 trackDecodeInfo.clearDtsInfo(videoTrack);
9902 pipeline.captionStream.reset();
9903 if (!options.keepOriginalTimestamps) {
9904 videoTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
9905 }
9906 }
9907
9908 if (pipeline.timestampRolloverStream) {
9909 pipeline.timestampRolloverStream.discontinuity();
9910 }
9911 };
9912
  // Forward the timestamp at which appended audio will start to the audio
  // segment stream; a no-op when there is no audio track.
  this.setAudioAppendStart = function (timestamp) {
    if (audioTrack) {
      this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
    }
  };
9918
  // Update the `remux` option and propagate it to the live coalesce stream,
  // if a pipeline has already been set up.
  this.setRemux = function (val) {
    var pipeline = this.transmuxPipeline_;

    options.remux = val;

    if (pipeline && pipeline.coalesceStream) {
      pipeline.coalesceStream.setRemux(val);
    }
  };
9928
  // Hand a list of gops to the video segment stream to align appends with;
  // a no-op when there is no video track or no video segment stream yet.
  this.alignGopsWith = function (gopsToAlignWith) {
    if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
      this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
    }
  };
9934
  // feed incoming data to the front of the parsing pipeline
  this.push = function (data) {
    if (hasFlushed) {
      // after a flush the container format may change (ts <-> aac), so sniff
      // the new data and rebuild the pipeline when the type differs
      var isAac = isLikelyAacData$1(data);

      if (isAac && this.transmuxPipeline_.type !== 'aac') {
        this.setupAacPipeline();
      } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
        this.setupTsPipeline();
      }
      hasFlushed = false;
    }
    this.transmuxPipeline_.headOfPipeline.push(data);
  };
9949
  // flush any buffered data
  this.flush = function () {
    // remember that a flush happened so the next push may rebuild the pipeline
    hasFlushed = true;
    // Start at the top of the pipeline and flush all pending work
    this.transmuxPipeline_.headOfPipeline.flush();
  };
9956
  // Signal the end of the current timeline through the whole pipeline.
  this.endTimeline = function () {
    this.transmuxPipeline_.headOfPipeline.endTimeline();
  };
9960
  // Reset the pipeline's internal state, when a pipeline has been set up.
  this.reset = function () {
    if (this.transmuxPipeline_.headOfPipeline) {
      this.transmuxPipeline_.headOfPipeline.reset();
    }
  };
9966
  // Caption data has to be reset when seeking outside buffered range
  this.resetCaptions = function () {
    // guard: not every pipeline configuration creates a captionStream
    if (this.transmuxPipeline_.captionStream) {
      this.transmuxPipeline_.captionStream.reset();
    }
  };
9973 };
  // inherit event facilities (on/trigger used above) from Stream
  _Transmuxer.prototype = new stream();

  // public surface of this module; some internals are exported for testing
  var transmuxer = {
    Transmuxer: _Transmuxer,
    VideoSegmentStream: _VideoSegmentStream,
    AudioSegmentStream: _AudioSegmentStream,
    AUDIO_PROPERTIES: AUDIO_PROPERTIES,
    VIDEO_PROPERTIES: VIDEO_PROPERTIES,
    // exported for testing
    generateVideoSegmentTimingInfo: generateVideoSegmentTimingInfo
  };
9985
9986 var classCallCheck = function classCallCheck(instance, Constructor) {
9987 if (!(instance instanceof Constructor)) {
9988 throw new TypeError("Cannot call a class as a function");
9989 }
9990 };
9991
9992 var createClass = function () {
9993 function defineProperties(target, props) {
9994 for (var i = 0; i < props.length; i++) {
9995 var descriptor = props[i];
9996 descriptor.enumerable = descriptor.enumerable || false;
9997 descriptor.configurable = true;
9998 if ("value" in descriptor) descriptor.writable = true;
9999 Object.defineProperty(target, descriptor.key, descriptor);
10000 }
10001 }
10002
10003 return function (Constructor, protoProps, staticProps) {
10004 if (protoProps) defineProperties(Constructor.prototype, protoProps);
10005 if (staticProps) defineProperties(Constructor, staticProps);
10006 return Constructor;
10007 };
10008 }();
10009
10010 /**
10011 * @file transmuxer-worker.js
10012 */
10013
10014 /**
10015 * Re-emits transmuxer events by converting them into messages to the
10016 * world outside the worker.
10017 *
10018 * @param {Object} transmuxer the transmuxer to wire events on
10019 * @private
10020 */
10021 var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer$$1) {
10022 transmuxer$$1.on('data', function (segment) {
10023 // transfer ownership of the underlying ArrayBuffer
10024 // instead of doing a copy to save memory
10025 // ArrayBuffers are transferable but generic TypedArrays are not
10026 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
10027 var initArray = segment.initSegment;
10028
10029 segment.initSegment = {
10030 data: initArray.buffer,
10031 byteOffset: initArray.byteOffset,
10032 byteLength: initArray.byteLength
10033 };
10034
10035 var typedArray = segment.data;
10036
10037 segment.data = typedArray.buffer;
10038 self.postMessage({
10039 action: 'data',
10040 segment: segment,
10041 byteOffset: typedArray.byteOffset,
10042 byteLength: typedArray.byteLength
10043 }, [segment.data]);
10044 });
10045
10046 if (transmuxer$$1.captionStream) {
10047 transmuxer$$1.captionStream.on('data', function (caption) {
10048 self.postMessage({
10049 action: 'caption',
10050 data: caption
10051 });
10052 });
10053 }
10054
10055 transmuxer$$1.on('done', function (data) {
10056 self.postMessage({ action: 'done' });
10057 });
10058
10059 transmuxer$$1.on('gopInfo', function (gopInfo) {
10060 self.postMessage({
10061 action: 'gopInfo',
10062 gopInfo: gopInfo
10063 });
10064 });
10065
10066 transmuxer$$1.on('videoSegmentTimingInfo', function (videoSegmentTimingInfo) {
10067 self.postMessage({
10068 action: 'videoSegmentTimingInfo',
10069 videoSegmentTimingInfo: videoSegmentTimingInfo
10070 });
10071 });
10072 };
10073
  /**
   * All incoming messages route through this hash. If no function exists
   * to handle an incoming message, then we ignore the message.
   *
   * @class MessageHandlers
   * @param {Object} self the worker scope, used to post replies
   * @param {Object} options the options to initialize the transmuxer with
   */

  var MessageHandlers = function () {
    function MessageHandlers(self, options) {
      classCallCheck(this, MessageHandlers);

      this.options = options || {};
      this.self = self;
      this.init();
    }

    /**
     * initialize our web worker and wire all the events.
     */

    createClass(MessageHandlers, [{
      key: 'init',
      value: function init() {
        // dispose of any previous transmuxer before replacing it
        if (this.transmuxer) {
          this.transmuxer.dispose();
        }
        this.transmuxer = new transmuxer.Transmuxer(this.options);
        wireTransmuxerEvents(this.self, this.transmuxer);
      }

      /**
       * Adds data (a ts segment) to the start of the transmuxer pipeline for
       * processing.
       *
       * @param {ArrayBuffer} data data to push into the muxer
       */

    }, {
      key: 'push',
      value: function push(data) {
        // Cast array buffer to correct type for transmuxer
        var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);

        this.transmuxer.push(segment);
      }

      /**
       * Recreate the transmuxer so that the next segment added via `push`
       * starts with a fresh transmuxer.
       */

    }, {
      key: 'reset',
      value: function reset() {
        this.init();
      }

      /**
       * Set the value that will be used as the `baseMediaDecodeTime` time for the
       * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
       * set relative to the first based on the PTS values.
       *
       * @param {Object} data used to set the timestamp offset in the muxer
       */

    }, {
      key: 'setTimestampOffset',
      value: function setTimestampOffset(data) {
        var timestampOffset = data.timestampOffset || 0;

        // convert seconds to 90kHz clock units before handing off
        this.transmuxer.setBaseMediaDecodeTime(Math.round(timestampOffset * 90000));
      }
    }, {
      key: 'setAudioAppendStart',
      value: function setAudioAppendStart(data) {
        // convert seconds to 90kHz clock units
        this.transmuxer.setAudioAppendStart(Math.ceil(data.appendStart * 90000));
      }

      /**
       * Forces the pipeline to finish processing the last segment and emit its
       * results.
       *
       * @param {Object} data event data, not really used
       */

    }, {
      key: 'flush',
      value: function flush(data) {
        this.transmuxer.flush();
      }
    }, {
      key: 'resetCaptions',
      value: function resetCaptions() {
        this.transmuxer.resetCaptions();
      }
    }, {
      key: 'alignGopsWith',
      value: function alignGopsWith(data) {
        // copy the gop list so the worker owns its own version
        this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
      }
    }]);
    return MessageHandlers;
  }();
10178
10179 /**
10180 * Our web wroker interface so that things can talk to mux.js
10181 * that will be running in a web worker. the scope is passed to this by
10182 * webworkify.
10183 *
10184 * @param {Object} self the scope for the web worker
10185 */
10186
10187 var TransmuxerWorker = function TransmuxerWorker(self) {
10188 self.onmessage = function (event) {
10189 if (event.data.action === 'init' && event.data.options) {
10190 this.messageHandlers = new MessageHandlers(self, event.data.options);
10191 return;
10192 }
10193
10194 if (!this.messageHandlers) {
10195 this.messageHandlers = new MessageHandlers(self);
10196 }
10197
10198 if (event.data && event.data.action && event.data.action !== 'init') {
10199 if (this.messageHandlers[event.data.action]) {
10200 this.messageHandlers[event.data.action](event.data);
10201 }
10202 }
10203 };
10204 };
10205
10206 var transmuxerWorker = new TransmuxerWorker(self);
10207
10208 return transmuxerWorker;
10209 }();
10210 });
10211
10212 /**
10213 * @file - codecs.js - Handles tasks regarding codec strings such as translating them to
10214 * codec strings, or translating codec strings into objects that can be examined.
10215 */
10216
  // Default codec parameters if none were provided for video and/or audio
  var defaultCodecs = {
    videoCodec: 'avc1',
    // avc1.4d400d: 0x4d = H.264 Main profile, 0x0d = level 1.3
    videoObjectTypeIndicator: '.4d400d',
    // AAC-LC
    audioProfile: '2'
  };
10224
10225 /**
10226 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
10227 * `avc1.<hhhhhh>`
10228 *
10229 * @param {Array} codecs an array of codec strings to fix
10230 * @return {Array} the translated codec array
10231 * @private
10232 */
10233 var translateLegacyCodecs = function translateLegacyCodecs(codecs) {
10234 return codecs.map(function (codec) {
10235 return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
10236 var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
10237 var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
10238
10239 return 'avc1.' + profileHex + '00' + avcLevelHex;
10240 });
10241 });
10242 };
10243
10244 /**
10245 * Parses a codec string to retrieve the number of codecs specified,
10246 * the video codec and object type indicator, and the audio profile.
10247 */
10248
10249 var parseCodecs = function parseCodecs() {
10250 var codecs = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : '';
10251
10252 var result = {
10253 codecCount: 0
10254 };
10255 var parsed = void 0;
10256
10257 result.codecCount = codecs.split(',').length;
10258 result.codecCount = result.codecCount || 2;
10259
10260 // parse the video codec
10261 parsed = /(^|\s|,)+(avc[13])([^ ,]*)/i.exec(codecs);
10262 if (parsed) {
10263 result.videoCodec = parsed[2];
10264 result.videoObjectTypeIndicator = parsed[3];
10265 }
10266
10267 // parse the last field of the audio codec
10268 result.audioProfile = /(^|\s|,)+mp4a.[0-9A-Fa-f]+\.([0-9A-Fa-f]+)/i.exec(codecs);
10269 result.audioProfile = result.audioProfile && result.audioProfile[2];
10270
10271 return result;
10272 };
10273
10274 /**
10275 * Replace codecs in the codec string with the old apple-style `avc1.<dd>.<dd>` to the
10276 * standard `avc1.<hhhhhh>`.
10277 *
10278 * @param codecString {String} the codec string
10279 * @return {String} the codec string with old apple-style codecs replaced
10280 *
10281 * @private
10282 */
10283 var mapLegacyAvcCodecs = function mapLegacyAvcCodecs(codecString) {
10284 return codecString.replace(/avc1\.(\d+)\.(\d+)/i, function (match) {
10285 return translateLegacyCodecs([match])[0];
10286 });
10287 };
10288
10289 /**
10290 * Build a media mime-type string from a set of parameters
10291 * @param {String} type either 'audio' or 'video'
10292 * @param {String} container either 'mp2t' or 'mp4'
10293 * @param {Array} codecs an array of codec strings to add
10294 * @return {String} a valid media mime-type
10295 */
10296 var makeMimeTypeString = function makeMimeTypeString(type, container, codecs) {
10297 // The codecs array is filtered so that falsey values are
10298 // dropped and don't cause Array#join to create spurious
10299 // commas
10300 return type + '/' + container + '; codecs="' + codecs.filter(function (c) {
10301 return !!c;
10302 }).join(', ') + '"';
10303 };
10304
10305 /**
10306 * Returns the type container based on information in the playlist
10307 * @param {Playlist} media the current media playlist
10308 * @return {String} a valid media container type
10309 */
10310 var getContainerType = function getContainerType(media) {
10311 // An initialization segment means the media playlist is an iframe
10312 // playlist or is using the mp4 container. We don't currently
10313 // support iframe playlists, so assume this is signalling mp4
10314 // fragments.
10315 if (media.segments && media.segments.length && media.segments[0].map) {
10316 return 'mp4';
10317 }
10318 return 'mp2t';
10319 };
10320
10321 /**
10322 * Returns a set of codec strings parsed from the playlist or the default
10323 * codec strings if no codecs were specified in the playlist
10324 * @param {Playlist} media the current media playlist
10325 * @return {Object} an object with the video and audio codecs
10326 */
10327 var getCodecs = function getCodecs(media) {
10328 // if the codecs were explicitly specified, use them instead of the
10329 // defaults
10330 var mediaAttributes = media.attributes || {};
10331
10332 if (mediaAttributes.CODECS) {
10333 return parseCodecs(mediaAttributes.CODECS);
10334 }
10335 return defaultCodecs;
10336 };
10337
10338 var audioProfileFromDefault = function audioProfileFromDefault(master, audioGroupId) {
10339 if (!master.mediaGroups.AUDIO || !audioGroupId) {
10340 return null;
10341 }
10342
10343 var audioGroup = master.mediaGroups.AUDIO[audioGroupId];
10344
10345 if (!audioGroup) {
10346 return null;
10347 }
10348
10349 for (var name in audioGroup) {
10350 var audioType = audioGroup[name];
10351
10352 if (audioType.default && audioType.playlists) {
10353 // codec should be the same for all playlists within the audio type
10354 return parseCodecs(audioType.playlists[0].attributes.CODECS).audioProfile;
10355 }
10356 }
10357
10358 return null;
10359 };
10360
10361 /**
10362 * Calculates the MIME type strings for a working configuration of
10363 * SourceBuffers to play variant streams in a master playlist. If
10364 * there is no possible working configuration, an empty array will be
10365 * returned.
10366 *
10367 * @param master {Object} the m3u8 object for the master playlist
10368 * @param media {Object} the m3u8 object for the variant playlist
10369 * @return {Array} the MIME type strings. If the array has more than
10370 * one entry, the first element should be applied to the video
10371 * SourceBuffer and the second to the audio SourceBuffer.
10372 *
10373 * @private
10374 */
10375 var mimeTypesForPlaylist = function mimeTypesForPlaylist(master, media) {
10376 var containerType = getContainerType(media);
10377 var codecInfo = getCodecs(media);
10378 var mediaAttributes = media.attributes || {};
10379 // Default condition for a traditional HLS (no demuxed audio/video)
10380 var isMuxed = true;
10381 var isMaat = false;
10382
10383 if (!media) {
10384 // Not enough information
10385 return [];
10386 }
10387
10388 if (master.mediaGroups.AUDIO && mediaAttributes.AUDIO) {
10389 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
10390
10391 // Handle the case where we are in a multiple-audio track scenario
10392 if (audioGroup) {
10393 isMaat = true;
10394 // Start with the everything demuxed then...
10395 isMuxed = false;
10396 // ...check to see if any audio group tracks are muxed (ie. lacking a uri)
10397 for (var groupId in audioGroup) {
10398 // either a uri is present (if the case of HLS and an external playlist), or
10399 // playlists is present (in the case of DASH where we don't have external audio
10400 // playlists)
10401 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
10402 isMuxed = true;
10403 break;
10404 }
10405 }
10406 }
10407 }
10408
10409 // HLS with multiple-audio tracks must always get an audio codec.
10410 // Put another way, there is no way to have a video-only multiple-audio HLS!
10411 if (isMaat && !codecInfo.audioProfile) {
10412 if (!isMuxed) {
10413 // It is possible for codecs to be specified on the audio media group playlist but
10414 // not on the rendition playlist. This is mostly the case for DASH, where audio and
10415 // video are always separate (and separately specified).
10416 codecInfo.audioProfile = audioProfileFromDefault(master, mediaAttributes.AUDIO);
10417 }
10418
10419 if (!codecInfo.audioProfile) {
10420 videojs.log.warn('Multiple audio tracks present but no audio codec string is specified. ' + 'Attempting to use the default audio codec (mp4a.40.2)');
10421 codecInfo.audioProfile = defaultCodecs.audioProfile;
10422 }
10423 }
10424
10425 // Generate the final codec strings from the codec object generated above
10426 var codecStrings = {};
10427
10428 if (codecInfo.videoCodec) {
10429 codecStrings.video = '' + codecInfo.videoCodec + codecInfo.videoObjectTypeIndicator;
10430 }
10431
10432 if (codecInfo.audioProfile) {
10433 codecStrings.audio = 'mp4a.40.' + codecInfo.audioProfile;
10434 }
10435
10436 // Finally, make and return an array with proper mime-types depending on
10437 // the configuration
10438 var justAudio = makeMimeTypeString('audio', containerType, [codecStrings.audio]);
10439 var justVideo = makeMimeTypeString('video', containerType, [codecStrings.video]);
10440 var bothVideoAudio = makeMimeTypeString('video', containerType, [codecStrings.video, codecStrings.audio]);
10441
10442 if (isMaat) {
10443 if (!isMuxed && codecStrings.video) {
10444 return [justVideo, justAudio];
10445 }
10446
10447 if (!isMuxed && !codecStrings.video) {
10448 // There is no muxed content and no video codec string, so this is an audio only
10449 // stream with alternate audio.
10450 return [justAudio, justAudio];
10451 }
10452
10453 // There exists the possiblity that this will return a `video/container`
10454 // mime-type for the first entry in the array even when there is only audio.
10455 // This doesn't appear to be a problem and simplifies the code.
10456 return [bothVideoAudio, justAudio];
10457 }
10458
10459 // If there is no video codec at all, always just return a single
10460 // audio/<container> mime-type
10461 if (!codecStrings.video) {
10462 return [justAudio];
10463 }
10464
10465 // When not using separate audio media groups, audio and video is
10466 // *always* muxed
10467 return [bothVideoAudio];
10468 };
10469
10470 /**
10471 * Parse a content type header into a type and parameters
10472 * object
10473 *
10474 * @param {String} type the content type header
10475 * @return {Object} the parsed content-type
10476 * @private
10477 */
10478 var parseContentType = function parseContentType(type) {
10479 var object = { type: '', parameters: {} };
10480 var parameters = type.trim().split(';');
10481
10482 // first parameter should always be content-type
10483 object.type = parameters.shift().trim();
10484 parameters.forEach(function (parameter) {
10485 var pair = parameter.trim().split('=');
10486
10487 if (pair.length > 1) {
10488 var name = pair[0].replace(/"/g, '').trim();
10489 var value = pair[1].replace(/"/g, '').trim();
10490
10491 object.parameters[name] = value;
10492 }
10493 });
10494
10495 return object;
10496 };
10497
10498 /**
10499 * Check if a codec string refers to an audio codec.
10500 *
10501 * @param {String} codec codec string to check
10502 * @return {Boolean} if this is an audio codec
10503 * @private
10504 */
10505 var isAudioCodec = function isAudioCodec(codec) {
10506 return (/mp4a\.\d+.\d+/i.test(codec)
10507 );
10508 };
10509
10510 /**
10511 * Check if a codec string refers to a video codec.
10512 *
10513 * @param {String} codec codec string to check
10514 * @return {Boolean} if this is a video codec
10515 * @private
10516 */
10517 var isVideoCodec = function isVideoCodec(codec) {
10518 return (/avc1\.[\da-f]+/i.test(codec)
10519 );
10520 };
10521
10522 /**
10523 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
10524 * front of current time.
10525 *
10526 * @param {Array} buffer
10527 * The current buffer of gop information
10528 * @param {Number} currentTime
10529 * The current time
10530 * @param {Double} mapping
10531 * Offset to map display time to stream presentation time
10532 * @return {Array}
10533 * List of gops considered safe to append over
10534 */
10535 var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
10536 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
10537 return [];
10538 }
10539
10540 // pts value for current time + 3 seconds to give a bit more wiggle room
10541 var currentTimePts = Math.ceil((currentTime - mapping + 3) * 90000);
10542
10543 var i = void 0;
10544
10545 for (i = 0; i < buffer.length; i++) {
10546 if (buffer[i].pts > currentTimePts) {
10547 break;
10548 }
10549 }
10550
10551 return buffer.slice(i);
10552 };
10553
10554 /**
10555 * Appends gop information (timing and byteLength) received by the transmuxer for the
10556 * gops appended in the last call to appendBuffer
10557 *
10558 * @param {Array} buffer
10559 * The current buffer of gop information
10560 * @param {Array} gops
10561 * List of new gop information
10562 * @param {boolean} replace
10563 * If true, replace the buffer with the new gop information. If false, append the
10564 * new gop information to the buffer in the right location of time.
10565 * @return {Array}
10566 * Updated list of gop information
10567 */
10568 var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
10569 if (!gops.length) {
10570 return buffer;
10571 }
10572
10573 if (replace) {
10574 // If we are in safe append mode, then completely overwrite the gop buffer
10575 // with the most recent appeneded data. This will make sure that when appending
10576 // future segments, we only try to align with gops that are both ahead of current
10577 // time and in the last segment appended.
10578 return gops.slice();
10579 }
10580
10581 var start = gops[0].pts;
10582
10583 var i = 0;
10584
10585 for (i; i < buffer.length; i++) {
10586 if (buffer[i].pts >= start) {
10587 break;
10588 }
10589 }
10590
10591 return buffer.slice(0, i).concat(gops);
10592 };
10593
10594 /**
10595 * Removes gop information in buffer that overlaps with provided start and end
10596 *
10597 * @param {Array} buffer
10598 * The current buffer of gop information
10599 * @param {Double} start
10600 * position to start the remove at
10601 * @param {Double} end
10602 * position to end the remove at
10603 * @param {Double} mapping
10604 * Offset to map display time to stream presentation time
10605 */
10606 var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
10607 var startPts = Math.ceil((start - mapping) * 90000);
10608 var endPts = Math.ceil((end - mapping) * 90000);
10609 var updatedBuffer = buffer.slice();
10610
10611 var i = buffer.length;
10612
10613 while (i--) {
10614 if (buffer[i].pts <= endPts) {
10615 break;
10616 }
10617 }
10618
10619 if (i === -1) {
10620 // no removal because end of remove range is before start of buffer
10621 return updatedBuffer;
10622 }
10623
10624 var j = i + 1;
10625
10626 while (j--) {
10627 if (buffer[j].pts <= startPts) {
10628 break;
10629 }
10630 }
10631
10632 // clamp remove range start to 0 index
10633 j = Math.max(j, 0);
10634
10635 updatedBuffer.splice(j, i - j + 1);
10636
10637 return updatedBuffer;
10638 };
10639
  /**
   * Compute the time ranges buffered in *both* the video and audio source
   * buffers (their intersection), falling back to whichever buffer exists
   * when only one is configured.
   *
   * @param {Object|null} videoBuffer the (wrapped) video source buffer
   * @param {Object|null} audioBuffer the (wrapped) audio source buffer
   * @param {boolean} audioDisabled when true, report only the video ranges
   * @return {TimeRanges} the resulting buffered ranges
   */
  var buffered = function buffered(videoBuffer, audioBuffer, audioDisabled) {
    var start = null;
    var end = null;
    var arity = 0;
    var extents = [];
    var ranges = [];

    // neither buffer has been created yet
    if (!videoBuffer && !audioBuffer) {
      return videojs.createTimeRange();
    }

    // only one buffer is configured
    if (!videoBuffer) {
      return audioBuffer.buffered;
    }
    if (!audioBuffer) {
      return videoBuffer.buffered;
    }

    // both buffers are configured
    if (audioDisabled) {
      return videoBuffer.buffered;
    }

    // both buffers are empty
    if (videoBuffer.buffered.length === 0 && audioBuffer.buffered.length === 0) {
      return videojs.createTimeRange();
    }

    // Handle the case where we have both buffers and create an
    // intersection of the two
    var videoBuffered = videoBuffer.buffered;
    var audioBuffered = audioBuffer.buffered;
    var count = videoBuffered.length;

    // A) Gather up all start and end times
    while (count--) {
      extents.push({ time: videoBuffered.start(count), type: 'start' });
      extents.push({ time: videoBuffered.end(count), type: 'end' });
    }
    count = audioBuffered.length;
    while (count--) {
      extents.push({ time: audioBuffered.start(count), type: 'start' });
      extents.push({ time: audioBuffered.end(count), type: 'end' });
    }
    // B) Sort them by time
    extents.sort(function (a, b) {
      return a.time - b.time;
    });

    // C) Go along one by one incrementing arity for start and decrementing
    // arity for ends
    for (count = 0; count < extents.length; count++) {
      if (extents[count].type === 'start') {
        arity++;

        // D) If arity is ever incremented to 2 we are entering an
        // overlapping range
        if (arity === 2) {
          start = extents[count].time;
        }
      } else if (extents[count].type === 'end') {
        arity--;

        // E) If arity is ever decremented to 1 we leaving an
        // overlapping range
        if (arity === 1) {
          end = extents[count].time;
        }
      }

      // F) Record overlapping ranges
      if (start !== null && end !== null) {
        ranges.push([start, end]);
        start = null;
        end = null;
      }
    }

    return videojs.createTimeRanges(ranges);
  };
10722
  /**
   * @file virtual-source-buffer.js
   */

  // MPEG-TS 90kHz clock ticks per second
  var ONE_SECOND_IN_TS = 90000;
10728
10729 // We create a wrapper around the SourceBuffer so that we can manage the
10730 // state of the `updating` property manually. We have to do this because
10731 // Firefox changes `updating` to false long before triggering `updateend`
10732 // events and that was causing strange problems in videojs-contrib-hls
10733 var makeWrappedSourceBuffer = function makeWrappedSourceBuffer(mediaSource, mimeType) {
10734 var sourceBuffer = mediaSource.addSourceBuffer(mimeType);
10735 var wrapper = Object.create(null);
10736
10737 wrapper.updating = false;
10738 wrapper.realBuffer_ = sourceBuffer;
10739
10740 var _loop = function _loop(key) {
10741 if (typeof sourceBuffer[key] === 'function') {
10742 wrapper[key] = function () {
10743 return sourceBuffer[key].apply(sourceBuffer, arguments);
10744 };
10745 } else if (typeof wrapper[key] === 'undefined') {
10746 Object.defineProperty(wrapper, key, {
10747 get: function get$$1() {
10748 return sourceBuffer[key];
10749 },
10750 set: function set$$1(v) {
10751 return sourceBuffer[key] = v;
10752 }
10753 });
10754 }
10755 };
10756
10757 for (var key in sourceBuffer) {
10758 _loop(key);
10759 }
10760
10761 return wrapper;
10762 };
10763
10764 /**
10765 * VirtualSourceBuffers exist so that we can transmux non native formats
10766 * into a native format, but keep the same api as a native source buffer.
10767 * It creates a transmuxer, that works in its own thread (a web worker) and
10768 * that transmuxer muxes the data into a native format. VirtualSourceBuffer will
10769 * then send all of that data to the naive sourcebuffer so that it is
10770 * indestinguishable from a natively supported format.
10771 *
10772 * @param {HtmlMediaSource} mediaSource the parent mediaSource
10773 * @param {Array} codecs array of codecs that we will be dealing with
10774 * @class VirtualSourceBuffer
10775 * @extends video.js.EventTarget
10776 */
10777
10778 var VirtualSourceBuffer = function (_videojs$EventTarget) {
10779 inherits(VirtualSourceBuffer, _videojs$EventTarget);
10780
    function VirtualSourceBuffer(mediaSource, codecs) {
      classCallCheck(this, VirtualSourceBuffer);

      // transpiled super() call into videojs.EventTarget
      var _this = possibleConstructorReturn(this, (VirtualSourceBuffer.__proto__ || Object.getPrototypeOf(VirtualSourceBuffer)).call(this, videojs.EventTarget));

      // append/transmux bookkeeping state
      _this.timestampOffset_ = 0;
      _this.pendingBuffers_ = [];
      _this.bufferUpdating_ = false;

      _this.mediaSource_ = mediaSource;
      _this.codecs_ = codecs;
      _this.audioCodec_ = null;
      _this.videoCodec_ = null;
      _this.audioDisabled_ = false;
      _this.appendAudioInitSegment_ = true;
      _this.gopBuffer_ = [];
      _this.timeMapping_ = 0;
      // IE11+ requires "safe append" handling (also enables gop alignment)
      _this.safeAppend_ = videojs.browser.IE_VERSION >= 11;

      var options = {
        remux: false,
        alignGopsAtEnd: _this.safeAppend_
      };

      // classify the supplied codec strings into audio/video
      _this.codecs_.forEach(function (codec) {
        if (isAudioCodec(codec)) {
          _this.audioCodec_ = codec;
        } else if (isVideoCodec(codec)) {
          _this.videoCodec_ = codec;
        }
      });

      // append muxed segments to their respective native buffers as
      // soon as they are available
      _this.transmuxer_ = new TransmuxWorker();
      _this.transmuxer_.postMessage({ action: 'init', options: options });

      // route worker messages to the matching private handler
      _this.transmuxer_.onmessage = function (event) {
        if (event.data.action === 'data') {
          return _this.data_(event);
        }

        if (event.data.action === 'done') {
          return _this.done_(event);
        }

        if (event.data.action === 'gopInfo') {
          return _this.appendGopInfo_(event);
        }

        if (event.data.action === 'videoSegmentTimingInfo') {
          return _this.videoSegmentTimingInfo_(event.data.videoSegmentTimingInfo);
        }
      };

      // this timestampOffset is a property with the side-effect of resetting
      // baseMediaDecodeTime in the transmuxer on the setter
      Object.defineProperty(_this, 'timestampOffset', {
        get: function get$$1() {
          return this.timestampOffset_;
        },
        set: function set$$1(val) {
          if (typeof val === 'number' && val >= 0) {
            this.timestampOffset_ = val;
            this.appendAudioInitSegment_ = true;

            // reset gop buffer on timestampoffset as this signals a change in timeline
            this.gopBuffer_.length = 0;
            this.timeMapping_ = 0;

            // We have to tell the transmuxer to set the baseMediaDecodeTime to
            // the desired timestampOffset for the next segment
            this.transmuxer_.postMessage({
              action: 'setTimestampOffset',
              timestampOffset: val
            });
          }
        }
      });

      // setting the append window affects both source buffers
      Object.defineProperty(_this, 'appendWindowStart', {
        get: function get$$1() {
          return (this.videoBuffer_ || this.audioBuffer_).appendWindowStart;
        },
        set: function set$$1(start) {
          if (this.videoBuffer_) {
            this.videoBuffer_.appendWindowStart = start;
          }
          if (this.audioBuffer_) {
            this.audioBuffer_.appendWindowStart = start;
          }
        }
      });

      // this buffer is "updating" if either of its native buffers are
      Object.defineProperty(_this, 'updating', {
        get: function get$$1() {
          return !!(this.bufferUpdating_ || !this.audioDisabled_ && this.audioBuffer_ && this.audioBuffer_.updating || this.videoBuffer_ && this.videoBuffer_.updating);
        }
      });

      // the buffered property is the intersection of the buffered
      // ranges of the native source buffers
      Object.defineProperty(_this, 'buffered', {
        get: function get$$1() {
          return buffered(this.videoBuffer_, this.audioBuffer_, this.audioDisabled_);
        }
      });
      return _this;
    }
10892
    /**
     * When we get a data event from the transmuxer
     * we call this function and handle the data that
     * was sent to us
     *
     * @private
     * @param {Event} event the data event from the transmuxer
     */


    createClass(VirtualSourceBuffer, [{
      key: 'data_',
      value: function data_(event) {
        var segment = event.data.segment;

        // Cast ArrayBuffer to TypedArray
        segment.data = new Uint8Array(segment.data, event.data.byteOffset, event.data.byteLength);

        segment.initSegment = new Uint8Array(segment.initSegment.data, segment.initSegment.byteOffset, segment.initSegment.byteLength);

        createTextTracksIfNecessary(this, this.mediaSource_, segment);

        // Add the segments to the pendingBuffers array; they are held here
        // until the transmuxer signals 'done' (see done_)
        this.pendingBuffers_.push(segment);
        return;
      }
10919
10920 /**
10921 * When we get a done event from the transmuxer
10922 * we call this function and we process all
10923 * of the pending data that we have been saving in the
10924 * data_ function
10925 *
10926 * @private
10927 * @param {Event} event the done event from the transmuxer
10928 */
10929
10930 }, {
10931 key: 'done_',
10932 value: function done_(event) {
10933 // Don't process and append data if the mediaSource is closed
10934 if (this.mediaSource_.readyState === 'closed') {
10935 this.pendingBuffers_.length = 0;
10936 return;
10937 }
10938
10939 // All buffers should have been flushed from the muxer
10940 // start processing anything we have received
10941 this.processPendingSegments_();
10942 return;
10943 }
10944 }, {
10945 key: 'videoSegmentTimingInfo_',
10946 value: function videoSegmentTimingInfo_(timingInfo) {
10947 var timingInfoInSeconds = {
10948 start: {
10949 decode: timingInfo.start.dts / ONE_SECOND_IN_TS,
10950 presentation: timingInfo.start.pts / ONE_SECOND_IN_TS
10951 },
10952 end: {
10953 decode: timingInfo.end.dts / ONE_SECOND_IN_TS,
10954 presentation: timingInfo.end.pts / ONE_SECOND_IN_TS
10955 },
10956 baseMediaDecodeTime: timingInfo.baseMediaDecodeTime / ONE_SECOND_IN_TS
10957 };
10958
10959 if (timingInfo.prependedContentDuration) {
10960 timingInfoInSeconds.prependedContentDuration = timingInfo.prependedContentDuration / ONE_SECOND_IN_TS;
10961 }
10962
10963 this.trigger({
10964 type: 'videoSegmentTimingInfo',
10965 videoSegmentTimingInfo: timingInfoInSeconds
10966 });
10967 }
10968
10969 /**
10970 * Create our internal native audio/video source buffers and add
10971 * event handlers to them with the following conditions:
10972 * 1. they do not already exist on the mediaSource
10973 * 2. this VSB has a codec for them
10974 *
10975 * @private
10976 */
10977
  }, {
    key: 'createRealSourceBuffers_',
    value: function createRealSourceBuffers_() {
      var _this2 = this;

      var types = ['audio', 'video'];

      types.forEach(function (type) {
        // Don't create a SourceBuffer of this type if we don't have a
        // codec for it
        if (!_this2[type + 'Codec_']) {
          return;
        }

        // Do nothing if a SourceBuffer of this type already exists
        if (_this2[type + 'Buffer_']) {
          return;
        }

        var buffer = null;

        // If the mediasource already has a SourceBuffer for the codec
        // use that
        if (_this2.mediaSource_[type + 'Buffer_']) {
          buffer = _this2.mediaSource_[type + 'Buffer_'];
          // In multiple audio track cases, the audio source buffer is disabled
          // on the main VirtualSourceBuffer by the HTMLMediaSource much earlier
          // than createRealSourceBuffers_ is called to create the second
          // VirtualSourceBuffer because that happens as a side-effect of
          // videojs-contrib-hls starting the audioSegmentLoader. As a result,
          // the audioBuffer is essentially "ownerless" and no one will toggle
          // the `updating` state back to false once the `updateend` event is received
          //
          // Setting `updating` to false manually will work around this
          // situation and allow work to continue
          buffer.updating = false;
        } else {
          // No shared buffer yet: create a wrapped native SourceBuffer for
          // this type's codec and cache it on the mediaSource so a later
          // VirtualSourceBuffer can reuse it (the branch above)
          var codecProperty = type + 'Codec_';
          var mimeType = type + '/mp4;codecs="' + _this2[codecProperty] + '"';

          buffer = makeWrappedSourceBuffer(_this2.mediaSource_.nativeMediaSource_, mimeType);

          _this2.mediaSource_[type + 'Buffer_'] = buffer;
        }

        _this2[type + 'Buffer_'] = buffer;

        // Wire up the events to the SourceBuffer
        ['update', 'updatestart', 'updateend'].forEach(function (event) {
          buffer.addEventListener(event, function () {
            // if audio is disabled
            if (type === 'audio' && _this2.audioDisabled_) {
              return;
            }

            // updateend marks the end of the native buffer's work; clear our
            // manually-tracked updating flag (see the "ownerless" note above)
            if (event === 'updateend') {
              _this2[type + 'Buffer_'].updating = false;
            }

            // Only re-emit the event on the VirtualSourceBuffer once NO
            // enabled native buffer of the other type is still updating
            var shouldTrigger = types.every(function (t) {
              // skip checking audio's updating status if audio
              // is not enabled
              if (t === 'audio' && _this2.audioDisabled_) {
                return true;
              }
              // if the other type is updating we don't trigger
              if (type !== t && _this2[t + 'Buffer_'] && _this2[t + 'Buffer_'].updating) {
                return false;
              }
              return true;
            });

            if (shouldTrigger) {
              return _this2.trigger(event);
            }
          });
        });
      });
    }
11057
11058 /**
11059 * Emulate the native mediasource function, but our function will
11060 * send all of the proposed segments to the transmuxer so that we
11061 * can transmux them before we append them to our internal
11062 * native source buffers in the correct format.
11063 *
11064 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
11065 * @param {Uint8Array} segment the segment to append to the buffer
11066 */
11067
11068 }, {
11069 key: 'appendBuffer',
11070 value: function appendBuffer(segment) {
11071 // Start the internal "updating" state
11072 this.bufferUpdating_ = true;
11073
11074 if (this.audioBuffer_ && this.audioBuffer_.buffered.length) {
11075 var audioBuffered = this.audioBuffer_.buffered;
11076
11077 this.transmuxer_.postMessage({
11078 action: 'setAudioAppendStart',
11079 appendStart: audioBuffered.end(audioBuffered.length - 1)
11080 });
11081 }
11082
11083 if (this.videoBuffer_) {
11084 this.transmuxer_.postMessage({
11085 action: 'alignGopsWith',
11086 gopsToAlignWith: gopsSafeToAlignWith(this.gopBuffer_, this.mediaSource_.player_ ? this.mediaSource_.player_.currentTime() : null, this.timeMapping_)
11087 });
11088 }
11089
11090 this.transmuxer_.postMessage({
11091 action: 'push',
11092 // Send the typed-array of data as an ArrayBuffer so that
11093 // it can be sent as a "Transferable" and avoid the costly
11094 // memory copy
11095 data: segment.buffer,
11096
11097 // To recreate the original typed-array, we need information
11098 // about what portion of the ArrayBuffer it was a view into
11099 byteOffset: segment.byteOffset,
11100 byteLength: segment.byteLength
11101 }, [segment.buffer]);
11102 this.transmuxer_.postMessage({ action: 'flush' });
11103 }
11104
11105 /**
11106 * Appends gop information (timing and byteLength) received by the transmuxer for the
11107 * gops appended in the last call to appendBuffer
11108 *
11109 * @param {Event} event
11110 * The gopInfo event from the transmuxer
11111 * @param {Array} event.data.gopInfo
11112 * List of gop info to append
11113 */
11114
  }, {
    key: 'appendGopInfo_',
    value: function appendGopInfo_(event) {
      // Merge the gop timing info from the last transmux into the running
      // gop buffer (respecting safeAppend_ semantics)
      this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, event.data.gopInfo, this.safeAppend_);
    }
11120
11121 /**
11122 * Emulate the native mediasource function and remove parts
11123 * of the buffer from any of our internal buffers that exist
11124 *
11125 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
11126 * @param {Double} start position to start the remove at
11127 * @param {Double} end position to end the remove at
11128 */
11129
11130 }, {
11131 key: 'remove',
11132 value: function remove(start, end) {
11133 if (this.videoBuffer_) {
11134 this.videoBuffer_.updating = true;
11135 this.videoBuffer_.remove(start, end);
11136 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
11137 }
11138 if (!this.audioDisabled_ && this.audioBuffer_) {
11139 this.audioBuffer_.updating = true;
11140 this.audioBuffer_.remove(start, end);
11141 }
11142
11143 // Remove Metadata Cues (id3)
11144 removeCuesFromTrack(start, end, this.metadataTrack_);
11145
11146 // Remove Any Captions
11147 if (this.inbandTextTracks_) {
11148 for (var track in this.inbandTextTracks_) {
11149 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
11150 }
11151 }
11152 }
11153
11154 /**
11155 * Process any segments that the muxer has output
11156 * Concatenate segments together based on type and append them into
11157 * their respective sourceBuffers
11158 *
11159 * @private
11160 */
11161
  }, {
    key: 'processPendingSegments_',
    value: function processPendingSegments_() {
      // Accumulator for the reduce below: transmuxed output grouped by type,
      // plus flattened captions/metadata
      var sortedSegments = {
        video: {
          segments: [],
          bytes: 0
        },
        audio: {
          segments: [],
          bytes: 0
        },
        captions: [],
        metadata: []
      };

      if (!this.pendingBuffers_.length) {
        // Nothing was produced by the transmuxer.
        // We are no longer in the internal "updating" state
        this.trigger('updateend');
        this.bufferUpdating_ = false;
        return;
      }

      // Sort segments into separate video/audio arrays and
      // keep track of their total byte lengths
      sortedSegments = this.pendingBuffers_.reduce(function (segmentObj, segment) {
        var type = segment.type;
        var data = segment.data;
        var initSegment = segment.initSegment;

        segmentObj[type].segments.push(data);
        segmentObj[type].bytes += data.byteLength;

        // last init segment of this type wins
        segmentObj[type].initSegment = initSegment;

        // Gather any captions into a single array
        if (segment.captions) {
          segmentObj.captions = segmentObj.captions.concat(segment.captions);
        }

        if (segment.info) {
          segmentObj[type].info = segment.info;
        }

        // Gather any metadata into a single array
        if (segment.metadata) {
          segmentObj.metadata = segmentObj.metadata.concat(segment.metadata);
        }

        return segmentObj;
      }, sortedSegments);

      // Create the real source buffers if they don't exist by now since we
      // finally are sure what tracks are contained in the source
      if (!this.videoBuffer_ && !this.audioBuffer_) {
        // Remove any codecs that may have been specified by default but
        // are no longer applicable now
        if (sortedSegments.video.bytes === 0) {
          this.videoCodec_ = null;
        }
        if (sortedSegments.audio.bytes === 0) {
          this.audioCodec_ = null;
        }

        this.createRealSourceBuffers_();
      }

      if (sortedSegments.audio.info) {
        this.mediaSource_.trigger({ type: 'audioinfo', info: sortedSegments.audio.info });
      }
      if (sortedSegments.video.info) {
        this.mediaSource_.trigger({ type: 'videoinfo', info: sortedSegments.video.info });
      }

      // Prepend the audio init segment once after a timeline change
      // (appendAudioInitSegment_ is set by the timestampOffset setter)
      if (this.appendAudioInitSegment_) {
        if (!this.audioDisabled_ && this.audioBuffer_) {
          sortedSegments.audio.segments.unshift(sortedSegments.audio.initSegment);
          sortedSegments.audio.bytes += sortedSegments.audio.initSegment.byteLength;
        }
        this.appendAudioInitSegment_ = false;
      }

      var triggerUpdateend = false;

      // Merge multiple video and audio segments into one and append
      if (this.videoBuffer_ && sortedSegments.video.bytes) {
        sortedSegments.video.segments.unshift(sortedSegments.video.initSegment);
        sortedSegments.video.bytes += sortedSegments.video.initSegment.byteLength;
        this.concatAndAppendSegments_(sortedSegments.video, this.videoBuffer_);
      } else if (this.videoBuffer_ && (this.audioDisabled_ || !this.audioBuffer_)) {
        // The transmuxer did not return any bytes of video, meaning it was all trimmed
        // for gop alignment. Since we have a video buffer and audio is disabled, updateend
        // will never be triggered by this source buffer, which will cause contrib-hls
        // to be stuck forever waiting for updateend. If audio is not disabled, updateend
        // will be triggered by the audio buffer, which will be sent upwards since the video
        // buffer will not be in an updating state.
        triggerUpdateend = true;
      }

      // Add text-track data for all
      addTextTrackData(this, sortedSegments.captions, sortedSegments.metadata);

      if (!this.audioDisabled_ && this.audioBuffer_) {
        this.concatAndAppendSegments_(sortedSegments.audio, this.audioBuffer_);
      }

      this.pendingBuffers_.length = 0;

      if (triggerUpdateend) {
        this.trigger('updateend');
      }

      // We are no longer in the internal "updating" state
      this.bufferUpdating_ = false;
    }
11277
11278 /**
11279 * Combine all segments into a single Uint8Array and then append them
11280 * to the destination buffer
11281 *
11282 * @param {Object} segmentObj
11283 * @param {SourceBuffer} destinationBuffer native source buffer to append data to
11284 * @private
11285 */
11286
11287 }, {
11288 key: 'concatAndAppendSegments_',
11289 value: function concatAndAppendSegments_(segmentObj, destinationBuffer) {
11290 var offset = 0;
11291 var tempBuffer = void 0;
11292
11293 if (segmentObj.bytes) {
11294 tempBuffer = new Uint8Array(segmentObj.bytes);
11295
11296 // Combine the individual segments into one large typed-array
11297 segmentObj.segments.forEach(function (segment) {
11298 tempBuffer.set(segment, offset);
11299 offset += segment.byteLength;
11300 });
11301
11302 try {
11303 destinationBuffer.updating = true;
11304 destinationBuffer.appendBuffer(tempBuffer);
11305 } catch (error) {
11306 if (this.mediaSource_.player_) {
11307 this.mediaSource_.player_.error({
11308 code: -3,
11309 type: 'APPEND_BUFFER_ERR',
11310 message: error.message,
11311 originalError: error
11312 });
11313 }
11314 }
11315 }
11316 }
11317
11318 /**
11319 * Emulate the native mediasource function. abort any soureBuffer
11320 * actions and throw out any un-appended data.
11321 *
11322 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
11323 */
11324
11325 }, {
11326 key: 'abort',
11327 value: function abort() {
11328 if (this.videoBuffer_) {
11329 this.videoBuffer_.abort();
11330 }
11331 if (!this.audioDisabled_ && this.audioBuffer_) {
11332 this.audioBuffer_.abort();
11333 }
11334 if (this.transmuxer_) {
11335 this.transmuxer_.postMessage({ action: 'reset' });
11336 }
11337 this.pendingBuffers_.length = 0;
11338 this.bufferUpdating_ = false;
11339 }
11340 }, {
11341 key: 'dispose',
11342 value: function dispose() {
11343 if (this.transmuxer_) {
11344 this.transmuxer_.terminate();
11345 }
11346 this.trigger('dispose');
11347 this.off();
11348 }
11349 }]);
11350 return VirtualSourceBuffer;
11351 }(videojs.EventTarget);
11352
11353 /**
11354 * @file html-media-source.js
11355 */
11356
11357 /**
11358 * Our MediaSource implementation in HTML, mimics native
11359 * MediaSource where/if possible.
11360 *
11361 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
11362 * @class HtmlMediaSource
11363 * @extends videojs.EventTarget
11364 */
11365
  var HtmlMediaSource = function (_videojs$EventTarget) {
    inherits(HtmlMediaSource, _videojs$EventTarget);

    function HtmlMediaSource() {
      classCallCheck(this, HtmlMediaSource);

      var _this = possibleConstructorReturn(this, (HtmlMediaSource.__proto__ || Object.getPrototypeOf(HtmlMediaSource)).call(this));

      var property = void 0;

      _this.nativeMediaSource_ = new window_1.MediaSource();
      // delegate to the native MediaSource's methods by default
      // (only functions not shadowed by HtmlMediaSource.prototype)
      for (property in _this.nativeMediaSource_) {
        if (!(property in HtmlMediaSource.prototype) && typeof _this.nativeMediaSource_[property] === 'function') {
          _this[property] = _this.nativeMediaSource_[property].bind(_this.nativeMediaSource_);
        }
      }

      // emulate `duration` and `seekable` until seeking can be
      // handled uniformly for live streams
      // see https://github.com/w3c/media-source/issues/5
      _this.duration_ = NaN;
      Object.defineProperty(_this, 'duration', {
        get: function get$$1() {
          if (this.duration_ === Infinity) {
            return this.duration_;
          }
          return this.nativeMediaSource_.duration;
        },
        set: function set$$1(duration) {
          this.duration_ = duration;
          if (duration !== Infinity) {
            this.nativeMediaSource_.duration = duration;
            return;
          }
          // Infinity (live) is tracked only on the emulation; the native
          // duration is grown incrementally via addSeekableRange_
        }
      });
      Object.defineProperty(_this, 'seekable', {
        get: function get$$1() {
          // For live (Infinity duration) expose [0, native duration] as the
          // seekable window; otherwise defer to the native range
          if (this.duration_ === Infinity) {
            return videojs.createTimeRanges([[0, this.nativeMediaSource_.duration]]);
          }
          return this.nativeMediaSource_.seekable;
        }
      });

      Object.defineProperty(_this, 'readyState', {
        get: function get$$1() {
          return this.nativeMediaSource_.readyState;
        }
      });

      Object.defineProperty(_this, 'activeSourceBuffers', {
        get: function get$$1() {
          return this.activeSourceBuffers_;
        }
      });

      // the list of virtual and native SourceBuffers created by this
      // MediaSource
      _this.sourceBuffers = [];

      _this.activeSourceBuffers_ = [];

      /**
       * update the list of active source buffers based upon various
       * information from HLS and video.js
       *
       * @private
       */
      _this.updateActiveSourceBuffers_ = function () {
        // Retain the reference but empty the array
        _this.activeSourceBuffers_.length = 0;

        // If there is only one source buffer, then it will always be active and audio will
        // be disabled based on the codec of the source buffer
        if (_this.sourceBuffers.length === 1) {
          var sourceBuffer = _this.sourceBuffers[0];

          sourceBuffer.appendAudioInitSegment_ = true;
          sourceBuffer.audioDisabled_ = !sourceBuffer.audioCodec_;
          _this.activeSourceBuffers_.push(sourceBuffer);
          return;
        }

        // There are 2 source buffers, a combined (possibly video only) source buffer and
        // and an audio only source buffer.
        // By default, the audio in the combined virtual source buffer is enabled
        // and the audio-only source buffer (if it exists) is disabled.
        var disableCombined = false;
        var disableAudioOnly = true;

        // TODO: maybe we can store the sourcebuffers on the track objects?
        // safari may do something like this
        for (var i = 0; i < _this.player_.audioTracks().length; i++) {
          var track = _this.player_.audioTracks()[i];

          if (track.enabled && track.kind !== 'main') {
            // The enabled track is an alternate audio track so disable the audio in
            // the combined source buffer and enable the audio-only source buffer.
            disableCombined = true;
            disableAudioOnly = false;
            break;
          }
        }

        _this.sourceBuffers.forEach(function (sourceBuffer, index) {
          /* eslinst-disable */
          // TODO once codecs are required, we can switch to using the codecs to determine
          // what stream is the video stream, rather than relying on videoTracks
          /* eslinst-enable */

          sourceBuffer.appendAudioInitSegment_ = true;

          if (sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
            // combined
            sourceBuffer.audioDisabled_ = disableCombined;
          } else if (sourceBuffer.videoCodec_ && !sourceBuffer.audioCodec_) {
            // If the "combined" source buffer is video only, then we do not want
            // disable the audio-only source buffer (this is mostly for demuxed
            // audio and video hls)
            sourceBuffer.audioDisabled_ = true;
            disableAudioOnly = false;
          } else if (!sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
            // audio only
            // In the case of audio only with alternate audio and disableAudioOnly is true
            // this means we want to disable the audio on the alternate audio sourcebuffer
            // but not the main "combined" source buffer. The "combined" source buffer is
            // always at index 0, so this ensures audio won't be disabled in both source
            // buffers.
            sourceBuffer.audioDisabled_ = index ? disableAudioOnly : !disableAudioOnly;
            if (sourceBuffer.audioDisabled_) {
              return;
            }
          }

          _this.activeSourceBuffers_.push(sourceBuffer);
        });
      };

      // a rendition switch means the audio init segment must be re-appended
      _this.onPlayerMediachange_ = function () {
        _this.sourceBuffers.forEach(function (sourceBuffer) {
          sourceBuffer.appendAudioInitSegment_ = true;
        });
      };

      // clear caption state in the transmuxers after a buffer flush
      _this.onHlsReset_ = function () {
        _this.sourceBuffers.forEach(function (sourceBuffer) {
          if (sourceBuffer.transmuxer_) {
            sourceBuffer.transmuxer_.postMessage({ action: 'resetCaptions' });
          }
        });
      };

      // propagate the stream-to-display time mapping to every buffer
      _this.onHlsSegmentTimeMapping_ = function (event) {
        _this.sourceBuffers.forEach(function (buffer) {
          return buffer.timeMapping_ = event.mapping;
        });
      };

      // Re-emit MediaSource events on the polyfill
      ['sourceopen', 'sourceclose', 'sourceended'].forEach(function (eventName) {
        this.nativeMediaSource_.addEventListener(eventName, this.trigger.bind(this));
      }, _this);

      // capture the associated player when the MediaSource is
      // successfully attached
      _this.on('sourceopen', function (event) {
        // Get the player this MediaSource is attached to by matching the
        // object URL assigned in createObjectURL against video el src
        var video = document_1.querySelector('[src="' + _this.url_ + '"]');

        if (!video) {
          return;
        }

        _this.player_ = videojs(video.parentNode);

        if (!_this.player_) {
          return;
        }

        // hls-reset is fired by videojs.Hls on to the tech after the main SegmentLoader
        // resets its state and flushes the buffer
        _this.player_.tech_.on('hls-reset', _this.onHlsReset_);
        // hls-segment-time-mapping is fired by videojs.Hls on to the tech after the main
        // SegmentLoader inspects an MTS segment and has an accurate stream to display
        // time mapping
        _this.player_.tech_.on('hls-segment-time-mapping', _this.onHlsSegmentTimeMapping_);

        if (_this.player_.audioTracks && _this.player_.audioTracks()) {
          _this.player_.audioTracks().on('change', _this.updateActiveSourceBuffers_);
          _this.player_.audioTracks().on('addtrack', _this.updateActiveSourceBuffers_);
          _this.player_.audioTracks().on('removetrack', _this.updateActiveSourceBuffers_);
        }

        _this.player_.on('mediachange', _this.onPlayerMediachange_);
      });

      // extend the final id3 metadata cue of every buffer to the stream end
      _this.on('sourceended', function (event) {
        var duration = durationOfVideo(_this.duration);

        for (var i = 0; i < _this.sourceBuffers.length; i++) {
          var sourcebuffer = _this.sourceBuffers[i];
          var cues = sourcebuffer.metadataTrack_ && sourcebuffer.metadataTrack_.cues;

          if (cues && cues.length) {
            cues[cues.length - 1].endTime = duration;
          }
        }
      });

      // explicitly terminate any WebWorkers that were created
      // by SourceHandlers, and unbind the player listeners added above
      _this.on('sourceclose', function (event) {
        this.sourceBuffers.forEach(function (sourceBuffer) {
          if (sourceBuffer.transmuxer_) {
            sourceBuffer.transmuxer_.terminate();
          }
        });

        this.sourceBuffers.length = 0;
        if (!this.player_) {
          return;
        }

        if (this.player_.audioTracks && this.player_.audioTracks()) {
          this.player_.audioTracks().off('change', this.updateActiveSourceBuffers_);
          this.player_.audioTracks().off('addtrack', this.updateActiveSourceBuffers_);
          this.player_.audioTracks().off('removetrack', this.updateActiveSourceBuffers_);
        }

        // We can only change this if the player hasn't been disposed of yet
        // because `off` eventually tries to use the el_ property. If it has
        // been disposed of, then don't worry about it because there are no
        // event handlers left to unbind anyway
        if (this.player_.el_) {
          this.player_.off('mediachange', this.onPlayerMediachange_);
        }

        if (this.player_.tech_ && this.player_.tech_.el_) {
          this.player_.tech_.off('hls-reset', this.onHlsReset_);
          this.player_.tech_.off('hls-segment-time-mapping', this.onHlsSegmentTimeMapping_);
        }
      });
      return _this;
    }
11612
11613 /**
11614 * Add a range that that can now be seeked to.
11615 *
11616 * @param {Double} start where to start the addition
11617 * @param {Double} end where to end the addition
11618 * @private
11619 */
11620
11621
11622 createClass(HtmlMediaSource, [{
11623 key: 'addSeekableRange_',
11624 value: function addSeekableRange_(start, end) {
11625 var error = void 0;
11626
11627 if (this.duration !== Infinity) {
11628 error = new Error('MediaSource.addSeekableRange() can only be invoked ' + 'when the duration is Infinity');
11629 error.name = 'InvalidStateError';
11630 error.code = 11;
11631 throw error;
11632 }
11633
11634 if (end > this.nativeMediaSource_.duration || isNaN(this.nativeMediaSource_.duration)) {
11635 this.nativeMediaSource_.duration = end;
11636 }
11637 }
11638
11639 /**
11640 * Add a source buffer to the media source.
11641 *
11642 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
11643 * @param {String} type the content-type of the content
11644 * @return {Object} the created source buffer
11645 */
11646
11647 }, {
11648 key: 'addSourceBuffer',
11649 value: function addSourceBuffer(type) {
11650 var buffer = void 0;
11651 var parsedType = parseContentType(type);
11652
11653 // Create a VirtualSourceBuffer to transmux MPEG-2 transport
11654 // stream segments into fragmented MP4s
11655 if (/^(video|audio)\/mp2t$/i.test(parsedType.type)) {
11656 var codecs = [];
11657
11658 if (parsedType.parameters && parsedType.parameters.codecs) {
11659 codecs = parsedType.parameters.codecs.split(',');
11660 codecs = translateLegacyCodecs(codecs);
11661 codecs = codecs.filter(function (codec) {
11662 return isAudioCodec(codec) || isVideoCodec(codec);
11663 });
11664 }
11665
11666 if (codecs.length === 0) {
11667 codecs = ['avc1.4d400d', 'mp4a.40.2'];
11668 }
11669
11670 buffer = new VirtualSourceBuffer(this, codecs);
11671
11672 if (this.sourceBuffers.length !== 0) {
11673 // If another VirtualSourceBuffer already exists, then we are creating a
11674 // SourceBuffer for an alternate audio track and therefore we know that
11675 // the source has both an audio and video track.
11676 // That means we should trigger the manual creation of the real
11677 // SourceBuffers instead of waiting for the transmuxer to return data
11678 this.sourceBuffers[0].createRealSourceBuffers_();
11679 buffer.createRealSourceBuffers_();
11680
11681 // Automatically disable the audio on the first source buffer if
11682 // a second source buffer is ever created
11683 this.sourceBuffers[0].audioDisabled_ = true;
11684 }
11685 } else {
11686 // delegate to the native implementation
11687 buffer = this.nativeMediaSource_.addSourceBuffer(type);
11688 }
11689
11690 this.sourceBuffers.push(buffer);
11691 return buffer;
11692 }
11693 }, {
11694 key: 'dispose',
11695 value: function dispose() {
11696 this.trigger('dispose');
11697 this.off();
11698
11699 this.sourceBuffers.forEach(function (buffer) {
11700 if (buffer.dispose) {
11701 buffer.dispose();
11702 }
11703 });
11704
11705 this.sourceBuffers.length = 0;
11706 }
11707 }]);
11708 return HtmlMediaSource;
11709 }(videojs.EventTarget);
11710
11711 /**
11712 * @file videojs-contrib-media-sources.js
11713 */
  // monotonically increasing suffix for emulated object URLs
  var urlCount = 0;

  // ------------
  // Media Source
  // ------------

  // store references to the media sources so they can be connected
  // to a video element (a swf object)
  // TODO: can we store this somewhere local to this module?
  videojs.mediaSources = {};
11724
11725 /**
11726 * Provide a method for a swf object to notify JS that a
11727 * media source is now open.
11728 *
11729 * @param {String} msObjectURL string referencing the MSE Object URL
11730 * @param {String} swfId the swf id
11731 */
11732 var open = function open(msObjectURL, swfId) {
11733 var mediaSource = videojs.mediaSources[msObjectURL];
11734
11735 if (mediaSource) {
11736 mediaSource.trigger({ type: 'sourceopen', swfId: swfId });
11737 } else {
11738 throw new Error('Media Source not found (Video.js)');
11739 }
11740 };
11741
11742 /**
11743 * Check to see if the native MediaSource object exists and supports
11744 * an MP4 container with both H.264 video and AAC-LC audio.
11745 *
11746 * @return {Boolean} if native media sources are supported
11747 */
11748 var supportsNativeMediaSources = function supportsNativeMediaSources() {
11749 return !!window_1.MediaSource && !!window_1.MediaSource.isTypeSupported && window_1.MediaSource.isTypeSupported('video/mp4;codecs="avc1.4d400d,mp4a.40.2"');
11750 };
11751
11752 /**
11753 * An emulation of the MediaSource API so that we can support
11754 * native and non-native functionality. returns an instance of
11755 * HtmlMediaSource.
11756 *
11757 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/MediaSource
11758 */
11759 var MediaSource = function MediaSource() {
11760 this.MediaSource = {
11761 open: open,
11762 supportsNativeMediaSources: supportsNativeMediaSources
11763 };
11764
11765 if (supportsNativeMediaSources()) {
11766 return new HtmlMediaSource();
11767 }
11768
11769 throw new Error('Cannot use create a virtual MediaSource for this video');
11770 };
11771
11772 MediaSource.open = open;
11773 MediaSource.supportsNativeMediaSources = supportsNativeMediaSources;
11774
11775 /**
11776 * A wrapper around the native URL for our MSE object
11777 * implementation, this object is exposed under videojs.URL
11778 *
11779 * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/URL
11780 */
11781 var URL$1 = {
11782 /**
11783 * A wrapper around the native createObjectURL for our objects.
11784 * This function maps a native or emulated mediaSource to a blob
11785 * url so that it can be loaded into video.js
11786 *
11787 * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL
11788 * @param {MediaSource} object the object to create a blob url to
11789 */
11790 createObjectURL: function createObjectURL(object) {
11791 var objectUrlPrefix = 'blob:vjs-media-source/';
11792 var url = void 0;
11793
11794 // use the native MediaSource to generate an object URL
11795 if (object instanceof HtmlMediaSource) {
11796 url = window_1.URL.createObjectURL(object.nativeMediaSource_);
11797 object.url_ = url;
11798 return url;
11799 }
11800 // if the object isn't an emulated MediaSource, delegate to the
11801 // native implementation
11802 if (!(object instanceof HtmlMediaSource)) {
11803 url = window_1.URL.createObjectURL(object);
11804 object.url_ = url;
11805 return url;
11806 }
11807
11808 // build a URL that can be used to map back to the emulated
11809 // MediaSource
11810 url = objectUrlPrefix + urlCount;
11811
11812 urlCount++;
11813
11814 // setup the mapping back to object
11815 videojs.mediaSources[url] = object;
11816
11817 return url;
11818 }
11819 };
11820
  // Expose the emulated MediaSource and URL wrappers on the videojs namespace
  videojs.MediaSource = MediaSource;
  videojs.URL = URL$1;
11823
11824 function _interopDefault$1(ex) {
11825 return ex && typeof ex === 'object' && 'default' in ex ? ex['default'] : ex;
11826 }
11827
  // CommonJS/ESM interop: unwrap the url-toolkit and window shims for this scope
  var URLToolkit = _interopDefault$1(urlToolkit);
  var window$1 = _interopDefault$1(window_1);
11830
11831 var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
11832 // return early if we don't need to resolve
11833 if (/^[a-z]+:/i.test(relativeUrl)) {
11834 return relativeUrl;
11835 } // if the base URL is relative then combine with the current location
11836
11837
11838 if (!/\/\//i.test(baseUrl)) {
11839 baseUrl = URLToolkit.buildAbsoluteURL(window$1.location && window$1.location.href || '', baseUrl);
11840 }
11841
11842 return URLToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
11843 };
11844
11845 var resolveUrl_1 = resolveUrl$1;
11846
11847 function _interopDefault$2(ex) {
11848 return ex && typeof ex === 'object' && 'default' in ex ? ex['default'] : ex;
11849 }
11850
11851 var window$2 = _interopDefault$2(window_1);
11852
11853 var atob = function atob(s) {
11854 return window$2.atob ? window$2.atob(s) : Buffer.from(s, 'base64').toString('binary');
11855 };
11856
11857 function decodeB64ToUint8Array$1(b64Text) {
11858 var decodedString = atob(b64Text);
11859 var array = new Uint8Array(decodedString.length);
11860
11861 for (var i = 0; i < decodedString.length; i++) {
11862 array[i] = decodedString.charCodeAt(i);
11863 }
11864
11865 return array;
11866 }
11867
11868 var decodeB64ToUint8Array_1 = decodeB64ToUint8Array$1;
11869
  // XML 1.0 "Name" grammar, used to validate tag and attribute names:
  //[4] NameStartChar ::= ":" | [A-Z] | "_" | [a-z] | [#xC0-#xD6] | [#xD8-#xF6] | [#xF8-#x2FF] | [#x370-#x37D] | [#x37F-#x1FFF] | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] | [#x3001-#xD7FF] | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] | [#x10000-#xEFFFF]
  //[4a] NameChar ::= NameStartChar | "-" | "." | [0-9] | #xB7 | [#x0300-#x036F] | [#x203F-#x2040]
  //[5] Name ::= NameStartChar (NameChar)*
  var nameStartChar = /[A-Z_a-z\xC0-\xD6\xD8-\xF6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/; //\u10000-\uEFFFF
  // NameChar body is built from nameStartChar's source with '-', '.', digits
  // and the extra combining ranges prepended/appended.
  var nameChar = new RegExp("[\\-\\.0-9" + nameStartChar.source.slice(1, -1) + "\\u00B7\\u0300-\\u036F\\u203F-\\u2040]");
  // Full (optionally prefixed) qualified name: Name (':' Name)?
  var tagNamePattern = new RegExp('^' + nameStartChar.source + nameChar.source + '*(?:\:' + nameStartChar.source + nameChar.source + '*)?$');
  //var tagNamePattern = /^[a-zA-Z_][\w\-\.]*(?:\:[a-zA-Z_][\w\-\.]*)?$/
  //var handlers = 'resolveEntity,getExternalSubset,characters,endDocument,endElement,endPrefixMapping,ignorableWhitespace,processingInstruction,setDocumentLocator,skippedEntity,startDocument,startElement,startPrefixMapping,notationDecl,unparsedEntityDecl,error,fatalError,warning,attributeDecl,elementDecl,externalEntityDecl,internalEntityDecl,comment,endCDATA,endDTD,endEntity,startCDATA,startDTD,startEntity'.split(',')

  // Scanner states for parseElementStartPart:
  //S_TAG, S_ATTR, S_EQ, S_ATTR_NOQUOT_VALUE
  //S_ATTR_SPACE, S_ATTR_END, S_TAG_SPACE, S_TAG_CLOSE
  var S_TAG = 0; //tag name offering
  var S_ATTR = 1; //attr name offering
  var S_ATTR_SPACE = 2; //attr name end and space offer
  var S_EQ = 3; //=space?
  var S_ATTR_NOQUOT_VALUE = 4; //attr value(no quot value only)
  var S_ATTR_END = 5; //attr value end and no space(quot end)
  var S_TAG_SPACE = 6; //(attr value end || tag end ) && (space offer)
  var S_TAG_CLOSE = 7; //closed el<el />
11889
  /**
   * Minimal SAX-style XML reader. Callers attach `domBuilder` (event sink)
   * and `errorHandler` properties before invoking parse().
   */
  function XMLReader() {}

  XMLReader.prototype = {
    // Parse `source`, emitting startDocument/endDocument plus the events
    // produced by _parse. `defaultNSMap` is copied first so the caller's
    // map is never mutated during parsing.
    parse: function parse(source, defaultNSMap, entityMap) {
      var domBuilder = this.domBuilder;
      domBuilder.startDocument();
      _copy(defaultNSMap, defaultNSMap = {});
      _parse(source, defaultNSMap, entityMap, domBuilder, this.errorHandler);
      domBuilder.endDocument();
    }
  };
  /**
   * Core scanning loop behind XMLReader.parse: walks `source` left to
   * right, locating each '<' and dispatching on the character after it
   * ('/' close tag, '?' processing instruction, '!' comment/CDATA/DOCTYPE,
   * anything else an element start tag), emitting SAX events on
   * `domBuilder` and reporting problems through `errorHandler`.
   */
  function _parse(source, defaultNSMapCopy, entityMap, domBuilder, errorHandler) {
    function fixedFromCharCode(code) {
      // String.prototype.fromCharCode does not supports
      // > 2 bytes unicode chars directly
      if (code > 0xffff) {
        code -= 0x10000;
        var surrogate1 = 0xd800 + (code >> 10),
            surrogate2 = 0xdc00 + (code & 0x3ff);

        return String.fromCharCode(surrogate1, surrogate2);
      } else {
        return String.fromCharCode(code);
      }
    }
    // Expand one entity reference (named or numeric) to its text.
    function entityReplacer(a) {
      var k = a.slice(1, -1);
      if (k in entityMap) {
        return entityMap[k];
      } else if (k.charAt(0) === '#') {
        return fixedFromCharCode(parseInt(k.substr(1).replace('x', '0x')));
      } else {
        errorHandler.error('entity not found:' + a);
        return a;
      }
    }
    // Emit the pending text between `start` and `end` (entity-expanded)
    // as a characters event, then advance `start`.
    function appendText(end) {
      //has some bugs
      if (end > start) {
        var xt = source.substring(start, end).replace(/&#?\w+;/g, entityReplacer);
        locator && position(start);
        domBuilder.characters(xt, 0, end - start);
        start = end;
      }
    }
    // Advance the locator's line/column counters to source offset `p`.
    function position(p, m) {
      while (p >= lineEnd && (m = linePattern.exec(source))) {
        lineStart = m.index;
        lineEnd = lineStart + m[0].length;
        locator.lineNumber++;
        //console.log('line++:',locator,startPos,endPos)
      }
      locator.columnNumber = p - lineStart + 1;
    }
    var lineStart = 0;
    var lineEnd = 0;
    var linePattern = /.*(?:\r\n?|\n)|.*$/g;
    var locator = domBuilder.locator;

    var parseStack = [{ currentNSMap: defaultNSMapCopy }];
    var closeMap = {};
    var start = 0;
    while (true) {
      try {
        var tagStart = source.indexOf('<', start);
        if (tagStart < 0) {
          // No more tags: any remaining non-whitespace becomes a text node.
          if (!source.substr(start).match(/^\s*$/)) {
            var doc = domBuilder.doc;
            var text = doc.createTextNode(source.substr(start));
            doc.appendChild(text);
            domBuilder.currentElement = text;
          }
          return;
        }
        if (tagStart > start) {
          appendText(tagStart);
        }
        switch (source.charAt(tagStart + 1)) {
          case '/':
            var end = source.indexOf('>', tagStart + 3);
            var tagName = source.substring(tagStart + 2, end);
            var config = parseStack.pop();
            if (end < 0) {

              tagName = source.substring(tagStart + 2).replace(/[\s<].*/, '');
              //console.error('#@@@@@@'+tagName)
              errorHandler.error("end tag name: " + tagName + ' is not complete:' + config.tagName);
              end = tagStart + 1 + tagName.length;
            } else if (tagName.match(/\s</)) {
              tagName = tagName.replace(/[\s<].*/, '');
              errorHandler.error("end tag name: " + tagName + ' maybe not complete');
              end = tagStart + 1 + tagName.length;
            }
            //console.error(parseStack.length,parseStack)
            //console.error(config);
            var localNSMap = config.localNSMap;
            var endMatch = config.tagName == tagName;
            // tolerate case-mismatched close tags, but report them below
            var endIgnoreCaseMach = endMatch || config.tagName && config.tagName.toLowerCase() == tagName.toLowerCase();
            if (endIgnoreCaseMach) {
              domBuilder.endElement(config.uri, config.localName, tagName);
              if (localNSMap) {
                for (var prefix in localNSMap) {
                  domBuilder.endPrefixMapping(prefix);
                }
              }
              if (!endMatch) {
                errorHandler.fatalError("end tag name: " + tagName + ' is not match the current start tagName:' + config.tagName);
              }
            } else {
              // close tag did not match: restore the popped element
              parseStack.push(config);
            }

            end++;
            break;
          // end element
          case '?':
            // <?...?>
            locator && position(tagStart);
            end = parseInstruction(source, tagStart, domBuilder);
            break;
          case '!':
            // <!doctype,<![CDATA,<!--
            locator && position(tagStart);
            end = parseDCC(source, tagStart, domBuilder, errorHandler);
            break;
          default:
            locator && position(tagStart);
            var el = new ElementAttributes();
            var currentNSMap = parseStack[parseStack.length - 1].currentNSMap;
            //elStartEnd
            var end = parseElementStartPart(source, tagStart, el, currentNSMap, entityReplacer, errorHandler);
            var len = el.length;

            if (!el.closed && fixSelfClosed(source, end, el.tagName, closeMap)) {
              el.closed = true;
              if (!entityMap.nbsp) {
                errorHandler.warning('unclosed xml attribute');
              }
            }
            if (locator && len) {
              var locator2 = copyLocator(locator, {});
              //try{//attribute position fixed
              for (var i = 0; i < len; i++) {
                var a = el[i];
                position(a.offset);
                a.locator = copyLocator(locator, {});
              }
              //}catch(e){console.error('@@@@@'+e)}
              domBuilder.locator = locator2;
              if (appendElement(el, domBuilder, currentNSMap)) {
                parseStack.push(el);
              }
              domBuilder.locator = locator;
            } else {
              if (appendElement(el, domBuilder, currentNSMap)) {
                parseStack.push(el);
              }
            }

            if (el.uri === 'http://www.w3.org/1999/xhtml' && !el.closed) {
              end = parseHtmlSpecialContent(source, end, el.tagName, entityReplacer, domBuilder);
            } else {
              end++;
            }
        }
      } catch (e) {
        errorHandler.error('element parse error: ' + e);
        //errorHandler.error('element parse error: '+e);
        end = -1;
        //throw e;
      }
      if (end > start) {
        start = end;
      } else {
        //TODO: the scanner may back up here, so position info can be wrong
        appendText(Math.max(tagStart, start) + 1);
      }
    }
  }
12069 function copyLocator(f, t) {
12070 t.lineNumber = f.lineNumber;
12071 t.columnNumber = f.columnNumber;
12072 return t;
12073 }
12074
  /**
   * Scan one element start tag beginning at `start` (which points at '<'),
   * filling `el` (an ElementAttributes) with the tag name and attributes.
   * Implemented as a character-by-character state machine over the S_*
   * states; several switch cases below intentionally fall through to share
   * handling.
   * @see #appendElement(source,elStartEnd,el,selfClosed,entityReplacer,domBuilder,parseStack);
   * @return end of the elementStartPart(end of elementEndPart for selfClosed el)
   */
  function parseElementStartPart(source, start, el, currentNSMap, entityReplacer, errorHandler) {
    var attrName;
    var value;
    var p = ++start;
    var s = S_TAG; //status
    while (true) {
      var c = source.charAt(p);
      switch (c) {
        case '=':
          if (s === S_ATTR) {
            //attrName
            attrName = source.slice(start, p);
            s = S_EQ;
          } else if (s === S_ATTR_SPACE) {
            s = S_EQ;
          } else {
            //fatalError: equal must after attrName or space after attrName
            throw new Error('attribute equal must after attrName');
          }
          break;
        case '\'':
        case '"':
          if (s === S_EQ || s === S_ATTR //|| s == S_ATTR_SPACE
          ) {
            //equal
            if (s === S_ATTR) {
              errorHandler.warning('attribute value must after "="');
              attrName = source.slice(start, p);
            }
            start = p + 1;
            p = source.indexOf(c, start);
            if (p > 0) {
              value = source.slice(start, p).replace(/&#?\w+;/g, entityReplacer);
              el.add(attrName, value, start - 1);
              s = S_ATTR_END;
            } else {
              //fatalError: no end quot match
              throw new Error('attribute value no end \'' + c + '\' match');
            }
          } else if (s == S_ATTR_NOQUOT_VALUE) {
            value = source.slice(start, p).replace(/&#?\w+;/g, entityReplacer);
            //console.log(attrName,value,start,p)
            el.add(attrName, value, start);
            //console.dir(el)
            errorHandler.warning('attribute "' + attrName + '" missed start quot(' + c + ')!!');
            start = p + 1;
            s = S_ATTR_END;
          } else {
            //fatalError: no equal before
            throw new Error('attribute value must after "="');
          }
          break;
        case '/':
          switch (s) {
            case S_TAG:
              el.setTagName(source.slice(start, p));
            // falls through: '/' right after the tag name also closes it
            case S_ATTR_END:
            case S_TAG_SPACE:
            case S_TAG_CLOSE:
              s = S_TAG_CLOSE;
              el.closed = true;
            // falls through
            case S_ATTR_NOQUOT_VALUE:
            case S_ATTR:
            case S_ATTR_SPACE:
              break;
            //case S_EQ:
            default:
              throw new Error("attribute invalid close char('/')");
          }
          break;
        case '':
          //end document
          //throw new Error('unexpected end of input')
          errorHandler.error('unexpected end of input');
          if (s == S_TAG) {
            el.setTagName(source.slice(start, p));
          }
          return p;
        case '>':
          switch (s) {
            case S_TAG:
              el.setTagName(source.slice(start, p));
            // falls through
            case S_ATTR_END:
            case S_TAG_SPACE:
            case S_TAG_CLOSE:
              break; //normal
            case S_ATTR_NOQUOT_VALUE: //Compatible state
            case S_ATTR:
              value = source.slice(start, p);
              if (value.slice(-1) === '/') {
                el.closed = true;
                value = value.slice(0, -1);
              }
            // falls through
            case S_ATTR_SPACE:
              if (s === S_ATTR_SPACE) {
                value = attrName;
              }
              if (s == S_ATTR_NOQUOT_VALUE) {
                errorHandler.warning('attribute "' + value + '" missed quot(")!!');
                el.add(attrName, value.replace(/&#?\w+;/g, entityReplacer), start);
              } else {
                // bare attribute (e.g. disabled); HTML boolean attributes
                // are tolerated without a warning
                if (currentNSMap[''] !== 'http://www.w3.org/1999/xhtml' || !value.match(/^(?:disabled|checked|selected)$/i)) {
                  errorHandler.warning('attribute "' + value + '" missed value!! "' + value + '" instead!!');
                }
                el.add(value, value, start);
              }
              break;
            case S_EQ:
              throw new Error('attribute value missed!!');
          }
          // console.log(tagName,tagNamePattern,tagNamePattern.test(tagName))
          return p;
        /*xml space '\x20' | #x9 | #xD | #xA; */
        case "\x80":
          c = ' ';
        // falls through: treated as whitespace below
        default:
          if (c <= ' ') {
            //space
            switch (s) {
              case S_TAG:
                el.setTagName(source.slice(start, p)); //tagName
                s = S_TAG_SPACE;
                break;
              case S_ATTR:
                attrName = source.slice(start, p);
                s = S_ATTR_SPACE;
                break;
              case S_ATTR_NOQUOT_VALUE:
                var value = source.slice(start, p).replace(/&#?\w+;/g, entityReplacer);
                errorHandler.warning('attribute "' + value + '" missed quot(")!!');
                el.add(attrName, value, start);
              // falls through
              case S_ATTR_END:
                s = S_TAG_SPACE;
                break;
              //case S_TAG_SPACE:
              //case S_EQ:
              //case S_ATTR_SPACE:
              //  void();break;
              //case S_TAG_CLOSE:
              //ignore warning
            }
          } else {
            //not space
            //S_TAG, S_ATTR, S_EQ, S_ATTR_NOQUOT_VALUE
            //S_ATTR_SPACE, S_ATTR_END, S_TAG_SPACE, S_TAG_CLOSE
            switch (s) {
              //case S_TAG:void();break;
              //case S_ATTR:void();break;
              //case S_ATTR_NOQUOT_VALUE:void();break;
              case S_ATTR_SPACE:
                var tagName = el.tagName;
                if (currentNSMap[''] !== 'http://www.w3.org/1999/xhtml' || !attrName.match(/^(?:disabled|checked|selected)$/i)) {
                  errorHandler.warning('attribute "' + attrName + '" missed value!! "' + attrName + '" instead2!!');
                }
                el.add(attrName, attrName, start);
                start = p;
                s = S_ATTR;
                break;
              case S_ATTR_END:
                errorHandler.warning('attribute space is required"' + attrName + '"!!');
              // falls through
              case S_TAG_SPACE:
                s = S_ATTR;
                start = p;
                break;
              case S_EQ:
                s = S_ATTR_NOQUOT_VALUE;
                start = p;
                break;
              case S_TAG_CLOSE:
                throw new Error("elements closed character '/' and '>' must be connected to");
            }
          }
      } //end outer switch
      //console.log('p++',p)
      p++;
    }
  }
  /**
   * Apply namespace handling to a scanned element and emit startElement
   * (plus endElement for self-closed elements) on `domBuilder`.
   * @return true if has new namespace define
   *         (i.e. the element is left open and must be pushed on the stack)
   */
  function appendElement(el, domBuilder, currentNSMap) {
    var tagName = el.tagName;
    var localNSMap = null;
    //var currentNSMap = parseStack[parseStack.length-1].currentNSMap;
    var i = el.length;
    // First pass: split qualified names and collect xmlns declarations.
    while (i--) {
      var a = el[i];
      var qName = a.qName;
      var value = a.value;
      var nsp = qName.indexOf(':');
      if (nsp > 0) {
        var prefix = a.prefix = qName.slice(0, nsp);
        var localName = qName.slice(nsp + 1);
        var nsPrefix = prefix === 'xmlns' && localName;
      } else {
        localName = qName;
        prefix = null;
        nsPrefix = qName === 'xmlns' && '';
      }
      //can not set prefix,because prefix !== ''
      a.localName = localName;
      //prefix == null for no ns prefix attribute
      if (nsPrefix !== false) {
        //hack!!
        if (localNSMap == null) {
          localNSMap = {};
          //console.log(currentNSMap,0)
          // fork the inherited namespace map before mutating it
          _copy(currentNSMap, currentNSMap = {});
          //console.log(currentNSMap,1)
        }
        currentNSMap[nsPrefix] = localNSMap[nsPrefix] = value;
        a.uri = 'http://www.w3.org/2000/xmlns/';
        domBuilder.startPrefixMapping(nsPrefix, value);
      }
    }
    var i = el.length;
    // Second pass: resolve each prefixed attribute's namespace URI.
    while (i--) {
      a = el[i];
      var prefix = a.prefix;
      if (prefix) {
        //no prefix attribute has no namespace
        if (prefix === 'xml') {
          a.uri = 'http://www.w3.org/XML/1998/namespace';
        } if (prefix !== 'xmlns') {
          a.uri = currentNSMap[prefix || ''];

          //{console.log('###'+a.qName,domBuilder.locator.systemId+'',currentNSMap,a.uri)}
        }
      }
    }
    var nsp = tagName.indexOf(':');
    if (nsp > 0) {
      prefix = el.prefix = tagName.slice(0, nsp);
      localName = el.localName = tagName.slice(nsp + 1);
    } else {
      prefix = null; //important!!
      localName = el.localName = tagName;
    }
    //no prefix element has default namespace
    var ns = el.uri = currentNSMap[prefix || ''];
    domBuilder.startElement(ns, localName, tagName, el);
    //endPrefixMapping and startPrefixMapping have not any help for dom builder
    //localNSMap = null
    if (el.closed) {
      domBuilder.endElement(ns, localName, tagName);
      if (localNSMap) {
        for (prefix in localNSMap) {
          domBuilder.endPrefixMapping(prefix);
        }
      }
    } else {
      el.currentNSMap = currentNSMap;
      el.localNSMap = localNSMap;
      //parseStack.push(el);
      return true;
    }
  }
12336 function parseHtmlSpecialContent(source, elStartEnd, tagName, entityReplacer, domBuilder) {
12337 if (/^(?:script|textarea)$/i.test(tagName)) {
12338 var elEndStart = source.indexOf('</' + tagName + '>', elStartEnd);
12339 var text = source.substring(elStartEnd + 1, elEndStart);
12340 if (/[&<]/.test(text)) {
12341 if (/^script$/i.test(tagName)) {
12342 //if(!/\]\]>/.test(text)){
12343 //lexHandler.startCDATA();
12344 domBuilder.characters(text, 0, text.length);
12345 //lexHandler.endCDATA();
12346 return elEndStart;
12347 //}
12348 } //}else{//text area
12349 text = text.replace(/&#?\w+;/g, entityReplacer);
12350 domBuilder.characters(text, 0, text.length);
12351 return elEndStart;
12352 //}
12353 }
12354 }
12355 return elStartEnd + 1;
12356 }
12357 function fixSelfClosed(source, elStartEnd, tagName, closeMap) {
12358 //if(tagName in closeMap){
12359 var pos = closeMap[tagName];
12360 if (pos == null) {
12361 //console.log(tagName)
12362 pos = source.lastIndexOf('</' + tagName + '>');
12363 if (pos < elStartEnd) {
12364 //忘记闭合
12365 pos = source.lastIndexOf('</' + tagName);
12366 }
12367 closeMap[tagName] = pos;
12368 }
12369 return pos < elStartEnd;
12370 //}
12371 }
12372 function _copy(source, target) {
12373 for (var n in source) {
12374 target[n] = source[n];
12375 }
12376 }
12377 function parseDCC(source, start, domBuilder, errorHandler) {
12378 //sure start with '<!'
12379 var next = source.charAt(start + 2);
12380 switch (next) {
12381 case '-':
12382 if (source.charAt(start + 3) === '-') {
12383 var end = source.indexOf('-->', start + 4);
12384 //append comment source.substring(4,end)//<!--
12385 if (end > start) {
12386 domBuilder.comment(source, start + 4, end - start - 4);
12387 return end + 3;
12388 } else {
12389 errorHandler.error("Unclosed comment");
12390 return -1;
12391 }
12392 } else {
12393 //error
12394 return -1;
12395 }
12396 default:
12397 if (source.substr(start + 3, 6) == 'CDATA[') {
12398 var end = source.indexOf(']]>', start + 9);
12399 domBuilder.startCDATA();
12400 domBuilder.characters(source, start + 9, end - start - 9);
12401 domBuilder.endCDATA();
12402 return end + 3;
12403 }
12404 //<!DOCTYPE
12405 //startDTD(java.lang.String name, java.lang.String publicId, java.lang.String systemId)
12406 var matchs = split(source, start);
12407 var len = matchs.length;
12408 if (len > 1 && /!doctype/i.test(matchs[0][0])) {
12409 var name = matchs[1][0];
12410 var pubid = len > 3 && /^public$/i.test(matchs[2][0]) && matchs[3][0];
12411 var sysid = len > 4 && matchs[4][0];
12412 var lastMatch = matchs[len - 1];
12413 domBuilder.startDTD(name, pubid && pubid.replace(/^(['"])(.*?)\1$/, '$2'), sysid && sysid.replace(/^(['"])(.*?)\1$/, '$2'));
12414 domBuilder.endDTD();
12415
12416 return lastMatch.index + lastMatch[0].length;
12417 }
12418 }
12419 return -1;
12420 }
12421
12422 function parseInstruction(source, start, domBuilder) {
12423 var end = source.indexOf('?>', start);
12424 if (end) {
12425 var match = source.substring(start, end).match(/^<\?(\S*)\s*([\s\S]*?)\s*$/);
12426 if (match) {
12427 var len = match[0].length;
12428 domBuilder.processingInstruction(match[1], match[2]);
12429 return end + 2;
12430 } else {
12431 //error
12432 return -1;
12433 }
12434 }
12435 return -1;
12436 }
12437
12438 /**
12439 * @param source
12440 */
12441 function ElementAttributes(source) {}
12442 ElementAttributes.prototype = {
12443 setTagName: function setTagName(tagName) {
12444 if (!tagNamePattern.test(tagName)) {
12445 throw new Error('invalid tagName:' + tagName);
12446 }
12447 this.tagName = tagName;
12448 },
12449 add: function add(qName, value, offset) {
12450 if (!tagNamePattern.test(qName)) {
12451 throw new Error('invalid attribute:' + qName);
12452 }
12453 this[this.length++] = { qName: qName, value: value, offset: offset };
12454 },
12455 length: 0,
12456 getLocalName: function getLocalName(i) {
12457 return this[i].localName;
12458 },
12459 getLocator: function getLocator(i) {
12460 return this[i].locator;
12461 },
12462 getQName: function getQName(i) {
12463 return this[i].qName;
12464 },
12465 getURI: function getURI(i) {
12466 return this[i].uri;
12467 },
12468 getValue: function getValue(i) {
12469 return this[i].value;
12470 }
12471 // ,getIndex:function(uri, localName)){
12472 // if(localName){
12473 //
12474 // }else{
12475 // var qName = uri
12476 // }
12477 // },
12478 // getValue:function(){return this.getValue(this.getIndex.apply(this,arguments))},
12479 // getType:function(uri,localName){}
12480 // getType:function(i){},
12481 };
12482
  /**
   * Set `parent` as the prototype of `thiz` and return `thiz`. Uses
   * `__proto__` when the engine honors it; otherwise the feature check
   * below swaps in a fallback that builds a fresh object with the desired
   * prototype and copies `thiz`'s own properties onto it (note: the
   * fallback returns that NEW object, and reuses `parent` as its loop var).
   */
  function _set_proto_(thiz, parent) {
    thiz.__proto__ = parent;
    return thiz;
  }
  // Feature-detect __proto__ support; replace with the copying fallback if absent.
  if (!(_set_proto_({}, _set_proto_.prototype) instanceof _set_proto_)) {
    _set_proto_ = function _set_proto_(thiz, parent) {
      function p() {} p.prototype = parent;
      p = new p();
      for (parent in thiz) {
        p[parent] = thiz[parent];
      }
      return p;
    };
  }
12497
12498 function split(source, start) {
12499 var match;
12500 var buf = [];
12501 var reg = /'[^']+'|"[^"]+"|[^\s<>\/=]+=?|(\/?\s*>|<)/g;
12502 reg.lastIndex = start;
12503 reg.exec(source); //skip <
12504 while (match = reg.exec(source)) {
12505 buf.push(match);
12506 if (match[1]) return buf;
12507 }
12508 }
12509
  // CommonJS-style exports for the vendored sax module.
  var XMLReader_1 = XMLReader;

  var sax = {
    XMLReader: XMLReader_1
  };
12515
12516 /*
12517 * DOM Level 2
12518 * Object DOMException
12519 * @see http://www.w3.org/TR/REC-DOM-Level-1/ecma-script-language-binding.html
12520 * @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/ecma-script-binding.html
12521 */
12522
12523 function copy(src, dest) {
12524 for (var p in src) {
12525 dest[p] = src[p];
12526 }
12527 }
12528 /**
12529 ^\w+\.prototype\.([_\w]+)\s*=\s*((?:.*\{\s*?[\r\n][\s\S]*?^})|\S.*?(?=[;\r\n]));?
12530 ^\w+\.prototype\.([_\w]+)\s*=\s*(\S.*?(?=[;\r\n]));?
12531 */
12532 function _extends$2(Class, Super) {
12533 var pt = Class.prototype;
12534 if (Object.create) {
12535 var ppt = Object.create(Super.prototype);
12536 pt.__proto__ = ppt;
12537 }
12538 if (!(pt instanceof Super)) {
12539 var t = function t() {};
12540 t.prototype = Super.prototype;
12541 t = new t();
12542 copy(pt, t);
12543 Class.prototype = pt = t;
12544 }
12545 if (pt.constructor != Class) {
12546 if (typeof Class != 'function') {
12547 console.error("unknow Class:" + Class);
12548 }
12549 pt.constructor = Class;
12550 }
12551 }
  var htmlns = 'http://www.w3.org/1999/xhtml';
  // Node Types — the standard DOM nodeType codes (DOM Level 2 Core),
  // exposed both as loose vars and as properties of NodeType.
  var NodeType = {};
  var ELEMENT_NODE = NodeType.ELEMENT_NODE = 1;
  var ATTRIBUTE_NODE = NodeType.ATTRIBUTE_NODE = 2;
  var TEXT_NODE = NodeType.TEXT_NODE = 3;
  var CDATA_SECTION_NODE = NodeType.CDATA_SECTION_NODE = 4;
  var ENTITY_REFERENCE_NODE = NodeType.ENTITY_REFERENCE_NODE = 5;
  var ENTITY_NODE = NodeType.ENTITY_NODE = 6;
  var PROCESSING_INSTRUCTION_NODE = NodeType.PROCESSING_INSTRUCTION_NODE = 7;
  var COMMENT_NODE = NodeType.COMMENT_NODE = 8;
  var DOCUMENT_NODE = NodeType.DOCUMENT_NODE = 9;
  var DOCUMENT_TYPE_NODE = NodeType.DOCUMENT_TYPE_NODE = 10;
  var DOCUMENT_FRAGMENT_NODE = NodeType.DOCUMENT_FRAGMENT_NODE = 11;
  var NOTATION_NODE = NodeType.NOTATION_NODE = 12;
12567
  // ExceptionCode
  // DOMException codes paired with their human-readable messages. The comma
  // expression stores the message in ExceptionMessage[code] and yields the
  // numeric code for the var/property assignment.
  var ExceptionCode = {};
  var ExceptionMessage = {};
  var INDEX_SIZE_ERR = ExceptionCode.INDEX_SIZE_ERR = (ExceptionMessage[1] = "Index size error", 1);
  var DOMSTRING_SIZE_ERR = ExceptionCode.DOMSTRING_SIZE_ERR = (ExceptionMessage[2] = "DOMString size error", 2);
  var HIERARCHY_REQUEST_ERR = ExceptionCode.HIERARCHY_REQUEST_ERR = (ExceptionMessage[3] = "Hierarchy request error", 3);
  var WRONG_DOCUMENT_ERR = ExceptionCode.WRONG_DOCUMENT_ERR = (ExceptionMessage[4] = "Wrong document", 4);
  var INVALID_CHARACTER_ERR = ExceptionCode.INVALID_CHARACTER_ERR = (ExceptionMessage[5] = "Invalid character", 5);
  var NO_DATA_ALLOWED_ERR = ExceptionCode.NO_DATA_ALLOWED_ERR = (ExceptionMessage[6] = "No data allowed", 6);
  var NO_MODIFICATION_ALLOWED_ERR = ExceptionCode.NO_MODIFICATION_ALLOWED_ERR = (ExceptionMessage[7] = "No modification allowed", 7);
  var NOT_FOUND_ERR = ExceptionCode.NOT_FOUND_ERR = (ExceptionMessage[8] = "Not found", 8);
  var NOT_SUPPORTED_ERR = ExceptionCode.NOT_SUPPORTED_ERR = (ExceptionMessage[9] = "Not supported", 9);
  var INUSE_ATTRIBUTE_ERR = ExceptionCode.INUSE_ATTRIBUTE_ERR = (ExceptionMessage[10] = "Attribute in use", 10);
  //level2
  var INVALID_STATE_ERR = ExceptionCode.INVALID_STATE_ERR = (ExceptionMessage[11] = "Invalid state", 11);
  var SYNTAX_ERR = ExceptionCode.SYNTAX_ERR = (ExceptionMessage[12] = "Syntax error", 12);
  var INVALID_MODIFICATION_ERR = ExceptionCode.INVALID_MODIFICATION_ERR = (ExceptionMessage[13] = "Invalid modification", 13);
  var NAMESPACE_ERR = ExceptionCode.NAMESPACE_ERR = (ExceptionMessage[14] = "Invalid namespace", 14);
  var INVALID_ACCESS_ERR = ExceptionCode.INVALID_ACCESS_ERR = (ExceptionMessage[15] = "Invalid access", 15);
12587
  /**
   * DOM-style exception factory (DOM Level 2 DOMException). When `message`
   * is already an Error it is reused as the exception object; otherwise
   * `this` is initialized with the standard message for `code`.
   * NOTE(review): assigning Error.prototype directly below means all
   * DOMExceptions share Error's prototype rather than a proper subclass.
   */
  function DOMException(code, message) {
    if (message instanceof Error) {
      var error = message;
    } else {
      error = this;
      Error.call(this, ExceptionMessage[code]);
      this.message = ExceptionMessage[code];
      if (Error.captureStackTrace) Error.captureStackTrace(this, DOMException);
    }
    error.code = code;
    if (message) this.message = this.message + ": " + message;
    return error;
  }DOMException.prototype = Error.prototype;
  // expose the numeric codes (INDEX_SIZE_ERR, ...) on the constructor
  copy(ExceptionCode, DOMException);
12602 /**
12603 * @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-536297177
12604 * The NodeList interface provides the abstraction of an ordered collection of nodes, without defining or constraining how this collection is implemented. NodeList objects in the DOM are live.
12605 * The items in the NodeList are accessible via an integral index, starting from 0.
12606 */
12607 function NodeList() {}NodeList.prototype = {
12608 /**
12609 * The number of nodes in the list. The range of valid child node indices is 0 to length-1 inclusive.
12610 * @standard level1
12611 */
12612 length: 0,
12613 /**
12614 * Returns the indexth item in the collection. If index is greater than or equal to the number of nodes in the list, this returns null.
12615 * @standard level1
12616 * @param index unsigned long
12617 * Index into the collection.
12618 * @return Node
12619 * The node at the indexth position in the NodeList, or null if that is not a valid index.
12620 */
12621 item: function item(index) {
12622 return this[index] || null;
12623 },
12624 toString: function toString(isHTML, nodeFilter) {
12625 for (var buf = [], i = 0; i < this.length; i++) {
12626 serializeToString(this[i], buf, isHTML, nodeFilter);
12627 }
12628 return buf.join('');
12629 }
12630 };
  /**
   * NodeList whose contents are recomputed (via `refresh`) whenever the
   * owning node/document's modification counter (`_inc`) has changed.
   */
  function LiveNodeList(node, refresh) {
    this._node = node;
    this._refresh = refresh;
    _updateLiveList(this);
  }
  // Re-run the refresh function if the document changed since last access.
  function _updateLiveList(list) {
    var inc = list._node._inc || list._node.ownerDocument._inc;
    if (list._inc != inc) {
      var ls = list._refresh(list._node);
      //console.log(ls.length)
      __set__(list, 'length', ls.length);
      copy(ls, list);
      list._inc = inc;
    }
  }
  // item() refreshes lazily, so stale lists self-heal on access.
  LiveNodeList.prototype.item = function (i) {
    _updateLiveList(this);
    return this[i];
  };

  _extends$2(LiveNodeList, NodeList);
  /**
   *
   * Objects implementing the NamedNodeMap interface are used to represent collections of nodes that can be accessed by name. Note that NamedNodeMap does not inherit from NodeList; NamedNodeMaps are not maintained in any particular order. Objects contained in an object implementing NamedNodeMap may also be accessed by an ordinal index, but this is simply to allow convenient enumeration of the contents of a NamedNodeMap, and does not imply that the DOM specifies an order to these Nodes.
   * NamedNodeMap objects in the DOM are live.
   * used for attributes or DocumentType entities
   */
  function NamedNodeMap() {}
12659 function _findNodeIndex(list, node) {
12660 var i = list.length;
12661 while (i--) {
12662 if (list[i] === node) {
12663 return i;
12664 }
12665 }
12666 }
12667
  // Insert `newAttr` into `list`, replacing `oldAttr` in place when given,
  // and keep ownerElement plus the document's mutation hooks in sync.
  function _addNamedNode(el, list, newAttr, oldAttr) {
    if (oldAttr) {
      list[_findNodeIndex(list, oldAttr)] = newAttr;
    } else {
      list[list.length++] = newAttr;
    }
    if (el) {
      newAttr.ownerElement = el;
      var doc = el.ownerDocument;
      if (doc) {
        oldAttr && _onRemoveAttribute(doc, el, oldAttr);
        _onAddAttribute(doc, el, newAttr);
      }
    }
  }
  // Remove `attr` from `list`, shifting later entries down one slot;
  // throws a NOT_FOUND_ERR DOMException when the attribute is absent.
  function _removeNamedNode(el, list, attr) {
    //console.log('remove attr:'+attr)
    var i = _findNodeIndex(list, attr);
    if (i >= 0) {
      var lastIndex = list.length - 1;
      while (i < lastIndex) {
        list[i] = list[++i];
      }
      list.length = lastIndex;
      if (el) {
        var doc = el.ownerDocument;
        if (doc) {
          _onRemoveAttribute(doc, el, attr);
          attr.ownerElement = null;
        }
      }
    } else {
      throw DOMException(NOT_FOUND_ERR, new Error(el.tagName + '@' + attr));
    }
  }
  NamedNodeMap.prototype = {
    length: 0,
    item: NodeList.prototype.item,
    // Linear scan for an attribute whose nodeName equals `key`;
    // returns undefined when no match is found.
    getNamedItem: function getNamedItem(key) {
      //		if(key.indexOf(':')>0 || key == 'xmlns'){
      //			return null;
      //		}
      //console.log()
      var i = this.length;
      while (i--) {
        var attr = this[i];
        //console.log(attr.nodeName,key)
        if (attr.nodeName == key) {
          return attr;
        }
      }
    },
    // Add (or replace) an attribute by nodeName; returns the replaced
    // attribute, if any. Throws INUSE_ATTRIBUTE_ERR when the attribute
    // already belongs to a different element.
    setNamedItem: function setNamedItem(attr) {
      var el = attr.ownerElement;
      if (el && el != this._ownerElement) {
        throw new DOMException(INUSE_ATTRIBUTE_ERR);
      }
      var oldAttr = this.getNamedItem(attr.nodeName);
      _addNamedNode(this._ownerElement, this, attr, oldAttr);
      return oldAttr;
    },
    /* returns Node */
    setNamedItemNS: function setNamedItemNS(attr) {
      // raises: WRONG_DOCUMENT_ERR,NO_MODIFICATION_ALLOWED_ERR,INUSE_ATTRIBUTE_ERR
      var el = attr.ownerElement,
          oldAttr;
      if (el && el != this._ownerElement) {
        throw new DOMException(INUSE_ATTRIBUTE_ERR);
      }
      oldAttr = this.getNamedItemNS(attr.namespaceURI, attr.localName);
      _addNamedNode(this._ownerElement, this, attr, oldAttr);
      return oldAttr;
    },

    /* returns Node */
    removeNamedItem: function removeNamedItem(key) {
      var attr = this.getNamedItem(key);
      _removeNamedNode(this._ownerElement, this, attr);
      return attr;
    }, // raises: NOT_FOUND_ERR,NO_MODIFICATION_ALLOWED_ERR

    //for level2
    removeNamedItemNS: function removeNamedItemNS(namespaceURI, localName) {
      var attr = this.getNamedItemNS(namespaceURI, localName);
      _removeNamedNode(this._ownerElement, this, attr);
      return attr;
    },
    // Match on the (namespaceURI, localName) pair; returns null when absent.
    getNamedItemNS: function getNamedItemNS(namespaceURI, localName) {
      var i = this.length;
      while (i--) {
        var node = this[i];
        if (node.localName == localName && node.namespaceURI == namespaceURI) {
          return node;
        }
      }
      return null;
    }
  };
/**
 * DOM Level 1 DOMImplementation.
 * @see http://www.w3.org/TR/REC-DOM-Level-1/level-one-core.html#ID-102161490
 * @param {Object} [features] map of feature name -> map of supported
 *   versions, e.g. { core: { '1.0': true } }; consulted by hasFeature().
 */
function DOMImplementation( /* Object */features) {
  this._features = {};
  if (features) {
    for (var feature in features) {
      // Fix: copy each entry into the feature map. The original assigned
      // `this._features = features[feature]`, clobbering the whole map with
      // the last entry's version object, which broke hasFeature() lookups.
      this._features[feature] = features[feature];
    }
  }
}
DOMImplementation.prototype = {
  /**
   * Report whether a feature/version pair was registered at construction.
   * Omitting `version` matches any registered version of the feature.
   */
  hasFeature: function hasFeature( /* string */feature, /* string */version) {
    var versions = this._features[feature.toLowerCase()];
    return Boolean(versions && (!version || version in versions));
  },
  // Introduced in DOM Level 2: build a Document, optionally attaching a
  // doctype and a namespaced root element.
  createDocument: function createDocument(namespaceURI, qualifiedName, doctype) {
    // raises: INVALID_CHARACTER_ERR, NAMESPACE_ERR, WRONG_DOCUMENT_ERR
    var doc = new Document();
    doc.implementation = this;
    doc.childNodes = new NodeList();
    doc.doctype = doctype;
    if (doctype) {
      doc.appendChild(doctype);
    }
    if (qualifiedName) {
      doc.appendChild(doc.createElementNS(namespaceURI, qualifiedName));
    }
    return doc;
  },
  // Introduced in DOM Level 2: create a mostly-inert DocumentType node.
  createDocumentType: function createDocumentType(qualifiedName, publicId, systemId) {
    // raises: INVALID_CHARACTER_ERR, NAMESPACE_ERR
    var node = new DocumentType();
    node.name = qualifiedName;
    node.nodeName = qualifiedName;
    node.publicId = publicId;
    node.systemId = systemId;
    // TODO: internalSubset and the entities/notations NamedNodeMaps
    // (DOM Level 2) are not implemented.
    return node;
  }
};
12819
12820 /**
12821 * @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-1950641247
12822 */
12823
function Node() {}

// Base implementation shared by every node type. Tree links are plain
// properties maintained by _insertBefore/_removeChild below.
Node.prototype = {
  firstChild: null,
  lastChild: null,
  previousSibling: null,
  nextSibling: null,
  attributes: null,
  parentNode: null,
  childNodes: null,
  ownerDocument: null,
  nodeValue: null,
  namespaceURI: null,
  prefix: null,
  localName: null,
  // Modified in DOM Level 2:
  insertBefore: function insertBefore(newChild, refChild) {
    // raises (hierarchy errors handled by _insertBefore in a full impl)
    return _insertBefore(this, newChild, refChild);
  },
  replaceChild: function replaceChild(newChild, oldChild) {
    // Insert first, then drop the old node. NOTE(review): returns undefined,
    // while the spec says replaceChild returns oldChild — kept as-is.
    this.insertBefore(newChild, oldChild);
    if (oldChild) {
      this.removeChild(oldChild);
    }
  },
  removeChild: function removeChild(oldChild) {
    return _removeChild(this, oldChild);
  },
  appendChild: function appendChild(newChild) {
    // Append == insert before nothing.
    return this.insertBefore(newChild, null);
  },
  hasChildNodes: function hasChildNodes() {
    return this.firstChild != null;
  },
  cloneNode: function cloneNode(deep) {
    return _cloneNode(this.ownerDocument || this, this, deep);
  },
  // Modified in DOM Level 2: merge adjacent text nodes, recurse elsewhere.
  normalize: function normalize() {
    var child = this.firstChild;
    while (child) {
      var next = child.nextSibling;
      if (next && next.nodeType == TEXT_NODE && child.nodeType == TEXT_NODE) {
        // Fold the following text node into this one; `child` is not
        // advanced so further adjacent text nodes are folded too.
        this.removeChild(next);
        child.appendData(next.data);
      } else {
        child.normalize();
        child = next;
      }
    }
  },
  // Introduced in DOM Level 2:
  isSupported: function isSupported(feature, version) {
    return this.ownerDocument.implementation.hasFeature(feature, version);
  },
  // Introduced in DOM Level 2:
  hasAttributes: function hasAttributes() {
    return this.attributes.length > 0;
  },
  // Walk up the tree looking for a prefix bound to namespaceURI in any
  // ancestor's _nsMap (populated from xmlns attributes).
  lookupPrefix: function lookupPrefix(namespaceURI) {
    var el = this;
    while (el) {
      var map = el._nsMap;
      if (map) {
        for (var n in map) {
          if (map[n] == namespaceURI) {
            return n;
          }
        }
      }
      // Attribute nodes continue the walk at their ownerDocument;
      // everything else walks to the parent.
      el = el.nodeType == ATTRIBUTE_NODE ? el.ownerDocument : el.parentNode;
    }
    return null;
  },
  // Introduced in DOM Level 3: inverse of lookupPrefix.
  lookupNamespaceURI: function lookupNamespaceURI(prefix) {
    var el = this;
    while (el) {
      var map = el._nsMap;
      if (map) {
        if (prefix in map) {
          return map[prefix];
        }
      }
      el = el.nodeType == ATTRIBUTE_NODE ? el.ownerDocument : el.parentNode;
    }
    return null;
  },
  // Introduced in DOM Level 3:
  isDefaultNamespace: function isDefaultNamespace(namespaceURI) {
    var prefix = this.lookupPrefix(namespaceURI);
    return prefix == null;
  }
};
12921
/**
 * Escape one character for XML output (used as a String#replace callback).
 * The four markup characters map to named entities; anything else becomes a
 * decimal character reference.
 */
function _xmlEncoder(c) {
  switch (c) {
    case '<':
      return '&lt;';
    case '>':
      return '&gt;';
    case '&':
      return '&amp;';
    case '"':
      return '&quot;';
    default:
      return '&#' + c.charCodeAt(0) + ';';
  }
}
12925
// Mirror the numeric nodeType constants (ELEMENT_NODE, TEXT_NODE, ...) onto
// both the Node constructor and its prototype, as the DOM spec requires.
copy(NodeType, Node);
copy(NodeType, Node.prototype);
12928
/**
 * Pre-order traversal of `node` and all of its descendants.
 * @param callback invoked once per node; a truthy return stops the walk.
 * @return true when the callback stopped the walk, otherwise undefined.
 */
function _visitNode(node, callback) {
  if (callback(node)) {
    return true;
  }
  // nextSibling is read only after the child's subtree has been visited,
  // matching the original traversal order exactly.
  for (var child = node.firstChild; child; child = child.nextSibling) {
    if (_visitNode(child, callback)) {
      return true;
    }
  }
}
12945
// Document constructor; real initialization happens in
// DOMImplementation.createDocument and on Document.prototype below.
function Document() {}

// Attribute-added hook: bump the document mutation counter and, for xmlns
// declarations, record the prefix -> URI binding on the element's _nsMap.
function _onAddAttribute(doc, el, newAttr) {
  doc && doc._inc++;
  var ns = newAttr.namespaceURI;
  if (ns == 'http://www.w3.org/2000/xmlns/') {
    // 'xmlns:foo' binds prefix 'foo'; a bare 'xmlns' binds the default ''.
    el._nsMap[newAttr.prefix ? newAttr.localName : ''] = newAttr.value;
  }
}

// Attribute-removed hook: counterpart of _onAddAttribute. The trailing
// `remove` parameter is unused by callers seen here.
function _onRemoveAttribute(doc, el, newAttr, remove) {
  doc && doc._inc++;
  var ns = newAttr.namespaceURI;
  if (ns == 'http://www.w3.org/2000/xmlns/') {
    // Drop the namespace binding this attribute declared.
    delete el._nsMap[newAttr.prefix ? newAttr.localName : ''];
  }
}
/**
 * Child-list-changed hook: keep the element's array-style childNodes cache
 * in sync and bump the document's mutation counter (`_inc`), which live
 * node lists use to detect staleness.
 * With `newChild` given, it is appended to the cache (fast path); without
 * it, the cache is rebuilt from the firstChild/nextSibling chain.
 */
function _onUpdateChild(doc, el, newChild) {
  if (doc && doc._inc) {
    doc._inc++;
    var cs = el.childNodes;
    if (newChild) {
      // Fast path: append at the end of the cache.
      cs[cs.length++] = newChild;
    } else {
      // Rebuild the cache from the sibling chain.
      var cursor = el.firstChild;
      var count = 0;
      while (cursor) {
        cs[count++] = cursor;
        cursor = cursor.nextSibling;
      }
      cs.length = count;
    }
  }
}
12982
/**
 * Unlink `child` from parentNode's sibling chain and notify the document.
 * Note: child's own parentNode/previousSibling/nextSibling are intentionally
 * left untouched (callers either relink or discard the node).
 * @return the removed child.
 */
function _removeChild(parentNode, child) {
  var prev = child.previousSibling;
  var next = child.nextSibling;
  if (prev) {
    prev.nextSibling = next;
  } else {
    // child was the first child
    parentNode.firstChild = next;
  }
  if (next) {
    next.previousSibling = prev;
  } else {
    // child was the last child
    parentNode.lastChild = prev;
  }
  _onUpdateChild(parentNode.ownerDocument, parentNode);
  return child;
}
/**
 * Core insertion: link newChild — or, for a DocumentFragment, its whole
 * child range — into parentNode's sibling chain before nextChild
 * (nextChild == null means append; that is the performance-critical path).
 */
function _insertBefore(parentNode, newChild, nextChild) {
  var cp = newChild.parentNode;
  if (cp) {
    cp.removeChild(newChild); // detach first; also updates cp's caches
  }
  // The inserted range is [newFirst .. newLast]: the fragment's children,
  // or the single node itself. Note newFirst/newLast are function-scoped
  // (var hoisting) — the else branch assigns the same variables.
  if (newChild.nodeType === DOCUMENT_FRAGMENT_NODE) {
    var newFirst = newChild.firstChild;
    if (newFirst == null) {
      return newChild; // empty fragment: nothing to insert
    }
    var newLast = newChild.lastChild;
  } else {
    newFirst = newLast = newChild;
  }
  // `pre` is the node that will precede the inserted range.
  var pre = nextChild ? nextChild.previousSibling : parentNode.lastChild;

  newFirst.previousSibling = pre;
  newLast.nextSibling = nextChild;

  if (pre) {
    pre.nextSibling = newFirst;
  } else {
    parentNode.firstChild = newFirst;
  }
  if (nextChild == null) {
    parentNode.lastChild = newLast;
  } else {
    nextChild.previousSibling = newLast;
  }
  // Reparent every node in the inserted range.
  do {
    newFirst.parentNode = parentNode;
  } while (newFirst !== newLast && (newFirst = newFirst.nextSibling));
  _onUpdateChild(parentNode.ownerDocument || parentNode, parentNode);
  // Inserting a fragment empties it.
  if (newChild.nodeType == DOCUMENT_FRAGMENT_NODE) {
    newChild.firstChild = newChild.lastChild = null;
  }
  return newChild;
}
/**
 * Fast-path append used by Element.appendChild for non-fragment nodes:
 * detach `newChild` from any current parent, link it as parentNode's last
 * child, and sync the childNodes cache via _onUpdateChild.
 * (Cleanup: the original declared `var pre = parentNode.lastChild` three
 * times; only the final read mattered, so the two dead reads are removed.
 * An unreachable comment after the return was dropped as well.)
 * @return the appended child.
 */
function _appendSingleChild(parentNode, newChild) {
  var cp = newChild.parentNode;
  if (cp) {
    cp.removeChild(newChild); // detach first; also updates cp's caches
  }
  // Read lastChild only after the detach: newChild may have been
  // parentNode's own last child.
  var pre = parentNode.lastChild;
  newChild.parentNode = parentNode;
  newChild.previousSibling = pre;
  newChild.nextSibling = null;
  if (pre) {
    pre.nextSibling = newChild;
  } else {
    parentNode.firstChild = newChild;
  }
  parentNode.lastChild = newChild;
  _onUpdateChild(parentNode.ownerDocument, parentNode, newChild);
  return newChild;
}
Document.prototype = {
  // implementation: assigned by DOMImplementation.createDocument
  nodeName: '#document',
  nodeType: DOCUMENT_NODE,
  doctype: null,
  documentElement: null,
  // Mutation counter; bumped on every tree/attribute change and used by
  // live node lists to detect staleness.
  _inc: 1,

  insertBefore: function insertBefore(newChild, refChild) {
    // A fragment is spliced in child-by-child; the fragment node itself is
    // never attached to the document.
    if (newChild.nodeType == DOCUMENT_FRAGMENT_NODE) {
      var child = newChild.firstChild;
      while (child) {
        var next = child.nextSibling;
        this.insertBefore(child, refChild);
        child = next;
      }
      return newChild;
    }
    // The first element inserted becomes the document element.
    if (this.documentElement == null && newChild.nodeType == ELEMENT_NODE) {
      this.documentElement = newChild;
    }

    // Comma expression: insert, adopt into this document, return newChild.
    return _insertBefore(this, newChild, refChild), newChild.ownerDocument = this, newChild;
  },
  removeChild: function removeChild(oldChild) {
    if (this.documentElement == oldChild) {
      this.documentElement = null;
    }
    return _removeChild(this, oldChild);
  },
  // Introduced in DOM Level 2:
  importNode: function importNode(importedNode, deep) {
    return _importNode(this, importedNode, deep);
  },
  // Introduced in DOM Level 2. Linear scan of the tree for an element whose
  // 'id' attribute matches — no ID index is maintained.
  getElementById: function getElementById(id) {
    var rtv = null;
    _visitNode(this.documentElement, function (node) {
      if (node.nodeType == ELEMENT_NODE) {
        if (node.getAttribute('id') == id) {
          rtv = node;
          return true; // stop the traversal
        }
      }
    });
    return rtv;
  },

  // --- document factory methods ---
  createElement: function createElement(tagName) {
    var node = new Element();
    node.ownerDocument = this;
    node.nodeName = tagName;
    node.tagName = tagName;
    node.childNodes = new NodeList();
    var attrs = node.attributes = new NamedNodeMap();
    attrs._ownerElement = node;
    return node;
  },
  createDocumentFragment: function createDocumentFragment() {
    var node = new DocumentFragment();
    node.ownerDocument = this;
    node.childNodes = new NodeList();
    return node;
  },
  createTextNode: function createTextNode(data) {
    var node = new Text();
    node.ownerDocument = this;
    // appendData also sets nodeValue and length.
    node.appendData(data);
    return node;
  },
  createComment: function createComment(data) {
    var node = new Comment();
    node.ownerDocument = this;
    node.appendData(data);
    return node;
  },
  createCDATASection: function createCDATASection(data) {
    var node = new CDATASection();
    node.ownerDocument = this;
    node.appendData(data);
    return node;
  },
  createProcessingInstruction: function createProcessingInstruction(target, data) {
    var node = new ProcessingInstruction();
    node.ownerDocument = this;
    node.tagName = node.target = target;
    node.nodeValue = node.data = data;
    return node;
  },
  createAttribute: function createAttribute(name) {
    var node = new Attr();
    node.ownerDocument = this;
    node.name = name;
    node.nodeName = name;
    node.localName = name;
    node.specified = true;
    return node;
  },
  createEntityReference: function createEntityReference(name) {
    var node = new EntityReference();
    node.ownerDocument = this;
    node.nodeName = name;
    return node;
  },
  // Introduced in DOM Level 2: split 'prefix:local' qualified names.
  createElementNS: function createElementNS(namespaceURI, qualifiedName) {
    var node = new Element();
    var pl = qualifiedName.split(':');
    var attrs = node.attributes = new NamedNodeMap();
    node.childNodes = new NodeList();
    node.ownerDocument = this;
    node.nodeName = qualifiedName;
    node.tagName = qualifiedName;
    node.namespaceURI = namespaceURI;
    if (pl.length == 2) {
      node.prefix = pl[0];
      node.localName = pl[1];
    } else {
      // no prefix: localName is the whole qualified name
      node.localName = qualifiedName;
    }
    attrs._ownerElement = node;
    return node;
  },
  // Introduced in DOM Level 2:
  createAttributeNS: function createAttributeNS(namespaceURI, qualifiedName) {
    var node = new Attr();
    var pl = qualifiedName.split(':');
    node.ownerDocument = this;
    node.nodeName = qualifiedName;
    node.name = qualifiedName;
    node.namespaceURI = namespaceURI;
    node.specified = true;
    if (pl.length == 2) {
      node.prefix = pl[0];
      node.localName = pl[1];
    } else {
      // no prefix: localName is the whole qualified name
      node.localName = qualifiedName;
    }
    return node;
  }
};
_extends$2(Document, Node);
13216
function Element() {
  // prefix -> namespaceURI bindings declared by xmlns attributes on this
  // element (maintained by _onAddAttribute/_onRemoveAttribute).
  this._nsMap = {};
}
Element.prototype = {
  nodeType: ELEMENT_NODE,
  hasAttribute: function hasAttribute(name) {
    return this.getAttributeNode(name) != null;
  },
  getAttribute: function getAttribute(name) {
    var attr = this.getAttributeNode(name);
    // Returns '' (not null) when the attribute is absent.
    return attr && attr.value || '';
  },
  getAttributeNode: function getAttributeNode(name) {
    return this.attributes.getNamedItem(name);
  },
  setAttribute: function setAttribute(name, value) {
    var attr = this.ownerDocument.createAttribute(name);
    // Coerce to string, mirrored onto nodeValue.
    attr.value = attr.nodeValue = "" + value;
    this.setAttributeNode(attr);
  },
  removeAttribute: function removeAttribute(name) {
    var attr = this.getAttributeNode(name);
    attr && this.removeAttributeNode(attr);
  },

  // --- the four real operation methods ---
  appendChild: function appendChild(newChild) {
    if (newChild.nodeType === DOCUMENT_FRAGMENT_NODE) {
      return this.insertBefore(newChild, null);
    } else {
      // Fast path for single (non-fragment) nodes.
      return _appendSingleChild(this, newChild);
    }
  },
  setAttributeNode: function setAttributeNode(newAttr) {
    return this.attributes.setNamedItem(newAttr);
  },
  setAttributeNodeNS: function setAttributeNodeNS(newAttr) {
    return this.attributes.setNamedItemNS(newAttr);
  },
  removeAttributeNode: function removeAttributeNode(oldAttr) {
    // Removal is keyed by nodeName, not by node identity.
    return this.attributes.removeNamedItem(oldAttr.nodeName);
  },
  // Resolve the real attribute node by NS, then remove it by node.
  removeAttributeNS: function removeAttributeNS(namespaceURI, localName) {
    var old = this.getAttributeNodeNS(namespaceURI, localName);
    old && this.removeAttributeNode(old);
  },

  hasAttributeNS: function hasAttributeNS(namespaceURI, localName) {
    return this.getAttributeNodeNS(namespaceURI, localName) != null;
  },
  getAttributeNS: function getAttributeNS(namespaceURI, localName) {
    var attr = this.getAttributeNodeNS(namespaceURI, localName);
    return attr && attr.value || '';
  },
  setAttributeNS: function setAttributeNS(namespaceURI, qualifiedName, value) {
    var attr = this.ownerDocument.createAttributeNS(namespaceURI, qualifiedName);
    attr.value = attr.nodeValue = "" + value;
    this.setAttributeNode(attr);
  },
  getAttributeNodeNS: function getAttributeNodeNS(namespaceURI, localName) {
    return this.attributes.getNamedItemNS(namespaceURI, localName);
  },

  // Live list: the refresh function re-walks the subtree collecting
  // matching descendants ('*' matches any tag).
  getElementsByTagName: function getElementsByTagName(tagName) {
    return new LiveNodeList(this, function (base) {
      var ls = [];
      _visitNode(base, function (node) {
        if (node !== base && node.nodeType == ELEMENT_NODE && (tagName === '*' || node.tagName == tagName)) {
          ls.push(node);
        }
      });
      return ls;
    });
  },
  getElementsByTagNameNS: function getElementsByTagNameNS(namespaceURI, localName) {
    return new LiveNodeList(this, function (base) {
      var ls = [];
      _visitNode(base, function (node) {
        if (node !== base && node.nodeType === ELEMENT_NODE && (namespaceURI === '*' || node.namespaceURI === namespaceURI) && (localName === '*' || node.localName == localName)) {
          ls.push(node);
        }
      });
      return ls;
    });
  }
};
// Documents search their tree the same way elements do.
Document.prototype.getElementsByTagName = Element.prototype.getElementsByTagName;
Document.prototype.getElementsByTagNameNS = Element.prototype.getElementsByTagNameNS;

_extends$2(Element, Node);
// Attr: attribute node. Only the node type lives here; name/value handling
// is done by the Document factories and NamedNodeMap.
function Attr() {}
Attr.prototype.nodeType = ATTRIBUTE_NODE;
_extends$2(Attr, Node);
13310
// Shared base for Text/Comment/CDATASection: a mutable `data` string kept
// in sync with nodeValue and length by every mutator.
function CharacterData() {}
CharacterData.prototype = {
  data: '',
  substringData: function substringData(offset, count) {
    return this.data.substring(offset, offset + count);
  },
  appendData: function appendData(text) {
    text = this.data + text;
    this.nodeValue = this.data = text;
    this.length = text.length;
  },
  insertData: function insertData(offset, text) {
    this.replaceData(offset, 0, text);
  },
  // Character data nodes cannot have children.
  appendChild: function appendChild(newChild) {
    throw new Error(ExceptionMessage[HIERARCHY_REQUEST_ERR]);
  },
  deleteData: function deleteData(offset, count) {
    this.replaceData(offset, count, "");
  },
  // General splice: replace `count` chars starting at `offset` with `text`.
  replaceData: function replaceData(offset, count, text) {
    var start = this.data.substring(0, offset);
    var end = this.data.substring(offset + count);
    text = start + text + end;
    this.nodeValue = this.data = text;
    this.length = text.length;
  }
};
_extends$2(CharacterData, Node);
function Text() {}
Text.prototype = {
  nodeName: "#text",
  nodeType: TEXT_NODE,
  // Split this text node at `offset`; the tail becomes a new text node
  // inserted as the immediately following sibling (when parented).
  splitText: function splitText(offset) {
    var text = this.data;
    var newText = text.substring(offset);
    text = text.substring(0, offset);
    this.data = this.nodeValue = text;
    this.length = text.length;
    var newNode = this.ownerDocument.createTextNode(newText);
    if (this.parentNode) {
      this.parentNode.insertBefore(newNode, this.nextSibling);
    }
    return newNode;
  }
};
_extends$2(Text, CharacterData);
function Comment() {}
Comment.prototype = {
  nodeName: "#comment",
  nodeType: COMMENT_NODE
};
_extends$2(Comment, CharacterData);

function CDATASection() {}
CDATASection.prototype = {
  nodeName: "#cdata-section",
  nodeType: CDATA_SECTION_NODE
};
_extends$2(CDATASection, CharacterData);

// The remaining node types only carry a nodeType (plus a nodeName for
// fragments); all behavior comes from Node.
function DocumentType() {}
DocumentType.prototype.nodeType = DOCUMENT_TYPE_NODE;
_extends$2(DocumentType, Node);

function Notation() {}
Notation.prototype.nodeType = NOTATION_NODE;
_extends$2(Notation, Node);

function Entity() {}
Entity.prototype.nodeType = ENTITY_NODE;
_extends$2(Entity, Node);

function EntityReference() {}
EntityReference.prototype.nodeType = ENTITY_REFERENCE_NODE;
_extends$2(EntityReference, Node);

function DocumentFragment() {}
DocumentFragment.prototype.nodeName = "#document-fragment";
DocumentFragment.prototype.nodeType = DOCUMENT_FRAGMENT_NODE;
_extends$2(DocumentFragment, Node);

function ProcessingInstruction() {}
ProcessingInstruction.prototype.nodeType = PROCESSING_INSTRUCTION_NODE;
_extends$2(ProcessingInstruction, Node);
// W3C-style serializer facade; delegates to nodeSerializeToString below
// with `node` bound as `this`.
function XMLSerializer() {}
XMLSerializer.prototype.serializeToString = function (node, isHtml, nodeFilter) {
  return nodeSerializeToString.call(node, isHtml, nodeFilter);
};
Node.prototype.toString = nodeSerializeToString;

/**
 * Serialize `this` node to markup (a Document serializes via its
 * documentElement). When the reference node has a namespace but no
 * resolvable prefix, the serializer is seeded with a default-namespace
 * entry so an xmlns declaration is emitted.
 * (Cleanup: removed the duplicate `var prefix` redeclaration and hoisted
 * `visibleNamespaces` out of the nested block it was declared in — the old
 * code relied on var hoisting to use it afterwards. Behavior unchanged.)
 * @param {boolean} [isHtml] use HTML serialization rules.
 * @param {Function} [nodeFilter] optional per-node filter/mapper.
 * @returns {string}
 */
function nodeSerializeToString(isHtml, nodeFilter) {
  var buf = [];
  var refNode = this.nodeType == 9 ? this.documentElement : this;
  var prefix = refNode.prefix;
  var uri = refNode.namespaceURI;
  var visibleNamespaces; // stays undefined unless seeded below

  if (uri && prefix == null) {
    prefix = refNode.lookupPrefix(uri);
    if (prefix == null) {
      visibleNamespaces = [{ namespace: uri, prefix: null }];
    }
  }
  serializeToString(this, buf, isHtml, nodeFilter, visibleNamespaces);
  return buf.join('');
}
/**
 * Decide whether serialization must emit an xmlns declaration for `node`
 * (an element or attribute), given the namespace bindings already in scope.
 */
function needNamespaceDefine(node, isHTML, visibleNamespaces) {
  var prefix = node.prefix || '';
  var uri = node.namespaceURI;
  // No namespace information at all: nothing to declare.
  if (!prefix && !uri) {
    return false;
  }
  // The 'xml' prefix and the xmlns namespace itself are implicitly bound.
  if (prefix === 'xml' && uri === 'http://www.w3.org/XML/1998/namespace' || uri == 'http://www.w3.org/2000/xmlns/') {
    return false;
  }
  // Scan innermost-first: the most recently pushed binding for this prefix
  // wins; re-declare only when it maps the prefix to a different URI.
  for (var i = visibleNamespaces.length - 1; i >= 0; i--) {
    var ns = visibleNamespaces[i];
    if (ns.prefix == prefix) {
      return ns.namespace != uri;
    }
  }
  // Prefix not visible yet: a declaration is required.
  return true;
}
/**
 * Recursive serializer: append the markup for `node` onto `buf`.
 * @param buf output accumulator (array of string fragments)
 * @param isHTML HTML rules: void elements, raw <script> text
 * @param nodeFilter optional mapper; may return a replacement node, a
 *        string to emit verbatim, or a falsy value to skip the node
 * @param visibleNamespaces stack of {prefix, namespace} bindings in scope
 */
function serializeToString(node, buf, isHTML, nodeFilter, visibleNamespaces) {
  if (nodeFilter) {
    node = nodeFilter(node);
    if (node) {
      if (typeof node == 'string') {
        // Filter returned raw markup: emit as-is.
        buf.push(node);
        return;
      }
    } else {
      return; // filtered out
    }
  }
  switch (node.nodeType) {
    case ELEMENT_NODE:
      if (!visibleNamespaces) visibleNamespaces = [];
      var startVisibleNamespaces = visibleNamespaces.length;
      var attrs = node.attributes;
      var len = attrs.length;
      var child = node.firstChild;
      var nodeName = node.tagName;

      // Entering the HTML namespace switches on HTML rules for the subtree.
      isHTML = htmlns === node.namespaceURI || isHTML;
      buf.push('<', nodeName);

      // Pass 1: record namespaces declared by this element's own xmlns
      // attributes, so the attribute pass below sees them as visible.
      for (var i = 0; i < len; i++) {
        var attr = attrs.item(i);
        if (attr.prefix == 'xmlns') {
          visibleNamespaces.push({ prefix: attr.localName, namespace: attr.value });
        } else if (attr.nodeName == 'xmlns') {
          visibleNamespaces.push({ prefix: '', namespace: attr.value });
        }
      }
      // Pass 2: emit attributes, declaring any namespace an attribute
      // needs that is not yet visible.
      for (var i = 0; i < len; i++) {
        var attr = attrs.item(i);
        if (needNamespaceDefine(attr, isHTML, visibleNamespaces)) {
          var prefix = attr.prefix || '';
          var uri = attr.namespaceURI;
          var ns = prefix ? ' xmlns:' + prefix : " xmlns";
          buf.push(ns, '="', uri, '"');
          visibleNamespaces.push({ prefix: prefix, namespace: uri });
        }
        serializeToString(attr, buf, isHTML, nodeFilter, visibleNamespaces);
      }
      // Declare the element's own namespace if it is still unbound.
      if (needNamespaceDefine(node, isHTML, visibleNamespaces)) {
        var prefix = node.prefix || '';
        var uri = node.namespaceURI;
        var ns = prefix ? ' xmlns:' + prefix : " xmlns";
        buf.push(ns, '="', uri, '"');
        visibleNamespaces.push({ prefix: prefix, namespace: uri });
      }

      // HTML void elements self-close; everything else gets an explicit
      // end tag (even when empty, in XML mode only if it has children).
      if (child || isHTML && !/^(?:meta|link|img|br|hr|input)$/i.test(nodeName)) {
        buf.push('>');
        // HTML <script> content is emitted raw (no entity escaping).
        if (isHTML && /^script$/i.test(nodeName)) {
          while (child) {
            if (child.data) {
              buf.push(child.data);
            } else {
              serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces);
            }
            child = child.nextSibling;
          }
        } else {
          while (child) {
            serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces);
            child = child.nextSibling;
          }
        }
        buf.push('</', nodeName, '>');
      } else {
        buf.push('/>');
      }
      // NOTE: bindings added above are deliberately not popped here
      // (the `visibleNamespaces.length = startVisibleNamespaces` reset is
      // commented out upstream), so they remain visible to later siblings.
      return;
    case DOCUMENT_NODE:
    case DOCUMENT_FRAGMENT_NODE:
      // Containers have no markup of their own: serialize children only.
      var child = node.firstChild;
      while (child) {
        serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces);
        child = child.nextSibling;
      }
      return;
    case ATTRIBUTE_NODE:
      // Attribute values escape <, & and ".
      return buf.push(' ', node.name, '="', node.value.replace(/[<&"]/g, _xmlEncoder), '"');
    case TEXT_NODE:
      // Text content escapes only < and &.
      return buf.push(node.data.replace(/[<&]/g, _xmlEncoder));
    case CDATA_SECTION_NODE:
      return buf.push('<![CDATA[', node.data, ']]>');
    case COMMENT_NODE:
      return buf.push("<!--", node.data, "-->");
    case DOCUMENT_TYPE_NODE:
      var pubid = node.publicId;
      var sysid = node.systemId;
      buf.push('<!DOCTYPE ', node.name);
      if (pubid) {
        buf.push(' PUBLIC "', pubid);
        if (sysid && sysid != '.') {
          buf.push('" "', sysid);
        }
        buf.push('">');
      } else if (sysid && sysid != '.') {
        buf.push(' SYSTEM "', sysid, '">');
      } else {
        var sub = node.internalSubset;
        if (sub) {
          buf.push(" [", sub, "]");
        }
        buf.push(">");
      }
      return;
    case PROCESSING_INSTRUCTION_NODE:
      return buf.push("<?", node.target, " ", node.data, "?>");
    case ENTITY_REFERENCE_NODE:
      return buf.push('&', node.nodeName, ';');
    //case ENTITY_NODE:
    //case NOTATION_NODE:
    default:
      // Unsupported node type: emit a visible marker instead of throwing.
      buf.push('??', node.nodeName);
  }
}
/**
 * Clone `node` into document `doc` (DOM Level 2 importNode). Attributes are
 * always imported deeply; the clone's parent is reset to null.
 */
function _importNode(doc, node, deep) {
  var node2;
  switch (node.nodeType) {
    case ELEMENT_NODE:
      node2 = node.cloneNode(false);
      node2.ownerDocument = doc;
      // NOTE: deliberate fallthrough — the fragment case below just breaks.
    case DOCUMENT_FRAGMENT_NODE:
      break;
    case ATTRIBUTE_NODE:
      // An attribute's children (its value parts) always come along.
      deep = true;
      break;
    // ENTITY_REFERENCE / PI / TEXT / CDATA / COMMENT fall through to the
    // generic clone below. DOCUMENT / DOCUMENT_TYPE cannot be imported per
    // the spec, but that is not enforced here.
  }
  if (!node2) {
    node2 = node.cloneNode(false); // shallow; children handled below
  }
  node2.ownerDocument = doc;
  node2.parentNode = null;
  if (deep) {
    var child = node.firstChild;
    while (child) {
      node2.appendChild(_importNode(doc, child, deep));
      child = child.nextSibling;
    }
  }
  return node2;
}
// Generic clone used by Node#cloneNode: copy every enumerable primitive
// property that differs from the constructor's default, then rebuild
// structural state (childNodes, attributes) explicitly. Relational object
// references (parentNode, siblings, documentElement, ...) are skipped by
// the `typeof v != 'object'` guard, so the clone starts detached.
function _cloneNode(doc, node, deep) {
  var node2 = new node.constructor();
  for (var n in node) {
    var v = node[n];
    if (typeof v != 'object') {
      if (v != node2[n]) {
        node2[n] = v;
      }
    }
  }
  if (node.childNodes) {
    node2.childNodes = new NodeList();
  }
  node2.ownerDocument = doc;
  switch (node2.nodeType) {
    case ELEMENT_NODE:
      var attrs = node.attributes;
      var attrs2 = node2.attributes = new NamedNodeMap();
      var len = attrs.length;
      attrs2._ownerElement = node2;
      // Attributes are cloned deeply regardless of `deep`.
      for (var i = 0; i < len; i++) {
        node2.setAttributeNode(_cloneNode(doc, attrs.item(i), true));
      }
      break;
    case ATTRIBUTE_NODE:
      // An attribute's value children always come along.
      deep = true;
  }
  if (deep) {
    var child = node.firstChild;
    while (child) {
      node2.appendChild(_cloneNode(doc, child, deep));
      child = child.nextSibling;
    }
  }
  return node2;
}
13649
/**
 * Property-write indirection. This plain assignment is the fallback; when
 * Object.defineProperty is usable it is re-bound (below) to write to a
 * '$$'-prefixed backing slot read by the accessor properties.
 */
function __set__(object, key, value) {
  object[key] = value;
}
// Feature-test Object.defineProperty and, when it works, install dynamic
// accessor properties:
//   - LiveNodeList#length: refreshes the cached list before reporting size
//   - Node#textContent: recursive get/set per DOM Level 3
// and re-bind __set__ to write the '$$'-prefixed backing fields the
// accessors read.
try {
  if (Object.defineProperty) {
    // Concatenated descendant text, skipping PIs (nodeType 7) and
    // comments (nodeType 8).
    var getTextContent = function getTextContent(node) {
      switch (node.nodeType) {
        case ELEMENT_NODE:
        case DOCUMENT_FRAGMENT_NODE:
          var buf = [];
          node = node.firstChild;
          while (node) {
            if (node.nodeType !== 7 && node.nodeType !== 8) {
              buf.push(getTextContent(node));
            }
            node = node.nextSibling;
          }
          return buf.join('');
        default:
          return node.nodeValue;
      }
    };

    Object.defineProperty(LiveNodeList.prototype, 'length', {
      get: function get() {
        _updateLiveList(this);
        return this.$$length;
      }
    });
    Object.defineProperty(Node.prototype, 'textContent', {
      get: function get() {
        return getTextContent(this);
      },
      set: function set(data) {
        switch (this.nodeType) {
          case ELEMENT_NODE:
          case DOCUMENT_FRAGMENT_NODE:
            // Replace all children with a single text node (or nothing
            // for an empty string).
            while (this.firstChild) {
              this.removeChild(this.firstChild);
            }
            if (data || String(data)) {
              this.appendChild(this.ownerDocument.createTextNode(data));
            }
            break;
          default:
            // Character data / attribute nodes: update all value aliases.
            this.data = data;
            this.value = data;
            this.nodeValue = data;
        }
      }
    });

    __set__ = function __set__(object, key, value) {
      // Write the '$$' backing slot read by the accessors above.
      object['$$' + key] = value;
    };
  }
} catch (e) {} // ie8: defineProperty exists but only works on DOM objects
13710
13711
//if(typeof require == 'function'){
var DOMImplementation_1 = DOMImplementation;
var XMLSerializer_1 = XMLSerializer;
//}

// Public surface of this inlined xmldom `dom` module.
var dom = {
  DOMImplementation: DOMImplementation_1,
  XMLSerializer: XMLSerializer_1
};
13721
13722 var domParser = createCommonjsModule(function (module, exports) {
/**
 * SAX-backed XML/HTML parser facade.
 * @param {Object} [options] may carry locator, errorHandler, domBuilder,
 *   and an xmlns default-namespace map. Defaults to enabling position
 *   tracking via a truthy locator object.
 */
function DOMParser(options) {
  this.options = options ? options : { locator: {} };
}
/**
 * Parse markup into a Document.
 * @param {string} source the markup to parse
 * @param {string} mimeType e.g. 'text/xml' or 'text/html'; an html-ish type
 *   enables the XHTML default namespace plus nbsp/copy entities.
 * @returns the Document built by the DOM handler (domBuilder.doc)
 */
DOMParser.prototype.parseFromString = function (source, mimeType) {
  var options = this.options;
  var sax$$1 = new XMLReader();
  var domBuilder = options.domBuilder || new DOMHandler(); //contentHandler and LexicalHandler
  var errorHandler = options.errorHandler;
  var locator = options.locator;
  var defaultNSMap = options.xmlns || {};
  // The five predeclared XML entities; HTML adds nbsp/copy below.
  var entityMap = { 'lt': '<', 'gt': '>', 'amp': '&', 'quot': '"', 'apos': "'" };
  if (locator) {
    domBuilder.setDocumentLocator(locator);
  }

  sax$$1.errorHandler = buildErrorHandler(errorHandler, domBuilder, locator);
  sax$$1.domBuilder = options.domBuilder || domBuilder;
  if (/\/x?html?$/.test(mimeType)) {
    entityMap.nbsp = '\xa0';
    entityMap.copy = '\xa9';
    defaultNSMap[''] = 'http://www.w3.org/1999/xhtml';
  }
  defaultNSMap.xml = defaultNSMap.xml || 'http://www.w3.org/XML/1998/namespace';
  if (source) {
    sax$$1.parse(source, defaultNSMap, entityMap);
  } else {
    sax$$1.errorHandler.error("invalid doc source");
  }
  return domBuilder.doc;
};
  /**
   * Normalizes user-supplied error handling into an object with `warning`,
   * `error` and `fatalError` methods for the SAX reader.
   *
   * Accepted forms for `errorImpl`:
   *  - missing: a DOMHandler domBuilder is used directly (it implements all
   *    three methods); otherwise the domBuilder object is treated as the
   *    handler source
   *  - a function: called as `fn(key, msg)` when it declares two parameters,
   *    or as `fn(msg)` otherwise
   *  - an object: its matching methods are used; missing ones become no-ops
   *
   * Each produced method prefixes messages with `[xmldom <key>]` and appends
   * the current locator position.
   */
  function buildErrorHandler(errorImpl, domBuilder, locator) {
    if (!errorImpl) {
      if (domBuilder instanceof DOMHandler) {
        return domBuilder;
      }
      errorImpl = domBuilder;
    }
    var errorHandler = {};
    var isCallback = errorImpl instanceof Function;
    locator = locator || {};
    function build(key) {
      var fn = errorImpl[key];
      if (!fn && isCallback) {
        // errorImpl.length == 2 means the callback wants (key, msg).
        fn = errorImpl.length == 2 ? function (msg) {
          errorImpl(key, msg);
        } : errorImpl;
      }
      errorHandler[key] = fn && function (msg) {
        fn('[xmldom ' + key + ']\t' + msg + _locator(locator));
      } || function () {};
    }
    build('warning');
    build('error');
    build('fatalError');
    return errorHandler;
  }
13779
13780 //console.log('#\n\n\n\n\n\n\n####')
13781 /**
13782 * +ContentHandler+ErrorHandler
13783 * +LexicalHandler+EntityResolver2
13784 * -DeclHandler-DTDHandler
13785 *
13786 * DefaultHandler:EntityResolver, DTDHandler, ContentHandler, ErrorHandler
13787 * DefaultHandler2:DefaultHandler,LexicalHandler, DeclHandler, EntityResolver2
13788 * @link http://www.saxproject.org/apidoc/org/xml/sax/helpers/DefaultHandler.html
13789 */
  /**
   * SAX ContentHandler/LexicalHandler/ErrorHandler that builds a DOM tree.
   * `cdata` tracks whether characters() is currently inside a CDATA section.
   */
  function DOMHandler() {
    this.cdata = false;
  }
13793 function position(locator, node) {
13794 node.lineNumber = locator.lineNumber;
13795 node.columnNumber = locator.columnNumber;
13796 }
13797 /**
13798 * @see org.xml.sax.ContentHandler#startDocument
13799 * @link http://www.saxproject.org/apidoc/org/xml/sax/ContentHandler.html
13800 */
  DOMHandler.prototype = {
    /**
     * @see org.xml.sax.ContentHandler#startDocument
     * Creates the empty Document that all subsequent SAX events populate.
     */
    startDocument: function startDocument() {
      this.doc = new DOMImplementation().createDocument(null, null, null);
      if (this.locator) {
        this.doc.documentURI = this.locator.systemId;
      }
    },
    // Creates the element, appends it at the current insertion point, makes it
    // the new insertion point, then copies every attribute onto it.
    startElement: function startElement(namespaceURI, localName, qName, attrs) {
      var doc = this.doc;
      var el = doc.createElementNS(namespaceURI, qName || localName);
      var len = attrs.length;
      appendElement(this, el);
      this.currentElement = el;

      this.locator && position(this.locator, el);
      for (var i = 0; i < len; i++) {
        // NOTE(review): these var declarations shadow the outer parameters of
        // the same names for the rest of the function (pre-existing quirk).
        var namespaceURI = attrs.getURI(i);
        var value = attrs.getValue(i);
        var qName = attrs.getQName(i);
        var attr = doc.createAttributeNS(namespaceURI, qName);
        this.locator && position(attrs.getLocator(i), attr);
        attr.value = attr.nodeValue = value;
        el.setAttributeNode(attr);
      }
    },
    // Pops the insertion point back up to the parent node.
    endElement: function endElement(namespaceURI, localName, qName) {
      var current = this.currentElement;
      var tagName = current.tagName;
      this.currentElement = current.parentNode;
    },
    startPrefixMapping: function startPrefixMapping(prefix, uri) {},
    endPrefixMapping: function endPrefixMapping(prefix) {},
    processingInstruction: function processingInstruction(target, data) {
      var ins = this.doc.createProcessingInstruction(target, data);
      this.locator && position(this.locator, ins);
      appendElement(this, ins);
    },
    ignorableWhitespace: function ignorableWhitespace(ch, start, length) {},
    // Text content: becomes a CDATA section while between startCDATA/endCDATA,
    // a regular text node otherwise. Whitespace-only text outside any element
    // is attached to the document itself.
    characters: function characters(chars, start, length) {
      chars = _toString.apply(this, arguments);
      //console.log(chars)
      if (chars) {
        if (this.cdata) {
          var charNode = this.doc.createCDATASection(chars);
        } else {
          var charNode = this.doc.createTextNode(chars);
        }
        if (this.currentElement) {
          this.currentElement.appendChild(charNode);
        } else if (/^\s*$/.test(chars)) {
          this.doc.appendChild(charNode);
          //process xml
        }
        this.locator && position(this.locator, charNode);
      }
    },
    skippedEntity: function skippedEntity(name) {},
    endDocument: function endDocument() {
      this.doc.normalize();
    },
    // Stores the locator (if any) and resets its line counter.
    setDocumentLocator: function setDocumentLocator(locator) {
      if (this.locator = locator) {
        // && !('lineNumber' in locator)){
        locator.lineNumber = 0;
      }
    },
    //LexicalHandler
    comment: function comment(chars, start, length) {
      chars = _toString.apply(this, arguments);
      var comm = this.doc.createComment(chars);
      this.locator && position(this.locator, comm);
      appendElement(this, comm);
    },

    startCDATA: function startCDATA() {
      //used in characters() methods
      this.cdata = true;
    },
    endCDATA: function endCDATA() {
      this.cdata = false;
    },

    startDTD: function startDTD(name, publicId, systemId) {
      var impl = this.doc.implementation;
      if (impl && impl.createDocumentType) {
        var dt = impl.createDocumentType(name, publicId, systemId);
        this.locator && position(this.locator, dt);
        appendElement(this, dt);
      }
    },
    /**
     * @see org.xml.sax.ErrorHandler
     * @link http://www.saxproject.org/apidoc/org/xml/sax/ErrorHandler.html
     */
    warning: function warning(error) {
      console.warn('[xmldom warning]\t' + error, _locator(this.locator));
    },
    error: function error(_error) {
      console.error('[xmldom error]\t' + _error, _locator(this.locator));
    },
    // Fatal errors are logged and then re-thrown to abort parsing.
    fatalError: function fatalError(error) {
      console.error('[xmldom fatalError]\t' + error, _locator(this.locator));
      throw error;
    }
  };
13906 function _locator(l) {
13907 if (l) {
13908 return '\n@' + (l.systemId || '') + '#[line:' + l.lineNumber + ',col:' + l.columnNumber + ']';
13909 }
13910 }
13911 function _toString(chars, start, length) {
13912 if (typeof chars == 'string') {
13913 return chars.substr(start, length);
13914 } else {
13915 //java sax connect width xmldom on rhino(what about: "? && !(chars instanceof String)")
13916 if (chars.length >= start + length || start) {
13917 return new java.lang.String(chars, start, length) + '';
13918 }
13919 return chars;
13920 }
13921 }
13922
13923 /*
13924 * @link http://www.saxproject.org/apidoc/org/xml/sax/ext/LexicalHandler.html
13925 * used method of org.xml.sax.ext.LexicalHandler:
13926 * #comment(chars, start, length)
13927 * #startCDATA()
13928 * #endCDATA()
13929 * #startDTD(name, publicId, systemId)
13930 *
13931 *
13932 * IGNORED method of org.xml.sax.ext.LexicalHandler:
13933 * #endDTD()
13934 * #startEntity(name)
13935 * #endEntity(name)
13936 *
13937 *
13938 * @link http://www.saxproject.org/apidoc/org/xml/sax/ext/DeclHandler.html
13939 * IGNORED method of org.xml.sax.ext.DeclHandler
13940 * #attributeDecl(eName, aName, type, mode, value)
13941 * #elementDecl(name, model)
13942 * #externalEntityDecl(name, publicId, systemId)
13943 * #internalEntityDecl(name, value)
13944 * @link http://www.saxproject.org/apidoc/org/xml/sax/ext/EntityResolver2.html
13945 * IGNORED method of org.xml.sax.EntityResolver2
13946 * #resolveEntity(String name,String publicId,String baseURI,String systemId)
13947 * #resolveEntity(publicId, systemId)
13948 * #getExternalSubset(name, baseURI)
13949 * @link http://www.saxproject.org/apidoc/org/xml/sax/DTDHandler.html
13950 * IGNORED method of org.xml.sax.DTDHandler
13951 * #notationDecl(name, publicId, systemId) {};
13952 * #unparsedEntityDecl(name, publicId, systemId, notationName) {};
13953 */
13954 "endDTD,startEntity,endEntity,attributeDecl,elementDecl,externalEntityDecl,internalEntityDecl,resolveEntity,getExternalSubset,notationDecl,unparsedEntityDecl".replace(/\w+/g, function (key) {
13955 DOMHandler.prototype[key] = function () {
13956 return null;
13957 };
13958 });
13959
13960 /* Private static helpers treated below as private instance methods, so don't need to add these to the public API; we might use a Relator to also get rid of non-standard public properties */
13961 function appendElement(hander, node) {
13962 if (!hander.currentElement) {
13963 hander.doc.appendChild(node);
13964 } else {
13965 hander.currentElement.appendChild(node);
13966 }
13967 } //appendChild and setAttributeNS are preformance key
13968
  //if(typeof require == 'function'){
  // Wire up the module's public surface; DOMImplementation/XMLSerializer are
  // re-exported from the `dom` aggregate defined above, XMLReader comes from
  // the bundled sax module.
  var XMLReader = sax.XMLReader;
  var DOMImplementation = exports.DOMImplementation = dom.DOMImplementation;
  exports.XMLSerializer = dom.XMLSerializer;
  exports.DOMParser = DOMParser;
  //}
  });
  // Named re-exports of the domParser module (rollup CommonJS interop).
  var domParser_1 = domParser.DOMImplementation;
  var domParser_2 = domParser.XMLSerializer;
  var domParser_3 = domParser.DOMParser;
13979
13980 /*! @name mpd-parser @version 0.10.0 @license Apache-2.0 */
13981
13982 var isObject = function isObject(obj) {
13983 return !!obj && typeof obj === 'object';
13984 };
13985
13986 var merge = function merge() {
13987 for (var _len = arguments.length, objects = new Array(_len), _key = 0; _key < _len; _key++) {
13988 objects[_key] = arguments[_key];
13989 }
13990
13991 return objects.reduce(function (result, source) {
13992 Object.keys(source).forEach(function (key) {
13993 if (Array.isArray(result[key]) && Array.isArray(source[key])) {
13994 result[key] = result[key].concat(source[key]);
13995 } else if (isObject(result[key]) && isObject(source[key])) {
13996 result[key] = merge(result[key], source[key]);
13997 } else {
13998 result[key] = source[key];
13999 }
14000 });
14001 return result;
14002 }, {});
14003 };
14004 var values = function values(o) {
14005 return Object.keys(o).map(function (k) {
14006 return o[k];
14007 });
14008 };
14009
14010 var range = function range(start, end) {
14011 var result = [];
14012
14013 for (var i = start; i < end; i++) {
14014 result.push(i);
14015 }
14016
14017 return result;
14018 };
14019 var flatten = function flatten(lists) {
14020 return lists.reduce(function (x, y) {
14021 return x.concat(y);
14022 }, []);
14023 };
14024 var from = function from(list) {
14025 if (!list.length) {
14026 return [];
14027 }
14028
14029 var result = [];
14030
14031 for (var i = 0; i < list.length; i++) {
14032 result.push(list[i]);
14033 }
14034
14035 return result;
14036 };
14037 var findIndexes = function findIndexes(l, key) {
14038 return l.reduce(function (a, e, i) {
14039 if (e[key]) {
14040 a.push(i);
14041 }
14042
14043 return a;
14044 }, []);
14045 };
14046
  // Error codes surfaced by the MPD parser; each value doubles as its own
  // human-readable identifier so codes can be thrown and compared directly.
  var errors = {
    INVALID_NUMBER_OF_PERIOD: 'INVALID_NUMBER_OF_PERIOD',
    DASH_EMPTY_MANIFEST: 'DASH_EMPTY_MANIFEST',
    DASH_INVALID_XML: 'DASH_INVALID_XML',
    NO_BASE_URL: 'NO_BASE_URL',
    MISSING_SEGMENT_INFORMATION: 'MISSING_SEGMENT_INFORMATION',
    SEGMENT_TIME_UNSPECIFIED: 'SEGMENT_TIME_UNSPECIFIED',
    UNSUPPORTED_UTC_TIMING_SCHEME: 'UNSUPPORTED_UTC_TIMING_SCHEME'
  };
14056
14057 /**
14058 * @typedef {Object} SingleUri
14059 * @property {string} uri - relative location of segment
14060 * @property {string} resolvedUri - resolved location of segment
14061 * @property {Object} byterange - Object containing information on how to make byte range
14062 * requests following byte-range-spec per RFC2616.
14063 * @property {String} byterange.length - length of range request
14064 * @property {String} byterange.offset - byte offset of range request
14065 *
14066 * @see https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1
14067 */
14068
14069 /**
14070 * Converts a URLType node (5.3.9.2.3 Table 13) to a segment object
14071 * that conforms to how m3u8-parser is structured
14072 *
14073 * @see https://github.com/videojs/m3u8-parser
14074 *
14075 * @param {string} baseUrl - baseUrl provided by <BaseUrl> nodes
14076 * @param {string} source - source url for segment
14077 * @param {string} range - optional range used for range calls,
14078 * follows RFC 2616, Clause 14.35.1
14079 * @return {SingleUri} full segment information transformed into a format similar
14080 * to m3u8-parser
14081 */
14082
14083 var urlTypeToSegment = function urlTypeToSegment(_ref) {
14084 var _ref$baseUrl = _ref.baseUrl,
14085 baseUrl = _ref$baseUrl === void 0 ? '' : _ref$baseUrl,
14086 _ref$source = _ref.source,
14087 source = _ref$source === void 0 ? '' : _ref$source,
14088 _ref$range = _ref.range,
14089 range = _ref$range === void 0 ? '' : _ref$range,
14090 _ref$indexRange = _ref.indexRange,
14091 indexRange = _ref$indexRange === void 0 ? '' : _ref$indexRange;
14092 var segment = {
14093 uri: source,
14094 resolvedUri: resolveUrl_1(baseUrl || '', source)
14095 };
14096
14097 if (range || indexRange) {
14098 var rangeStr = range ? range : indexRange;
14099 var ranges = rangeStr.split('-');
14100 var startRange = parseInt(ranges[0], 10);
14101 var endRange = parseInt(ranges[1], 10); // byterange should be inclusive according to
14102 // RFC 2616, Clause 14.35.1
14103
14104 segment.byterange = {
14105 length: endRange - startRange + 1,
14106 offset: startRange
14107 };
14108 }
14109
14110 return segment;
14111 };
14112 var byteRangeToString = function byteRangeToString(byterange) {
14113 // `endRange` is one less than `offset + length` because the HTTP range
14114 // header uses inclusive ranges
14115 var endRange = byterange.offset + byterange.length - 1;
14116 return byterange.offset + "-" + endRange;
14117 };
14118
14119 /**
14120 * Functions for calculating the range of available segments in static and dynamic
14121 * manifests.
14122 */
14123
14124 var segmentRange = {
14125 /**
14126 * Returns the entire range of available segments for a static MPD
14127 *
14128 * @param {Object} attributes
14129 * Inheritied MPD attributes
14130 * @return {{ start: number, end: number }}
14131 * The start and end numbers for available segments
14132 */
14133 static: function _static(attributes) {
14134 var duration = attributes.duration,
14135 _attributes$timescale = attributes.timescale,
14136 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
14137 sourceDuration = attributes.sourceDuration;
14138 return {
14139 start: 0,
14140 end: Math.ceil(sourceDuration / (duration / timescale))
14141 };
14142 },
14143
14144 /**
14145 * Returns the current live window range of available segments for a dynamic MPD
14146 *
14147 * @param {Object} attributes
14148 * Inheritied MPD attributes
14149 * @return {{ start: number, end: number }}
14150 * The start and end numbers for available segments
14151 */
14152 dynamic: function dynamic(attributes) {
14153 var NOW = attributes.NOW,
14154 clientOffset = attributes.clientOffset,
14155 availabilityStartTime = attributes.availabilityStartTime,
14156 _attributes$timescale2 = attributes.timescale,
14157 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
14158 duration = attributes.duration,
14159 _attributes$start = attributes.start,
14160 start = _attributes$start === void 0 ? 0 : _attributes$start,
14161 _attributes$minimumUp = attributes.minimumUpdatePeriod,
14162 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp,
14163 _attributes$timeShift = attributes.timeShiftBufferDepth,
14164 timeShiftBufferDepth = _attributes$timeShift === void 0 ? Infinity : _attributes$timeShift;
14165 var now = (NOW + clientOffset) / 1000;
14166 var periodStartWC = availabilityStartTime + start;
14167 var periodEndWC = now + minimumUpdatePeriod;
14168 var periodDuration = periodEndWC - periodStartWC;
14169 var segmentCount = Math.ceil(periodDuration * timescale / duration);
14170 var availableStart = Math.floor((now - periodStartWC - timeShiftBufferDepth) * timescale / duration);
14171 var availableEnd = Math.floor((now - periodStartWC) * timescale / duration);
14172 return {
14173 start: Math.max(0, availableStart),
14174 end: Math.min(segmentCount, availableEnd)
14175 };
14176 }
14177 };
14178 /**
14179 * Maps a range of numbers to objects with information needed to build the corresponding
14180 * segment list
14181 *
14182 * @name toSegmentsCallback
14183 * @function
14184 * @param {number} number
14185 * Number of the segment
14186 * @param {number} index
14187 * Index of the number in the range list
14188 * @return {{ number: Number, duration: Number, timeline: Number, time: Number }}
14189 * Object with segment timing and duration info
14190 */
14191
14192 /**
14193 * Returns a callback for Array.prototype.map for mapping a range of numbers to
14194 * information needed to build the segment list.
14195 *
14196 * @param {Object} attributes
14197 * Inherited MPD attributes
14198 * @return {toSegmentsCallback}
14199 * Callback map function
14200 */
14201
14202 var toSegments = function toSegments(attributes) {
14203 return function (number, index) {
14204 var duration = attributes.duration,
14205 _attributes$timescale3 = attributes.timescale,
14206 timescale = _attributes$timescale3 === void 0 ? 1 : _attributes$timescale3,
14207 periodIndex = attributes.periodIndex,
14208 _attributes$startNumb = attributes.startNumber,
14209 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb;
14210 return {
14211 number: startNumber + number,
14212 duration: duration / timescale,
14213 timeline: periodIndex,
14214 time: index * duration
14215 };
14216 };
14217 };
14218 /**
14219 * Returns a list of objects containing segment timing and duration info used for
14220 * building the list of segments. This uses the @duration attribute specified
14221 * in the MPD manifest to derive the range of segments.
14222 *
14223 * @param {Object} attributes
14224 * Inherited MPD attributes
14225 * @return {{number: number, duration: number, time: number, timeline: number}[]}
14226 * List of Objects with segment timing and duration info
14227 */
14228
14229 var parseByDuration = function parseByDuration(attributes) {
14230 var _attributes$type = attributes.type,
14231 type = _attributes$type === void 0 ? 'static' : _attributes$type,
14232 duration = attributes.duration,
14233 _attributes$timescale4 = attributes.timescale,
14234 timescale = _attributes$timescale4 === void 0 ? 1 : _attributes$timescale4,
14235 sourceDuration = attributes.sourceDuration;
14236
14237 var _segmentRange$type = segmentRange[type](attributes),
14238 start = _segmentRange$type.start,
14239 end = _segmentRange$type.end;
14240
14241 var segments = range(start, end).map(toSegments(attributes));
14242
14243 if (type === 'static') {
14244 var index = segments.length - 1; // final segment may be less than full segment duration
14245
14246 segments[index].duration = sourceDuration - duration / timescale * index;
14247 }
14248
14249 return segments;
14250 };
14251
14252 /**
14253 * Translates SegmentBase into a set of segments.
14254 * (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
14255 * node should be translated into segment.
14256 *
14257 * @param {Object} attributes
14258 * Object containing all inherited attributes from parent elements with attribute
14259 * names as keys
14260 * @return {Object.<Array>} list of segments
14261 */
14262
  /**
   * Translates SegmentBase (DASH spec section 5.3.9.3.2) into a single
   * segment object (plus its init segment) in m3u8-parser form.
   *
   * @param {Object} attributes
   *        Object containing all inherited attributes from parent elements
   *        with attribute names as keys
   * @return {Object.<Array>} one-element list containing the segment
   * @throws {Error} errors.NO_BASE_URL when attributes.baseUrl is missing
   */
  var segmentsFromBase = function segmentsFromBase(attributes) {
    var baseUrl = attributes.baseUrl,
        _attributes$initializ = attributes.initialization,
        initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ,
        sourceDuration = attributes.sourceDuration,
        _attributes$timescale = attributes.timescale,
        timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
        _attributes$indexRang = attributes.indexRange,
        indexRange = _attributes$indexRang === void 0 ? '' : _attributes$indexRang,
        duration = attributes.duration; // base url is required for SegmentBase to work, per spec (Section 5.3.9.2.1)

    if (!baseUrl) {
      throw new Error(errors.NO_BASE_URL);
    }

    // The init segment comes from <Initialization>; the media segment is the
    // base url itself, optionally restricted to indexRange.
    var initSegment = urlTypeToSegment({
      baseUrl: baseUrl,
      source: initialization.sourceURL,
      range: initialization.range
    });
    var segment = urlTypeToSegment({
      baseUrl: baseUrl,
      source: baseUrl,
      indexRange: indexRange
    });
    segment.map = initSegment; // If there is a duration, use it, otherwise use the given duration of the source
    // (since SegmentBase is only for one total segment)

    if (duration) {
      var segmentTimeInfo = parseByDuration(attributes);

      if (segmentTimeInfo.length) {
        segment.duration = segmentTimeInfo[0].duration;
        segment.timeline = segmentTimeInfo[0].timeline;
      }
    } else if (sourceDuration) {
      segment.duration = sourceDuration / timescale;
      segment.timeline = 0;
    } // This is used for mediaSequence


    segment.number = 0;
    return [segment];
  };
14307 /**
14308 * Given a playlist, a sidx box, and a baseUrl, update the segment list of the playlist
14309 * according to the sidx information given.
14310 *
14311 * playlist.sidx has metadadata about the sidx where-as the sidx param
14312 * is the parsed sidx box itself.
14313 *
14314 * @param {Object} playlist the playlist to update the sidx information for
14315 * @param {Object} sidx the parsed sidx box
14316 * @return {Object} the playlist object with the updated sidx information
14317 */
14318
14319 var addSegmentsToPlaylist = function addSegmentsToPlaylist(playlist, sidx, baseUrl) {
14320 // Retain init segment information
14321 var initSegment = playlist.sidx.map ? playlist.sidx.map : null; // Retain source duration from initial master manifest parsing
14322
14323 var sourceDuration = playlist.sidx.duration; // Retain source timeline
14324
14325 var timeline = playlist.timeline || 0;
14326 var sidxByteRange = playlist.sidx.byterange;
14327 var sidxEnd = sidxByteRange.offset + sidxByteRange.length; // Retain timescale of the parsed sidx
14328
14329 var timescale = sidx.timescale; // referenceType 1 refers to other sidx boxes
14330
14331 var mediaReferences = sidx.references.filter(function (r) {
14332 return r.referenceType !== 1;
14333 });
14334 var segments = []; // firstOffset is the offset from the end of the sidx box
14335
14336 var startIndex = sidxEnd + sidx.firstOffset;
14337
14338 for (var i = 0; i < mediaReferences.length; i++) {
14339 var reference = sidx.references[i]; // size of the referenced (sub)segment
14340
14341 var size = reference.referencedSize; // duration of the referenced (sub)segment, in the timescale
14342 // this will be converted to seconds when generating segments
14343
14344 var duration = reference.subsegmentDuration; // should be an inclusive range
14345
14346 var endIndex = startIndex + size - 1;
14347 var indexRange = startIndex + "-" + endIndex;
14348 var attributes = {
14349 baseUrl: baseUrl,
14350 timescale: timescale,
14351 timeline: timeline,
14352 // this is used in parseByDuration
14353 periodIndex: timeline,
14354 duration: duration,
14355 sourceDuration: sourceDuration,
14356 indexRange: indexRange
14357 };
14358 var segment = segmentsFromBase(attributes)[0];
14359
14360 if (initSegment) {
14361 segment.map = initSegment;
14362 }
14363
14364 segments.push(segment);
14365 startIndex += size;
14366 }
14367
14368 playlist.segments = segments;
14369 return playlist;
14370 };
14371
14372 var mergeDiscontiguousPlaylists = function mergeDiscontiguousPlaylists(playlists) {
14373 var mergedPlaylists = values(playlists.reduce(function (acc, playlist) {
14374 // assuming playlist IDs are the same across periods
14375 // TODO: handle multiperiod where representation sets are not the same
14376 // across periods
14377 var name = playlist.attributes.id + (playlist.attributes.lang || ''); // Periods after first
14378
14379 if (acc[name]) {
14380 var _acc$name$segments;
14381
14382 // first segment of subsequent periods signal a discontinuity
14383 if (playlist.segments[0]) {
14384 playlist.segments[0].discontinuity = true;
14385 }
14386
14387 (_acc$name$segments = acc[name].segments).push.apply(_acc$name$segments, playlist.segments); // bubble up contentProtection, this assumes all DRM content
14388 // has the same contentProtection
14389
14390
14391 if (playlist.attributes.contentProtection) {
14392 acc[name].attributes.contentProtection = playlist.attributes.contentProtection;
14393 }
14394 } else {
14395 // first Period
14396 acc[name] = playlist;
14397 }
14398
14399 return acc;
14400 }, {}));
14401 return mergedPlaylists.map(function (playlist) {
14402 playlist.discontinuityStarts = findIndexes(playlist.segments, 'discontinuity');
14403 return playlist;
14404 });
14405 };
14406
14407 var addSegmentInfoFromSidx = function addSegmentInfoFromSidx(playlists, sidxMapping) {
14408 if (sidxMapping === void 0) {
14409 sidxMapping = {};
14410 }
14411
14412 if (!Object.keys(sidxMapping).length) {
14413 return playlists;
14414 }
14415
14416 for (var i in playlists) {
14417 var playlist = playlists[i];
14418
14419 if (!playlist.sidx) {
14420 continue;
14421 }
14422
14423 var sidxKey = playlist.sidx.uri + '-' + byteRangeToString(playlist.sidx.byterange);
14424 var sidxMatch = sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx;
14425
14426 if (playlist.sidx && sidxMatch) {
14427 addSegmentsToPlaylist(playlist, sidxMatch, playlist.sidx.resolvedUri);
14428 }
14429 }
14430
14431 return playlists;
14432 };
14433
14434 var formatAudioPlaylist = function formatAudioPlaylist(_ref) {
14435 var _attributes;
14436
14437 var attributes = _ref.attributes,
14438 segments = _ref.segments,
14439 sidx = _ref.sidx;
14440 var playlist = {
14441 attributes: (_attributes = {
14442 NAME: attributes.id,
14443 BANDWIDTH: attributes.bandwidth,
14444 CODECS: attributes.codecs
14445 }, _attributes['PROGRAM-ID'] = 1, _attributes),
14446 uri: '',
14447 endList: (attributes.type || 'static') === 'static',
14448 timeline: attributes.periodIndex,
14449 resolvedUri: '',
14450 targetDuration: attributes.duration,
14451 segments: segments,
14452 mediaSequence: segments.length ? segments[0].number : 1
14453 };
14454
14455 if (attributes.contentProtection) {
14456 playlist.contentProtection = attributes.contentProtection;
14457 }
14458
14459 if (sidx) {
14460 playlist.sidx = sidx;
14461 }
14462
14463 return playlist;
14464 };
14465 var formatVttPlaylist = function formatVttPlaylist(_ref2) {
14466 var _attributes2;
14467
14468 var attributes = _ref2.attributes,
14469 segments = _ref2.segments;
14470
14471 if (typeof segments === 'undefined') {
14472 // vtt tracks may use single file in BaseURL
14473 segments = [{
14474 uri: attributes.baseUrl,
14475 timeline: attributes.periodIndex,
14476 resolvedUri: attributes.baseUrl || '',
14477 duration: attributes.sourceDuration,
14478 number: 0
14479 }]; // targetDuration should be the same duration as the only segment
14480
14481 attributes.duration = attributes.sourceDuration;
14482 }
14483
14484 return {
14485 attributes: (_attributes2 = {
14486 NAME: attributes.id,
14487 BANDWIDTH: attributes.bandwidth
14488 }, _attributes2['PROGRAM-ID'] = 1, _attributes2),
14489 uri: '',
14490 endList: (attributes.type || 'static') === 'static',
14491 timeline: attributes.periodIndex,
14492 resolvedUri: attributes.baseUrl || '',
14493 targetDuration: attributes.duration,
14494 segments: segments,
14495 mediaSequence: segments.length ? segments[0].number : 1
14496 };
14497 };
14498 var organizeAudioPlaylists = function organizeAudioPlaylists(playlists, sidxMapping) {
14499 if (sidxMapping === void 0) {
14500 sidxMapping = {};
14501 }
14502
14503 var mainPlaylist;
14504 var formattedPlaylists = playlists.reduce(function (a, playlist) {
14505 var role = playlist.attributes.role && playlist.attributes.role.value || '';
14506 var language = playlist.attributes.lang || '';
14507 var label = 'main';
14508
14509 if (language) {
14510 var roleLabel = role ? " (" + role + ")" : '';
14511 label = "" + playlist.attributes.lang + roleLabel;
14512 } // skip if we already have the highest quality audio for a language
14513
14514
14515 if (a[label] && a[label].playlists[0].attributes.BANDWIDTH > playlist.attributes.bandwidth) {
14516 return a;
14517 }
14518
14519 a[label] = {
14520 language: language,
14521 autoselect: true,
14522 default: role === 'main',
14523 playlists: addSegmentInfoFromSidx([formatAudioPlaylist(playlist)], sidxMapping),
14524 uri: ''
14525 };
14526
14527 if (typeof mainPlaylist === 'undefined' && role === 'main') {
14528 mainPlaylist = playlist;
14529 mainPlaylist.default = true;
14530 }
14531
14532 return a;
14533 }, {}); // if no playlists have role "main", mark the first as main
14534
14535 if (!mainPlaylist) {
14536 var firstLabel = Object.keys(formattedPlaylists)[0];
14537 formattedPlaylists[firstLabel].default = true;
14538 }
14539
14540 return formattedPlaylists;
14541 };
14542 var organizeVttPlaylists = function organizeVttPlaylists(playlists, sidxMapping) {
14543 if (sidxMapping === void 0) {
14544 sidxMapping = {};
14545 }
14546
14547 return playlists.reduce(function (a, playlist) {
14548 var label = playlist.attributes.lang || 'text'; // skip if we already have subtitles
14549
14550 if (a[label]) {
14551 return a;
14552 }
14553
14554 a[label] = {
14555 language: label,
14556 default: false,
14557 autoselect: false,
14558 playlists: addSegmentInfoFromSidx([formatVttPlaylist(playlist)], sidxMapping),
14559 uri: ''
14560 };
14561 return a;
14562 }, {});
14563 };
14564 var formatVideoPlaylist = function formatVideoPlaylist(_ref3) {
14565 var _attributes3;
14566
14567 var attributes = _ref3.attributes,
14568 segments = _ref3.segments,
14569 sidx = _ref3.sidx;
14570 var playlist = {
14571 attributes: (_attributes3 = {
14572 NAME: attributes.id,
14573 AUDIO: 'audio',
14574 SUBTITLES: 'subs',
14575 RESOLUTION: {
14576 width: attributes.width,
14577 height: attributes.height
14578 },
14579 CODECS: attributes.codecs,
14580 BANDWIDTH: attributes.bandwidth
14581 }, _attributes3['PROGRAM-ID'] = 1, _attributes3),
14582 uri: '',
14583 endList: (attributes.type || 'static') === 'static',
14584 timeline: attributes.periodIndex,
14585 resolvedUri: '',
14586 targetDuration: attributes.duration,
14587 segments: segments,
14588 mediaSequence: segments.length ? segments[0].number : 1
14589 };
14590
14591 if (attributes.contentProtection) {
14592 playlist.contentProtection = attributes.contentProtection;
14593 }
14594
14595 if (sidx) {
14596 playlist.sidx = sidx;
14597 }
14598
14599 return playlist;
14600 };
  /**
   * Converts the flat list of parsed DASH playlists into an m3u8-parser style
   * master playlist object, splitting renditions into top-level video
   * playlists and AUDIO/SUBTITLES media groups by mimeType/contentType.
   *
   * @param {Object[]} dashPlaylists parsed playlists (empty input yields {})
   * @param {Object} [sidxMapping] sidx lookup used to expand segment lists
   * @return {Object} master playlist object
   */
  var toM3u8 = function toM3u8(dashPlaylists, sidxMapping) {
    var _mediaGroups;

    if (sidxMapping === void 0) {
      sidxMapping = {};
    }

    if (!dashPlaylists.length) {
      return {};
    } // grab all master attributes


    var _dashPlaylists$0$attr = dashPlaylists[0].attributes,
        duration = _dashPlaylists$0$attr.sourceDuration,
        _dashPlaylists$0$attr2 = _dashPlaylists$0$attr.type,
        type = _dashPlaylists$0$attr2 === void 0 ? 'static' : _dashPlaylists$0$attr2,
        suggestedPresentationDelay = _dashPlaylists$0$attr.suggestedPresentationDelay,
        _dashPlaylists$0$attr3 = _dashPlaylists$0$attr.minimumUpdatePeriod,
        minimumUpdatePeriod = _dashPlaylists$0$attr3 === void 0 ? 0 : _dashPlaylists$0$attr3;

    // Predicates classifying a playlist by mimeType/contentType.
    var videoOnly = function videoOnly(_ref4) {
      var attributes = _ref4.attributes;
      return attributes.mimeType === 'video/mp4' || attributes.contentType === 'video';
    };

    var audioOnly = function audioOnly(_ref5) {
      var attributes = _ref5.attributes;
      return attributes.mimeType === 'audio/mp4' || attributes.contentType === 'audio';
    };

    var vttOnly = function vttOnly(_ref6) {
      var attributes = _ref6.attributes;
      return attributes.mimeType === 'text/vtt' || attributes.contentType === 'text';
    };

    // Video playlists are merged across periods and become the top-level
    // playlists; audio/vtt renditions become media groups below.
    var videoPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(videoOnly)).map(formatVideoPlaylist);
    var audioPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(audioOnly));
    var vttPlaylists = dashPlaylists.filter(vttOnly);
    var master = {
      allowCache: true,
      discontinuityStarts: [],
      segments: [],
      endList: true,
      mediaGroups: (_mediaGroups = {
        AUDIO: {},
        VIDEO: {}
      }, _mediaGroups['CLOSED-CAPTIONS'] = {}, _mediaGroups.SUBTITLES = {}, _mediaGroups),
      uri: '',
      duration: duration,
      playlists: addSegmentInfoFromSidx(videoPlaylists, sidxMapping),
      minimumUpdatePeriod: minimumUpdatePeriod * 1000 // seconds -> milliseconds
    };

    if (type === 'dynamic') {
      master.suggestedPresentationDelay = suggestedPresentationDelay;
    }

    if (audioPlaylists.length) {
      master.mediaGroups.AUDIO.audio = organizeAudioPlaylists(audioPlaylists, sidxMapping);
    }

    if (vttPlaylists.length) {
      master.mediaGroups.SUBTITLES.subs = organizeVttPlaylists(vttPlaylists, sidxMapping);
    }

    return master;
  };
14668
14669 /**
14670 * Calculates the R (repetition) value for a live stream (for the final segment
14671 * in a manifest where the r value is negative 1)
14672 *
14673 * @param {Object} attributes
14674 * Object containing all inherited attributes from parent elements with attribute
14675 * names as keys
14676 * @param {number} time
14677 * current time (typically the total time up until the final segment)
14678 * @param {number} duration
14679 * duration property for the given <S />
14680 *
14681 * @return {number}
14682 * R value to reach the end of the given period
14683 */
14684 var getLiveRValue = function getLiveRValue(attributes, time, duration) {
14685 var NOW = attributes.NOW,
14686 clientOffset = attributes.clientOffset,
14687 availabilityStartTime = attributes.availabilityStartTime,
14688 _attributes$timescale = attributes.timescale,
14689 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
14690 _attributes$start = attributes.start,
14691 start = _attributes$start === void 0 ? 0 : _attributes$start,
14692 _attributes$minimumUp = attributes.minimumUpdatePeriod,
14693 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp;
14694 var now = (NOW + clientOffset) / 1000;
14695 var periodStartWC = availabilityStartTime + start;
14696 var periodEndWC = now + minimumUpdatePeriod;
14697 var periodDuration = periodEndWC - periodStartWC;
14698 return Math.ceil((periodDuration * timescale - time) / duration);
14699 };
14700 /**
14701 * Uses information provided by SegmentTemplate.SegmentTimeline to determine segment
14702 * timing and duration
14703 *
14704 * @param {Object} attributes
14705 * Object containing all inherited attributes from parent elements with attribute
14706 * names as keys
14707 * @param {Object[]} segmentTimeline
14708 * List of objects representing the attributes of each S element contained within
14709 *
14710 * @return {{number: number, duration: number, time: number, timeline: number}[]}
14711 * List of Objects with segment timing and duration info
14712 */
14713
14714 var parseByTimeline = function parseByTimeline(attributes, segmentTimeline) {
14715 var _attributes$type = attributes.type,
14716 type = _attributes$type === void 0 ? 'static' : _attributes$type,
14717 _attributes$minimumUp2 = attributes.minimumUpdatePeriod,
14718 minimumUpdatePeriod = _attributes$minimumUp2 === void 0 ? 0 : _attributes$minimumUp2,
14719 _attributes$media = attributes.media,
14720 media = _attributes$media === void 0 ? '' : _attributes$media,
14721 sourceDuration = attributes.sourceDuration,
14722 _attributes$timescale2 = attributes.timescale,
14723 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
14724 _attributes$startNumb = attributes.startNumber,
14725 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb,
14726 timeline = attributes.periodIndex;
14727 var segments = [];
14728 var time = -1;
14729
14730 for (var sIndex = 0; sIndex < segmentTimeline.length; sIndex++) {
14731 var S = segmentTimeline[sIndex];
14732 var duration = S.d;
14733 var repeat = S.r || 0;
14734 var segmentTime = S.t || 0;
14735
14736 if (time < 0) {
14737 // first segment
14738 time = segmentTime;
14739 }
14740
14741 if (segmentTime && segmentTime > time) {
14742 // discontinuity
14743 // TODO: How to handle this type of discontinuity
14744 // timeline++ here would treat it like HLS discontuity and content would
14745 // get appended without gap
14746 // E.G.
14747 // <S t="0" d="1" />
14748 // <S d="1" />
14749 // <S d="1" />
14750 // <S t="5" d="1" />
14751 // would have $Time$ values of [0, 1, 2, 5]
14752 // should this be appened at time positions [0, 1, 2, 3],(#EXT-X-DISCONTINUITY)
14753 // or [0, 1, 2, gap, gap, 5]? (#EXT-X-GAP)
14754 // does the value of sourceDuration consider this when calculating arbitrary
14755 // negative @r repeat value?
14756 // E.G. Same elements as above with this added at the end
14757 // <S d="1" r="-1" />
14758 // with a sourceDuration of 10
14759 // Would the 2 gaps be included in the time duration calculations resulting in
14760 // 8 segments with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9] or 10 segments
14761 // with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9, 10, 11] ?
14762 time = segmentTime;
14763 }
14764
14765 var count = void 0;
14766
14767 if (repeat < 0) {
14768 var nextS = sIndex + 1;
14769
14770 if (nextS === segmentTimeline.length) {
14771 // last segment
14772 if (type === 'dynamic' && minimumUpdatePeriod > 0 && media.indexOf('$Number$') > 0) {
14773 count = getLiveRValue(attributes, time, duration);
14774 } else {
14775 // TODO: This may be incorrect depending on conclusion of TODO above
14776 count = (sourceDuration * timescale - time) / duration;
14777 }
14778 } else {
14779 count = (segmentTimeline[nextS].t - time) / duration;
14780 }
14781 } else {
14782 count = repeat + 1;
14783 }
14784
14785 var end = startNumber + segments.length + count;
14786 var number = startNumber + segments.length;
14787
14788 while (number < end) {
14789 segments.push({
14790 number: number,
14791 duration: duration / timescale,
14792 time: time,
14793 timeline: timeline
14794 });
14795 time += duration;
14796 number++;
14797 }
14798 }
14799
14800 return segments;
14801 };
14802
14803 var identifierPattern = /\$([A-z]*)(?:(%0)([0-9]+)d)?\$/g;
14804 /**
14805 * Replaces template identifiers with corresponding values. To be used as the callback
14806 * for String.prototype.replace
14807 *
14808 * @name replaceCallback
14809 * @function
14810 * @param {string} match
14811 * Entire match of identifier
14812 * @param {string} identifier
14813 * Name of matched identifier
14814 * @param {string} format
14815 * Format tag string. Its presence indicates that padding is expected
14816 * @param {string} width
14817 * Desired length of the replaced value. Values less than this width shall be left
14818 * zero padded
14819 * @return {string}
14820 * Replacement for the matched identifier
14821 */
14822
14823 /**
14824 * Returns a function to be used as a callback for String.prototype.replace to replace
14825 * template identifiers
14826 *
14827 * @param {Obect} values
14828 * Object containing values that shall be used to replace known identifiers
14829 * @param {number} values.RepresentationID
14830 * Value of the Representation@id attribute
14831 * @param {number} values.Number
14832 * Number of the corresponding segment
14833 * @param {number} values.Bandwidth
14834 * Value of the Representation@bandwidth attribute.
14835 * @param {number} values.Time
14836 * Timestamp value of the corresponding segment
14837 * @return {replaceCallback}
14838 * Callback to be used with String.prototype.replace to replace identifiers
14839 */
14840
14841 var identifierReplacement = function identifierReplacement(values) {
14842 return function (match, identifier, format, width) {
14843 if (match === '$$') {
14844 // escape sequence
14845 return '$';
14846 }
14847
14848 if (typeof values[identifier] === 'undefined') {
14849 return match;
14850 }
14851
14852 var value = '' + values[identifier];
14853
14854 if (identifier === 'RepresentationID') {
14855 // Format tag shall not be present with RepresentationID
14856 return value;
14857 }
14858
14859 if (!format) {
14860 width = 1;
14861 } else {
14862 width = parseInt(width, 10);
14863 }
14864
14865 if (value.length >= width) {
14866 return value;
14867 }
14868
14869 return "" + new Array(width - value.length + 1).join('0') + value;
14870 };
14871 };
14872 /**
14873 * Constructs a segment url from a template string
14874 *
14875 * @param {string} url
14876 * Template string to construct url from
14877 * @param {Obect} values
14878 * Object containing values that shall be used to replace known identifiers
14879 * @param {number} values.RepresentationID
14880 * Value of the Representation@id attribute
14881 * @param {number} values.Number
14882 * Number of the corresponding segment
14883 * @param {number} values.Bandwidth
14884 * Value of the Representation@bandwidth attribute.
14885 * @param {number} values.Time
14886 * Timestamp value of the corresponding segment
14887 * @return {string}
14888 * Segment url with identifiers replaced
14889 */
14890
14891 var constructTemplateUrl = function constructTemplateUrl(url, values) {
14892 return url.replace(identifierPattern, identifierReplacement(values));
14893 };
14894 /**
14895 * Generates a list of objects containing timing and duration information about each
14896 * segment needed to generate segment uris and the complete segment object
14897 *
14898 * @param {Object} attributes
14899 * Object containing all inherited attributes from parent elements with attribute
14900 * names as keys
14901 * @param {Object[]|undefined} segmentTimeline
14902 * List of objects representing the attributes of each S element contained within
14903 * the SegmentTimeline element
14904 * @return {{number: number, duration: number, time: number, timeline: number}[]}
14905 * List of Objects with segment timing and duration info
14906 */
14907
14908 var parseTemplateInfo = function parseTemplateInfo(attributes, segmentTimeline) {
14909 if (!attributes.duration && !segmentTimeline) {
14910 // if neither @duration or SegmentTimeline are present, then there shall be exactly
14911 // one media segment
14912 return [{
14913 number: attributes.startNumber || 1,
14914 duration: attributes.sourceDuration,
14915 time: 0,
14916 timeline: attributes.periodIndex
14917 }];
14918 }
14919
14920 if (attributes.duration) {
14921 return parseByDuration(attributes);
14922 }
14923
14924 return parseByTimeline(attributes, segmentTimeline);
14925 };
14926 /**
14927 * Generates a list of segments using information provided by the SegmentTemplate element
14928 *
14929 * @param {Object} attributes
14930 * Object containing all inherited attributes from parent elements with attribute
14931 * names as keys
14932 * @param {Object[]|undefined} segmentTimeline
14933 * List of objects representing the attributes of each S element contained within
14934 * the SegmentTimeline element
14935 * @return {Object[]}
14936 * List of segment objects
14937 */
14938
14939 var segmentsFromTemplate = function segmentsFromTemplate(attributes, segmentTimeline) {
14940 var templateValues = {
14941 RepresentationID: attributes.id,
14942 Bandwidth: attributes.bandwidth || 0
14943 };
14944 var _attributes$initializ = attributes.initialization,
14945 initialization = _attributes$initializ === void 0 ? {
14946 sourceURL: '',
14947 range: ''
14948 } : _attributes$initializ;
14949 var mapSegment = urlTypeToSegment({
14950 baseUrl: attributes.baseUrl,
14951 source: constructTemplateUrl(initialization.sourceURL, templateValues),
14952 range: initialization.range
14953 });
14954 var segments = parseTemplateInfo(attributes, segmentTimeline);
14955 return segments.map(function (segment) {
14956 templateValues.Number = segment.number;
14957 templateValues.Time = segment.time;
14958 var uri = constructTemplateUrl(attributes.media || '', templateValues);
14959 return {
14960 uri: uri,
14961 timeline: segment.timeline,
14962 duration: segment.duration,
14963 resolvedUri: resolveUrl_1(attributes.baseUrl || '', uri),
14964 map: mapSegment,
14965 number: segment.number
14966 };
14967 });
14968 };
14969
14970 /**
14971 * Converts a <SegmentUrl> (of type URLType from the DASH spec 5.3.9.2 Table 14)
14972 * to an object that matches the output of a segment in videojs/mpd-parser
14973 *
14974 * @param {Object} attributes
14975 * Object containing all inherited attributes from parent elements with attribute
14976 * names as keys
14977 * @param {Object} segmentUrl
14978 * <SegmentURL> node to translate into a segment object
14979 * @return {Object} translated segment object
14980 */
14981
14982 var SegmentURLToSegmentObject = function SegmentURLToSegmentObject(attributes, segmentUrl) {
14983 var baseUrl = attributes.baseUrl,
14984 _attributes$initializ = attributes.initialization,
14985 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ;
14986 var initSegment = urlTypeToSegment({
14987 baseUrl: baseUrl,
14988 source: initialization.sourceURL,
14989 range: initialization.range
14990 });
14991 var segment = urlTypeToSegment({
14992 baseUrl: baseUrl,
14993 source: segmentUrl.media,
14994 range: segmentUrl.mediaRange
14995 });
14996 segment.map = initSegment;
14997 return segment;
14998 };
14999 /**
15000 * Generates a list of segments using information provided by the SegmentList element
15001 * SegmentList (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
15002 * node should be translated into segment.
15003 *
15004 * @param {Object} attributes
15005 * Object containing all inherited attributes from parent elements with attribute
15006 * names as keys
15007 * @param {Object[]|undefined} segmentTimeline
15008 * List of objects representing the attributes of each S element contained within
15009 * the SegmentTimeline element
15010 * @return {Object.<Array>} list of segments
15011 */
15012
15013 var segmentsFromList = function segmentsFromList(attributes, segmentTimeline) {
15014 var duration = attributes.duration,
15015 _attributes$segmentUr = attributes.segmentUrls,
15016 segmentUrls = _attributes$segmentUr === void 0 ? [] : _attributes$segmentUr; // Per spec (5.3.9.2.1) no way to determine segment duration OR
15017 // if both SegmentTimeline and @duration are defined, it is outside of spec.
15018
15019 if (!duration && !segmentTimeline || duration && segmentTimeline) {
15020 throw new Error(errors.SEGMENT_TIME_UNSPECIFIED);
15021 }
15022
15023 var segmentUrlMap = segmentUrls.map(function (segmentUrlObject) {
15024 return SegmentURLToSegmentObject(attributes, segmentUrlObject);
15025 });
15026 var segmentTimeInfo;
15027
15028 if (duration) {
15029 segmentTimeInfo = parseByDuration(attributes);
15030 }
15031
15032 if (segmentTimeline) {
15033 segmentTimeInfo = parseByTimeline(attributes, segmentTimeline);
15034 }
15035
15036 var segments = segmentTimeInfo.map(function (segmentTime, index) {
15037 if (segmentUrlMap[index]) {
15038 var segment = segmentUrlMap[index];
15039 segment.timeline = segmentTime.timeline;
15040 segment.duration = segmentTime.duration;
15041 segment.number = segmentTime.number;
15042 return segment;
15043 } // Since we're mapping we should get rid of any blank segments (in case
15044 // the given SegmentTimeline is handling for more elements than we have
15045 // SegmentURLs for).
15046 }).filter(function (segment) {
15047 return segment;
15048 });
15049 return segments;
15050 };
15051
15052 var generateSegments = function generateSegments(_ref) {
15053 var attributes = _ref.attributes,
15054 segmentInfo = _ref.segmentInfo;
15055 var segmentAttributes;
15056 var segmentsFn;
15057
15058 if (segmentInfo.template) {
15059 segmentsFn = segmentsFromTemplate;
15060 segmentAttributes = merge(attributes, segmentInfo.template);
15061 } else if (segmentInfo.base) {
15062 segmentsFn = segmentsFromBase;
15063 segmentAttributes = merge(attributes, segmentInfo.base);
15064 } else if (segmentInfo.list) {
15065 segmentsFn = segmentsFromList;
15066 segmentAttributes = merge(attributes, segmentInfo.list);
15067 }
15068
15069 var segmentsInfo = {
15070 attributes: attributes
15071 };
15072
15073 if (!segmentsFn) {
15074 return segmentsInfo;
15075 }
15076
15077 var segments = segmentsFn(segmentAttributes, segmentInfo.timeline); // The @duration attribute will be used to determin the playlist's targetDuration which
15078 // must be in seconds. Since we've generated the segment list, we no longer need
15079 // @duration to be in @timescale units, so we can convert it here.
15080
15081 if (segmentAttributes.duration) {
15082 var _segmentAttributes = segmentAttributes,
15083 duration = _segmentAttributes.duration,
15084 _segmentAttributes$ti = _segmentAttributes.timescale,
15085 timescale = _segmentAttributes$ti === void 0 ? 1 : _segmentAttributes$ti;
15086 segmentAttributes.duration = duration / timescale;
15087 } else if (segments.length) {
15088 // if there is no @duration attribute, use the largest segment duration as
15089 // as target duration
15090 segmentAttributes.duration = segments.reduce(function (max, segment) {
15091 return Math.max(max, Math.ceil(segment.duration));
15092 }, 0);
15093 } else {
15094 segmentAttributes.duration = 0;
15095 }
15096
15097 segmentsInfo.attributes = segmentAttributes;
15098 segmentsInfo.segments = segments; // This is a sidx box without actual segment information
15099
15100 if (segmentInfo.base && segmentAttributes.indexRange) {
15101 segmentsInfo.sidx = segments[0];
15102 segmentsInfo.segments = [];
15103 }
15104
15105 return segmentsInfo;
15106 };
15107 var toPlaylists = function toPlaylists(representations) {
15108 return representations.map(generateSegments);
15109 };
15110
15111 var findChildren = function findChildren(element, name) {
15112 return from(element.childNodes).filter(function (_ref) {
15113 var tagName = _ref.tagName;
15114 return tagName === name;
15115 });
15116 };
15117 var getContent = function getContent(element) {
15118 return element.textContent.trim();
15119 };
15120
15121 var parseDuration = function parseDuration(str) {
15122 var SECONDS_IN_YEAR = 365 * 24 * 60 * 60;
15123 var SECONDS_IN_MONTH = 30 * 24 * 60 * 60;
15124 var SECONDS_IN_DAY = 24 * 60 * 60;
15125 var SECONDS_IN_HOUR = 60 * 60;
15126 var SECONDS_IN_MIN = 60; // P10Y10M10DT10H10M10.1S
15127
15128 var durationRegex = /P(?:(\d*)Y)?(?:(\d*)M)?(?:(\d*)D)?(?:T(?:(\d*)H)?(?:(\d*)M)?(?:([\d.]*)S)?)?/;
15129 var match = durationRegex.exec(str);
15130
15131 if (!match) {
15132 return 0;
15133 }
15134
15135 var _match$slice = match.slice(1),
15136 year = _match$slice[0],
15137 month = _match$slice[1],
15138 day = _match$slice[2],
15139 hour = _match$slice[3],
15140 minute = _match$slice[4],
15141 second = _match$slice[5];
15142
15143 return parseFloat(year || 0) * SECONDS_IN_YEAR + parseFloat(month || 0) * SECONDS_IN_MONTH + parseFloat(day || 0) * SECONDS_IN_DAY + parseFloat(hour || 0) * SECONDS_IN_HOUR + parseFloat(minute || 0) * SECONDS_IN_MIN + parseFloat(second || 0);
15144 };
15145 var parseDate = function parseDate(str) {
15146 // Date format without timezone according to ISO 8601
15147 // YYY-MM-DDThh:mm:ss.ssssss
15148 var dateRegex = /^\d+-\d+-\d+T\d+:\d+:\d+(\.\d+)?$/; // If the date string does not specifiy a timezone, we must specifiy UTC. This is
15149 // expressed by ending with 'Z'
15150
15151 if (dateRegex.test(str)) {
15152 str += 'Z';
15153 }
15154
15155 return Date.parse(str);
15156 };
15157
15158 var parsers = {
15159 /**
15160 * Specifies the duration of the entire Media Presentation. Format is a duration string
15161 * as specified in ISO 8601
15162 *
15163 * @param {string} value
15164 * value of attribute as a string
15165 * @return {number}
15166 * The duration in seconds
15167 */
15168 mediaPresentationDuration: function mediaPresentationDuration(value) {
15169 return parseDuration(value);
15170 },
15171
15172 /**
15173 * Specifies the Segment availability start time for all Segments referred to in this
15174 * MPD. For a dynamic manifest, it specifies the anchor for the earliest availability
15175 * time. Format is a date string as specified in ISO 8601
15176 *
15177 * @param {string} value
15178 * value of attribute as a string
15179 * @return {number}
15180 * The date as seconds from unix epoch
15181 */
15182 availabilityStartTime: function availabilityStartTime(value) {
15183 return parseDate(value) / 1000;
15184 },
15185
15186 /**
15187 * Specifies the smallest period between potential changes to the MPD. Format is a
15188 * duration string as specified in ISO 8601
15189 *
15190 * @param {string} value
15191 * value of attribute as a string
15192 * @return {number}
15193 * The duration in seconds
15194 */
15195 minimumUpdatePeriod: function minimumUpdatePeriod(value) {
15196 return parseDuration(value);
15197 },
15198
15199 /**
15200 * Specifies the suggested presentation delay. Format is a
15201 * duration string as specified in ISO 8601
15202 *
15203 * @param {string} value
15204 * value of attribute as a string
15205 * @return {number}
15206 * The duration in seconds
15207 */
15208 suggestedPresentationDelay: function suggestedPresentationDelay(value) {
15209 return parseDuration(value);
15210 },
15211
15212 /**
15213 * specifices the type of mpd. Can be either "static" or "dynamic"
15214 *
15215 * @param {string} value
15216 * value of attribute as a string
15217 *
15218 * @return {string}
15219 * The type as a string
15220 */
15221 type: function type(value) {
15222 return value;
15223 },
15224
15225 /**
15226 * Specifies the duration of the smallest time shifting buffer for any Representation
15227 * in the MPD. Format is a duration string as specified in ISO 8601
15228 *
15229 * @param {string} value
15230 * value of attribute as a string
15231 * @return {number}
15232 * The duration in seconds
15233 */
15234 timeShiftBufferDepth: function timeShiftBufferDepth(value) {
15235 return parseDuration(value);
15236 },
15237
15238 /**
15239 * Specifies the PeriodStart time of the Period relative to the availabilityStarttime.
15240 * Format is a duration string as specified in ISO 8601
15241 *
15242 * @param {string} value
15243 * value of attribute as a string
15244 * @return {number}
15245 * The duration in seconds
15246 */
15247 start: function start(value) {
15248 return parseDuration(value);
15249 },
15250
15251 /**
15252 * Specifies the width of the visual presentation
15253 *
15254 * @param {string} value
15255 * value of attribute as a string
15256 * @return {number}
15257 * The parsed width
15258 */
15259 width: function width(value) {
15260 return parseInt(value, 10);
15261 },
15262
15263 /**
15264 * Specifies the height of the visual presentation
15265 *
15266 * @param {string} value
15267 * value of attribute as a string
15268 * @return {number}
15269 * The parsed height
15270 */
15271 height: function height(value) {
15272 return parseInt(value, 10);
15273 },
15274
15275 /**
15276 * Specifies the bitrate of the representation
15277 *
15278 * @param {string} value
15279 * value of attribute as a string
15280 * @return {number}
15281 * The parsed bandwidth
15282 */
15283 bandwidth: function bandwidth(value) {
15284 return parseInt(value, 10);
15285 },
15286
15287 /**
15288 * Specifies the number of the first Media Segment in this Representation in the Period
15289 *
15290 * @param {string} value
15291 * value of attribute as a string
15292 * @return {number}
15293 * The parsed number
15294 */
15295 startNumber: function startNumber(value) {
15296 return parseInt(value, 10);
15297 },
15298
15299 /**
15300 * Specifies the timescale in units per seconds
15301 *
15302 * @param {string} value
15303 * value of attribute as a string
15304 * @return {number}
15305 * The aprsed timescale
15306 */
15307 timescale: function timescale(value) {
15308 return parseInt(value, 10);
15309 },
15310
15311 /**
15312 * Specifies the constant approximate Segment duration
15313 * NOTE: The <Period> element also contains an @duration attribute. This duration
15314 * specifies the duration of the Period. This attribute is currently not
15315 * supported by the rest of the parser, however we still check for it to prevent
15316 * errors.
15317 *
15318 * @param {string} value
15319 * value of attribute as a string
15320 * @return {number}
15321 * The parsed duration
15322 */
15323 duration: function duration(value) {
15324 var parsedValue = parseInt(value, 10);
15325
15326 if (isNaN(parsedValue)) {
15327 return parseDuration(value);
15328 }
15329
15330 return parsedValue;
15331 },
15332
15333 /**
15334 * Specifies the Segment duration, in units of the value of the @timescale.
15335 *
15336 * @param {string} value
15337 * value of attribute as a string
15338 * @return {number}
15339 * The parsed duration
15340 */
15341 d: function d(value) {
15342 return parseInt(value, 10);
15343 },
15344
15345 /**
15346 * Specifies the MPD start time, in @timescale units, the first Segment in the series
15347 * starts relative to the beginning of the Period
15348 *
15349 * @param {string} value
15350 * value of attribute as a string
15351 * @return {number}
15352 * The parsed time
15353 */
15354 t: function t(value) {
15355 return parseInt(value, 10);
15356 },
15357
15358 /**
15359 * Specifies the repeat count of the number of following contiguous Segments with the
15360 * same duration expressed by the value of @d
15361 *
15362 * @param {string} value
15363 * value of attribute as a string
15364 * @return {number}
15365 * The parsed number
15366 */
15367 r: function r(value) {
15368 return parseInt(value, 10);
15369 },
15370
15371 /**
15372 * Default parser for all other attributes. Acts as a no-op and just returns the value
15373 * as a string
15374 *
15375 * @param {string} value
15376 * value of attribute as a string
15377 * @return {string}
15378 * Unparsed value
15379 */
15380 DEFAULT: function DEFAULT(value) {
15381 return value;
15382 }
15383 };
15384 /**
15385 * Gets all the attributes and values of the provided node, parses attributes with known
15386 * types, and returns an object with attribute names mapped to values.
15387 *
15388 * @param {Node} el
15389 * The node to parse attributes from
15390 * @return {Object}
15391 * Object with all attributes of el parsed
15392 */
15393
15394 var parseAttributes$1 = function parseAttributes(el) {
15395 if (!(el && el.attributes)) {
15396 return {};
15397 }
15398
15399 return from(el.attributes).reduce(function (a, e) {
15400 var parseFn = parsers[e.name] || parsers.DEFAULT;
15401 a[e.name] = parseFn(e.value);
15402 return a;
15403 }, {});
15404 };
15405
  // Maps DASH ContentProtection @schemeIdUri UUID urns to key system
  // identifier strings (looked up against attributes.schemeIdUri when
  // building content protection info).
  var keySystemsMap = {
    'urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b': 'org.w3.clearkey',
    'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed': 'com.widevine.alpha',
    'urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95': 'com.microsoft.playready',
    'urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb': 'com.adobe.primetime'
  };
15412 /**
15413 * Builds a list of urls that is the product of the reference urls and BaseURL values
15414 *
15415 * @param {string[]} referenceUrls
15416 * List of reference urls to resolve to
15417 * @param {Node[]} baseUrlElements
15418 * List of BaseURL nodes from the mpd
15419 * @return {string[]}
15420 * List of resolved urls
15421 */
15422
15423 var buildBaseUrls = function buildBaseUrls(referenceUrls, baseUrlElements) {
15424 if (!baseUrlElements.length) {
15425 return referenceUrls;
15426 }
15427
15428 return flatten(referenceUrls.map(function (reference) {
15429 return baseUrlElements.map(function (baseUrlElement) {
15430 return resolveUrl_1(reference, getContent(baseUrlElement));
15431 });
15432 }));
15433 };
15434 /**
15435 * Contains all Segment information for its containing AdaptationSet
15436 *
15437 * @typedef {Object} SegmentInformation
15438 * @property {Object|undefined} template
15439 * Contains the attributes for the SegmentTemplate node
15440 * @property {Object[]|undefined} timeline
15441 * Contains a list of atrributes for each S node within the SegmentTimeline node
15442 * @property {Object|undefined} list
15443 * Contains the attributes for the SegmentList node
15444 * @property {Object|undefined} base
15445 * Contains the attributes for the SegmentBase node
15446 */
15447
15448 /**
15449 * Returns all available Segment information contained within the AdaptationSet node
15450 *
15451 * @param {Node} adaptationSet
15452 * The AdaptationSet node to get Segment information from
15453 * @return {SegmentInformation}
15454 * The Segment information contained within the provided AdaptationSet
15455 */
15456
15457 var getSegmentInformation = function getSegmentInformation(adaptationSet) {
15458 var segmentTemplate = findChildren(adaptationSet, 'SegmentTemplate')[0];
15459 var segmentList = findChildren(adaptationSet, 'SegmentList')[0];
15460 var segmentUrls = segmentList && findChildren(segmentList, 'SegmentURL').map(function (s) {
15461 return merge({
15462 tag: 'SegmentURL'
15463 }, parseAttributes$1(s));
15464 });
15465 var segmentBase = findChildren(adaptationSet, 'SegmentBase')[0];
15466 var segmentTimelineParentNode = segmentList || segmentTemplate;
15467 var segmentTimeline = segmentTimelineParentNode && findChildren(segmentTimelineParentNode, 'SegmentTimeline')[0];
15468 var segmentInitializationParentNode = segmentList || segmentBase || segmentTemplate;
15469 var segmentInitialization = segmentInitializationParentNode && findChildren(segmentInitializationParentNode, 'Initialization')[0]; // SegmentTemplate is handled slightly differently, since it can have both
15470 // @initialization and an <Initialization> node. @initialization can be templated,
15471 // while the node can have a url and range specified. If the <SegmentTemplate> has
15472 // both @initialization and an <Initialization> subelement we opt to override with
15473 // the node, as this interaction is not defined in the spec.
15474
15475 var template = segmentTemplate && parseAttributes$1(segmentTemplate);
15476
15477 if (template && segmentInitialization) {
15478 template.initialization = segmentInitialization && parseAttributes$1(segmentInitialization);
15479 } else if (template && template.initialization) {
15480 // If it is @initialization we convert it to an object since this is the format that
15481 // later functions will rely on for the initialization segment. This is only valid
15482 // for <SegmentTemplate>
15483 template.initialization = {
15484 sourceURL: template.initialization
15485 };
15486 }
15487
15488 var segmentInfo = {
15489 template: template,
15490 timeline: segmentTimeline && findChildren(segmentTimeline, 'S').map(function (s) {
15491 return parseAttributes$1(s);
15492 }),
15493 list: segmentList && merge(parseAttributes$1(segmentList), {
15494 segmentUrls: segmentUrls,
15495 initialization: parseAttributes$1(segmentInitialization)
15496 }),
15497 base: segmentBase && merge(parseAttributes$1(segmentBase), {
15498 initialization: parseAttributes$1(segmentInitialization)
15499 })
15500 };
15501 Object.keys(segmentInfo).forEach(function (key) {
15502 if (!segmentInfo[key]) {
15503 delete segmentInfo[key];
15504 }
15505 });
15506 return segmentInfo;
15507 };
15508 /**
15509 * Contains Segment information and attributes needed to construct a Playlist object
15510 * from a Representation
15511 *
15512 * @typedef {Object} RepresentationInformation
15513 * @property {SegmentInformation} segmentInfo
15514 * Segment information for this Representation
15515 * @property {Object} attributes
15516 * Inherited attributes for this Representation
15517 */
15518
15519 /**
15520 * Maps a Representation node to an object containing Segment information and attributes
15521 *
15522 * @name inheritBaseUrlsCallback
15523 * @function
15524 * @param {Node} representation
15525 * Representation node from the mpd
15526 * @return {RepresentationInformation}
15527 * Representation information needed to construct a Playlist object
15528 */
15529
15530 /**
15531 * Returns a callback for Array.prototype.map for mapping Representation nodes to
15532 * Segment information and attributes using inherited BaseURL nodes.
15533 *
15534 * @param {Object} adaptationSetAttributes
15535 * Contains attributes inherited by the AdaptationSet
15536 * @param {string[]} adaptationSetBaseUrls
15537 * Contains list of resolved base urls inherited by the AdaptationSet
15538 * @param {SegmentInformation} adaptationSetSegmentInfo
15539 * Contains Segment information for the AdaptationSet
15540 * @return {inheritBaseUrlsCallback}
15541 * Callback map function
15542 */
15543
15544 var inheritBaseUrls = function inheritBaseUrls(adaptationSetAttributes, adaptationSetBaseUrls, adaptationSetSegmentInfo) {
15545 return function (representation) {
15546 var repBaseUrlElements = findChildren(representation, 'BaseURL');
15547 var repBaseUrls = buildBaseUrls(adaptationSetBaseUrls, repBaseUrlElements);
15548 var attributes = merge(adaptationSetAttributes, parseAttributes$1(representation));
15549 var representationSegmentInfo = getSegmentInformation(representation);
15550 return repBaseUrls.map(function (baseUrl) {
15551 return {
15552 segmentInfo: merge(adaptationSetSegmentInfo, representationSegmentInfo),
15553 attributes: merge(attributes, {
15554 baseUrl: baseUrl
15555 })
15556 };
15557 });
15558 };
15559 };
15560 /**
   * Transforms a series of content protection nodes to
15562 * an object containing pssh data by key system
15563 *
15564 * @param {Node[]} contentProtectionNodes
15565 * Content protection nodes
15566 * @return {Object}
15567 * Object containing pssh data by key system
15568 */
15569
15570 var generateKeySystemInformation = function generateKeySystemInformation(contentProtectionNodes) {
15571 return contentProtectionNodes.reduce(function (acc, node) {
15572 var attributes = parseAttributes$1(node);
15573 var keySystem = keySystemsMap[attributes.schemeIdUri];
15574
15575 if (keySystem) {
15576 acc[keySystem] = {
15577 attributes: attributes
15578 };
15579 var psshNode = findChildren(node, 'cenc:pssh')[0];
15580
15581 if (psshNode) {
15582 var pssh = getContent(psshNode);
15583 var psshBuffer = pssh && decodeB64ToUint8Array_1(pssh);
15584 acc[keySystem].pssh = psshBuffer;
15585 }
15586 }
15587
15588 return acc;
15589 }, {});
15590 };
15591 /**
15592 * Maps an AdaptationSet node to a list of Representation information objects
15593 *
15594 * @name toRepresentationsCallback
15595 * @function
15596 * @param {Node} adaptationSet
15597 * AdaptationSet node from the mpd
15598 * @return {RepresentationInformation[]}
   * List of objects containing Representation information
15600 */
15601
15602 /**
15603 * Returns a callback for Array.prototype.map for mapping AdaptationSet nodes to a list of
15604 * Representation information objects
15605 *
15606 * @param {Object} periodAttributes
15607 * Contains attributes inherited by the Period
15608 * @param {string[]} periodBaseUrls
15609 * Contains list of resolved base urls inherited by the Period
15610 * @param {string[]} periodSegmentInfo
15611 * Contains Segment Information at the period level
15612 * @return {toRepresentationsCallback}
15613 * Callback map function
15614 */
15615
15616 var toRepresentations = function toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo) {
15617 return function (adaptationSet) {
15618 var adaptationSetAttributes = parseAttributes$1(adaptationSet);
15619 var adaptationSetBaseUrls = buildBaseUrls(periodBaseUrls, findChildren(adaptationSet, 'BaseURL'));
15620 var role = findChildren(adaptationSet, 'Role')[0];
15621 var roleAttributes = {
15622 role: parseAttributes$1(role)
15623 };
15624 var attrs = merge(periodAttributes, adaptationSetAttributes, roleAttributes);
15625 var contentProtection = generateKeySystemInformation(findChildren(adaptationSet, 'ContentProtection'));
15626
15627 if (Object.keys(contentProtection).length) {
15628 attrs = merge(attrs, {
15629 contentProtection: contentProtection
15630 });
15631 }
15632
15633 var segmentInfo = getSegmentInformation(adaptationSet);
15634 var representations = findChildren(adaptationSet, 'Representation');
15635 var adaptationSetSegmentInfo = merge(periodSegmentInfo, segmentInfo);
15636 return flatten(representations.map(inheritBaseUrls(attrs, adaptationSetBaseUrls, adaptationSetSegmentInfo)));
15637 };
15638 };
15639 /**
   * Maps a Period node to a list of Representation information objects for all
15641 * AdaptationSet nodes contained within the Period
15642 *
15643 * @name toAdaptationSetsCallback
15644 * @function
15645 * @param {Node} period
15646 * Period node from the mpd
15647 * @param {number} periodIndex
15648 * Index of the Period within the mpd
15649 * @return {RepresentationInformation[]}
   * List of objects containing Representation information
15651 */
15652
15653 /**
15654 * Returns a callback for Array.prototype.map for mapping Period nodes to a list of
15655 * Representation information objects
15656 *
15657 * @param {Object} mpdAttributes
15658 * Contains attributes inherited by the mpd
15659 * @param {string[]} mpdBaseUrls
15660 * Contains list of resolved base urls inherited by the mpd
15661 * @return {toAdaptationSetsCallback}
15662 * Callback map function
15663 */
15664
15665 var toAdaptationSets = function toAdaptationSets(mpdAttributes, mpdBaseUrls) {
15666 return function (period, index) {
15667 var periodBaseUrls = buildBaseUrls(mpdBaseUrls, findChildren(period, 'BaseURL'));
15668 var periodAtt = parseAttributes$1(period);
15669 var parsedPeriodId = parseInt(periodAtt.id, 10); // fallback to mapping index if Period@id is not a number
15670
15671 var periodIndex = window_1.isNaN(parsedPeriodId) ? index : parsedPeriodId;
15672 var periodAttributes = merge(mpdAttributes, {
15673 periodIndex: periodIndex
15674 });
15675 var adaptationSets = findChildren(period, 'AdaptationSet');
15676 var periodSegmentInfo = getSegmentInformation(period);
15677 return flatten(adaptationSets.map(toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo)));
15678 };
15679 };
15680 /**
15681 * Traverses the mpd xml tree to generate a list of Representation information objects
15682 * that have inherited attributes from parent nodes
15683 *
15684 * @param {Node} mpd
15685 * The root node of the mpd
15686 * @param {Object} options
15687 * Available options for inheritAttributes
15688 * @param {string} options.manifestUri
15689 * The uri source of the mpd
15690 * @param {number} options.NOW
15691 * Current time per DASH IOP. Default is current time in ms since epoch
15692 * @param {number} options.clientOffset
15693 * Client time difference from NOW (in milliseconds)
15694 * @return {RepresentationInformation[]}
15695 * List of objects containing Representation information
15696 */
15697
15698 var inheritAttributes = function inheritAttributes(mpd, options) {
15699 if (options === void 0) {
15700 options = {};
15701 }
15702
15703 var _options = options,
15704 _options$manifestUri = _options.manifestUri,
15705 manifestUri = _options$manifestUri === void 0 ? '' : _options$manifestUri,
15706 _options$NOW = _options.NOW,
15707 NOW = _options$NOW === void 0 ? Date.now() : _options$NOW,
15708 _options$clientOffset = _options.clientOffset,
15709 clientOffset = _options$clientOffset === void 0 ? 0 : _options$clientOffset;
15710 var periods = findChildren(mpd, 'Period');
15711
15712 if (!periods.length) {
15713 throw new Error(errors.INVALID_NUMBER_OF_PERIOD);
15714 }
15715
15716 var mpdAttributes = parseAttributes$1(mpd);
15717 var mpdBaseUrls = buildBaseUrls([manifestUri], findChildren(mpd, 'BaseURL'));
15718 mpdAttributes.sourceDuration = mpdAttributes.mediaPresentationDuration || 0;
15719 mpdAttributes.NOW = NOW;
15720 mpdAttributes.clientOffset = clientOffset;
15721 return flatten(periods.map(toAdaptationSets(mpdAttributes, mpdBaseUrls)));
15722 };
15723
15724 var stringToMpdXml = function stringToMpdXml(manifestString) {
15725 if (manifestString === '') {
15726 throw new Error(errors.DASH_EMPTY_MANIFEST);
15727 }
15728
15729 var parser = new domParser_3();
15730 var xml = parser.parseFromString(manifestString, 'application/xml');
15731 var mpd = xml && xml.documentElement.tagName === 'MPD' ? xml.documentElement : null;
15732
15733 if (!mpd || mpd && mpd.getElementsByTagName('parsererror').length > 0) {
15734 throw new Error(errors.DASH_INVALID_XML);
15735 }
15736
15737 return mpd;
15738 };
15739
15740 /**
15741 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
15742 *
15743 * @param {string} mpd
15744 * XML string of the MPD manifest
15745 * @return {Object|null}
15746 * Attributes of UTCTiming node specified in the manifest. Null if none found
15747 */
15748
15749 var parseUTCTimingScheme = function parseUTCTimingScheme(mpd) {
15750 var UTCTimingNode = findChildren(mpd, 'UTCTiming')[0];
15751
15752 if (!UTCTimingNode) {
15753 return null;
15754 }
15755
15756 var attributes = parseAttributes$1(UTCTimingNode);
15757
15758 switch (attributes.schemeIdUri) {
15759 case 'urn:mpeg:dash:utc:http-head:2014':
15760 case 'urn:mpeg:dash:utc:http-head:2012':
15761 attributes.method = 'HEAD';
15762 break;
15763
15764 case 'urn:mpeg:dash:utc:http-xsdate:2014':
15765 case 'urn:mpeg:dash:utc:http-iso:2014':
15766 case 'urn:mpeg:dash:utc:http-xsdate:2012':
15767 case 'urn:mpeg:dash:utc:http-iso:2012':
15768 attributes.method = 'GET';
15769 break;
15770
15771 case 'urn:mpeg:dash:utc:direct:2014':
15772 case 'urn:mpeg:dash:utc:direct:2012':
15773 attributes.method = 'DIRECT';
15774 attributes.value = Date.parse(attributes.value);
15775 break;
15776
15777 case 'urn:mpeg:dash:utc:http-ntp:2014':
15778 case 'urn:mpeg:dash:utc:ntp:2014':
15779 case 'urn:mpeg:dash:utc:sntp:2014':
15780 default:
15781 throw new Error(errors.UNSUPPORTED_UTC_TIMING_SCHEME);
15782 }
15783
15784 return attributes;
15785 };
15786
15787 var parse = function parse(manifestString, options) {
15788 if (options === void 0) {
15789 options = {};
15790 }
15791
15792 return toM3u8(toPlaylists(inheritAttributes(stringToMpdXml(manifestString), options)), options.sidxMapping);
15793 };
15794 /**
15795 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
15796 *
15797 * @param {string} manifestString
15798 * XML string of the MPD manifest
15799 * @return {Object|null}
15800 * Attributes of UTCTiming node specified in the manifest. Null if none found
15801 */
15802
15803 var parseUTCTiming = function parseUTCTiming(manifestString) {
15804 return parseUTCTimingScheme(stringToMpdXml(manifestString));
15805 };
15806
15807 /**
15808 * mux.js
15809 *
15810 * Copyright (c) Brightcove
15811 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
15812 */
15813 var toUnsigned = function toUnsigned(value) {
15814 return value >>> 0;
15815 };
15816
15817 var toHexString = function toHexString(value) {
15818 return ('00' + value.toString(16)).slice(-2);
15819 };
15820
15821 var bin = {
15822 toUnsigned: toUnsigned,
15823 toHexString: toHexString
15824 };
15825
  // -- mp4 inspector helpers ---------------------------------------------
  // inspectMp4/_textifyMp are assigned further down; the remaining names are
  // pure parsing helpers used by the box handlers in parse$1 below.
  var inspectMp4,
      _textifyMp,
      toUnsigned$1 = bin.toUnsigned,
      // mp4 dates count seconds from 1904-01-01T00:00:00Z; subtracting
      // 2082844800000ms rebases them onto the Unix epoch for Date
      parseMp4Date = function parseMp4Date(seconds) {
        return new Date(seconds * 1000 - 2082844800000);
      },
      // unpacks the 4-byte sample-flags field used by trun entries into its
      // individual ISO BMFF bit fields
      parseSampleFlags = function parseSampleFlags(flags) {
        return {
          isLeading: (flags[0] & 0x0c) >>> 2,
          dependsOn: flags[0] & 0x03,
          isDependedOn: (flags[1] & 0xc0) >>> 6,
          hasRedundancy: (flags[1] & 0x30) >>> 4,
          paddingValue: (flags[1] & 0x0e) >>> 1,
          isNonSyncSample: flags[1] & 0x01,
          degradationPriority: flags[2] << 8 | flags[3]
        };
      },

      /**
       * Returns the string representation of an ASCII encoded four byte buffer.
       * @param buffer {Uint8Array} a four-byte buffer to translate
       * @return {string} the corresponding string
       */
      parseType = function parseType(buffer) {
        var result = '';
        result += String.fromCharCode(buffer[0]);
        result += String.fromCharCode(buffer[1]);
        result += String.fromCharCode(buffer[2]);
        result += String.fromCharCode(buffer[3]);
        return result;
      },
15857
      // Find the data for a box specified by its path
      findBox = function findBox(data, path) {
        var results = [],
            i,
            size,
            type,
            end,
            subresults;

        if (!path.length) {
          // short-circuit the search for empty paths
          return null;
        }

        // walk the sibling boxes at this level; each box begins with a
        // 32-bit big-endian size followed by a 4-byte ASCII type
        for (i = 0; i < data.byteLength;) {
          size = toUnsigned$1(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);

          type = parseType(data.subarray(i + 4, i + 8));

          // sizes <= 1 are treated as extending to the end of the buffer
          // (no extended 64-bit size handling here)
          end = size > 1 ? i + size : data.byteLength;

          if (type === path[0]) {
            if (path.length === 1) {
              // this is the end of the path and we've found the box we were
              // looking for
              results.push(data.subarray(i + 8, end));
            } else {
              // recursively search for the next box along the path
              subresults = findBox(data.subarray(i + 8, end), path.slice(1));
              if (subresults.length) {
                results = results.concat(subresults);
              }
            }
          }
          i = end;
        }

        // we've finished searching all of data
        return results;
      },
15898 nalParse = function nalParse(avcStream) {
15899 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
15900 result = [],
15901 i,
15902 length;
15903 for (i = 0; i + 4 < avcStream.length; i += length) {
15904 length = avcView.getUint32(i);
15905 i += 4;
15906
15907 // bail if this doesn't appear to be an H264 stream
15908 if (length <= 0) {
15909 result.push('<span style=\'color:red;\'>MALFORMED DATA</span>');
15910 continue;
15911 }
15912
15913 switch (avcStream[i] & 0x1F) {
15914 case 0x01:
15915 result.push('slice_layer_without_partitioning_rbsp');
15916 break;
15917 case 0x05:
15918 result.push('slice_layer_without_partitioning_rbsp_idr');
15919 break;
15920 case 0x06:
15921 result.push('sei_rbsp');
15922 break;
15923 case 0x07:
15924 result.push('seq_parameter_set_rbsp');
15925 break;
15926 case 0x08:
15927 result.push('pic_parameter_set_rbsp');
15928 break;
15929 case 0x09:
15930 result.push('access_unit_delimiter_rbsp');
15931 break;
15932 default:
15933 result.push('UNKNOWN NAL - ' + avcStream[i] & 0x1F);
15934 break;
15935 }
15936 }
15937 return result;
15938 },
15939
15940
      // registry of handlers for individual mp4 box types
      parse$1 = {
        // codingname, not a first-class box type. stsd entries share the
        // same format as real boxes so the parsing infrastructure can be
        // shared
        avc1: function avc1(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
          return {
            dataReferenceIndex: view.getUint16(6),
            width: view.getUint16(24),
            height: view.getUint16(26),
            horizresolution: view.getUint16(28) + view.getUint16(30) / 16,
            vertresolution: view.getUint16(32) + view.getUint16(34) / 16,
            frameCount: view.getUint16(40),
            depth: view.getUint16(74),
            // the remainder of the sample entry is child boxes (avcC, btrt, ...)
            config: inspectMp4(data.subarray(78, data.byteLength))
          };
        },
        // AVCDecoderConfigurationRecord: codec profile/level plus the raw
        // SPS and PPS NAL units
        avcC: function avcC(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              result = {
                configurationVersion: data[0],
                avcProfileIndication: data[1],
                profileCompatibility: data[2],
                avcLevelIndication: data[3],
                lengthSizeMinusOne: data[4] & 0x03,
                sps: [],
                pps: []
              },
              numOfSequenceParameterSets = data[5] & 0x1f,
              numOfPictureParameterSets,
              nalSize,
              offset,
              i;

          // iterate past any SPSs
          offset = 6;
          for (i = 0; i < numOfSequenceParameterSets; i++) {
            nalSize = view.getUint16(offset);
            offset += 2;
            result.sps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
            offset += nalSize;
          }
          // iterate past any PPSs
          numOfPictureParameterSets = data[offset];
          offset++;
          for (i = 0; i < numOfPictureParameterSets; i++) {
            nalSize = view.getUint16(offset);
            offset += 2;
            result.pps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
            offset += nalSize;
          }
          return result;
        },
        // bit rate box: buffer size and bitrate hints
        btrt: function btrt(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
          return {
            bufferSizeDB: view.getUint32(0),
            maxBitrate: view.getUint32(4),
            avgBitrate: view.getUint32(8)
          };
        },
        // elementary stream descriptor; the byte offsets below assume the
        // common fixed descriptor layout rather than walking the nested
        // descriptor length fields
        esds: function esds(data) {
          return {
            version: data[0],
            flags: new Uint8Array(data.subarray(1, 4)),
            esId: data[6] << 8 | data[7],
            streamPriority: data[8] & 0x1f,
            decoderConfig: {
              objectProfileIndication: data[11],
              streamType: data[12] >>> 2 & 0x3f,
              bufferSize: data[13] << 16 | data[14] << 8 | data[15],
              maxBitrate: data[16] << 24 | data[17] << 16 | data[18] << 8 | data[19],
              avgBitrate: data[20] << 24 | data[21] << 16 | data[22] << 8 | data[23],
              decoderConfigDescriptor: {
                tag: data[24],
                length: data[25],
                audioObjectType: data[26] >>> 3 & 0x1f,
                samplingFrequencyIndex: (data[26] & 0x07) << 1 | data[27] >>> 7 & 0x01,
                channelConfiguration: data[27] >>> 3 & 0x0f
              }
            }
          };
        },
        // file type box: major brand, minor version and compatible brands
        ftyp: function ftyp(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              result = {
                majorBrand: parseType(data.subarray(0, 4)),
                minorVersion: view.getUint32(4),
                compatibleBrands: []
              },
              i = 8;
          // the rest of the box is a list of 4-byte brand codes
          while (i < data.byteLength) {
            result.compatibleBrands.push(parseType(data.subarray(i, i + 4)));
            i += 4;
          }
          return result;
        },
        // data information box: container for child boxes
        dinf: function dinf(data) {
          return {
            boxes: inspectMp4(data)
          };
        },
        // data reference box: version/flags plus its child data references
        dref: function dref(data) {
          return {
            version: data[0],
            flags: new Uint8Array(data.subarray(1, 4)),
            dataReferences: inspectMp4(data.subarray(8))
          };
        },
        // handler box: declares the media type handled by this track
        hdlr: function hdlr(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              result = {
                version: view.getUint8(0),
                flags: new Uint8Array(data.subarray(1, 4)),
                handlerType: parseType(data.subarray(8, 12)),
                name: ''
              },
              i = 8;

          // parse out the name field
          for (i = 24; i < data.byteLength; i++) {
            if (data[i] === 0x00) {
              // the name field is null-terminated
              i++;
              break;
            }
            result.name += String.fromCharCode(data[i]);
          }
          // decode UTF-8 to javascript's internal representation
          // see http://ecmanaut.blogspot.com/2006/07/encoding-decoding-utf8-in-javascript.html
          result.name = decodeURIComponent(escape(result.name));

          return result;
        },
        // media data box: summarized as its size plus NAL unit names rather
        // than the raw payload
        mdat: function mdat(data) {
          return {
            byteLength: data.byteLength,
            nals: nalParse(data)
          };
        },
        // media header box: timescale, duration and language for one track
        mdhd: function mdhd(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              i = 4,
              language,
              result = {
                version: view.getUint8(0),
                flags: new Uint8Array(data.subarray(1, 4)),
                language: ''
              };
          if (result.version === 1) {
            // version 1 stores 64-bit times; only the low 32 bits are read
            i += 4;
            result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
            i += 8;
            result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
            i += 4;
            result.timescale = view.getUint32(i);
            i += 8;
            result.duration = view.getUint32(i); // truncating top 4 bytes
          } else {
            result.creationTime = parseMp4Date(view.getUint32(i));
            i += 4;
            result.modificationTime = parseMp4Date(view.getUint32(i));
            i += 4;
            result.timescale = view.getUint32(i);
            i += 4;
            result.duration = view.getUint32(i);
          }
          i += 4;
          // language is stored as an ISO-639-2/T code in an array of three 5-bit fields
          // each field is the packed difference between its ASCII value and 0x60
          language = view.getUint16(i);
          result.language += String.fromCharCode((language >> 10) + 0x60);
          result.language += String.fromCharCode(((language & 0x03e0) >> 5) + 0x60);
          result.language += String.fromCharCode((language & 0x1f) + 0x60);

          return result;
        },
        // media box: container for child boxes
        mdia: function mdia(data) {
          return {
            boxes: inspectMp4(data)
          };
        },
        // movie fragment header box: the fragment sequence number
        mfhd: function mfhd(data) {
          return {
            version: data[0],
            flags: new Uint8Array(data.subarray(1, 4)),
            sequenceNumber: data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]
          };
        },
        // media information box: container for child boxes
        minf: function minf(data) {
          return {
            boxes: inspectMp4(data)
          };
        },
        // codingname, not a first-class box type. stsd entries share the
        // same format as real boxes so the parsing infrastructure can be
        // shared
        mp4a: function mp4a(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              result = {
                // 6 bytes reserved
                dataReferenceIndex: view.getUint16(6),
                // 4 + 4 bytes reserved
                channelcount: view.getUint16(16),
                samplesize: view.getUint16(18),
                // 2 bytes pre_defined
                // 2 bytes reserved
                samplerate: view.getUint16(24) + view.getUint16(26) / 65536
              };

          // if there are more bytes to process, assume this is an ISO/IEC
          // 14496-14 MP4AudioSampleEntry and parse the ESDBox
          if (data.byteLength > 28) {
            result.streamDescriptor = inspectMp4(data.subarray(28))[0];
          }
          return result;
        },
        // movie fragment box: container for child boxes
        moof: function moof(data) {
          return {
            boxes: inspectMp4(data)
          };
        },
        // movie box: container for child boxes
        moov: function moov(data) {
          return {
            boxes: inspectMp4(data)
          };
        },
        // movie extends box: container for child boxes
        mvex: function mvex(data) {
          return {
            boxes: inspectMp4(data)
          };
        },
        // movie header box: presentation-wide timing plus the next track id
        mvhd: function mvhd(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              i = 4,
              result = {
                version: view.getUint8(0),
                flags: new Uint8Array(data.subarray(1, 4))
              };

          if (result.version === 1) {
            // version 1 stores 64-bit times; only the low 32 bits are read
            i += 4;
            result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
            i += 8;
            result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
            i += 4;
            result.timescale = view.getUint32(i);
            i += 8;
            result.duration = view.getUint32(i); // truncating top 4 bytes
          } else {
            result.creationTime = parseMp4Date(view.getUint32(i));
            i += 4;
            result.modificationTime = parseMp4Date(view.getUint32(i));
            i += 4;
            result.timescale = view.getUint32(i);
            i += 4;
            result.duration = view.getUint32(i);
          }
          i += 4;

          // convert fixed-point, base 16 back to a number
          // NOTE(review): 16.16 fixed-point is normally divided by 65536 and
          // 8.8 by 256; the divisors below (16 and 8) look suspect — confirm
          // whether this display-only scaling is intentional
          result.rate = view.getUint16(i) + view.getUint16(i + 2) / 16;
          i += 4;
          result.volume = view.getUint8(i) + view.getUint8(i + 1) / 8;
          i += 2;
          i += 2; // reserved
          i += 2 * 4; // reserved
          result.matrix = new Uint32Array(data.subarray(i, i + 9 * 4));
          i += 9 * 4;
          i += 6 * 4; // pre_defined
          result.nextTrackId = view.getUint32(i);
          return result;
        },
        // progressive download information box
        pdin: function pdin(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
          return {
            version: view.getUint8(0),
            flags: new Uint8Array(data.subarray(1, 4)),
            rate: view.getUint32(4),
            initialDelay: view.getUint32(8)
          };
        },
        // sample dependency type box: one byte of packed 2-bit dependency
        // fields per sample
        sdtp: function sdtp(data) {
          var result = {
            version: data[0],
            flags: new Uint8Array(data.subarray(1, 4)),
            samples: []
          },
              i;

          for (i = 4; i < data.byteLength; i++) {
            result.samples.push({
              dependsOn: (data[i] & 0x30) >> 4,
              isDependedOn: (data[i] & 0x0c) >> 2,
              hasRedundancy: data[i] & 0x03
            });
          }
          return result;
        },
        // segment index box
        // NOTE(review): the offsets below assume a version 0 sidx (32-bit
        // earliest_presentation_time/first_offset); a version 1 box uses
        // 64-bit fields and would be misread here — confirm inputs
        sidx: function sidx(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              result = {
                version: data[0],
                flags: new Uint8Array(data.subarray(1, 4)),
                references: [],
                referenceId: view.getUint32(4),
                timescale: view.getUint32(8),
                earliestPresentationTime: view.getUint32(12),
                firstOffset: view.getUint32(16)
              },
              referenceCount = view.getUint16(22),
              i;

          // each 12-byte reference: type+size word, duration, SAP word
          for (i = 24; referenceCount; i += 12, referenceCount--) {
            result.references.push({
              referenceType: (data[i] & 0x80) >>> 7,
              referencedSize: view.getUint32(i) & 0x7FFFFFFF,
              subsegmentDuration: view.getUint32(i + 4),
              startsWithSap: !!(data[i + 8] & 0x80),
              sapType: (data[i + 8] & 0x70) >>> 4,
              sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
            });
          }

          return result;
        },
        // sound media header box: audio balance
        smhd: function smhd(data) {
          return {
            version: data[0],
            flags: new Uint8Array(data.subarray(1, 4)),
            balance: data[4] + data[5] / 256
          };
        },
        // sample table box: container for child boxes
        stbl: function stbl(data) {
          return {
            boxes: inspectMp4(data)
          };
        },
        // chunk offset box: 32-bit file offsets of each chunk
        stco: function stco(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              result = {
                version: data[0],
                flags: new Uint8Array(data.subarray(1, 4)),
                chunkOffsets: []
              },
              entryCount = view.getUint32(4),
              i;
          for (i = 8; entryCount; i += 4, entryCount--) {
            result.chunkOffsets.push(view.getUint32(i));
          }
          return result;
        },
        // sample-to-chunk box: maps runs of chunks to samples-per-chunk
        stsc: function stsc(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              entryCount = view.getUint32(4),
              result = {
                version: data[0],
                flags: new Uint8Array(data.subarray(1, 4)),
                sampleToChunks: []
              },
              i;
          for (i = 8; entryCount; i += 12, entryCount--) {
            result.sampleToChunks.push({
              firstChunk: view.getUint32(i),
              samplesPerChunk: view.getUint32(i + 4),
              sampleDescriptionIndex: view.getUint32(i + 8)
            });
          }
          return result;
        },
        // sample description box: child entries are parsed by the
        // codingname handlers above (avc1, mp4a, ...)
        stsd: function stsd(data) {
          return {
            version: data[0],
            flags: new Uint8Array(data.subarray(1, 4)),
            sampleDescriptions: inspectMp4(data.subarray(8))
          };
        },
        // sample size box: a default size plus optional per-sample entries
        stsz: function stsz(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              result = {
                version: data[0],
                flags: new Uint8Array(data.subarray(1, 4)),
                sampleSize: view.getUint32(4),
                entries: []
              },
              i;
          for (i = 12; i < data.byteLength; i += 4) {
            result.entries.push(view.getUint32(i));
          }
          return result;
        },
        // time-to-sample box: run-length encoded sample durations
        stts: function stts(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              result = {
                version: data[0],
                flags: new Uint8Array(data.subarray(1, 4)),
                timeToSamples: []
              },
              entryCount = view.getUint32(4),
              i;

          for (i = 8; entryCount; i += 8, entryCount--) {
            result.timeToSamples.push({
              sampleCount: view.getUint32(i),
              sampleDelta: view.getUint32(i + 4)
            });
          }
          return result;
        },
        // segment type box: identical layout to ftyp
        styp: function styp(data) {
          return parse$1.ftyp(data);
        },
        // track fragment decode time box
        tfdt: function tfdt(data) {
          var result = {
            version: data[0],
            flags: new Uint8Array(data.subarray(1, 4)),
            baseMediaDecodeTime: toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7])
          };
          if (result.version === 1) {
            // version 1 is a 64-bit value: scale the high word by 2^32 and
            // add the low word (values above 2^53 lose precision in a Number)
            result.baseMediaDecodeTime *= Math.pow(2, 32);
            result.baseMediaDecodeTime += toUnsigned$1(data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11]);
          }
          return result;
        },
16366 tfhd: function tfhd(data) {
16367 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
16368 result = {
16369 version: data[0],
16370 flags: new Uint8Array(data.subarray(1, 4)),
16371 trackId: view.getUint32(4)
16372 },
16373 baseDataOffsetPresent = result.flags[2] & 0x01,
16374 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
16375 defaultSampleDurationPresent = result.flags[2] & 0x08,
16376 defaultSampleSizePresent = result.flags[2] & 0x10,
16377 defaultSampleFlagsPresent = result.flags[2] & 0x20,
16378 durationIsEmpty = result.flags[0] & 0x010000,
16379 defaultBaseIsMoof = result.flags[0] & 0x020000,
16380 i;
16381
16382 i = 8;
16383 if (baseDataOffsetPresent) {
16384 i += 4; // truncate top 4 bytes
16385 // FIXME: should we read the full 64 bits?
16386 result.baseDataOffset = view.getUint32(12);
16387 i += 4;
16388 }
16389 if (sampleDescriptionIndexPresent) {
16390 result.sampleDescriptionIndex = view.getUint32(i);
16391 i += 4;
16392 }
16393 if (defaultSampleDurationPresent) {
16394 result.defaultSampleDuration = view.getUint32(i);
16395 i += 4;
16396 }
16397 if (defaultSampleSizePresent) {
16398 result.defaultSampleSize = view.getUint32(i);
16399 i += 4;
16400 }
16401 if (defaultSampleFlagsPresent) {
16402 result.defaultSampleFlags = view.getUint32(i);
16403 }
16404 if (durationIsEmpty) {
16405 result.durationIsEmpty = true;
16406 }
16407 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
16408 result.baseDataOffsetIsMoof = true;
16409 }
16410 return result;
16411 },
        // track header box
        tkhd: function tkhd(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
              i = 4,
              result = {
                version: view.getUint8(0),
                flags: new Uint8Array(data.subarray(1, 4))
              };
          if (result.version === 1) {
            // version 1 stores 64-bit times; only the low 32 bits are read
            i += 4;
            result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
            i += 8;
            result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
            i += 4;
            result.trackId = view.getUint32(i);
            i += 4;
            i += 8;
            result.duration = view.getUint32(i); // truncating top 4 bytes
          } else {
            result.creationTime = parseMp4Date(view.getUint32(i));
            i += 4;
            result.modificationTime = parseMp4Date(view.getUint32(i));
            i += 4;
            result.trackId = view.getUint32(i);
            i += 4;
            i += 4; // reserved
            result.duration = view.getUint32(i);
          }
          i += 4;
          i += 2 * 4; // reserved
          result.layer = view.getUint16(i);
          i += 2;
          result.alternateGroup = view.getUint16(i);
          i += 2;
          // convert fixed-point, base 16 back to a number
          // NOTE(review): 8.8 fixed-point volume is normally divided by 256
          // and 16.16 width/height by 65536; the divisors below (8 and 16)
          // look suspect — confirm whether this display-only scaling is
          // intentional
          result.volume = view.getUint8(i) + view.getUint8(i + 1) / 8;
          i += 2;
          i += 2; // reserved
          result.matrix = new Uint32Array(data.subarray(i, i + 9 * 4));
          i += 9 * 4;
          result.width = view.getUint16(i) + view.getUint16(i + 2) / 16;
          i += 4;
          result.height = view.getUint16(i) + view.getUint16(i + 2) / 16;
          return result;
        },
        // track fragment box: container for child boxes
        traf: function traf(data) {
          return {
            boxes: inspectMp4(data)
          };
        },
        // track box: container for child boxes
        trak: function trak(data) {
          return {
            boxes: inspectMp4(data)
          };
        },
        // track extends box: default sample values for a track's fragments
        trex: function trex(data) {
          var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
          return {
            version: data[0],
            flags: new Uint8Array(data.subarray(1, 4)),
            trackId: view.getUint32(4),
            defaultSampleDescriptionIndex: view.getUint32(8),
            defaultSampleDuration: view.getUint32(12),
            defaultSampleSize: view.getUint32(16),
            sampleDependsOn: data[20] & 0x03,
            sampleIsDependedOn: (data[21] & 0xc0) >> 6,
            sampleHasRedundancy: (data[21] & 0x30) >> 4,
            samplePaddingValue: (data[21] & 0x0e) >> 1,
            sampleIsDifferenceSample: !!(data[21] & 0x01),
            sampleDegradationPriority: view.getUint16(22)
          };
        },
    // Track Fragment Run Box: a run of samples. Which per-sample fields are
    // present is driven by the box flags; `offset` walks the payload as each
    // optional field is consumed.
    trun: function trun(data) {
      var result = {
        version: data[0],
        flags: new Uint8Array(data.subarray(1, 4)),
        samples: []
      },
          view = new DataView(data.buffer, data.byteOffset, data.byteLength),

      // Flag interpretation
      dataOffsetPresent = result.flags[2] & 0x01,
      // compare with 2nd byte of 0x1
      firstSampleFlagsPresent = result.flags[2] & 0x04,
      // compare with 2nd byte of 0x4
      sampleDurationPresent = result.flags[1] & 0x01,
      // compare with 2nd byte of 0x100
      sampleSizePresent = result.flags[1] & 0x02,
      // compare with 2nd byte of 0x200
      sampleFlagsPresent = result.flags[1] & 0x04,
      // compare with 2nd byte of 0x400
      sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
      // compare with 2nd byte of 0x800
      sampleCount = view.getUint32(4),
          offset = 8,
          sample;

      if (dataOffsetPresent) {
        // 32 bit signed integer
        result.dataOffset = view.getInt32(offset);
        offset += 4;
      }

      // Overrides the flags for the first sample only. The order of
      // optional values will be: duration, size, compositionTimeOffset
      if (firstSampleFlagsPresent && sampleCount) {
        sample = {
          flags: parseSampleFlags(data.subarray(offset, offset + 4))
        };
        offset += 4;
        if (sampleDurationPresent) {
          sample.duration = view.getUint32(offset);
          offset += 4;
        }
        if (sampleSizePresent) {
          sample.size = view.getUint32(offset);
          offset += 4;
        }
        if (sampleCompositionTimeOffsetPresent) {
          // Note: this should be a signed int if version is 1
          sample.compositionTimeOffset = view.getUint32(offset);
          offset += 4;
        }
        result.samples.push(sample);
        // the first sample has been consumed; the loop below reads the rest
        sampleCount--;
      }

      while (sampleCount--) {
        sample = {};
        if (sampleDurationPresent) {
          sample.duration = view.getUint32(offset);
          offset += 4;
        }
        if (sampleSizePresent) {
          sample.size = view.getUint32(offset);
          offset += 4;
        }
        if (sampleFlagsPresent) {
          sample.flags = parseSampleFlags(data.subarray(offset, offset + 4));
          offset += 4;
        }
        if (sampleCompositionTimeOffsetPresent) {
          // Note: this should be a signed int if version is 1
          sample.compositionTimeOffset = view.getUint32(offset);
          offset += 4;
        }
        result.samples.push(sample);
      }
      return result;
    },
    // Data Entry URL Box (note the trailing space in the fourcc): only the
    // version and flags are extracted.
    'url ': function url(data) {
      return {
        version: data[0],
        flags: new Uint8Array(data.subarray(1, 4))
      };
    },
    // Video Media Header Box: graphics mode and opcolor (three 16-bit values).
    vmhd: function vmhd(data) {
      var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
      return {
        version: data[0],
        flags: new Uint8Array(data.subarray(1, 4)),
        graphicsmode: view.getUint16(4),
        opcolor: new Uint16Array([view.getUint16(6), view.getUint16(8), view.getUint16(10)])
      };
    }
16576 };
16577
16578 /**
16579 * Return a javascript array of box objects parsed from an ISO base
16580 * media file.
16581 * @param data {Uint8Array} the binary data of the media to be inspected
16582 * @return {array} a javascript array of potentially nested box objects
16583 */
16584 inspectMp4 = function inspectMp4(data) {
16585 var i = 0,
16586 result = [],
16587 view,
16588 size,
16589 type,
16590 end,
16591 box;
16592
16593 // Convert data from Uint8Array to ArrayBuffer, to follow Dataview API
16594 var ab = new ArrayBuffer(data.length);
16595 var v = new Uint8Array(ab);
16596 for (var z = 0; z < data.length; ++z) {
16597 v[z] = data[z];
16598 }
16599 view = new DataView(ab);
16600
16601 while (i < data.byteLength) {
16602 // parse box data
16603 size = view.getUint32(i);
16604 type = parseType(data.subarray(i + 4, i + 8));
16605 end = size > 1 ? i + size : data.byteLength;
16606
16607 // parse type-specific data
16608 box = (parse$1[type] || function (data) {
16609 return {
16610 data: data
16611 };
16612 })(data.subarray(i + 8, end));
16613 box.size = size;
16614 box.type = type;
16615
16616 // store this box and move to the next
16617 result.push(box);
16618 i = end;
16619 }
16620 return result;
16621 };
16622
16623 /**
16624 * Returns a textual representation of the javascript represtentation
16625 * of an MP4 file. You can use it as an alternative to
16626 * JSON.stringify() to compare inspected MP4s.
16627 * @param inspectedMp4 {array} the parsed array of boxes in an MP4
16628 * file
16629 * @param depth {number} (optional) the number of ancestor boxes of
16630 * the elements of inspectedMp4. Assumed to be zero if unspecified.
16631 * @return {string} a text representation of the parsed MP4
16632 */
  /**
   * Returns a textual representation of the javascript represtentation
   * of an MP4 file. You can use it as an alternative to
   * JSON.stringify() to compare inspected MP4s.
   * @param inspectedMp4 {array} the parsed array of boxes in an MP4
   * file
   * @param depth {number} (optional) the number of ancestor boxes of
   * the elements of inspectedMp4. Assumed to be zero if unspecified.
   * @return {string} a text representation of the parsed MP4
   */
  _textifyMp = function textifyMp4(inspectedMp4, depth) {
    var indent;
    depth = depth || 0;
    // two spaces of indentation per nesting level
    indent = new Array(depth * 2 + 1).join(' ');

    // iterate over all the boxes
    return inspectedMp4.map(function (box, index) {

      // list the box type first at the current indentation level
      return indent + box.type + '\n' +

      // the type is already included and handle child boxes separately
      Object.keys(box).filter(function (key) {
        return key !== 'type' && key !== 'boxes';

      // output all the box properties
      }).map(function (key) {
        var prefix = indent + ' ' + key + ': ',
            value = box[key];

        // print out raw bytes as hexademical
        if (value instanceof Uint8Array || value instanceof Uint32Array) {
          // hex-dump the bytes, wrapped to 24 characters per line;
          // match() returns null for an empty byte string
          var bytes = Array.prototype.slice.call(new Uint8Array(value.buffer, value.byteOffset, value.byteLength)).map(function (byte) {
            return ' ' + ('00' + byte.toString(16)).slice(-2);
          }).join('').match(/.{1,24}/g);
          if (!bytes) {
            return prefix + '<>';
          }
          if (bytes.length === 1) {
            return prefix + '<' + bytes.join('').slice(1) + '>';
          }
          return prefix + '<\n' + bytes.map(function (line) {
            return indent + ' ' + line;
          }).join('\n') + '\n' + indent + ' >';
        }

        // stringify generic objects
        return prefix + JSON.stringify(value, null, 2).split('\n').map(function (line, index) {
          if (index === 0) {
            return line;
          }
          return indent + ' ' + line;
        }).join('\n');
      }).join('\n') + (

      // recursively textify the child boxes
      box.boxes ? '\n' + _textifyMp(box.boxes, depth + 1) : '');
    }).join('\n');
  };
16682
  // Public MP4 inspector API: whole-file inspection/textification plus direct
  // access to the individual box parsers used elsewhere in this bundle.
  var mp4Inspector = {
    inspect: inspectMp4,
    textify: _textifyMp,
    parseType: parseType,
    findBox: findBox,
    parseTraf: parse$1.traf,
    parseTfdt: parse$1.tfdt,
    parseHdlr: parse$1.hdlr,
    parseTfhd: parse$1.tfhd,
    parseTrun: parse$1.trun,
    parseSidx: parse$1.sidx
  };

  // video.js utilities used by the DASH playlist loader below
  var EventTarget$1 = videojs.EventTarget,
      mergeOptions$2 = videojs.mergeOptions;
16698
16699 /**
16700 * Returns a new master manifest that is the result of merging an updated master manifest
16701 * into the original version.
16702 *
16703 * @param {Object} oldMaster
16704 * The old parsed mpd object
16705 * @param {Object} newMaster
16706 * The updated parsed mpd object
16707 * @return {Object}
16708 * A new object representing the original master manifest with the updated media
16709 * playlists merged in
16710 */
16711
16712 var updateMaster$1 = function updateMaster$$1(oldMaster, newMaster) {
16713 var noChanges = void 0;
16714 var update = mergeOptions$2(oldMaster, {
16715 // These are top level properties that can be updated
16716 duration: newMaster.duration,
16717 minimumUpdatePeriod: newMaster.minimumUpdatePeriod
16718 });
16719
16720 // First update the playlists in playlist list
16721 for (var i = 0; i < newMaster.playlists.length; i++) {
16722 var playlistUpdate = updateMaster(update, newMaster.playlists[i]);
16723
16724 if (playlistUpdate) {
16725 update = playlistUpdate;
16726 } else {
16727 noChanges = true;
16728 }
16729 }
16730
16731 // Then update media group playlists
16732 forEachMediaGroup(newMaster, function (properties, type, group, label) {
16733 if (properties.playlists && properties.playlists.length) {
16734 var id = properties.playlists[0].id;
16735 var _playlistUpdate = updateMaster(update, properties.playlists[0]);
16736
16737 if (_playlistUpdate) {
16738 update = _playlistUpdate;
16739 // update the playlist reference within media groups
16740 update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
16741 noChanges = false;
16742 }
16743 }
16744 });
16745
16746 if (noChanges) {
16747 return null;
16748 }
16749
16750 return update;
16751 };
16752
16753 var generateSidxKey = function generateSidxKey(sidxInfo) {
16754 // should be non-inclusive
16755 var sidxByteRangeEnd = sidxInfo.byterange.offset + sidxInfo.byterange.length - 1;
16756
16757 return sidxInfo.uri + '-' + sidxInfo.byterange.offset + '-' + sidxByteRangeEnd;
16758 };
16759
16760 // SIDX should be equivalent if the URI and byteranges of the SIDX match.
16761 // If the SIDXs have maps, the two maps should match,
16762 // both `a` and `b` missing SIDXs is considered matching.
16763 // If `a` or `b` but not both have a map, they aren't matching.
16764 var equivalentSidx = function equivalentSidx(a, b) {
16765 var neitherMap = Boolean(!a.map && !b.map);
16766
16767 var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
16768
16769 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
16770 };
16771
16772 // exported for testing
16773 var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
16774 var newSidxMapping = {};
16775
16776 for (var id in playlists) {
16777 var playlist = playlists[id];
16778 var currentSidxInfo = playlist.sidx;
16779
16780 if (currentSidxInfo) {
16781 var key = generateSidxKey(currentSidxInfo);
16782
16783 if (!oldSidxMapping[key]) {
16784 break;
16785 }
16786
16787 var savedSidxInfo = oldSidxMapping[key].sidxInfo;
16788
16789 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
16790 newSidxMapping[key] = oldSidxMapping[key];
16791 }
16792 }
16793 }
16794
16795 return newSidxMapping;
16796 };
16797
16798 /**
16799 * A function that filters out changed items as they need to be requested separately.
16800 *
16801 * The method is exported for testing
16802 *
16803 * @param {Object} masterXml the mpd XML
16804 * @param {string} srcUrl the mpd url
16805 * @param {Date} clientOffset a time difference between server and client (passed through and not used)
16806 * @param {Object} oldSidxMapping the SIDX to compare against
16807 */
16808 var filterChangedSidxMappings = function filterChangedSidxMappings(masterXml, srcUrl, clientOffset, oldSidxMapping) {
16809 // Don't pass current sidx mapping
16810 var master = parse(masterXml, {
16811 manifestUri: srcUrl,
16812 clientOffset: clientOffset
16813 });
16814
16815 var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
16816 var mediaGroupSidx = videoSidx;
16817
16818 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
16819 if (properties.playlists && properties.playlists.length) {
16820 var playlists = properties.playlists;
16821
16822 mediaGroupSidx = mergeOptions$2(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
16823 }
16824 });
16825
16826 return mediaGroupSidx;
16827 };
16828
16829 // exported for testing
16830 var requestSidx_ = function requestSidx_(sidxRange, playlist, xhr, options, finishProcessingFn) {
16831 var sidxInfo = {
16832 // resolve the segment URL relative to the playlist
16833 uri: resolveManifestRedirect(options.handleManifestRedirects, sidxRange.resolvedUri),
16834 // resolvedUri: sidxRange.resolvedUri,
16835 byterange: sidxRange.byterange,
16836 // the segment's playlist
16837 playlist: playlist
16838 };
16839
16840 var sidxRequestOptions = videojs.mergeOptions(sidxInfo, {
16841 responseType: 'arraybuffer',
16842 headers: segmentXhrHeaders(sidxInfo)
16843 });
16844
16845 return xhr(sidxRequestOptions, finishProcessingFn);
16846 };
16847
  // Playlist loader for DASH sources. Transpiled ES5 class extending
  // videojs.EventTarget; a "master" loader is constructed with a src URL
  // string, child loaders (for media groups) with a playlist object.
  var DashPlaylistLoader = function (_EventTarget) {
    inherits(DashPlaylistLoader, _EventTarget);

    // DashPlaylistLoader must accept either a src url or a playlist because subsequent
    // playlist loader setups from media groups will expect to be able to pass a playlist
    // (since there aren't external URLs to media playlists with DASH)
    function DashPlaylistLoader(srcUrlOrPlaylist, hls) {
      var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
      var masterPlaylistLoader = arguments[3];
      classCallCheck(this, DashPlaylistLoader);

      var _this = possibleConstructorReturn(this, (DashPlaylistLoader.__proto__ || Object.getPrototypeOf(DashPlaylistLoader)).call(this));

      // destructured options with defaults (transpiled form)
      var _options$withCredenti = options.withCredentials,
          withCredentials = _options$withCredenti === undefined ? false : _options$withCredenti,
          _options$handleManife = options.handleManifestRedirects,
          handleManifestRedirects = _options$handleManife === undefined ? false : _options$handleManife;


      _this.hls_ = hls;
      _this.withCredentials = withCredentials;
      _this.handleManifestRedirects = handleManifestRedirects;

      if (!srcUrlOrPlaylist) {
        throw new Error('A non-empty playlist URL or playlist is required');
      }

      // event naming?
      _this.on('minimumUpdatePeriod', function () {
        _this.refreshXml_();
      });

      // live playlist staleness timeout
      _this.on('mediaupdatetimeout', function () {
        _this.refreshMedia_(_this.media().id);
      });

      _this.state = 'HAVE_NOTHING';
      _this.loadedPlaylists_ = {};

      // initialize the loader state
      // The masterPlaylistLoader will be created with a string
      if (typeof srcUrlOrPlaylist === 'string') {
        _this.srcUrl = srcUrlOrPlaylist;
        // TODO: reset sidxMapping between period changes
        // once multi-period is refactored
        _this.sidxMapping_ = {};
        return possibleConstructorReturn(_this);
      }

      // child loader: remember the master loader and the playlist to select
      _this.setupChildLoader(masterPlaylistLoader, srcUrlOrPlaylist);
      return _this;
    }
16901
    createClass(DashPlaylistLoader, [{
      key: 'setupChildLoader',
      // Attach this loader to a master loader and record the playlist it
      // should select once the master manifest is available.
      value: function setupChildLoader(masterPlaylistLoader, playlist) {
        this.masterPlaylistLoader_ = masterPlaylistLoader;
        this.childPlaylist_ = playlist;
      }
    }, {
      key: 'dispose',
      // Tear down: abort any request, drop cached playlists, clear all timers
      // and remove every event listener.
      value: function dispose() {
        this.trigger('dispose');
        this.stopRequest();
        this.loadedPlaylists_ = {};
        window_1.clearTimeout(this.minimumUpdatePeriodTimeout_);
        window_1.clearTimeout(this.mediaRequest_);
        window_1.clearTimeout(this.mediaUpdateTimeout);

        this.off();
      }
    }, {
      key: 'hasPendingRequest',
      // Truthy when an XHR or a deferred (setTimeout-based) media request is
      // outstanding.
      value: function hasPendingRequest() {
        return this.request || this.mediaRequest_;
      }
    }, {
      key: 'stopRequest',
      // Abort the in-flight XHR, clearing the reference first so the abort
      // callback sees no pending request.
      value: function stopRequest() {
        if (this.request) {
          var oldRequest = this.request;

          this.request = null;
          oldRequest.onreadystatechange = null;
          oldRequest.abort();
        }
      }
    }, {
      key: 'sidxRequestFinished_',
      // Returns the XHR completion callback for a sidx request. On error the
      // loader state is restored to `startingState`, 'error' is triggered and
      // doneFn receives a null sidx; on success the sidx box is parsed
      // (skipping the 8-byte box header) and passed to doneFn.
      value: function sidxRequestFinished_(playlist, master, startingState, doneFn) {
        var _this2 = this;

        return function (err, request) {
          // disposed
          if (!_this2.request) {
            return;
          }

          // pending request is cleared
          _this2.request = null;

          if (err) {
            _this2.error = {
              status: request.status,
              message: 'DASH playlist request error at URL: ' + playlist.uri,
              response: request.response,
              // MEDIA_ERR_NETWORK
              code: 2
            };
            if (startingState) {
              _this2.state = startingState;
            }

            _this2.trigger('error');
            return doneFn(master, null);
          }

          var bytes = new Uint8Array(request.response);
          // subarray(8) skips the size/type box header before the sidx payload
          var sidx = mp4Inspector.parseSidx(bytes.subarray(8));

          return doneFn(master, sidx);
        };
      }
    }, {
      key: 'media',
      // Getter/setter for the active media playlist. With no argument returns
      // the current playlist; with a playlist (or URI string) switches to it,
      // requesting its sidx byterange first when one is declared.
      value: function media(playlist) {
        var _this3 = this;

        // getter
        if (!playlist) {
          return this.media_;
        }

        // setter
        if (this.state === 'HAVE_NOTHING') {
          throw new Error('Cannot switch media playlist from ' + this.state);
        }

        var startingState = this.state;

        // find the playlist object if the target playlist has been specified by URI
        if (typeof playlist === 'string') {
          if (!this.master.playlists[playlist]) {
            throw new Error('Unknown playlist URI: ' + playlist);
          }
          playlist = this.master.playlists[playlist];
        }

        var mediaChange = !this.media_ || playlist.id !== this.media_.id;

        // switch to previously loaded playlists immediately
        if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
          this.state = 'HAVE_METADATA';
          this.media_ = playlist;

          // trigger media change if the active media has been updated
          if (mediaChange) {
            this.trigger('mediachanging');
            this.trigger('mediachange');
          }
          return;
        }

        // switching to the active playlist is a no-op
        if (!mediaChange) {
          return;
        }

        // switching from an already loaded playlist
        if (this.media_) {
          this.trigger('mediachanging');
        }

        if (!playlist.sidx) {
          // Continue asynchronously if there is no sidx
          // wait one tick to allow haveMaster to run first on a child loader
          this.mediaRequest_ = window_1.setTimeout(this.haveMetadata.bind(this, { startingState: startingState, playlist: playlist }), 0);

          // exit early and don't do sidx work
          return;
        }

        // we have sidx mappings
        var oldMaster = void 0;
        var sidxMapping = void 0;

        // sidxMapping is used when parsing the masterXml, so store
        // it on the masterPlaylistLoader
        if (this.masterPlaylistLoader_) {
          oldMaster = this.masterPlaylistLoader_.master;
          sidxMapping = this.masterPlaylistLoader_.sidxMapping_;
        } else {
          oldMaster = this.master;
          sidxMapping = this.sidxMapping_;
        }

        var sidxKey = generateSidxKey(playlist.sidx);

        // reserve the mapping slot; `.sidx` is filled in by the callback below
        sidxMapping[sidxKey] = {
          sidxInfo: playlist.sidx
        };

        this.request = requestSidx_(playlist.sidx, playlist, this.hls_.xhr, { handleManifestRedirects: this.handleManifestRedirects }, this.sidxRequestFinished_(playlist, oldMaster, startingState, function (newMaster, sidx) {
          if (!newMaster || !sidx) {
            throw new Error('failed to request sidx');
          }

          // update loader's sidxMapping with parsed sidx box
          sidxMapping[sidxKey].sidx = sidx;

          // everything is ready just continue to haveMetadata
          _this3.haveMetadata({
            startingState: startingState,
            playlist: newMaster.playlists[playlist.id]
          });
        }));
      }
    }, {
      key: 'haveMetadata',
      // Finalize a media switch: cache the playlist, refresh it (which also
      // triggers 'loadedplaylist') and fire loadedmetadata/mediachange.
      value: function haveMetadata(_ref) {
        var startingState = _ref.startingState,
            playlist = _ref.playlist;

        this.state = 'HAVE_METADATA';
        this.loadedPlaylists_[playlist.id] = playlist;
        this.mediaRequest_ = null;

        // This will trigger loadedplaylist
        this.refreshMedia_(playlist.id);

        // fire loadedmetadata the first time a media playlist is loaded
        // to resolve setup of media groups
        if (startingState === 'HAVE_MASTER') {
          this.trigger('loadedmetadata');
        } else {
          // trigger media change if the active media has been updated
          this.trigger('mediachange');
        }
      }
    }, {
      key: 'pause',
      // Stop all network and timer activity without discarding loaded data.
      value: function pause() {
        this.stopRequest();
        window_1.clearTimeout(this.mediaUpdateTimeout);
        window_1.clearTimeout(this.minimumUpdatePeriodTimeout_);
        if (this.state === 'HAVE_NOTHING') {
          // If we pause the loader before any data has been retrieved, its as if we never
          // started, so reset to an unstarted state.
          this.started = false;
        }
      }
    }, {
      key: 'load',
      // (Re)start loading. When `isFinalRendition` is set, back off and retry
      // after half a target duration (or 5s) instead of loading immediately.
      value: function load(isFinalRendition) {
        var _this4 = this;

        window_1.clearTimeout(this.mediaUpdateTimeout);
        window_1.clearTimeout(this.minimumUpdatePeriodTimeout_);

        var media = this.media();

        if (isFinalRendition) {
          var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;

          this.mediaUpdateTimeout = window_1.setTimeout(function () {
            return _this4.load();
          }, delay);
          return;
        }

        // because the playlists are internal to the manifest, load should either load the
        // main manifest, or do nothing but trigger an event
        if (!this.started) {
          this.start();
          return;
        }

        // live playlists refresh; VOD just re-announces the loaded playlist
        if (media && !media.endList) {
          this.trigger('mediaupdatetimeout');
        } else {
          this.trigger('loadedplaylist');
        }
      }
17132
    /**
     * Parses the master xml string and updates playlist uri references
     *
     * @return {Object}
     *         The parsed mpd manifest object
     */

    }, {
      key: 'parseMasterXml',
      value: function parseMasterXml() {
        var master = parse(this.masterXml_, {
          manifestUri: this.srcUrl,
          clientOffset: this.clientOffset_,
          sidxMapping: this.sidxMapping_
        });

        master.uri = this.srcUrl;

        // Set up phony URIs for the playlists since we won't have external URIs for DASH
        // but reference playlists by their URI throughout the project
        // TODO: Should we create the dummy uris in mpd-parser as well (leaning towards yes).
        for (var i = 0; i < master.playlists.length; i++) {
          var phonyUri = 'placeholder-uri-' + i;

          master.playlists[i].uri = phonyUri;
        }

        // set up phony URIs for the media group playlists since we won't have external
        // URIs for DASH but reference playlists by their URI throughout the project
        forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
          if (properties.playlists && properties.playlists.length) {
            var _phonyUri = 'placeholder-uri-' + mediaType + '-' + groupKey + '-' + labelKey;
            var id = createPlaylistID(0, _phonyUri);

            properties.playlists[0].uri = _phonyUri;
            properties.playlists[0].id = id;
            // setup ID and URI references (URI for backwards compatibility)
            master.playlists[id] = properties.playlists[0];
            master.playlists[_phonyUri] = properties.playlists[0];
          }
        });

        // resolve segment/group URIs against the manifest location
        setupMediaPlaylists(master);
        resolveMediaGroupUris(master);

        return master;
      }
    }, {
      key: 'start',
      // Begin loading: child loaders short-circuit to haveMaster_ on the next
      // tick; the master loader requests the MPD, records when it was loaded
      // (for clock sync) and then synchronizes the client/server clock.
      value: function start() {
        var _this5 = this;

        this.started = true;

        // We don't need to request the master manifest again
        // Call this asynchronously to match the xhr request behavior below
        if (this.masterPlaylistLoader_) {
          this.mediaRequest_ = window_1.setTimeout(this.haveMaster_.bind(this), 0);
          return;
        }

        // request the specified URL
        this.request = this.hls_.xhr({
          uri: this.srcUrl,
          withCredentials: this.withCredentials
        }, function (error, req) {
          // disposed
          if (!_this5.request) {
            return;
          }

          // clear the loader's request reference
          _this5.request = null;

          if (error) {
            _this5.error = {
              status: req.status,
              message: 'DASH playlist request error at URL: ' + _this5.srcUrl,
              responseText: req.responseText,
              // MEDIA_ERR_NETWORK
              code: 2
            };
            if (_this5.state === 'HAVE_NOTHING') {
              _this5.started = false;
            }
            return _this5.trigger('error');
          }

          _this5.masterXml_ = req.responseText;

          // prefer the response Date header as the manifest load time
          if (req.responseHeaders && req.responseHeaders.date) {
            _this5.masterLoaded_ = Date.parse(req.responseHeaders.date);
          } else {
            _this5.masterLoaded_ = Date.now();
          }

          _this5.srcUrl = resolveManifestRedirect(_this5.handleManifestRedirects, _this5.srcUrl, req);

          _this5.syncClientServerClock_(_this5.onClientServerClockSync_.bind(_this5));
        });
      }
17234
    /**
     * Parses the master xml for UTCTiming node to sync the client clock to the server
     * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
     *
     * @param {Function} done
     *        Function to call when clock sync has completed
     */

    }, {
      key: 'syncClientServerClock_',
      value: function syncClientServerClock_(done) {
        var _this6 = this;

        var utcTiming = parseUTCTiming(this.masterXml_);

        // No UTCTiming element found in the mpd. Use Date header from mpd request as the
        // server clock
        if (utcTiming === null) {
          this.clientOffset_ = this.masterLoaded_ - Date.now();
          return done();
        }

        // DIRECT: the manifest carries the server time itself
        if (utcTiming.method === 'DIRECT') {
          this.clientOffset_ = utcTiming.value - Date.now();
          return done();
        }

        // HEAD or GET: ask the timing server for the current time
        this.request = this.hls_.xhr({
          uri: resolveUrl(this.srcUrl, utcTiming.value),
          method: utcTiming.method,
          withCredentials: this.withCredentials
        }, function (error, req) {
          // disposed
          if (!_this6.request) {
            return;
          }

          if (error) {
            // sync request failed, fall back to using date header from mpd
            // TODO: log warning
            _this6.clientOffset_ = _this6.masterLoaded_ - Date.now();
            return done();
          }

          var serverTime = void 0;

          if (utcTiming.method === 'HEAD') {
            if (!req.responseHeaders || !req.responseHeaders.date) {
              // expected date header not preset, fall back to using date header from mpd
              // TODO: log warning
              serverTime = _this6.masterLoaded_;
            } else {
              serverTime = Date.parse(req.responseHeaders.date);
            }
          } else {
            serverTime = Date.parse(req.responseText);
          }

          _this6.clientOffset_ = serverTime - Date.now();

          done();
        });
      }
    }, {
      key: 'haveMaster_',
      // Transition to HAVE_MASTER. The master loader parses the MPD and
      // announces it; a child loader instead selects the playlist it was
      // constructed with.
      value: function haveMaster_() {
        this.state = 'HAVE_MASTER';
        // clear media request
        this.mediaRequest_ = null;

        if (!this.masterPlaylistLoader_) {
          this.master = this.parseMasterXml();
          // We have the master playlist at this point, so
          // trigger this to allow MasterPlaylistController
          // to make an initial playlist selection
          this.trigger('loadedplaylist');
        } else if (!this.media_) {
          // no media playlist was specifically selected so select
          // the one the child playlist loader was created with
          this.media(this.childPlaylist_);
        }
      }

      /**
       * Handler for after client/server clock synchronization has happened. Sets up
       * xml refresh timer if specificed by the manifest.
       */

    }, {
      key: 'onClientServerClockSync_',
      value: function onClientServerClockSync_() {
        var _this7 = this;

        this.haveMaster_();

        // default to the first playlist if nothing else chose one
        if (!this.hasPendingRequest() && !this.media_) {
          this.media(this.master.playlists[0]);
        }

        // TODO: minimumUpdatePeriod can have a value of 0. Currently the manifest will not
        // be refreshed when this is the case. The inter-op guide says that when the
        // minimumUpdatePeriod is 0, the manifest should outline all currently available
        // segments, but future segments may require an update. I think a good solution
        // would be to update the manifest at the same rate that the media playlists
        // are "refreshed", i.e. every targetDuration.
        if (this.master && this.master.minimumUpdatePeriod) {
          this.minimumUpdatePeriodTimeout_ = window_1.setTimeout(function () {
            _this7.trigger('minimumUpdatePeriod');
          }, this.master.minimumUpdatePeriod);
        }
      }
17346
    /**
     * Sends request to refresh the master xml and updates the parsed master manifest
     * TODO: Does the client offset need to be recalculated when the xml is refreshed?
     */

    }, {
      key: 'refreshXml_',
      value: function refreshXml_() {
        var _this8 = this;

        // The srcUrl here *may* need to pass through handleManifestsRedirects when
        // sidx is implemented
        this.request = this.hls_.xhr({
          uri: this.srcUrl,
          withCredentials: this.withCredentials
        }, function (error, req) {
          // disposed
          if (!_this8.request) {
            return;
          }

          // clear the loader's request reference
          _this8.request = null;

          if (error) {
            _this8.error = {
              status: req.status,
              message: 'DASH playlist request error at URL: ' + _this8.srcUrl,
              responseText: req.responseText,
              // MEDIA_ERR_NETWORK
              code: 2
            };
            if (_this8.state === 'HAVE_NOTHING') {
              _this8.started = false;
            }
            return _this8.trigger('error');
          }

          _this8.masterXml_ = req.responseText;

          // This will filter out updated sidx info from the mapping
          _this8.sidxMapping_ = filterChangedSidxMappings(_this8.masterXml_, _this8.srcUrl, _this8.clientOffset_, _this8.sidxMapping_);

          var master = _this8.parseMasterXml();
          var updatedMaster = updateMaster$1(_this8.master, master);
          var currentSidxInfo = _this8.media().sidx;

          if (updatedMaster) {
            if (currentSidxInfo) {
              var sidxKey = generateSidxKey(currentSidxInfo);

              // the sidx was updated, so the previous mapping was removed
              if (!_this8.sidxMapping_[sidxKey]) {
                var playlist = _this8.media();

                _this8.request = requestSidx_(playlist.sidx, playlist, _this8.hls_.xhr, { handleManifestRedirects: _this8.handleManifestRedirects }, _this8.sidxRequestFinished_(playlist, master, _this8.state, function (newMaster, sidx) {
                  if (!newMaster || !sidx) {
                    throw new Error('failed to request sidx on minimumUpdatePeriod');
                  }

                  // update loader's sidxMapping with parsed sidx box
                  _this8.sidxMapping_[sidxKey].sidx = sidx;

                  _this8.minimumUpdatePeriodTimeout_ = window_1.setTimeout(function () {
                    _this8.trigger('minimumUpdatePeriod');
                  }, _this8.master.minimumUpdatePeriod);

                  // TODO: do we need to reload the current playlist?
                  _this8.refreshMedia_(_this8.media().id);

                  return;
                }));
              }
              // NOTE(review): when the current sidx mapping is still present,
              // `updatedMaster` appears to never be assigned to `_this8.master`
              // in this branch — confirm whether that is intentional.
            } else {

              _this8.master = updatedMaster;
            }
          }

          // always rearm the minimumUpdatePeriod timer
          _this8.minimumUpdatePeriodTimeout_ = window_1.setTimeout(function () {
            _this8.trigger('minimumUpdatePeriod');
          }, _this8.master.minimumUpdatePeriod);
        });
      }
17431
    /**
     * Refreshes the media playlist by re-parsing the master xml and updating playlist
     * references. If this is an alternate loader, the updated parsed manifest is retrieved
     * from the master loader.
     */

    }, {
      key: 'refreshMedia_',
      value: function refreshMedia_(mediaID) {
        var _this9 = this;

        if (!mediaID) {
          throw new Error('refreshMedia_ must take a media id');
        }

        var oldMaster = void 0;
        var newMaster = void 0;

        // a child loader re-parses via (and writes back to) the master loader
        if (this.masterPlaylistLoader_) {
          oldMaster = this.masterPlaylistLoader_.master;
          newMaster = this.masterPlaylistLoader_.parseMasterXml();
        } else {
          oldMaster = this.master;
          newMaster = this.parseMasterXml();
        }

        // null means nothing changed
        var updatedMaster = updateMaster$1(oldMaster, newMaster);

        if (updatedMaster) {
          if (this.masterPlaylistLoader_) {
            this.masterPlaylistLoader_.master = updatedMaster;
          } else {
            this.master = updatedMaster;
          }
          this.media_ = updatedMaster.playlists[mediaID];
        } else {
          this.media_ = newMaster.playlists[mediaID];
          this.trigger('playlistunchanged');
        }

        // live playlists are polled again after a refresh-delay
        if (!this.media().endList) {
          this.mediaUpdateTimeout = window_1.setTimeout(function () {
            _this9.trigger('mediaupdatetimeout');
          }, refreshDelay(this.media(), !!updatedMaster));
        }

        this.trigger('loadedplaylist');
      }
17480 }]);
17481 return DashPlaylistLoader;
17482 }(EventTarget$1);
17483
/**
 * Create a namespaced debug logger for the given source name. When
 * videojs debug logging is unavailable, returns a no-op function.
 *
 * @param {String} source name prepended to every log line
 * @return {Function} a logging function
 */
var logger = function logger(source) {
  var debug = videojs.log.debug;

  if (!debug) {
    return function () {};
  }

  return debug.bind(videojs, 'VHS:', source + ' >');
};
17491
/** Shared do-nothing function, used as a default callback. */
function noop() {}
17493
/**
 * @file source-updater.js
 */

/**
 * A queue of callbacks to be serialized and applied when a
 * MediaSource and its associated SourceBuffers are not in the
 * updating state. It is used by the segment loader to update the
 * underlying SourceBuffers when new data is loaded, for instance.
 *
 * @class SourceUpdater
 * @param {MediaSource} mediaSource the MediaSource to create the
 * SourceBuffer from
 * @param {String} mimeType the desired MIME type of the underlying
 * SourceBuffer
 * @param {String} type a label for this updater (used only in log output)
 * @param {Object} sourceBufferEmitter an event emitter that fires when a source buffer is
 * added to the media source
 */

var SourceUpdater = function () {
  function SourceUpdater(mediaSource, mimeType, type, sourceBufferEmitter) {
    classCallCheck(this, SourceUpdater);

    // queued [operation, done] pairs, drained one at a time by runCallback_
    this.callbacks_ = [];
    // completion handler for the operation currently in flight (null when idle)
    this.pendingCallback_ = null;
    this.timestampOffset_ = 0;
    this.mediaSource = mediaSource;
    // becomes true after the first appendBuffer(); abort() and remove()
    // are no-ops until an append has been processed
    this.processedAppend_ = false;
    this.type_ = type;
    this.mimeType_ = mimeType;
    this.logger_ = logger('SourceUpdater[' + type + '][' + mimeType + ']');

    // a SourceBuffer can only be added once the MediaSource is open
    if (mediaSource.readyState === 'closed') {
      mediaSource.addEventListener('sourceopen', this.createSourceBuffer_.bind(this, mimeType, sourceBufferEmitter));
    } else {
      this.createSourceBuffer_(mimeType, sourceBufferEmitter);
    }
  }

  createClass(SourceUpdater, [{
    key: 'createSourceBuffer_',

    /**
     * Create the underlying SourceBuffer. When a sourceBufferEmitter is
     * provided (demuxed audio/video), defer start_() until the sibling
     * source buffer has also been added.
     *
     * @private
     * @param {String} mimeType MIME type passed to addSourceBuffer
     * @param {Object} [sourceBufferEmitter] emitter firing 'sourcebufferadded'
     */
    value: function createSourceBuffer_(mimeType, sourceBufferEmitter) {
      var _this = this;

      this.sourceBuffer_ = this.mediaSource.addSourceBuffer(mimeType);

      this.logger_('created SourceBuffer');

      if (sourceBufferEmitter) {
        sourceBufferEmitter.trigger('sourcebufferadded');

        if (this.mediaSource.sourceBuffers.length < 2) {
          // There's another source buffer we must wait for before we can start updating
          // our own (or else we can get into a bad state, i.e., appending video/audio data
          // before the other video/audio source buffer is available and leading to a video
          // or audio only buffer).
          sourceBufferEmitter.on('sourcebufferadded', function () {
            _this.start_();
          });
          return;
        }
      }

      this.start_();
    }
  }, {
    key: 'start_',

    /**
     * Begin processing the queue: install the 'updateend' handler that
     * resolves the pending operation and kicks off the next one.
     *
     * @private
     */
    value: function start_() {
      var _this2 = this;

      this.started_ = true;

      // run completion handlers and process callbacks as updateend
      // events fire
      this.onUpdateendCallback_ = function () {
        var pendingCallback = _this2.pendingCallback_;

        _this2.pendingCallback_ = null;
        // `removing` is a custom flag set by remove() below (not part of
        // the SourceBuffer spec); clear it now that the operation ended
        _this2.sourceBuffer_.removing = false;

        _this2.logger_('buffered [' + printableRange(_this2.buffered()) + ']');

        if (pendingCallback) {
          pendingCallback();
        }

        _this2.runCallback_();
      };

      this.sourceBuffer_.addEventListener('updateend', this.onUpdateendCallback_);

      this.runCallback_();
    }

    /**
     * Aborts the current segment and resets the segment parser. No-op
     * until at least one append has been processed.
     *
     * @param {Function} done function to call when done
     * @see http://w3c.github.io/media-source/#widl-SourceBuffer-abort-void
     */

  }, {
    key: 'abort',
    value: function abort(done) {
      var _this3 = this;

      if (this.processedAppend_) {
        this.queueCallback_(function () {
          _this3.sourceBuffer_.abort();
        }, done);
      }
    }

    /**
     * Queue an update to append an ArrayBuffer.
     *
     * @param {Object} config object with `bytes` (ArrayBuffer) and an
     * optional `videoSegmentTimingInfoCallback` that is attached as a
     * 'videoSegmentTimingInfo' listener for the duration of the append
     * @param {Function} done the function to call when done
     * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
     */

  }, {
    key: 'appendBuffer',
    value: function appendBuffer(config, done) {
      var _this4 = this;

      this.processedAppend_ = true;
      this.queueCallback_(function () {
        if (config.videoSegmentTimingInfoCallback) {
          _this4.sourceBuffer_.addEventListener('videoSegmentTimingInfo', config.videoSegmentTimingInfoCallback);
        }
        _this4.sourceBuffer_.appendBuffer(config.bytes);
      }, function () {
        // remove the one-shot timing listener once the append completes
        if (config.videoSegmentTimingInfoCallback) {
          _this4.sourceBuffer_.removeEventListener('videoSegmentTimingInfo', config.videoSegmentTimingInfoCallback);
        }
        done();
      });
    }

    /**
     * Indicates what TimeRanges are buffered in the managed SourceBuffer.
     * Returns an empty TimeRanges when the buffer has not been created yet.
     *
     * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-buffered
     */

  }, {
    key: 'buffered',
    value: function buffered() {
      if (!this.sourceBuffer_) {
        return videojs.createTimeRanges();
      }
      return this.sourceBuffer_.buffered;
    }

    /**
     * Queue an update to remove a time range from the buffer. No-op until
     * at least one append has been processed.
     *
     * @param {Number} start where to start the removal
     * @param {Number} end where to end the removal
     * @param {Function} [done=noop] optional callback to be executed when the remove
     * operation is complete
     * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
     */

  }, {
    key: 'remove',
    value: function remove(start, end) {
      var _this5 = this;

      var done = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : noop;

      if (this.processedAppend_) {
        this.queueCallback_(function () {
          _this5.logger_('remove [' + start + ' => ' + end + ']');
          // flag consumed by dispose() to defer teardown until the
          // in-flight remove finishes (cleared in onUpdateendCallback_)
          _this5.sourceBuffer_.removing = true;
          _this5.sourceBuffer_.remove(start, end);
        }, done);
      }
    }

    /**
     * Whether the underlying sourceBuffer is updating or not
     *
     * @return {Boolean} the updating status of the SourceBuffer
     */

  }, {
    key: 'updating',
    value: function updating() {
      // we are updating if the sourcebuffer is updating (or missing) or
      return !this.sourceBuffer_ || this.sourceBuffer_.updating ||
      // if we have a pending callback that is not our internal noop
      !!this.pendingCallback_ && this.pendingCallback_ !== noop;
    }

    /**
     * Set/get the timestampoffset on the SourceBuffer
     *
     * @return {Number} the timestamp offset
     */

  }, {
    key: 'timestampOffset',
    value: function timestampOffset(offset) {
      var _this6 = this;

      if (typeof offset !== 'undefined') {
        this.queueCallback_(function () {
          _this6.sourceBuffer_.timestampOffset = offset;
          // setting timestampOffset fires no 'updateend', so advance the
          // queue manually
          _this6.runCallback_();
        });
        this.timestampOffset_ = offset;
      }
      return this.timestampOffset_;
    }

    /**
     * Queue a callback to run
     *
     * @private
     * @param {Function} callback the operation to run when the buffer is idle
     * @param {Function} [done] completion handler invoked on 'updateend'
     */

  }, {
    key: 'queueCallback_',
    value: function queueCallback_(callback, done) {
      this.callbacks_.push([callback.bind(this), done]);
      this.runCallback_();
    }

    /**
     * Run the next queued callback, if the updater has started and no
     * operation is currently in flight.
     *
     * @private
     */

  }, {
    key: 'runCallback_',
    value: function runCallback_() {
      var callbacks = void 0;

      if (!this.updating() && this.callbacks_.length && this.started_) {
        callbacks = this.callbacks_.shift();
        this.pendingCallback_ = callbacks[1];
        callbacks[0]();
      }
    }

    /**
     * dispose of the source updater and the underlying sourceBuffer
     *
     * NOTE(review): this assumes sourceBuffer_ exists; if dispose() is
     * called before 'sourceopen' ever fired (so createSourceBuffer_ never
     * ran), the removeEventListener call below would throw — TODO confirm
     * callers guarantee the buffer was created.
     */

  }, {
    key: 'dispose',
    value: function dispose() {
      var _this7 = this;

      var disposeFn = function disposeFn() {
        if (_this7.sourceBuffer_ && _this7.mediaSource.readyState === 'open') {
          _this7.sourceBuffer_.abort();
        }
        _this7.sourceBuffer_.removeEventListener('updateend', disposeFn);
      };

      this.sourceBuffer_.removeEventListener('updateend', this.onUpdateendCallback_);
      if (this.sourceBuffer_.removing) {
        // wait for the in-flight remove() to finish before aborting
        this.sourceBuffer_.addEventListener('updateend', disposeFn);
      } else {
        disposeFn();
      }
    }
  }]);
  return SourceUpdater;
}();
17764
// Tunable ABR/buffering constants consumed elsewhere in this bundle.
var Config = {
  // Forward-buffer goal; grows from GOAL_BUFFER_LENGTH toward
  // MAX_GOAL_BUFFER_LENGTH at GOAL_BUFFER_LENGTH_RATE (presumably seconds
  // of media per second of playback — confirm against consumers).
  GOAL_BUFFER_LENGTH: 30,
  MAX_GOAL_BUFFER_LENGTH: 60,
  GOAL_BUFFER_LENGTH_RATE: 1,
  // Default bandwidth estimate before any measurement: 0.5 MB/s
  // (4194304 bits per second).
  INITIAL_BANDWIDTH: 4194304,
  // A fudge factor to apply to advertised playlist bitrates to account for
  // temporary fluctuations in client bandwidth
  BANDWIDTH_VARIANCE: 1.2,
  // How much of the buffer must be filled before we consider upswitching
  BUFFER_LOW_WATER_LINE: 0,
  MAX_BUFFER_LOW_WATER_LINE: 30,
  BUFFER_LOW_WATER_LINE_RATE: 1
};
17779
// Local aliases for the binary helpers defined earlier in this bundle.
var toUnsigned$2 = bin.toUnsigned;
var toHexString$1 = bin.toHexString;

// Forward declarations for the mp4 probe helpers assigned below.
var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks;
17784
/**
 * Parses an MP4 initialization segment and extracts the timescale
 * values for any declared tracks. Timescale values indicate the
 * number of clock ticks per second to assume for time-based values
 * elsewhere in the MP4.
 *
 * The track id is read from each `moov > trak > tkhd` box and the
 * matching timescale from `moov > trak > mdia > mdhd`.
 *
 * @param init {Uint8Array} the bytes of the init segment
 * @return {object} a hash of track ids to timescale values or null if
 * the init segment is malformed.
 */
timescale = function timescale(init) {
  var traks = mp4Inspector.findBox(init, ['moov', 'trak']);
  var acc = {};
  var i;
  var trak;
  var tkhd;
  var mdhd;
  var version;
  var index;
  var id;

  for (i = 0; i < traks.length; i++) {
    trak = traks[i];

    tkhd = mp4Inspector.findBox(trak, ['tkhd'])[0];
    if (!tkhd) {
      // a missing track header marks the init segment as malformed
      acc = null;
      continue;
    }
    // version 0 boxes use 32-bit times, which shifts the id field offset
    version = tkhd[0];
    index = version === 0 ? 12 : 20;
    id = toUnsigned$2(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);

    mdhd = mp4Inspector.findBox(trak, ['mdia', 'mdhd'])[0];
    if (!mdhd) {
      acc = null;
      continue;
    }
    version = mdhd[0];
    index = version === 0 ? 12 : 20;
    acc[id] = toUnsigned$2(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
  }

  return acc;
};
17829
/**
 * Determine the base media decode start time, in seconds, for an MP4
 * fragment. If multiple fragments are specified, the earliest time is
 * returned.
 *
 * The base media decode time can be parsed from track fragment
 * metadata:
 * ```
 * moof > traf > tfdt.baseMediaDecodeTime
 * ```
 * It requires the timescale value from the mdhd to interpret.
 *
 * @param timescale {object} a hash of track ids to timescale values.
 * @return {number} the earliest base media decode start time for the
 * fragment, in seconds (0 if none can be determined)
 */
startTime = function startTime(timescale, fragment) {
  var startTimes = [];

  // collect a start time, in seconds, for every tfhd in the fragment
  mp4Inspector.findBox(fragment, ['moof', 'traf']).forEach(function (traf) {
    var tfdts = mp4Inspector.findBox(traf, ['tfdt']);

    mp4Inspector.findBox(traf, ['tfhd']).forEach(function (tfhd) {
      // the track id lives at byte offset 4 of the tfhd
      var id = toUnsigned$2(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]);
      // assume a 90kHz clock if no timescale was specified
      var scale = timescale[id] || 90e3;
      var baseTime;

      if (tfdts.length) {
        var tfdt = tfdts[0];

        baseTime = toUnsigned$2(tfdt[4] << 24 | tfdt[5] << 16 | tfdt[6] << 8 | tfdt[7]);
        if (tfdt[0] === 1) {
          // version 1 stores a 64-bit value; fold in the low 32 bits
          baseTime *= Math.pow(2, 32);
          baseTime += toUnsigned$2(tfdt[8] << 24 | tfdt[9] << 16 | tfdt[10] << 8 | tfdt[11]);
        }
      }

      // a missing (or zero) base time is treated as unknown
      baseTime = baseTime || Infinity;

      // convert base time to seconds
      startTimes.push(baseTime / scale);
    });
  });

  // return the minimum across tracks, or 0 when nothing usable was found
  var result = Math.min.apply(null, startTimes);
  return isFinite(result) ? result : 0;
};
17885
/**
 * Determine the composition start, in seconds, for an MP4
 * fragment.
 *
 * The composition start time of a fragment can be calculated using the base
 * media decode time, composition time offset, and timescale, as follows:
 *
 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
 *
 * All of the aforementioned information is contained within a media fragment's
 * `traf` box, except for timescale info, which comes from the initialization
 * segment, so a track id (also contained within a `traf`) is also necessary to
 * associate it with a timescale
 *
 * @param timescales {object} - a hash of track ids to timescale values.
 * @param fragment {Uint8Array} - the bytes of a media segment
 * @return {number} the composition start time for the fragment, in seconds
 **/
compositionStartTime = function compositionStartTime(timescales, fragment) {
  var trafBoxes = mp4Inspector.findBox(fragment, ['moof', 'traf']);
  var baseMediaDecodeTime = 0;
  var compositionTimeOffset = 0;
  var trackId;

  if (trafBoxes && trafBoxes.length) {
    // The spec states that track run samples contained within a `traf` box are
    // contiguous, but it does not explicitly state whether the `traf` boxes
    // themselves are contiguous. We assume that they are, so only the first
    // is needed to calculate the start time.
    var parsedTraf = mp4Inspector.parseTraf(trafBoxes[0]);

    parsedTraf.boxes.forEach(function (box) {
      switch (box.type) {
        case 'tfhd':
          trackId = box.trackId;
          break;
        case 'tfdt':
          baseMediaDecodeTime = box.baseMediaDecodeTime;
          break;
        case 'trun':
          if (box.samples.length) {
            compositionTimeOffset = box.samples[0].compositionTimeOffset || 0;
          }
          break;
        default:
          break;
      }
    });
  }

  // Get timescale for this specific track. Assume a 90kHz clock if no
  // timescale was specified.
  var trackTimescale = timescales[trackId] || 90e3;

  // return the composition start time, in seconds
  return (baseMediaDecodeTime + compositionTimeOffset) / trackTimescale;
};
17935
/**
 * Find the trackIds of the video tracks in this source.
 * Found by parsing the Handler Reference and Track Header Boxes:
 * moov > trak > mdia > hdlr
 * moov > trak > tkhd
 *
 * @param {Uint8Array} init - The bytes of the init segment for this source
 * @return {Number[]} A list of trackIds
 *
 * @see ISO-BMFF-12/2015, Section 8.4.3
 **/
getVideoTrackIds = function getVideoTrackIds(init) {
  var traks = mp4Inspector.findBox(init, ['moov', 'trak']);
  var videoTrackIds = [];

  traks.forEach(function (trak) {
    var hdlrs = mp4Inspector.findBox(trak, ['mdia', 'hdlr']);
    var tkhds = mp4Inspector.findBox(trak, ['tkhd']);

    for (var i = 0; i < hdlrs.length; i++) {
      // only 'vide' handlers describe video tracks
      if (mp4Inspector.parseType(hdlrs[i].subarray(8, 12)) !== 'vide') {
        continue;
      }

      var tkhd = tkhds[i];
      var view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);

      // version 0 tkhd boxes use 32-bit times, shifting the track id offset
      videoTrackIds.push(view.getUint8(0) === 0 ? view.getUint32(12) : view.getUint32(20));
    }
  });

  return videoTrackIds;
};
17974
/**
 * Get all the video, audio, and hint tracks from a non fragmented
 * mp4 segment's init data.
 *
 * Per trak, extracts:
 *   - id from `tkhd` (field offset depends on the tkhd version byte)
 *   - type from `mdia > hdlr` ('vide' -> 'video', 'soun' -> 'audio')
 *   - codec string from `mdia > minf > stbl > stsd`, refined with
 *     avc1/mp4a profile details when the codec config box parses
 *   - timescale from `mdia > mdhd`
 *
 * @param {Uint8Array} init - the bytes of the init segment
 * @return {Object[]} one descriptor per trak; each property is present
 * only when its source box was found and parsed
 */
getTracks = function getTracks(init) {
  var traks = mp4Inspector.findBox(init, ['moov', 'trak']);
  var tracks = [];

  traks.forEach(function (trak) {
    var track = {};
    var tkhd = mp4Inspector.findBox(trak, ['tkhd'])[0];
    var view, tkhdVersion;

    // id
    if (tkhd) {
      view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      tkhdVersion = view.getUint8(0);

      // version 0 boxes use 32-bit times, placing the id at a smaller offset
      track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
    }

    var hdlr = mp4Inspector.findBox(trak, ['mdia', 'hdlr'])[0];

    // type
    if (hdlr) {
      var type = mp4Inspector.parseType(hdlr.subarray(8, 12));

      if (type === 'vide') {
        track.type = 'video';
      } else if (type === 'soun') {
        track.type = 'audio';
      } else {
        track.type = type;
      }
    }

    // codec
    var stsd = mp4Inspector.findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];

    if (stsd) {
      var sampleDescriptions = stsd.subarray(8);
      // gives the codec type string
      track.codec = mp4Inspector.parseType(sampleDescriptions.subarray(4, 8));

      var codecBox = mp4Inspector.findBox(sampleDescriptions, [track.codec])[0];
      var codecConfig, codecConfigType;

      if (codecBox) {
        // https://tools.ietf.org/html/rfc6381#section-3.3
        if (/^[a-z]vc[1-9]$/i.test(track.codec)) {
          // we don't need anything but the "config" parameter of the
          // avc1 codecBox
          codecConfig = codecBox.subarray(78);
          codecConfigType = mp4Inspector.parseType(codecConfig.subarray(4, 8));

          if (codecConfigType === 'avcC' && codecConfig.length > 11) {
            track.codec += '.';

            // left padded with zeroes for single digit hex
            // profile idc
            track.codec += toHexString$1(codecConfig[9]);
            // the byte containing the constraint_set flags
            track.codec += toHexString$1(codecConfig[10]);
            // level idc
            track.codec += toHexString$1(codecConfig[11]);
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'avc1.4d400d';
          }
        } else if (/^mp4[a,v]$/i.test(track.codec)) {
          // we do not need anything but the streamDescriptor of the mp4a codecBox
          codecConfig = codecBox.subarray(28);
          codecConfigType = mp4Inspector.parseType(codecConfig.subarray(4, 8));

          if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
            track.codec += '.' + toHexString$1(codecConfig[19]);
            // this value is only a single digit
            track.codec += '.' + toHexString$1(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'mp4a.40.2';
          }
        } else {
          // TODO: show a warning? for unknown codec type
        }
      }
    }

    // timescale
    var mdhd = mp4Inspector.findBox(trak, ['mdia', 'mdhd'])[0];

    if (mdhd && tkhd) {
      // FIX: derive the field offset from the mdhd's own version byte
      // instead of reusing the tkhd version parsed above. The two boxes
      // may use different versions, and version 1 boxes store 64-bit
      // times that shift the timescale field from offset 12 to 20. This
      // matches the standalone `timescale` parser defined earlier in
      // this file, which reads mdhd[0].
      var index = mdhd[0] === 0 ? 12 : 20;

      track.timescale = toUnsigned$2(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
    }

    tracks.push(track);
  });

  return tracks;
};
18078
// Public probe API: mp4 timing/track inspection helpers assembled above,
// plus re-exports of mp4 inspector utilities.
var probe = {
  // export mp4 inspector's findBox and parseType for backwards compatibility
  findBox: mp4Inspector.findBox,
  parseType: mp4Inspector.parseType,
  timescale: timescale,
  startTime: startTime,
  compositionStartTime: compositionStartTime,
  videoTrackIds: getVideoTrackIds,
  tracks: getTracks
};
18089
// Error codes attached to request-failure objects (see handleErrors below)
// so downstream handlers can distinguish failure modes. Note that FAILURE
// is a positive value while TIMEOUT/ABORTED are negative sentinels.
var REQUEST_ERRORS = {
  FAILURE: 2,
  TIMEOUT: -101,
  ABORTED: -102
};
18095
/**
 * Abort every request in a group of XHRs.
 *
 * @param {Object[]} activeXhrs - the in-flight XHR requests to abort
 */
var abortAll = function abortAll(activeXhrs) {
  for (var i = 0; i < activeXhrs.length; i++) {
    activeXhrs[i].abort();
  }
};
18106
/**
 * Gather important bandwidth stats once a request has completed.
 *
 * @param {Object} request - the XHR request from which to gather stats
 * @return {Object} bandwidth, bytesReceived and roundTripTime (the latter
 * two default to 0 when missing)
 */
var getRequestStats = function getRequestStats(request) {
  var stats = { bandwidth: request.bandwidth };

  stats.bytesReceived = request.bytesReceived || 0;
  stats.roundTripTime = request.roundTripTime || 0;
  return stats;
};
18119
/**
 * If possible gather bandwidth stats as a request is in progress.
 *
 * @param {Event} progressEvent - an event object from an XHR's progress event
 * @return {Object} bandwidth, bytesReceived and roundTripTime so far
 */
var getProgressStats = function getProgressStats(progressEvent) {
  var request = progressEvent.target;
  var roundTripTime = (Date.now() - request.requestTime) || 0;
  var bytesReceived = progressEvent.loaded;

  // This can be Infinity when roundTripTime is 0, but that is ok because
  // progress-time bandwidth is only used to decide whether to abort a
  // request early due to insufficient bandwidth.
  var bandwidth = Math.floor(bytesReceived / roundTripTime * 8 * 1000);

  return {
    bandwidth: bandwidth,
    bytesReceived: bytesReceived,
    roundTripTime: roundTripTime
  };
};
18143
/**
 * Handle all error conditions in one place and return an object
 * with all the information.
 *
 * @param {Error|null} error - if non-null signals an error occured with the XHR
 * @param {Object} request - the XHR request that possibly generated the error
 * @return {Object|null} an error descriptor, or null when no error occurred
 */
var handleErrors = function handleErrors(error, request) {
  // build a uniform error descriptor for the given message prefix and code
  var failure = function failure(messagePrefix, code) {
    return {
      status: request.status,
      message: messagePrefix + request.uri,
      code: code,
      xhr: request
    };
  };

  if (request.timedout) {
    return failure('HLS request timed-out at URL: ', REQUEST_ERRORS.TIMEOUT);
  }

  if (request.aborted) {
    return failure('HLS request aborted at URL: ', REQUEST_ERRORS.ABORTED);
  }

  if (error) {
    return failure('HLS request errored at URL: ', REQUEST_ERRORS.FAILURE);
  }

  return null;
};
18181
/**
 * Handle responses for key data and convert the key data to the correct format
 * for the decryption step later.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @param {Function} finishProcessingFn - a callback to execute to continue processing
 * this request
 * @return {Function} an (error, request) response handler
 */
var handleKeyResponse = function handleKeyResponse(segment, finishProcessingFn) {
  return function (error, request) {
    var response = request.response;
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    // an AES-128 key is exactly 16 bytes; anything else is invalid
    if (response.byteLength !== 16) {
      return finishProcessingFn({
        status: request.status,
        message: 'Invalid HLS key at URL: ' + request.uri,
        code: REQUEST_ERRORS.FAILURE,
        xhr: request
      }, segment);
    }

    // repackage the key as four big-endian 32-bit words for the decrypter
    var view = new DataView(response);
    var words = [];

    for (var offset = 0; offset < 16; offset += 4) {
      words.push(view.getUint32(offset));
    }

    segment.key.bytes = new Uint32Array(words);
    return finishProcessingFn(null, segment);
  };
};
18215
/**
 * Handle init-segment responses.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @param {Object} captionParser - optional caption parser, initialized lazily
 * @param {Function} finishProcessingFn - a callback to execute to continue processing
 * this request
 * @return {Function} an (error, request) response handler
 */
var handleInitSegmentResponse = function handleInitSegmentResponse(segment, captionParser, finishProcessingFn) {
  return function (error, request) {
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    // stop processing if received empty content
    if (request.response.byteLength === 0) {
      return finishProcessingFn({
        status: request.status,
        message: 'Empty HLS segment content at URL: ' + request.uri,
        code: REQUEST_ERRORS.FAILURE,
        xhr: request
      }, segment);
    }

    segment.map.bytes = new Uint8Array(request.response);

    // Initialize CaptionParser if it hasn't been yet
    if (captionParser && !captionParser.isInitialized()) {
      captionParser.init();
    }

    // probe the init segment for per-track timescales and video track ids;
    // both are used later when parsing media segments
    segment.map.timescales = probe.timescale(segment.map.bytes);
    segment.map.videoTrackIds = probe.videoTrackIds(segment.map.bytes);

    return finishProcessingFn(null, segment);
  };
};
18256
/**
 * Response handler for segment-requests being sure to set the correct
 * property depending on whether the segment is encryped or not.
 * Also records and keeps track of stats that are used for ABR purposes.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @param {Object} captionParser - optional caption parser, initialized lazily
 * @param {Function} finishProcessingFn - a callback to execute to continue processing
 * this request
 * @return {Function} an (error, request) response handler
 */
var handleSegmentResponse = function handleSegmentResponse(segment, captionParser, finishProcessingFn) {
  return function (error, request) {
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    // stop processing if received empty content
    if (request.response.byteLength === 0) {
      return finishProcessingFn({
        status: request.status,
        message: 'Empty HLS segment content at URL: ' + request.uri,
        code: REQUEST_ERRORS.FAILURE,
        xhr: request
      }, segment);
    }

    segment.stats = getRequestStats(request);

    var bytes = new Uint8Array(request.response);

    if (segment.key) {
      // still encrypted; decrypted later by decryptSegment
      segment.encryptedBytes = bytes;
    } else {
      segment.bytes = bytes;
    }

    // When an init segment (map) is present this is likely an FMP4;
    // run through the CaptionParser in case there are captions.
    if (captionParser && segment.map && segment.map.bytes) {
      // Initialize CaptionParser if it hasn't been yet
      if (!captionParser.isInitialized()) {
        captionParser.init();
      }

      var parsed = captionParser.parse(segment.bytes, segment.map.videoTrackIds, segment.map.timescales);

      if (parsed && parsed.captions) {
        segment.captionStreams = parsed.captionStreams;
        segment.fmp4Captions = parsed.captions;
      }
    }

    return finishProcessingFn(null, segment);
  };
};
18314
/**
 * Decrypt the segment via the decryption web worker.
 *
 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @param {Function} doneFn - a callback that is executed after decryption has completed
 */
var decryptSegment = function decryptSegment(decrypter, segment, doneFn) {
  var onMessage = function onMessage(event) {
    // worker messages are matched to requests by the requestId we send below
    if (event.data.source !== segment.requestId) {
      return;
    }

    decrypter.removeEventListener('message', onMessage);

    var decrypted = event.data.decrypted;

    segment.bytes = new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength);
    return doneFn(null, segment);
  };

  decrypter.addEventListener('message', onMessage);

  // copy the key so its underlying buffer can be transferred to the worker
  var keyBytes = segment.key.bytes.slice ? segment.key.bytes.slice() : new Uint32Array(Array.prototype.slice.call(segment.key.bytes));

  // this is an encrypted segment
  // incrementally decrypt the segment
  decrypter.postMessage(createTransferableMessage({
    source: segment.requestId,
    encrypted: segment.encryptedBytes,
    key: keyBytes,
    iv: segment.key.iv
  }), [segment.encryptedBytes.buffer, keyBytes.buffer]);
};
18353
/**
 * This function waits for all XHRs to finish (with either success or failure)
 * before continuing processing via its callback. The first error seen wins;
 * later responses for the group are ignored.
 *
 * @param {Object} activeXhrs - an object that tracks all XHR requests
 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
 * @param {Function} doneFn - a callback that is executed after all resources have been
 * downloaded and any decryption completed
 * @return {Function} an (error, segment) handler shared by the group's requests
 */
var waitForCompletion = function waitForCompletion(activeXhrs, decrypter, doneFn) {
  var remaining = activeXhrs.length;
  var didError = false;

  return function (error, segment) {
    if (didError) {
      return;
    }

    if (error) {
      didError = true;
      // If there are errors, we have to abort any outstanding requests
      abortAll(activeXhrs);

      // Even though the requests above are aborted, and in theory we could wait until we
      // handle the aborted events from those requests, there are some cases where we may
      // never get an aborted event. For instance, if the network connection is lost and
      // there were two requests, the first may have triggered an error immediately, while
      // the second request remains unsent. In that case, the aborted algorithm will not
      // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
      //
      // We also can't rely on the ready state of the XHR, since the request that
      // triggered the connection error may also show as a ready state of 0 (unsent).
      // Therefore, we have to finish this group of requests immediately after the first
      // seen error.
      return doneFn(error, segment);
    }

    remaining -= 1;

    if (remaining !== 0) {
      return;
    }

    // Keep track of when *all* of the requests have completed
    segment.endOfAllRequests = Date.now();

    if (segment.encryptedBytes) {
      return decryptSegment(decrypter, segment, doneFn);
    }

    // Otherwise, everything is ready just continue
    return doneFn(null, segment);
  };
};
18407
/**
 * Simple progress event callback handler that gathers some stats before
 * executing a provided callback with the `segment` object.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @param {Function} progressFn - a callback that is executed each time a progress event
 * is received
 * @return {Function} a handler for XMLHttpRequest progress events
 */
var handleProgress = function handleProgress(segment, progressFn) {
  return function (event) {
    segment.stats = videojs.mergeOptions(segment.stats, getProgressStats(event));

    // record the time that we receive the first byte of data
    if (segment.stats.bytesReceived && !segment.stats.firstBytesReceivedAt) {
      segment.stats.firstBytesReceivedAt = Date.now();
    }

    return progressFn(event, segment);
  };
};
18430
18431 /**
18432 * Load all resources and does any processing necessary for a media-segment
18433 *
18434 * Features:
18435 * decrypts the media-segment if it has a key uri and an iv
18436 * aborts *all* requests if *any* one request fails
18437 *
18438 * The segment object, at minimum, has the following format:
18439 * {
18440 * resolvedUri: String,
18441 * [byterange]: {
18442 * offset: Number,
18443 * length: Number
18444 * },
18445 * [key]: {
18446 * resolvedUri: String
18447 * [byterange]: {
18448 * offset: Number,
18449 * length: Number
18450 * },
18451 * iv: {
18452 * bytes: Uint32Array
18453 * }
18454 * },
18455 * [map]: {
18456 * resolvedUri: String,
18457 * [byterange]: {
18458 * offset: Number,
18459 * length: Number
18460 * },
18461 * [bytes]: Uint8Array
18462 * }
18463 * }
18464 * ...where [name] denotes optional properties
18465 *
18466 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
18467 * @param {Object} xhrOptions - the base options to provide to all xhr requests
18468 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
18469 * decryption routines
18470 * @param {Object} segment - a simplified copy of the segmentInfo object
18471 * from SegmentLoader
18472 * @param {Function} progressFn - a callback that receives progress events from the main
18473 * segment's xhr request
18474 * @param {Function} doneFn - a callback that is executed only once all requests have
18475 * succeeded or failed
18476 * @returns {Function} a function that, when invoked, immediately aborts all
18477 * outstanding requests
18478 */
18479 var mediaSegmentRequest = function mediaSegmentRequest(xhr, xhrOptions, decryptionWorker, captionParser, segment, progressFn, doneFn) {
18480 var activeXhrs = [];
18481 var finishProcessingFn = waitForCompletion(activeXhrs, decryptionWorker, doneFn);
18482
18483 // optionally, request the decryption key
18484 if (segment.key && !segment.key.bytes) {
18485 var keyRequestOptions = videojs.mergeOptions(xhrOptions, {
18486 uri: segment.key.resolvedUri,
18487 responseType: 'arraybuffer'
18488 });
18489 var keyRequestCallback = handleKeyResponse(segment, finishProcessingFn);
18490 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
18491
18492 activeXhrs.push(keyXhr);
18493 }
18494
18495 // optionally, request the associated media init segment
18496 if (segment.map && !segment.map.bytes) {
18497 var initSegmentOptions = videojs.mergeOptions(xhrOptions, {
18498 uri: segment.map.resolvedUri,
18499 responseType: 'arraybuffer',
18500 headers: segmentXhrHeaders(segment.map)
18501 });
18502 var initSegmentRequestCallback = handleInitSegmentResponse(segment, captionParser, finishProcessingFn);
18503 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
18504
18505 activeXhrs.push(initSegmentXhr);
18506 }
18507
18508 var segmentRequestOptions = videojs.mergeOptions(xhrOptions, {
18509 uri: segment.resolvedUri,
18510 responseType: 'arraybuffer',
18511 headers: segmentXhrHeaders(segment)
18512 });
18513 var segmentRequestCallback = handleSegmentResponse(segment, captionParser, finishProcessingFn);
18514 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
18515
18516 segmentXhr.addEventListener('progress', handleProgress(segment, progressFn));
18517 activeXhrs.push(segmentXhr);
18518
18519 return function () {
18520 return abortAll(activeXhrs);
18521 };
18522 };
18523
18524 // Utilities
18525
18526 /**
18527 * Returns the CSS value for the specified property on an element
18528 * using `getComputedStyle`. Firefox has a long-standing issue where
18529 * getComputedStyle() may return null when running in an iframe with
18530 * `display: none`.
18531 *
18532 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
18533 * @param {HTMLElement} el the htmlelement to work on
18534 * @param {string} the proprety to get the style for
18535 */
18536 var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
18537 var result = void 0;
18538
18539 if (!el) {
18540 return '';
18541 }
18542
18543 result = window_1.getComputedStyle(el);
18544 if (!result) {
18545 return '';
18546 }
18547
18548 return result[property];
18549 };
18550
18551 /**
18552 * Resuable stable sort function
18553 *
18554 * @param {Playlists} array
18555 * @param {Function} sortFn Different comparators
18556 * @function stableSort
18557 */
18558 var stableSort = function stableSort(array, sortFn) {
18559 var newArray = array.slice();
18560
18561 array.sort(function (left, right) {
18562 var cmp = sortFn(left, right);
18563
18564 if (cmp === 0) {
18565 return newArray.indexOf(left) - newArray.indexOf(right);
18566 }
18567 return cmp;
18568 });
18569 };
18570
18571 /**
18572 * A comparator function to sort two playlist object by bandwidth.
18573 *
18574 * @param {Object} left a media playlist object
18575 * @param {Object} right a media playlist object
18576 * @return {Number} Greater than zero if the bandwidth attribute of
18577 * left is greater than the corresponding attribute of right. Less
18578 * than zero if the bandwidth of right is greater than left and
18579 * exactly zero if the two are equal.
18580 */
18581 var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
18582 var leftBandwidth = void 0;
18583 var rightBandwidth = void 0;
18584
18585 if (left.attributes.BANDWIDTH) {
18586 leftBandwidth = left.attributes.BANDWIDTH;
18587 }
18588 leftBandwidth = leftBandwidth || window_1.Number.MAX_VALUE;
18589 if (right.attributes.BANDWIDTH) {
18590 rightBandwidth = right.attributes.BANDWIDTH;
18591 }
18592 rightBandwidth = rightBandwidth || window_1.Number.MAX_VALUE;
18593
18594 return leftBandwidth - rightBandwidth;
18595 };
18596
18597 /**
18598 * A comparator function to sort two playlist object by resolution (width).
18599 * @param {Object} left a media playlist object
18600 * @param {Object} right a media playlist object
18601 * @return {Number} Greater than zero if the resolution.width attribute of
18602 * left is greater than the corresponding attribute of right. Less
18603 * than zero if the resolution.width of right is greater than left and
18604 * exactly zero if the two are equal.
18605 */
18606 var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
18607 var leftWidth = void 0;
18608 var rightWidth = void 0;
18609
18610 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
18611 leftWidth = left.attributes.RESOLUTION.width;
18612 }
18613
18614 leftWidth = leftWidth || window_1.Number.MAX_VALUE;
18615
18616 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
18617 rightWidth = right.attributes.RESOLUTION.width;
18618 }
18619
18620 rightWidth = rightWidth || window_1.Number.MAX_VALUE;
18621
18622 // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
18623 // have the same media dimensions/ resolution
18624 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
18625 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
18626 }
18627 return leftWidth - rightWidth;
18628 };
18629
  /**
   * Chooses the appropriate media playlist based on bandwidth and player size
   *
   * Selection proceeds in stages: playlists are sorted by bandwidth, filtered
   * by compatibility/enabled state, limited to those whose effective bitrate
   * fits under the estimated bandwidth, and then (optionally) narrowed by the
   * player's dimensions before falling back along a fixed preference chain.
   *
   * @param {Object} master
   * Object representation of the master manifest
   * @param {Number} playerBandwidth
   * Current calculated bandwidth of the player
   * @param {Number} playerWidth
   * Current width of the player element (should account for the device pixel ratio)
   * @param {Number} playerHeight
   * Current height of the player element (should account for the device pixel ratio)
   * @param {Boolean} limitRenditionByPlayerDimensions
   * True if the player width and height should be used during the selection, false otherwise
   * @return {Playlist} the highest bitrate playlist less than the
   * currently detected bandwidth, accounting for some amount of
   * bandwidth variance
   */
  var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions) {
    // convert the playlists to an intermediary representation to make comparisons easier
    var sortedPlaylistReps = master.playlists.map(function (playlist) {
      var width = void 0;
      var height = void 0;
      var bandwidth = void 0;

      width = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
      height = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
      bandwidth = playlist.attributes.BANDWIDTH;

      // playlists without a BANDWIDTH attribute sort last (treated as infinitely expensive)
      bandwidth = bandwidth || window_1.Number.MAX_VALUE;

      return {
        bandwidth: bandwidth,
        width: width,
        height: height,
        playlist: playlist
      };
    });

    // ascending bandwidth order; stableSort keeps manifest order for ties
    stableSort(sortedPlaylistReps, function (left, right) {
      return left.bandwidth - right.bandwidth;
    });

    // filter out any playlists that have been excluded due to
    // incompatible configurations
    sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
      return !Playlist.isIncompatible(rep.playlist);
    });

    // filter out any playlists that have been disabled manually through the representations
    // api or blacklisted temporarily due to playback errors.
    var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
      return Playlist.isEnabled(rep.playlist);
    });

    if (!enabledPlaylistReps.length) {
      // if there are no enabled playlists, then they have all been blacklisted or disabled
      // by the user through the representations api. In this case, ignore blacklisting and
      // fallback to what the user wants by using playlists the user has not disabled.
      enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
        return !Playlist.isDisabled(rep.playlist);
      });
    }

    // filter out any variant that has greater effective bitrate
    // than the current estimated bandwidth
    var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
      return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
    });

    // may be undefined when nothing fits under the bandwidth; the filter
    // below then simply produces an empty list (its callback never runs)
    var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1];

    // get all of the renditions with the same (highest) bandwidth
    // and then taking the very first element
    var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
      return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
    })[0];

    // if we're not going to limit renditions by player size, make an early decision.
    if (limitRenditionByPlayerDimensions === false) {
      var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

      return _chosenRep ? _chosenRep.playlist : null;
    }

    // filter out playlists without resolution information
    var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
      return rep.width && rep.height;
    });

    // sort variants by resolution
    stableSort(haveResolution, function (left, right) {
      return left.width - right.width;
    });

    // if we have the exact resolution as the player use it
    var resolutionBestRepList = haveResolution.filter(function (rep) {
      return rep.width === playerWidth && rep.height === playerHeight;
    });

    highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1];
    // ensure that we pick the highest bandwidth variant that have exact resolution
    var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
      return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
    })[0];

    var resolutionPlusOneList = void 0;
    var resolutionPlusOneSmallest = void 0;
    var resolutionPlusOneRep = void 0;

    // find the smallest variant that is larger than the player
    // if there is no match of exact resolution
    if (!resolutionBestRep) {
      resolutionPlusOneList = haveResolution.filter(function (rep) {
        return rep.width > playerWidth || rep.height > playerHeight;
      });

      // find all the variants have the same smallest resolution
      resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
        return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
      });

      // ensure that we also pick the highest bandwidth variant that
      // is just-larger-than the video player
      highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
      resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
        return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
      })[0];
    }

    // fallback chain of variants: size-appropriate picks first, then the best
    // bandwidth fit, then any enabled playlist, then anything at all
    var chosenRep = resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

    return chosenRep ? chosenRep.playlist : null;
  };
18764
18765 // Playlist Selectors
18766
18767 /**
18768 * Chooses the appropriate media playlist based on the most recent
18769 * bandwidth estimate and the player size.
18770 *
18771 * Expects to be called within the context of an instance of HlsHandler
18772 *
18773 * @return {Playlist} the highest bitrate playlist less than the
18774 * currently detected bandwidth, accounting for some amount of
18775 * bandwidth variance
18776 */
18777 var lastBandwidthSelector = function lastBandwidthSelector() {
18778 var pixelRatio = this.useDevicePixelRatio ? window_1.devicePixelRatio || 1 : 1;
18779
18780 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions);
18781 };
18782
  /**
   * Chooses the appropriate media playlist based on the potential to rebuffer
   *
   * @param {Object} settings
   *        Object of information required to use this selector
   * @param {Object} settings.master
   *        Object representation of the master manifest
   * @param {Number} settings.currentTime
   *        The current time of the player
   * @param {Number} settings.bandwidth
   *        Current measured bandwidth
   * @param {Number} settings.duration
   *        Duration of the media
   * @param {Number} settings.segmentDuration
   *        Segment duration to be used in round trip time calculations
   * @param {Number} settings.timeUntilRebuffer
   *        Time left in seconds until the player has to rebuffer
   * @param {Number} settings.currentTimeline
   *        The current timeline segments are being loaded from
   * @param {SyncController} settings.syncController
   *        SyncController for determining if we have a sync point for a given playlist
   * @return {Object|null}
   *         {Object} return.playlist
   *         The highest bandwidth playlist with the least amount of rebuffering
   *         {Number} return.rebufferingImpact
   *         The amount of time in seconds switching to this playlist will rebuffer. A
   *         negative value means that switching will cause zero rebuffering.
   */
  var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
    var master = settings.master,
        currentTime = settings.currentTime,
        bandwidth = settings.bandwidth,
        duration$$1 = settings.duration,
        segmentDuration = settings.segmentDuration,
        timeUntilRebuffer = settings.timeUntilRebuffer,
        currentTimeline = settings.currentTimeline,
        syncController = settings.syncController;

    // filter out any playlists that have been excluded due to
    // incompatible configurations

    var compatiblePlaylists = master.playlists.filter(function (playlist) {
      return !Playlist.isIncompatible(playlist);
    });

    // filter out any playlists that have been disabled manually through the representations
    // api or blacklisted temporarily due to playback errors.
    var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);

    if (!enabledPlaylists.length) {
      // if there are no enabled playlists, then they have all been blacklisted or disabled
      // by the user through the representations api. In this case, ignore blacklisting and
      // fallback to what the user wants by using playlists the user has not disabled.
      enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
        return !Playlist.isDisabled(playlist);
      });
    }

    // only playlists that advertise BANDWIDTH can have a rebuffering estimate computed
    var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));

    var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
      var syncPoint = syncController.getSyncPoint(playlist, duration$$1, currentTimeline, currentTime);
      // If there is no sync point for this playlist, switching to it will require a
      // sync request first. This will double the request time
      var numRequests = syncPoint ? 1 : 2;
      var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
      // impact <= 0 means the switch would finish before the buffer runs dry
      var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;

      return {
        playlist: playlist,
        rebufferingImpact: rebufferingImpact
      };
    });

    var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
      return estimate.rebufferingImpact <= 0;
    });

    // Sort by bandwidth DESC
    stableSort(noRebufferingPlaylists, function (a, b) {
      return comparePlaylistBandwidth(b.playlist, a.playlist);
    });

    if (noRebufferingPlaylists.length) {
      // best case: the highest-bandwidth playlist that causes no rebuffering
      return noRebufferingPlaylists[0];
    }

    // every option rebuffers; pick the one with the smallest impact
    stableSort(rebufferingEstimates, function (a, b) {
      return a.rebufferingImpact - b.rebufferingImpact;
    });

    return rebufferingEstimates[0] || null;
  };
18876
18877 /**
18878 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
18879 * one with video. If no renditions with video exist, return the lowest audio rendition.
18880 *
18881 * Expects to be called within the context of an instance of HlsHandler
18882 *
18883 * @return {Object|null}
18884 * {Object} return.playlist
18885 * The lowest bitrate playlist that contains a video codec. If no such rendition
18886 * exists pick the lowest audio rendition.
18887 */
18888 var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
18889 // filter out any playlists that have been excluded due to
18890 // incompatible configurations or playback errors
18891 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled);
18892
18893 // Sort ascending by bitrate
18894 stableSort(playlists, function (a, b) {
18895 return comparePlaylistBandwidth(a, b);
18896 });
18897
18898 // Parse and assume that playlists with no video codec have no video
18899 // (this is not necessarily true, although it is generally true).
18900 //
18901 // If an entire manifest has no valid videos everything will get filtered
18902 // out.
18903 var playlistsWithVideo = playlists.filter(function (playlist) {
18904 return parseCodecs(playlist.attributes.CODECS).videoCodec;
18905 });
18906
18907 return playlistsWithVideo[0] || null;
18908 };
18909
18910 /**
18911 * Create captions text tracks on video.js if they do not exist
18912 *
18913 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
18914 * @param {Object} tech the video.js tech
18915 * @param {Object} captionStreams the caption streams to create
18916 * @private
18917 */
18918 var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStreams) {
18919 for (var trackId in captionStreams) {
18920 if (!inbandTextTracks[trackId]) {
18921 tech.trigger({ type: 'usage', name: 'hls-608' });
18922 var track = tech.textTracks().getTrackById(trackId);
18923
18924 if (track) {
18925 // Resuse an existing track with a CC# id because this was
18926 // very likely created by videojs-contrib-hls from information
18927 // in the m3u8 for us to use
18928 inbandTextTracks[trackId] = track;
18929 } else {
18930 // Otherwise, create a track with the default `CC#` label and
18931 // without a language
18932 inbandTextTracks[trackId] = tech.addRemoteTextTrack({
18933 kind: 'captions',
18934 id: trackId,
18935 label: trackId
18936 }, false).track;
18937 }
18938 }
18939 }
18940 };
18941
18942 var addCaptionData = function addCaptionData(_ref) {
18943 var inbandTextTracks = _ref.inbandTextTracks,
18944 captionArray = _ref.captionArray,
18945 timestampOffset = _ref.timestampOffset;
18946
18947 if (!captionArray) {
18948 return;
18949 }
18950
18951 var Cue = window.WebKitDataCue || window.VTTCue;
18952
18953 captionArray.forEach(function (caption) {
18954 var track = caption.stream;
18955 var startTime = caption.startTime;
18956 var endTime = caption.endTime;
18957
18958 if (!inbandTextTracks[track]) {
18959 return;
18960 }
18961
18962 startTime += timestampOffset;
18963 endTime += timestampOffset;
18964
18965 inbandTextTracks[track].addCue(new Cue(startTime, endTime, caption.text));
18966 });
18967 };
18968
18969 /**
18970 * mux.js
18971 *
18972 * Copyright (c) Brightcove
18973 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
18974 *
18975 * Reads in-band caption information from a video elementary
18976 * stream. Captions must follow the CEA-708 standard for injection
18977 * into an MPEG-2 transport streams.
18978 * @see https://en.wikipedia.org/wiki/CEA-708
18979 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
18980 */
18981
18982 // Supplemental enhancement information (SEI) NAL units have a
18983 // payload type field to indicate how they are to be
18984 // interpreted. CEAS-708 caption content is always transmitted with
18985 // payload type 0x04.
18986
18987 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
18988 RBSP_TRAILING_BITS = 128;
18989
18990 /**
18991 * Parse a supplemental enhancement information (SEI) NAL unit.
18992 * Stops parsing once a message of type ITU T T35 has been found.
18993 *
18994 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
18995 * @return {object} the parsed SEI payload
18996 * @see Rec. ITU-T H.264, 7.3.2.3.1
18997 */
18998 var parseSei = function parseSei(bytes) {
18999 var i = 0,
19000 result = {
19001 payloadType: -1,
19002 payloadSize: 0
19003 },
19004 payloadType = 0,
19005 payloadSize = 0;
19006
19007 // go through the sei_rbsp parsing each each individual sei_message
19008 while (i < bytes.byteLength) {
19009 // stop once we have hit the end of the sei_rbsp
19010 if (bytes[i] === RBSP_TRAILING_BITS) {
19011 break;
19012 }
19013
19014 // Parse payload type
19015 while (bytes[i] === 0xFF) {
19016 payloadType += 255;
19017 i++;
19018 }
19019 payloadType += bytes[i++];
19020
19021 // Parse payload size
19022 while (bytes[i] === 0xFF) {
19023 payloadSize += 255;
19024 i++;
19025 }
19026 payloadSize += bytes[i++];
19027
19028 // this sei_message is a 608/708 caption so save it and break
19029 // there can only ever be one caption message in a frame's sei
19030 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
19031 result.payloadType = payloadType;
19032 result.payloadSize = payloadSize;
19033 result.payload = bytes.subarray(i, i + payloadSize);
19034 break;
19035 }
19036
19037 // skip the payload and parse the next message
19038 i += payloadSize;
19039 payloadType = 0;
19040 payloadSize = 0;
19041 }
19042
19043 return result;
19044 };
19045
19046 // see ANSI/SCTE 128-1 (2013), section 8.1
19047 var parseUserData = function parseUserData(sei) {
19048 // itu_t_t35_contry_code must be 181 (United States) for
19049 // captions
19050 if (sei.payload[0] !== 181) {
19051 return null;
19052 }
19053
19054 // itu_t_t35_provider_code should be 49 (ATSC) for captions
19055 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
19056 return null;
19057 }
19058
19059 // the user_identifier should be "GA94" to indicate ATSC1 data
19060 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
19061 return null;
19062 }
19063
19064 // finally, user_data_type_code should be 0x03 for caption data
19065 if (sei.payload[7] !== 0x03) {
19066 return null;
19067 }
19068
19069 // return the user_data_type_structure and strip the trailing
19070 // marker bits
19071 return sei.payload.subarray(8, sei.payload.length - 1);
19072 };
19073
19074 // see CEA-708-D, section 4.4
19075 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
19076 var results = [],
19077 i,
19078 count,
19079 offset,
19080 data;
19081
19082 // if this is just filler, return immediately
19083 if (!(userData[0] & 0x40)) {
19084 return results;
19085 }
19086
19087 // parse out the cc_data_1 and cc_data_2 fields
19088 count = userData[0] & 0x1f;
19089 for (i = 0; i < count; i++) {
19090 offset = i * 3;
19091 data = {
19092 type: userData[offset + 2] & 0x03,
19093 pts: pts
19094 };
19095
19096 // capture cc data when cc_valid is 1
19097 if (userData[offset + 2] & 0x04) {
19098 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
19099 results.push(data);
19100 }
19101 }
19102 return results;
19103 };
19104
19105 var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
19106 var length = data.byteLength,
19107 emulationPreventionBytesPositions = [],
19108 i = 1,
19109 newLength,
19110 newData;
19111
19112 // Find all `Emulation Prevention Bytes`
19113 while (i < length - 2) {
19114 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
19115 emulationPreventionBytesPositions.push(i + 2);
19116 i += 2;
19117 } else {
19118 i++;
19119 }
19120 }
19121
19122 // If no Emulation Prevention Bytes were found just return the original
19123 // array
19124 if (emulationPreventionBytesPositions.length === 0) {
19125 return data;
19126 }
19127
19128 // Create a new array to hold the NAL unit data
19129 newLength = length - emulationPreventionBytesPositions.length;
19130 newData = new Uint8Array(newLength);
19131 var sourceIndex = 0;
19132
19133 for (i = 0; i < newLength; sourceIndex++, i++) {
19134 if (sourceIndex === emulationPreventionBytesPositions[0]) {
19135 // Skip this byte
19136 sourceIndex++;
19137 // Remove this position index
19138 emulationPreventionBytesPositions.shift();
19139 }
19140 newData[i] = data[sourceIndex];
19141 }
19142
19143 return newData;
19144 };
19145
  // exports
  //
  // Grouped export of the CEA-608/708 SEI parsing helpers defined above so
  // downstream consumers (e.g. the CaptionStream below) can use them as a unit.
  var captionPacketParser = {
    parseSei: parseSei,
    parseUserData: parseUserData,
    parseCaptionPackets: parseCaptionPackets,
    discardEmulationPreventionBytes: discardEmulationPreventionBytes,
    USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
  };
19154
19155 /**
19156 * mux.js
19157 *
19158 * Copyright (c) Brightcove
19159 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
19160 *
19161 * A lightweight readable stream implemention that handles event dispatching.
19162 * Objects that inherit from streams should call init in their constructors.
19163 */
19164
19165 var Stream$2 = function Stream() {
19166 this.init = function () {
19167 var listeners = {};
19168 /**
19169 * Add a listener for a specified event type.
19170 * @param type {string} the event name
19171 * @param listener {function} the callback to be invoked when an event of
19172 * the specified type occurs
19173 */
19174 this.on = function (type, listener) {
19175 if (!listeners[type]) {
19176 listeners[type] = [];
19177 }
19178 listeners[type] = listeners[type].concat(listener);
19179 };
19180 /**
19181 * Remove a listener for a specified event type.
19182 * @param type {string} the event name
19183 * @param listener {function} a function previously registered for this
19184 * type of event through `on`
19185 */
19186 this.off = function (type, listener) {
19187 var index;
19188 if (!listeners[type]) {
19189 return false;
19190 }
19191 index = listeners[type].indexOf(listener);
19192 listeners[type] = listeners[type].slice();
19193 listeners[type].splice(index, 1);
19194 return index > -1;
19195 };
19196 /**
19197 * Trigger an event of the specified type on this stream. Any additional
19198 * arguments to this function are passed as parameters to event listeners.
19199 * @param type {string} the event name
19200 */
19201 this.trigger = function (type) {
19202 var callbacks, i, length, args;
19203 callbacks = listeners[type];
19204 if (!callbacks) {
19205 return;
19206 }
19207 // Slicing the arguments on every invocation of this method
19208 // can add a significant amount of overhead. Avoid the
19209 // intermediate object creation for the common case of a
19210 // single callback argument
19211 if (arguments.length === 2) {
19212 length = callbacks.length;
19213 for (i = 0; i < length; ++i) {
19214 callbacks[i].call(this, arguments[1]);
19215 }
19216 } else {
19217 args = [];
19218 i = arguments.length;
19219 for (i = 1; i < arguments.length; ++i) {
19220 args.push(arguments[i]);
19221 }
19222 length = callbacks.length;
19223 for (i = 0; i < length; ++i) {
19224 callbacks[i].apply(this, args);
19225 }
19226 }
19227 };
19228 /**
19229 * Destroys the stream and cleans up.
19230 */
19231 this.dispose = function () {
19232 listeners = {};
19233 };
19234 };
19235 };
19236
  /**
   * Forwards all `data` events on this stream to the destination stream. The
   * destination stream should provide a method `push` to receive the data
   * events as they arrive.
   * @param destination {stream} the stream that will receive all `data` events
   * @param autoFlush {boolean} if false, we will not call `flush` on the destination
   * when the current stream emits a 'done' event
   * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
   */
  Stream$2.prototype.pipe = function (destination) {
    this.on('data', function (data) {
      destination.push(data);
    });

    this.on('done', function (flushSource) {
      destination.flush(flushSource);
    });

    this.on('partialdone', function (flushSource) {
      destination.partialFlush(flushSource);
    });

    this.on('endedtimeline', function (flushSource) {
      destination.endTimeline(flushSource);
    });

    this.on('reset', function (flushSource) {
      destination.reset(flushSource);
    });

    // returning the destination allows pipe() calls to be chained
    return destination;
  };

  // Default stream functions that are expected to be overridden to perform
  // actual work. These are provided by the prototype as a sort of no-op
  // implementation so that we don't have to check for their existence in the
  // `pipe` function above.

  // re-emit pushed data so piped destinations receive it
  Stream$2.prototype.push = function (data) {
    this.trigger('data', data);
  };

  // signal that all buffered data should be processed
  Stream$2.prototype.flush = function (flushSource) {
    this.trigger('done', flushSource);
  };

  // signal a partial flush (data may still be pending)
  Stream$2.prototype.partialFlush = function (flushSource) {
    this.trigger('partialdone', flushSource);
  };

  // signal that the current timeline has ended
  Stream$2.prototype.endTimeline = function (flushSource) {
    this.trigger('endedtimeline', flushSource);
  };

  // signal that downstream state should be reset
  Stream$2.prototype.reset = function (flushSource) {
    this.trigger('reset', flushSource);
  };

  // alias used by the rest of this bundle when referring to the Stream base
  var stream = Stream$2;
19295
19296 // -----------------
19297 // Link To Transport
19298 // -----------------
19299
19300
19301 var CaptionStream = function CaptionStream() {
19302
19303 CaptionStream.prototype.init.call(this);
19304
19305 this.captionPackets_ = [];
19306
19307 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
19308 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
19309 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
19310 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
19311 ];
19312
19313 this.reset();
19314
19315 // forward data and done events from CCs to this CaptionStream
19316 this.ccStreams_.forEach(function (cc) {
19317 cc.on('data', this.trigger.bind(this, 'data'));
19318 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
19319 cc.on('done', this.trigger.bind(this, 'done'));
19320 }, this);
19321 };
19322
  CaptionStream.prototype = new stream();
  /**
   * Collects CEA-608 caption byte pairs from a parsed NAL unit event.
   *
   * Only `sei_rbsp` NAL units carrying a user_data_registered_itu_t_t35
   * payload are examined; extracted caption packets are buffered in
   * `captionPackets_` until the stream is flushed.
   *
   * @param {Object} event - parsed NAL unit with `nalUnitType`,
   *   `escapedRBSP`, `pts` and `dts` fields
   */
  CaptionStream.prototype.push = function (event) {
    var sei, userData, newCaptionPackets;

    // only examine SEI NALs
    if (event.nalUnitType !== 'sei_rbsp') {
      return;
    }

    // parse the sei
    sei = captionPacketParser.parseSei(event.escapedRBSP);

    // ignore everything but user_data_registered_itu_t_t35
    if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
      return;
    }

    // parse out the user data payload
    userData = captionPacketParser.parseUserData(sei);

    // ignore unrecognized userData
    if (!userData) {
      return;
    }

    // Sometimes, the same segment # will be downloaded twice. To stop the
    // caption data from being processed twice, we track the latest dts we've
    // received and ignore everything with a dts before that. However, since
    // data for a specific dts can be split across packets on either side of
    // a segment boundary, we need to make sure we *don't* ignore the packets
    // from the *next* segment that have dts === this.latestDts_. By constantly
    // tracking the number of packets received with dts === this.latestDts_, we
    // know how many should be ignored once we start receiving duplicates.
    if (event.dts < this.latestDts_) {
      // We've started getting older data, so set the flag.
      this.ignoreNextEqualDts_ = true;
      return;
    } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
      // still consuming the re-downloaded duplicates at the boundary dts
      this.numSameDts_--;
      if (!this.numSameDts_) {
        // We've received the last duplicate packet, time to start processing again
        this.ignoreNextEqualDts_ = false;
      }
      return;
    }

    // parse out CC data packets and save them for later
    newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
    this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
    // update the duplicate-detection bookkeeping for this dts
    if (this.latestDts_ !== event.dts) {
      this.numSameDts_ = 0;
    }
    this.numSameDts_++;
    this.latestDts_ = event.dts;
  };
19378
19379 CaptionStream.prototype.flushCCStreams = function (flushType) {
19380 this.ccStreams_.forEach(function (cc) {
19381 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
19382 }, this);
19383 };
19384
19385 CaptionStream.prototype.flushStream = function (flushType) {
19386 // make sure we actually parsed captions before proceeding
19387 if (!this.captionPackets_.length) {
19388 this.flushCCStreams(flushType);
19389 return;
19390 }
19391
19392 // In Chrome, the Array#sort function is not stable so add a
19393 // presortIndex that we can use to ensure we get a stable-sort
19394 this.captionPackets_.forEach(function (elem, idx) {
19395 elem.presortIndex = idx;
19396 });
19397
19398 // sort caption byte-pairs based on their PTS values
19399 this.captionPackets_.sort(function (a, b) {
19400 if (a.pts === b.pts) {
19401 return a.presortIndex - b.presortIndex;
19402 }
19403 return a.pts - b.pts;
19404 });
19405
19406 this.captionPackets_.forEach(function (packet) {
19407 if (packet.type < 2) {
19408 // Dispatch packet to the right Cea608Stream
19409 this.dispatchCea608Packet(packet);
19410 }
19411 // this is where an 'else' would go for a dispatching packets
19412 // to a theoretical Cea708Stream that handles SERVICEn data
19413 }, this);
19414
19415 this.captionPackets_.length = 0;
19416 this.flushCCStreams(flushType);
19417 };
19418
19419 CaptionStream.prototype.flush = function () {
19420 return this.flushStream('flush');
19421 };
19422
19423 // Only called if handling partial data
19424 CaptionStream.prototype.partialFlush = function () {
19425 return this.flushStream('partialFlush');
19426 };
19427
19428 CaptionStream.prototype.reset = function () {
19429 this.latestDts_ = null;
19430 this.ignoreNextEqualDts_ = false;
19431 this.numSameDts_ = 0;
19432 this.activeCea608Channel_ = [null, null];
19433 this.ccStreams_.forEach(function (ccStream) {
19434 ccStream.reset();
19435 });
19436 };
19437
19438 // From the CEA-608 spec:
19439 /*
19440 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
19441 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
19442 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
19443 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
19444 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
19445 * to switch to captioning or Text.
19446 */
19447 // With that in mind, we ignore any data between an XDS control code and a
19448 // subsequent closed-captioning control code.
19449 CaptionStream.prototype.dispatchCea608Packet = function (packet) {
19450 // NOTE: packet.type is the CEA608 field
19451 if (this.setsTextOrXDSActive(packet)) {
19452 this.activeCea608Channel_[packet.type] = null;
19453 } else if (this.setsChannel1Active(packet)) {
19454 this.activeCea608Channel_[packet.type] = 0;
19455 } else if (this.setsChannel2Active(packet)) {
19456 this.activeCea608Channel_[packet.type] = 1;
19457 }
19458 if (this.activeCea608Channel_[packet.type] === null) {
19459 // If we haven't received anything to set the active channel, or the
19460 // packets are Text/XDS data, discard the data; we don't want jumbled
19461 // captions
19462 return;
19463 }
19464 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
19465 };
19466
19467 CaptionStream.prototype.setsChannel1Active = function (packet) {
19468 return (packet.ccData & 0x7800) === 0x1000;
19469 };
19470 CaptionStream.prototype.setsChannel2Active = function (packet) {
19471 return (packet.ccData & 0x7800) === 0x1800;
19472 };
19473 CaptionStream.prototype.setsTextOrXDSActive = function (packet) {
19474 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
19475 };
19476
19477 // ----------------------
19478 // Session to Application
19479 // ----------------------
19480
19481 // This hash maps non-ASCII, special, and extended character codes to their
19482 // proper Unicode equivalent. The first keys that are only a single byte
19483 // are the non-standard ASCII characters, which simply map the CEA608 byte
19484 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
19485 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
19486 // can be performed regardless of the field and data channel on which the
19487 // character code was received.
  var CHARACTER_TRANSLATION = {
    // single-byte keys: non-standard ASCII characters
    0x2a: 0xe1, // á
    0x5c: 0xe9, // é
    0x5e: 0xed, // í
    0x5f: 0xf3, // ó
    0x60: 0xfa, // ú
    0x7b: 0xe7, // ç
    0x7c: 0xf7, // ÷
    0x7d: 0xd1, // Ñ
    0x7e: 0xf1, // ñ
    0x7f: 0x2588, // █
    // two-byte keys, char0 masked to 0x01 (special character set)
    0x0130: 0xae, // ®
    0x0131: 0xb0, // °
    0x0132: 0xbd, // ½
    0x0133: 0xbf, // ¿
    0x0134: 0x2122, // ™
    0x0135: 0xa2, // ¢
    0x0136: 0xa3, // £
    0x0137: 0x266a, // ♪
    0x0138: 0xe0, // à
    0x0139: 0xa0, // non-breaking space
    0x013a: 0xe8, // è
    0x013b: 0xe2, // â
    0x013c: 0xea, // ê
    0x013d: 0xee, // î
    0x013e: 0xf4, // ô
    0x013f: 0xfb, // û
    // two-byte keys, char0 masked to 0x02 (first extended character set)
    0x0220: 0xc1, // Á
    0x0221: 0xc9, // É
    0x0222: 0xd3, // Ó
    0x0223: 0xda, // Ú
    0x0224: 0xdc, // Ü
    0x0225: 0xfc, // ü
    0x0226: 0x2018, // ‘
    0x0227: 0xa1, // ¡
    0x0228: 0x2a, // *
    0x0229: 0x27, // '
    0x022a: 0x2014, // —
    0x022b: 0xa9, // ©
    0x022c: 0x2120, // ℠
    0x022d: 0x2022, // •
    0x022e: 0x201c, // “
    0x022f: 0x201d, // ”
    0x0230: 0xc0, // À
    0x0231: 0xc2, // Â
    0x0232: 0xc7, // Ç
    0x0233: 0xc8, // È
    0x0234: 0xca, // Ê
    0x0235: 0xcb, // Ë
    0x0236: 0xeb, // ë
    0x0237: 0xce, // Î
    0x0238: 0xcf, // Ï
    0x0239: 0xef, // ï
    0x023a: 0xd4, // Ô
    0x023b: 0xd9, // Ù
    0x023c: 0xf9, // ù
    0x023d: 0xdb, // Û
    0x023e: 0xab, // «
    0x023f: 0xbb, // »
    // two-byte keys, char0 masked to 0x03 (second extended character set)
    0x0320: 0xc3, // Ã
    0x0321: 0xe3, // ã
    0x0322: 0xcd, // Í
    0x0323: 0xcc, // Ì
    0x0324: 0xec, // ì
    0x0325: 0xd2, // Ò
    0x0326: 0xf2, // ò
    0x0327: 0xd5, // Õ
    0x0328: 0xf5, // õ
    0x0329: 0x7b, // {
    0x032a: 0x7d, // }
    0x032b: 0x5c, // \
    0x032c: 0x5e, // ^
    0x032d: 0x5f, // _
    0x032e: 0x7c, // |
    0x032f: 0x7e, // ~
    0x0330: 0xc4, // Ä
    0x0331: 0xe4, // ä
    0x0332: 0xd6, // Ö
    0x0333: 0xf6, // ö
    0x0334: 0xdf, // ß
    0x0335: 0xa5, // ¥
    0x0336: 0xa4, // ¤
    0x0337: 0x2502, // │
    0x0338: 0xc5, // Å
    0x0339: 0xe5, // å
    0x033a: 0xd8, // Ø
    0x033b: 0xf8, // ø
    0x033c: 0x250c, // ┌
    0x033d: 0x2510, // ┐
    0x033e: 0x2514, // └
    0x033f: 0x2518 // ┘
  };
19580
19581 var getCharFromCode = function getCharFromCode(code) {
19582 if (code === null) {
19583 return '';
19584 }
19585 code = CHARACTER_TRANSLATION[code] || code;
19586 return String.fromCharCode(code);
19587 };
19588
  // the index of the last row in a CEA-608 display buffer
  var BOTTOM_ROW = 14;

  // This array is used for mapping PACs -> row #, since there's no way of
  // getting it through bit logic. A PAC is masked with 0x1f20 (see the
  // isPAC branch of Cea608Stream#push) and the index of the matching entry
  // here is the caption row.
  var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420];
19595
19596 // CEA-608 captions are rendered onto a 34x15 matrix of character
19597 // cells. The "bottom" row is the last element in the outer array.
19598 var createDisplayBuffer = function createDisplayBuffer() {
19599 var result = [],
19600 i = BOTTOM_ROW + 1;
19601 while (i--) {
19602 result.push('');
19603 }
19604 return result;
19605 };
19606
  /**
   * A decoder for one CEA-608 service (CC1-CC4), identified by a
   * field/data-channel pair. Two-byte caption packets pushed into this
   * stream drive a state machine over three captioning modes (pop-on,
   * roll-up, paint-on); display-buffer contents are emitted as cue data
   * by `flushDisplayed`.
   *
   * @param {Integer} field the CEA-608 field (0 or 1)
   * @param {Integer} dataChannel the data channel within the field (0 or 1)
   */
  var Cea608Stream = function Cea608Stream(field, dataChannel) {
    Cea608Stream.prototype.init.call(this);

    this.field_ = field || 0;
    this.dataChannel_ = dataChannel || 0;

    // service name: CC1 (field 0, channel 0) through CC4 (field 1, channel 1)
    this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);

    this.setConstants();
    this.reset();

    /**
     * Decodes one two-byte caption packet (`packet.ccData`, stamped with
     * `packet.pts`), updating mode, cursor, formatting and display buffers.
     */
    this.push = function (packet) {
      var data, swap, char0, char1, text;
      // remove the parity bits
      data = packet.ccData & 0x7f7f;

      // ignore duplicate control codes; the spec demands they're sent twice
      if (data === this.lastControlCode_) {
        this.lastControlCode_ = null;
        return;
      }

      // Store control codes
      if ((data & 0xf000) === 0x1000) {
        this.lastControlCode_ = data;
      } else if (data !== this.PADDING_) {
        this.lastControlCode_ = null;
      }

      char0 = data >>> 8;
      char1 = data & 0xff;

      if (data === this.PADDING_) {
        return;
      } else if (data === this.RESUME_CAPTION_LOADING_) {
        this.mode_ = 'popOn';
      } else if (data === this.END_OF_CAPTION_) {
        // If an EOC is received while in paint-on mode, the displayed caption
        // text should be swapped to non-displayed memory as if it was a pop-on
        // caption. Because of that, we should explicitly switch back to pop-on
        // mode
        this.mode_ = 'popOn';
        this.clearFormatting(packet.pts);
        // if a caption was being displayed, it's gone now
        this.flushDisplayed(packet.pts);

        // flip memory
        swap = this.displayed_;
        this.displayed_ = this.nonDisplayed_;
        this.nonDisplayed_ = swap;

        // start measuring the time to display the caption
        this.startPts_ = packet.pts;
      } else if (data === this.ROLL_UP_2_ROWS_) {
        this.rollUpRows_ = 2;
        this.setRollUp(packet.pts);
      } else if (data === this.ROLL_UP_3_ROWS_) {
        this.rollUpRows_ = 3;
        this.setRollUp(packet.pts);
      } else if (data === this.ROLL_UP_4_ROWS_) {
        this.rollUpRows_ = 4;
        this.setRollUp(packet.pts);
      } else if (data === this.CARRIAGE_RETURN_) {
        this.clearFormatting(packet.pts);
        this.flushDisplayed(packet.pts);
        this.shiftRowsUp_();
        this.startPts_ = packet.pts;
      } else if (data === this.BACKSPACE_) {
        // backspace edits the buffer that the current mode writes into
        if (this.mode_ === 'popOn') {
          this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
        } else {
          this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
        }
      } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
        this.flushDisplayed(packet.pts);
        this.displayed_ = createDisplayBuffer();
      } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
        this.nonDisplayed_ = createDisplayBuffer();
      } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
        if (this.mode_ !== 'paintOn') {
          // NOTE: This should be removed when proper caption positioning is
          // implemented
          this.flushDisplayed(packet.pts);
          this.displayed_ = createDisplayBuffer();
        }
        this.mode_ = 'paintOn';
        this.startPts_ = packet.pts;

        // Append special characters to caption text
      } else if (this.isSpecialCharacter(char0, char1)) {
        // Bitmask char0 so that we can apply character transformations
        // regardless of field and data channel.
        // Then byte-shift to the left and OR with char1 so we can pass the
        // entire character code to `getCharFromCode`.
        char0 = (char0 & 0x03) << 8;
        text = getCharFromCode(char0 | char1);
        // `this[this.mode_]` dispatches to popOn/rollUp/paintOn
        this[this.mode_](packet.pts, text);
        this.column_++;

        // Append extended characters to caption text
      } else if (this.isExtCharacter(char0, char1)) {
        // Extended characters always follow their "non-extended" equivalents.
        // IE if a "è" is desired, you'll always receive "eè"; non-compliant
        // decoders are supposed to drop the "è", while compliant decoders
        // backspace the "e" and insert "è".

        // Delete the previous character
        if (this.mode_ === 'popOn') {
          this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
        } else {
          this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
        }

        // Bitmask char0 so that we can apply character transformations
        // regardless of field and data channel.
        // Then byte-shift to the left and OR with char1 so we can pass the
        // entire character code to `getCharFromCode`.
        char0 = (char0 & 0x03) << 8;
        text = getCharFromCode(char0 | char1);
        this[this.mode_](packet.pts, text);
        this.column_++;

        // Process mid-row codes
      } else if (this.isMidRowCode(char0, char1)) {
        // Attributes are not additive, so clear all formatting
        this.clearFormatting(packet.pts);

        // According to the standard, mid-row codes
        // should be replaced with spaces, so add one now
        this[this.mode_](packet.pts, ' ');
        this.column_++;

        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }

        if ((char1 & 0x1) === 0x1) {
          this.addFormatting(packet.pts, ['u']);
        }

        // Detect offset control codes and adjust cursor
      } else if (this.isOffsetControlCode(char0, char1)) {
        // Cursor position is set by indent PAC (see below) in 4-column
        // increments, with an additional offset code of 1-3 to reach any
        // of the 32 columns specified by CEA-608. So all we need to do
        // here is increment the column cursor by the given offset.
        this.column_ += char1 & 0x03;

        // Detect PACs (Preamble Address Codes)
      } else if (this.isPAC(char0, char1)) {

        // There's no logic for PAC -> row mapping, so we have to just
        // find the row code in an array and use its index :(
        var row = ROWS.indexOf(data & 0x1f20);

        // Configure the caption window if we're in roll-up mode
        if (this.mode_ === 'rollUp') {
          // This implies that the base row is incorrectly set.
          // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
          // of roll-up rows set.
          if (row - this.rollUpRows_ + 1 < 0) {
            row = this.rollUpRows_ - 1;
          }

          this.setRollUp(packet.pts, row);
        }

        if (row !== this.row_) {
          // formatting is only persistent for current row
          this.clearFormatting(packet.pts);
          this.row_ = row;
        }
        // All PACs can apply underline, so detect and apply
        // (All odd-numbered second bytes set underline)
        if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
          this.addFormatting(packet.pts, ['u']);
        }

        if ((data & 0x10) === 0x10) {
          // We've got an indent level code. Each successive even number
          // increments the column cursor by 4, so we can get the desired
          // column position by bit-shifting to the right (to get n/2)
          // and multiplying by 4.
          this.column_ = ((data & 0xe) >> 1) * 4;
        }

        if (this.isColorPAC(char1)) {
          // it's a color code, though we only support white, which
          // can be either normal or italicized. white italics can be
          // either 0x4e or 0x6e depending on the row, so we just
          // bitwise-and with 0xe to see if italics should be turned on
          if ((char1 & 0xe) === 0xe) {
            this.addFormatting(packet.pts, ['i']);
          }
        }

        // We have a normal character in char0, and possibly one in char1
      } else if (this.isNormalChar(char0)) {
        if (char1 === 0x00) {
          char1 = null;
        }
        text = getCharFromCode(char0);
        text += getCharFromCode(char1);
        this[this.mode_](packet.pts, text);
        this.column_ += text.length;
      } // finish data processing
    };
  };
19815 Cea608Stream.prototype = new stream();
19816 // Trigger a cue point that captures the current state of the
19817 // display buffer
19818 Cea608Stream.prototype.flushDisplayed = function (pts) {
19819 var content = this.displayed_
19820 // remove spaces from the start and end of the string
19821 .map(function (row) {
19822 try {
19823 return row.trim();
19824 } catch (e) {
19825 // Ordinarily, this shouldn't happen. However, caption
19826 // parsing errors should not throw exceptions and
19827 // break playback.
19828 // eslint-disable-next-line no-console
19829 console.error('Skipping malformed caption.');
19830 return '';
19831 }
19832 })
19833 // combine all text rows to display in one cue
19834 .join('\n')
19835 // and remove blank rows from the start and end, but not the middle
19836 .replace(/^\n+|\n+$/g, '');
19837
19838 if (content.length) {
19839 this.trigger('data', {
19840 startPts: this.startPts_,
19841 endPts: pts,
19842 text: content,
19843 stream: this.name_
19844 });
19845 }
19846 };
19847
19848 /**
19849 * Zero out the data, used for startup and on seek
19850 */
19851 Cea608Stream.prototype.reset = function () {
19852 this.mode_ = 'popOn';
19853 // When in roll-up mode, the index of the last row that will
19854 // actually display captions. If a caption is shifted to a row
19855 // with a lower index than this, it is cleared from the display
19856 // buffer
19857 this.topRow_ = 0;
19858 this.startPts_ = 0;
19859 this.displayed_ = createDisplayBuffer();
19860 this.nonDisplayed_ = createDisplayBuffer();
19861 this.lastControlCode_ = null;
19862
19863 // Track row and column for proper line-breaking and spacing
19864 this.column_ = 0;
19865 this.row_ = BOTTOM_ROW;
19866 this.rollUpRows_ = 2;
19867
19868 // This variable holds currently-applied formatting
19869 this.formatting_ = [];
19870 };
19871
  /**
   * Sets up control code and related constants for this instance, based on
   * the data channel chosen at construction time.
   */
  Cea608Stream.prototype.setConstants = function () {
    // The following attributes have these uses:
    // ext_ : char0 for mid-row codes, and the base for extended
    // chars (ext_+0, ext_+1, and ext_+2 are char0s for
    // extended codes)
    // control_: char0 for control codes, except byte-shifted to the
    // left so that we can do this.control_ | CONTROL_CODE
    // offset_: char0 for tab offset codes
    //
    // It's also worth noting that control codes, and _only_ control codes,
    // differ between field 1 and field 2. Field 2 control codes are always
    // their field 1 value plus 1. That's why there's the "| field" on the
    // control value.
    if (this.dataChannel_ === 0) {
      this.BASE_ = 0x10;
      this.EXT_ = 0x11;
      this.CONTROL_ = (0x14 | this.field_) << 8;
      this.OFFSET_ = 0x17;
    } else if (this.dataChannel_ === 1) {
      this.BASE_ = 0x18;
      this.EXT_ = 0x19;
      this.CONTROL_ = (0x1c | this.field_) << 8;
      this.OFFSET_ = 0x1f;
    }

    // Constants for the LSByte command codes recognized by Cea608Stream. This
    // list is not exhaustive. For a more comprehensive listing and semantics see
    // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
    // Padding
    this.PADDING_ = 0x0000;
    // Pop-on Mode
    this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
    this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f;
    // Roll-up Mode
    this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
    this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
    this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
    this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d;
    // paint-on mode
    this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29;
    // Erasure
    this.BACKSPACE_ = this.CONTROL_ | 0x21;
    this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
    this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
  };
19920
19921 /**
19922 * Detects if the 2-byte packet data is a special character
19923 *
19924 * Special characters have a second byte in the range 0x30 to 0x3f,
19925 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
19926 * data channel 2).
19927 *
19928 * @param {Integer} char0 The first byte
19929 * @param {Integer} char1 The second byte
19930 * @return {Boolean} Whether the 2 bytes are an special character
19931 */
19932 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
19933 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
19934 };
19935
19936 /**
19937 * Detects if the 2-byte packet data is an extended character
19938 *
19939 * Extended characters have a second byte in the range 0x20 to 0x3f,
19940 * with the first byte being 0x12 or 0x13 (for data channel 1) or
19941 * 0x1a or 0x1b (for data channel 2).
19942 *
19943 * @param {Integer} char0 The first byte
19944 * @param {Integer} char1 The second byte
19945 * @return {Boolean} Whether the 2 bytes are an extended character
19946 */
19947 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
19948 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
19949 };
19950
19951 /**
19952 * Detects if the 2-byte packet is a mid-row code
19953 *
19954 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
19955 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
19956 * channel 2).
19957 *
19958 * @param {Integer} char0 The first byte
19959 * @param {Integer} char1 The second byte
19960 * @return {Boolean} Whether the 2 bytes are a mid-row code
19961 */
19962 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
19963 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
19964 };
19965
19966 /**
19967 * Detects if the 2-byte packet is an offset control code
19968 *
19969 * Offset control codes have a second byte in the range 0x21 to 0x23,
19970 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
19971 * data channel 2).
19972 *
19973 * @param {Integer} char0 The first byte
19974 * @param {Integer} char1 The second byte
19975 * @return {Boolean} Whether the 2 bytes are an offset control code
19976 */
19977 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
19978 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
19979 };
19980
19981 /**
19982 * Detects if the 2-byte packet is a Preamble Address Code
19983 *
19984 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
19985 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
19986 * range 0x40 to 0x7f.
19987 *
19988 * @param {Integer} char0 The first byte
19989 * @param {Integer} char1 The second byte
19990 * @return {Boolean} Whether the 2 bytes are a PAC
19991 */
19992 Cea608Stream.prototype.isPAC = function (char0, char1) {
19993 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
19994 };
19995
19996 /**
19997 * Detects if a packet's second byte is in the range of a PAC color code
19998 *
19999 * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
20000 * 0x60 to 0x6f.
20001 *
20002 * @param {Integer} char1 The second byte
20003 * @return {Boolean} Whether the byte is a color PAC
20004 */
20005 Cea608Stream.prototype.isColorPAC = function (char1) {
20006 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
20007 };
20008
20009 /**
20010 * Detects if a single byte is in the range of a normal character
20011 *
20012 * Normal text bytes are in the range 0x20 to 0x7f.
20013 *
20014 * @param {Integer} char The byte
20015 * @return {Boolean} Whether the byte is a normal character
20016 */
20017 Cea608Stream.prototype.isNormalChar = function (char) {
20018 return char >= 0x20 && char <= 0x7f;
20019 };
20020
  /**
   * Configures roll-up
   *
   * @param {Integer} pts Current PTS
   * @param {Integer} newBaseRow Used by PACs to slide the current window to
   *                             a new position
   */
  Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
    // Reset the base row to the bottom row when switching modes
    if (this.mode_ !== 'rollUp') {
      this.row_ = BOTTOM_ROW;
      this.mode_ = 'rollUp';
      // Spec says to wipe memories when switching to roll-up
      this.flushDisplayed(pts);
      this.nonDisplayed_ = createDisplayBuffer();
      this.displayed_ = createDisplayBuffer();
    }

    if (newBaseRow !== undefined && newBaseRow !== this.row_) {
      // move currently displayed captions (up or down) to the new base row
      for (var i = 0; i < this.rollUpRows_; i++) {
        this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
        this.displayed_[this.row_ - i] = '';
      }
    }

    // no explicit base row supplied: keep the current one
    if (newBaseRow === undefined) {
      newBaseRow = this.row_;
    }

    // rows above topRow_ fall outside the roll-up window
    this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
  };
20053
20054 // Adds the opening HTML tag for the passed character to the caption text,
20055 // and keeps track of it for later closing
20056 Cea608Stream.prototype.addFormatting = function (pts, format) {
20057 this.formatting_ = this.formatting_.concat(format);
20058 var text = format.reduce(function (text, format) {
20059 return text + '<' + format + '>';
20060 }, '');
20061 this[this.mode_](pts, text);
20062 };
20063
20064 // Adds HTML closing tags for current formatting to caption text and
20065 // clears remembered formatting
20066 Cea608Stream.prototype.clearFormatting = function (pts) {
20067 if (!this.formatting_.length) {
20068 return;
20069 }
20070 var text = this.formatting_.reverse().reduce(function (text, format) {
20071 return text + '</' + format + '>';
20072 }, '');
20073 this.formatting_ = [];
20074 this[this.mode_](pts, text);
20075 };
20076
20077 // Mode Implementations
20078 Cea608Stream.prototype.popOn = function (pts, text) {
20079 var baseRow = this.nonDisplayed_[this.row_];
20080
20081 // buffer characters
20082 baseRow += text;
20083 this.nonDisplayed_[this.row_] = baseRow;
20084 };
20085
20086 Cea608Stream.prototype.rollUp = function (pts, text) {
20087 var baseRow = this.displayed_[this.row_];
20088
20089 baseRow += text;
20090 this.displayed_[this.row_] = baseRow;
20091 };
20092
20093 Cea608Stream.prototype.shiftRowsUp_ = function () {
20094 var i;
20095 // clear out inactive rows
20096 for (i = 0; i < this.topRow_; i++) {
20097 this.displayed_[i] = '';
20098 }
20099 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
20100 this.displayed_[i] = '';
20101 }
20102 // shift displayed rows up
20103 for (i = this.topRow_; i < this.row_; i++) {
20104 this.displayed_[i] = this.displayed_[i + 1];
20105 }
20106 // clear out the bottom row
20107 this.displayed_[this.row_] = '';
20108 };
20109
20110 Cea608Stream.prototype.paintOn = function (pts, text) {
20111 var baseRow = this.displayed_[this.row_];
20112
20113 baseRow += text;
20114 this.displayed_[this.row_] = baseRow;
20115 };
20116
  // exports: the NAL-level CaptionStream plumbing and the per-service
  // Cea608Stream decoder
  var captionStream = {
    CaptionStream: CaptionStream,
    Cea608Stream: Cea608Stream
  };
20122
  // local aliases used below when locating caption SEI NALs in mdat data
  var discardEmulationPreventionBytes$1 = captionPacketParser.discardEmulationPreventionBytes;
  var CaptionStream$1 = captionStream.CaptionStream;
20125
20126 /**
20127 * Maps an offset in the mdat to a sample based on the the size of the samples.
20128 * Assumes that `parseSamples` has been called first.
20129 *
20130 * @param {Number} offset - The offset into the mdat
20131 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
20132 * @return {?Object} The matching sample, or null if no match was found.
20133 *
20134 * @see ISO-BMFF-12/2015, Section 8.8.8
20135 **/
20136 var mapToSample = function mapToSample(offset, samples) {
20137 var approximateOffset = offset;
20138
20139 for (var i = 0; i < samples.length; i++) {
20140 var sample = samples[i];
20141
20142 if (approximateOffset < sample.size) {
20143 return sample;
20144 }
20145
20146 approximateOffset -= sample.size;
20147 }
20148
20149 return null;
20150 };
20151
/**
 * Finds SEI nal units contained in a Media Data Box.
 * Assumes that `parseSamples` has been called first.
 *
 * @param {Uint8Array} avcStream - The bytes of the mdat
 * @param {Object[]} samples - The samples parsed out by `parseSamples`
 * @param {Number} trackId - The trackId of this video track
 * @return {Object[]} seiNals - the parsed SEI NALUs found.
 *   The contents of the seiNal should match what is expected by
 *   CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
 *
 * @see ISO-BMFF-12/2015, Section 8.1.1
 * @see Rec. ITU-T H.264, 7.3.2.3.1
 **/
var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
  var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
      result = [],
      seiNal,
      i,
      length,
      lastMatchedSample;

  // NALUs in the mdat are length-prefixed (4-byte big-endian size).
  for (i = 0; i + 4 < avcStream.length; i += length) {
    length = avcView.getUint32(i);
    i += 4;

    // Bail if this doesn't appear to be an H264 stream
    if (length <= 0) {
      continue;
    }

    // low 5 bits of the first NALU byte are the nal_unit_type
    switch (avcStream[i] & 0x1F) {
      case 0x06:
        var data = avcStream.subarray(i + 1, i + 1 + length);
        var matchingSample = mapToSample(i, samples);

        seiNal = {
          nalUnitType: 'sei_rbsp',
          size: length,
          data: data,
          escapedRBSP: discardEmulationPreventionBytes$1(data),
          trackId: trackId
        };

        if (matchingSample) {
          seiNal.pts = matchingSample.pts;
          seiNal.dts = matchingSample.dts;
          lastMatchedSample = matchingSample;
        } else if (lastMatchedSample) {
          // If a matching sample cannot be found, use the last
          // sample's values as they should be as close as possible
          seiNal.pts = lastMatchedSample.pts;
          seiNal.dts = lastMatchedSample.dts;
        } else {
          // BUG FIX: previously `lastMatchedSample.pts` was dereferenced
          // unconditionally here; if the very first SEI nal had no
          // matching sample this threw a TypeError. With no timing
          // information available at all, drop this nal instead.
          break;
        }

        result.push(seiNal);
        break;
      default:
        break;
    }
  }

  return result;
};
20216
/**
 * Parses sample information out of Track Run Boxes and calculates
 * the absolute presentation and decode timestamps of each sample.
 *
 * @param {Array<Uint8Array>} truns - The Trun Run boxes to be parsed
 * @param {Number} baseMediaDecodeTime - base media decode time from tfdt
 *   @see ISO-BMFF-12/2015, Section 8.8.12
 * @param {Object} tfhd - The parsed Track Fragment Header
 *   @see inspect.parseTfhd
 * @return {Object[]} the parsed samples
 *
 * @see ISO-BMFF-12/2015, Section 8.8.8
 **/
var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
  var runningDts = baseMediaDecodeTime;
  var fallbackDuration = tfhd.defaultSampleDuration || 0;
  var fallbackSize = tfhd.defaultSampleSize || 0;
  var trackId = tfhd.trackId;
  var allSamples = [];

  // Note: We currently do not parse the sample table as well
  // as the trun. It's possible some sources will require this.
  // moov > trak > mdia > minf > stbl
  for (var t = 0; t < truns.length; t++) {
    var trackRun = mp4Inspector.parseTrun(truns[t]);
    var runSamples = trackRun.samples;

    for (var s = 0; s < runSamples.length; s++) {
      var sample = runSamples[s];

      // fill in per-sample fields from the tfhd defaults when absent
      if (sample.duration === undefined) {
        sample.duration = fallbackDuration;
      }
      if (sample.size === undefined) {
        sample.size = fallbackSize;
      }
      sample.trackId = trackId;
      sample.dts = runningDts;
      if (sample.compositionTimeOffset === undefined) {
        sample.compositionTimeOffset = 0;
      }
      sample.pts = runningDts + sample.compositionTimeOffset;

      // decode times accumulate across truns within the fragment
      runningDts += sample.duration;
    }

    allSamples = allSamples.concat(runSamples);
  }

  return allSamples;
};
20266
/**
 * Parses out caption nals from an FMP4 segment's video tracks.
 *
 * @param {Uint8Array} segment - The bytes of a single segment
 * @param {Number} videoTrackId - The trackId of a video track in the segment
 * @return {Object.<Number, Object[]>} A mapping of video trackId to
 *   a list of seiNals found in that track
 **/
var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
  // To get the samples
  var trafs = probe.findBox(segment, ['moof', 'traf']);
  // To get SEI NAL units
  var mdats = probe.findBox(segment, ['mdat']);
  var captionNals = {};

  // moofs and mdats come in pairs, so the traf at the same index as an
  // mdat describes that mdat's samples
  for (var idx = 0; idx < mdats.length; idx++) {
    var mdat = mdats[idx];
    var traf = trafs[idx];

    // Exactly 1 tfhd per traf
    var tfhdBoxes = probe.findBox(traf, ['tfhd']);
    var headerInfo = mp4Inspector.parseTfhd(tfhdBoxes[0]);
    var trackId = headerInfo.trackId;

    // Either 0 or 1 tfdt per traf
    var tfdtBoxes = probe.findBox(traf, ['tfdt']);
    var baseMediaDecodeTime = tfdtBoxes.length > 0 ? mp4Inspector.parseTfdt(tfdtBoxes[0]).baseMediaDecodeTime : 0;

    var truns = probe.findBox(traf, ['trun']);

    // Only parse video data for the chosen video track
    if (videoTrackId === trackId && truns.length > 0) {
      var trackSamples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
      var seiNals = findSeiNals(mdat, trackSamples, trackId);

      if (!captionNals[trackId]) {
        captionNals[trackId] = [];
      }

      captionNals[trackId] = captionNals[trackId].concat(seiNals);
    }
  }

  return captionNals;
};
20322
/**
 * Parses out inband captions from an MP4 container and returns
 * caption objects that can be used by WebVTT and the TextTrack API.
 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
 *
 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
 * @param {Number} trackId - The id of the video track to parse
 * @param {Number} timescale - The timescale for the video track from the init segment
 *
 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
 * @return {String} parsedCaptions[].text - The visible content of the caption
 **/
var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
  // the ISO-BMFF spec says that trackId can't be zero, but there's some
  // broken content out there, so only an explicit null bails out here
  if (trackId === null) {
    return null;
  }

  var seiNals = parseCaptionNals(segment, trackId);

  return {
    seiNals: seiNals[trackId],
    timescale: timescale
  };
};
20354
/**
 * Converts SEI NALUs into captions that can be used by video.js
 *
 * Stateful: holds the selected video trackId, its timescale, a cache of
 * segments seen before the init segment, and all captions parsed so far.
 * State lives in this closure rather than on `this`.
 **/
var CaptionParser = function CaptionParser() {
  var isInitialized = false;
  var captionStream$$1;

  // Stores segments seen before trackId and timescale are set
  var segmentCache;
  // Stores video track ID of the track being parsed
  var trackId;
  // Stores the timescale of the track being parsed
  var timescale;
  // Stores captions parsed so far
  var parsedCaptions;
  // Stores whether we are receiving partial data or not
  var parsingPartial;

  /**
   * A method to indicate whether a CaptionParser has been initalized
   * @returns {Boolean}
   **/
  this.isInitialized = function () {
    return isInitialized;
  };

  /**
   * Initializes the underlying CaptionStream, SEI NAL parsing
   * and management, and caption collection
   *
   * @param {Object} [options]
   * @param {Boolean} [options.isPartial] - when true, flushStream() uses
   *   partialFlush() instead of flush()
   **/
  this.init = function (options) {
    captionStream$$1 = new CaptionStream$1();
    isInitialized = true;
    parsingPartial = options ? options.isPartial : false;

    // Collect dispatched captions
    captionStream$$1.on('data', function (event) {
      // Convert to seconds in the source's timescale
      event.startTime = event.startPts / timescale;
      event.endTime = event.endPts / timescale;

      parsedCaptions.captions.push(event);
      parsedCaptions.captionStreams[event.stream] = true;
    });
  };

  /**
   * Determines if a new video track will be selected
   * or if the timescale changed
   * @return {Boolean}
   **/
  this.isNewInit = function (videoTrackIds, timescales) {
    // empty track/timescale info is treated as "not a new init"
    if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
      return false;
    }

    return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
  };

  /**
   * Parses out SEI captions and interacts with underlying
   * CaptionStream to return dispatched captions
   *
   * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
   * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
   * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
   * @see parseEmbeddedCaptions
   * @see m2ts/caption-stream.js
   **/
  this.parse = function (segment, videoTrackIds, timescales) {
    var parsedData;

    if (!this.isInitialized()) {
      return null;

      // This is not likely to be a video segment
    } else if (!videoTrackIds || !timescales) {
      return null;
    } else if (this.isNewInit(videoTrackIds, timescales)) {
      // Use the first video track only as there is no
      // mechanism to switch to other video tracks
      trackId = videoTrackIds[0];
      timescale = timescales[trackId];

      // If an init segment has not been seen yet, hold onto segment
      // data until we have one.
      // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
    } else if (trackId === null || !timescale) {
      segmentCache.push(segment);
      return null;
    }

    // Now that a timescale and trackId is set, parse cached segments
    // (each cached segment re-enters this method, but with trackId and
    // timescale set it will not be re-cached)
    while (segmentCache.length > 0) {
      var cachedSegment = segmentCache.shift();

      this.parse(cachedSegment, videoTrackIds, timescales);
    }

    parsedData = parseEmbeddedCaptions(segment, trackId, timescale);

    if (parsedData === null || !parsedData.seiNals) {
      return null;
    }

    this.pushNals(parsedData.seiNals);
    // Force the parsed captions to be dispatched
    this.flushStream();

    return parsedCaptions;
  };

  /**
   * Pushes SEI NALUs onto CaptionStream
   * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
   * Assumes that `parseCaptionNals` has been called first
   * @see m2ts/caption-stream.js
   **/
  this.pushNals = function (nals) {
    if (!this.isInitialized() || !nals || nals.length === 0) {
      return null;
    }

    nals.forEach(function (nal) {
      captionStream$$1.push(nal);
    });
  };

  /**
   * Flushes underlying CaptionStream to dispatch processed, displayable captions
   * @see m2ts/caption-stream.js
   **/
  this.flushStream = function () {
    if (!this.isInitialized()) {
      return null;
    }

    if (!parsingPartial) {
      captionStream$$1.flush();
    } else {
      captionStream$$1.partialFlush();
    }
  };

  /**
   * Reset caption buckets for new data
   **/
  this.clearParsedCaptions = function () {
    parsedCaptions.captions = [];
    parsedCaptions.captionStreams = {};
  };

  /**
   * Resets underlying CaptionStream
   * @see m2ts/caption-stream.js
   **/
  this.resetCaptionStream = function () {
    if (!this.isInitialized()) {
      return null;
    }

    captionStream$$1.reset();
  };

  /**
   * Convenience method to clear all captions flushed from the
   * CaptionStream and still being parsed
   * @see m2ts/caption-stream.js
   **/
  this.clearAllCaptions = function () {
    this.clearParsedCaptions();
    this.resetCaptionStream();
  };

  /**
   * Reset caption parser
   **/
  this.reset = function () {
    segmentCache = [];
    trackId = null;
    timescale = null;

    if (!parsedCaptions) {
      parsedCaptions = {
        captions: [],
        // CC1, CC2, CC3, CC4
        captionStreams: {}
      };
    } else {
      this.clearParsedCaptions();
    }

    this.resetCaptionStream();
  };

  // initialize closure state on construction
  this.reset();
};
20552
// lower-camel alias matching this module's naming convention for exports
var captionParser = CaptionParser;

/**
 * @file segment-loader.js
 */

// in ms
var CHECK_BUFFER_DELAY = 500;
20561
/**
 * Determines if we should call endOfStream on the media source based
 * on the state of the buffer or if appended segment was the final
 * segment in the playlist.
 *
 * @param {Object} playlist a media playlist object
 * @param {Object} mediaSource the MediaSource object
 * @param {Number} segmentIndex the index of segment we last appended
 * @returns {Boolean} do we need to call endOfStream on the MediaSource
 */
var detectEndOfStream = function detectEndOfStream(playlist, mediaSource, segmentIndex) {
  // ROBUSTNESS FIX: also bail when the playlist has no segments array —
  // previously that case threw a TypeError reading `segments.length`.
  if (!playlist || !mediaSource || !playlist.segments) {
    return false;
  }

  var segments = playlist.segments;

  // determine a few boolean values to help make the branch below easier
  // to read
  var appendedLastSegment = segmentIndex === segments.length;

  // if we've buffered to the end of the video, we need to call endOfStream
  // so that MediaSources can trigger the `ended` event when it runs out of
  // buffered data instead of waiting for us
  return playlist.endList && mediaSource.readyState === 'open' && appendedLastSegment;
};
20588
/**
 * Returns true when `num` is a primitive number that is neither NaN
 * nor +/-Infinity.
 *
 * @param {*} num - value to test
 * @return {Boolean} whether `num` is a finite number
 */
var finite = function finite(num) {
  if (typeof num !== 'number') {
    return false;
  }
  return isFinite(num);
};
20592
/**
 * Returns an error message when a rendition switch would change the set
 * of media types (audio/video) in a way playback cannot survive, or null
 * when the switch is acceptable.
 *
 * @param {String} loaderType - the type of loader ('main', 'audio', ...)
 * @param {Object} startingMedia - media info of the stream we started on
 * @param {Object} newSegmentMedia - media info of the new segment
 * @return {?String} a human-readable error, or null when legal
 */
var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, newSegmentMedia) {
  // Although these checks should most likely cover non 'main' types, for now it narrows
  // the scope of our checks.
  if (loaderType !== 'main' || !startingMedia || !newSegmentMedia) {
    return null;
  }

  var hadVideo = startingMedia.containsVideo;
  var hasVideo = newSegmentMedia.containsVideo;

  if (!newSegmentMedia.containsAudio && !hasVideo) {
    return 'Neither audio nor video found in segment.';
  }

  if (hadVideo && !hasVideo) {
    return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
  }

  if (!hadVideo && hasVideo) {
    return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
  }

  return null;
};
20614
/**
 * Calculates a time value that is safe to remove from the back buffer without interupting
 * playback.
 *
 * @param {TimeRange} seekable
 *        The current seekable range
 * @param {Number} currentTime
 *        The current time of the player
 * @param {Number} targetDuration
 *        The target duration of the current playlist
 * @return {Number}
 *         Time that is safe to remove from the back buffer without interupting playback
 */
var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable$$1, currentTime, targetDuration) {
  // 30 seconds before the playhead provides a safe default for trimming.
  //
  // Choosing a reasonable default is particularly important for high bitrate content and
  // VOD videos/live streams with large windows, as the buffer may end up overfilled and
  // throw an APPEND_BUFFER_ERR.
  var removePoint = currentTime - 30;

  if (seekable$$1.length) {
    // Some live playlists may have a shorter window of content than the full allowed back
    // buffer. For these playlists, don't save content that's no longer within the window.
    removePoint = Math.max(removePoint, seekable$$1.start(0));
  }

  // Don't remove within target duration of the current time to avoid the possibility of
  // removing the GOP currently being played, as removing it can cause playback stalls.
  var upperBound = currentTime - targetDuration;

  return Math.min(upperBound, removePoint);
};
20648
/**
 * Builds a one-line log description of a pending segment append:
 * its media index, the playlist's sequence window and id, and the
 * segment's start/end times and timeline.
 *
 * @param {Object} segmentInfo - the pending segment info object
 * @return {String} a formatted log line
 */
var segmentInfoString = function segmentInfoString(segmentInfo) {
  var start = segmentInfo.segment.start;
  var end = segmentInfo.segment.end;
  var playlist = segmentInfo.playlist;
  var seq = playlist.mediaSequence;
  var id = playlist.id;
  var segments = playlist.segments === undefined ? [] : playlist.segments;
  var index = segmentInfo.mediaIndex;
  var timeline = segmentInfo.timeline;

  var positionPart = 'appending [' + index + '] of [' + seq + ', ' + (seq + segments.length) + '] from playlist [' + id + ']';
  var timingPart = '[' + start + ' => ' + end + '] in timeline [' + timeline + ']';

  return positionPart + ' ' + timingPart;
};
20664
20665 /**
20666 * An object that manages segment loading and appending.
20667 *
20668 * @class SegmentLoader
20669 * @param {Object} options required and optional options
20670 * @extends videojs.EventTarget
20671 */
20672
20673 var SegmentLoader = function (_videojs$EventTarget) {
20674 inherits(SegmentLoader, _videojs$EventTarget);
20675
  function SegmentLoader(settings) {
    classCallCheck(this, SegmentLoader);

    // check pre-conditions
    var _this = possibleConstructorReturn(this, (SegmentLoader.__proto__ || Object.getPrototypeOf(SegmentLoader)).call(this));

    if (!settings) {
      throw new TypeError('Initialization settings are required');
    }
    if (typeof settings.currentTime !== 'function') {
      throw new TypeError('No currentTime getter specified');
    }
    if (!settings.mediaSource) {
      throw new TypeError('No MediaSource specified');
    }
    // public properties
    _this.bandwidth = settings.bandwidth;
    _this.throughput = { rate: 0, count: 0 };
    _this.roundTrip = NaN;
    _this.resetStats_();
    _this.mediaIndex = null;

    // private settings: getters/objects handed in by the controller
    _this.hasPlayed_ = settings.hasPlayed;
    _this.currentTime_ = settings.currentTime;
    _this.seekable_ = settings.seekable;
    _this.seeking_ = settings.seeking;
    _this.duration_ = settings.duration;
    _this.mediaSource_ = settings.mediaSource;
    _this.hls_ = settings.hls;
    _this.loaderType_ = settings.loaderType;
    _this.startingMedia_ = void 0;
    _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
    _this.goalBufferLength_ = settings.goalBufferLength;
    _this.sourceType_ = settings.sourceType;
    _this.inbandTextTracks_ = settings.inbandTextTracks;
    _this.state_ = 'INIT';

    // private instance variables
    _this.checkBufferTimeout_ = null;
    _this.error_ = void 0;
    _this.currentTimeline_ = -1;
    _this.pendingSegment_ = null;
    _this.mimeType_ = null;
    _this.sourceUpdater_ = null;
    _this.xhrOptions_ = null;

    // Fragmented mp4 playback
    _this.activeInitSegmentId_ = null;
    _this.initSegments_ = {};

    // HLSe playback
    _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
    _this.keyCache_ = {};

    // Fmp4 CaptionParser: only the main loader parses captions
    if (_this.loaderType_ === 'main') {
      _this.captionParser_ = new captionParser();
    } else {
      _this.captionParser_ = null;
    }

    _this.decrypter_ = settings.decrypter;

    // Manages the tracking and generation of sync-points, mappings
    // between a time in the display time and a segment index within
    // a playlist
    _this.syncController_ = settings.syncController;
    _this.syncPoint_ = {
      segmentIndex: 0,
      time: 0
    };

    // keep a bound reference so dispose() can detach this exact listener
    _this.triggerSyncInfoUpdate_ = function () {
      return _this.trigger('syncinfoupdate');
    };
    _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);

    _this.mediaSource_.addEventListener('sourceopen', function () {
      return _this.ended_ = false;
    });

    // ...for determining the fetch location
    _this.fetchAtBuffer_ = false;

    _this.logger_ = logger('SegmentLoader[' + _this.loaderType_ + ']');

    // `state` wraps `state_` so every transition is logged
    Object.defineProperty(_this, 'state', {
      get: function get$$1() {
        return this.state_;
      },
      set: function set$$1(newState) {
        if (newState !== this.state_) {
          this.logger_(this.state_ + ' -> ' + newState);
          this.state_ = newState;
        }
      }
    });
    return _this;
  }
20776
20777 /**
20778 * reset all of our media stats
20779 *
20780 * @private
20781 */
20782
20783
  createClass(SegmentLoader, [{
    key: 'resetStats_',
    value: function resetStats_() {
      // zero every transfer statistic; called from the constructor and
      // again from dispose()
      this.mediaBytesTransferred = 0;
      this.mediaRequests = 0;
      this.mediaRequestsAborted = 0;
      this.mediaRequestsTimedout = 0;
      this.mediaRequestsErrored = 0;
      this.mediaTransferDuration = 0;
      this.mediaSecondsLoaded = 0;
    }
20795
20796 /**
20797 * dispose of the SegmentLoader and reset to the default state
20798 */
20799
20800 }, {
20801 key: 'dispose',
20802 value: function dispose() {
20803 this.trigger('dispose');
20804 this.state = 'DISPOSED';
20805 this.pause();
20806 this.abort_();
20807 if (this.sourceUpdater_) {
20808 this.sourceUpdater_.dispose();
20809 }
20810 this.resetStats_();
20811 if (this.captionParser_) {
20812 this.captionParser_.reset();
20813 }
20814
20815 if (this.checkBufferTimeout_) {
20816 window_1.clearTimeout(this.checkBufferTimeout_);
20817 }
20818
20819 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
20820 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
20821 }
20822
20823 this.off();
20824 }
20825
20826 /**
20827 * abort anything that is currently doing on with the SegmentLoader
20828 * and reset to a default state
20829 */
20830
  }, {
    key: 'abort',
    value: function abort() {
      // outside of WAITING there is no in-flight request; just drop any
      // stale pending segment reference
      if (this.state !== 'WAITING') {
        if (this.pendingSegment_) {
          this.pendingSegment_ = null;
        }
        return;
      }

      this.abort_();

      // We aborted the requests we were waiting on, so reset the loader's state to READY
      // since we are no longer "waiting" on any requests. XHR callback is not always run
      // when the request is aborted. This will prevent the loader from being stuck in the
      // WAITING state indefinitely.
      this.state = 'READY';

      // don't wait for buffer check timeouts to begin fetching the
      // next segment
      if (!this.paused()) {
        this.monitorBuffer_();
      }
    }
20855
20856 /**
20857 * abort all pending xhr requests and null any pending segements
20858 *
20859 * @private
20860 */
20861
  }, {
    key: 'abort_',
    value: function abort_() {
      // cancel any in-flight XHRs for the pending segment
      if (this.pendingSegment_) {
        this.pendingSegment_.abortRequests();
      }

      // clear out the segment being processed
      this.pendingSegment_ = null;
    }
20872
20873 /**
20874 * set an error on the segment loader and null out any pending segements
20875 *
20876 * @param {Error} error the error to set on the SegmentLoader
20877 * @return {Error} the error that was set or that is currently set
20878 */
20879
  }, {
    key: 'error',
    value: function error(_error) {
      // acts as both setter (when called with an argument) and getter
      if (typeof _error !== 'undefined') {
        this.error_ = _error;
      }

      this.pendingSegment_ = null;
      return this.error_;
    }
  }, {
    key: 'endOfStream',
    value: function endOfStream() {
      // stop fetching and notify listeners that playback content is done
      this.ended_ = true;
      this.pause();
      this.trigger('ended');
    }
20897
20898 /**
20899 * Indicates which time ranges are buffered
20900 *
20901 * @return {TimeRange}
20902 * TimeRange object representing the current buffered ranges
20903 */
20904
  }, {
    key: 'buffered_',
    value: function buffered_() {
      // before init_() creates the source updater there is nothing buffered
      if (!this.sourceUpdater_) {
        return videojs.createTimeRanges();
      }

      return this.sourceUpdater_.buffered();
    }
20914
20915 /**
20916 * Gets and sets init segment for the provided map
20917 *
20918 * @param {Object} map
20919 * The map object representing the init segment to get or set
20920 * @param {Boolean=} set
20921 * If true, the init segment for the provided map should be saved
20922 * @return {Object}
20923 * map object for desired init segment
20924 */
20925
  }, {
    key: 'initSegment',
    value: function initSegment(map) {
      // optional second argument: when true, cache the init segment bytes
      var set$$1 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;

      if (!map) {
        return null;
      }

      var id = initSegmentId(map);
      var storedMap = this.initSegments_[id];

      // only cache once per id, and only when bytes are available
      if (set$$1 && !storedMap && map.bytes) {
        this.initSegments_[id] = storedMap = {
          resolvedUri: map.resolvedUri,
          byterange: map.byterange,
          bytes: map.bytes,
          timescales: map.timescales,
          videoTrackIds: map.videoTrackIds
        };
      }

      // fall back to the caller's map when nothing is cached
      return storedMap || map;
    }
20950
20951 /**
20952 * Gets and sets key for the provided key
20953 *
20954 * @param {Object} key
20955 * The key object representing the key to get or set
20956 * @param {Boolean=} set
20957 * If true, the key for the provided key should be saved
20958 * @return {Object}
20959 * Key object for desired key
20960 */
20961
  }, {
    key: 'segmentKey',
    value: function segmentKey(key) {
      // optional second argument: when true, cache the key bytes
      var set$$1 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;

      if (!key) {
        return null;
      }

      var id = segmentKeyId(key);
      var storedKey = this.keyCache_[id];

      // TODO: We should use the HTTP Expires header to invalidate our cache per
      // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
      if (this.cacheEncryptionKeys_ && set$$1 && !storedKey && key.bytes) {
        this.keyCache_[id] = storedKey = {
          resolvedUri: key.resolvedUri,
          bytes: key.bytes
        };
      }

      // always return a fresh object; bytes only included on a cache hit
      var result = {
        resolvedUri: (storedKey || key).resolvedUri
      };

      if (storedKey) {
        result.bytes = storedKey.bytes;
      }

      return result;
    }
20993
20994 /**
20995 * Returns true if all configuration required for loading is present, otherwise false.
20996 *
20997 * @return {Boolean} True if the all configuration is ready for loading
20998 * @private
20999 */
21000
  }, {
    key: 'couldBeginLoading_',
    value: function couldBeginLoading_() {
      return this.playlist_ && (
      // the source updater is created when init_ is called, so either having a
      // source updater or being in the INIT state with a mimeType is enough
      // to say we have all the needed configuration to start loading.
      this.sourceUpdater_ || this.mimeType_ && this.state === 'INIT') && !this.paused();
    }
21010
21011 /**
21012 * load a playlist and start to fill the buffer
21013 */
21014
  }, {
    key: 'load',
    value: function load() {
      // un-pause (monitorBuffer_ re-arms the check-buffer timeout)
      this.monitorBuffer_();

      // if we don't have a playlist yet, keep waiting for one to be
      // specified
      if (!this.playlist_) {
        return;
      }

      // not sure if this is the best place for this
      this.syncController_.setDateTimeMapping(this.playlist_);

      // if all the configuration is ready, initialize and begin loading
      if (this.state === 'INIT' && this.couldBeginLoading_()) {
        return this.init_();
      }

      // if we're in the middle of processing a segment already, don't
      // kick off an additional segment request
      if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
        return;
      }

      this.state = 'READY';
    }
21043
21044 /**
21045 * Once all the starting parameters have been specified, begin
21046 * operation. This method should only be invoked from the INIT
21047 * state.
21048 *
21049 * @private
21050 */
21051
  }, {
    key: 'init_',
    value: function init_() {
      this.state = 'READY';
      // creating the source updater is what flips couldBeginLoading_()
      // from the mimeType branch to the sourceUpdater_ branch
      this.sourceUpdater_ = new SourceUpdater(this.mediaSource_, this.mimeType_, this.loaderType_, this.sourceBufferEmitter_);
      this.resetEverything();
      return this.monitorBuffer_();
    }
21060
21061 /**
21062 * set a playlist on the segment loader
21063 *
21064 * @param {PlaylistLoader} media the playlist to set on the segment loader
21065 */
21066
  }, {
    key: 'playlist',
    value: function playlist(newPlaylist) {
      var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

      if (!newPlaylist) {
        return;
      }

      var oldPlaylist = this.playlist_;
      var segmentInfo = this.pendingSegment_;

      this.playlist_ = newPlaylist;
      this.xhrOptions_ = options;

      // when we haven't started playing yet, the start of a live playlist
      // is always our zero-time so force a sync update each time the playlist
      // is refreshed from the server
      //
      // Use the INIT state to determine if playback has started, as the playlist sync info
      // should be fixed once requests begin (as sync points are generated based on sync
      // info), but not before then.
      if (this.state === 'INIT') {
        newPlaylist.syncInfo = {
          mediaSequence: newPlaylist.mediaSequence,
          time: 0
        };
      }

      // prefer the playlist id for logging; fall back to its uri
      var oldId = null;

      if (oldPlaylist) {
        if (oldPlaylist.id) {
          oldId = oldPlaylist.id;
        } else if (oldPlaylist.uri) {
          oldId = oldPlaylist.uri;
        }
      }

      this.logger_('playlist update [' + oldId + ' => ' + (newPlaylist.id || newPlaylist.uri) + ']');

      // in VOD, this is always a rendition switch (or we updated our syncInfo above)
      // in LIVE, we always want to update with new playlists (including refreshes)
      this.trigger('syncinfoupdate');

      // if we were unpaused but waiting for a playlist, start
      // buffering now
      if (this.state === 'INIT' && this.couldBeginLoading_()) {
        return this.init_();
      }

      if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
        if (this.mediaIndex !== null) {
          // we must "resync" the segment loader when we switch renditions and
          // the segment loader is already synced to the previous rendition
          this.resyncLoader();
        }

        // the rest of this function depends on `oldPlaylist` being defined
        return;
      }

      // we reloaded the same playlist so we are in a live scenario
      // and we will likely need to adjust the mediaIndex
      var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;

      this.logger_('live window shift [' + mediaSequenceDiff + ']');

      // update the mediaIndex on the SegmentLoader
      // this is important because we can abort a request and this value must be
      // equal to the last appended mediaIndex
      if (this.mediaIndex !== null) {
        this.mediaIndex -= mediaSequenceDiff;
      }

      // update the mediaIndex on the SegmentInfo object
      // this is important because we will update this.mediaIndex with this value
      // in `handleUpdateEnd_` after the segment has been successfully appended
      if (segmentInfo) {
        segmentInfo.mediaIndex -= mediaSequenceDiff;

        // we need to update the referenced segment so that timing information is
        // saved for the new playlist's segment, however, if the segment fell off the
        // playlist, we can leave the old reference and just lose the timing info
        if (segmentInfo.mediaIndex >= 0) {
          segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
        }
      }

      this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
    }
21158
21159 /**
21160 * Prevent the loader from fetching additional segments. If there
21161 * is a segment request outstanding, it will finish processing
21162 * before the loader halts. A segment loader can be unpaused by
21163 * calling load().
21164 */
21165
21166 }, {
21167 key: 'pause',
21168 value: function pause() {
21169 if (this.checkBufferTimeout_) {
21170 window_1.clearTimeout(this.checkBufferTimeout_);
21171
21172 this.checkBufferTimeout_ = null;
21173 }
21174 }
21175
21176 /**
21177 * Returns whether the segment loader is fetching additional
21178 * segments when given the opportunity. This property can be
21179 * modified through calls to pause() and load().
21180 */
21181
21182 }, {
21183 key: 'paused',
21184 value: function paused() {
21185 return this.checkBufferTimeout_ === null;
21186 }
21187
21188 /**
21189 * create/set the following mimetype on the SourceBuffer through a
21190 * SourceUpdater
21191 *
21192 * @param {String} mimeType the mime type string to use
21193 * @param {Object} sourceBufferEmitter an event emitter that fires when a source buffer
21194 * is added to the media source
21195 */
21196
21197 }, {
21198 key: 'mimeType',
21199 value: function mimeType(_mimeType, sourceBufferEmitter) {
21200 if (this.mimeType_) {
21201 return;
21202 }
21203
21204 this.mimeType_ = _mimeType;
21205 this.sourceBufferEmitter_ = sourceBufferEmitter;
21206 // if we were unpaused but waiting for a sourceUpdater, start
21207 // buffering now
21208 if (this.state === 'INIT' && this.couldBeginLoading_()) {
21209 this.init_();
21210 }
21211 }
21212
21213 /**
21214 * Delete all the buffered data and reset the SegmentLoader
21215 * @param {Function} [done] an optional callback to be executed when the remove
21216 * operation is complete
21217 */
21218
21219 }, {
21220 key: 'resetEverything',
21221 value: function resetEverything(done) {
21222 this.ended_ = false;
21223 this.resetLoader();
21224
21225 // remove from 0, the earliest point, to Infinity, to signify removal of everything.
21226 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
21227 // we then clamp the value to duration if necessary.
21228 this.remove(0, Infinity, done);
21229
21230 // clears fmp4 captions
21231 if (this.captionParser_) {
21232 this.captionParser_.clearAllCaptions();
21233 }
21234 this.trigger('reseteverything');
21235 }
21236
21237 /**
21238 * Force the SegmentLoader to resync and start loading around the currentTime instead
21239 * of starting at the end of the buffer
21240 *
21241 * Useful for fast quality changes
21242 */
21243
    }, {
      key: 'resetLoader',
      value: function resetLoader() {
        // Force the next fill to locate segments from currentTime rather than
        // the end of the buffer, then drop all sync/index state.
        this.fetchAtBuffer_ = false;
        this.resyncLoader();
      }
21250
21251 /**
21252 * Force the SegmentLoader to restart synchronization and make a conservative guess
21253 * before returning to the simple walk-forward method
21254 */
21255
    }, {
      key: 'resyncLoader',
      value: function resyncLoader() {
        // Forget the walk-forward position and the sync point so the next
        // fillBuffer_ makes a fresh conservative guess, then abort any
        // in-flight request tied to the old position.
        this.mediaIndex = null;
        this.syncPoint_ = null;
        this.abort();
      }
21263
21264 /**
21265 * Remove any data in the source buffer between start and end times
21266 * @param {Number} start - the start time of the region to remove from the buffer
21267 * @param {Number} end - the end time of the region to remove from the buffer
21268 * @param {Function} [done] - an optional callback to be executed when the remove
21269 * operation is complete
21270 */
21271
21272 }, {
21273 key: 'remove',
21274 value: function remove(start, end, done) {
21275 // clamp end to duration if we need to remove everything.
21276 // This is due to a browser bug that causes issues if we remove to Infinity.
21277 // videojs/videojs-contrib-hls#1225
21278 if (end === Infinity) {
21279 end = this.duration_();
21280 }
21281
21282 if (this.sourceUpdater_) {
21283 this.sourceUpdater_.remove(start, end, done);
21284 }
21285 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
21286
21287 if (this.inbandTextTracks_) {
21288 for (var id in this.inbandTextTracks_) {
21289 removeCuesFromTrack(start, end, this.inbandTextTracks_[id]);
21290 }
21291 }
21292 }
21293
21294 /**
21295 * (re-)schedule monitorBufferTick_ to run as soon as possible
21296 *
21297 * @private
21298 */
21299
21300 }, {
21301 key: 'monitorBuffer_',
21302 value: function monitorBuffer_() {
21303 if (this.checkBufferTimeout_) {
21304 window_1.clearTimeout(this.checkBufferTimeout_);
21305 }
21306
21307 this.checkBufferTimeout_ = window_1.setTimeout(this.monitorBufferTick_.bind(this), 1);
21308 }
21309
21310 /**
21311 * As long as the SegmentLoader is in the READY state, periodically
21312 * invoke fillBuffer_().
21313 *
21314 * @private
21315 */
21316
21317 }, {
21318 key: 'monitorBufferTick_',
21319 value: function monitorBufferTick_() {
21320 if (this.state === 'READY') {
21321 this.fillBuffer_();
21322 }
21323
21324 if (this.checkBufferTimeout_) {
21325 window_1.clearTimeout(this.checkBufferTimeout_);
21326 }
21327
21328 this.checkBufferTimeout_ = window_1.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
21329 }
21330
21331 /**
     * fill the buffer with segments unless the sourceBuffers are
21333 * currently updating
21334 *
21335 * Note: this function should only ever be called by monitorBuffer_
21336 * and never directly
21337 *
21338 * @private
21339 */
21340
    }, {
      key: 'fillBuffer_',
      value: function fillBuffer_() {
        // Appends are serialized through the SourceUpdater; wait for the
        // in-flight operation to complete before deciding what to load next.
        if (this.sourceUpdater_.updating()) {
          return;
        }

        // Lazily (re)establish a sync point; resyncLoader() clears it.
        if (!this.syncPoint_) {
          this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
        }

        // see if we need to begin loading immediately
        var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);

        // null means there is nothing useful to fetch right now
        if (!segmentInfo) {
          return;
        }

        if (this.isEndOfStream_(segmentInfo.mediaIndex)) {
          this.endOfStream();
          return;
        }

        // Don't re-request the final segment of an already-ended stream
        // unless we're servicing a seek.
        if (segmentInfo.mediaIndex === this.playlist_.segments.length - 1 && this.mediaSource_.readyState === 'ended' && !this.seeking_()) {
          return;
        }

        // We will need to change timestampOffset of the sourceBuffer if:
        // - The segment.timeline !== this.currentTimeline
        //   (we are crossing a discontinuity somehow)
        // - The "timestampOffset" for the start of this segment is less than
        //   the currently set timestampOffset
        // Also, clear captions if we are crossing a discontinuity boundary
        // Previously, we changed the timestampOffset if the start of this segment
        // is less than the currently set timestampOffset but this isn't wanted
        // as it can produce bad behavior, especially around long running
        // live streams
        if (segmentInfo.timeline !== this.currentTimeline_) {
          this.syncController_.reset();
          segmentInfo.timestampOffset = segmentInfo.startOfSegment;
          if (this.captionParser_) {
            this.captionParser_.clearAllCaptions();
          }
        }

        this.loadSegment_(segmentInfo);
      }
21388
21389 /**
     * Determines if this segment loader is at the end of its stream.
21391 *
21392 * @param {Number} mediaIndex the index of segment we last appended
21393 * @param {Object} [playlist=this.playlist_] a media playlist object
21394 * @returns {Boolean} true if at end of stream, false otherwise.
21395 */
21396
21397 }, {
21398 key: 'isEndOfStream_',
21399 value: function isEndOfStream_(mediaIndex) {
21400 var playlist = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : this.playlist_;
21401
21402 return detectEndOfStream(playlist, this.mediaSource_, mediaIndex) && !this.sourceUpdater_.updating();
21403 }
21404
21405 /**
21406 * Determines what segment request should be made, given current playback
21407 * state.
21408 *
21409 * @param {TimeRanges} buffered - the state of the buffer
21410 * @param {Object} playlist - the playlist object to fetch segments from
21411 * @param {Number} mediaIndex - the previous mediaIndex fetched or null
21412 * @param {Boolean} hasPlayed - a flag indicating whether we have played or not
21413 * @param {Number} currentTime - the playback position in seconds
     * @param {Object} syncPoint - a segment info object that describes the current sync point
21415 * @returns {Object} a segment request object that describes the segment to load
21416 */
21417
21418 }, {
21419 key: 'checkBuffer_',
21420 value: function checkBuffer_(buffered, playlist, mediaIndex, hasPlayed, currentTime, syncPoint) {
21421 var lastBufferedEnd = 0;
21422 var startOfSegment = void 0;
21423
21424 if (buffered.length) {
21425 lastBufferedEnd = buffered.end(buffered.length - 1);
21426 }
21427
21428 var bufferedTime = Math.max(0, lastBufferedEnd - currentTime);
21429
21430 if (!playlist.segments.length) {
21431 return null;
21432 }
21433
21434 // if there is plenty of content buffered, and the video has
21435 // been played before relax for awhile
21436 if (bufferedTime >= this.goalBufferLength_()) {
21437 return null;
21438 }
21439
21440 // if the video has not yet played once, and we already have
21441 // one segment downloaded do nothing
21442 if (!hasPlayed && bufferedTime >= 1) {
21443 return null;
21444 }
21445
21446 // When the syncPoint is null, there is no way of determining a good
21447 // conservative segment index to fetch from
21448 // The best thing to do here is to get the kind of sync-point data by
21449 // making a request
21450 if (syncPoint === null) {
21451 mediaIndex = this.getSyncSegmentCandidate_(playlist);
21452 return this.generateSegmentInfo_(playlist, mediaIndex, null, true);
21453 }
21454
21455 // Under normal playback conditions fetching is a simple walk forward
21456 if (mediaIndex !== null) {
21457 var segment = playlist.segments[mediaIndex];
21458
21459 startOfSegment = lastBufferedEnd;
21460
21461 return this.generateSegmentInfo_(playlist, mediaIndex + 1, startOfSegment, false);
21462 }
21463
21464 // There is a sync-point but the lack of a mediaIndex indicates that
21465 // we need to make a good conservative guess about which segment to
21466 // fetch
21467 if (this.fetchAtBuffer_) {
21468 // Find the segment containing the end of the buffer
21469 var mediaSourceInfo = Playlist.getMediaInfoForTime(playlist, lastBufferedEnd, syncPoint.segmentIndex, syncPoint.time);
21470
21471 mediaIndex = mediaSourceInfo.mediaIndex;
21472 startOfSegment = mediaSourceInfo.startTime;
21473 } else {
21474 // Find the segment containing currentTime
21475 var _mediaSourceInfo = Playlist.getMediaInfoForTime(playlist, currentTime, syncPoint.segmentIndex, syncPoint.time);
21476
21477 mediaIndex = _mediaSourceInfo.mediaIndex;
21478 startOfSegment = _mediaSourceInfo.startTime;
21479 }
21480
21481 return this.generateSegmentInfo_(playlist, mediaIndex, startOfSegment, false);
21482 }
21483
21484 /**
21485 * The segment loader has no recourse except to fetch a segment in the
21486 * current playlist and use the internal timestamps in that segment to
21487 * generate a syncPoint. This function returns a good candidate index
21488 * for that process.
21489 *
21490 * @param {Object} playlist - the playlist object to look for a
21491 * @returns {Number} An index of a segment from the playlist to load
21492 */
21493
21494 }, {
21495 key: 'getSyncSegmentCandidate_',
21496 value: function getSyncSegmentCandidate_(playlist) {
21497 var _this2 = this;
21498
21499 if (this.currentTimeline_ === -1) {
21500 return 0;
21501 }
21502
21503 var segmentIndexArray = playlist.segments.map(function (s, i) {
21504 return {
21505 timeline: s.timeline,
21506 segmentIndex: i
21507 };
21508 }).filter(function (s) {
21509 return s.timeline === _this2.currentTimeline_;
21510 });
21511
21512 if (segmentIndexArray.length) {
21513 return segmentIndexArray[Math.min(segmentIndexArray.length - 1, 1)].segmentIndex;
21514 }
21515
21516 return Math.max(playlist.segments.length - 1, 0);
21517 }
21518 }, {
21519 key: 'generateSegmentInfo_',
21520 value: function generateSegmentInfo_(playlist, mediaIndex, startOfSegment, isSyncRequest) {
21521 if (mediaIndex < 0 || mediaIndex >= playlist.segments.length) {
21522 return null;
21523 }
21524
21525 var segment = playlist.segments[mediaIndex];
21526
21527 return {
21528 requestId: 'segment-loader-' + Math.random(),
21529 // resolve the segment URL relative to the playlist
21530 uri: segment.resolvedUri,
21531 // the segment's mediaIndex at the time it was requested
21532 mediaIndex: mediaIndex,
21533 // whether or not to update the SegmentLoader's state with this
21534 // segment's mediaIndex
21535 isSyncRequest: isSyncRequest,
21536 startOfSegment: startOfSegment,
21537 // the segment's playlist
21538 playlist: playlist,
21539 // unencrypted bytes of the segment
21540 bytes: null,
21541 // when a key is defined for this segment, the encrypted bytes
21542 encryptedBytes: null,
21543 // The target timestampOffset for this segment when we append it
21544 // to the source buffer
21545 timestampOffset: null,
21546 // The timeline that the segment is in
21547 timeline: segment.timeline,
21548 // The expected duration of the segment in seconds
21549 duration: segment.duration,
21550 // retain the segment in case the playlist updates while doing an async process
21551 segment: segment
21552 };
21553 }
21554
21555 /**
21556 * Determines if the network has enough bandwidth to complete the current segment
21557 * request in a timely manner. If not, the request will be aborted early and bandwidth
21558 * updated to trigger a playlist switch.
21559 *
21560 * @param {Object} stats
21561 * Object containing stats about the request timing and size
21562 * @return {Boolean} True if the request was aborted, false otherwise
21563 * @private
21564 */
21565
21566 }, {
21567 key: 'abortRequestEarly_',
21568 value: function abortRequestEarly_(stats) {
21569 if (this.hls_.tech_.paused() ||
21570 // Don't abort if the current playlist is on the lowestEnabledRendition
21571 // TODO: Replace using timeout with a boolean indicating whether this playlist is
21572 // the lowestEnabledRendition.
21573 !this.xhrOptions_.timeout ||
21574 // Don't abort if we have no bandwidth information to estimate segment sizes
21575 !this.playlist_.attributes.BANDWIDTH) {
21576 return false;
21577 }
21578
21579 // Wait at least 1 second since the first byte of data has been received before
21580 // using the calculated bandwidth from the progress event to allow the bitrate
21581 // to stabilize
21582 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
21583 return false;
21584 }
21585
21586 var currentTime = this.currentTime_();
21587 var measuredBandwidth = stats.bandwidth;
21588 var segmentDuration = this.pendingSegment_.duration;
21589
21590 var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived);
21591
21592 // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
21593 // if we are only left with less than 1 second when the request completes.
21594 // A negative timeUntilRebuffering indicates we are already rebuffering
21595 var timeUntilRebuffer$$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.hls_.tech_.playbackRate()) - 1;
21596
21597 // Only consider aborting early if the estimated time to finish the download
21598 // is larger than the estimated time until the player runs out of forward buffer
21599 if (requestTimeRemaining <= timeUntilRebuffer$$1) {
21600 return false;
21601 }
21602
21603 var switchCandidate = minRebufferMaxBandwidthSelector({
21604 master: this.hls_.playlists.master,
21605 currentTime: currentTime,
21606 bandwidth: measuredBandwidth,
21607 duration: this.duration_(),
21608 segmentDuration: segmentDuration,
21609 timeUntilRebuffer: timeUntilRebuffer$$1,
21610 currentTimeline: this.currentTimeline_,
21611 syncController: this.syncController_
21612 });
21613
21614 if (!switchCandidate) {
21615 return;
21616 }
21617
21618 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$$1;
21619
21620 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
21621
21622 var minimumTimeSaving = 0.5;
21623
21624 // If we are already rebuffering, increase the amount of variance we add to the
21625 // potential round trip time of the new request so that we are not too aggressive
21626 // with switching to a playlist that might save us a fraction of a second.
21627 if (timeUntilRebuffer$$1 <= TIME_FUDGE_FACTOR) {
21628 minimumTimeSaving = 1;
21629 }
21630
21631 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
21632 return false;
21633 }
21634
21635 // set the bandwidth to that of the desired playlist being sure to scale by
21636 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
21637 // don't trigger a bandwidthupdate as the bandwidth is artifial
21638 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
21639 this.abort();
21640 this.trigger('earlyabort');
21641 return true;
21642 }
21643
21644 /**
21645 * XHR `progress` event handler
21646 *
21647 * @param {Event}
21648 * The XHR `progress` event
21649 * @param {Object} simpleSegment
21650 * A simplified segment object copy
21651 * @private
21652 */
21653
21654 }, {
21655 key: 'handleProgress_',
21656 value: function handleProgress_(event, simpleSegment) {
21657 if (!this.pendingSegment_ || simpleSegment.requestId !== this.pendingSegment_.requestId || this.abortRequestEarly_(simpleSegment.stats)) {
21658 return;
21659 }
21660
21661 this.trigger('progress');
21662 }
21663
21664 /**
21665 * load a specific segment from a request into the buffer
21666 *
21667 * @private
21668 */
21669
    }, {
      key: 'loadSegment_',
      value: function loadSegment_(segmentInfo) {
        // Mark the loader busy and remember the request so later callbacks can
        // verify they belong to this segment.
        this.state = 'WAITING';
        this.pendingSegment_ = segmentInfo;

        // Free back-buffer space before appending more data.
        this.trimBackBuffer_(segmentInfo);

        // Start the (possibly multi-part) request; keep the abort handles so
        // the request can be canceled later.
        segmentInfo.abortRequests = mediaSegmentRequest(this.hls_.xhr, this.xhrOptions_, this.decrypter_, this.captionParser_, this.createSimplifiedSegmentObj_(segmentInfo),
        // progress callback
        this.handleProgress_.bind(this), this.segmentRequestFinished_.bind(this));
      }
21681
21682 /**
21683 * trim the back buffer so that we don't have too much data
21684 * in the source buffer
21685 *
21686 * @private
21687 *
21688 * @param {Object} segmentInfo - the current segment
21689 */
21690
21691 }, {
21692 key: 'trimBackBuffer_',
21693 value: function trimBackBuffer_(segmentInfo) {
21694 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10);
21695
21696 // Chrome has a hard limit of 150MB of
21697 // buffer and a very conservative "garbage collector"
21698 // We manually clear out the old buffer to ensure
21699 // we don't trigger the QuotaExceeded error
21700 // on the source buffer during subsequent appends
21701
21702 if (removeToTime > 0) {
21703 this.remove(0, removeToTime);
21704 }
21705 }
21706
21707 /**
21708 * created a simplified copy of the segment object with just the
21709 * information necessary to perform the XHR and decryption
21710 *
21711 * @private
21712 *
21713 * @param {Object} segmentInfo - the current segment
21714 * @returns {Object} a simplified segment object copy
21715 */
21716
21717 }, {
21718 key: 'createSimplifiedSegmentObj_',
21719 value: function createSimplifiedSegmentObj_(segmentInfo) {
21720 var segment = segmentInfo.segment;
21721 var simpleSegment = {
21722 resolvedUri: segment.resolvedUri,
21723 byterange: segment.byterange,
21724 requestId: segmentInfo.requestId
21725 };
21726
21727 if (segment.key) {
21728 // if the media sequence is greater than 2^32, the IV will be incorrect
21729 // assuming 10s segments, that would be about 1300 years
21730 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
21731
21732 simpleSegment.key = this.segmentKey(segment.key);
21733 simpleSegment.key.iv = iv;
21734 }
21735
21736 if (segment.map) {
21737 simpleSegment.map = this.initSegment(segment.map);
21738 }
21739
21740 return simpleSegment;
21741 }
21742
21743 /**
21744 * Handle the callback from the segmentRequest function and set the
21745 * associated SegmentLoader state and errors if necessary
21746 *
21747 * @private
21748 */
21749
    }, {
      key: 'segmentRequestFinished_',
      value: function segmentRequestFinished_(error, simpleSegment) {
        // every request counts as a media request even if it has been aborted
        // or canceled due to a timeout
        this.mediaRequests += 1;

        // accumulate bandwidth bookkeeping whenever stats are available
        if (simpleSegment.stats) {
          this.mediaBytesTransferred += simpleSegment.stats.bytesReceived;
          this.mediaTransferDuration += simpleSegment.stats.roundTripTime;
        }

        // The request was aborted and the SegmentLoader has already been reset
        if (!this.pendingSegment_) {
          this.mediaRequestsAborted += 1;
          return;
        }

        // the request was aborted and the SegmentLoader has already started
        // another request. this can happen when the timeout for an aborted
        // request triggers due to a limitation in the XHR library
        // do not count this as any sort of request or we risk double-counting
        if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
          return;
        }

        // an error occurred from the active pendingSegment_ so reset everything
        if (error) {
          this.pendingSegment_ = null;
          this.state = 'READY';

          // the requests were aborted just record the aborted stat and exit
          // this is not a true error condition and nothing corrective needs
          // to be done
          if (error.code === REQUEST_ERRORS.ABORTED) {
            this.mediaRequestsAborted += 1;
            return;
          }

          // stop the buffer-check loop while we take corrective action
          this.pause();

          // the error is really just that at least one of the requests timed-out
          // set the bandwidth to a very low value and trigger an ABR switch to
          // take emergency action
          if (error.code === REQUEST_ERRORS.TIMEOUT) {
            this.mediaRequestsTimedout += 1;
            this.bandwidth = 1;
            this.roundTrip = NaN;
            this.trigger('bandwidthupdate');
            return;
          }

          // if control-flow has arrived here, then the error is real
          // emit an error event to blacklist the current playlist
          this.mediaRequestsErrored += 1;
          this.error(error);
          this.trigger('error');
          return;
        }

        // the response was a success so set any bandwidth stats the request
        // generated for ABR purposes
        this.bandwidth = simpleSegment.stats.bandwidth;
        this.roundTrip = simpleSegment.stats.roundTripTime;

        // if this request included an initialization segment, save that data
        // to the initSegment cache
        if (simpleSegment.map) {
          simpleSegment.map = this.initSegment(simpleSegment.map, true);
        }

        // if this request included a segment key, save that data in the cache
        if (simpleSegment.key) {
          this.segmentKey(simpleSegment.key, true);
        }

        this.processSegmentResponse_(simpleSegment);
      }
21828
21829 /**
21830 * Move any important data from the simplified segment object
21831 * back to the real segment object for future phases
21832 *
21833 * @private
21834 */
21835
    }, {
      key: 'processSegmentResponse_',
      value: function processSegmentResponse_(simpleSegment) {
        var segmentInfo = this.pendingSegment_;

        // copy the downloaded bytes (and init-segment bytes, if any) back to
        // the real segment objects for the append phase
        segmentInfo.bytes = simpleSegment.bytes;
        if (simpleSegment.map) {
          segmentInfo.segment.map.bytes = simpleSegment.map.bytes;
        }

        segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;

        // This has fmp4 captions, add them to text tracks
        if (simpleSegment.fmp4Captions) {
          createCaptionsTrackIfNotExists(this.inbandTextTracks_, this.hls_.tech_, simpleSegment.captionStreams);
          addCaptionData({
            inbandTextTracks: this.inbandTextTracks_,
            captionArray: simpleSegment.fmp4Captions,
            // fmp4s will not have a timestamp offset
            timestampOffset: 0
          });
          // Reset stored captions since we added parsed
          // captions to a text track at this point
          if (this.captionParser_) {
            this.captionParser_.clearParsedCaptions();
          }
        }

        this.handleSegment_();
      }
21866
21867 /**
     * append a decrypted segment to the SourceBuffer through a SourceUpdater
21869 *
21870 * @private
21871 */
21872
    }, {
      key: 'handleSegment_',
      value: function handleSegment_() {
        var _this3 = this;

        // the request may have been aborted/reset while the response was
        // being processed; simply return to READY
        if (!this.pendingSegment_) {
          this.state = 'READY';
          return;
        }

        var segmentInfo = this.pendingSegment_;
        var segment = segmentInfo.segment;
        var timingInfo = this.syncController_.probeSegmentInfo(segmentInfo);

        // When we have our first timing info, determine what media types this loader is
        // dealing with. Although we're maintaining extra state, it helps to preserve the
        // separation of segment loader from the actual source buffers.
        if (typeof this.startingMedia_ === 'undefined' && timingInfo && (
        // Guard against cases where we're not getting timing info at all until we are
        // certain that all streams will provide it.
        timingInfo.containsAudio || timingInfo.containsVideo)) {
          this.startingMedia_ = {
            containsAudio: timingInfo.containsAudio,
            containsVideo: timingInfo.containsVideo
          };
        }

        // bail out (and blacklist forever) if the stream switched between
        // audio-only and muxed content mid-play
        var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.startingMedia_, timingInfo);

        if (illegalMediaSwitchError) {
          this.error({
            message: illegalMediaSwitchError,
            blacklistDuration: Infinity
          });
          this.trigger('error');
          return;
        }

        // a sync request only exists to gather timing info; don't append it
        if (segmentInfo.isSyncRequest) {
          this.trigger('syncinfoupdate');
          this.pendingSegment_ = null;
          this.state = 'READY';
          return;
        }

        if (segmentInfo.timestampOffset !== null && segmentInfo.timestampOffset !== this.sourceUpdater_.timestampOffset()) {

          // Subtract any difference between the PTS and DTS times of the first frame
          // from the timeStampOffset (which currently equals the buffered.end) to prevent
          // creating any gaps in the buffer
          if (timingInfo && timingInfo.segmentTimestampInfo) {
            var ptsStartTime = timingInfo.segmentTimestampInfo[0].ptsTime;
            var dtsStartTime = timingInfo.segmentTimestampInfo[0].dtsTime;

            segmentInfo.timestampOffset -= ptsStartTime - dtsStartTime;
          }

          this.sourceUpdater_.timestampOffset(segmentInfo.timestampOffset);
          // fired when a timestamp offset is set in HLS (can also identify discontinuities)
          this.trigger('timestampoffset');
        }

        var timelineMapping = this.syncController_.mappingForTimeline(segmentInfo.timeline);

        if (timelineMapping !== null) {
          this.trigger({
            type: 'segmenttimemapping',
            mapping: timelineMapping
          });
        }

        this.state = 'APPENDING';

        // if the media initialization segment is changing, append it
        // before the content segment
        if (segment.map) {
          var initId = initSegmentId(segment.map);

          if (!this.activeInitSegmentId_ || this.activeInitSegmentId_ !== initId) {
            var initSegment = this.initSegment(segment.map);

            this.sourceUpdater_.appendBuffer({
              bytes: initSegment.bytes
            }, function () {
              // only record the init segment as active once the append succeeds
              _this3.activeInitSegmentId_ = initId;
            });
          }
        }

        // stats for throughput/seconds-loaded bookkeeping; prefer probed
        // start/end times, fall back to the playlist's declared duration
        segmentInfo.byteLength = segmentInfo.bytes.byteLength;
        if (typeof segment.start === 'number' && typeof segment.end === 'number') {
          this.mediaSecondsLoaded += segment.end - segment.start;
        } else {
          this.mediaSecondsLoaded += segment.duration;
        }

        this.logger_(segmentInfoString(segmentInfo));

        this.sourceUpdater_.appendBuffer({
          bytes: segmentInfo.bytes,
          videoSegmentTimingInfoCallback: this.handleVideoSegmentTimingInfo_.bind(this, segmentInfo.requestId)
        }, this.handleUpdateEnd_.bind(this));
      }
21976 }, {
21977 key: 'handleVideoSegmentTimingInfo_',
21978 value: function handleVideoSegmentTimingInfo_(requestId, event) {
21979 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
21980 return;
21981 }
21982
21983 var segment = this.pendingSegment_.segment;
21984
21985 if (!segment.videoTimingInfo) {
21986 segment.videoTimingInfo = {};
21987 }
21988
21989 segment.videoTimingInfo.transmuxerPrependedSeconds = event.videoSegmentTimingInfo.prependedContentDuration || 0;
21990 segment.videoTimingInfo.transmuxedPresentationStart = event.videoSegmentTimingInfo.start.presentation;
21991 segment.videoTimingInfo.transmuxedPresentationEnd = event.videoSegmentTimingInfo.end.presentation;
21992 // mainly used as a reference for debugging
21993 segment.videoTimingInfo.baseMediaDecodeTime = event.videoSegmentTimingInfo.baseMediaDecodeTime;
21994 }
21995
21996 /**
21997 * callback to run when appendBuffer is finished. detects if we are
21998 * in a good state to do things with the data we got, or if we need
21999 * to wait for more
22000 *
22001 * @private
22002 */
22003
    }, {
      key: 'handleUpdateEnd_',
      value: function handleUpdateEnd_() {
        // the request was aborted/reset while the append was in flight;
        // return to READY and restart the buffer-check loop if unpaused
        if (!this.pendingSegment_) {
          this.state = 'READY';
          if (!this.paused()) {
            this.monitorBuffer_();
          }
          return;
        }

        var segmentInfo = this.pendingSegment_;
        var segment = segmentInfo.segment;
        // a non-null mediaIndex means we were walking forward, not guessing
        var isWalkingForward = this.mediaIndex !== null;

        this.pendingSegment_ = null;
        this.recordThroughput_(segmentInfo);
        this.addSegmentMetadataCue_(segmentInfo);

        this.state = 'READY';

        // advance the loader's position to the segment just appended
        this.mediaIndex = segmentInfo.mediaIndex;
        this.fetchAtBuffer_ = true;
        this.currentTimeline_ = segmentInfo.timeline;

        // We must update the syncinfo to recalculate the seekable range before
        // the following conditional otherwise it may consider this a bad "guess"
        // and attempt to resync when the post-update seekable window and live
        // point would mean that this was the perfect segment to fetch
        this.trigger('syncinfoupdate');

        // If we previously appended a segment that ends more than 3 targetDurations before
        // the currentTime_ that means that our conservative guess was too conservative.
        // In that case, reset the loader state so that we try to use any information gained
        // from the previous request to create a new, more accurate, sync-point.
        if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
          this.resetEverything();
          return;
        }

        // Don't do a rendition switch unless we have enough time to get a sync segment
        // and conservatively guess
        if (isWalkingForward) {
          this.trigger('bandwidthupdate');
        }
        this.trigger('progress');

        // any time an update finishes and the last segment is in the
        // buffer, end the stream. this ensures the "ended" event will
        // fire if playback reaches that point.
        if (this.isEndOfStream_(segmentInfo.mediaIndex + 1, segmentInfo.playlist)) {
          this.endOfStream();
        }

        if (!this.paused()) {
          this.monitorBuffer_();
        }
      }
22062
22063 /**
22064 * Records the current throughput of the decrypt, transmux, and append
22065 * portion of the semgment pipeline. `throughput.rate` is a the cumulative
22066 * moving average of the throughput. `throughput.count` is the number of
22067 * data points in the average.
22068 *
22069 * @private
22070 * @param {Object} segmentInfo the object returned by loadSegment
22071 */
22072
22073 }, {
22074 key: 'recordThroughput_',
22075 value: function recordThroughput_(segmentInfo) {
22076 var rate = this.throughput.rate;
22077 // Add one to the time to ensure that we don't accidentally attempt to divide
22078 // by zero in the case where the throughput is ridiculously high
22079 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1;
22080 // Multiply by 8000 to convert from bytes/millisecond to bits/second
22081 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000);
22082
22083 // This is just a cumulative moving average calculation:
22084 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
22085 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
22086 }
22087
22088 /**
22089 * Adds a cue to the segment-metadata track with some metadata information about the
22090 * segment
22091 *
22092 * @private
22093 * @param {Object} segmentInfo
22094 * the object returned by loadSegment
22095 * @method addSegmentMetadataCue_
22096 */
22097
22098 }, {
22099 key: 'addSegmentMetadataCue_',
22100 value: function addSegmentMetadataCue_(segmentInfo) {
22101 if (!this.segmentMetadataTrack_) {
22102 return;
22103 }
22104
22105 var segment = segmentInfo.segment;
22106 var start = segment.start;
22107 var end = segment.end;
22108
22109 // Do not try adding the cue if the start and end times are invalid.
22110 if (!finite(start) || !finite(end)) {
22111 return;
22112 }
22113
22114 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
22115
22116 var Cue = window_1.WebKitDataCue || window_1.VTTCue;
22117 var value = {
22118 custom: segment.custom,
22119 dateTimeObject: segment.dateTimeObject,
22120 dateTimeString: segment.dateTimeString,
22121 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
22122 resolution: segmentInfo.playlist.attributes.RESOLUTION,
22123 codecs: segmentInfo.playlist.attributes.CODECS,
22124 byteLength: segmentInfo.byteLength,
22125 uri: segmentInfo.uri,
22126 timeline: segmentInfo.timeline,
22127 playlist: segmentInfo.playlist.id,
22128 start: start,
22129 end: end
22130 };
22131 var data = JSON.stringify(value);
22132 var cue = new Cue(start, end, data);
22133
22134 // Attach the metadata to the value property of the cue to keep consistency between
22135 // the differences of WebKitDataCue in safari and VTTCue in other browsers
22136 cue.value = value;
22137
22138 this.segmentMetadataTrack_.addCue(cue);
22139 }
22140 }]);
22141 return SegmentLoader;
22142 }(videojs.EventTarget);
22143
// Decode a Uint8Array of UTF-8 bytes into a JavaScript string. escape()
// percent-encodes each byte of the binary string; decodeURIComponent()
// then interprets those percent escapes as UTF-8 sequences.
var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
  var binary = String.fromCharCode.apply(null, uintArray);

  return decodeURIComponent(escape(binary));
};
22147
22148 /**
22149 * @file vtt-segment-loader.js
22150 */
22151
// Two WebVTT line terminators ('\n\n') as bytes, appended between a WebVTT
// header and the rest of a file.
var VTT_LINE_TERMINATORS = new Uint8Array([0x0A, 0x0A]);
22155
22156 /**
22157 * An object that manages segment loading and appending.
22158 *
22159 * @class VTTSegmentLoader
22160 * @param {Object} options required and optional options
22161 * @extends videojs.EventTarget
22162 */
22163
22164 var VTTSegmentLoader = function (_SegmentLoader) {
22165 inherits(VTTSegmentLoader, _SegmentLoader);
22166
function VTTSegmentLoader(settings) {
  // Babel-style optional second argument: an options object
  var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
  classCallCheck(this, VTTSegmentLoader);

  // SegmentLoader requires a MediaSource be specified or it will throw an error;
  // however, VTTSegmentLoader has no need of a media source, so delete the reference
  var _this = possibleConstructorReturn(this, (VTTSegmentLoader.__proto__ || Object.getPrototypeOf(VTTSegmentLoader)).call(this, settings, options));

  _this.mediaSource_ = null;

  // TextTrack that parsed cues are appended to; assigned later via track()
  _this.subtitlesTrack_ = null;

  // when true, cues are re-created as native VTTCues before being added to
  // the track (see handleSegment_)
  _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
  return _this;
}
22182
22183 /**
22184 * Indicates which time ranges are buffered
22185 *
22186 * @return {TimeRange}
22187 * TimeRange object representing the current buffered ranges
22188 */
22189
22190
22191 createClass(VTTSegmentLoader, [{
22192 key: 'buffered_',
22193 value: function buffered_() {
22194 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues.length) {
22195 return videojs.createTimeRanges();
22196 }
22197
22198 var cues = this.subtitlesTrack_.cues;
22199 var start = cues[0].startTime;
22200 var end = cues[cues.length - 1].startTime;
22201
22202 return videojs.createTimeRanges([[start, end]]);
22203 }
22204
22205 /**
22206 * Gets and sets init segment for the provided map
22207 *
22208 * @param {Object} map
22209 * The map object representing the init segment to get or set
22210 * @param {Boolean=} set
22211 * If true, the init segment for the provided map should be saved
22212 * @return {Object}
22213 * map object for desired init segment
22214 */
22215
22216 }, {
22217 key: 'initSegment',
22218 value: function initSegment(map) {
22219 var set$$1 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
22220
22221 if (!map) {
22222 return null;
22223 }
22224
22225 var id = initSegmentId(map);
22226 var storedMap = this.initSegments_[id];
22227
22228 if (set$$1 && !storedMap && map.bytes) {
22229 // append WebVTT line terminators to the media initialization segment if it exists
22230 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
22231 // requires two or more WebVTT line terminators between the WebVTT header and the
22232 // rest of the file
22233 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
22234 var combinedSegment = new Uint8Array(combinedByteLength);
22235
22236 combinedSegment.set(map.bytes);
22237 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
22238
22239 this.initSegments_[id] = storedMap = {
22240 resolvedUri: map.resolvedUri,
22241 byterange: map.byterange,
22242 bytes: combinedSegment
22243 };
22244 }
22245
22246 return storedMap || map;
22247 }
22248
22249 /**
22250 * Returns true if all configuration required for loading is present, otherwise false.
22251 *
22252 * @return {Boolean} True if the all configuration is ready for loading
22253 * @private
22254 */
22255
22256 }, {
22257 key: 'couldBeginLoading_',
value: function couldBeginLoading_() {
  // Loading requires a playlist and a destination subtitles track, and the
  // loader must not be paused. Note the short-circuit may return a falsy
  // non-boolean (e.g. a null playlist) rather than `false`; callers use it
  // only for truthiness.
  return this.playlist_ && this.subtitlesTrack_ && !this.paused();
}
22261
22262 /**
22263 * Once all the starting parameters have been specified, begin
22264 * operation. This method should only be invoked from the INIT
22265 * state.
22266 *
22267 * @private
22268 */
22269
22270 }, {
22271 key: 'init_',
value: function init_() {
  // Mark the loader ready, clear any previously loaded/buffered state, and
  // start the buffer-monitoring loop.
  this.state = 'READY';
  this.resetEverything();
  return this.monitorBuffer_();
}
22277
22278 /**
22279 * Set a subtitle track on the segment loader to add subtitles to
22280 *
22281 * @param {TextTrack=} track
22282 * The text track to add loaded subtitles to
22283 * @return {TextTrack}
22284 * Returns the subtitles track
22285 */
22286
22287 }, {
22288 key: 'track',
22289 value: function track(_track) {
22290 if (typeof _track === 'undefined') {
22291 return this.subtitlesTrack_;
22292 }
22293
22294 this.subtitlesTrack_ = _track;
22295
22296 // if we were unpaused but waiting for a sourceUpdater, start
22297 // buffering now
22298 if (this.state === 'INIT' && this.couldBeginLoading_()) {
22299 this.init_();
22300 }
22301
22302 return this.subtitlesTrack_;
22303 }
22304
22305 /**
22306 * Remove any data in the source buffer between start and end times
22307 * @param {Number} start - the start time of the region to remove from the buffer
22308 * @param {Number} end - the end time of the region to remove from the buffer
22309 */
22310
22311 }, {
22312 key: 'remove',
value: function remove(start, end) {
  // For subtitles, "removing from the buffer" means removing cues from the
  // track rather than removing media from a SourceBuffer.
  removeCuesFromTrack(start, end, this.subtitlesTrack_);
}
22316
22317 /**
22318 * fill the buffer with segements unless the sourceBuffers are
22319 * currently updating
22320 *
22321 * Note: this function should only ever be called by monitorBuffer_
22322 * and never directly
22323 *
22324 * @private
22325 */
22326
22327 }, {
22328 key: 'fillBuffer_',
value: function fillBuffer_() {
  var _this2 = this;

  // Establish a sync point (a mapping between display time and segment
  // index) if one is not available yet.
  if (!this.syncPoint_) {
    this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
  }

  // see if we need to begin loading immediately
  var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);

  // avoid re-requesting segments already known to contain no cues
  segmentInfo = this.skipEmptySegments_(segmentInfo);

  if (!segmentInfo) {
    return;
  }

  if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
    // We don't have the timestamp offset that we need to sync subtitles.
    // Rerun on a timestamp offset or user interaction.
    var checkTimestampOffset = function checkTimestampOffset() {
      _this2.state = 'READY';
      if (!_this2.paused()) {
        // if not paused, queue a buffer check as soon as possible
        _this2.monitorBuffer_();
      }
    };

    this.syncController_.one('timestampoffset', checkTimestampOffset);
    this.state = 'WAITING_ON_TIMELINE';
    return;
  }

  this.loadSegment_(segmentInfo);
}
22363
22364 /**
22365 * Prevents the segment loader from requesting segments we know contain no subtitles
22366 * by walking forward until we find the next segment that we don't know whether it is
22367 * empty or not.
22368 *
22369 * @param {Object} segmentInfo
22370 * a segment info object that describes the current segment
22371 * @return {Object}
22372 * a segment info object that describes the current segment
22373 */
22374
22375 }, {
22376 key: 'skipEmptySegments_',
22377 value: function skipEmptySegments_(segmentInfo) {
22378 while (segmentInfo && segmentInfo.segment.empty) {
22379 segmentInfo = this.generateSegmentInfo_(segmentInfo.playlist, segmentInfo.mediaIndex + 1, segmentInfo.startOfSegment + segmentInfo.duration, segmentInfo.isSyncRequest);
22380 }
22381 return segmentInfo;
22382 }
22383
22384 /**
22385 * append a decrypted segement to the SourceBuffer through a SourceUpdater
22386 *
22387 * @private
22388 */
22389
22390 }, {
22391 key: 'handleSegment_',
value: function handleSegment_() {
  var _this3 = this;

  // The request may have been aborted, or the track removed, while in flight.
  if (!this.pendingSegment_ || !this.subtitlesTrack_) {
    this.state = 'READY';
    return;
  }

  this.state = 'APPENDING';

  var segmentInfo = this.pendingSegment_;
  var segment = segmentInfo.segment;

  // Make sure that vttjs has loaded, otherwise, wait till it finished loading
  if (typeof window_1.WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {

    var loadHandler = void 0;
    // on failure: detach the success handler, surface the error, and pause
    var errorHandler = function errorHandler() {
      _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
      _this3.error({
        message: 'Error loading vtt.js'
      });
      _this3.state = 'READY';
      _this3.pause();
      _this3.trigger('error');
    };

    // on success: detach the error handler and retry this whole method
    loadHandler = function loadHandler() {
      _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
      _this3.handleSegment_();
    };

    this.state = 'WAITING_ON_VTTJS';
    this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
    this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);

    return;
  }

  segment.requested = true;

  try {
    this.parseVTTCues_(segmentInfo);
  } catch (e) {
    // parsing failed: report the error and pause rather than looping
    this.error({
      message: e.message
    });
    this.state = 'READY';
    this.pause();
    return this.trigger('error');
  }

  this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);

  // sync requests exist only to establish timing information; their cues
  // are not appended
  if (segmentInfo.isSyncRequest) {
    this.trigger('syncinfoupdate');
    this.pendingSegment_ = null;
    this.state = 'READY';
    return;
  }

  segmentInfo.byteLength = segmentInfo.bytes.byteLength;

  this.mediaSecondsLoaded += segment.duration;

  if (segmentInfo.cues.length) {
    // remove any overlapping cues to prevent doubling
    this.remove(segmentInfo.cues[0].endTime, segmentInfo.cues[segmentInfo.cues.length - 1].endTime);
  }

  segmentInfo.cues.forEach(function (cue) {
    _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window_1.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
  });

  this.handleUpdateEnd_();
}
22468
22469 /**
22470 * Uses the WebVTT parser to parse the segment response
22471 *
22472 * @param {Object} segmentInfo
22473 * a segment info object that describes the current segment
22474 * @private
22475 */
22476
22477 }, {
22478 key: 'parseVTTCues_',
22479 value: function parseVTTCues_(segmentInfo) {
22480 var decoder = void 0;
22481 var decodeBytesToString = false;
22482
22483 if (typeof window_1.TextDecoder === 'function') {
22484 decoder = new window_1.TextDecoder('utf8');
22485 } else {
22486 decoder = window_1.WebVTT.StringDecoder();
22487 decodeBytesToString = true;
22488 }
22489
22490 var parser = new window_1.WebVTT.Parser(window_1, window_1.vttjs, decoder);
22491
22492 segmentInfo.cues = [];
22493 segmentInfo.timestampmap = { MPEGTS: 0, LOCAL: 0 };
22494
22495 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
22496 parser.ontimestampmap = function (map) {
22497 return segmentInfo.timestampmap = map;
22498 };
22499 parser.onparsingerror = function (error) {
22500 videojs.log.warn('Error encountered when parsing cues: ' + error.message);
22501 };
22502
22503 if (segmentInfo.segment.map) {
22504 var mapData = segmentInfo.segment.map.bytes;
22505
22506 if (decodeBytesToString) {
22507 mapData = uint8ToUtf8(mapData);
22508 }
22509
22510 parser.parse(mapData);
22511 }
22512
22513 var segmentData = segmentInfo.bytes;
22514
22515 if (decodeBytesToString) {
22516 segmentData = uint8ToUtf8(segmentData);
22517 }
22518
22519 parser.parse(segmentData);
22520 parser.flush();
22521 }
22522
22523 /**
22524 * Updates the start and end times of any cues parsed by the WebVTT parser using
22525 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
22526 * from the SyncController
22527 *
22528 * @param {Object} segmentInfo
22529 * a segment info object that describes the current segment
22530 * @param {Object} mappingObj
22531 * object containing a mapping from TS to media time
22532 * @param {Object} playlist
22533 * the playlist object containing the segment
22534 * @private
22535 */
22536
22537 }, {
22538 key: 'updateTimeMapping_',
22539 value: function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
22540 var segment = segmentInfo.segment;
22541
22542 if (!mappingObj) {
22543 // If the sync controller does not have a mapping of TS to Media Time for the
22544 // timeline, then we don't have enough information to update the cue
22545 // start/end times
22546 return;
22547 }
22548
22549 if (!segmentInfo.cues.length) {
22550 // If there are no cues, we also do not have enough information to figure out
22551 // segment timing. Mark that the segment contains no cues so we don't re-request
22552 // an empty segment.
22553 segment.empty = true;
22554 return;
22555 }
22556
22557 var timestampmap = segmentInfo.timestampmap;
22558 var diff = timestampmap.MPEGTS / 90000 - timestampmap.LOCAL + mappingObj.mapping;
22559
22560 segmentInfo.cues.forEach(function (cue) {
22561 // First convert cue time to TS time using the timestamp-map provided within the vtt
22562 cue.startTime += diff;
22563 cue.endTime += diff;
22564 });
22565
22566 if (!playlist.syncInfo) {
22567 var firstStart = segmentInfo.cues[0].startTime;
22568 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
22569
22570 playlist.syncInfo = {
22571 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
22572 time: Math.min(firstStart, lastStart - segment.duration)
22573 };
22574 }
22575 }
22576 }]);
22577 return VTTSegmentLoader;
22578 }(SegmentLoader);
22579
22580 /**
22581 * @file ad-cue-tags.js
22582 */
22583
/**
 * Searches a track's cues for the first ad cue whose
 * [adStartTime, adEndTime] window contains the given media time.
 *
 * @param {Object} track the text track whose cues are searched
 * @param {Number} mediaTime the playback time to look up
 * @return {Object|null} the matching cue, or null when none overlaps
 */
var findAdCue = function findAdCue(track, mediaTime) {
  var cues = track.cues;

  for (var index = 0; index < cues.length; index++) {
    var candidate = cues[index];

    if (candidate.adStartTime <= mediaTime && mediaTime <= candidate.adEndTime) {
      return candidate;
    }
  }
  return null;
};
22599
/**
 * Creates and extends ad cues on `track` based on the CUE-OUT,
 * CUE-OUT-CONT, and CUE-IN tags attached to the playlist's segments.
 *
 * @param {Object} media the media playlist whose segments are scanned
 * @param {TextTrack} track the track ad cues are added to
 * @param {Number} [offset=0] media time at which the playlist's first
 * segment begins
 */
var updateAdCues = function updateAdCues(media, track) {
  var offset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;

  if (!media.segments) {
    return;
  }

  var mediaTime = offset;
  var cue = void 0;

  for (var i = 0; i < media.segments.length; i++) {
    var segment = media.segments[i];

    if (!cue) {
      // Since the cues will span for at least the segment duration, adding a fudge
      // factor of half segment duration will prevent duplicate cues from being
      // created when timing info is not exact (e.g. cue start time initialized
      // at 10.006677, but next call mediaTime is 10.003332 )
      cue = findAdCue(track, mediaTime + segment.duration / 2);
    }

    if (cue) {
      if ('cueIn' in segment) {
        // Found a CUE-IN so end the cue
        cue.endTime = mediaTime;
        cue.adEndTime = mediaTime;
        mediaTime += segment.duration;
        cue = null;
        continue;
      }

      if (mediaTime < cue.endTime) {
        // Already processed this mediaTime for this cue
        mediaTime += segment.duration;
        continue;
      }

      // otherwise extend cue until a CUE-IN is found
      cue.endTime += segment.duration;
    } else {
      if ('cueOut' in segment) {
        cue = new window_1.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
        cue.adStartTime = mediaTime;
        // Assumes tag format to be
        // #EXT-X-CUE-OUT:30
        cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
        track.addCue(cue);
      }

      if ('cueOutCont' in segment) {
        // Entered into the middle of an ad cue
        var adOffset = void 0;
        var adTotal = void 0;

        // Assumes tag format to be
        // #EXT-X-CUE-OUT-CONT:10/30

        var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat);

        var _segment$cueOutCont$s2 = slicedToArray(_segment$cueOutCont$s, 2);

        adOffset = _segment$cueOutCont$s2[0];
        adTotal = _segment$cueOutCont$s2[1];


        cue = new window_1.VTTCue(mediaTime, mediaTime + segment.duration, '');
        // back-date the start so the cue covers the whole ad, not just the
        // portion of it this playlist begins in
        cue.adStartTime = mediaTime - adOffset;
        cue.adEndTime = cue.adStartTime + adTotal;
        track.addCue(cue);
      }
    }
    mediaTime += segment.duration;
  }
};
22674
22675 /**
22676 * mux.js
22677 *
22678 * Copyright (c) Brightcove
22679 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
22680 */
22681
// MPEG-TS elementary stream_type values (ISO/IEC 13818-1) recognized when
// reading a PMT; see parsePesType below.
var streamTypes = {
  H264_STREAM_TYPE: 0x1B, // AVC / H.264 video
  ADTS_STREAM_TYPE: 0x0F, // AAC audio in ADTS framing
  METADATA_STREAM_TYPE: 0x15 // timed metadata carried in PES
};
22687
// PTS/DTS values are 33 bits wide, so they roll over at 2^33 (measured in
// 90kHz clock ticks).
var MAX_TS = 8589934592;

// A jump larger than 2^32 between timestamps is treated as a rollover
// rather than a seek.
var RO_THRESH = 4294967296;

var TYPE_SHARED = 'shared';

/**
 * Adjusts a 33-bit timestamp for rollover relative to a reference timestamp.
 *
 * @param {Number} value the timestamp to adjust
 * @param {Number} reference a nearby timestamp on the desired timeline
 * @return {Number} value shifted by whole multiples of 2^33 until it lies
 * within RO_THRESH of the reference
 */
var handleRollover = function handleRollover(value, reference) {
  // When the value is ahead of the reference, the rollover happened in the
  // opposite direction. Example scenario: entering a long stream just after
  // a rollover sets a small reference (e.g. 1); seeking backwards across the
  // rollover point then yields very large values (e.g. 2^33 - 1) that should
  // be adjusted to `value - 2^33`.
  var step = value > reference ? -MAX_TS : MAX_TS;

  // Note: a genuine seek greater than RO_THRESH (2^32, ~13 hours) will be
  // adjusted incorrectly.
  while (Math.abs(reference - value) > RO_THRESH) {
    value += step;
  }

  return value;
};
22716
/**
 * A stream that corrects the PTS/DTS of packets pushed through it for
 * 33-bit timestamp rollover. `type` restricts which packet types are
 * processed; the "shared" type accepts every packet.
 *
 * @param {String} [type] packet type to handle, or undefined for "shared"
 */
var TimestampRolloverStream = function TimestampRolloverStream(type) {
  // closure state: the last corrected DTS seen, and the reference timestamp
  // rollover adjustments are made relative to
  var lastDTS, referenceDTS;

  TimestampRolloverStream.prototype.init.call(this);

  // The "shared" type is used in cases where a stream will contain muxed
  // video and audio. We could use `undefined` here, but having a string
  // makes debugging a little clearer.
  this.type_ = type || TYPE_SHARED;

  this.push = function (data) {

    // Any "shared" rollover streams will accept _all_ data. Otherwise,
    // streams will only accept data that matches their type.
    if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
      return;
    }

    // the first packet processed establishes the reference timestamp
    if (referenceDTS === undefined) {
      referenceDTS = data.dts;
    }

    data.dts = handleRollover(data.dts, referenceDTS);
    data.pts = handleRollover(data.pts, referenceDTS);

    lastDTS = data.dts;

    this.trigger('data', data);
  };

  this.flush = function () {
    // carry the last DTS forward as the reference across a flush
    referenceDTS = lastDTS;
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  // a discontinuity invalidates the reference; the next pushed packet
  // re-establishes it
  this.discontinuity = function () {
    referenceDTS = void 0;
    lastDTS = void 0;
  };

  this.reset = function () {
    this.discontinuity();
    this.trigger('reset');
  };
};
22767
// Inherit the base mux.js Stream behavior (init/on/off/trigger).
TimestampRolloverStream.prototype = new stream();

// module export shape for the timestamp-rollover utilities
var timestampRolloverStream = {
  TimestampRolloverStream: TimestampRolloverStream,
  handleRollover: handleRollover
};
22774
/**
 * Extracts the 13-bit packet identifier from an MPEG-TS packet header.
 *
 * @param {Uint8Array} packet a transport stream packet
 * @return {Number} the PID
 */
var parsePid = function parsePid(packet) {
  // the PID is the low 5 bits of header byte 1 followed by all of byte 2
  return ((packet[1] & 0x1f) << 8) | packet[2];
};
22781
// True when the packet's payload_unit_start_indicator (bit 0x40 of header
// byte 1) is set, i.e. a new PES packet or PSI section begins here.
var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
  return (packet[1] & 0x40) !== 0;
};
22785
/**
 * Returns the number of bytes occupied by the packet's adaptation field,
 * or 0 when none is present. The adaptation field is used to add stuffing
 * to PES packets that don't fill a complete TS packet, and for timing and
 * control data that this code does not use.
 *
 * @param {Uint8Array} packet a transport stream packet
 * @return {Number} byte offset contributed by the adaptation field
 */
var parseAdaptionField = function parseAdaptionField(packet) {
  // adaptation_field_control (bits 4-5 of byte 3) values 0b10/0b11 mean an
  // adaptation field is present; its length is then given by byte 4
  var present = ((packet[3] & 0x30) >>> 4) > 0x01;

  return present ? packet[4] + 1 : 0;
};
22798
/**
 * Classifies a TS packet as 'pat', 'pmt', or 'pes' based on its PID.
 *
 * @param {Uint8Array} packet a transport stream packet
 * @param {Number} pmtPid the PID announced for the PMT (if known)
 * @return {String|null} packet classification, or null when the PMT PID is
 * not yet known
 */
var parseType$1 = function parseType(packet, pmtPid) {
  var pid = parsePid(packet);

  // PID 0 always carries the PAT; the PMT lives at the PID the PAT
  // announced; once the PMT PID is known, anything else is PES data.
  if (pid === 0) {
    return 'pat';
  }
  if (pid === pmtPid) {
    return 'pmt';
  }
  return pmtPid ? 'pes' : null;
};
22810
/**
 * Reads the program_map_PID out of a PAT packet.
 *
 * @param {Uint8Array} packet a transport stream packet carrying the PAT
 * @return {Number} the PID where the PMT will be found
 */
var parsePat = function parsePat(packet) {
  var offset = 4 + parseAdaptionField(packet);

  // when the payload unit start indicator is set, the payload begins with a
  // pointer_field byte that must be skipped
  if (parsePayloadUnitStartIndicator(packet)) {
    offset += packet[offset] + 1;
  }

  // program_map_PID: 13 bits spread over section bytes 10 and 11
  return ((packet[offset + 10] & 0x1f) << 8) | packet[offset + 11];
};
22821
/**
 * Parses a PMT packet into a map of elementary_PID -> stream_type.
 *
 * @param {Uint8Array} packet a transport stream packet carrying the PMT
 * @return {Object|undefined} the program map table, or undefined when the
 * packet is a "future" PMT declaration that is not yet in effect
 */
var parsePmt = function parsePmt(packet) {
  var programMapTable = {};
  var payloadOffset = 4 + parseAdaptionField(packet);

  // skip the pointer_field byte when the payload unit start indicator is set
  if (parsePayloadUnitStartIndicator(packet)) {
    payloadOffset += packet[payloadOffset] + 1;
  }

  // PMTs can be sent ahead of the time when they should actually take
  // effect; such future declarations have current_next_indicator set to
  // zero and are ignored here.
  if (!(packet[payloadOffset + 5] & 0x01)) {
    return;
  }

  // the mapping table ends at the end of the current section
  var sectionLength = ((packet[payloadOffset + 1] & 0x0f) << 8) | packet[payloadOffset + 2];
  var tableEnd = 3 + sectionLength - 4;

  // program info descriptors sit between the fixed header and the table;
  // their length determines where the table starts
  var programInfoLength = ((packet[payloadOffset + 10] & 0x0f) << 8) | packet[payloadOffset + 11];

  // walk each entry, recording elementary_pid -> stream_type
  for (var offset = 12 + programInfoLength; offset < tableEnd;) {
    var i = payloadOffset + offset;
    var elementaryPid = ((packet[i + 1] & 0x1F) << 8) | packet[i + 2];

    programMapTable[elementaryPid] = packet[i];

    // advance past this entry and its ES info descriptors, if present
    offset += (((packet[i + 3] & 0x0F) << 8) | packet[i + 4]) + 5;
  }

  return programMapTable;
};
22862
/**
 * Classifies a PES packet as video, audio, or timed-metadata using the
 * stream_type the PMT registered for its PID.
 *
 * @param {Uint8Array} packet a transport stream packet
 * @param {Object} programMapTable map of PID -> stream_type
 * @return {String|null} 'video', 'audio', 'timed-metadata', or null for an
 * unrecognized stream type
 */
var parsePesType = function parsePesType(packet, programMapTable) {
  var streamType = programMapTable[parsePid(packet)];

  if (streamType === streamTypes.H264_STREAM_TYPE) {
    return 'video';
  }
  if (streamType === streamTypes.ADTS_STREAM_TYPE) {
    return 'audio';
  }
  if (streamType === streamTypes.METADATA_STREAM_TYPE) {
    return 'timed-metadata';
  }
  return null;
};
22877
/**
 * Extracts the PTS (and DTS, when present) from the PES header beginning
 * in this TS packet.
 *
 * @param {Uint8Array} packet a transport stream packet
 * @return {Object|null} {pts, dts} in 90kHz clock ticks, or null when no
 * PES header starts here (no PUSI, or adaptation-field stuffing only)
 */
var parsePesTime = function parsePesTime(packet) {
  var pusi = parsePayloadUnitStartIndicator(packet);
  if (!pusi) {
    return null;
  }

  var offset = 4 + parseAdaptionField(packet);

  if (offset >= packet.byteLength) {
    // From the H 222.0 MPEG-TS spec
    // "For transport stream packets carrying PES packets, stuffing is needed when there
    // is insufficient PES packet data to completely fill the transport stream packet
    // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
    // the sum of the lengths of the data elements in it, so that the payload bytes
    // remaining after the adaptation field exactly accommodates the available PES packet
    // data."
    //
    // If the offset is >= the length of the packet, then the packet contains no data
    // and instead is just adaption field stuffing bytes
    return null;
  }

  var pes = null;
  var ptsDtsFlags;

  // PES packets may be annotated with a PTS value, or a PTS value
  // and a DTS value. Determine what combination of values is
  // available to work with.
  ptsDtsFlags = packet[offset + 7];

  // PTS and DTS are normally stored as a 33-bit number. Javascript
  // performs all bitwise operations on 32-bit integers but javascript
  // supports a much greater range (52-bits) of integer using standard
  // mathematical operations.
  // We construct a 31-bit value using bitwise operators over the 31
  // most significant bits and then multiply by 4 (equal to a left-shift
  // of 2) before we add the final 2 least significant bits of the
  // timestamp (equal to an OR.)
  if (ptsDtsFlags & 0xC0) {
    pes = {};
    // the PTS and DTS are not written out directly. For information
    // on how they are encoded, see
    // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
    pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
    pes.pts *= 4; // Left shift by 2
    pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
    // DTS defaults to PTS; a distinct DTS is present only when flag 0x40 is set
    pes.dts = pes.pts;
    if (ptsDtsFlags & 0x40) {
      pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
      pes.dts *= 4; // Left shift by 2
      pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
    }
  }
  return pes;
};
22933
/**
 * Maps an H.264 nal_unit_type code to the NAL names this code cares about.
 *
 * @param {Number} type the 5-bit nal_unit_type value
 * @return {String|null} the NAL unit name, or null for types not handled
 */
var parseNalUnitType = function parseNalUnitType(type) {
  var names = {
    0x05: 'slice_layer_without_partitioning_rbsp_idr',
    0x06: 'sei_rbsp',
    0x07: 'seq_parameter_set_rbsp',
    0x08: 'pic_parameter_set_rbsp',
    0x09: 'access_unit_delimiter_rbsp'
  };

  return names[type] || null;
};
22950
/**
 * Scans the payload of a video transport stream packet for H.264 NAL start
 * codes and reports whether an IDR slice (a keyframe) is present.
 *
 * @param {Uint8Array} packet a transport stream packet carrying H.264 data
 * @return {Boolean} true when an IDR NAL unit was found in this packet
 */
var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
  var offset = 4 + parseAdaptionField(packet);
  var frameBuffer = packet.subarray(offset);
  // frameI: current scan position; frameSyncPoint: start of the most recent
  // NAL start code
  var frameI = 0;
  var frameSyncPoint = 0;
  var foundKeyFrame = false;
  var nalType;

  // advance the sync point to a NAL start, if necessary
  for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
    if (frameBuffer[frameSyncPoint + 2] === 1) {
      // the sync point is properly aligned
      frameI = frameSyncPoint + 5;
      break;
    }
  }

  while (frameI < frameBuffer.byteLength) {
    // look at the current byte to determine if we've hit the end of
    // a NAL unit boundary
    switch (frameBuffer[frameI]) {
      case 0:
        // skip past non-sync sequences
        if (frameBuffer[frameI - 1] !== 0) {
          frameI += 2;
          break;
        } else if (frameBuffer[frameI - 2] !== 0) {
          frameI++;
          break;
        }

        // only classify the NAL when there are payload bytes between the
        // previous start code and this zero run
        if (frameSyncPoint + 3 !== frameI - 2) {
          nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
          if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
            foundKeyFrame = true;
          }
        }

        // drop trailing zeroes
        do {
          frameI++;
        } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
        frameSyncPoint = frameI - 2;
        frameI += 3;
        break;
      case 1:
        // skip past non-sync sequences
        if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
          frameI += 3;
          break;
        }

        // a 0 0 1 start code terminates the previous NAL; classify it
        nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
        if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
          foundKeyFrame = true;
        }
        frameSyncPoint = frameI - 2;
        frameI += 3;
        break;
      default:
        // the current byte isn't a one or zero, so it cannot be part
        // of a sync sequence
        frameI += 3;
        break;
    }
  }
  frameBuffer = frameBuffer.subarray(frameSyncPoint);
  frameI -= frameSyncPoint;
  frameSyncPoint = 0;
  // parse the final nal
  if (frameBuffer && frameBuffer.byteLength > 3) {
    nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
    if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
      foundKeyFrame = true;
    }
  }

  return foundKeyFrame;
};
23030
// Public surface of the transport-stream probe: lightweight parsers used to
// inspect TS packets without running the full transmuxer.
var probe$1 = {
  parseType: parseType$1,
  parsePat: parsePat,
  parsePmt: parsePmt,
  parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
  parsePesType: parsePesType,
  parsePesTime: parsePesTime,
  videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
};
23040
23041 /**
23042 * mux.js
23043 *
23044 * Copyright (c) Brightcove
23045 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
23046 *
23047 * Utilities to detect basic properties and metadata about Aac data.
23048 */
23049
// Sampling-rate lookup table indexed by the 4-bit ADTS
// sampling_frequency_index header field (see parseSampleRate below).
var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
23051
// Sniff for an ID3v2 magic ('ID3') at the very start of the buffer;
// HLS AAC segments are expected to open with an ID3 tag.
var isLikelyAacData = function isLikelyAacData(data) {
  var marker = 'ID3';
  for (var idx = 0; idx < marker.length; idx++) {
    if (data[idx] !== marker.charCodeAt(idx)) {
      return false;
    }
  }
  return true;
};
23058
// Decode a 4-byte ID3 sync-safe integer (7 useful bits per byte).
var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
  var value = 0;
  for (var idx = 0; idx < 4; idx++) {
    value = (value << 7) | data[idx];
  }
  return value;
};
23062
// return a percent-encoded representation of the specified byte range
// @see http://en.wikipedia.org/wiki/Percent-encoding
var percentEncode = function percentEncode(bytes, start, end) {
  var encoded = [];
  for (var idx = start; idx < end; idx++) {
    // two lowercase hex digits, zero-padded, preceded by '%'
    var hex = bytes[idx].toString(16);
    encoded.push('%' + ('0' + hex).slice(-2));
  }
  return encoded.join('');
};
23073
// return the string representation of the specified byte range,
// interpreted as ISO-8859-1: percent-encode every byte, then let
// unescape() map each escape back to the corresponding Latin-1 char.
var parseIso88591 = function parseIso88591(bytes, start, end) {
  var encoded = percentEncode(bytes, start, end);
  return unescape(encoded); // jshint ignore:line
};
23079
// Return the total size in bytes of the ID3 tag that starts at byteIndex:
// the 28-bit sync-safe payload size (header bytes 6-9) plus the 10-byte
// header, plus another 10 bytes when the footer flag (bit 4 of byte 5) is set.
var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
  var size = 0;
  for (var offset = 6; offset <= 9; offset++) {
    size = (size << 7) | header[byteIndex + offset];
  }
  var flags = header[byteIndex + 5];
  var hasFooter = (flags & 0x10) !== 0;
  return size + (hasFooter ? 20 : 10);
};
23090
/**
 * Return the aac_frame_length of the ADTS frame starting at byteIndex.
 *
 * frame_length is a 13-bit field split across three header bytes:
 * the low 2 bits of byte 3 (<<11), all 8 bits of byte 4 (<<3) and the
 * top 3 bits of byte 5 (>>5).
 *
 * Fix: the original computed `header[byteIndex + 3] & 0x3 << 11`, which
 * JavaScript parses as `header[...] & (0x3 << 11)` because `<<` binds
 * tighter than `&`. For a byte value that mask is always 0, silently
 * truncating sizes above 2047 bytes.
 */
var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
  var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
      middle = header[byteIndex + 4] << 3,
      highTwo = (header[byteIndex + 3] & 0x3) << 11;

  return highTwo | middle | lowThree;
};
23098
/**
 * Classify the bytes at byteIndex as the start of an ID3 tag
 * ('timed-metadata'), an ADTS frame ('audio'), or neither (null).
 *
 * Fix: the original tested `header[byteIndex] & 0xff === 0xff`, which
 * parses as `header[byteIndex] & (0xff === 0xff)` i.e. `header[...] & 1`
 * because `===` binds tighter than `&` — any odd byte satisfied the
 * syncword check. The intended test is that the full first byte is 0xFF.
 */
var parseType$2 = function parseType(header, byteIndex) {
  if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
    return 'timed-metadata';
  } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
    return 'audio';
  }
  return null;
};
23107
// Scan the packet for the first ADTS syncword (0xFFF with the layer bits
// clear) and translate its 4-bit sampling_frequency_index into Hz via the
// ADTS_SAMPLING_FREQUENCIES table. Returns null if no header is found.
var parseSampleRate = function parseSampleRate(packet) {
  var offset = 0;

  while (offset + 5 < packet.length) {
    var isSyncword = packet[offset] === 0xFF && (packet[offset + 1] & 0xF6) === 0xF0;

    if (isSyncword) {
      return ADTS_SAMPLING_FREQUENCIES[(packet[offset + 2] & 0x3c) >>> 2];
    }

    // Not a valid header at this position; slide forward one byte and retry.
    offset++;
  }

  return null;
};
23123
/**
 * Walk the ID3v2 frames of the tag in `packet` and return the MPEG-TS
 * timestamp carried in an Apple 'com.apple.streaming.transportStreamTimestamp'
 * PRIV frame, or null when no such frame (or no usable frame) is found.
 */
var parseAacTimestamp = function parseAacTimestamp(packet) {
  var frameStart, frameSize, frame, frameHeader;

  // find the start of the first frame and the end of the tag
  frameStart = 10;
  if (packet[5] & 0x40) {
    // advance the frame start past the extended header
    frameStart += 4; // header size field
    frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
  }

  // parse one or more ID3 frames
  // http://id3.org/id3v2.3.0#ID3v2_frame_overview
  do {
    // determine the number of bytes in this frame
    frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
    if (frameSize < 1) {
      // a zero-sized frame means the tag is malformed or we have walked
      // into padding; give up rather than loop forever
      return null;
    }
    // the frame ID is four ASCII characters, e.g. 'PRIV'
    frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);

    if (frameHeader === 'PRIV') {
      frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);

      // a PRIV body is a NUL-terminated owner string followed by data
      for (var i = 0; i < frame.byteLength; i++) {
        if (frame[i] === 0) {
          var owner = parseIso88591(frame, 0, i);
          if (owner === 'com.apple.streaming.transportStreamTimestamp') {
            var d = frame.subarray(i + 1);
            // Reassemble the 33-bit timestamp without overflowing 32-bit
            // bitwise math: build the top 31 bits, scale by 4 (<<2 done
            // arithmetically), then add back the low 2 bits.
            var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
            size *= 4;
            size += d[7] & 0x03;

            return size;
          }
          break;
        }
      }
    }

    frameStart += 10; // advance past the frame header
    frameStart += frameSize; // advance past the frame body
  } while (frameStart < packet.byteLength);
  return null;
};
23169
// AAC probe helpers bundled under one namespace; mirrors the shape of
// probe$1 for transport streams so the inspector can dispatch on type.
var utils$1 = {
  isLikelyAacData: isLikelyAacData,
  parseId3TagSize: parseId3TagSize,
  parseAdtsSize: parseAdtsSize,
  parseType: parseType$2,
  parseSampleRate: parseSampleRate,
  parseAacTimestamp: parseAacTimestamp
};
23178
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 */

// Conversion helpers between wall-clock seconds, 90kHz MPEG-TS ticks,
// and audio-sample-rate ticks.
var ONE_SECOND_IN_TS$1 = 90000; // 90kHz clock

// seconds -> 90kHz video timestamp ticks
var secondsToVideoTs = function secondsToVideoTs(seconds) {
  return seconds * ONE_SECOND_IN_TS$1;
};

// seconds -> audio sample ticks at the given sample rate
var secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
  return seconds * sampleRate;
};

// 90kHz ticks -> seconds
var videoTsToSeconds = function videoTsToSeconds(timestamp) {
  return timestamp / ONE_SECOND_IN_TS$1;
};

// audio sample ticks -> seconds
var audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
  return timestamp / sampleRate;
};

// audio sample ticks -> 90kHz ticks
var audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
  return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
};

// 90kHz ticks -> audio sample ticks
var videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
  return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
};

/**
 * Adjust ID3 tag or caption timing information by the timeline pts values
 * (if keepOriginalTimestamps is false) and convert to seconds
 */
var metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
  var adjusted = keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts;
  return videoTsToSeconds(adjusted);
};

var clock = {
  ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$1,
  secondsToVideoTs: secondsToVideoTs,
  secondsToAudioTs: secondsToAudioTs,
  videoTsToSeconds: videoTsToSeconds,
  audioTsToSeconds: audioTsToSeconds,
  audioTsToVideoTs: audioTsToVideoTs,
  videoTsToAudioTs: videoTsToAudioTs,
  metadataTsToSeconds: metadataTsToSeconds
};
23237
var handleRollover$1 = timestampRolloverStream.handleRollover;

// Namespace the container-specific probe helpers so the inspector
// functions below can dispatch on container type.
var probe$2 = {
  ts: probe$1,
  aac: utils$1
};

var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;

var MP2T_PACKET_LENGTH = 188; // bytes
var SYNC_BYTE = 0x47;
23247
/**
 * walks through segment data looking for pat and pmt packets to parse out
 * program map table information
 */
var parsePsi_ = function parsePsi_(bytes, pmt) {
  var start = 0;
  var end = MP2T_PACKET_LENGTH;

  while (end < bytes.byteLength) {
    // Advance byte-by-byte until both the candidate packet start and the
    // start of the following packet line up on sync bytes.
    if (bytes[start] !== SYNC_BYTE || bytes[end] !== SYNC_BYTE) {
      start++;
      end++;
      continue;
    }

    // We found a packet
    var packet = bytes.subarray(start, end);
    var type = probe$2.ts.parseType(packet, pmt.pid);

    if (type === 'pat') {
      if (!pmt.pid) {
        pmt.pid = probe$2.ts.parsePat(packet);
      }
    } else if (type === 'pmt') {
      if (!pmt.table) {
        pmt.table = probe$2.ts.parsePmt(packet);
      }
    }

    // Found the pat and pmt, we can stop walking the segment
    if (pmt.pid && pmt.table) {
      return;
    }

    start += MP2T_PACKET_LENGTH;
    end += MP2T_PACKET_LENGTH;
  }
};
23297
/**
 * walks through the segment data from the start and end to get timing information
 * for the first and last audio pes packets
 */
var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type,
      pesType,
      pusi,
      parsed;

  var endLoop = false;

  // Start walking from start of segment to get first audio packet
  while (endIndex <= bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    // (the segment-final packet is accepted via endIndex === byteLength)
    if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe$2.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe$2.ts.parsePesType(packet, pmt.table);
          pusi = probe$2.ts.parsePayloadUnitStartIndicator(packet);
          // only packets that begin a PES payload carry a PTS/DTS header
          if (pesType === 'audio' && pusi) {
            parsed = probe$2.ts.parsePesTime(packet);
            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }
          break;
        default:
          break;
      }

      if (endLoop) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    }

    // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet
    startIndex++;
    endIndex++;
  }

  // Start walking from end of segment to get last audio packet
  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;
  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe$2.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe$2.ts.parsePesType(packet, pmt.table);
          pusi = probe$2.ts.parsePayloadUnitStartIndicator(packet);
          if (pesType === 'audio' && pusi) {
            parsed = probe$2.ts.parsePesTime(packet);
            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }
          break;
        default:
          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    }

    // If we get here, we have somehow become de-synchronized and we need to step
    // backward one byte at a time until we find a pair of sync bytes that denote
    // a packet
    startIndex--;
    endIndex--;
  }
};
23398
/**
 * walks through the segment data from the start and end to get timing information
 * for the first and last video pes packets as well as timing information for the first
 * key frame.
 */
var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type,
      pesType,
      pusi,
      parsed,
      frame,
      i,
      pes;

  var endLoop = false;

  // accumulates the TS packets of one video payload (PUSI to PUSI) so the
  // assembled bytes can be scanned for a key frame
  var currentFrame = {
    data: [],
    size: 0
  };

  // Start walking from start of segment to get first video packet
  while (endIndex < bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe$2.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe$2.ts.parsePesType(packet, pmt.table);
          pusi = probe$2.ts.parsePayloadUnitStartIndicator(packet);
          if (pesType === 'video') {
            // record the first video PES timestamp exactly once
            if (pusi && !endLoop) {
              parsed = probe$2.ts.parsePesTime(packet);
              if (parsed) {
                parsed.type = 'video';
                result.video.push(parsed);
                endLoop = true;
              }
            }
            if (!result.firstKeyFrame) {
              if (pusi) {
                if (currentFrame.size !== 0) {
                  // a new payload begins, so the previously buffered one is
                  // complete: concatenate its packets and test for a key frame
                  frame = new Uint8Array(currentFrame.size);
                  i = 0;
                  while (currentFrame.data.length) {
                    pes = currentFrame.data.shift();
                    frame.set(pes, i);
                    i += pes.byteLength;
                  }
                  if (probe$2.ts.videoPacketContainsKeyFrame(frame)) {
                    var firstKeyFrame = probe$2.ts.parsePesTime(frame);

                    // PTS/DTS may not be available. Simply *not* setting
                    // the keyframe seems to work fine with HLS playback
                    // and definitely preferable to a crash with TypeError...
                    if (firstKeyFrame) {
                      result.firstKeyFrame = firstKeyFrame;
                      result.firstKeyFrame.type = 'video';
                    } else {
                      // eslint-disable-next-line
                      console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
                    }
                  }
                  currentFrame.size = 0;
                }
              }
              // keep buffering packets for the in-progress payload
              currentFrame.data.push(packet);
              currentFrame.size += packet.byteLength;
            }
          }
          break;
        default:
          break;
      }

      // stop once both the first timestamp and a key frame have been found
      if (endLoop && result.firstKeyFrame) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    }

    // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet
    startIndex++;
    endIndex++;
  }

  // Start walking from end of segment to get last video packet
  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;
  // NOTE(review): unlike parseAudioPes_, this condition never accepts
  // endIndex === bytes.byteLength, so the segment-final packet position is
  // not matched on the first iteration — confirm against upstream mux.js.
  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe$2.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe$2.ts.parsePesType(packet, pmt.table);
          pusi = probe$2.ts.parsePayloadUnitStartIndicator(packet);
          if (pesType === 'video' && pusi) {
            parsed = probe$2.ts.parsePesTime(packet);
            if (parsed) {
              parsed.type = 'video';
              result.video.push(parsed);
              endLoop = true;
            }
          }
          break;
        default:
          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    }

    // If we get here, we have somehow become de-synchronized and we need to step
    // backward one byte at a time until we find a pair of sync bytes that denote
    // a packet
    startIndex--;
    endIndex--;
  }
};
23540
/**
 * Adjusts the timestamp information for the segment to account for
 * rollover and convert to seconds based on pes packet timescale (90khz clock).
 * Mutates the dts/pts fields of segmentInfo.audio, segmentInfo.video and
 * segmentInfo.firstKeyFrame in place, adding dtsTime/ptsTime in seconds.
 *
 * Fix: firstKeyFrame.ptsTime was previously computed from frame.dts,
 * so it always mirrored dtsTime instead of reflecting the frame's pts.
 */
var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
  if (segmentInfo.audio && segmentInfo.audio.length) {
    var audioBaseTimestamp = baseTimestamp;
    if (typeof audioBaseTimestamp === 'undefined') {
      // no reference supplied; anchor rollover handling on the first dts
      audioBaseTimestamp = segmentInfo.audio[0].dts;
    }
    segmentInfo.audio.forEach(function (info) {
      info.dts = handleRollover$1(info.dts, audioBaseTimestamp);
      info.pts = handleRollover$1(info.pts, audioBaseTimestamp);
      // time in seconds
      info.dtsTime = info.dts / ONE_SECOND_IN_TS$2;
      info.ptsTime = info.pts / ONE_SECOND_IN_TS$2;
    });
  }

  if (segmentInfo.video && segmentInfo.video.length) {
    var videoBaseTimestamp = baseTimestamp;
    if (typeof videoBaseTimestamp === 'undefined') {
      videoBaseTimestamp = segmentInfo.video[0].dts;
    }
    segmentInfo.video.forEach(function (info) {
      info.dts = handleRollover$1(info.dts, videoBaseTimestamp);
      info.pts = handleRollover$1(info.pts, videoBaseTimestamp);
      // time in seconds
      info.dtsTime = info.dts / ONE_SECOND_IN_TS$2;
      info.ptsTime = info.pts / ONE_SECOND_IN_TS$2;
    });
    if (segmentInfo.firstKeyFrame) {
      var frame = segmentInfo.firstKeyFrame;
      frame.dts = handleRollover$1(frame.dts, videoBaseTimestamp);
      frame.pts = handleRollover$1(frame.pts, videoBaseTimestamp);
      // time in seconds
      frame.dtsTime = frame.dts / ONE_SECOND_IN_TS$2;
      frame.ptsTime = frame.pts / ONE_SECOND_IN_TS$2;
    }
  }
};
23582
/**
 * inspects the aac data stream for start and end time information
 *
 * Walks the ID3 and ADTS frames in `bytes`, capturing the first ID3 PRIV
 * timestamp and the sample rate, then derives the end time from the count
 * of ADTS frames (1024 samples per frame). Returns null when either piece
 * of information cannot be recovered.
 */
var inspectAac_ = function inspectAac_(bytes) {
  var endLoop = false,
      audioCount = 0,
      sampleRate = null,
      timestamp = null,
      frameSize = 0,
      byteIndex = 0,
      packet;

  // 3 bytes is the minimum needed to identify an ID3/ADTS header
  while (bytes.length - byteIndex >= 3) {
    var type = probe$2.aac.parseType(bytes, byteIndex);
    switch (type) {
      case 'timed-metadata':
        // Exit early because we don't have enough to parse
        // the ID3 tag header
        if (bytes.length - byteIndex < 10) {
          endLoop = true;
          break;
        }

        frameSize = probe$2.aac.parseId3TagSize(bytes, byteIndex);

        // Exit early if we don't have enough in the buffer
        // to emit a full packet
        // NOTE(review): compares against the total buffer length rather
        // than the bytes remaining past byteIndex — verify upstream.
        if (frameSize > bytes.length) {
          endLoop = true;
          break;
        }
        if (timestamp === null) {
          packet = bytes.subarray(byteIndex, byteIndex + frameSize);
          timestamp = probe$2.aac.parseAacTimestamp(packet);
        }
        byteIndex += frameSize;
        break;
      case 'audio':
        // Exit early because we don't have enough to parse
        // the ADTS frame header
        if (bytes.length - byteIndex < 7) {
          endLoop = true;
          break;
        }

        frameSize = probe$2.aac.parseAdtsSize(bytes, byteIndex);

        // Exit early if we don't have enough in the buffer
        // to emit a full packet
        if (frameSize > bytes.length) {
          endLoop = true;
          break;
        }
        if (sampleRate === null) {
          packet = bytes.subarray(byteIndex, byteIndex + frameSize);
          sampleRate = probe$2.aac.parseSampleRate(packet);
        }
        audioCount++;
        byteIndex += frameSize;
        break;
      default:
        // unrecognized byte; resynchronize one byte at a time
        byteIndex++;
        break;
    }
    if (endLoop) {
      return null;
    }
  }
  // both a timestamp and a sample rate are required to compute timing
  if (sampleRate === null || timestamp === null) {
    return null;
  }

  // 90kHz ticks per audio sample
  var audioTimescale = ONE_SECOND_IN_TS$2 / sampleRate;

  var result = {
    audio: [{
      type: 'audio',
      dts: timestamp,
      pts: timestamp
    }, {
      // end time: audioCount ADTS frames of 1024 samples each
      type: 'audio',
      dts: timestamp + audioCount * 1024 * audioTimescale,
      pts: timestamp + audioCount * 1024 * audioTimescale
    }]
  };

  return result;
};
23671
/**
 * inspects the transport stream segment data for start and end time information
 * of the audio and video tracks (when present) as well as the first key frame's
 * start time.
 */
var inspectTs_ = function inspectTs_(bytes) {
  var pmt = {
    pid: null,
    table: null
  };
  var result = {};

  // locate the PAT/PMT first so we know which PIDs carry audio and video
  parsePsi_(bytes, pmt);

  var table = pmt.table || {};
  Object.keys(table).forEach(function (pid) {
    var streamType = table[pid];

    if (streamType === streamTypes.H264_STREAM_TYPE) {
      result.video = [];
      parseVideoPes_(bytes, pmt, result);
      if (!result.video.length) {
        delete result.video;
      }
    } else if (streamType === streamTypes.ADTS_STREAM_TYPE) {
      result.audio = [];
      parseAudioPes_(bytes, pmt, result);
      if (!result.audio.length) {
        delete result.audio;
      }
    }
  });

  return result;
};
23712
/**
 * Inspects segment byte data and returns an object with start and end timing information
 *
 * @param {Uint8Array} bytes The segment byte data
 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
 *   timestamps for rollover. This value must be in 90khz clock.
 * @return {Object} Object containing start and end frame timing info of segment.
 */
var inspect = function inspect(bytes, baseTimestamp) {
  // dispatch on container type, then normalize the timestamps
  var result = probe$2.aac.isLikelyAacData(bytes) ? inspectAac_(bytes) : inspectTs_(bytes);

  if (!result) {
    return null;
  }
  if (!result.audio && !result.video) {
    return null;
  }

  adjustTimestamp_(result, baseTimestamp);

  return result;
};
23740
// Public segment-inspection surface; also exposes parseAudioPes_ directly.
var tsInspector = {
  inspect: inspect,
  parseAudioPes_: parseAudioPes_
};

/**
 * @file sync-controller.js
 */

// alias used by the sync controller when probing raw segment bytes
var tsprobe = tsInspector.inspect;
23751
// Ordered list of strategies for locating a sync-point — a known mapping
// from display-time to segment-index. Each strategy's run() returns a
// { time, segmentIndex } object, or null when it cannot produce one.
var syncPointStrategies = [
// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
// the equivalence display-time 0 === segment-index 0
{
  name: 'VOD',
  run: function run(syncController, playlist, duration$$1, currentTimeline, currentTime) {
    if (duration$$1 !== Infinity) {
      var syncPoint = {
        time: 0,
        segmentIndex: 0
      };

      return syncPoint;
    }
    return null;
  }
},
// Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
{
  name: 'ProgramDateTime',
  run: function run(syncController, playlist, duration$$1, currentTimeline, currentTime) {
    // requires a known offset between wall-clock time and display time
    if (!syncController.datetimeToDisplayTime) {
      return null;
    }

    var segments = playlist.segments || [];
    var syncPoint = null;
    var lastDistance = null;

    currentTime = currentTime || 0;

    for (var i = 0; i < segments.length; i++) {
      var segment = segments[i];

      if (segment.dateTimeObject) {
        var segmentTime = segment.dateTimeObject.getTime() / 1000;
        var segmentStart = segmentTime + syncController.datetimeToDisplayTime;
        var distance = Math.abs(currentTime - segmentStart);

        // Once the distance begins to increase, or if distance is 0, we have passed
        // currentTime and can stop looking for better candidates
        if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
          break;
        }

        lastDistance = distance;
        syncPoint = {
          time: segmentStart,
          segmentIndex: i
        };
      }
    }
    return syncPoint;
  }
},
// Strategy "Segment": We have a known time mapping for a timeline and a
// segment in the current timeline with timing data
{
  name: 'Segment',
  run: function run(syncController, playlist, duration$$1, currentTimeline, currentTime) {
    var segments = playlist.segments || [];
    var syncPoint = null;
    var lastDistance = null;

    currentTime = currentTime || 0;

    for (var i = 0; i < segments.length; i++) {
      var segment = segments[i];

      if (segment.timeline === currentTimeline && typeof segment.start !== 'undefined') {
        var distance = Math.abs(currentTime - segment.start);

        // Once the distance begins to increase, we have passed
        // currentTime and can stop looking for better candidates
        if (lastDistance !== null && lastDistance < distance) {
          break;
        }

        if (!syncPoint || lastDistance === null || lastDistance >= distance) {
          lastDistance = distance;
          syncPoint = {
            time: segment.start,
            segmentIndex: i
          };
        }
      }
    }
    return syncPoint;
  }
},
// Strategy "Discontinuity": We have a discontinuity with a known
// display-time
{
  name: 'Discontinuity',
  run: function run(syncController, playlist, duration$$1, currentTimeline, currentTime) {
    var syncPoint = null;

    currentTime = currentTime || 0;

    if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
      var lastDistance = null;

      for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
        var segmentIndex = playlist.discontinuityStarts[i];
        var discontinuity = playlist.discontinuitySequence + i + 1;
        var discontinuitySync = syncController.discontinuities[discontinuity];

        if (discontinuitySync) {
          var distance = Math.abs(currentTime - discontinuitySync.time);

          // Once the distance begins to increase, we have passed
          // currentTime and can stop looking for better candidates
          if (lastDistance !== null && lastDistance < distance) {
            break;
          }

          if (!syncPoint || lastDistance === null || lastDistance >= distance) {
            lastDistance = distance;
            syncPoint = {
              time: discontinuitySync.time,
              segmentIndex: segmentIndex
            };
          }
        }
      }
    }
    return syncPoint;
  }
},
// Strategy "Playlist": We have a playlist with a known mapping of
// segment index to display time
{
  name: 'Playlist',
  run: function run(syncController, playlist, duration$$1, currentTimeline, currentTime) {
    if (playlist.syncInfo) {
      var syncPoint = {
        time: playlist.syncInfo.time,
        segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence
      };

      return syncPoint;
    }
    return null;
  }
}];
23897
23898 var SyncController = function (_videojs$EventTarget) {
23899 inherits(SyncController, _videojs$EventTarget);
23900
23901 function SyncController() {
23902 classCallCheck(this, SyncController);
23903
23904 // Segment Loader state variables...
23905 // ...for synching across variants
23906 var _this = possibleConstructorReturn(this, (SyncController.__proto__ || Object.getPrototypeOf(SyncController)).call(this));
23907
23908 _this.inspectCache_ = undefined;
23909
23910 // ...for synching across variants
23911 _this.timelines = [];
23912 _this.discontinuities = [];
23913 _this.datetimeToDisplayTime = null;
23914
23915 _this.logger_ = logger('SyncController');
23916 return _this;
23917 }
23918
23919 /**
23920 * Find a sync-point for the playlist specified
23921 *
23922 * A sync-point is defined as a known mapping from display-time to
23923 * a segment-index in the current playlist.
23924 *
23925 * @param {Playlist} playlist
23926 * The playlist that needs a sync-point
23927 * @param {Number} duration
23928 * Duration of the MediaSource (Infinite if playing a live source)
23929 * @param {Number} currentTimeline
23930 * The last timeline from which a segment was loaded
23931 * @returns {Object}
23932 * A sync-point object
23933 */
23934
23935
23936 createClass(SyncController, [{
23937 key: 'getSyncPoint',
23938 value: function getSyncPoint(playlist, duration$$1, currentTimeline, currentTime) {
23939 var syncPoints = this.runStrategies_(playlist, duration$$1, currentTimeline, currentTime);
23940
23941 if (!syncPoints.length) {
23942 // Signal that we need to attempt to get a sync-point manually
23943 // by fetching a segment in the playlist and constructing
23944 // a sync-point from that information
23945 return null;
23946 }
23947
23948 // Now find the sync-point that is closest to the currentTime because
23949 // that should result in the most accurate guess about which segment
23950 // to fetch
23951 return this.selectSyncPoint_(syncPoints, { key: 'time', value: currentTime });
23952 }
23953
23954 /**
23955 * Calculate the amount of time that has expired off the playlist during playback
23956 *
23957 * @param {Playlist} playlist
23958 * Playlist object to calculate expired from
23959 * @param {Number} duration
23960 * Duration of the MediaSource (Infinity if playling a live source)
23961 * @returns {Number|null}
23962 * The amount of time that has expired off the playlist during playback. Null
23963 * if no sync-points for the playlist can be found.
23964 */
23965
23966 }, {
23967 key: 'getExpiredTime',
23968 value: function getExpiredTime(playlist, duration$$1) {
23969 if (!playlist || !playlist.segments) {
23970 return null;
23971 }
23972
23973 var syncPoints = this.runStrategies_(playlist, duration$$1, playlist.discontinuitySequence, 0);
23974
23975 // Without sync-points, there is not enough information to determine the expired time
23976 if (!syncPoints.length) {
23977 return null;
23978 }
23979
23980 var syncPoint = this.selectSyncPoint_(syncPoints, {
23981 key: 'segmentIndex',
23982 value: 0
23983 });
23984
23985 // If the sync-point is beyond the start of the playlist, we want to subtract the
23986 // duration from index 0 to syncPoint.segmentIndex instead of adding.
23987 if (syncPoint.segmentIndex > 0) {
23988 syncPoint.time *= -1;
23989 }
23990
23991 return Math.abs(syncPoint.time + sumDurations(playlist, syncPoint.segmentIndex, 0));
23992 }
23993
23994 /**
23995 * Runs each sync-point strategy and returns a list of sync-points returned by the
23996 * strategies
23997 *
23998 * @private
23999 * @param {Playlist} playlist
24000 * The playlist that needs a sync-point
24001 * @param {Number} duration
24002 * Duration of the MediaSource (Infinity if playing a live source)
24003 * @param {Number} currentTimeline
24004 * The last timeline from which a segment was loaded
24005 * @returns {Array}
24006 * A list of sync-point objects
24007 */
24008
24009 }, {
24010 key: 'runStrategies_',
24011 value: function runStrategies_(playlist, duration$$1, currentTimeline, currentTime) {
24012 var syncPoints = [];
24013
24014 // Try to find a sync-point in by utilizing various strategies...
24015 for (var i = 0; i < syncPointStrategies.length; i++) {
24016 var strategy = syncPointStrategies[i];
24017 var syncPoint = strategy.run(this, playlist, duration$$1, currentTimeline, currentTime);
24018
24019 if (syncPoint) {
24020 syncPoint.strategy = strategy.name;
24021 syncPoints.push({
24022 strategy: strategy.name,
24023 syncPoint: syncPoint
24024 });
24025 }
24026 }
24027
24028 return syncPoints;
24029 }
24030
    /**
     * Selects the sync-point nearest the specified target
     *
     * @private
     * @param {Array} syncPoints
     *        List of sync-points to select from
     * @param {Object} target
     *        Object specifying the property and value we are targeting
     * @param {String} target.key
     *        Specifies the property to target. Must be either 'time' or 'segmentIndex'
     * @param {Number} target.value
     *        The value to target for the specified key.
     * @returns {Object}
     *          The sync-point nearest the target
     */

  }, {
    key: 'selectSyncPoint_',
    value: function selectSyncPoint_(syncPoints, target) {
      // linear scan: start from the first candidate and keep whichever
      // sync-point's targeted property lies closest to target.value
      var bestSyncPoint = syncPoints[0].syncPoint;
      var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
      var bestStrategy = syncPoints[0].strategy;

      for (var i = 1; i < syncPoints.length; i++) {
        var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);

        // strict < keeps the earliest candidate on ties
        if (newDistance < bestDistance) {
          bestDistance = newDistance;
          bestSyncPoint = syncPoints[i].syncPoint;
          bestStrategy = syncPoints[i].strategy;
        }
      }

      this.logger_('syncPoint for [' + target.key + ': ' + target.value + '] chosen with strategy' + (' [' + bestStrategy + ']: [time:' + bestSyncPoint.time + ',') + (' segmentIndex:' + bestSyncPoint.segmentIndex + ']'));

      return bestSyncPoint;
    }
24068
    /**
     * Save any meta-data present on the segments when segments leave
     * the live window to the playlist to allow for synchronization at the
     * playlist level later.
     *
     * @param {Playlist} oldPlaylist - The previous active playlist
     * @param {Playlist} newPlaylist - The updated and most current playlist
     */

  }, {
    key: 'saveExpiredSegmentInfo',
    value: function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
      // number of segments that fell off the start of the live window
      // between the two playlist refreshes
      var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;

      // When a segment expires from the playlist and it has a start time
      // save that information as a possible sync-point reference in future
      // (scan backwards so the most recently expired segment with timing wins)
      for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
        var lastRemovedSegment = oldPlaylist.segments[i];

        if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
          newPlaylist.syncInfo = {
            mediaSequence: oldPlaylist.mediaSequence + i,
            time: lastRemovedSegment.start
          };
          this.logger_('playlist refresh sync: [time:' + newPlaylist.syncInfo.time + ',' + (' mediaSequence: ' + newPlaylist.syncInfo.mediaSequence + ']'));
          this.trigger('syncinfoupdate');
          break;
        }
      }
    }
24099
    /**
     * Save the mapping from playlist's ProgramDateTime to display. This should
     * only ever happen once at the start of playback.
     *
     * @param {Playlist} playlist - The currently active playlist
     */

  }, {
    key: 'setDateTimeMapping',
    value: function setDateTimeMapping(playlist) {
      // only establish the mapping once, and only when the first segment
      // carries a dateTimeObject (presumably from EXT-X-PROGRAM-DATE-TIME —
      // derived upstream of this method)
      if (!this.datetimeToDisplayTime && playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
        // epoch seconds of the first segment's program date time
        var playlistTimestamp = playlist.segments[0].dateTimeObject.getTime() / 1000;

        // adding this (negative) offset to a program-date-time in seconds
        // yields display time 0 at the first segment
        this.datetimeToDisplayTime = -playlistTimestamp;
      }
    }
24116
    /**
     * Reset the state of the inspection cache when we do a rendition
     * switch
     */

  }, {
    key: 'reset',
    value: function reset() {
      // inspectCache_ holds the last DTS seen by probeTsSegment_; clearing
      // it forces the next TS probe to start without carry-over state
      this.inspectCache_ = undefined;
    }
24127
    /**
     * Probe or inspect a fmp4 or an mpeg2-ts segment to determine the start
     * and end of the segment in it's internal "media time". Used to generate
     * mappings from that internal "media time" to the display time that is
     * shown on the player.
     *
     * @param {SegmentInfo} segmentInfo - The current active request information
     * @return {Object|null|undefined} probed timing info ({start, end, ...}),
     *         or null/undefined when the segment could not be inspected
     */

  }, {
    key: 'probeSegmentInfo',
    value: function probeSegmentInfo(segmentInfo) {
      var segment = segmentInfo.segment;
      var playlist = segmentInfo.playlist;
      var timingInfo = void 0;

      // a segment with an init-segment `map` is treated as fmp4;
      // everything else is probed as mpeg2-ts
      if (segment.map) {
        timingInfo = this.probeMp4Segment_(segmentInfo);
      } else {
        timingInfo = this.probeTsSegment_(segmentInfo);
      }

      if (timingInfo) {
        if (this.calculateSegmentTimeMapping_(segmentInfo, timingInfo)) {
          this.saveDiscontinuitySyncInfo_(segmentInfo);

          // If the playlist does not have sync information yet, record that information
          // now with segment timing information
          if (!playlist.syncInfo) {
            playlist.syncInfo = {
              mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
              time: segment.start
            };
          }
        }
      }

      return timingInfo;
    }
24167
    /**
     * Probe an fmp4 segment to determine the start of the segment
     * in it's internal "composition time", which is equal to the base
     * media decode time plus the composition time offset value
     *
     * @private
     * @param {SegmentInfo} segmentInfo - The current active request information
     * @return {object} The start and end time of the current segment in "composition time"
     */

  }, {
    key: 'probeMp4Segment_',
    value: function probeMp4Segment_(segmentInfo) {
      var segment = segmentInfo.segment;
      // get timescales from init segment
      var timescales = probe.timescale(segment.map.bytes);
      // calculate composition start time using the timescales and information
      // contained within the media segment
      var compositionStartTime = probe.compositionStartTime(timescales, segmentInfo.bytes);

      if (segmentInfo.timestampOffset !== null) {
        // rebase the requested timestampOffset against this segment's
        // internal composition start time (mutates segmentInfo in place)
        segmentInfo.timestampOffset -= compositionStartTime;
      }

      // NOTE(review): `end` is approximated as start + playlist-declared
      // duration rather than probed from the media itself
      return {
        start: compositionStartTime,
        end: compositionStartTime + segment.duration
      };
    }
24197
    /**
     * Probe an mpeg2-ts segment to determine the start and end of the segment
     * in it's internal "media time".
     *
     * @private
     * @param {SegmentInfo} segmentInfo - The current active request information
     * @return {object|null} The start and end time of the current segment in
     *         "media time", or null when inspection produced no timing info
     */

  }, {
    key: 'probeTsSegment_',
    value: function probeTsSegment_(segmentInfo) {
      // inspectCache_ carries the last seen DTS from the previous probe so
      // the inspector has continuity between consecutive segments
      var timeInfo = tsprobe(segmentInfo.bytes, this.inspectCache_);
      var segmentStartTime = void 0;
      var segmentEndTime = void 0;
      var segmentTimestampInfo = void 0;

      if (!timeInfo) {
        return null;
      }

      // prefer video timing; fall back to audio when no usable video
      // timestamp pair was found
      if (timeInfo.video && timeInfo.video.length === 2) {
        this.inspectCache_ = timeInfo.video[1].dts;
        segmentStartTime = timeInfo.video[0].dtsTime;
        segmentEndTime = timeInfo.video[1].dtsTime;
        segmentTimestampInfo = timeInfo.video;
      } else if (timeInfo.audio && timeInfo.audio.length === 2) {
        this.inspectCache_ = timeInfo.audio[1].dts;
        segmentStartTime = timeInfo.audio[0].dtsTime;
        segmentEndTime = timeInfo.audio[1].dtsTime;
        segmentTimestampInfo = timeInfo.audio;
      }

      // note: start/end stay undefined when neither track had a timestamp
      // pair; the containsVideo/containsAudio flags let callers detect that
      var probedInfo = {
        segmentTimestampInfo: segmentTimestampInfo,
        start: segmentStartTime,
        end: segmentEndTime,
        containsVideo: timeInfo.video && timeInfo.video.length === 2,
        containsAudio: timeInfo.audio && timeInfo.audio.length === 2
      };

      return probedInfo;
    }
  }, {
    /**
     * Return the display-time anchor recorded for a timeline by
     * calculateSegmentTimeMapping_, or null when the timeline is unknown.
     *
     * @param {Number} timeline - the timeline id to look up
     * @return {Number|null} the recorded `time` for that timeline
     */
    key: 'timestampOffsetForTimeline',
    value: function timestampOffsetForTimeline(timeline) {
      if (typeof this.timelines[timeline] === 'undefined') {
        return null;
      }
      return this.timelines[timeline].time;
    }
  }, {
    /**
     * Return the media-time-to-display-time mapping recorded for a timeline
     * by calculateSegmentTimeMapping_, or null when the timeline is unknown.
     *
     * @param {Number} timeline - the timeline id to look up
     * @return {Number|null} the recorded `mapping` for that timeline
     */
    key: 'mappingForTimeline',
    value: function mappingForTimeline(timeline) {
      if (typeof this.timelines[timeline] === 'undefined') {
        return null;
      }
      return this.timelines[timeline].mapping;
    }
24257
    /**
     * Use the "media time" for a segment to generate a mapping to "display time" and
     * save that display time to the segment.
     *
     * @private
     * @param {SegmentInfo} segmentInfo
     *        The current active request information
     * @param {object} timingInfo
     *        The start and end time of the current segment in "media time"
     * @returns {Boolean}
     *          Returns false if segment time mapping could not be calculated
     */

  }, {
    key: 'calculateSegmentTimeMapping_',
    value: function calculateSegmentTimeMapping_(segmentInfo, timingInfo) {
      var segment = segmentInfo.segment;
      var mappingObj = this.timelines[segmentInfo.timeline];

      if (segmentInfo.timestampOffset !== null) {
        // a timestampOffset was requested, so (re)anchor this timeline:
        // display time = media time + mapping
        mappingObj = {
          time: segmentInfo.startOfSegment,
          mapping: segmentInfo.startOfSegment - timingInfo.start
        };
        this.timelines[segmentInfo.timeline] = mappingObj;
        this.trigger('timestampoffset');

        this.logger_('time mapping for timeline ' + segmentInfo.timeline + ': ' + ('[time: ' + mappingObj.time + '] [mapping: ' + mappingObj.mapping + ']'));

        segment.start = segmentInfo.startOfSegment;
        segment.end = timingInfo.end + mappingObj.mapping;
      } else if (mappingObj) {
        // reuse the mapping already established for this timeline
        segment.start = timingInfo.start + mappingObj.mapping;
        segment.end = timingInfo.end + mappingObj.mapping;
      } else {
        // no mapping available for this timeline yet
        return false;
      }

      return true;
    }
24298
    /**
     * Each time we have discontinuity in the playlist, attempt to calculate the location
     * in display of the start of the discontinuity and save that. We also save an accuracy
     * value so that we save values with the most accuracy (closest to 0.)
     *
     * @private
     * @param {SegmentInfo} segmentInfo - The current active request information
     */

  }, {
    key: 'saveDiscontinuitySyncInfo_',
    value: function saveDiscontinuitySyncInfo_(segmentInfo) {
      var playlist = segmentInfo.playlist;
      var segment = segmentInfo.segment;

      // If the current segment is a discontinuity then we know exactly where
      // the start of the range and it's accuracy is 0 (greater accuracy values
      // mean more approximation)
      if (segment.discontinuity) {
        this.discontinuities[segment.timeline] = {
          time: segment.start,
          accuracy: 0
        };
      } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
        // Search for future discontinuities that we can provide better timing
        // information for and save that information for sync purposes
        for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
          var segmentIndex = playlist.discontinuityStarts[i];
          var discontinuity = playlist.discontinuitySequence + i + 1;
          var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
          // accuracy = distance in segments from the current segment to the
          // discontinuity; smaller means a better estimate
          var accuracy = Math.abs(mediaIndexDiff);

          // only overwrite a previous estimate with a more accurate one
          if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
            var time = void 0;

            // estimate the discontinuity's display time by summing the
            // playlist durations of the segments between here and there
            if (mediaIndexDiff < 0) {
              time = segment.start - sumDurations(playlist, segmentInfo.mediaIndex, segmentIndex);
            } else {
              time = segment.end + sumDurations(playlist, segmentInfo.mediaIndex + 1, segmentIndex);
            }

            this.discontinuities[discontinuity] = {
              time: time,
              accuracy: accuracy
            };
          }
        }
      }
    }
  }, {
    /**
     * Tear down the controller: notify 'dispose' listeners, then remove
     * all event listeners.
     */
    key: 'dispose',
    value: function dispose() {
      this.trigger('dispose');
      this.off();
    }
24354 }]);
24355 return SyncController;
24356 }(videojs.EventTarget);
24357
24358 var Decrypter$1 = new shimWorker("./decrypter-worker.worker.js", function (window, document) {
24359 var self = this;
24360 var decrypterWorker = function () {
24361
24362 /*
24363 * pkcs7.pad
24364 * https://github.com/brightcove/pkcs7
24365 *
24366 * Copyright (c) 2014 Brightcove
24367 * Licensed under the apache2 license.
24368 */
24369
24370 /**
24371 * Returns the subarray of a Uint8Array without PKCS#7 padding.
24372 * @param padded {Uint8Array} unencrypted bytes that have been padded
24373 * @return {Uint8Array} the unpadded bytes
24374 * @see http://tools.ietf.org/html/rfc5652
24375 */
24376
24377 function unpad(padded) {
24378 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
24379 }
24380
24381 var classCallCheck = function classCallCheck(instance, Constructor) {
24382 if (!(instance instanceof Constructor)) {
24383 throw new TypeError("Cannot call a class as a function");
24384 }
24385 };
24386
24387 var createClass = function () {
24388 function defineProperties(target, props) {
24389 for (var i = 0; i < props.length; i++) {
24390 var descriptor = props[i];
24391 descriptor.enumerable = descriptor.enumerable || false;
24392 descriptor.configurable = true;
24393 if ("value" in descriptor) descriptor.writable = true;
24394 Object.defineProperty(target, descriptor.key, descriptor);
24395 }
24396 }
24397
24398 return function (Constructor, protoProps, staticProps) {
24399 if (protoProps) defineProperties(Constructor.prototype, protoProps);
24400 if (staticProps) defineProperties(Constructor, staticProps);
24401 return Constructor;
24402 };
24403 }();
24404
24405 var inherits = function inherits(subClass, superClass) {
24406 if (typeof superClass !== "function" && superClass !== null) {
24407 throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
24408 }
24409
24410 subClass.prototype = Object.create(superClass && superClass.prototype, {
24411 constructor: {
24412 value: subClass,
24413 enumerable: false,
24414 writable: true,
24415 configurable: true
24416 }
24417 });
24418 if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;
24419 };
24420
24421 var possibleConstructorReturn = function possibleConstructorReturn(self, call) {
24422 if (!self) {
24423 throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
24424 }
24425
24426 return call && (typeof call === "object" || typeof call === "function") ? call : self;
24427 };
24428
24429 /**
24430 * @file aes.js
24431 *
24432 * This file contains an adaptation of the AES decryption algorithm
24433 * from the Standford Javascript Cryptography Library. That work is
24434 * covered by the following copyright and permissions notice:
24435 *
24436 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
24437 * All rights reserved.
24438 *
24439 * Redistribution and use in source and binary forms, with or without
24440 * modification, are permitted provided that the following conditions are
24441 * met:
24442 *
24443 * 1. Redistributions of source code must retain the above copyright
24444 * notice, this list of conditions and the following disclaimer.
24445 *
24446 * 2. Redistributions in binary form must reproduce the above
24447 * copyright notice, this list of conditions and the following
24448 * disclaimer in the documentation and/or other materials provided
24449 * with the distribution.
24450 *
24451 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
24452 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
24453 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24454 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
24455 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
24456 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24457 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
24458 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24459 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
24460 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
24461 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24462 *
24463 * The views and conclusions contained in the software and documentation
24464 * are those of the authors and should not be interpreted as representing
24465 * official policies, either expressed or implied, of the authors.
24466 */
24467
24468 /**
24469 * Expand the S-box tables.
24470 *
24471 * @private
24472 */
24473 var precompute = function precompute() {
24474 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
24475 var encTable = tables[0];
24476 var decTable = tables[1];
24477 var sbox = encTable[4];
24478 var sboxInv = decTable[4];
24479 var i = void 0;
24480 var x = void 0;
24481 var xInv = void 0;
24482 var d = [];
24483 var th = [];
24484 var x2 = void 0;
24485 var x4 = void 0;
24486 var x8 = void 0;
24487 var s = void 0;
24488 var tEnc = void 0;
24489 var tDec = void 0;
24490
24491 // Compute double and third tables
24492 for (i = 0; i < 256; i++) {
24493 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
24494 }
24495
24496 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
24497 // Compute sbox
24498 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
24499 s = s >> 8 ^ s & 255 ^ 99;
24500 sbox[x] = s;
24501 sboxInv[s] = x;
24502
24503 // Compute MixColumns
24504 x8 = d[x4 = d[x2 = d[x]]];
24505 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
24506 tEnc = d[s] * 0x101 ^ s * 0x1010100;
24507
24508 for (i = 0; i < 4; i++) {
24509 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
24510 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
24511 }
24512 }
24513
24514 // Compactify. Considerable speedup on Firefox.
24515 for (i = 0; i < 5; i++) {
24516 encTable[i] = encTable[i].slice(0);
24517 decTable[i] = decTable[i].slice(0);
24518 }
24519 return tables;
24520 };
24521 var aesTables = null;
24522
    /**
     * Schedule out an AES key for both encryption and decryption. This
     * is a low-level class. Use a cipher mode to do bulk encryption.
     *
     * @class AES
     * @param key {Array} The key as an array of 4, 6 or 8 words.
     */

    var AES = function () {
      function AES(key) {
        classCallCheck(this, AES);

        /**
         * The expanded S-box and inverse S-box tables. These will be computed
         * on the client so that we don't have to send them down the wire.
         *
         * There are two tables, _tables[0] is for encryption and
         * _tables[1] is for decryption.
         *
         * The first 4 sub-tables are the expanded S-box with MixColumns. The
         * last (_tables[01][4]) is the S-box itself.
         *
         * @private
         */
        // if we have yet to precompute the S-box tables
        // do so now
        if (!aesTables) {
          aesTables = precompute();
        }
        // then make a copy of that object for use
        this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
        var i = void 0;
        var j = void 0;
        var tmp = void 0;
        var encKey = void 0;
        var decKey = void 0;
        var sbox = this._tables[0][4];
        var decTable = this._tables[1];
        var keyLen = key.length;
        var rcon = 1;

        // AES keys must be 128, 192 or 256 bits: 4, 6 or 8 32-bit words
        if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
          throw new Error('Invalid aes key size');
        }

        encKey = key.slice(0);
        decKey = [];
        this._key = [encKey, decKey];

        // schedule encryption keys
        for (i = keyLen; i < 4 * keyLen + 28; i++) {
          tmp = encKey[i - 1];

          // apply sbox
          if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
            tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255];

            // shift rows and add rcon
            if (i % keyLen === 0) {
              tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
              rcon = rcon << 1 ^ (rcon >> 7) * 283;
            }
          }

          encKey[i] = encKey[i - keyLen] ^ tmp;
        }

        // schedule decryption keys: walk the encryption schedule backwards,
        // applying the inverse MixColumns tables to the middle rounds
        for (j = 0; i; j++, i--) {
          tmp = encKey[j & 3 ? i : i - 4];
          if (i <= 4 || j < 4) {
            decKey[j] = tmp;
          } else {
            decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
          }
        }
      }

      /**
       * Decrypt 16 bytes, specified as four 32-bit words.
       *
       * @param {Number} encrypted0 the first word to decrypt
       * @param {Number} encrypted1 the second word to decrypt
       * @param {Number} encrypted2 the third word to decrypt
       * @param {Number} encrypted3 the fourth word to decrypt
       * @param {Int32Array} out the array to write the decrypted words
       * into
       * @param {Number} offset the offset into the output array to start
       * writing results
       * @return {Array} The plaintext.
       */

      AES.prototype.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
        var key = this._key[1];
        // state variables a,b,c,d are loaded with pre-whitened data
        var a = encrypted0 ^ key[0];
        var b = encrypted3 ^ key[1];
        var c = encrypted2 ^ key[2];
        var d = encrypted1 ^ key[3];
        var a2 = void 0;
        var b2 = void 0;
        var c2 = void 0;

        // key.length === 2 ?
        var nInnerRounds = key.length / 4 - 2;
        var i = void 0;
        var kIndex = 4;
        var table = this._tables[1];

        // load up the tables
        var table0 = table[0];
        var table1 = table[1];
        var table2 = table[2];
        var table3 = table[3];
        var sbox = table[4];

        // Inner rounds. Cribbed from OpenSSL.
        for (i = 0; i < nInnerRounds; i++) {
          a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
          b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
          c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
          d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
          kIndex += 4;
          a = a2;b = b2;c = c2;
        }

        // Last round.
        for (i = 0; i < 4; i++) {
          out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
          a2 = a;a = b;b = c;c = d;d = a2;
        }
      };

      return AES;
    }();
24658
24659 /**
24660 * @file stream.js
24661 */
24662 /**
24663 * A lightweight readable stream implemention that handles event dispatching.
24664 *
24665 * @class Stream
24666 */
24667 var Stream = function () {
24668 function Stream() {
24669 classCallCheck(this, Stream);
24670
24671 this.listeners = {};
24672 }
24673
24674 /**
24675 * Add a listener for a specified event type.
24676 *
24677 * @param {String} type the event name
24678 * @param {Function} listener the callback to be invoked when an event of
24679 * the specified type occurs
24680 */
24681
24682 Stream.prototype.on = function on(type, listener) {
24683 if (!this.listeners[type]) {
24684 this.listeners[type] = [];
24685 }
24686 this.listeners[type].push(listener);
24687 };
24688
24689 /**
24690 * Remove a listener for a specified event type.
24691 *
24692 * @param {String} type the event name
24693 * @param {Function} listener a function previously registered for this
24694 * type of event through `on`
24695 * @return {Boolean} if we could turn it off or not
24696 */
24697
24698 Stream.prototype.off = function off(type, listener) {
24699 if (!this.listeners[type]) {
24700 return false;
24701 }
24702
24703 var index = this.listeners[type].indexOf(listener);
24704
24705 this.listeners[type].splice(index, 1);
24706 return index > -1;
24707 };
24708
24709 /**
24710 * Trigger an event of the specified type on this stream. Any additional
24711 * arguments to this function are passed as parameters to event listeners.
24712 *
24713 * @param {String} type the event name
24714 */
24715
24716 Stream.prototype.trigger = function trigger(type) {
24717 var callbacks = this.listeners[type];
24718
24719 if (!callbacks) {
24720 return;
24721 }
24722
24723 // Slicing the arguments on every invocation of this method
24724 // can add a significant amount of overhead. Avoid the
24725 // intermediate object creation for the common case of a
24726 // single callback argument
24727 if (arguments.length === 2) {
24728 var length = callbacks.length;
24729
24730 for (var i = 0; i < length; ++i) {
24731 callbacks[i].call(this, arguments[1]);
24732 }
24733 } else {
24734 var args = Array.prototype.slice.call(arguments, 1);
24735 var _length = callbacks.length;
24736
24737 for (var _i = 0; _i < _length; ++_i) {
24738 callbacks[_i].apply(this, args);
24739 }
24740 }
24741 };
24742
24743 /**
24744 * Destroys the stream and cleans up.
24745 */
24746
24747 Stream.prototype.dispose = function dispose() {
24748 this.listeners = {};
24749 };
24750 /**
24751 * Forwards all `data` events on this stream to the destination stream. The
24752 * destination stream should provide a method `push` to receive the data
24753 * events as they arrive.
24754 *
24755 * @param {Stream} destination the stream that will receive all `data` events
24756 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
24757 */
24758
24759 Stream.prototype.pipe = function pipe(destination) {
24760 this.on('data', function (data) {
24761 destination.push(data);
24762 });
24763 };
24764
24765 return Stream;
24766 }();
24767
24768 /**
24769 * @file async-stream.js
24770 */
24771 /**
24772 * A wrapper around the Stream class to use setTiemout
24773 * and run stream "jobs" Asynchronously
24774 *
24775 * @class AsyncStream
24776 * @extends Stream
24777 */
24778
24779 var AsyncStream = function (_Stream) {
24780 inherits(AsyncStream, _Stream);
24781
24782 function AsyncStream() {
24783 classCallCheck(this, AsyncStream);
24784
24785 var _this = possibleConstructorReturn(this, _Stream.call(this, Stream));
24786
24787 _this.jobs = [];
24788 _this.delay = 1;
24789 _this.timeout_ = null;
24790 return _this;
24791 }
24792
24793 /**
24794 * process an async job
24795 *
24796 * @private
24797 */
24798
24799 AsyncStream.prototype.processJob_ = function processJob_() {
24800 this.jobs.shift()();
24801 if (this.jobs.length) {
24802 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
24803 } else {
24804 this.timeout_ = null;
24805 }
24806 };
24807
24808 /**
24809 * push a job into the stream
24810 *
24811 * @param {Function} job the job to push into the stream
24812 */
24813
24814 AsyncStream.prototype.push = function push(job) {
24815 this.jobs.push(job);
24816 if (!this.timeout_) {
24817 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
24818 }
24819 };
24820
24821 return AsyncStream;
24822 }(Stream);
24823
24824 /**
24825 * @file decrypter.js
24826 *
24827 * An asynchronous implementation of AES-128 CBC decryption with
24828 * PKCS#7 padding.
24829 */
24830
24831 /**
24832 * Convert network-order (big-endian) bytes into their little-endian
24833 * representation.
24834 */
24835 var ntoh = function ntoh(word) {
24836 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
24837 };
24838
    /**
     * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
     *
     * @param {Uint8Array} encrypted the encrypted bytes
     * @param {Uint32Array} key the bytes of the decryption key
     * @param {Uint32Array} initVector the initialization vector (IV) to
     * use for the first round of CBC.
     * @return {Uint8Array} the decrypted bytes
     *
     * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
     * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
     * @see https://tools.ietf.org/html/rfc2315
     */
    var decrypt = function decrypt(encrypted, key, initVector) {
      // word-level access to the encrypted bytes
      // (byteLength >> 2 drops any trailing partial word; the input is
      // expected to be whole 16-byte blocks)
      var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);

      var decipher = new AES(Array.prototype.slice.call(key));

      // byte and word-level access for the decrypted output
      var decrypted = new Uint8Array(encrypted.byteLength);
      var decrypted32 = new Int32Array(decrypted.buffer);

      // temporary variables for working with the IV, encrypted, and
      // decrypted data
      var init0 = void 0;
      var init1 = void 0;
      var init2 = void 0;
      var init3 = void 0;
      var encrypted0 = void 0;
      var encrypted1 = void 0;
      var encrypted2 = void 0;
      var encrypted3 = void 0;

      // iteration variable
      var wordIx = void 0;

      // pull out the words of the IV to ensure we don't modify the
      // passed-in reference and easier access
      init0 = initVector[0];
      init1 = initVector[1];
      init2 = initVector[2];
      init3 = initVector[3];

      // decrypt four word sequences, applying cipher-block chaining (CBC)
      // to each decrypted block
      for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
        // convert big-endian (network order) words into little-endian
        // (javascript order)
        encrypted0 = ntoh(encrypted32[wordIx]);
        encrypted1 = ntoh(encrypted32[wordIx + 1]);
        encrypted2 = ntoh(encrypted32[wordIx + 2]);
        encrypted3 = ntoh(encrypted32[wordIx + 3]);

        // decrypt the block
        decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx);

        // XOR with the IV, and restore network byte-order to obtain the
        // plaintext
        decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
        decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
        decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
        decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3);

        // setup the IV for the next round: CBC chains each block to the
        // previous ciphertext block
        init0 = encrypted0;
        init1 = encrypted1;
        init2 = encrypted2;
        init3 = encrypted3;
      }

      // note: padding is NOT removed here — callers (Decrypter) apply
      // unpad() after all chunks are processed
      return decrypted;
    };
24912
    /**
     * The `Decrypter` class that manages decryption of AES
     * data through `AsyncStream` objects and the `decrypt`
     * function
     *
     * @param {Uint8Array} encrypted the encrypted bytes
     * @param {Uint32Array} key the bytes of the decryption key
     * @param {Uint32Array} initVector the initialization vector (IV) to
     * use for the first chunk
     * @param {Function} done the function to run when done
     * @class Decrypter
     */

    var Decrypter = function () {
      function Decrypter(encrypted, key, initVector, done) {
        classCallCheck(this, Decrypter);

        var step = Decrypter.STEP;
        var encrypted32 = new Int32Array(encrypted.buffer);
        var decrypted = new Uint8Array(encrypted.byteLength);
        var i = 0;

        this.asyncStream_ = new AsyncStream();

        // split up the encryption job and do the individual chunks asynchronously
        this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
        for (i = step; i < encrypted32.length; i += step) {
          // each chunk after the first uses the final 16 bytes of the
          // previous chunk's ciphertext as its CBC initialization vector
          initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
          this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
        }
        // invoke the done() callback when everything is finished
        this.asyncStream_.push(function () {
          // remove pkcs#7 padding from the decrypted bytes
          done(null, unpad(decrypted));
        });
      }

      /**
       * a getter for step the maximum number of bytes to process at one time
       *
       * @return {Number} the value of step 32000
       */

      /**
       * Build a deferred job that decrypts one chunk into the shared output
       * buffer; returned so it can be scheduled on the AsyncStream rather
       * than run immediately.
       *
       * @private
       */
      Decrypter.prototype.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
        return function () {
          var bytes = decrypt(encrypted, key, initVector);

          decrypted.set(bytes, encrypted.byteOffset);
        };
      };

      createClass(Decrypter, null, [{
        key: 'STEP',
        get: function get$$1() {
          // 4 * 8000;
          return 32000;
        }
      }]);
      return Decrypter;
    }();
24975
24976 /**
24977 * @file bin-utils.js
24978 */
24979
24980 /**
24981 * Creates an object for sending to a web worker modifying properties that are TypedArrays
24982 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
24983 *
24984 * @param {Object} message
24985 * Object of properties and values to send to the web worker
24986 * @return {Object}
24987 * Modified message with TypedArray values expanded
24988 * @function createTransferableMessage
24989 */
24990 var createTransferableMessage = function createTransferableMessage(message) {
24991 var transferable = {};
24992
24993 Object.keys(message).forEach(function (key) {
24994 var value = message[key];
24995
24996 if (ArrayBuffer.isView(value)) {
24997 transferable[key] = {
24998 bytes: value.buffer,
24999 byteOffset: value.byteOffset,
25000 byteLength: value.byteLength
25001 };
25002 } else {
25003 transferable[key] = value;
25004 }
25005 });
25006
25007 return transferable;
25008 };
25009
25010 /**
25011 * Our web worker interface so that things can talk to aes-decrypter
25012 * that will be running in a web worker. the scope is passed to this by
25013 * webworkify.
25014 *
25015 * @param {Object} self
25016 * the scope for the web worker
25017 */
25018 var DecrypterWorker = function DecrypterWorker(self) {
25019 self.onmessage = function (event) {
25020 var data = event.data;
25021 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
25022 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
25023 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
25024
25025 /* eslint-disable no-new, handle-callback-err */
25026 new Decrypter(encrypted, key, iv, function (err, bytes) {
25027 self.postMessage(createTransferableMessage({
25028 source: data.source,
25029 decrypted: bytes
25030 }), [bytes.buffer]);
25031 });
25032 /* eslint-enable */
25033 };
25034 };
25035
25036 var decrypterWorker = new DecrypterWorker(self);
25037
25038 return decrypterWorker;
25039 }();
25040 });
25041
25042 /**
25043 * Convert the properties of an HLS track into an audioTrackKind.
25044 *
25045 * @private
25046 */
25047 var audioTrackKind_ = function audioTrackKind_(properties) {
25048 var kind = properties.default ? 'main' : 'alternative';
25049
25050 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
25051 kind = 'main-desc';
25052 }
25053
25054 return kind;
25055 };
25056
25057 /**
25058 * Pause provided segment loader and playlist loader if active
25059 *
25060 * @param {SegmentLoader} segmentLoader
25061 * SegmentLoader to pause
25062 * @param {Object} mediaType
25063 * Active media type
25064 * @function stopLoaders
25065 */
25066 var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
25067 segmentLoader.abort();
25068 segmentLoader.pause();
25069
25070 if (mediaType && mediaType.activePlaylistLoader) {
25071 mediaType.activePlaylistLoader.pause();
25072 mediaType.activePlaylistLoader = null;
25073 }
25074 };
25075
25076 /**
25077 * Start loading provided segment loader and playlist loader
25078 *
25079 * @param {PlaylistLoader} playlistLoader
25080 * PlaylistLoader to start loading
25081 * @param {Object} mediaType
25082 * Active media type
25083 * @function startLoaders
25084 */
25085 var startLoaders = function startLoaders(playlistLoader, mediaType) {
25086 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
25087 // playlist loader
25088 mediaType.activePlaylistLoader = playlistLoader;
25089 playlistLoader.load();
25090 };
25091
25092 /**
25093 * Returns a function to be called when the media group changes. It performs a
25094 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
25095 * change of group is merely a rendition switch of the same content at another encoding,
25096 * rather than a change of content, such as switching audio from English to Spanish.
25097 *
25098 * @param {String} type
25099 * MediaGroup type
25100 * @param {Object} settings
25101 * Object containing required information for media groups
25102 * @return {Function}
25103 * Handler for a non-destructive resync of SegmentLoader when the active media
25104 * group changes.
25105 * @function onGroupChanged
25106 */
25107 var onGroupChanged = function onGroupChanged(type, settings) {
25108 return function () {
25109 var _settings$segmentLoad = settings.segmentLoaders,
25110 segmentLoader = _settings$segmentLoad[type],
25111 mainSegmentLoader = _settings$segmentLoad.main,
25112 mediaType = settings.mediaTypes[type];
25113
25114 var activeTrack = mediaType.activeTrack();
25115 var activeGroup = mediaType.activeGroup(activeTrack);
25116 var previousActiveLoader = mediaType.activePlaylistLoader;
25117
25118 stopLoaders(segmentLoader, mediaType);
25119
25120 if (!activeGroup) {
25121 // there is no group active
25122 return;
25123 }
25124
25125 if (!activeGroup.playlistLoader) {
25126 if (previousActiveLoader) {
25127 // The previous group had a playlist loader but the new active group does not
25128 // this means we are switching from demuxed to muxed audio. In this case we want to
25129 // do a destructive reset of the main segment loader and not restart the audio
25130 // loaders.
25131 mainSegmentLoader.resetEverything();
25132 }
25133 return;
25134 }
25135
25136 // Non-destructive resync
25137 segmentLoader.resyncLoader();
25138
25139 startLoaders(activeGroup.playlistLoader, mediaType);
25140 };
25141 };
25142
25143 /**
25144 * Returns a function to be called when the media track changes. It performs a
25145 * destructive reset of the SegmentLoader to ensure we start loading as close to
25146 * currentTime as possible.
25147 *
25148 * @param {String} type
25149 * MediaGroup type
25150 * @param {Object} settings
25151 * Object containing required information for media groups
25152 * @return {Function}
25153 * Handler for a destructive reset of SegmentLoader when the active media
25154 * track changes.
25155 * @function onTrackChanged
25156 */
25157 var onTrackChanged = function onTrackChanged(type, settings) {
25158 return function () {
25159 var _settings$segmentLoad2 = settings.segmentLoaders,
25160 segmentLoader = _settings$segmentLoad2[type],
25161 mainSegmentLoader = _settings$segmentLoad2.main,
25162 mediaType = settings.mediaTypes[type];
25163
25164 var activeTrack = mediaType.activeTrack();
25165 var activeGroup = mediaType.activeGroup(activeTrack);
25166 var previousActiveLoader = mediaType.activePlaylistLoader;
25167
25168 stopLoaders(segmentLoader, mediaType);
25169
25170 if (!activeGroup) {
25171 // there is no group active so we do not want to restart loaders
25172 return;
25173 }
25174
25175 if (!activeGroup.playlistLoader) {
25176 // when switching from demuxed audio/video to muxed audio/video (noted by no playlist
25177 // loader for the audio group), we want to do a destructive reset of the main segment
25178 // loader and not restart the audio loaders
25179 mainSegmentLoader.resetEverything();
25180 return;
25181 }
25182
25183 if (previousActiveLoader === activeGroup.playlistLoader) {
25184 // Nothing has actually changed. This can happen because track change events can fire
25185 // multiple times for a "single" change. One for enabling the new active track, and
25186 // one for disabling the track that was active
25187 startLoaders(activeGroup.playlistLoader, mediaType);
25188 return;
25189 }
25190
25191 if (segmentLoader.track) {
25192 // For WebVTT, set the new text track in the segmentloader
25193 segmentLoader.track(activeTrack);
25194 }
25195
25196 // destructive reset
25197 segmentLoader.resetEverything();
25198
25199 startLoaders(activeGroup.playlistLoader, mediaType);
25200 };
25201 };
25202
var onError = {
  /**
   * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
   * an error.
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Error handler. Logs warning (or error if the playlist is blacklisted) to
   *         console and switches back to default audio track.
   * @function onError.AUDIO
   */
  AUDIO: function AUDIO(type, settings) {
    return function () {
      var segmentLoader = settings.segmentLoaders[type],
          mediaType = settings.mediaTypes[type],
          blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;


      stopLoaders(segmentLoader, mediaType);

      // switch back to default audio track
      var activeTrack = mediaType.activeTrack();
      var activeGroup = mediaType.activeGroup();
      // prefer the variant flagged default; otherwise fall back to the first one
      var id = (activeGroup.filter(function (group) {
        return group.default;
      })[0] || activeGroup[0]).id;
      var defaultTrack = mediaType.tracks[id];

      if (activeTrack === defaultTrack) {
        // Default track encountered an error. All we can do now is blacklist the current
        // rendition and hope another will switch audio groups
        blacklistCurrentPlaylist({
          message: 'Problem encountered loading the default audio track.'
        });
        return;
      }

      // fix: the two sentences previously ran together ("...track.Switching...")
      // because the concatenated strings lacked a separating space
      videojs.log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');

      // enable only the default track
      for (var trackId in mediaType.tracks) {
        mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
      }

      mediaType.onTrackChanged();
    };
  },
  /**
   * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
   * an error.
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Error handler. Logs warning to console and disables the active subtitle track
   * @function onError.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, settings) {
    return function () {
      var segmentLoader = settings.segmentLoaders[type],
          mediaType = settings.mediaTypes[type];


      // fix: insert the missing space between the concatenated sentences
      videojs.log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');

      stopLoaders(segmentLoader, mediaType);

      var track = mediaType.activeTrack();

      if (track) {
        track.mode = 'disabled';
      }

      mediaType.onTrackChanged();
    };
  }
};
25284
var setupListeners = {
  /**
   * Setup event listeners for audio playlist loader
   *
   * @param {String} type
   *        MediaGroup type
   * @param {PlaylistLoader|null} playlistLoader
   *        PlaylistLoader to register listeners on
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function setupListeners.AUDIO
   */
  AUDIO: function AUDIO(type, playlistLoader, settings) {
    if (!playlistLoader) {
      // audio is muxed in with the video; there is no loader to listen to
      return;
    }

    var tech = settings.tech;
    var requestOptions = settings.requestOptions;
    var segmentLoader = settings.segmentLoaders[type];

    playlistLoader.on('loadedmetadata', function () {
      var media = playlistLoader.media();

      segmentLoader.playlist(media, requestOptions);

      // begin downloading segments when the video is already playing, or when
      // this is a VOD source (endList) and the preload setting permits it
      if (!tech.paused() || media.endList && tech.preload() !== 'none') {
        segmentLoader.load();
      }
    });

    playlistLoader.on('loadedplaylist', function () {
      segmentLoader.playlist(playlistLoader.media(), requestOptions);

      // keep the segment loader running whenever the player is not paused
      if (!tech.paused()) {
        segmentLoader.load();
      }
    });

    playlistLoader.on('error', onError[type](type, settings));
  },
  /**
   * Setup event listeners for subtitle playlist loader
   *
   * @param {String} type
   *        MediaGroup type
   * @param {PlaylistLoader|null} playlistLoader
   *        PlaylistLoader to register listeners on
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function setupListeners.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
    var tech = settings.tech;
    var requestOptions = settings.requestOptions;
    var segmentLoader = settings.segmentLoaders[type];
    var mediaType = settings.mediaTypes[type];

    playlistLoader.on('loadedmetadata', function () {
      var media = playlistLoader.media();

      segmentLoader.playlist(media, requestOptions);
      // subtitle segment loaders also need to know which text track is active
      segmentLoader.track(mediaType.activeTrack());

      // begin downloading segments when the video is already playing, or when
      // this is a VOD source (endList) and the preload setting permits it
      if (!tech.paused() || media.endList && tech.preload() !== 'none') {
        segmentLoader.load();
      }
    });

    playlistLoader.on('loadedplaylist', function () {
      segmentLoader.playlist(playlistLoader.media(), requestOptions);

      // keep the segment loader running whenever the player is not paused
      if (!tech.paused()) {
        segmentLoader.load();
      }
    });

    playlistLoader.on('error', onError[type](type, settings));
  }
};
25374
var initialize = {
  /**
   * Setup PlaylistLoaders and AudioTracks for the audio groups
   *
   * Builds one entry per variant of every AUDIO media group: a PlaylistLoader
   * (or null for muxed audio), its event listeners, and a videojs.AudioTrack.
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize.AUDIO
   */
  'AUDIO': function AUDIO(type, settings) {
    var hls = settings.hls,
        sourceType = settings.sourceType,
        segmentLoader = settings.segmentLoaders[type],
        requestOptions = settings.requestOptions,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$ = settings.mediaTypes[type],
        groups = _settings$mediaTypes$.groups,
        tracks = _settings$mediaTypes$.tracks,
        masterPlaylistLoader = settings.masterPlaylistLoader;

    // force a default if we have none
    // (a synthetic "main" group with a single default variant, so the rest of
    // the media-group machinery always has something to select)

    if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
      mediaGroups[type] = { main: { default: { default: true } } };
    }

    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      // List of playlists that have an AUDIO attribute value matching the current
      // group ID

      for (var variantLabel in mediaGroups[type][groupId]) {
        var properties = mediaGroups[type][groupId][variantLabel];
        var playlistLoader = void 0;

        if (properties.resolvedUri) {
          // demuxed audio with its own playlist URI
          playlistLoader = new PlaylistLoader(properties.resolvedUri, hls, requestOptions);
        } else if (properties.playlists && sourceType === 'dash') {
          // DASH audio variants carry pre-parsed playlists instead of a URI
          playlistLoader = new DashPlaylistLoader(properties.playlists[0], hls, requestOptions, masterPlaylistLoader);
        } else {
          // no resolvedUri means the audio is muxed with the video when using this
          // audio track
          playlistLoader = null;
        }

        // attach the id and loader to a copy of the variant's properties
        properties = videojs.mergeOptions({ id: variantLabel, playlistLoader: playlistLoader }, properties);

        setupListeners[type](type, properties.playlistLoader, settings);

        groups[groupId].push(properties);

        // create the AudioTrack once; variants sharing a label across groups
        // share a single track
        if (typeof tracks[variantLabel] === 'undefined') {
          var track = new videojs.AudioTrack({
            id: variantLabel,
            kind: audioTrackKind_(properties),
            enabled: false,
            language: properties.language,
            default: properties.default,
            label: variantLabel
          });

          tracks[variantLabel] = track;
        }
      }
    }

    // setup single error event handler for the segment loader
    segmentLoader.on('error', onError[type](type, settings));
  },
  /**
   * Setup PlaylistLoaders and TextTracks for the subtitle groups
   *
   * Builds one entry per non-forced variant of every SUBTITLES media group: a
   * PlaylistLoader, its event listeners, and a remote "subtitles" TextTrack.
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize.SUBTITLES
   */
  'SUBTITLES': function SUBTITLES(type, settings) {
    var tech = settings.tech,
        hls = settings.hls,
        sourceType = settings.sourceType,
        segmentLoader = settings.segmentLoaders[type],
        requestOptions = settings.requestOptions,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$2 = settings.mediaTypes[type],
        groups = _settings$mediaTypes$2.groups,
        tracks = _settings$mediaTypes$2.tracks,
        masterPlaylistLoader = settings.masterPlaylistLoader;


    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        if (mediaGroups[type][groupId][variantLabel].forced) {
          // Subtitle playlists with the forced attribute are not selectable in Safari.
          // According to Apple's HLS Authoring Specification:
          //   If content has forced subtitles and regular subtitles in a given language,
          //   the regular subtitles track in that language MUST contain both the forced
          //   subtitles and the regular subtitles for that language.
          // Because of this requirement and that Safari does not add forced subtitles,
          // forced subtitles are skipped here to maintain consistent experience across
          // all platforms
          continue;
        }

        var properties = mediaGroups[type][groupId][variantLabel];

        var playlistLoader = void 0;

        if (sourceType === 'hls') {
          playlistLoader = new PlaylistLoader(properties.resolvedUri, hls, requestOptions);
        } else if (sourceType === 'dash') {
          playlistLoader = new DashPlaylistLoader(properties.playlists[0], hls, requestOptions, masterPlaylistLoader);
        }

        // attach the id and loader to a copy of the variant's properties
        properties = videojs.mergeOptions({
          id: variantLabel,
          playlistLoader: playlistLoader
        }, properties);

        setupListeners[type](type, properties.playlistLoader, settings);

        groups[groupId].push(properties);

        // create the TextTrack once per label
        if (typeof tracks[variantLabel] === 'undefined') {
          var track = tech.addRemoteTextTrack({
            id: variantLabel,
            kind: 'subtitles',
            default: properties.default && properties.autoselect,
            language: properties.language,
            label: variantLabel
          }, false).track;

          tracks[variantLabel] = track;
        }
      }
    }

    // setup single error event handler for the segment loader
    segmentLoader.on('error', onError[type](type, settings));
  },
  /**
   * Setup TextTracks for the closed-caption groups
   *
   * Closed captions are carried inside the video stream, so no PlaylistLoader
   * is created — only remote "captions" TextTracks for CC1-CC4 services.
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize['CLOSED-CAPTIONS']
   */
  'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
    var tech = settings.tech,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$3 = settings.mediaTypes[type],
        groups = _settings$mediaTypes$3.groups,
        tracks = _settings$mediaTypes$3.tracks;


    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        var properties = mediaGroups[type][groupId][variantLabel];

        // We only support CEA608 captions for now, so ignore anything that
        // doesn't use a CCx INSTREAM-ID
        if (!properties.instreamId.match(/CC\d/)) {
          continue;
        }

        // No PlaylistLoader is required for Closed-Captions because the captions are
        // embedded within the video stream
        groups[groupId].push(videojs.mergeOptions({ id: variantLabel }, properties));

        // create the TextTrack once per label; note the track id is the
        // INSTREAM-ID (e.g. "CC1"), not the variant label
        if (typeof tracks[variantLabel] === 'undefined') {
          var track = tech.addRemoteTextTrack({
            id: properties.instreamId,
            kind: 'captions',
            default: properties.default && properties.autoselect,
            language: properties.language,
            label: variantLabel
          }, false).track;

          tracks[variantLabel] = track;
        }
      }
    }
  }
};
25574
25575 /**
25576 * Returns a function used to get the active group of the provided type
25577 *
25578 * @param {String} type
25579 * MediaGroup type
25580 * @param {Object} settings
25581 * Object containing required information for media groups
25582 * @return {Function}
25583 * Function that returns the active media group for the provided type. Takes an
25584 * optional parameter {TextTrack} track. If no track is provided, a list of all
25585 * variants in the group, otherwise the variant corresponding to the provided
25586 * track is returned.
25587 * @function activeGroup
25588 */
25589 var activeGroup = function activeGroup(type, settings) {
25590 return function (track) {
25591 var masterPlaylistLoader = settings.masterPlaylistLoader,
25592 groups = settings.mediaTypes[type].groups;
25593
25594
25595 var media = masterPlaylistLoader.media();
25596
25597 if (!media) {
25598 return null;
25599 }
25600
25601 var variants = null;
25602
25603 if (media.attributes[type]) {
25604 variants = groups[media.attributes[type]];
25605 }
25606
25607 variants = variants || groups.main;
25608
25609 if (typeof track === 'undefined') {
25610 return variants;
25611 }
25612
25613 if (track === null) {
25614 // An active track was specified so a corresponding group is expected. track === null
25615 // means no track is currently active so there is no corresponding group
25616 return null;
25617 }
25618
25619 return variants.filter(function (props) {
25620 return props.id === track.id;
25621 })[0] || null;
25622 };
25623 };
25624
var activeTrack = {
  /**
   * Returns a function used to get the active track of type provided
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Function that returns the active media track for the provided type. Returns
   *         null if no track is active
   * @function activeTrack.AUDIO
   */
  AUDIO: function AUDIO(type, settings) {
    return function () {
      var tracks = settings.mediaTypes[type].tracks;
      var ids = Object.keys(tracks);

      // audio tracks use the `enabled` flag to mark the active one
      for (var i = 0; i < ids.length; i++) {
        if (tracks[ids[i]].enabled) {
          return tracks[ids[i]];
        }
      }

      return null;
    };
  },
  /**
   * Returns a function used to get the active track of type provided
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Function that returns the active media track for the provided type. Returns
   *         null if no track is active
   * @function activeTrack.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, settings) {
    return function () {
      var tracks = settings.mediaTypes[type].tracks;
      var ids = Object.keys(tracks);

      // text tracks are active when their mode is "showing" or "hidden"
      for (var i = 0; i < ids.length; i++) {
        var mode = tracks[ids[i]].mode;

        if (mode === 'showing' || mode === 'hidden') {
          return tracks[ids[i]];
        }
      }

      return null;
    };
  }
};
25679
25680 /**
25681 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
25682 * Closed-Captions) specified in the master manifest.
25683 *
25684 * @param {Object} settings
25685 * Object containing required information for setting up the media groups
25686 * @param {SegmentLoader} settings.segmentLoaders.AUDIO
25687 * Audio segment loader
25688 * @param {SegmentLoader} settings.segmentLoaders.SUBTITLES
25689 * Subtitle segment loader
25690 * @param {SegmentLoader} settings.segmentLoaders.main
25691 * Main segment loader
25692 * @param {Tech} settings.tech
25693 * The tech of the player
25694 * @param {Object} settings.requestOptions
25695 * XHR request options used by the segment loaders
25696 * @param {PlaylistLoader} settings.masterPlaylistLoader
25697 * PlaylistLoader for the master source
25698 * @param {HlsHandler} settings.hls
25699 * HLS SourceHandler
25700 * @param {Object} settings.master
25701 * The parsed master manifest
25702 * @param {Object} settings.mediaTypes
25703 * Object to store the loaders, tracks, and utility methods for each media type
25704 * @param {Function} settings.blacklistCurrentPlaylist
25705 * Blacklists the current rendition and forces a rendition switch.
25706 * @function setupMediaGroups
25707 */
25708 var setupMediaGroups = function setupMediaGroups(settings) {
25709 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
25710 initialize[type](type, settings);
25711 });
25712
25713 var mediaTypes = settings.mediaTypes,
25714 masterPlaylistLoader = settings.masterPlaylistLoader,
25715 tech = settings.tech,
25716 hls = settings.hls;
25717
25718 // setup active group and track getters and change event handlers
25719
25720 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
25721 mediaTypes[type].activeGroup = activeGroup(type, settings);
25722 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
25723 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
25724 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
25725 });
25726
25727 // DO NOT enable the default subtitle or caption track.
25728 // DO enable the default audio track
25729 var audioGroup = mediaTypes.AUDIO.activeGroup();
25730 var groupId = (audioGroup.filter(function (group) {
25731 return group.default;
25732 })[0] || audioGroup[0]).id;
25733
25734 mediaTypes.AUDIO.tracks[groupId].enabled = true;
25735 mediaTypes.AUDIO.onTrackChanged();
25736
25737 masterPlaylistLoader.on('mediachange', function () {
25738 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
25739 return mediaTypes[type].onGroupChanged();
25740 });
25741 });
25742
25743 // custom audio track change event handler for usage event
25744 var onAudioTrackChanged = function onAudioTrackChanged() {
25745 mediaTypes.AUDIO.onTrackChanged();
25746 tech.trigger({ type: 'usage', name: 'hls-audio-change' });
25747 };
25748
25749 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
25750 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
25751
25752 hls.on('dispose', function () {
25753 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
25754 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
25755 });
25756
25757 // clear existing audio tracks and add the ones we just created
25758 tech.clearTracks('audio');
25759
25760 for (var id in mediaTypes.AUDIO.tracks) {
25761 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
25762 }
25763 };
25764
25765 /**
25766 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
25767 * media type
25768 *
25769 * @return {Object}
25770 * Object to store the loaders, tracks, and utility methods for each media type
25771 * @function createMediaTypes
25772 */
25773 var createMediaTypes = function createMediaTypes() {
25774 var mediaTypes = {};
25775
25776 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
25777 mediaTypes[type] = {
25778 groups: {},
25779 tracks: {},
25780 activePlaylistLoader: null,
25781 activeGroup: noop,
25782 activeTrack: noop,
25783 onGroupChanged: noop,
25784 onTrackChanged: noop
25785 };
25786 });
25787
25788 return mediaTypes;
25789 };
25790
25791 /**
25792 * @file master-playlist-controller.js
25793 */
25794
// two minutes, expressed in seconds
var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;

// assigned the externally-provided Hls object when the controller is constructed
var Hls = void 0;

// SegmentLoader stats that need to have each loader's
// values summed to calculate the final value
var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred'];

/**
 * Sums the named stat across the audio and main segment loaders. Intended to
 * be invoked with `this` bound to a MasterPlaylistController instance.
 *
 * @param {String} stat the stat property name to sum
 * @return {Number} the combined value from both segment loaders
 */
var sumLoaderStat = function sumLoaderStat(stat) {
  var audioValue = this.audioSegmentLoader_[stat];
  var mainValue = this.mainSegmentLoader_[stat];

  return audioValue + mainValue;
};
/**
 * Decides whether playback should switch from the current playlist to the
 * proposed next playlist.
 *
 * @param {Object} options
 * @param {Object} options.currentPlaylist the playlist currently playing
 * @param {Object} options.nextPlaylist the candidate playlist
 * @param {Number} options.forwardBuffer seconds buffered ahead of currentTime
 * @param {Number} options.bufferLowWaterLine minimum buffer before upswitching
 * @param {Number} options.duration total duration of the source
 * @return {Boolean} true when the switch should happen
 */
var shouldSwitchToMedia = function shouldSwitchToMedia(options) {
  var currentPlaylist = options.currentPlaylist;
  var nextPlaylist = options.nextPlaylist;
  var forwardBuffer = options.forwardBuffer;
  var bufferLowWaterLine = options.bufferLowWaterLine;
  var duration$$1 = options.duration;
  var log = options.log;

  // we have no other playlist to switch to
  if (!nextPlaylist) {
    videojs.log.warn('We received no playlist to switch to. Please check your stream.');
    return false;
  }

  // If the playlist is live, then we want to not take low water line into account.
  // This is because in LIVE, the player plays 3 segments from the end of the
  // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration availble
  // in those segments, a viewer will never experience a rendition upswitch.
  if (!currentPlaylist.endList) {
    return true;
  }

  // For the same reason as LIVE, we ignore the low water line when the VOD
  // duration is below the max potential low water line
  if (duration$$1 < Config.MAX_BUFFER_LOW_WATER_LINE) {
    return true;
  }

  // we want to switch down to lower resolutions quickly to continue playback, but
  if (nextPlaylist.attributes.BANDWIDTH < currentPlaylist.attributes.BANDWIDTH) {
    return true;
  }

  // only switch up once enough buffer is built to survive loading the
  // higher rendition
  return forwardBuffer >= bufferLowWaterLine;
};
25846
25847 /**
25848 * the master playlist controller controller all interactons
25849 * between playlists and segmentloaders. At this time this mainly
25850 * involves a master playlist and a series of audio playlists
25851 * if they are available
25852 *
25853 * @class MasterPlaylistController
25854 * @extends videojs.EventTarget
25855 */
25856 var MasterPlaylistController = function (_videojs$EventTarget) {
25857 inherits(MasterPlaylistController, _videojs$EventTarget);
25858
  /**
   * Set up the controller for a single source: store the options, create the
   * media source, sync controller and decrypter, build the three segment
   * loaders (main, alternate audio, subtitles/VTT), wire up the playlist
   * loader listeners, and kick off the initial master playlist request.
   *
   * @param {Object} options
   * @param {string} options.url url of the master playlist; required (throws otherwise)
   * @param {boolean} options.handleManifestRedirects forwarded into requestOptions_
   * @param {boolean} options.withCredentials forwarded into requestOptions_ and stored
   * @param {Object} options.tech the playback tech in use
   * @param {number} options.bandwidth initial bandwidth, forwarded to the segment loaders
   * @param {Object} options.externHls external Hls object, stored in the module-level Hls
   * @param {boolean} options.useCueTags when true an 'ad-cues' metadata text track is created
   * @param {number} options.blacklistDuration default blacklist duration (seconds)
   * @param {boolean} options.enableLowInitialPlaylist stored for initial playlist selection
   * @param {boolean} options.cacheEncryptionKeys forwarded to the segment loaders
   * @param {string} options.sourceType 'dash' selects DashPlaylistLoader, anything else PlaylistLoader
   */
  function MasterPlaylistController(options) {
    classCallCheck(this, MasterPlaylistController);

    // transpiled super() call into videojs.EventTarget
    var _this = possibleConstructorReturn(this, (MasterPlaylistController.__proto__ || Object.getPrototypeOf(MasterPlaylistController)).call(this));

    var url = options.url,
        handleManifestRedirects = options.handleManifestRedirects,
        withCredentials = options.withCredentials,
        tech = options.tech,
        bandwidth = options.bandwidth,
        externHls = options.externHls,
        useCueTags = options.useCueTags,
        blacklistDuration = options.blacklistDuration,
        enableLowInitialPlaylist = options.enableLowInitialPlaylist,
        cacheEncryptionKeys = options.cacheEncryptionKeys,
        sourceType = options.sourceType;


    if (!url) {
      throw new Error('A non-empty playlist URL is required');
    }

    // module-level reference used by the other methods in this file
    Hls = externHls;

    _this.withCredentials = withCredentials;
    _this.tech_ = tech;
    _this.hls_ = tech.hls;
    _this.sourceType_ = sourceType;
    _this.useCueTags_ = useCueTags;
    _this.blacklistDuration = blacklistDuration;
    _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
    if (_this.useCueTags_) {
      // in-band metadata track used by updateAdCues_ to surface ad cues
      _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
      _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
    }

    // shared request options for playlist loads; timeout stays null until a
    // media playlist (and its target duration) is known
    _this.requestOptions_ = {
      withCredentials: withCredentials,
      handleManifestRedirects: handleManifestRedirects,
      timeout: null
    };

    _this.mediaTypes_ = createMediaTypes();

    _this.mediaSource = new videojs.MediaSource();

    // load the media source into the player
    _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_.bind(_this));

    _this.seekable_ = videojs.createTimeRanges();
    _this.hasPlayed_ = false;

    _this.syncController_ = new SyncController(options);
    _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
      kind: 'metadata',
      label: 'segment-metadata'
    }, false).track;

    _this.decrypter_ = new Decrypter$1();
    _this.inbandTextTracks_ = {};

    // options shared by all three segment loaders; the function-valued
    // entries are closures so loaders always observe current state
    var segmentLoaderSettings = {
      hls: _this.hls_,
      mediaSource: _this.mediaSource,
      currentTime: _this.tech_.currentTime.bind(_this.tech_),
      seekable: function seekable$$1() {
        return _this.seekable();
      },
      seeking: function seeking() {
        return _this.tech_.seeking();
      },
      duration: function duration$$1() {
        return _this.mediaSource.duration;
      },
      hasPlayed: function hasPlayed() {
        return _this.hasPlayed_;
      },
      goalBufferLength: function goalBufferLength() {
        return _this.goalBufferLength();
      },
      bandwidth: bandwidth,
      syncController: _this.syncController_,
      decrypter: _this.decrypter_,
      sourceType: _this.sourceType_,
      inbandTextTracks: _this.inbandTextTracks_,
      cacheEncryptionKeys: cacheEncryptionKeys
    };

    // DASH sources get their own loader type; both expose the same API
    _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(url, _this.hls_, _this.requestOptions_) : new PlaylistLoader(url, _this.hls_, _this.requestOptions_);
    _this.setupMasterPlaylistLoaderListeners_();

    // setup segment loaders
    // combined audio/video or just video when alternate audio track is selected
    _this.mainSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      segmentMetadataTrack: _this.segmentMetadataTrack_,
      loaderType: 'main'
    }), options);

    // alternate audio track
    _this.audioSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      loaderType: 'audio'
    }), options);

    _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      loaderType: 'vtt',
      featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
    }), options);

    _this.setupSegmentLoaderListeners_();

    // Create SegmentLoader stat-getters
    loaderStats.forEach(function (stat) {
      _this[stat + '_'] = sumLoaderStat.bind(_this, stat);
    });

    _this.logger_ = logger('MPC');

    // kick off the initial master playlist request
    _this.masterPlaylistLoader_.load();
    return _this;
  }
25979
25980 /**
25981 * Register event handlers on the master playlist loader. A helper
25982 * function for construction time.
25983 *
25984 * @private
25985 */
25986
25987
25988 createClass(MasterPlaylistController, [{
25989 key: 'setupMasterPlaylistLoaderListeners_',
    value: function setupMasterPlaylistLoaderListeners_() {
      var _this2 = this;

      // fired once the first media playlist has been parsed
      this.masterPlaylistLoader_.on('loadedmetadata', function () {
        var media = _this2.masterPlaylistLoader_.media();
        // request timeout: 1.5x the segment target duration, in milliseconds
        var requestTimeout = media.targetDuration * 1.5 * 1000;

        // If we don't have any more available playlists, we don't want to
        // timeout the request.
        if (isLowestEnabledRendition(_this2.masterPlaylistLoader_.master, _this2.masterPlaylistLoader_.media())) {
          _this2.requestOptions_.timeout = 0;
        } else {
          _this2.requestOptions_.timeout = requestTimeout;
        }

        // if this isn't a live video and preload permits, start
        // downloading segments
        if (media.endList && _this2.tech_.preload() !== 'none') {
          _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);
          _this2.mainSegmentLoader_.load();
        }

        // wire up alternate audio and subtitle media groups
        setupMediaGroups({
          sourceType: _this2.sourceType_,
          segmentLoaders: {
            AUDIO: _this2.audioSegmentLoader_,
            SUBTITLES: _this2.subtitleSegmentLoader_,
            main: _this2.mainSegmentLoader_
          },
          tech: _this2.tech_,
          requestOptions: _this2.requestOptions_,
          masterPlaylistLoader: _this2.masterPlaylistLoader_,
          hls: _this2.hls_,
          master: _this2.master(),
          mediaTypes: _this2.mediaTypes_,
          blacklistCurrentPlaylist: _this2.blacklistCurrentPlaylist.bind(_this2)
        });

        // emit the once-per-source usage events describing this stream
        _this2.triggerPresenceUsage_(_this2.master(), media);

        try {
          _this2.setupSourceBuffers_();
        } catch (e) {
          // buffers could not be created; end the stream with a decode error
          videojs.log.warn('Failed to create SourceBuffers', e);
          return _this2.mediaSource.endOfStream('decode');
        }
        _this2.setupFirstPlay();

        if (!_this2.mediaTypes_.AUDIO.activePlaylistLoader || _this2.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
          _this2.trigger('selectedinitialmedia');
        } else {
          // We must wait for the active audio playlist loader to
          // finish setting up before triggering this event so the
          // representations API and EME setup is correct
          _this2.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
            _this2.trigger('selectedinitialmedia');
          });
        }
      });

      // fired on every (re)load of the master or selected media playlist
      this.masterPlaylistLoader_.on('loadedplaylist', function () {
        var updatedPlaylist = _this2.masterPlaylistLoader_.media();

        if (!updatedPlaylist) {
          // no media selected yet: choose and activate the initial playlist
          // blacklist any variants that are not supported by the browser before selecting
          // an initial media as the playlist selectors do not consider browser support
          _this2.excludeUnsupportedVariants_();

          var selectedMedia = void 0;

          if (_this2.enableLowInitialPlaylist) {
            selectedMedia = _this2.selectInitialPlaylist();
          }

          if (!selectedMedia) {
            selectedMedia = _this2.selectPlaylist();
          }

          _this2.initialMedia_ = selectedMedia;
          _this2.masterPlaylistLoader_.media(_this2.initialMedia_);
          return;
        }

        if (_this2.useCueTags_) {
          _this2.updateAdCues_(updatedPlaylist);
        }

        // TODO: Create a new event on the PlaylistLoader that signals
        // that the segments have changed in some way and use that to
        // update the SegmentLoader instead of doing it twice here and
        // on `mediachange`
        _this2.mainSegmentLoader_.playlist(updatedPlaylist, _this2.requestOptions_);
        _this2.updateDuration();

        // If the player isn't paused, ensure that the segment loader is running,
        // as it is possible that it was temporarily stopped while waiting for
        // a playlist (e.g., in case the playlist errored and we re-requested it).
        if (!_this2.tech_.paused()) {
          _this2.mainSegmentLoader_.load();
          if (_this2.audioSegmentLoader_) {
            _this2.audioSegmentLoader_.load();
          }
        }

        if (!updatedPlaylist.endList) {
          // live stream: advertise the seekable window on the media source
          var addSeekableRange = function addSeekableRange() {
            var seekable$$1 = _this2.seekable();

            if (seekable$$1.length !== 0) {
              _this2.mediaSource.addSeekableRange_(seekable$$1.start(0), seekable$$1.end(0));
            }
          };

          if (_this2.duration() !== Infinity) {
            // wait until the duration becomes Infinity before adding the
            // range, re-arming the one-shot listener until it does
            var onDurationchange = function onDurationchange() {
              if (_this2.duration() === Infinity) {
                addSeekableRange();
              } else {
                _this2.tech_.one('durationchange', onDurationchange);
              }
            };

            _this2.tech_.one('durationchange', onDurationchange);
          } else {
            addSeekableRange();
          }
        }
      });

      // a playlist failed to load: blacklist it and pick another
      this.masterPlaylistLoader_.on('error', function () {
        _this2.blacklistCurrentPlaylist(_this2.masterPlaylistLoader_.error);
      });

      // a rendition switch is starting: stop in-flight main segment work
      this.masterPlaylistLoader_.on('mediachanging', function () {
        _this2.mainSegmentLoader_.abort();
        _this2.mainSegmentLoader_.pause();
      });

      // a rendition switch completed
      this.masterPlaylistLoader_.on('mediachange', function () {
        var media = _this2.masterPlaylistLoader_.media();
        var requestTimeout = media.targetDuration * 1.5 * 1000;

        // If we don't have any more available playlists, we don't want to
        // timeout the request.
        if (isLowestEnabledRendition(_this2.masterPlaylistLoader_.master, _this2.masterPlaylistLoader_.media())) {
          _this2.requestOptions_.timeout = 0;
        } else {
          _this2.requestOptions_.timeout = requestTimeout;
        }

        // TODO: Create a new event on the PlaylistLoader that signals
        // that the segments have changed in some way and use that to
        // update the SegmentLoader instead of doing it twice here and
        // on `loadedplaylist`
        _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);

        _this2.mainSegmentLoader_.load();

        _this2.tech_.trigger({
          type: 'mediachange',
          bubbles: true
        });
      });

      // the playlist refreshed without changing; detect the "stuck at the
      // end of a stalled live playlist" case
      this.masterPlaylistLoader_.on('playlistunchanged', function () {
        var updatedPlaylist = _this2.masterPlaylistLoader_.media();
        var playlistOutdated = _this2.stuckAtPlaylistEnd_(updatedPlaylist);

        if (playlistOutdated) {
          // Playlist has stopped updating and we're stuck at its end. Try to
          // blacklist it and switch to another playlist in the hope that that
          // one is updating (and give the player a chance to re-adjust to the
          // safe live point).
          _this2.blacklistCurrentPlaylist({
            message: 'Playlist no longer updating.'
          });
          // useful for monitoring QoS
          _this2.tech_.trigger('playliststuck');
        }
      });

      // forward rendition enable/disable as usage events on the tech
      this.masterPlaylistLoader_.on('renditiondisabled', function () {
        _this2.tech_.trigger({ type: 'usage', name: 'hls-rendition-disabled' });
      });
      this.masterPlaylistLoader_.on('renditionenabled', function () {
        _this2.tech_.trigger({ type: 'usage', name: 'hls-rendition-enabled' });
      });
    }
26178
26179 /**
26180 * A helper function for triggerring presence usage events once per source
26181 *
26182 * @private
26183 */
26184
26185 }, {
26186 key: 'triggerPresenceUsage_',
26187 value: function triggerPresenceUsage_(master, media) {
26188 var mediaGroups = master.mediaGroups || {};
26189 var defaultDemuxed = true;
26190 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
26191
26192 for (var mediaGroup in mediaGroups.AUDIO) {
26193 for (var label in mediaGroups.AUDIO[mediaGroup]) {
26194 var properties = mediaGroups.AUDIO[mediaGroup][label];
26195
26196 if (!properties.uri) {
26197 defaultDemuxed = false;
26198 }
26199 }
26200 }
26201
26202 if (defaultDemuxed) {
26203 this.tech_.trigger({ type: 'usage', name: 'hls-demuxed' });
26204 }
26205
26206 if (Object.keys(mediaGroups.SUBTITLES).length) {
26207 this.tech_.trigger({ type: 'usage', name: 'hls-webvtt' });
26208 }
26209
26210 if (Hls.Playlist.isAes(media)) {
26211 this.tech_.trigger({ type: 'usage', name: 'hls-aes' });
26212 }
26213
26214 if (Hls.Playlist.isFmp4(media)) {
26215 this.tech_.trigger({ type: 'usage', name: 'hls-fmp4' });
26216 }
26217
26218 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
26219 this.tech_.trigger({ type: 'usage', name: 'hls-alternate-audio' });
26220 }
26221
26222 if (this.useCueTags_) {
26223 this.tech_.trigger({ type: 'usage', name: 'hls-playlist-cue-tags' });
26224 }
26225 }
26226 /**
26227 * Register event handlers on the segment loaders. A helper function
26228 * for construction time.
26229 *
26230 * @private
26231 */
26232
26233 }, {
26234 key: 'setupSegmentLoaderListeners_',
    value: function setupSegmentLoaderListeners_() {
      var _this3 = this;

      // a fresh bandwidth estimate is available: re-run playlist selection
      // and switch renditions if the ABR heuristics say so
      this.mainSegmentLoader_.on('bandwidthupdate', function () {
        var nextPlaylist = _this3.selectPlaylist();
        var currentPlaylist = _this3.masterPlaylistLoader_.media();
        var buffered = _this3.tech_.buffered();
        // seconds buffered ahead of the playhead (0 when nothing buffered)
        var forwardBuffer = buffered.length ? buffered.end(buffered.length - 1) - _this3.tech_.currentTime() : 0;

        var bufferLowWaterLine = _this3.bufferLowWaterLine();

        if (shouldSwitchToMedia({
          currentPlaylist: currentPlaylist,
          nextPlaylist: nextPlaylist,
          forwardBuffer: forwardBuffer,
          bufferLowWaterLine: bufferLowWaterLine,
          duration: _this3.duration(),
          log: _this3.logger_
        })) {
          _this3.masterPlaylistLoader_.media(nextPlaylist);
        }

        _this3.tech_.trigger('bandwidthupdate');
      });
      // re-emit download progress to this controller's listeners
      this.mainSegmentLoader_.on('progress', function () {
        _this3.trigger('progress');
      });

      // a segment failed: blacklist the current playlist and move on
      this.mainSegmentLoader_.on('error', function () {
        _this3.blacklistCurrentPlaylist(_this3.mainSegmentLoader_.error());
      });

      // sync info changed on either loader: recompute the seekable window
      this.mainSegmentLoader_.on('syncinfoupdate', function () {
        _this3.onSyncInfoUpdate_();
      });

      this.mainSegmentLoader_.on('timestampoffset', function () {
        _this3.tech_.trigger({ type: 'usage', name: 'hls-timestamp-offset' });
      });
      this.audioSegmentLoader_.on('syncinfoupdate', function () {
        _this3.onSyncInfoUpdate_();
      });

      // either loader finishing may complete the whole stream
      this.mainSegmentLoader_.on('ended', function () {
        _this3.onEndOfStream();
      });

      // a segment request was abandoned for being too slow; blacklist the
      // rendition briefly instead of rebuffering on it
      this.mainSegmentLoader_.on('earlyabort', function () {
        _this3.blacklistCurrentPlaylist({
          message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
        }, ABORT_EARLY_BLACKLIST_SECONDS);
      });

      this.mainSegmentLoader_.on('reseteverything', function () {
        // If playing an MTS stream, a videojs.MediaSource is listening for
        // hls-reset to reset caption parsing state in the transmuxer
        _this3.tech_.trigger('hls-reset');
      });

      this.mainSegmentLoader_.on('segmenttimemapping', function (event) {
        // If playing an MTS stream in html, a videojs.MediaSource is listening for
        // hls-segment-time-mapping update its internal mapping of stream to display time
        _this3.tech_.trigger({
          type: 'hls-segment-time-mapping',
          mapping: event.mapping
        });
      });

      this.audioSegmentLoader_.on('ended', function () {
        _this3.onEndOfStream();
      });
    }
26307 }, {
26308 key: 'mediaSecondsLoaded_',
    // Sum of seconds of media loaded by the audio and main segment loaders.
    // NOTE(review): Math.max is called with a SINGLE argument (the sum), so it
    // only coerces the sum to a number; the author may have intended
    // Math.max(audio, main). Behavior intentionally left as-is — confirm
    // intent before changing.
    value: function mediaSecondsLoaded_() {
      return Math.max(this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded);
    }
26312
26313 /**
26314 * Call load on our SegmentLoaders
26315 */
26316
26317 }, {
26318 key: 'load',
26319 value: function load() {
26320 this.mainSegmentLoader_.load();
26321 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
26322 this.audioSegmentLoader_.load();
26323 }
26324 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
26325 this.subtitleSegmentLoader_.load();
26326 }
26327 }
26328
26329 /**
26330 * Re-tune playback quality level for the current player
26331 * conditions without performing destructive actions, like
26332 * removing already buffered content
26333 *
26334 * @private
26335 */
26336
26337 }, {
26338 key: 'smoothQualityChange_',
26339 value: function smoothQualityChange_() {
26340 var media = this.selectPlaylist();
26341
26342 if (media !== this.masterPlaylistLoader_.media()) {
26343 this.masterPlaylistLoader_.media(media);
26344
26345 this.mainSegmentLoader_.resetLoader();
26346 // don't need to reset audio as it is reset when media changes
26347 }
26348 }
26349
26350 /**
26351 * Re-tune playback quality level for the current player
26352 * conditions. This method will perform destructive actions like removing
26353 * already buffered content in order to readjust the currently active
26354 * playlist quickly. This is good for manual quality changes
26355 *
26356 * @private
26357 */
26358
26359 }, {
26360 key: 'fastQualityChange_',
26361 value: function fastQualityChange_() {
26362 var _this4 = this;
26363
26364 var media = this.selectPlaylist();
26365
26366 if (media === this.masterPlaylistLoader_.media()) {
26367 return;
26368 }
26369
26370 this.masterPlaylistLoader_.media(media);
26371
26372 // Delete all buffered data to allow an immediate quality switch, then seek to give
26373 // the browser a kick to remove any cached frames from the previous rendtion (.04 seconds
26374 // ahead is roughly the minimum that will accomplish this across a variety of content
26375 // in IE and Edge, but seeking in place is sufficient on all other browsers)
26376 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
26377 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
26378 this.mainSegmentLoader_.resetEverything(function () {
26379 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
26380 // from the previously enabled rendition to load before the new playlist has finished loading
26381 if (videojs.browser.IE_VERSION || videojs.browser.IS_EDGE) {
26382 _this4.tech_.setCurrentTime(_this4.tech_.currentTime() + 0.04);
26383 } else {
26384 _this4.tech_.setCurrentTime(_this4.tech_.currentTime());
26385 }
26386 });
26387
26388 // don't need to reset audio as it is reset when media changes
26389 }
26390
26391 /**
26392 * Begin playback.
26393 */
26394
26395 }, {
26396 key: 'play',
26397 value: function play() {
26398 if (this.setupFirstPlay()) {
26399 return;
26400 }
26401
26402 if (this.tech_.ended()) {
26403 this.tech_.setCurrentTime(0);
26404 }
26405
26406 if (this.hasPlayed_) {
26407 this.load();
26408 }
26409
26410 var seekable$$1 = this.tech_.seekable();
26411
26412 // if the viewer has paused and we fell out of the live window,
26413 // seek forward to the live point
26414 if (this.tech_.duration() === Infinity) {
26415 if (this.tech_.currentTime() < seekable$$1.start(0)) {
26416 return this.tech_.setCurrentTime(seekable$$1.end(seekable$$1.length - 1));
26417 }
26418 }
26419 }
26420
26421 /**
26422 * Seek to the latest media position if this is a live video and the
26423 * player and video are loaded and initialized.
26424 */
26425
26426 }, {
26427 key: 'setupFirstPlay',
    value: function setupFirstPlay() {
      var _this5 = this;

      var media = this.masterPlaylistLoader_.media();

      // Check that everything is ready to begin buffering for the first call to play
      // If 1) there is no active media
      //    2) the player is paused
      //    3) the first play has already been setup
      // then exit early
      if (!media || this.tech_.paused() || this.hasPlayed_) {
        return false;
      }

      // when the video is a live stream
      if (!media.endList) {
        var seekable$$1 = this.seekable();

        if (!seekable$$1.length) {
          // without a seekable range, the player cannot seek to begin buffering at the live
          // point
          return false;
        }

        if (videojs.browser.IE_VERSION && this.tech_.readyState() === 0) {
          // IE11 throws an InvalidStateError if you try to set currentTime while the
          // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
          this.tech_.one('loadedmetadata', function () {
            _this5.trigger('firstplay');
            _this5.tech_.setCurrentTime(seekable$$1.end(0));
            _this5.hasPlayed_ = true;
          });

          // report first play as not set up yet; the deferred handler above
          // performs the seek once metadata is available
          return false;
        }

        // trigger firstplay to inform the source handler to ignore the next seek event
        this.trigger('firstplay');
        // seek to the live point
        this.tech_.setCurrentTime(seekable$$1.end(0));
      }

      this.hasPlayed_ = true;
      // we can begin loading now that everything is ready
      this.load();
      return true;
    }
26475
26476 /**
26477 * handle the sourceopen event on the MediaSource
26478 *
26479 * @private
26480 */
26481
26482 }, {
26483 key: 'handleSourceOpen_',
26484 value: function handleSourceOpen_() {
26485 // Only attempt to create the source buffer if none already exist.
26486 // handleSourceOpen is also called when we are "re-opening" a source buffer
26487 // after `endOfStream` has been called (in response to a seek for instance)
26488 try {
26489 this.setupSourceBuffers_();
26490 } catch (e) {
26491 videojs.log.warn('Failed to create Source Buffers', e);
26492 return this.mediaSource.endOfStream('decode');
26493 }
26494
26495 // if autoplay is enabled, begin playback. This is duplicative of
26496 // code in video.js but is required because play() must be invoked
26497 // *after* the media source has opened.
26498 if (this.tech_.autoplay()) {
26499 var playPromise = this.tech_.play();
26500
26501 // Catch/silence error when a pause interrupts a play request
26502 // on browsers which return a promise
26503 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
26504 playPromise.then(null, function (e) {});
26505 }
26506 }
26507
26508 this.trigger('sourceopen');
26509 }
26510
26511 /**
26512 * Calls endOfStream on the media source when all active stream types have called
26513 * endOfStream
26514 *
26515 * @param {string} streamType
26516 * Stream type of the segment loader that called endOfStream
26517 * @private
26518 */
26519
26520 }, {
26521 key: 'onEndOfStream',
26522 value: function onEndOfStream() {
26523 var isEndOfStream = this.mainSegmentLoader_.ended_;
26524
26525 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
26526 // if the audio playlist loader exists, then alternate audio is active
26527 if (!this.mainSegmentLoader_.startingMedia_ || this.mainSegmentLoader_.startingMedia_.containsVideo) {
26528 // if we do not know if the main segment loader contains video yet or if we
26529 // definitively know the main segment loader contains video, then we need to wait
26530 // for both main and audio segment loaders to call endOfStream
26531 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
26532 } else {
26533 // otherwise just rely on the audio loader
26534 isEndOfStream = this.audioSegmentLoader_.ended_;
26535 }
26536 }
26537
26538 if (!isEndOfStream) {
26539 return;
26540 }
26541
26542 this.logger_('calling mediaSource.endOfStream()');
26543 // on chrome calling endOfStream can sometimes cause an exception,
26544 // even when the media source is in a valid state.
26545 try {
26546 this.mediaSource.endOfStream();
26547 } catch (e) {
26548 videojs.log.warn('Failed to call media source endOfStream', e);
26549 }
26550 }
26551
26552 /**
26553 * Check if a playlist has stopped being updated
26554 * @param {Object} playlist the media playlist object
26555 * @return {boolean} whether the playlist has stopped being updated or not
26556 */
26557
26558 }, {
26559 key: 'stuckAtPlaylistEnd_',
26560 value: function stuckAtPlaylistEnd_(playlist) {
26561 var seekable$$1 = this.seekable();
26562
26563 if (!seekable$$1.length) {
26564 // playlist doesn't have enough information to determine whether we are stuck
26565 return false;
26566 }
26567
26568 var expired = this.syncController_.getExpiredTime(playlist, this.mediaSource.duration);
26569
26570 if (expired === null) {
26571 return false;
26572 }
26573
26574 // does not use the safe live end to calculate playlist end, since we
26575 // don't want to say we are stuck while there is still content
26576 var absolutePlaylistEnd = Hls.Playlist.playlistEnd(playlist, expired);
26577 var currentTime = this.tech_.currentTime();
26578 var buffered = this.tech_.buffered();
26579
26580 if (!buffered.length) {
26581 // return true if the playhead reached the absolute end of the playlist
26582 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
26583 }
26584 var bufferedEnd = buffered.end(buffered.length - 1);
26585
26586 // return true if there is too little buffer left and buffer has reached absolute
26587 // end of playlist
26588 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
26589 }
26590
26591 /**
26592 * Blacklists a playlist when an error occurs for a set amount of time
26593 * making it unavailable for selection by the rendition selection algorithm
26594 * and then forces a new playlist (rendition) selection.
26595 *
26596 * @param {Object=} error an optional error that may include the playlist
26597 * to blacklist
26598 * @param {Number=} blacklistDuration an optional number of seconds to blacklist the
26599 * playlist
26600 */
26601
26602 }, {
26603 key: 'blacklistCurrentPlaylist',
    value: function blacklistCurrentPlaylist() {
      // transpiled default parameters: error defaults to {}, blacklistDuration
      // is optional and resolved below
      var error = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
      var blacklistDuration = arguments[1];

      var currentPlaylist = void 0;
      var nextPlaylist = void 0;

      // If the `error` was generated by the playlist loader, it will contain
      // the playlist we were trying to load (but failed) and that should be
      // blacklisted instead of the currently selected playlist which is likely
      // out-of-date in this scenario
      currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();

      // precedence: explicit argument, then the error's own duration, then
      // the controller-wide default
      blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration;

      // If there is no current playlist, then an error occurred while we were
      // trying to load the master OR while we were disposing of the tech
      if (!currentPlaylist) {
        this.error = error;

        try {
          return this.mediaSource.endOfStream('network');
        } catch (e) {
          // endOfStream can throw depending on media source state; fall back
          // to surfacing the error event
          return this.trigger('error');
        }
      }

      var isFinalRendition = this.masterPlaylistLoader_.master.playlists.filter(isEnabled).length === 1;
      var playlists = this.masterPlaylistLoader_.master.playlists;

      if (playlists.length === 1) {
        // Never blacklisting this playlist because it's the only playlist
        videojs.log.warn('Problem encountered with the current ' + 'HLS playlist. Trying again since it is the only playlist.');

        this.tech_.trigger('retryplaylist');
        return this.masterPlaylistLoader_.load(isFinalRendition);
      }

      if (isFinalRendition) {
        // Since we're on the final non-blacklisted playlist, and we're about to blacklist
        // it, instead of erring the player or retrying this playlist, clear out the current
        // blacklist. This allows other playlists to be attempted in case any have been
        // fixed.
        videojs.log.warn('Removing all playlists from the blacklist because the last ' + 'rendition is about to be blacklisted.');
        playlists.forEach(function (playlist) {
          // permanent (Infinity) exclusions are left in place
          if (playlist.excludeUntil !== Infinity) {
            delete playlist.excludeUntil;
          }
        });
        // Technically we are retrying a playlist, in that we are simply retrying a previous
        // playlist. This is needed for users relying on the retryplaylist event to catch a
        // case where the player might be stuck and looping through "dead" playlists.
        this.tech_.trigger('retryplaylist');
      }

      // Blacklist this playlist
      currentPlaylist.excludeUntil = Date.now() + blacklistDuration * 1000;
      this.tech_.trigger('blacklistplaylist');
      this.tech_.trigger({ type: 'usage', name: 'hls-rendition-blacklisted' });

      // Select a new playlist
      nextPlaylist = this.selectPlaylist();
      videojs.log.warn('Problem encountered with the current HLS playlist.' + (error.message ? ' ' + error.message : '') + ' Switching to another playlist.');

      return this.masterPlaylistLoader_.media(nextPlaylist, isFinalRendition);
    }
26670
26671 /**
26672 * Pause all segment loaders
26673 */
26674
26675 }, {
26676 key: 'pauseLoading',
26677 value: function pauseLoading() {
26678 this.mainSegmentLoader_.pause();
26679 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
26680 this.audioSegmentLoader_.pause();
26681 }
26682 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
26683 this.subtitleSegmentLoader_.pause();
26684 }
26685 }
26686
26687 /**
26688 * set the current time on all segment loaders
26689 *
26690 * @param {TimeRange} currentTime the current time to set
26691 * @return {TimeRange} the current time
26692 */
26693
26694 }, {
26695 key: 'setCurrentTime',
26696 value: function setCurrentTime(currentTime) {
26697 var buffered = findRange(this.tech_.buffered(), currentTime);
26698
26699 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
26700 // return immediately if the metadata is not ready yet
26701 return 0;
26702 }
26703
26704 // it's clearly an edge-case but don't thrown an error if asked to
26705 // seek within an empty playlist
26706 if (!this.masterPlaylistLoader_.media().segments) {
26707 return 0;
26708 }
26709
26710 // In flash playback, the segment loaders should be reset on every seek, even
26711 // in buffer seeks. If the seek location is already buffered, continue buffering as
26712 // usual
26713 // TODO: redo this comment
26714 if (buffered && buffered.length) {
26715 return currentTime;
26716 }
26717
26718 // cancel outstanding requests so we begin buffering at the new
26719 // location
26720 this.mainSegmentLoader_.resetEverything();
26721 this.mainSegmentLoader_.abort();
26722 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
26723 this.audioSegmentLoader_.resetEverything();
26724 this.audioSegmentLoader_.abort();
26725 }
26726 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
26727 this.subtitleSegmentLoader_.resetEverything();
26728 this.subtitleSegmentLoader_.abort();
26729 }
26730
26731 // start segment loader loading in case they are paused
26732 this.load();
26733 }
26734
26735 /**
26736 * get the current duration
26737 *
26738 * @return {TimeRange} the duration
26739 */
26740
26741 }, {
26742 key: 'duration',
    value: function duration$$1() {
      if (!this.masterPlaylistLoader_) {
        // no playlist loader: duration is unknown
        return 0;
      }

      if (this.mediaSource) {
        // once a media source exists, its duration is authoritative
        return this.mediaSource.duration;
      }

      // otherwise derive the duration from the playlist itself
      return Hls.Playlist.duration(this.masterPlaylistLoader_.media());
    }
26754
26755 /**
26756 * check the seekable range
26757 *
26758 * @return {TimeRange} the seekable range
26759 */
26760
26761 }, {
26762 key: 'seekable',
    // Return the cached seekable range (recomputed by onSyncInfoUpdate_).
    value: function seekable$$1() {
      return this.seekable_;
    }
26766 }, {
26767 key: 'onSyncInfoUpdate_',
    value: function onSyncInfoUpdate_() {
      var audioSeekable = void 0;

      // bail when disposed / not yet initialized
      if (!this.masterPlaylistLoader_) {
        return;
      }

      var media = this.masterPlaylistLoader_.media();

      if (!media) {
        return;
      }

      var expired = this.syncController_.getExpiredTime(media, this.mediaSource.duration);

      if (expired === null) {
        // not enough information to update seekable
        return;
      }

      var suggestedPresentationDelay = this.masterPlaylistLoader_.master.suggestedPresentationDelay;
      var mainSeekable = Hls.Playlist.seekable(media, expired, suggestedPresentationDelay);

      if (mainSeekable.length === 0) {
        return;
      }

      // with alternate audio active, compute its seekable range as well so
      // the two can be combined below
      if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
        media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
        expired = this.syncController_.getExpiredTime(media, this.mediaSource.duration);

        if (expired === null) {
          return;
        }

        audioSeekable = Hls.Playlist.seekable(media, expired, suggestedPresentationDelay);

        if (audioSeekable.length === 0) {
          return;
        }
      }

      var oldEnd = void 0;
      var oldStart = void 0;

      // remember the previous range so redundant updates can be skipped
      if (this.seekable_ && this.seekable_.length) {
        oldEnd = this.seekable_.end(0);
        oldStart = this.seekable_.start(0);
      }

      if (!audioSeekable) {
        // seekable has been calculated based on buffering video data so it
        // can be returned directly
        this.seekable_ = mainSeekable;
      } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
        // seekables are pretty far off, rely on main
        this.seekable_ = mainSeekable;
      } else {
        // the ranges overlap: use their intersection (later start, earlier end)
        this.seekable_ = videojs.createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
      }

      // seekable is the same as last time
      if (this.seekable_ && this.seekable_.length) {
        if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
          return;
        }
      }

      this.logger_('seekable updated [' + printableRange(this.seekable_) + ']');

      this.tech_.trigger('seekablechanged');
    }
26840
/**
 * Update the player duration
 */

}, {
  key: 'updateDuration',
  value: function updateDuration() {
    var _this6 = this;

    // New duration is the playlist duration, extended to cover anything that
    // is already buffered beyond it.
    var oldDuration = this.mediaSource.duration;
    var newDuration = Hls.Playlist.duration(this.masterPlaylistLoader_.media());
    var buffered = this.tech_.buffered();
    var setDuration = function setDuration() {
      // on firefox setting the duration may sometimes cause an exception
      // even if the media source is open and source buffers are not
      // updating, something about the media source being in an invalid state.
      _this6.logger_('Setting duration from ' + _this6.mediaSource.duration + ' => ' + newDuration);
      try {
        _this6.mediaSource.duration = newDuration;
      } catch (e) {
        videojs.log.warn('Failed to set media source duration', e);
      }
      _this6.tech_.trigger('durationchange');

      // this handler removes itself so it only runs once per registration
      _this6.mediaSource.removeEventListener('sourceopen', setDuration);
    };

    if (buffered.length > 0) {
      newDuration = Math.max(newDuration, buffered.end(buffered.length - 1));
    }

    // if the duration has changed, invalidate the cached value
    if (oldDuration !== newDuration) {
      // update the duration
      if (this.mediaSource.readyState !== 'open') {
        // duration can only be set while the media source is open; defer
        this.mediaSource.addEventListener('sourceopen', setDuration);
      } else {
        setDuration();
      }
    }
  }
26882
26883 /**
26884 * dispose of the MasterPlaylistController and everything
26885 * that it controls
26886 */
26887
26888 }, {
26889 key: 'dispose',
26890 value: function dispose() {
26891 var _this7 = this;
26892
26893 this.trigger('dispose');
26894 if (this.decrypter_) {
26895 this.decrypter_.terminate();
26896 }
26897 this.masterPlaylistLoader_.dispose();
26898 this.mainSegmentLoader_.dispose();
26899
26900 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
26901 var groups = _this7.mediaTypes_[type].groups;
26902
26903 for (var id in groups) {
26904 groups[id].forEach(function (group) {
26905 if (group.playlistLoader) {
26906 group.playlistLoader.dispose();
26907 }
26908 });
26909 }
26910 });
26911
26912 this.audioSegmentLoader_.dispose();
26913 this.subtitleSegmentLoader_.dispose();
26914 this.off();
26915
26916 if (this.mediaSource.dispose) {
26917 this.mediaSource.dispose();
26918 }
26919 }
26920
/**
 * return the master playlist object if we have one
 *
 * @return {Object} the master playlist object that we parsed
 */

}, {
  key: 'master',
  value: function master() {
    // delegates directly to the master playlist loader's parsed manifest
    return this.masterPlaylistLoader_.master;
  }
26932
/**
 * return the currently selected playlist
 *
 * @return {Object} the currently selected playlist object that we parsed
 */

}, {
  key: 'media',
  value: function media() {
    // playlist loader will not return media if it has not been fully loaded;
    // fall back to the media selected at setup time in that case
    return this.masterPlaylistLoader_.media() || this.initialMedia_;
  }
26945
/**
 * setup our internal source buffers on our segment Loaders
 *
 * @private
 */

}, {
  key: 'setupSourceBuffers_',
  value: function setupSourceBuffers_() {
    var media = this.masterPlaylistLoader_.media();
    var mimeTypes = void 0;

    // wait until a media playlist is available and the Media Source is
    // attached
    if (!media || this.mediaSource.readyState !== 'open') {
      return;
    }

    mimeTypes = mimeTypesForPlaylist(this.masterPlaylistLoader_.master, media);
    if (mimeTypes.length < 1) {
      // no usable SourceBuffer configuration: surface an error and signal a
      // decode-level end of stream
      this.error = 'No compatible SourceBuffer configuration for the variant stream:' + media.resolvedUri;
      return this.mediaSource.endOfStream('decode');
    }

    this.configureLoaderMimeTypes_(mimeTypes);
    // exclude any incompatible variant streams from future playlist
    // selection
    this.excludeIncompatibleVariants_(media);
  }
26975 }, {
26976 key: 'configureLoaderMimeTypes_',
26977 value: function configureLoaderMimeTypes_(mimeTypes) {
26978 // If the content is demuxed, we can't start appending segments to a source buffer
26979 // until both source buffers are set up, or else the browser may not let us add the
26980 // second source buffer (it will assume we are playing either audio only or video
26981 // only).
26982 var sourceBufferEmitter =
26983 // If there is more than one mime type
26984 mimeTypes.length > 1 &&
26985 // and the first mime type does not have muxed video and audio
26986 mimeTypes[0].indexOf(',') === -1 &&
26987 // and the two mime types are different (they can be the same in the case of audio
26988 // only with alternate audio)
26989 mimeTypes[0] !== mimeTypes[1] ?
26990 // then we want to wait on the second source buffer
26991 new videojs.EventTarget() :
26992 // otherwise there is no need to wait as the content is either audio only,
26993 // video only, or muxed content.
26994 null;
26995
26996 this.mainSegmentLoader_.mimeType(mimeTypes[0], sourceBufferEmitter);
26997 if (mimeTypes[1]) {
26998 this.audioSegmentLoader_.mimeType(mimeTypes[1], sourceBufferEmitter);
26999 }
27000 }
27001
27002 /**
27003 * Blacklists playlists with codecs that are unsupported by the browser.
27004 */
27005
27006 }, {
27007 key: 'excludeUnsupportedVariants_',
27008 value: function excludeUnsupportedVariants_() {
27009 this.master().playlists.forEach(function (variant) {
27010 if (variant.attributes.CODECS && window_1.MediaSource && window_1.MediaSource.isTypeSupported && !window_1.MediaSource.isTypeSupported('video/mp4; codecs="' + mapLegacyAvcCodecs(variant.attributes.CODECS) + '"')) {
27011 variant.excludeUntil = Infinity;
27012 }
27013 });
27014 }
27015
27016 /**
27017 * Blacklist playlists that are known to be codec or
27018 * stream-incompatible with the SourceBuffer configuration. For
27019 * instance, Media Source Extensions would cause the video element to
27020 * stall waiting for video data if you switched from a variant with
27021 * video and audio to an audio-only one.
27022 *
27023 * @param {Object} media a media playlist compatible with the current
27024 * set of SourceBuffers. Variants in the current master playlist that
27025 * do not appear to have compatible codec or stream configurations
27026 * will be excluded from the default playlist selection algorithm
27027 * indefinitely.
27028 * @private
27029 */
27030
27031 }, {
27032 key: 'excludeIncompatibleVariants_',
27033 value: function excludeIncompatibleVariants_(media) {
27034 var codecCount = 2;
27035 var videoCodec = null;
27036 var codecs = void 0;
27037
27038 if (media.attributes.CODECS) {
27039 codecs = parseCodecs(media.attributes.CODECS);
27040 videoCodec = codecs.videoCodec;
27041 codecCount = codecs.codecCount;
27042 }
27043
27044 this.master().playlists.forEach(function (variant) {
27045 var variantCodecs = {
27046 codecCount: 2,
27047 videoCodec: null
27048 };
27049
27050 if (variant.attributes.CODECS) {
27051 variantCodecs = parseCodecs(variant.attributes.CODECS);
27052 }
27053
27054 // if the streams differ in the presence or absence of audio or
27055 // video, they are incompatible
27056 if (variantCodecs.codecCount !== codecCount) {
27057 variant.excludeUntil = Infinity;
27058 }
27059
27060 // if h.264 is specified on the current playlist, some flavor of
27061 // it must be specified on all compatible variants
27062 if (variantCodecs.videoCodec !== videoCodec) {
27063 variant.excludeUntil = Infinity;
27064 }
27065 });
27066 }
27067 }, {
27068 key: 'updateAdCues_',
27069 value: function updateAdCues_(media) {
27070 var offset = 0;
27071 var seekable$$1 = this.seekable();
27072
27073 if (seekable$$1.length) {
27074 offset = seekable$$1.start(0);
27075 }
27076
27077 updateAdCues(media, this.cueTagsTrack_, offset);
27078 }
27079
27080 /**
27081 * Calculates the desired forward buffer length based on current time
27082 *
27083 * @return {Number} Desired forward buffer length in seconds
27084 */
27085
27086 }, {
27087 key: 'goalBufferLength',
27088 value: function goalBufferLength() {
27089 var currentTime = this.tech_.currentTime();
27090 var initial = Config.GOAL_BUFFER_LENGTH;
27091 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
27092 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
27093
27094 return Math.min(initial + currentTime * rate, max);
27095 }
27096
27097 /**
27098 * Calculates the desired buffer low water line based on current time
27099 *
27100 * @return {Number} Desired buffer low water line in seconds
27101 */
27102
27103 }, {
27104 key: 'bufferLowWaterLine',
27105 value: function bufferLowWaterLine() {
27106 var currentTime = this.tech_.currentTime();
27107 var initial = Config.BUFFER_LOW_WATER_LINE;
27108 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
27109 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
27110
27111 return Math.min(initial + currentTime * rate, max);
27112 }
27113 }]);
27114 return MasterPlaylistController;
27115 }(videojs.EventTarget);
27116
/**
 * Returns a function that acts as the Enable/disable playlist function.
 *
 * @param {PlaylistLoader} loader - The master playlist loader
 * @param {string} playlistID - id of the playlist
 * @param {Function} changePlaylistFn - A function to be called after a
 * playlist's enabled-state has been changed. Will NOT be called if a
 * playlist's enabled-state is unchanged
 * @return {Function} setter/getter for the playlist's enabled-state: call
 * with a Boolean to set it (returns the value passed in), call with no
 * argument to read the current enabled-state
 */
var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
  return function (enable) {
    var playlist = loader.master.playlists[playlistID];
    var incompatible = isIncompatible(playlist);
    var currentlyEnabled = isEnabled(playlist);

    // getter form: no argument means report the current state
    if (typeof enable === 'undefined') {
      return currentlyEnabled;
    }

    if (enable) {
      delete playlist.disabled;
    } else {
      playlist.disabled = true;
    }

    // Only notify the outside world on a real state change of a
    // compatible playlist.
    if (enable !== currentlyEnabled && !incompatible) {
      changePlaylistFn();
      loader.trigger(enable ? 'renditionenabled' : 'renditiondisabled');
    }

    return enable;
  };
};
27158
/**
 * The representation object encapsulates the publicly visible information
 * in a media playlist along with a setter/getter-type function (enabled)
 * for changing the enabled-state of a particular playlist entry
 *
 * @class Representation
 */

var Representation = function Representation(hlsHandler, playlist, id) {
  classCallCheck(this, Representation);

  var mpc = hlsHandler.masterPlaylistController_;
  var smoothQualityChange = hlsHandler.options_.smoothQualityChange;

  // Get a reference to a bound version of the quality change function
  var changeType = smoothQualityChange ? 'smooth' : 'fast';
  var qualityChangeFunction = mpc[changeType + 'QualityChange_'].bind(mpc);

  // some playlist attributes are optional
  var resolution = playlist.attributes.RESOLUTION;

  if (resolution) {
    this.width = resolution.width;
    this.height = resolution.height;
  }

  this.bandwidth = playlist.attributes.BANDWIDTH;

  // The id is simply the ordinality of the media playlist
  // within the master playlist
  this.id = id;

  // Partially-apply the enableFunction to create a playlist-
  // specific variant
  this.enabled = enableFunction(hlsHandler.playlists, playlist.id, qualityChangeFunction);
};
27194
/**
 * A mixin function that adds the `representations` api to an instance
 * of the HlsHandler class
 * @param {HlsHandler} hlsHandler - An instance of HlsHandler to add the
 * representation API into
 */


var renditionSelectionMixin = function renditionSelectionMixin(hlsHandler) {
  var playlists = hlsHandler.playlists;

  // Add a single API-specific function to the HlsHandler instance
  hlsHandler.representations = function () {
    // the master playlist may not be parsed yet
    if (!playlists || !playlists.master || !playlists.master.playlists) {
      return [];
    }

    var representations = [];

    playlists.master.playlists.forEach(function (media) {
      if (!isIncompatible(media)) {
        representations.push(new Representation(hlsHandler, media, media.id));
      }
    });

    return representations;
  };
};
27218
/**
 * @file playback-watcher.js
 *
 * Playback starts, and now my watch begins. It shall not end until my death. I shall
 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
 * my life and honor to the Playback Watch, for this Player and all the Players to come.
 */

// Set of events that reset the playback-watcher time check logic and clear the timeout
var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
27231
/**
 * @class PlaybackWatcher
 */

var PlaybackWatcher = function () {
  /**
   * Represents an PlaybackWatcher object.
   * @constructor
   * @param {object} options an object that includes the tech and settings
   */
  function PlaybackWatcher(options) {
    var _this = this;

    classCallCheck(this, PlaybackWatcher);

    this.tech_ = options.tech;
    // seekable and media are functions supplied by the caller, invoked to
    // read the current seekable range / media playlist on demand
    this.seekable = options.seekable;
    this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
    this.media = options.media;

    // stall-detection state (see checkCurrentTime_)
    this.consecutiveUpdates = 0;
    this.lastRecordedTime = null;
    this.timer_ = null;
    this.checkCurrentTimeTimeout_ = null;
    this.logger_ = logger('PlaybackWatcher');

    this.logger_('initialize');

    var canPlayHandler = function canPlayHandler() {
      return _this.monitorCurrentTime_();
    };
    var waitingHandler = function waitingHandler() {
      return _this.techWaiting_();
    };
    var cancelTimerHandler = function cancelTimerHandler() {
      return _this.cancelTimer_();
    };
    var fixesBadSeeksHandler = function fixesBadSeeksHandler() {
      return _this.fixesBadSeeks_();
    };

    this.tech_.on('seekablechanged', fixesBadSeeksHandler);
    this.tech_.on('waiting', waitingHandler);
    this.tech_.on(timerCancelEvents, cancelTimerHandler);
    this.tech_.on('canplay', canPlayHandler);

    // Define the dispose function to clean up our events
    this.dispose = function () {
      _this.logger_('dispose');
      _this.tech_.off('seekablechanged', fixesBadSeeksHandler);
      _this.tech_.off('waiting', waitingHandler);
      _this.tech_.off(timerCancelEvents, cancelTimerHandler);
      _this.tech_.off('canplay', canPlayHandler);
      if (_this.checkCurrentTimeTimeout_) {
        window_1.clearTimeout(_this.checkCurrentTimeTimeout_);
      }
      _this.cancelTimer_();
    };
  }

  /**
   * Periodically check current time to see if playback stopped
   *
   * @private
   */


  createClass(PlaybackWatcher, [{
    key: 'monitorCurrentTime_',
    value: function monitorCurrentTime_() {
      this.checkCurrentTime_();

      // only one monitoring loop at a time
      if (this.checkCurrentTimeTimeout_) {
        window_1.clearTimeout(this.checkCurrentTimeTimeout_);
      }

      // 42 = 24 fps // 250 is what Webkit uses // FF uses 15
      this.checkCurrentTimeTimeout_ = window_1.setTimeout(this.monitorCurrentTime_.bind(this), 250);
    }

    /**
     * The purpose of this function is to emulate the "waiting" event on
     * browsers that do not emit it when they are waiting for more
     * data to continue playback
     *
     * @private
     */

  }, {
    key: 'checkCurrentTime_',
    value: function checkCurrentTime_() {
      if (this.tech_.seeking() && this.fixesBadSeeks_()) {
        // a corrective seek was issued; reset the stall counters
        this.consecutiveUpdates = 0;
        this.lastRecordedTime = this.tech_.currentTime();
        return;
      }

      if (this.tech_.paused() || this.tech_.seeking()) {
        return;
      }

      var currentTime = this.tech_.currentTime();
      var buffered = this.tech_.buffered();

      if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
        // If current time is at the end of the final buffered region, then any playback
        // stall is most likely caused by buffering in a low bandwidth environment. The tech
        // should fire a `waiting` event in this scenario, but due to browser and tech
        // inconsistencies. Calling `techWaiting_` here allows us to simulate
        // responding to a native `waiting` event when the tech fails to emit one.
        return this.techWaiting_();
      }

      // this check runs every 250ms, so 5 consecutive unchanged readings
      // (~1.25s of no progress) trigger the last-resort waiting_ correction
      if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
        this.consecutiveUpdates++;
        this.waiting_();
      } else if (currentTime === this.lastRecordedTime) {
        this.consecutiveUpdates++;
      } else {
        this.consecutiveUpdates = 0;
        this.lastRecordedTime = currentTime;
      }
    }

    /**
     * Cancels any pending timers and resets the 'timeupdate' mechanism
     * designed to detect that we are stalled
     *
     * @private
     */

  }, {
    key: 'cancelTimer_',
    value: function cancelTimer_() {
      this.consecutiveUpdates = 0;

      if (this.timer_) {
        this.logger_('cancelTimer_');
        // NOTE(review): uses the bare global clearTimeout here while the
        // monitoring loop above uses window_1.clearTimeout — presumably
        // equivalent in the browser; confirm before normalizing.
        clearTimeout(this.timer_);
      }

      this.timer_ = null;
    }

    /**
     * Fixes situations where there's a bad seek
     *
     * @return {Boolean} whether an action was taken to fix the seek
     * @private
     */

  }, {
    key: 'fixesBadSeeks_',
    value: function fixesBadSeeks_() {
      var seeking = this.tech_.seeking();

      // only relevant while a seek is in flight
      if (!seeking) {
        return false;
      }

      var seekable = this.seekable();
      var currentTime = this.tech_.currentTime();
      var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
      var seekTo = void 0;

      if (isAfterSeekableRange) {
        var seekableEnd = seekable.end(seekable.length - 1);

        // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
        seekTo = seekableEnd;
      }

      if (this.beforeSeekableWindow_(seekable, currentTime)) {
        var seekableStart = seekable.start(0);

        // sync to the beginning of the live window
        // provide a buffer of .1 seconds to handle rounding/imprecise numbers
        seekTo = seekableStart + SAFE_TIME_DELTA;
      }

      if (typeof seekTo !== 'undefined') {
        this.logger_('Trying to seek outside of seekable at time ' + currentTime + ' with ' + ('seekable range ' + printableRange(seekable) + '. Seeking to ') + (seekTo + '.'));

        this.tech_.setCurrentTime(seekTo);
        return true;
      }

      return false;
    }

    /**
     * Handler for situations when we determine the player is waiting.
     *
     * @private
     */

  }, {
    key: 'waiting_',
    value: function waiting_() {
      if (this.techWaiting_()) {
        return;
      }

      // All tech waiting checks failed. Use last resort correction
      var currentTime = this.tech_.currentTime();
      var buffered = this.tech_.buffered();
      var currentRange = findRange(buffered, currentTime);

      // Sometimes the player can stall for unknown reasons within a contiguous buffered
      // region with no indication that anything is amiss (seen in Firefox). Seeking to
      // currentTime is usually enough to kickstart the player. This checks that the player
      // is currently within a buffered region before attempting a corrective seek.
      // Chrome does not appear to continue `timeupdate` events after a `waiting` event
      // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
      // make sure there is ~3 seconds of forward buffer before taking any corrective action
      // to avoid triggering an `unknownwaiting` event when the network is slow.
      if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
        this.cancelTimer_();
        this.tech_.setCurrentTime(currentTime);

        this.logger_('Stopped at ' + currentTime + ' while inside a buffered region ' + ('[' + currentRange.start(0) + ' -> ' + currentRange.end(0) + ']. Attempting to resume ') + 'playback by seeking to the current time.');

        // unknown waiting corrections may be useful for monitoring QoS
        this.tech_.trigger({ type: 'usage', name: 'hls-unknown-waiting' });
        return;
      }
    }

    /**
     * Handler for situations when the tech fires a `waiting` event
     *
     * @return {Boolean}
     *         True if an action (or none) was needed to correct the waiting. False if no
     *         checks passed
     * @private
     */

  }, {
    key: 'techWaiting_',
    value: function techWaiting_() {
      var seekable = this.seekable();
      var currentTime = this.tech_.currentTime();

      if (this.tech_.seeking() && this.fixesBadSeeks_()) {
        // Tech is seeking or bad seek fixed, no action needed
        return true;
      }

      if (this.tech_.seeking() || this.timer_ !== null) {
        // Tech is seeking or already waiting on another action, no action needed
        return true;
      }

      if (this.beforeSeekableWindow_(seekable, currentTime)) {
        var livePoint = seekable.end(seekable.length - 1);

        this.logger_('Fell out of live window at time ' + currentTime + '. Seeking to ' + ('live point (seekable end) ' + livePoint));
        this.cancelTimer_();
        this.tech_.setCurrentTime(livePoint);

        // live window resyncs may be useful for monitoring QoS
        this.tech_.trigger({ type: 'usage', name: 'hls-live-resync' });
        return true;
      }

      var buffered = this.tech_.buffered();
      var nextRange = findNextRange(buffered, currentTime);

      if (this.videoUnderflow_(nextRange, buffered, currentTime)) {
        // Even though the video underflowed and was stuck in a gap, the audio overplayed
        // the gap, leading currentTime into a buffered range. Seeking to currentTime
        // allows the video to catch up to the audio position without losing any audio
        // (only suffering ~3 seconds of frozen video and a pause in audio playback).
        this.cancelTimer_();
        this.tech_.setCurrentTime(currentTime);

        // video underflow may be useful for monitoring QoS
        this.tech_.trigger({ type: 'usage', name: 'hls-video-underflow' });
        return true;
      }

      // check for gap
      if (nextRange.length > 0) {
        var difference = nextRange.start(0) - currentTime;

        this.logger_('Stopped at ' + currentTime + ', setting timer for ' + difference + ', seeking ' + ('to ' + nextRange.start(0)));

        // wait out the gap in real time; skipTheGap_ re-checks before seeking
        this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
        return true;
      }

      // All checks failed. Returning false to indicate failure to correct waiting
      return false;
    }
  }, {
    key: 'afterSeekableWindow_',
    value: function afterSeekableWindow_(seekable, currentTime, playlist) {
      // optional fourth argument (ES5 default-parameter emulation)
      var allowSeeksWithinUnsafeLiveWindow = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;

      if (!seekable.length) {
        // we can't make a solid case if there's no seekable, default to false
        return false;
      }

      var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
      var isLive = !playlist.endList;

      if (isLive && allowSeeksWithinUnsafeLiveWindow) {
        // permit seeks up to three target durations past the seekable end
        allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
      }

      if (currentTime > allowedEnd) {
        return true;
      }

      return false;
    }
  }, {
    key: 'beforeSeekableWindow_',
    value: function beforeSeekableWindow_(seekable, currentTime) {
      if (seekable.length &&
      // can't fall before 0 and 0 seekable start identifies VOD stream
      seekable.start(0) > 0 && currentTime < seekable.start(0) - SAFE_TIME_DELTA) {
        return true;
      }

      return false;
    }
  }, {
    key: 'videoUnderflow_',
    value: function videoUnderflow_(nextRange, buffered, currentTime) {
      if (nextRange.length === 0) {
        // Even if there is no available next range, there is still a possibility we are
        // stuck in a gap due to video underflow.
        var gap = this.gapFromVideoUnderflow_(buffered, currentTime);

        if (gap) {
          this.logger_('Encountered a gap in video from ' + gap.start + ' to ' + gap.end + '. ' + ('Seeking to current time ' + currentTime));

          return true;
        }
      }

      return false;
    }

    /**
     * Timer callback. If playback still has not proceeded, then we seek
     * to the start of the next buffered region.
     *
     * @private
     */

  }, {
    key: 'skipTheGap_',
    value: function skipTheGap_(scheduledCurrentTime) {
      var buffered = this.tech_.buffered();
      var currentTime = this.tech_.currentTime();
      var nextRange = findNextRange(buffered, currentTime);

      this.cancelTimer_();

      // playback moved on (or there is nothing to skip to) since the timer
      // was scheduled — nothing to do
      if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
        return;
      }

      this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0));

      // only seek if we still have not played
      this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);

      this.tech_.trigger({ type: 'usage', name: 'hls-gap-skip' });
    }
  }, {
    key: 'gapFromVideoUnderflow_',
    value: function gapFromVideoUnderflow_(buffered, currentTime) {
      // At least in Chrome, if there is a gap in the video buffer, the audio will continue
      // playing for ~3 seconds after the video gap starts. This is done to account for
      // video buffer underflow/underrun (note that this is not done when there is audio
      // buffer underflow/underrun -- in that case the video will stop as soon as it
      // encounters the gap, as audio stalls are more noticeable/jarring to a user than
      // video stalls). The player's time will reflect the playthrough of audio, so the
      // time will appear as if we are in a buffered region, even if we are stuck in a
      // "gap."
      //
      // Example:
      // video buffer:   0 => 10.1, 10.2 => 20
      // audio buffer:   0 => 20
      // overall buffer: 0 => 10.1, 10.2 => 20
      // current time: 13
      //
      // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
      // however, the audio continued playing until it reached ~3 seconds past the gap
      // (13 seconds), at which point it stops as well. Since current time is past the
      // gap, findNextRange will return no ranges.
      //
      // To check for this issue, we see if there is a gap that starts somewhere within
      // a 3 second range (3 seconds +/- 1 second) back from our current time.
      var gaps = findGaps(buffered);

      for (var i = 0; i < gaps.length; i++) {
        var start = gaps.start(i);
        var end = gaps.end(i);

        // gap is starts no more than 4 seconds back
        if (currentTime - start < 4 && currentTime - start > 2) {
          return {
            start: start,
            end: end
          };
        }
      }

      return null;
    }
  }]);
  return PlaybackWatcher;
}();
27650
// Default options for the reloadSourceOnError plugin.
var defaultOptions = {
  // minimum number of seconds that must elapse between reload attempts
  errorInterval: 30,
  // default source getter: re-use the tech's current source object
  getSource: function getSource(next) {
    var tech = this.tech({ IWillNotUseThisInPlugins: true });
    var sourceObj = tech.currentSource_;

    return next(sourceObj);
  }
};
27660
27661 /**
27662 * Main entry point for the plugin
27663 *
27664 * @param {Player} player a reference to a videojs Player instance
27665 * @param {Object} [options] an object with plugin options
27666 * @private
27667 */
27668 var initPlugin = function initPlugin(player, options) {
27669 var lastCalled = 0;
27670 var seekTo = 0;
27671 var localOptions = videojs.mergeOptions(defaultOptions, options);
27672
27673 player.ready(function () {
27674 player.trigger({ type: 'usage', name: 'hls-error-reload-initialized' });
27675 });
27676
27677 /**
27678 * Player modifications to perform that must wait until `loadedmetadata`
27679 * has been triggered
27680 *
27681 * @private
27682 */
27683 var loadedMetadataHandler = function loadedMetadataHandler() {
27684 if (seekTo) {
27685 player.currentTime(seekTo);
27686 }
27687 };
27688
27689 /**
27690 * Set the source on the player element, play, and seek if necessary
27691 *
27692 * @param {Object} sourceObj An object specifying the source url and mime-type to play
27693 * @private
27694 */
27695 var setSource = function setSource(sourceObj) {
27696 if (sourceObj === null || sourceObj === undefined) {
27697 return;
27698 }
27699 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
27700
27701 player.one('loadedmetadata', loadedMetadataHandler);
27702
27703 player.src(sourceObj);
27704 player.trigger({ type: 'usage', name: 'hls-error-reload' });
27705 player.play();
27706 };
27707
27708 /**
27709 * Attempt to get a source from either the built-in getSource function
27710 * or a custom function provided via the options
27711 *
27712 * @private
27713 */
27714 var errorHandler = function errorHandler() {
27715 // Do not attempt to reload the source if a source-reload occurred before
27716 // 'errorInterval' time has elapsed since the last source-reload
27717 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
27718 player.trigger({ type: 'usage', name: 'hls-error-reload-canceled' });
27719 return;
27720 }
27721
27722 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
27723 videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
27724 return;
27725 }
27726 lastCalled = Date.now();
27727
27728 return localOptions.getSource.call(player, setSource);
27729 };
27730
27731 /**
27732 * Unbind any event handlers that were bound by the plugin
27733 *
27734 * @private
27735 */
27736 var cleanupEvents = function cleanupEvents() {
27737 player.off('loadedmetadata', loadedMetadataHandler);
27738 player.off('error', errorHandler);
27739 player.off('dispose', cleanupEvents);
27740 };
27741
/**
 * Cleanup before re-initializing the plugin
 *
 * @param {Object} [newOptions] an object with plugin options
 * @private
 */
var reinitPlugin = function reinitPlugin(newOptions) {
  // drop the existing listeners first so nothing is double-bound
  cleanupEvents();

  initPlugin(player, newOptions);
};
27752
// Begin listening: reload the source on error, tear everything down on dispose.
player.on('error', errorHandler);
player.on('dispose', cleanupEvents);

// Overwrite the plugin function so that we can correctly cleanup before
// initializing the plugin
player.reloadSourceOnError = reinitPlugin;
27759 };
27760
/**
 * Reload the source when an error is detected as long as there
 * wasn't an error previously within the last 30 seconds
 *
 * @param {Object} [options] an object with plugin options
 */
var reloadSourceOnError = function reloadSourceOnError(options) {
  // `this` is the player the plugin was invoked on
  initPlugin(this, options);
};

var version$2 = "1.13.2";
27772
/**
 * @file videojs-http-streaming.js
 *
 * The main file for the HLS project.
 * License: https://github.com/videojs/videojs-http-streaming/blob/master/LICENSE
 */

// Aggregate object later exported as `videojs.Hls`: re-exports the loader,
// playlist, and decryption primitives, plus the default rendition-selection
// strategies and a shared xhr factory instance.
var Hls$1 = {
  PlaylistLoader: PlaylistLoader,
  Playlist: Playlist,
  Decrypter: Decrypter,
  AsyncStream: AsyncStream,
  decrypt: decrypt,
  utils: utils,

  // default playlist selectors; users may replace these to customize ABR
  STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
  INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
  comparePlaylistBandwidth: comparePlaylistBandwidth,
  comparePlaylistResolution: comparePlaylistResolution,

  // a single module-level xhr instance shared by consumers of Hls.xhr
  xhr: xhrFactory()
};
27795
// Define getter/setters for config properites
// (each access is funneled through Config with a deprecation-style warning)
['GOAL_BUFFER_LENGTH', 'MAX_GOAL_BUFFER_LENGTH', 'GOAL_BUFFER_LENGTH_RATE', 'BUFFER_LOW_WATER_LINE', 'MAX_BUFFER_LOW_WATER_LINE', 'BUFFER_LOW_WATER_LINE_RATE', 'BANDWIDTH_VARIANCE'].forEach(function (prop) {
  var warnUnsafe = function warnUnsafe() {
    videojs.log.warn('using Hls.' + prop + ' is UNSAFE be sure you know what you are doing');
  };

  Object.defineProperty(Hls$1, prop, {
    get: function get$$1() {
      warnUnsafe();
      return Config[prop];
    },
    set: function set$$1(value) {
      warnUnsafe();

      // only non-negative numbers are accepted; anything else is ignored
      if (typeof value !== 'number' || value < 0) {
        videojs.log.warn('value of Hls.' + prop + ' must be greater than or equal to 0');
        return;
      }

      Config[prop] = value;
    }
  });
});
27815
// localStorage key under which VHS persists bandwidth/throughput estimates
var LOCAL_STORAGE_KEY = 'videojs-vhs';

/**
 * Map a source MIME type onto the streaming protocol it represents.
 *
 * @param {string} type the MIME type of the source
 * @return {string|null} 'hls', 'dash', or null when unrecognized
 */
var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
  // HLS manifests go by several MIME types: audio|video|application
  // combined with an optional 'x-' or 'vnd.apple.' prefix before 'mpegurl'
  if (/^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i.test(type)) {
    return 'hls';
  }

  if (/^application\/dash\+xml/i.test(type)) {
    return 'dash';
  }

  return null;
};
27833
/**
 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in hls.
 *
 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
 * @function handleHlsMediaChange
 */
var handleHlsMediaChange = function handleHlsMediaChange(qualityLevels, playlistLoader) {
  var activePlaylist = playlistLoader.media();
  var selectedIndex = -1;

  // scan backwards so the lowest matching index wins, mirroring a
  // forward scan that stops at the first match
  for (var index = qualityLevels.length - 1; index >= 0; index--) {
    if (qualityLevels[index].id === activePlaylist.id) {
      selectedIndex = index;
    }
  }

  qualityLevels.selectedIndex_ = selectedIndex;
  qualityLevels.trigger({
    selectedIndex: selectedIndex,
    type: 'change'
  });
};
27858
/**
 * Adds quality levels to list once playlist metadata is available
 *
 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
 * @param {Object} hls Hls object to listen to for media events.
 * @function handleHlsLoadedMetadata
 */
var handleHlsLoadedMetadata = function handleHlsLoadedMetadata(qualityLevels, hls) {
  // expose every available rendition as a quality level
  var representations = hls.representations();

  representations.forEach(function (rep) {
    qualityLevels.addQualityLevel(rep);
  });

  // sync the selected index with the currently active playlist
  handleHlsMediaChange(qualityLevels, hls.playlists);
};
27872
// HLS is a source handler, not a tech. Make sure attempts to use it
// as one do not cause exceptions.
Hls$1.canPlaySource = function () {
  // returns the result of log.warn (undefined), never a canPlay string
  return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};
27878
/**
 * Augment the user-supplied keySystems options with content types (and any
 * playlist-provided pssh) derived from the active segment loaders, for use
 * by videojs-contrib-eme.
 *
 * @param {Object} keySystemOptions user-supplied keySystems configuration
 * @param {SegmentLoader} mainSegmentLoader loader for the main (video) content
 * @param {SegmentLoader} audioSegmentLoader loader for demuxed audio content
 * @return {Object} merged keySystems options (or the falsy input unchanged)
 */
var emeKeySystems = function emeKeySystems(keySystemOptions, mainSegmentLoader, audioSegmentLoader) {
  // nothing to augment; return the (falsy) input untouched
  if (!keySystemOptions) {
    return keySystemOptions;
  }

  var videoMimeType = void 0;
  var audioMimeType = void 0;

  if (audioSegmentLoader.mimeType_) {
    // demuxed content: the audio and video mimeType/codec strings are
    // already in the format we need to pass with the other key systems
    videoMimeType = mainSegmentLoader.mimeType_;
    audioMimeType = audioSegmentLoader.mimeType_;
  } else {
    // muxed content: create the audio and video mimeType/codec strings from
    // information extrapolated from the mainSegmentLoader mimeType
    // (ex. 'video/mp4; codecs="mp4, avc1"' -->
    // 'video/mp4; codecs="avc1"' and 'audio/mp4; codecs="mp4"')
    var parsedMimeType = parseContentType(mainSegmentLoader.mimeType_);
    var audioCodec = void 0;
    var videoCodec = void 0;

    parsedMimeType.parameters.codecs.split(',').forEach(function (codec) {
      var trimmed = codec.trim();

      if (isAudioCodec(trimmed)) {
        audioCodec = trimmed;
      } else if (isVideoCodec(trimmed)) {
        videoCodec = trimmed;
      }
    });

    videoMimeType = parsedMimeType.type + '; codecs="' + videoCodec + '"';
    audioMimeType = parsedMimeType.type.replace('video', 'audio') + '; codecs="' + audioCodec + '"';
  }

  // upsert the content types based on the selected playlist
  var keySystemContentTypes = {};
  var videoPlaylist = mainSegmentLoader.playlist_;

  for (var keySystem in keySystemOptions) {
    var contentTypes = {
      audioContentType: audioMimeType,
      videoContentType: videoMimeType
    };

    var protection = videoPlaylist.contentProtection;

    if (protection && protection[keySystem] && protection[keySystem].pssh) {
      contentTypes.pssh = protection[keySystem].pssh;
    }

    // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
    // so we need to prevent overwriting the URL entirely
    if (typeof keySystemOptions[keySystem] === 'string') {
      contentTypes.url = keySystemOptions[keySystem];
    }

    keySystemContentTypes[keySystem] = contentTypes;
  }

  return videojs.mergeOptions(keySystemOptions, keySystemContentTypes);
};
27942
/**
 * If videojs-contrib-eme is present on the player, hand it the augmented
 * keySystems configuration for the current source and (outside IE11)
 * eagerly initialize media keys.
 *
 * @param {HlsHandler} hlsHandler the handler whose source needs EME setup
 */
var setupEmeOptions = function setupEmeOptions(hlsHandler) {
  var mpc = hlsHandler.masterPlaylistController_;
  var player = videojs.players[hlsHandler.tech_.options_.playerId];

  // nothing to do unless the eme plugin is present
  if (!player.eme) {
    return;
  }

  var sourceOptions = emeKeySystems(hlsHandler.source_.keySystems, mpc.mainSegmentLoader_, mpc.audioSegmentLoader_);

  if (!sourceOptions) {
    return;
  }

  player.currentSource().keySystems = sourceOptions;

  // Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449
  // in non-IE11 browsers. In IE11 this is too early to initialize media keys
  if (videojs.browser.IE_VERSION !== 11 && player.eme.initializeMediaKeys) {
    player.eme.initializeMediaKeys();
  }
};
27963
/**
 * Read and parse the VHS object persisted in localStorage.
 *
 * @return {Object|null} the stored object, or null when storage is
 *         unavailable, the key is absent, or the value is not valid JSON
 */
var getVhsLocalStorage = function getVhsLocalStorage() {
  var storage = window.localStorage;
  var serialized = storage && storage.getItem(LOCAL_STORAGE_KEY);

  if (!serialized) {
    return null;
  }

  try {
    return JSON.parse(serialized);
  } catch (e) {
    // someone may have tampered with the value; treat it as absent
    return null;
  }
};
27982
/**
 * Merge the given options into the persisted VHS localStorage object.
 *
 * @param {Object} options key/values to persist (e.g. bandwidth, throughput)
 * @return {Object|boolean} the object that was stored, or false when
 *         storage is unavailable or setItem throws (e.g. quota exceeded)
 */
var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
  if (!window.localStorage) {
    return false;
  }

  var existing = getVhsLocalStorage();
  var objectToStore = existing ? videojs.mergeOptions(existing, options) : options;

  try {
    window.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
  } catch (e) {
    // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
    // storage is set to 0).
    // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
    // No need to perform any operation.
    return false;
  }

  return objectToStore;
};
28004
/**
 * Whether the browser has built-in HLS support.
 */
Hls$1.supportsNativeHls = function () {
  var video = document_1.createElement('video');

  // native HLS is definitely not supported if HTML5 video isn't
  if (!videojs.getTech('Html5').isSupported()) {
    return false;
  }

  // HLS manifests can go by many mime-types:
  // the two Apple-sanctioned ones, the very common x- variants, and a few
  // included for completeness
  var canPlay = ['application/vnd.apple.mpegurl', 'audio/mpegurl', 'audio/x-mpegurl', 'application/x-mpegurl', 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];

  return canPlay.some(function (canItPlay) {
    return (/maybe|probably/i.test(video.canPlayType(canItPlay))
    );
  });
}();
28034
// Whether the browser can natively play DASH manifests.
Hls$1.supportsNativeDash = function () {
  if (!videojs.getTech('Html5').isSupported()) {
    return false;
  }

  var answer = document_1.createElement('video').canPlayType('application/dash+xml');

  return /maybe|probably/i.test(answer);
}();
28043
// Look up native playback support for a simple source type ('hls'/'dash').
Hls$1.supportsTypeNatively = function (type) {
  if (type === 'hls') {
    return Hls$1.supportsNativeHls;
  }

  // any type other than hls/dash is never natively supported
  return type === 'dash' ? Hls$1.supportsNativeDash : false;
};
28055
/**
 * HLS is a source handler, not a tech. Make sure attempts to use it
 * as one do not cause exceptions.
 */
Hls$1.isSupported = function () {
  // returns the result of log.warn (undefined) rather than a boolean
  return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};
28063
// base Component class from video.js; HlsHandler extends it below
var Component = videojs.getComponent('Component');
28065
28066 /**
28067 * The Hls Handler object, where we orchestrate all of the parts
28068 * of HLS to interact with video.js
28069 *
28070 * @class HlsHandler
28071 * @extends videojs.Component
28072 * @param {Object} source the soruce object
28073 * @param {Tech} tech the parent tech object
28074 * @param {Object} options optional and required options
28075 */
28076
28077 var HlsHandler = function (_Component) {
28078 inherits(HlsHandler, _Component);
28079
// Constructor: sets up the deprecated player.hls/player.vhs/player.dash
// references, validates overrideNative configuration, and wires the
// document/tech event listeners the handler depends on.
function HlsHandler(source, tech, options) {
  classCallCheck(this, HlsHandler);

  // tech.player() is deprecated but setup a reference to HLS for
  // backwards-compatibility
  var _this = possibleConstructorReturn(this, (HlsHandler.__proto__ || Object.getPrototypeOf(HlsHandler)).call(this, tech, options.hls));

  if (tech.options_ && tech.options_.playerId) {
    var _player = videojs(tech.options_.playerId);

    if (!_player.hasOwnProperty('hls')) {
      // deprecated accessor that warns (and emits a usage event) on read
      Object.defineProperty(_player, 'hls', {
        get: function get$$1() {
          videojs.log.warn('player.hls is deprecated. Use player.tech().hls instead.');
          tech.trigger({ type: 'usage', name: 'hls-player-access' });
          return _this;
        },
        configurable: true
      });
    }

    // Set up a reference to the HlsHandler from player.vhs. This allows users to start
    // migrating from player.tech_.hls... to player.vhs... for API access. Although this
    // isn't the most appropriate form of reference for video.js (since all APIs should
    // be provided through core video.js), it is a common pattern for plugins, and vhs
    // will act accordingly.
    _player.vhs = _this;
    // deprecated, for backwards compatibility
    _player.dash = _this;

    _this.player_ = _player;
  }

  _this.tech_ = tech;
  _this.source_ = source;
  _this.stats = {};
  // set when firstplay performs the seek-to-live seek (see 'seeking' below)
  _this.ignoreNextSeekingEvent_ = false;
  _this.setOptions_();

  if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
    tech.overrideNativeAudioTracks(true);
    tech.overrideNativeVideoTracks(true);
  } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
    // overriding native HLS only works if audio tracks have been emulated
    // error early if we're misconfigured
    throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
  }

  // listen for fullscreenchange events for this player so that we
  // can adjust our quality selection quickly
  _this.on(document_1, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
    var fullscreenElement = document_1.fullscreenElement || document_1.webkitFullscreenElement || document_1.mozFullScreenElement || document_1.msFullscreenElement;

    if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
      _this.masterPlaylistController_.smoothQualityChange_();
    }
  });

  _this.on(_this.tech_, 'seeking', function () {
    // skip the single seek event generated by our own seek-to-live behavior
    if (this.ignoreNextSeekingEvent_) {
      this.ignoreNextSeekingEvent_ = false;
      return;
    }

    this.setCurrentTime(this.tech_.currentTime());
  });

  _this.on(_this.tech_, 'error', function () {
    // stop fetching segments once the tech has errored
    if (this.masterPlaylistController_) {
      this.masterPlaylistController_.pauseLoading();
    }
  });

  _this.on(_this.tech_, 'play', _this.play);
  return _this;
}
28156
28157 createClass(HlsHandler, [{
key: 'setOptions_',
// Normalize this.options_: apply defaults, optionally pull
// bandwidth/throughput from localStorage, then let per-source options
// (player.src({...})) override. Order matters: the local-storage read
// depends on the useBandwidthFromLocalStorage default computed above it.
value: function setOptions_() {
  var _this2 = this;

  // defaults
  this.options_.withCredentials = this.options_.withCredentials || false;
  this.options_.handleManifestRedirects = this.options_.handleManifestRedirects || false;
  // limitRenditionByPlayerDimensions defaults to true unless explicitly false
  this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
  this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
  this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
  // the source-level setting wins over the handler-level one when present
  this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
  this.options_.customTagParsers = this.options_.customTagParsers || [];
  this.options_.customTagMappers = this.options_.customTagMappers || [];
  this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;

  // default blacklist duration: 5 minutes
  if (typeof this.options_.blacklistDuration !== 'number') {
    this.options_.blacklistDuration = 5 * 60;
  }

  if (typeof this.options_.bandwidth !== 'number') {
    if (this.options_.useBandwidthFromLocalStorage) {
      var storedObject = getVhsLocalStorage();

      if (storedObject && storedObject.bandwidth) {
        this.options_.bandwidth = storedObject.bandwidth;
        this.tech_.trigger({ type: 'usage', name: 'hls-bandwidth-from-local-storage' });
      }
      if (storedObject && storedObject.throughput) {
        this.options_.throughput = storedObject.throughput;
        this.tech_.trigger({ type: 'usage', name: 'hls-throughput-from-local-storage' });
      }
    }
  }
  // if bandwidth was not set by options or pulled from local storage, start playlist
  // selection at a reasonable bandwidth
  if (typeof this.options_.bandwidth !== 'number') {
    this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
  }

  // If the bandwidth number is unchanged from the initial setting
  // then this takes precedence over the enableLowInitialPlaylist option
  this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH;

  // grab options passed to player.src
  ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys'].forEach(function (option) {
    if (typeof _this2.source_[option] !== 'undefined') {
      _this2.options_[option] = _this2.source_[option];
    }
  });

  this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
  this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
}
28211 /**
28212 * called when player.src gets called, handle a new source
28213 *
28214 * @param {Object} src the source object to handle
28215 */
28216
28217 }, {
key: 'src',
// called when player.src gets called: builds the MasterPlaylistController
// and PlaybackWatcher, wires compatibility properties and stats accessors,
// then points the tech at a blob URL for the managed MediaSource.
value: function src(_src, type) {
  var _this3 = this;

  // do nothing if the src is falsey
  if (!_src) {
    return;
  }
  this.setOptions_();
  // add master playlist controller options
  this.options_.url = this.source_.src;
  this.options_.tech = this.tech_;
  this.options_.externHls = Hls$1;
  this.options_.sourceType = simpleTypeFromSourceType(type);
  // Whenever we seek internally, we should update the tech
  this.options_.seekTo = function (time) {
    _this3.tech_.setCurrentTime(time);
  };

  this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
  this.playbackWatcher_ = new PlaybackWatcher(videojs.mergeOptions(this.options_, {
    seekable: function seekable$$1() {
      return _this3.seekable();
    },
    media: function media() {
      return _this3.masterPlaylistController_.media();
    }
  }));

  // surface controller errors on the owning player
  this.masterPlaylistController_.on('error', function () {
    var player = videojs.players[_this3.tech_.options_.playerId];

    player.error(_this3.masterPlaylistController_.error);
  });

  // `this` in selectPlaylist should be the HlsHandler for backwards
  // compatibility with < v2
  this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : Hls$1.STANDARD_PLAYLIST_SELECTOR.bind(this);

  this.masterPlaylistController_.selectInitialPlaylist = Hls$1.INITIAL_PLAYLIST_SELECTOR.bind(this);

  // re-expose some internal objects for backwards compatibility with < v2
  this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
  this.mediaSource = this.masterPlaylistController_.mediaSource;

  // Proxy assignment of some properties to the master playlist
  // controller. Using a custom property for backwards compatibility
  // with < v2
  Object.defineProperties(this, {
    selectPlaylist: {
      get: function get$$1() {
        return this.masterPlaylistController_.selectPlaylist;
      },
      set: function set$$1(selectPlaylist) {
        // re-bind so user-supplied selectors see the handler as `this`
        this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
      }
    },
    throughput: {
      get: function get$$1() {
        return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
      },
      set: function set$$1(throughput) {
        this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput;
        // By setting `count` to 1 the throughput value becomes the starting value
        // for the cumulative average
        this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
      }
    },
    bandwidth: {
      get: function get$$1() {
        return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
      },
      set: function set$$1(bandwidth) {
        this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth;
        // setting the bandwidth manually resets the throughput counter
        // `count` is set to zero that current value of `rate` isn't included
        // in the cumulative average
        this.masterPlaylistController_.mainSegmentLoader_.throughput = {
          rate: 0,
          count: 0
        };
      }
    },
    /**
     * `systemBandwidth` is a combination of two serial processes bit-rates. The first
     * is the network bitrate provided by `bandwidth` and the second is the bitrate of
     * the entire process after that - decryption, transmuxing, and appending - provided
     * by `throughput`.
     *
     * Since the two process are serial, the overall system bandwidth is given by:
     * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
     */
    systemBandwidth: {
      get: function get$$1() {
        var invBandwidth = 1 / (this.bandwidth || 1);
        var invThroughput = void 0;

        if (this.throughput > 0) {
          invThroughput = 1 / this.throughput;
        } else {
          invThroughput = 0;
        }

        var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));

        return systemBitrate;
      },
      set: function set$$1() {
        videojs.log.error('The "systemBandwidth" property is read-only');
      }
    }
  });

  // seed the proxied properties from options (and, transitively, from
  // localStorage when useBandwidthFromLocalStorage is enabled)
  if (this.options_.bandwidth) {
    this.bandwidth = this.options_.bandwidth;
  }
  if (this.options_.throughput) {
    this.throughput = this.options_.throughput;
  }

  // read-only, enumerable snapshot accessors for analytics/debugging
  Object.defineProperties(this.stats, {
    bandwidth: {
      get: function get$$1() {
        return _this3.bandwidth || 0;
      },
      enumerable: true
    },
    mediaRequests: {
      get: function get$$1() {
        return _this3.masterPlaylistController_.mediaRequests_() || 0;
      },
      enumerable: true
    },
    mediaRequestsAborted: {
      get: function get$$1() {
        return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
      },
      enumerable: true
    },
    mediaRequestsTimedout: {
      get: function get$$1() {
        return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
      },
      enumerable: true
    },
    mediaRequestsErrored: {
      get: function get$$1() {
        return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
      },
      enumerable: true
    },
    mediaTransferDuration: {
      get: function get$$1() {
        return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
      },
      enumerable: true
    },
    mediaBytesTransferred: {
      get: function get$$1() {
        return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
      },
      enumerable: true
    },
    mediaSecondsLoaded: {
      get: function get$$1() {
        return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
      },
      enumerable: true
    },
    buffered: {
      get: function get$$1() {
        return timeRangesToArray(_this3.tech_.buffered());
      },
      enumerable: true
    },
    currentTime: {
      get: function get$$1() {
        return _this3.tech_.currentTime();
      },
      enumerable: true
    },
    currentSource: {
      get: function get$$1() {
        return _this3.tech_.currentSource_;
      },
      enumerable: true
    },
    currentTech: {
      get: function get$$1() {
        return _this3.tech_.name_;
      },
      enumerable: true
    },
    duration: {
      get: function get$$1() {
        return _this3.tech_.duration();
      },
      enumerable: true
    },
    master: {
      get: function get$$1() {
        return _this3.playlists.master;
      },
      enumerable: true
    },
    playerDimensions: {
      get: function get$$1() {
        return _this3.tech_.currentDimensions();
      },
      enumerable: true
    },
    seekable: {
      get: function get$$1() {
        return timeRangesToArray(_this3.tech_.seekable());
      },
      enumerable: true
    },
    timestamp: {
      get: function get$$1() {
        return Date.now();
      },
      enumerable: true
    },
    videoPlaybackQuality: {
      get: function get$$1() {
        return _this3.tech_.getVideoPlaybackQuality();
      },
      enumerable: true
    }
  });

  this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));

  // persist updated estimates so future sessions can start closer to reality
  this.tech_.on('bandwidthupdate', function () {
    if (_this3.options_.useBandwidthFromLocalStorage) {
      updateVhsLocalStorage({
        bandwidth: _this3.bandwidth,
        throughput: Math.round(_this3.throughput)
      });
    }
  });

  this.masterPlaylistController_.on('selectedinitialmedia', function () {
    // Add the manual rendition mix-in to HlsHandler
    renditionSelectionMixin(_this3);
    setupEmeOptions(_this3);
  });

  // the bandwidth of the primary segment loader is our best
  // estimate of overall bandwidth
  this.on(this.masterPlaylistController_, 'progress', function () {
    this.tech_.trigger('progress');
  });

  // In the live case, we need to ignore the very first `seeking` event since
  // that will be the result of the seek-to-live behavior
  this.on(this.masterPlaylistController_, 'firstplay', function () {
    this.ignoreNextSeekingEvent_ = true;
  });

  this.setupQualityLevels_();

  // do nothing if the tech has been disposed already
  // this can occur if someone sets the src in player.ready(), for instance
  if (!this.tech_.el()) {
    return;
  }

  this.tech_.src(videojs.URL.createObjectURL(this.masterPlaylistController_.mediaSource));
}
28488
28489 /**
28490 * Initializes the quality levels and sets listeners to update them.
28491 *
28492 * @method setupQualityLevels_
28493 * @private
28494 */
28495
28496 }, {
28497 key: 'setupQualityLevels_',
28498 value: function setupQualityLevels_() {
28499 var _this4 = this;
28500
28501 var player = videojs.players[this.tech_.options_.playerId];
28502
28503 // if there isn't a player or there isn't a qualityLevels plugin
28504 // or qualityLevels_ listeners have already been setup, do nothing.
28505 if (!player || !player.qualityLevels || this.qualityLevels_) {
28506 return;
28507 }
28508
28509 this.qualityLevels_ = player.qualityLevels();
28510
28511 this.masterPlaylistController_.on('selectedinitialmedia', function () {
28512 handleHlsLoadedMetadata(_this4.qualityLevels_, _this4);
28513 });
28514
28515 this.playlists.on('mediachange', function () {
28516 handleHlsMediaChange(_this4.qualityLevels_, _this4.playlists);
28517 });
28518 }
28519
28520 /**
28521 * Begin playing the video.
28522 */
28523
28524 }, {
28525 key: 'play',
28526 value: function play() {
28527 this.masterPlaylistController_.play();
28528 }
28529
28530 /**
28531 * a wrapper around the function in MasterPlaylistController
28532 */
28533
28534 }, {
28535 key: 'setCurrentTime',
28536 value: function setCurrentTime(currentTime) {
28537 this.masterPlaylistController_.setCurrentTime(currentTime);
28538 }
28539
28540 /**
28541 * a wrapper around the function in MasterPlaylistController
28542 */
28543
28544 }, {
28545 key: 'duration',
28546 value: function duration$$1() {
28547 return this.masterPlaylistController_.duration();
28548 }
28549
28550 /**
28551 * a wrapper around the function in MasterPlaylistController
28552 */
28553
28554 }, {
28555 key: 'seekable',
28556 value: function seekable$$1() {
28557 return this.masterPlaylistController_.seekable();
28558 }
28559
28560 /**
28561 * Abort all outstanding work and cleanup.
28562 */
28563
28564 }, {
28565 key: 'dispose',
28566 value: function dispose() {
28567 if (this.playbackWatcher_) {
28568 this.playbackWatcher_.dispose();
28569 }
28570 if (this.masterPlaylistController_) {
28571 this.masterPlaylistController_.dispose();
28572 }
28573 if (this.qualityLevels_) {
28574 this.qualityLevels_.dispose();
28575 }
28576
28577 if (this.player_) {
28578 delete this.player_.vhs;
28579 delete this.player_.dash;
28580 delete this.player_.hls;
28581 }
28582
28583 if (this.tech_ && this.tech_.hls) {
28584 delete this.tech_.hls;
28585 }
28586
28587 get(HlsHandler.prototype.__proto__ || Object.getPrototypeOf(HlsHandler.prototype), 'dispose', this).call(this);
28588 }
28589 }, {
28590 key: 'convertToProgramTime',
28591 value: function convertToProgramTime(time, callback) {
28592 return getProgramTime({
28593 playlist: this.masterPlaylistController_.media(),
28594 time: time,
28595 callback: callback
28596 });
28597 }
28598
28599 // the player must be playing before calling this
28600
28601 }, {
key: 'seekToProgramTime',
// Seek to a program (stream) time. The player must be playing before
// calling this.
value: function seekToProgramTime$$1(programTime, callback) {
  // transpiled default parameters: pauseAfterSeek defaults to true,
  // retryCount defaults to 2
  var pauseAfterSeek = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true;
  var retryCount = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 2;

  return seekToProgramTime({
    programTime: programTime,
    playlist: this.masterPlaylistController_.media(),
    retryCount: retryCount,
    pauseAfterSeek: pauseAfterSeek,
    seekTo: this.options_.seekTo,
    tech: this.options_.tech,
    callback: callback
  });
}
28617 }]);
28618 return HlsHandler;
28619 }(Component);
28620
/**
 * The Source Handler object, which informs video.js what additional
 * MIME types are supported and sets up playback. It is registered
 * automatically to the appropriate tech based on the capabilities of
 * the browser it is running in. It is not necessary to use or modify
 * this object in normal usage.
 */
var HlsSourceHandler = {
  name: 'videojs-http-streaming',
  VERSION: version$2,

  // whether this handler can play the given source object
  canHandleSource: function canHandleSource(srcObj) {
    var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
    var localOptions = videojs.mergeOptions(videojs.options, options);

    return HlsSourceHandler.canPlayType(srcObj.type, localOptions);
  },

  // instantiate an HlsHandler on the tech and hand it the source
  handleSource: function handleSource(source, tech) {
    var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
    var localOptions = videojs.mergeOptions(videojs.options, options);

    tech.hls = new HlsHandler(source, tech, localOptions);
    tech.hls.xhr = xhrFactory();
    tech.hls.src(source.src, source.type);

    return tech.hls;
  },

  // 'maybe' when MSE playback of the MIME type is possible, '' otherwise
  canPlayType: function canPlayType(type) {
    var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
    var overrideNative = videojs.mergeOptions(videojs.options, options).hls.overrideNative;
    var supportedType = simpleTypeFromSourceType(type);

    if (!supportedType) {
      return '';
    }

    // use MSE when the browser lacks native support for the type, or when
    // native support is being deliberately overridden
    return !Hls$1.supportsTypeNatively(supportedType) || overrideNative ? 'maybe' : '';
  }
};
28663
// Provide shims when video.js does not already expose MediaSource/URL
if (typeof videojs.MediaSource === 'undefined' || typeof videojs.URL === 'undefined') {
  videojs.MediaSource = MediaSource;
  videojs.URL = URL$1;
}

// register source handlers with the appropriate techs
if (MediaSource.supportsNativeMediaSources()) {
  videojs.getTech('Html5').registerSourceHandler(HlsSourceHandler, 0);
}

videojs.HlsHandler = HlsHandler;
videojs.HlsSourceHandler = HlsSourceHandler;
videojs.Hls = Hls$1;

// very old video.js (no middleware/use support) needs a component registration
if (!videojs.use) {
  videojs.registerComponent('Hls', Hls$1);
}

videojs.options.hls = videojs.options.hls || {};

// video.js 6+ exposes registerPlugin; fall back to the legacy plugin() name
if (videojs.registerPlugin) {
  videojs.registerPlugin('reloadSourceOnError', reloadSourceOnError);
} else {
  videojs.plugin('reloadSourceOnError', reloadSourceOnError);
}

exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
exports.Hls = Hls$1;
exports.HlsHandler = HlsHandler;
exports.HlsSourceHandler = HlsSourceHandler;
exports.emeKeySystems = emeKeySystems;
exports.simpleTypeFromSourceType = simpleTypeFromSourceType;

Object.defineProperty(exports, '__esModule', { value: true });
28696
28697})));