UNPKG

962 kBJavaScriptView Raw
1/**
2 * @videojs/http-streaming
3 * @version 1.13.4
4 * @copyright 2020 Brightcove, Inc
5 * @license Apache-2.0
6 */
7(function (global, factory) {
8 typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('video.js')) :
9 typeof define === 'function' && define.amd ? define(['exports', 'video.js'], factory) :
10 (factory((global.videojsHttpStreaming = {}),global.videojs));
11}(this, (function (exports,videojs) { 'use strict';
12
// Interop shim: unwrap a transpiled ES-module default export of video.js
// so `videojs` is always the callable plugin host, never a module namespace.
13 videojs = videojs && videojs.hasOwnProperty('default') ? videojs['default'] : videojs;
14
// Best-available global for bundled CommonJS code, in preference order:
// standard globalThis, browser window, Node global, worker self, else {}.
15 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
16
/**
 * Run a CommonJS-style module factory in place and return its exports.
 *
 * A fresh `{ exports: {} }` module object is created for the factory, so
 * the factory may either mutate `exports` or replace `module.exports`
 * wholesale; whatever `module.exports` refers to afterwards is returned.
 *
 * @param {Function} fn factory invoked as fn(module, module.exports)
 * @param {Object} [module] ignored; always replaced with a fresh module
 * @return {*} the module's final exports value
 */
function createCommonjsModule(fn, module) {
  module = { exports: {} };
  fn(module, module.exports);
  return module.exports;
}
20
// Minimal stand-in object used when no real DOM `document` exists
// (e.g. Node or a worker).
var minDoc = {};

// Best-available global object on which the shared document stub is cached.
var topLevel = typeof commonjsGlobal !== 'undefined' ? commonjsGlobal : typeof window !== 'undefined' ? window : {};

var doccy;

if (typeof document === 'undefined') {
  // No real document: reuse (or install) a single shared stub on the
  // global, so every bundled copy of this shim resolves to the same object.
  doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'];

  if (!doccy) {
    doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'] = minDoc;
  }
} else {
  doccy = document;
}

var document_1 = doccy;
38
// Bundled copy of the url-toolkit package: RFC 1808 relative-URL resolution.
39 var urlToolkit = createCommonjsModule(function (module, exports) {
40 // see https://tools.ietf.org/html/rfc1808
41
42 /* jshint ignore:start */
43 (function (root) {
44 /* jshint ignore:end */
45
// URL_REGEX capture groups: 1=scheme ("http:"), 2=net_loc ("//host"),
// 3=path, 4=params (";..."), 5=query ("?..."), 6=fragment ("#...").
46 var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/\?#]*\/)*.*?)??(;.*?)?(\?.*?)?(#.*?)?$/;
47 var FIRST_SEGMENT_REGEX = /^([^\/?#]*)(.*)$/;
// NOTE: these two regexes are applied to a REVERSED path string inside
// normalizePath, which is why the lookaheads face "backwards".
48 var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
49 var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/).*?(?=\/)/g;
50
51 var URLToolkit = { // jshint ignore:line
52 // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
53 // E.g
54 // With opts.alwaysNormalize = false (default, spec compliant)
55 // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
56 // With opts.alwaysNormalize = true (not spec compliant)
57 // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
58 buildAbsoluteURL: function buildAbsoluteURL(baseURL, relativeURL, opts) {
59 opts = opts || {};
60 // remove any remaining space and CRLF
61 baseURL = baseURL.trim();
62 relativeURL = relativeURL.trim();
63 if (!relativeURL) {
64 // 2a) If the embedded URL is entirely empty, it inherits the
65 // entire base URL (i.e., is set equal to the base URL)
66 // and we are done.
67 if (!opts.alwaysNormalize) {
68 return baseURL;
69 }
70 var basePartsForNormalise = URLToolkit.parseURL(baseURL);
71 if (!basePartsForNormalise) {
72 throw new Error('Error trying to parse base URL.');
73 }
74 basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
75 return URLToolkit.buildURLFromParts(basePartsForNormalise);
76 }
77 var relativeParts = URLToolkit.parseURL(relativeURL);
78 if (!relativeParts) {
79 throw new Error('Error trying to parse relative URL.');
80 }
81 if (relativeParts.scheme) {
82 // 2b) If the embedded URL starts with a scheme name, it is
83 // interpreted as an absolute URL and we are done.
84 if (!opts.alwaysNormalize) {
85 return relativeURL;
86 }
87 relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
88 return URLToolkit.buildURLFromParts(relativeParts);
89 }
90 var baseParts = URLToolkit.parseURL(baseURL);
91 if (!baseParts) {
92 throw new Error('Error trying to parse base URL.');
93 }
94 if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
95 // If netLoc missing and path doesn't start with '/', assume everthing before the first '/' is the netLoc
96 // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
97 var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
98 baseParts.netLoc = pathParts[1];
99 baseParts.path = pathParts[2];
100 }
101 if (baseParts.netLoc && !baseParts.path) {
102 baseParts.path = '/';
103 }
104 var builtParts = {
105 // 2c) Otherwise, the embedded URL inherits the scheme of
106 // the base URL.
107 scheme: baseParts.scheme,
108 netLoc: relativeParts.netLoc,
109 path: null,
110 params: relativeParts.params,
111 query: relativeParts.query,
112 fragment: relativeParts.fragment
113 };
114 if (!relativeParts.netLoc) {
115 // 3) If the embedded URL's <net_loc> is non-empty, we skip to
116 // Step 7. Otherwise, the embedded URL inherits the <net_loc>
117 // (if any) of the base URL.
118 builtParts.netLoc = baseParts.netLoc;
119 // 4) If the embedded URL path is preceded by a slash "/", the
120 // path is not relative and we skip to Step 7.
121 if (relativeParts.path[0] !== '/') {
122 if (!relativeParts.path) {
123 // 5) If the embedded URL path is empty (and not preceded by a
124 // slash), then the embedded URL inherits the base URL path
125 builtParts.path = baseParts.path;
126 // 5a) if the embedded URL's <params> is non-empty, we skip to
127 // step 7; otherwise, it inherits the <params> of the base
128 // URL (if any) and
129 if (!relativeParts.params) {
130 builtParts.params = baseParts.params;
131 // 5b) if the embedded URL's <query> is non-empty, we skip to
132 // step 7; otherwise, it inherits the <query> of the base
133 // URL (if any) and we skip to step 7.
134 if (!relativeParts.query) {
135 builtParts.query = baseParts.query;
136 }
137 }
138 } else {
139 // 6) The last segment of the base URL's path (anything
140 // following the rightmost slash "/", or the entire path if no
141 // slash is present) is removed and the embedded URL's path is
142 // appended in its place.
143 var baseURLPath = baseParts.path;
144 var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
145 builtParts.path = URLToolkit.normalizePath(newPath);
146 }
147 }
148 }
149 if (builtParts.path === null) {
150 builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
151 }
152 return URLToolkit.buildURLFromParts(builtParts);
153 },
// Split a URL string into its six RFC 1808 components; returns null only
// if the (very permissive) URL_REGEX fails to match at all.
154 parseURL: function parseURL(url) {
155 var parts = URL_REGEX.exec(url);
156 if (!parts) {
157 return null;
158 }
159 return {
160 scheme: parts[1] || '',
161 netLoc: parts[2] || '',
162 path: parts[3] || '',
163 params: parts[4] || '',
164 query: parts[5] || '',
165 fragment: parts[6] || ''
166 };
167 },
168 normalizePath: function normalizePath(path) {
169 // The following operations are
170 // then applied, in order, to the new path:
171 // 6a) All occurrences of "./", where "." is a complete path
172 // segment, are removed.
173 // 6b) If the path ends with "." as a complete path segment,
174 // that "." is removed.
// The path is reversed so the global regexes can strip "./" and
// "<segment>/../" anchored from the right; it is reversed back on return.
175 path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, '');
176 // 6c) All occurrences of "<segment>/../", where <segment> is a
177 // complete path segment not equal to "..", are removed.
178 // Removal of these path segments is performed iteratively,
179 // removing the leftmost matching pattern on each iteration,
180 // until no matching pattern remains.
181 // 6d) If the path ends with "<segment>/..", where <segment> is a
182 // complete path segment not equal to "..", that
183 // "<segment>/.." is removed.
184 while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {} // jshint ignore:line
185 return path.split('').reverse().join('');
186 },
// Reassemble a parts object produced by parseURL into a URL string.
187 buildURLFromParts: function buildURLFromParts(parts) {
188 return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
189 }
190 };
191
192 /* jshint ignore:start */
193 module.exports = URLToolkit;
194 })(commonjsGlobal);
195 /* jshint ignore:end */
196 });
197
// Resolve the global object for the current runtime without assuming a
// browser: prefer `window` (browser), then the bundler-provided
// `commonjsGlobal`, then `self` (workers), falling back to an empty object.
var win = typeof window !== "undefined" ? window
  : typeof commonjsGlobal !== "undefined" ? commonjsGlobal
  : typeof self !== "undefined" ? self
  : {};

var window_1 = win;
211
212 /**
213 * @file resolve-url.js - Handling how URLs are resolved and manipulated
214 */
215
/**
 * Resolve a possibly-relative URL against a base URL.
 *
 * @param {string} baseURL the URL to resolve against; if itself relative
 *        (no "//"), it is first anchored to the current page location
 * @param {string} relativeURL the URL to resolve; returned unchanged when
 *        it already carries a scheme
 * @return {string} the absolute URL
 */
var resolveUrl = function resolveUrl(baseURL, relativeURL) {
  // A URL that starts with a scheme is already absolute.
  var hasScheme = /^[a-z]+:/i.test(relativeURL);

  if (hasScheme) {
    return relativeURL;
  }

  // A base without "//" is itself relative; anchor it to the page first.
  var absoluteBase = /\/\//i.test(baseURL)
    ? baseURL
    : urlToolkit.buildAbsoluteURL(window_1.location.href, baseURL);

  return urlToolkit.buildAbsoluteURL(absoluteBase, relativeURL);
};
229
230 /**
231 * Checks whether xhr request was redirected and returns correct url depending
232 * on `handleManifestRedirects` option
233 *
234 * @api private
235 *
236 * @param {String} url - an url being requested
237 * @param {XMLHttpRequest} req - xhr request result
238 *
239 * @return {String}
240 */
/**
 * Return the URL a manifest request actually resolved to when redirect
 * handling is enabled, otherwise the originally requested URL.
 *
 * @api private
 *
 * @param {boolean} handleManifestRedirect whether redirects should be followed
 * @param {String} url the URL that was requested
 * @param {XMLHttpRequest} req the completed request
 * @return {String} the URL to treat as the manifest's location
 */
var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  // responseURL reflects the final URL after any HTTP redirects:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  if (!handleManifestRedirect) {
    return url;
  }

  var finalUrl = req.responseURL;

  if (finalUrl && finalUrl !== url) {
    return finalUrl;
  }

  return url;
};
251
252 /*! @name m3u8-parser @version 4.4.0 @license Apache-2.0 */
253
// Babel helper: shallow-merge own enumerable properties of every source
// argument onto the first argument. Delegates to native Object.assign when
// available, otherwise uses a hand-rolled fallback; the chosen
// implementation replaces _extends itself so the check runs only once.
function _extends() {
  var assign = Object.assign || function (target) {
    var sources = Array.prototype.slice.call(arguments, 1);

    sources.forEach(function (source) {
      for (var key in source) {
        if (Object.prototype.hasOwnProperty.call(source, key)) {
          target[key] = source[key];
        }
      }
    });

    return target;
  };

  _extends = assign;
  return assign.apply(this, arguments);
}
271
// Babel helper: wire up "loose" inheritance. Instances of subClass inherit
// from superClass.prototype, and static members are looked up through
// subClass's own prototype link to superClass.
function _inheritsLoose(subClass, superClass) {
  var proto = Object.create(superClass.prototype);
  proto.constructor = subClass;
  subClass.prototype = proto;
  subClass.__proto__ = superClass;
}
277
// Babel helper: in a transpiled subclass constructor `this` is void 0 until
// super() has run, so an undefined value here means super() was never called.
function _assertThisInitialized(self) {
  if (self !== void 0) {
    return self;
  }

  throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
}
285
286 /**
287 * @file stream.js
288 */
289
290 /**
291 * A lightweight readable stream implementation that handles event dispatching.
292 *
293 * @class Stream
294 */
/**
 * A lightweight readable stream implementation that handles event dispatching.
 *
 * @class Stream
 */
var Stream =
/*#__PURE__*/
function () {
  function Stream() {
    this.listeners = {};
  }

  var _proto = Stream.prototype;

  /**
   * Add a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener the callback to be invoked when an event of
   * the specified type occurs
   */
  _proto.on = function on(type, listener) {
    if (!this.listeners[type]) {
      this.listeners[type] = [];
    }

    this.listeners[type].push(listener);
  };

  /**
   * Remove a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener a function previously registered for this
   * type of event through `on`
   * @return {boolean} whether the listener was found and removed
   */
  _proto.off = function off(type, listener) {
    if (!this.listeners[type]) {
      return false;
    }

    var index = this.listeners[type].indexOf(listener);

    // BUGFIX: previously an unregistered listener produced splice(-1, 1),
    // which silently removed the LAST registered listener. Guard the miss
    // instead of splicing unconditionally.
    if (index === -1) {
      return false;
    }

    this.listeners[type].splice(index, 1);
    return true;
  };

  /**
   * Trigger an event of the specified type on this stream. Any additional
   * arguments to this function are passed as parameters to event listeners.
   *
   * @param {string} type the event name
   */
  _proto.trigger = function trigger(type) {
    var callbacks = this.listeners[type];

    if (!callbacks) {
      return;
    }

    // Slicing the arguments on every invocation of this method can add a
    // significant amount of overhead. Avoid the intermediate object
    // creation for the common case of a single callback argument.
    if (arguments.length === 2) {
      for (var i = 0, length = callbacks.length; i < length; ++i) {
        callbacks[i].call(this, arguments[1]);
      }
    } else {
      var args = Array.prototype.slice.call(arguments, 1);

      for (var j = 0, len = callbacks.length; j < len; ++j) {
        callbacks[j].apply(this, args);
      }
    }
  };

  /**
   * Destroys the stream and cleans up.
   */
  _proto.dispose = function dispose() {
    this.listeners = {};
  };

  /**
   * Forwards all `data` events on this stream to the destination stream. The
   * destination stream should provide a method `push` to receive the data
   * events as they arrive.
   *
   * @param {Stream} destination the stream that will receive all `data` events
   * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
   */
  _proto.pipe = function pipe(destination) {
    this.on('data', function (data) {
      destination.push(data);
    });
  };

  return Stream;
}();
400
401 /**
402 * A stream that buffers string input and generates a `data` event for each
403 * line.
404 *
405 * @class LineStream
406 * @extends Stream
407 */
408
/**
 * A stream that buffers string input and generates a `data` event for each
 * complete (newline-terminated) line.
 *
 * @class LineStream
 * @extends Stream
 */
var LineStream =
/*#__PURE__*/
function (_Stream) {
  _inheritsLoose(LineStream, _Stream);

  function LineStream() {
    var _this = _Stream.call(this) || this;

    // Holds any trailing partial line until its terminating '\n' arrives.
    _this.buffer = '';
    return _this;
  }

  /**
   * Add new data to be parsed.
   *
   * @param {string} data the text to process
   */
  LineStream.prototype.push = function push(data) {
    this.buffer += data;

    var newlineAt = this.buffer.indexOf('\n');

    // Emit every complete line; the remainder stays buffered for later.
    while (newlineAt > -1) {
      this.trigger('data', this.buffer.substring(0, newlineAt));
      this.buffer = this.buffer.substring(newlineAt + 1);
      newlineAt = this.buffer.indexOf('\n');
    }
  };

  return LineStream;
}(Stream);
442
443 /**
444 * "forgiving" attribute list psuedo-grammar:
445 * attributes -> keyvalue (',' keyvalue)*
446 * keyvalue -> key '=' value
447 * key -> [^=]*
448 * value -> '"' [^"]* '"' | [^,]*
449 */
450
/**
 * Build the separator regex for "forgiving" M3U8 attribute lists:
 * a key is anything up to '=', and a value is either double-quoted (and may
 * then contain commas) or runs to the next comma. The key=value pair is
 * captured so String#split keeps it in the result.
 *
 * @return {RegExp} a fresh separator regex
 */
var attributeSeparator = function attributeSeparator() {
  var keyPattern = '[^=]*';
  var valuePattern = '"[^"]*"|[^,]*';

  return new RegExp('(?:^|,)((?:' + keyPattern + ')=(?:' + valuePattern + '))');
};
457 /**
458 * Parse attributes from a line given the separator
459 *
460 * @param {string} attributes the attribute line to parse
461 */
462
/**
 * Parse an M3U8 attribute list (e.g. 'KEY=value,OTHER="a,b"') into a
 * plain object of trimmed, unquoted string values.
 *
 * @param {string} attributes the attribute line to parse
 * @return {Object} attribute name -> value map
 */
var parseAttributes = function parseAttributes(attributes) {
  // Splitting on the separator leaves each matched "key=value" chunk in a
  // capture-group slot, with empty strings between them.
  var attrs = attributes.split(attributeSeparator());
  var result = {};

  // Walk from the end so that, for duplicate keys, the FIRST occurrence in
  // the input is the one that ends up in the result (last write wins).
  for (var i = attrs.length - 1; i >= 0; i--) {
    // filter out unmatched portions of the string
    if (attrs[i] === '') {
      continue;
    }

    // split the key and value, then trim whitespace and strip optional
    // quotes around the value
    var pair = /([^=]*)=(.*)/.exec(attrs[i]).slice(1);
    var name = pair[0].replace(/^\s+|\s+$/g, '');
    var value = pair[1].replace(/^\s+|\s+$/g, '').replace(/^['"](.*)['"]$/g, '$1');

    result[name] = value;
  }

  return result;
};
487 /**
488 * A line-level M3U8 parser event stream. It expects to receive input one
489 * line at a time and performs a context-free parse of its contents. A stream
490 * interpretation of a manifest can be useful if the manifest is expected to
491 * be too large to fit comfortably into memory or the entirety of the input
492 * is not immediately available. Otherwise, it's probably much easier to work
493 * with a regular `Parser` object.
494 *
495 * Produces `data` events with an object that captures the parser's
496 * interpretation of the input. That object has a property `tag` that is one
497 * of `uri`, `comment`, or `tag`. URIs only have a single additional
498 * property, `line`, which captures the entirety of the input without
499 * interpretation. Comments similarly have a single additional property
500 * `text` which is the input without the leading `#`.
501 *
502 * Tags always have a property `tagType` which is the lower-cased version of
503 * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
504 * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
505 * tags are given the tag type `unknown` and a single additional property
506 * `data` with the remainder of the input.
507 *
508 * @class ParseStream
509 * @extends Stream
510 */
511
512 var ParseStream =
513 /*#__PURE__*/
514 function (_Stream) {
515 _inheritsLoose(ParseStream, _Stream);
516
517 function ParseStream() {
518 var _this;
519
520 _this = _Stream.call(this) || this;
// user-registered hooks, consulted before the built-in tag handling below
521 _this.customParsers = [];
522 _this.tagMappers = [];
523 return _this;
524 }
525 /**
526 * Parses an additional line of input.
527 *
528 * @param {string} line a single line of an M3U8 file to parse
529 */
530
531 var _proto = ParseStream.prototype;
532
// NOTE: the tag regexes below are tried in a fixed order and every handler
// returns after triggering, so each (mapped) line produces at most one
// 'data' event. Do not reorder the checks.
533 _proto.push = function push(line) {
534 var _this2 = this;
535
536 var match;
537 var event; // strip whitespace
538
539 line = line.trim();
540
541 if (line.length === 0) {
542 // ignore empty lines
543 return;
544 } // URIs
545
546
547 if (line[0] !== '#') {
548 this.trigger('data', {
549 type: 'uri',
550 uri: line
551 });
552 return;
553 } // map tags
554
555
// Each registered mapper may translate the tag into an additional line;
// unchanged results are dropped so the original line is only parsed once.
556 var newLines = this.tagMappers.reduce(function (acc, mapper) {
557 var mappedLine = mapper(line); // skip if unchanged
558
559 if (mappedLine === line) {
560 return acc;
561 }
562
563 return acc.concat([mappedLine]);
564 }, [line]);
565 newLines.forEach(function (newLine) {
566 for (var i = 0; i < _this2.customParsers.length; i++) {
567 if (_this2.customParsers[i].call(_this2, newLine)) {
568 return;
569 }
570 } // Comments
571
572
573 if (newLine.indexOf('#EXT') !== 0) {
574 _this2.trigger('data', {
575 type: 'comment',
576 text: newLine.slice(1)
577 });
578
579 return;
580 } // strip off any carriage returns here so the regex matching
581 // doesn't have to account for them.
582
583
// NOTE(review): replace() without /g removes only the FIRST '\r'; a line
// should carry at most a trailing CR, so this appears sufficient — confirm.
584 newLine = newLine.replace('\r', ''); // Tags
585
586 match = /^#EXTM3U/.exec(newLine);
587
588 if (match) {
589 _this2.trigger('data', {
590 type: 'tag',
591 tagType: 'm3u'
592 });
593
594 return;
595 }
596
597 match = /^#EXTINF:?([0-9\.]*)?,?(.*)?$/.exec(newLine);
598
599 if (match) {
600 event = {
601 type: 'tag',
602 tagType: 'inf'
603 };
604
605 if (match[1]) {
606 event.duration = parseFloat(match[1]);
607 }
608
609 if (match[2]) {
610 event.title = match[2];
611 }
612
613 _this2.trigger('data', event);
614
615 return;
616 }
617
618 match = /^#EXT-X-TARGETDURATION:?([0-9.]*)?/.exec(newLine);
619
620 if (match) {
621 event = {
622 type: 'tag',
623 tagType: 'targetduration'
624 };
625
626 if (match[1]) {
627 event.duration = parseInt(match[1], 10);
628 }
629
630 _this2.trigger('data', event);
631
632 return;
633 }
634
635 match = /^#ZEN-TOTAL-DURATION:?([0-9.]*)?/.exec(newLine);
636
637 if (match) {
638 event = {
639 type: 'tag',
640 tagType: 'totalduration'
641 };
642
643 if (match[1]) {
644 event.duration = parseInt(match[1], 10);
645 }
646
647 _this2.trigger('data', event);
648
649 return;
650 }
651
652 match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(newLine);
653
654 if (match) {
655 event = {
656 type: 'tag',
657 tagType: 'version'
658 };
659
660 if (match[1]) {
661 event.version = parseInt(match[1], 10);
662 }
663
664 _this2.trigger('data', event);
665
666 return;
667 }
668
669 match = /^#EXT-X-MEDIA-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);
670
671 if (match) {
672 event = {
673 type: 'tag',
674 tagType: 'media-sequence'
675 };
676
677 if (match[1]) {
678 event.number = parseInt(match[1], 10);
679 }
680
681 _this2.trigger('data', event);
682
683 return;
684 }
685
686 match = /^#EXT-X-DISCONTINUITY-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);
687
688 if (match) {
689 event = {
690 type: 'tag',
691 tagType: 'discontinuity-sequence'
692 };
693
694 if (match[1]) {
695 event.number = parseInt(match[1], 10);
696 }
697
698 _this2.trigger('data', event);
699
700 return;
701 }
702
703 match = /^#EXT-X-PLAYLIST-TYPE:?(.*)?$/.exec(newLine);
704
705 if (match) {
706 event = {
707 type: 'tag',
708 tagType: 'playlist-type'
709 };
710
711 if (match[1]) {
712 event.playlistType = match[1];
713 }
714
715 _this2.trigger('data', event);
716
717 return;
718 }
719
720 match = /^#EXT-X-BYTERANGE:?([0-9.]*)?@?([0-9.]*)?/.exec(newLine);
721
722 if (match) {
723 event = {
724 type: 'tag',
725 tagType: 'byterange'
726 };
727
728 if (match[1]) {
729 event.length = parseInt(match[1], 10);
730 }
731
732 if (match[2]) {
733 event.offset = parseInt(match[2], 10);
734 }
735
736 _this2.trigger('data', event);
737
738 return;
739 }
740
741 match = /^#EXT-X-ALLOW-CACHE:?(YES|NO)?/.exec(newLine);
742
743 if (match) {
744 event = {
745 type: 'tag',
746 tagType: 'allow-cache'
747 };
748
749 if (match[1]) {
750 event.allowed = !/NO/.test(match[1]);
751 }
752
753 _this2.trigger('data', event);
754
755 return;
756 }
757
758 match = /^#EXT-X-MAP:?(.*)$/.exec(newLine);
759
760 if (match) {
761 event = {
762 type: 'tag',
763 tagType: 'map'
764 };
765
766 if (match[1]) {
767 var attributes = parseAttributes(match[1]);
768
769 if (attributes.URI) {
770 event.uri = attributes.URI;
771 }
772
773 if (attributes.BYTERANGE) {
774 var _attributes$BYTERANGE = attributes.BYTERANGE.split('@'),
775 length = _attributes$BYTERANGE[0],
776 offset = _attributes$BYTERANGE[1];
777
778 event.byterange = {};
779
780 if (length) {
781 event.byterange.length = parseInt(length, 10);
782 }
783
784 if (offset) {
785 event.byterange.offset = parseInt(offset, 10);
786 }
787 }
788 }
789
790 _this2.trigger('data', event);
791
792 return;
793 }
794
795 match = /^#EXT-X-STREAM-INF:?(.*)$/.exec(newLine);
796
797 if (match) {
798 event = {
799 type: 'tag',
800 tagType: 'stream-inf'
801 };
802
803 if (match[1]) {
804 event.attributes = parseAttributes(match[1]);
805
806 if (event.attributes.RESOLUTION) {
807 var split = event.attributes.RESOLUTION.split('x');
808 var resolution = {};
809
810 if (split[0]) {
811 resolution.width = parseInt(split[0], 10);
812 }
813
814 if (split[1]) {
815 resolution.height = parseInt(split[1], 10);
816 }
817
// RESOLUTION is rewritten in place from "WxH" string to {width, height}
818 event.attributes.RESOLUTION = resolution;
819 }
820
821 if (event.attributes.BANDWIDTH) {
822 event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
823 }
824
825 if (event.attributes['PROGRAM-ID']) {
826 event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
827 }
828 }
829
830 _this2.trigger('data', event);
831
832 return;
833 }
834
835 match = /^#EXT-X-MEDIA:?(.*)$/.exec(newLine);
836
837 if (match) {
838 event = {
839 type: 'tag',
840 tagType: 'media'
841 };
842
843 if (match[1]) {
844 event.attributes = parseAttributes(match[1]);
845 }
846
847 _this2.trigger('data', event);
848
849 return;
850 }
851
852 match = /^#EXT-X-ENDLIST/.exec(newLine);
853
854 if (match) {
855 _this2.trigger('data', {
856 type: 'tag',
857 tagType: 'endlist'
858 });
859
860 return;
861 }
862
863 match = /^#EXT-X-DISCONTINUITY/.exec(newLine);
864
865 if (match) {
866 _this2.trigger('data', {
867 type: 'tag',
868 tagType: 'discontinuity'
869 });
870
871 return;
872 }
873
874 match = /^#EXT-X-PROGRAM-DATE-TIME:?(.*)$/.exec(newLine);
875
876 if (match) {
877 event = {
878 type: 'tag',
879 tagType: 'program-date-time'
880 };
881
882 if (match[1]) {
883 event.dateTimeString = match[1];
884 event.dateTimeObject = new Date(match[1]);
885 }
886
887 _this2.trigger('data', event);
888
889 return;
890 }
891
892 match = /^#EXT-X-KEY:?(.*)$/.exec(newLine);
893
894 if (match) {
895 event = {
896 type: 'tag',
897 tagType: 'key'
898 };
899
900 if (match[1]) {
901 event.attributes = parseAttributes(match[1]); // parse the IV string into a Uint32Array
902
// NOTE(review): the IV handling assumes a 128-bit value (four 8-hex-digit
// words, optionally "0x"-prefixed); shorter input would yield NaN entries
// in the Uint32Array — confirm upstream guarantees.
903 if (event.attributes.IV) {
904 if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
905 event.attributes.IV = event.attributes.IV.substring(2);
906 }
907
908 event.attributes.IV = event.attributes.IV.match(/.{8}/g);
909 event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
910 event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
911 event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
912 event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
913 event.attributes.IV = new Uint32Array(event.attributes.IV);
914 }
915 }
916
917 _this2.trigger('data', event);
918
919 return;
920 }
921
922 match = /^#EXT-X-START:?(.*)$/.exec(newLine);
923
924 if (match) {
925 event = {
926 type: 'tag',
927 tagType: 'start'
928 };
929
930 if (match[1]) {
931 event.attributes = parseAttributes(match[1]);
932 event.attributes['TIME-OFFSET'] = parseFloat(event.attributes['TIME-OFFSET']);
933 event.attributes.PRECISE = /YES/.test(event.attributes.PRECISE);
934 }
935
936 _this2.trigger('data', event);
937
938 return;
939 }
940
941 match = /^#EXT-X-CUE-OUT-CONT:?(.*)?$/.exec(newLine);
942
943 if (match) {
944 event = {
945 type: 'tag',
946 tagType: 'cue-out-cont'
947 };
948
949 if (match[1]) {
950 event.data = match[1];
951 } else {
952 event.data = '';
953 }
954
955 _this2.trigger('data', event);
956
957 return;
958 }
959
960 match = /^#EXT-X-CUE-OUT:?(.*)?$/.exec(newLine);
961
962 if (match) {
963 event = {
964 type: 'tag',
965 tagType: 'cue-out'
966 };
967
968 if (match[1]) {
969 event.data = match[1];
970 } else {
971 event.data = '';
972 }
973
974 _this2.trigger('data', event);
975
976 return;
977 }
978
979 match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(newLine);
980
981 if (match) {
982 event = {
983 type: 'tag',
984 tagType: 'cue-in'
985 };
986
987 if (match[1]) {
988 event.data = match[1];
989 } else {
990 event.data = '';
991 }
992
993 _this2.trigger('data', event);
994
995 return;
996 } // unknown tag type
997
998
// fallthrough: unrecognized #EXT tag; slice(4) drops the "#EXT" prefix
999 _this2.trigger('data', {
1000 type: 'tag',
1001 data: newLine.slice(4)
1002 });
1003 });
1004 }
1005 /**
1006 * Add a parser for custom headers
1007 *
1008 * @param {Object} options a map of options for the added parser
1009 * @param {RegExp} options.expression a regular expression to match the custom header
1010 * @param {string} options.customType the custom type to register to the output
1011 * @param {Function} [options.dataParser] function to parse the line into an object
1012 * @param {boolean} [options.segment] should tag data be attached to the segment object
1013 */
1014 ;
1015
1016 _proto.addParser = function addParser(_ref) {
1017 var _this3 = this;
1018
1019 var expression = _ref.expression,
1020 customType = _ref.customType,
1021 dataParser = _ref.dataParser,
1022 segment = _ref.segment;
1023
// default to the identity parser when none was provided
1024 if (typeof dataParser !== 'function') {
1025 dataParser = function dataParser(line) {
1026 return line;
1027 };
1028 }
1029
// returning true from the pushed function short-circuits built-in parsing
1030 this.customParsers.push(function (line) {
1031 var match = expression.exec(line);
1032
1033 if (match) {
1034 _this3.trigger('data', {
1035 type: 'custom',
1036 data: dataParser(line),
1037 customType: customType,
1038 segment: segment
1039 });
1040
1041 return true;
1042 }
1043 });
1044 }
1045 /**
1046 * Add a custom header mapper
1047 *
1048 * @param {Object} options
1049 * @param {RegExp} options.expression a regular expression to match the custom header
1050 * @param {Function} options.map function to translate tag into a different tag
1051 */
1052 ;
1053
1054 _proto.addTagMapper = function addTagMapper(_ref2) {
1055 var expression = _ref2.expression,
1056 map = _ref2.map;
1057
// non-matching lines pass through unchanged so push() can skip them
1058 var mapFn = function mapFn(line) {
1059 if (expression.test(line)) {
1060 return map(line);
1061 }
1062
1063 return line;
1064 };
1065
1066 this.tagMappers.push(mapFn);
1067 };
1068
1069 return ParseStream;
1070 }(Stream);
1071
/**
 * Decode a base64 string into a Uint8Array of raw bytes.
 *
 * @param {string} b64Text base64-encoded text; null/undefined is treated
 *        as an empty string and yields an empty array
 * @return {Uint8Array} the decoded bytes
 */
function decodeB64ToUint8Array(b64Text) {
  // atob yields a "binary string": one char per byte, codes 0-255.
  var binary = window_1.atob(b64Text || '');
  var bytes = new Uint8Array(binary.length);

  for (var idx = 0; idx < binary.length; idx++) {
    bytes[idx] = binary.charCodeAt(idx);
  }

  return bytes;
}
1082
1083 /**
1084 * A parser for M3U8 files. The current interpretation of the input is
1085 * exposed as a property `manifest` on parser objects. It's just two lines to
1086 * create and parse a manifest once you have the contents available as a string:
1087 *
1088 * ```js
1089 * var parser = new m3u8.Parser();
1090 * parser.push(xhr.responseText);
1091 * ```
1092 *
1093 * New input can later be applied to update the manifest object by calling
1094 * `push` again.
1095 *
1096 * The parser attempts to create a usable manifest object even if the
1097 * underlying input is somewhat nonsensical. It emits `info` and `warning`
1098 * events during the parse if it encounters input that seems invalid or
1099 * requires some property of the manifest object to be defaulted.
1100 *
1101 * @class Parser
1102 * @extends Stream
1103 */
1104
1105 var Parser =
1106 /*#__PURE__*/
1107 function (_Stream) {
1108 _inheritsLoose(Parser, _Stream);
1109
1110 function Parser() {
1111 var _this;
1112
1113 _this = _Stream.call(this) || this;
1114 _this.lineStream = new LineStream();
1115 _this.parseStream = new ParseStream();
1116
1117 _this.lineStream.pipe(_this.parseStream);
1118 /* eslint-disable consistent-this */
1119
1120 var self = _assertThisInitialized(_this);
1121 /* eslint-enable consistent-this */
1122
1123 var uris = [];
1124 var currentUri = {}; // if specified, the active EXT-X-MAP definition
1125
1126 var currentMap; // if specified, the active decryption key
1127
1128 var _key;
1129
1130 var noop = function noop() {};
1131
1132 var defaultMediaGroups = {
1133 'AUDIO': {},
1134 'VIDEO': {},
1135 'CLOSED-CAPTIONS': {},
1136 'SUBTITLES': {}
1137 }; // This is the Widevine UUID from DASH IF IOP. The same exact string is
1138 // used in MPDs with Widevine encrypted streams.
1139
1140 var widevineUuid = 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'; // group segments into numbered timelines delineated by discontinuities
1141
1142 var currentTimeline = 0; // the manifest is empty until the parse stream begins delivering data
1143
1144 _this.manifest = {
1145 allowCache: true,
1146 discontinuityStarts: [],
1147 segments: []
1148 }; // update the manifest with the m3u8 entry from the parse stream
1149
1150 _this.parseStream.on('data', function (entry) {
1151 var mediaGroup;
1152 var rendition;
1153 ({
1154 tag: function tag() {
1155 // switch based on the tag type
1156 (({
1157 'allow-cache': function allowCache() {
1158 this.manifest.allowCache = entry.allowed;
1159
1160 if (!('allowed' in entry)) {
1161 this.trigger('info', {
1162 message: 'defaulting allowCache to YES'
1163 });
1164 this.manifest.allowCache = true;
1165 }
1166 },
1167 byterange: function byterange() {
1168 var byterange = {};
1169
1170 if ('length' in entry) {
1171 currentUri.byterange = byterange;
1172 byterange.length = entry.length;
1173
1174 if (!('offset' in entry)) {
1175 this.trigger('info', {
1176 message: 'defaulting offset to zero'
1177 });
1178 entry.offset = 0;
1179 }
1180 }
1181
1182 if ('offset' in entry) {
1183 currentUri.byterange = byterange;
1184 byterange.offset = entry.offset;
1185 }
1186 },
1187 endlist: function endlist() {
1188 this.manifest.endList = true;
1189 },
1190 inf: function inf() {
1191 if (!('mediaSequence' in this.manifest)) {
1192 this.manifest.mediaSequence = 0;
1193 this.trigger('info', {
1194 message: 'defaulting media sequence to zero'
1195 });
1196 }
1197
1198 if (!('discontinuitySequence' in this.manifest)) {
1199 this.manifest.discontinuitySequence = 0;
1200 this.trigger('info', {
1201 message: 'defaulting discontinuity sequence to zero'
1202 });
1203 }
1204
1205 if (entry.duration > 0) {
1206 currentUri.duration = entry.duration;
1207 }
1208
1209 if (entry.duration === 0) {
1210 currentUri.duration = 0.01;
1211 this.trigger('info', {
1212 message: 'updating zero segment duration to a small value'
1213 });
1214 }
1215
1216 this.manifest.segments = uris;
1217 },
key: function key() {
  // EXT-X-KEY: establishes (or clears) the encryption key for upcoming segments
  if (!entry.attributes) {
    this.trigger('warn', {
      message: 'ignoring key declaration without attribute list'
    });
    return;
  } // clear the active encryption key


  if (entry.attributes.METHOD === 'NONE') {
    _key = null;
    return;
  }

  if (!entry.attributes.URI) {
    this.trigger('warn', {
      message: 'ignoring key declaration without URI'
    });
    return;
  } // check if the content is encrypted for Widevine
  // Widevine/HLS spec: https://storage.googleapis.com/wvdocs/Widevine_DRM_HLS.pdf


  if (entry.attributes.KEYFORMAT === widevineUuid) {
    var VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR', 'SAMPLE-AES-CENC'];

    if (VALID_METHODS.indexOf(entry.attributes.METHOD) === -1) {
      this.trigger('warn', {
        message: 'invalid key method provided for Widevine'
      });
      return;
    }

    if (entry.attributes.METHOD === 'SAMPLE-AES-CENC') {
      this.trigger('warn', {
        message: 'SAMPLE-AES-CENC is deprecated, please use SAMPLE-AES-CTR instead'
      });
    }

    // Widevine keys must be delivered inline as a base64 data URI
    if (entry.attributes.URI.substring(0, 23) !== 'data:text/plain;base64,') {
      this.trigger('warn', {
        message: 'invalid key URI provided for Widevine'
      });
      return;
    }

    // the key ID must be a hex string prefixed with '0x'
    if (!(entry.attributes.KEYID && entry.attributes.KEYID.substring(0, 2) === '0x')) {
      this.trigger('warn', {
        message: 'invalid key ID provided for Widevine'
      });
      return;
    } // if Widevine key attributes are valid, store them as `contentProtection`
    // on the manifest to emulate Widevine tag structure in a DASH mpd


    this.manifest.contentProtection = {
      'com.widevine.alpha': {
        attributes: {
          schemeIdUri: entry.attributes.KEYFORMAT,
          // remove '0x' from the key id string
          keyId: entry.attributes.KEYID.substring(2)
        },
        // decode the base64-encoded PSSH box
        pssh: decodeB64ToUint8Array(entry.attributes.URI.split(',')[1])
      }
    };
    return;
  }

  if (!entry.attributes.METHOD) {
    this.trigger('warn', {
      message: 'defaulting key method to AES-128'
    });
  } // setup an encryption key for upcoming segments


  _key = {
    method: entry.attributes.METHOD || 'AES-128',
    uri: entry.attributes.URI
  };

  if (typeof entry.attributes.IV !== 'undefined') {
    _key.iv = entry.attributes.IV;
  }
},
1303 'media-sequence': function mediaSequence() {
1304 if (!isFinite(entry.number)) {
1305 this.trigger('warn', {
1306 message: 'ignoring invalid media sequence: ' + entry.number
1307 });
1308 return;
1309 }
1310
1311 this.manifest.mediaSequence = entry.number;
1312 },
1313 'discontinuity-sequence': function discontinuitySequence() {
1314 if (!isFinite(entry.number)) {
1315 this.trigger('warn', {
1316 message: 'ignoring invalid discontinuity sequence: ' + entry.number
1317 });
1318 return;
1319 }
1320
1321 this.manifest.discontinuitySequence = entry.number;
1322 currentTimeline = entry.number;
1323 },
1324 'playlist-type': function playlistType() {
1325 if (!/VOD|EVENT/.test(entry.playlistType)) {
1326 this.trigger('warn', {
1327 message: 'ignoring unknown playlist type: ' + entry.playlist
1328 });
1329 return;
1330 }
1331
1332 this.manifest.playlistType = entry.playlistType;
1333 },
1334 map: function map() {
1335 currentMap = {};
1336
1337 if (entry.uri) {
1338 currentMap.uri = entry.uri;
1339 }
1340
1341 if (entry.byterange) {
1342 currentMap.byterange = entry.byterange;
1343 }
1344 },
1345 'stream-inf': function streamInf() {
1346 this.manifest.playlists = uris;
1347 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
1348
1349 if (!entry.attributes) {
1350 this.trigger('warn', {
1351 message: 'ignoring empty stream-inf attributes'
1352 });
1353 return;
1354 }
1355
1356 if (!currentUri.attributes) {
1357 currentUri.attributes = {};
1358 }
1359
1360 _extends(currentUri.attributes, entry.attributes);
1361 },
media: function media() {
  // EXT-X-MEDIA: describes an alternate rendition (audio, subtitles, ...)
  this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;

  // TYPE, GROUP-ID and NAME are all required to place the rendition
  if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
    this.trigger('warn', {
      message: 'ignoring incomplete or missing media group'
    });
    return;
  } // find the media group, creating defaults as necessary


  var mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
  mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
  mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']]; // collect the rendition metadata

  rendition = {
    default: /yes/i.test(entry.attributes.DEFAULT)
  };

  // a default rendition is always autoselected
  if (rendition.default) {
    rendition.autoselect = true;
  } else {
    rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
  }

  // copy the optional attributes that were present on the tag
  if (entry.attributes.LANGUAGE) {
    rendition.language = entry.attributes.LANGUAGE;
  }

  if (entry.attributes.URI) {
    rendition.uri = entry.attributes.URI;
  }

  if (entry.attributes['INSTREAM-ID']) {
    rendition.instreamId = entry.attributes['INSTREAM-ID'];
  }

  if (entry.attributes.CHARACTERISTICS) {
    rendition.characteristics = entry.attributes.CHARACTERISTICS;
  }

  if (entry.attributes.FORCED) {
    rendition.forced = /yes/i.test(entry.attributes.FORCED);
  } // insert the new rendition


  mediaGroup[entry.attributes.NAME] = rendition;
},
discontinuity: function discontinuity() {
  // EXT-X-DISCONTINUITY: the next segment starts a new timeline
  currentTimeline += 1;
  currentUri.discontinuity = true;
  // record the index of the segment that begins the new timeline
  this.manifest.discontinuityStarts.push(uris.length);
},
'program-date-time': function programDateTime() {
  if (typeof this.manifest.dateTimeString === 'undefined') {
    // PROGRAM-DATE-TIME is a media-segment tag, but for backwards
    // compatibility, we add the first occurence of the PROGRAM-DATE-TIME tag
    // to the manifest object
    // TODO: Consider removing this in future major version
    this.manifest.dateTimeString = entry.dateTimeString;
    this.manifest.dateTimeObject = entry.dateTimeObject;
  }

  // every segment also records its own program date time
  currentUri.dateTimeString = entry.dateTimeString;
  currentUri.dateTimeObject = entry.dateTimeObject;
},
1428 targetduration: function targetduration() {
1429 if (!isFinite(entry.duration) || entry.duration < 0) {
1430 this.trigger('warn', {
1431 message: 'ignoring invalid target duration: ' + entry.duration
1432 });
1433 return;
1434 }
1435
1436 this.manifest.targetDuration = entry.duration;
1437 },
1438 totalduration: function totalduration() {
1439 if (!isFinite(entry.duration) || entry.duration < 0) {
1440 this.trigger('warn', {
1441 message: 'ignoring invalid total duration: ' + entry.duration
1442 });
1443 return;
1444 }
1445
1446 this.manifest.totalDuration = entry.duration;
1447 },
1448 start: function start() {
1449 if (!entry.attributes || isNaN(entry.attributes['TIME-OFFSET'])) {
1450 this.trigger('warn', {
1451 message: 'ignoring start declaration without appropriate attribute list'
1452 });
1453 return;
1454 }
1455
1456 this.manifest.start = {
1457 timeOffset: entry.attributes['TIME-OFFSET'],
1458 precise: entry.attributes.PRECISE
1459 };
1460 },
'cue-out': function cueOut() {
  // raw EXT-X-CUE-OUT data for the current segment (commonly an ad-break start)
  currentUri.cueOut = entry.data;
},
'cue-out-cont': function cueOutCont() {
  // raw EXT-X-CUE-OUT-CONT data (a cue-out in progress) for the current segment
  currentUri.cueOutCont = entry.data;
},
'cue-in': function cueIn() {
  // raw EXT-X-CUE-IN data (end of a cue-out) for the current segment
  currentUri.cueIn = entry.data;
}
1470 })[entry.tagType] || noop).call(self);
1471 },
uri: function uri() {
  // a URI line completes the segment that the preceding tags were building
  currentUri.uri = entry.uri;
  uris.push(currentUri); // if no explicit duration was declared, use the target duration

  if (this.manifest.targetDuration && !('duration' in currentUri)) {
    this.trigger('warn', {
      message: 'defaulting segment duration to the target duration'
    });
    currentUri.duration = this.manifest.targetDuration;
  } // annotate with encryption information, if necessary


  if (_key) {
    currentUri.key = _key;
  }

  currentUri.timeline = currentTimeline; // annotate with initialization segment information, if necessary

  if (currentMap) {
    currentUri.map = currentMap;
  } // prepare for the next URI


  currentUri = {};
},
1497 comment: function comment() {// comments are not important for playback
1498 },
1499 custom: function custom() {
1500 // if this is segment-level data attach the output to the segment
1501 if (entry.segment) {
1502 currentUri.custom = currentUri.custom || {};
1503 currentUri.custom[entry.customType] = entry.data; // if this is manifest-level data attach to the top level manifest object
1504 } else {
1505 this.manifest.custom = this.manifest.custom || {};
1506 this.manifest.custom[entry.customType] = entry.data;
1507 }
1508 }
1509 })[entry.type].call(self);
1510 });
1511
1512 return _this;
1513 }
1514 /**
1515 * Parse the input string and update the manifest object.
1516 *
1517 * @param {string} chunk a potentially incomplete portion of the manifest
1518 */
1519
var _proto = Parser.prototype;

_proto.push = function push(chunk) {
  // forward to the line stream; parsed entries surface via the 'data' handler
  this.lineStream.push(chunk);
}
/**
 * Flush any remaining input. This can be handy if the last line of an M3U8
 * manifest did not contain a trailing newline but the file has been
 * completely received.
 */
;

_proto.end = function end() {
  // flush any buffered input by pushing a synthetic newline
  this.lineStream.push('\n');
}
/**
 * Add an additional parser for non-standard tags
 *
 * @param {Object} options a map of options for the added parser
 * @param {RegExp} options.expression a regular expression to match the custom header
 * @param {string} options.type the type to register to the output
 * @param {Function} [options.dataParser] function to parse the line into an object
 * @param {boolean} [options.segment] should tag data be attached to the segment object
 */
;

_proto.addParser = function addParser(options) {
  // delegate custom tag parsing to the parse stream
  this.parseStream.addParser(options);
}
/**
 * Add a custom header mapper
 *
 * @param {Object} options
 * @param {RegExp} options.expression a regular expression to match the custom header
 * @param {Function} options.map function to translate tag into a different tag
 */
;

_proto.addTagMapper = function addTagMapper(options) {
  // delegate tag mapping to the parse stream
  this.parseStream.addTagMapper(options);
};
1562
1563 return Parser;
1564 }(Stream);
1565
var classCallCheck = function (instance, Constructor) {
  // guard: class-style constructors must be invoked with `new`
  if (instance instanceof Constructor) {
    return;
  }

  throw new TypeError("Cannot call a class as a function");
};
1571
var createClass = function () {
  // normalize a single descriptor (non-enumerable by default, configurable,
  // writable when it is a data property) and install it on the target
  var installProperty = function (target, descriptor) {
    descriptor.enumerable = descriptor.enumerable || false;
    descriptor.configurable = true;

    if ("value" in descriptor) {
      descriptor.writable = true;
    }

    Object.defineProperty(target, descriptor.key, descriptor);
  };

  var defineProperties = function (target, props) {
    for (var i = 0; i < props.length; i++) {
      installProperty(target, props[i]);
    }
  };

  // instance members go on the prototype, static members on the constructor
  return function (Constructor, protoProps, staticProps) {
    if (protoProps) defineProperties(Constructor.prototype, protoProps);
    if (staticProps) defineProperties(Constructor, staticProps);
    return Constructor;
  };
}();
1589
var get = function get(object, property, receiver) {
  // emulate super-property access: walk the prototype chain for `property`
  // and invoke any getter with `receiver` as its `this`
  if (object === null) object = Function.prototype;

  var desc = Object.getOwnPropertyDescriptor(object, property);

  if (desc === undefined) {
    var parent = Object.getPrototypeOf(object);

    // end of the chain: the property does not exist anywhere
    return parent === null ? undefined : get(parent, property, receiver);
  }

  if ("value" in desc) {
    return desc.value;
  }

  var getter = desc.get;

  return getter === undefined ? undefined : getter.call(receiver);
};
1614
var inherits = function (subClass, superClass) {
  // wire up prototype inheritance between two constructor functions
  if (typeof superClass !== "function" && superClass !== null) {
    throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
  }

  subClass.prototype = Object.create(superClass && superClass.prototype, {
    constructor: {
      value: subClass,
      enumerable: false,
      writable: true,
      configurable: true
    }
  });

  // also link the constructors so static members are inherited
  if (superClass) {
    if (Object.setPrototypeOf) {
      Object.setPrototypeOf(subClass, superClass);
    } else {
      subClass.__proto__ = superClass;
    }
  }
};
1630
var possibleConstructorReturn = function (self, call) {
  if (!self) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }

  // a parent constructor may override `this` by returning an object/function
  var isOverride = call && (typeof call === "object" || typeof call === "function");

  return isOverride ? call : self;
};
1638
var slicedToArray = function () {
  // consume up to `limit` values from an iterable, honoring the iterator's
  // `return` method for early termination and re-throwing any iteration error
  function sliceIterator(iterable, limit) {
    var values = [];
    var done = true;
    var errored = false;
    var error;

    try {
      for (var iterator = iterable[Symbol.iterator](), step; !(done = (step = iterator.next()).done); done = true) {
        values.push(step.value);

        if (limit && values.length === limit) break;
      }
    } catch (err) {
      errored = true;
      error = err;
    } finally {
      try {
        if (!done && iterator["return"]) iterator["return"]();
      } finally {
        if (errored) throw error;
      }
    }

    return values;
  }

  return function (arr, i) {
    // arrays are already destructurable; return them untouched
    if (Array.isArray(arr)) {
      return arr;
    }

    if (Symbol.iterator in Object(arr)) {
      return sliceIterator(arr, i);
    }

    throw new TypeError("Invalid attempt to destructure non-iterable instance");
  };
}();
1676
1677 /**
1678 * @file playlist-loader.js
1679 *
1680 * A state machine that manages the loading, caching, and updating of
1681 * M3U8 playlists.
1682 *
1683 */
1684
// pull the video.js utilities used throughout the playlist loader into scope
var mergeOptions = videojs.mergeOptions,
    EventTarget = videojs.EventTarget,
    log = videojs.log;
1688
1689 /**
1690 * Loops through all supported media groups in master and calls the provided
1691 * callback for each group
1692 *
1693 * @param {Object} master
1694 * The parsed master manifest object
1695 * @param {Function} callback
1696 * Callback to call for each media group
1697 */
1698
var forEachMediaGroup = function forEachMediaGroup(master, callback) {
  // only AUDIO and SUBTITLES groups are iterated; other group types are skipped
  ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
    var groups = master.mediaGroups[mediaType];

    for (var groupKey in groups) {
      for (var labelKey in groups[groupKey]) {
        callback(groups[groupKey][labelKey], mediaType, groupKey, labelKey);
      }
    }
  });
};
1710
1711 /**
1712 * Returns a new array of segments that is the result of merging
1713 * properties from an older list of segments onto an updated
1714 * list. No properties on the updated playlist will be overridden.
1715 *
1716 * @param {Array} original the outdated list of segments
1717 * @param {Array} update the updated list of segments
1718 * @param {Number=} offset the index of the first update
1719 * segment in the original segment list. For non-live playlists,
1720 * this should always be zero and does not need to be
1721 * specified. For live playlists, it should be the difference
1722 * between the media sequence numbers in the original and updated
1723 * playlists.
1724 * @return a list of merged segment objects
1725 */
var updateSegments = function updateSegments(original, update, offset) {
  // for non-live playlists the offset is zero; for live playlists it is the
  // media-sequence delta between the two segment lists
  offset = offset || 0;

  var result = update.slice();
  var stop = Math.min(original.length, update.length + offset);

  // merge the overlapping window; properties on `update` win
  for (var i = offset; i < stop; i++) {
    result[i - offset] = mergeOptions(original[i], result[i - offset]);
  }

  return result;
};
1737
var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
  // resolve the segment URI (and its key/init-segment URIs) exactly once
  if (!segment.resolvedUri) {
    segment.resolvedUri = resolveUrl(baseUri, segment.uri);
  }

  var key = segment.key;

  if (key && !key.resolvedUri) {
    key.resolvedUri = resolveUrl(baseUri, key.uri);
  }

  var map = segment.map;

  if (map && !map.resolvedUri) {
    map.resolvedUri = resolveUrl(baseUri, map.uri);
  }
};
1749
1750 /**
1751 * Returns a new master playlist that is the result of merging an
1752 * updated media playlist into the original version. If the
1753 * updated media playlist does not match any of the playlist
1754 * entries in the original master playlist, null is returned.
1755 *
1756 * @param {Object} master a parsed master M3U8 object
1757 * @param {Object} media a parsed media M3U8 object
1758 * @return {Object} a new object that represents the original
1759 * master playlist with the updated media playlist merged in, or
1760 * null if the merge produced no change.
1761 */
var updateMaster = function updateMaster(master, media) {
  // work on a deep copy so the caller's master is never mutated
  var result = mergeOptions(master, {});
  var playlist = result.playlists[media.id];

  if (!playlist) {
    return null;
  }

  // consider the playlist unchanged if the segment count and media sequence
  // match and the playlist has not newly ended
  var playlistUnchanged = playlist.segments && media.segments &&
      playlist.segments.length === media.segments.length &&
      playlist.endList === media.endList &&
      playlist.mediaSequence === media.mediaSequence;

  if (playlistUnchanged) {
    return null;
  }

  var mergedPlaylist = mergeOptions(playlist, media);

  // if the update could overlap existing segment information, merge the two
  // segment lists using the media-sequence delta as the offset
  if (playlist.segments) {
    mergedPlaylist.segments = updateSegments(playlist.segments, media.segments, media.mediaSequence - playlist.mediaSequence);
  }

  // resolve any segment URIs up front so it never has to happen later
  mergedPlaylist.segments.forEach(function (segment) {
    resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
  });

  // TODO Right now in the playlists array there are two references to each playlist, one
  // that is referenced by index, and one by URI. The index reference may no longer be
  // necessary.
  for (var idx = 0; idx < result.playlists.length; idx++) {
    if (result.playlists[idx].id === media.id) {
      result.playlists[idx] = mergedPlaylist;
    }
  }
  result.playlists[media.id] = mergedPlaylist;
  // URI reference added for backwards compatibility
  result.playlists[media.uri] = mergedPlaylist;

  return result;
};
1802
var createPlaylistID = function createPlaylistID(index, uri) {
  // e.g. `0-media.m3u8`; the index keeps IDs unique across duplicate URIs
  return [index, uri].join('-');
};
1806
var setupMediaPlaylists = function setupMediaPlaylists(master) {
  // set up by-id/by-URI lookups and resolve media playlist URIs
  for (var i = master.playlists.length - 1; i >= 0; i--) {
    var playlist = master.playlists[i];

    playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
    playlist.id = createPlaylistID(i, playlist.uri);

    master.playlists[playlist.id] = playlist;
    // URI reference added for backwards compatibility
    master.playlists[playlist.uri] = playlist;

    if (!playlist.attributes) {
      // Although the spec states an #EXT-X-STREAM-INF tag MUST have a
      // BANDWIDTH attribute, we can play the stream without it. This means a poorly
      // formatted master playlist may not have an attribute list. An attributes
      // property is added here to prevent undefined references when we encounter
      // this scenario.
      playlist.attributes = {};

      log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
    }
  }
};
1833
var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
  // resolve each media group's URI against the master playlist's URI
  forEachMediaGroup(master, function (properties) {
    if (properties.uri) {
      properties.resolvedUri = resolveUrl(master.uri, properties.uri);
    }
  });
};
1841
1842 /**
1843 * Calculates the time to wait before refreshing a live playlist
1844 *
1845 * @param {Object} media
1846 * The current media
1847 * @param {Boolean} update
1848 * True if there were any updates from the last refresh, false otherwise
1849 * @return {Number}
1850 * The time in ms to wait before refreshing the live playlist
1851 */
var refreshDelay = function refreshDelay(media, update) {
  var segments = media.segments;
  var lastSegment = segments[segments.length - 1];

  // when the playlist changed and the last segment has a known duration,
  // wait one segment duration before refreshing
  if (update && lastSegment && lastSegment.duration) {
    return lastSegment.duration * 1000;
  }

  // if the playlist is unchanged since the last reload or last segment duration
  // cannot be determined, try again after half the target duration
  return (media.targetDuration || 10) * 500;
};
1865
1866 /**
1867 * Load a playlist from a remote location
1868 *
1869 * @class PlaylistLoader
1870 * @extends Stream
1871 * @param {String} srcUrl the url to start with
1872 * @param {Boolean} withCredentials the withCredentials xhr option
1873 * @constructor
1874 */
1875
1876 var PlaylistLoader = function (_EventTarget) {
1877 inherits(PlaylistLoader, _EventTarget);
1878
function PlaylistLoader(srcUrl, hls) {
  // third argument is an optional options object
  var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
  classCallCheck(this, PlaylistLoader);

  var _this = possibleConstructorReturn(this, (PlaylistLoader.__proto__ || Object.getPrototypeOf(PlaylistLoader)).call(this));

  // both options default to false when not supplied
  var _options$withCredenti = options.withCredentials,
      withCredentials = _options$withCredenti === undefined ? false : _options$withCredenti,
      _options$handleManife = options.handleManifestRedirects,
      handleManifestRedirects = _options$handleManife === undefined ? false : _options$handleManife;


  _this.srcUrl = srcUrl;
  _this.hls_ = hls;
  _this.withCredentials = withCredentials;
  _this.handleManifestRedirects = handleManifestRedirects;

  var hlsOptions = hls.options_;

  // custom m3u8 tag parsers/mappers configured at the player level
  _this.customTagParsers = hlsOptions && hlsOptions.customTagParsers || [];
  _this.customTagMappers = hlsOptions && hlsOptions.customTagMappers || [];

  if (!_this.srcUrl) {
    throw new Error('A non-empty playlist URL is required');
  }

  // initialize the loader state
  _this.state = 'HAVE_NOTHING';

  // live playlist staleness timeout
  _this.on('mediaupdatetimeout', function () {
    if (_this.state !== 'HAVE_METADATA') {
      // only refresh the media playlist if no other activity is going on
      return;
    }

    _this.state = 'HAVE_CURRENT_METADATA';

    // re-request the active media playlist
    _this.request = _this.hls_.xhr({
      uri: resolveUrl(_this.master.uri, _this.media().uri),
      withCredentials: _this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this.request) {
        return;
      }

      if (error) {
        return _this.playlistRequestError(_this.request, _this.media(), 'HAVE_METADATA');
      }

      _this.haveMetadata(_this.request, _this.media().uri, _this.media().id);
    });
  });
  return _this;
}
1935
1936 createClass(PlaylistLoader, [{
1937 key: 'playlistRequestError',
1938 value: function playlistRequestError(xhr, playlist, startingState) {
1939 var uri = playlist.uri,
1940 id = playlist.id;
1941
1942 // any in-flight request is now finished
1943
1944 this.request = null;
1945
1946 if (startingState) {
1947 this.state = startingState;
1948 }
1949
1950 this.error = {
1951 playlist: this.master.playlists[id],
1952 status: xhr.status,
1953 message: 'HLS playlist request error at URL: ' + uri + '.',
1954 responseText: xhr.responseText,
1955 code: xhr.status >= 500 ? 4 : 2
1956 };
1957
1958 this.trigger('error');
1959 }
1960
1961 // update the playlist loader's state in response to a new or
1962 // updated playlist.
1963
1964 }, {
key: 'haveMetadata',
value: function haveMetadata(xhr, url, id) {
  var _this2 = this;

  // any in-flight request is now finished
  this.request = null;
  this.state = 'HAVE_METADATA';

  var parser = new Parser();

  // adding custom tag parsers
  this.customTagParsers.forEach(function (customParser) {
    return parser.addParser(customParser);
  });

  // adding custom tag mappers
  this.customTagMappers.forEach(function (mapper) {
    return parser.addTagMapper(mapper);
  });

  // parse the full response text in one shot
  parser.push(xhr.responseText);
  parser.end();
  parser.manifest.uri = url;
  parser.manifest.id = id;
  // m3u8-parser does not attach an attributes property to media playlists so make
  // sure that the property is attached to avoid undefined reference errors
  parser.manifest.attributes = parser.manifest.attributes || {};

  // merge this playlist into the master
  var update = updateMaster(this.master, parser.manifest);

  this.targetDuration = parser.manifest.targetDuration;

  if (update) {
    this.master = update;
    this.media_ = this.master.playlists[id];
  } else {
    // updateMaster returned null: the playlist did not change
    this.trigger('playlistunchanged');
  }

  // refresh live playlists after a target duration passes
  if (!this.media().endList) {
    window_1.clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = window_1.setTimeout(function () {
      _this2.trigger('mediaupdatetimeout');
    }, refreshDelay(this.media(), !!update));
  }

  this.trigger('loadedplaylist');
}
2015
2016 /**
2017 * Abort any outstanding work and clean up.
2018 */
2019
2020 }, {
key: 'dispose',
value: function dispose() {
  // let listeners react before the loader is torn down
  this.trigger('dispose');
  // abort any in-flight request and cancel pending refresh timers
  this.stopRequest();
  window_1.clearTimeout(this.mediaUpdateTimeout);
  window_1.clearTimeout(this.finalRenditionTimeout);
  // remove all event listeners
  this.off();
}
2029 }, {
2030 key: 'stopRequest',
2031 value: function stopRequest() {
2032 if (this.request) {
2033 var oldRequest = this.request;
2034
2035 this.request = null;
2036 oldRequest.onreadystatechange = null;
2037 oldRequest.abort();
2038 }
2039 }
2040
2041 /**
2042 * When called without any arguments, returns the currently
2043 * active media playlist. When called with a single argument,
2044 * triggers the playlist loader to asynchronously switch to the
2045 * specified media playlist. Calling this method while the
2046 * loader is in the HAVE_NOTHING causes an error to be emitted
2047 * but otherwise has no effect.
2048 *
2049 * @param {Object=} playlist the parsed media playlist
2050 * object to switch to
2051 * @param {Boolean=} is this the last available playlist
2052 *
2053 * @return {Playlist} the current loaded media
2054 */
2055
2056 }, {
key: 'media',
value: function media(playlist, isFinalRendition) {
  var _this3 = this;

  // getter
  if (!playlist) {
    return this.media_;
  }

  // setter
  if (this.state === 'HAVE_NOTHING') {
    throw new Error('Cannot switch media playlist from ' + this.state);
  }

  // find the playlist object if the target playlist has been
  // specified by URI
  if (typeof playlist === 'string') {
    if (!this.master.playlists[playlist]) {
      throw new Error('Unknown playlist URI: ' + playlist);
    }
    playlist = this.master.playlists[playlist];
  }

  window_1.clearTimeout(this.finalRenditionTimeout);

  if (isFinalRendition) {
    // this is the last remaining rendition; retry it after half a target
    // duration (or 5 seconds when the target duration is unknown)
    var delay = playlist.targetDuration / 2 * 1000 || 5 * 1000;

    this.finalRenditionTimeout = window_1.setTimeout(this.media.bind(this, playlist, false), delay);
    return;
  }

  var startingState = this.state;
  var mediaChange = !this.media_ || playlist.id !== this.media_.id;

  // switch to fully loaded playlists immediately
  if (this.master.playlists[playlist.id].endList) {
    // abort outstanding playlist requests
    if (this.request) {
      this.request.onreadystatechange = null;
      this.request.abort();
      this.request = null;
    }
    this.state = 'HAVE_METADATA';
    this.media_ = playlist;

    // trigger media change if the active media has been updated
    if (mediaChange) {
      this.trigger('mediachanging');
      this.trigger('mediachange');
    }
    return;
  }

  // switching to the active playlist is a no-op
  if (!mediaChange) {
    return;
  }

  this.state = 'SWITCHING_MEDIA';

  // there is already an outstanding playlist request
  if (this.request) {
    if (playlist.resolvedUri === this.request.url) {
      // requesting to switch to the same playlist multiple times
      // has no effect after the first
      return;
    }
    this.request.onreadystatechange = null;
    this.request.abort();
    this.request = null;
  }

  // request the new playlist
  if (this.media_) {
    this.trigger('mediachanging');
  }

  this.request = this.hls_.xhr({
    uri: playlist.resolvedUri,
    withCredentials: this.withCredentials
  }, function (error, req) {
    // disposed
    if (!_this3.request) {
      return;
    }

    // follow redirects when configured to do so
    playlist.resolvedUri = resolveManifestRedirect(_this3.handleManifestRedirects, playlist.resolvedUri, req);

    if (error) {
      return _this3.playlistRequestError(_this3.request, playlist, startingState);
    }

    _this3.haveMetadata(req, playlist.uri, playlist.id);

    // fire loadedmetadata the first time a media playlist is loaded
    if (startingState === 'HAVE_MASTER') {
      _this3.trigger('loadedmetadata');
    } else {
      _this3.trigger('mediachange');
    }
  });
}
2160
2161 /**
2162 * pause loading of the playlist
2163 */
2164
2165 }, {
key: 'pause',
value: function pause() {
  // abort any in-flight request and cancel the scheduled refresh
  this.stopRequest();
  window_1.clearTimeout(this.mediaUpdateTimeout);
  if (this.state === 'HAVE_NOTHING') {
    // If we pause the loader before any data has been retrieved, its as if we never
    // started, so reset to an unstarted state.
    this.started = false;
  }
  // Need to restore state now that no activity is happening
  if (this.state === 'SWITCHING_MEDIA') {
    // if the loader was in the process of switching media, it should either return to
    // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
    // playlist yet. This is determined by the existence of loader.media_
    if (this.media_) {
      this.state = 'HAVE_METADATA';
    } else {
      this.state = 'HAVE_MASTER';
    }
  } else if (this.state === 'HAVE_CURRENT_METADATA') {
    // a refresh was in progress; fall back to the steady HAVE_METADATA state
    this.state = 'HAVE_METADATA';
  }
}
2189
2190 /**
2191 * start loading of the playlist
2192 */
2193
2194 }, {
key: 'load',
value: function load(isFinalRendition) {
  var _this4 = this;

  window_1.clearTimeout(this.mediaUpdateTimeout);

  var media = this.media();

  if (isFinalRendition) {
    // this is the last remaining rendition; retry after half a target
    // duration (or 5 seconds when no media playlist has loaded yet)
    var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;

    this.mediaUpdateTimeout = window_1.setTimeout(function () {
      return _this4.load();
    }, delay);
    return;
  }

  // nothing loaded yet: kick off the initial request
  if (!this.started) {
    this.start();
    return;
  }

  if (media && !media.endList) {
    // live playlist: trigger an immediate refresh
    this.trigger('mediaupdatetimeout');
  } else {
    this.trigger('loadedplaylist');
  }
}
2223
2224 /**
2225 * start loading of the playlist
2226 */
2227
2228 }, {
2229 key: 'start',
value: function start() {
      var _this5 = this;

      this.started = true;

      // request the specified URL
      this.request = this.hls_.xhr({
        uri: this.srcUrl,
        withCredentials: this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this5.request) {
          return;
        }

        // clear the loader's request reference
        _this5.request = null;

        if (error) {
          _this5.error = {
            status: req.status,
            message: 'HLS playlist request error at URL: ' + _this5.srcUrl + '.',
            responseText: req.responseText,
            // MEDIA_ERR_NETWORK
            code: 2
          };
          // only return to the unstarted state when nothing was ever loaded,
          // so a later load() can retry the initial request from scratch
          if (_this5.state === 'HAVE_NOTHING') {
            _this5.started = false;
          }
          return _this5.trigger('error');
        }

        var parser = new Parser();

        // adding custom tag parsers
        _this5.customTagParsers.forEach(function (customParser) {
          return parser.addParser(customParser);
        });

        // adding custom tag mappers
        _this5.customTagMappers.forEach(function (mapper) {
          return parser.addTagMapper(mapper);
        });

        parser.push(req.responseText);
        parser.end();

        _this5.state = 'HAVE_MASTER';

        // adopt the final URL when the request was redirected (if redirect
        // handling is enabled) so relative URIs resolve correctly
        _this5.srcUrl = resolveManifestRedirect(_this5.handleManifestRedirects, _this5.srcUrl, req);

        parser.manifest.uri = _this5.srcUrl;

        // loaded a master playlist
        if (parser.manifest.playlists) {
          _this5.master = parser.manifest;

          setupMediaPlaylists(_this5.master);
          resolveMediaGroupUris(_this5.master);

          _this5.trigger('loadedplaylist');
          if (!_this5.request) {
            // no media playlist was specifically selected so start
            // from the first listed one
            _this5.media(parser.manifest.playlists[0]);
          }
          return;
        }

        var id = createPlaylistID(0, _this5.srcUrl);

        // loaded a media playlist
        // infer a master playlist if none was previously requested
        _this5.master = {
          mediaGroups: {
            'AUDIO': {},
            'VIDEO': {},
            'CLOSED-CAPTIONS': {},
            'SUBTITLES': {}
          },
          uri: window_1.location.href,
          playlists: [{
            uri: _this5.srcUrl,
            id: id,
            resolvedUri: _this5.srcUrl,
            // m3u8-parser does not attach an attributes property to media playlists so make
            // sure that the property is attached to avoid undefined reference errors
            attributes: {}
          }]
        };
        // allow the playlist to also be looked up by its generated id
        _this5.master.playlists[id] = _this5.master.playlists[0];
        // URI reference added for backwards compatibility
        _this5.master.playlists[_this5.srcUrl] = _this5.master.playlists[0];

        _this5.haveMetadata(req, _this5.srcUrl, id);
        return _this5.trigger('loadedmetadata');
      });
    }
2328 }]);
2329 return PlaylistLoader;
2330 }(EventTarget);
2331
2332 /**
2333 * @file playlist.js
2334 *
2335 * Playlist related utilities.
2336 */
2337
2338 var createTimeRange = videojs.createTimeRange;
2339
2340 /**
2341 * walk backward until we find a duration we can use
2342 * or return a failure
2343 *
2344 * @param {Playlist} playlist the playlist to walk through
2345 * @param {Number} endSequence the mediaSequence to stop walking on
2346 */
2347
var backwardDuration = function backwardDuration(playlist, endSequence) {
    var accumulated = 0;
    var index = endSequence - playlist.mediaSequence;
    // the segment immediately following the interval may carry precise
    // timing information; prefer it when available
    var segment = playlist.segments[index];

    if (segment) {
      if (typeof segment.start !== 'undefined') {
        return { result: segment.start, precise: true };
      }
      if (typeof segment.end !== 'undefined') {
        return {
          result: segment.end - segment.duration,
          precise: true
        };
      }
    }

    // otherwise, step backwards summing durations until a segment with
    // timeline information earlier than endSequence is found
    while (index--) {
      segment = playlist.segments[index];
      if (typeof segment.end !== 'undefined') {
        return { result: accumulated + segment.end, precise: true };
      }

      accumulated += segment.duration;

      if (typeof segment.start !== 'undefined') {
        return { result: accumulated + segment.start, precise: true };
      }
    }
    // no timeline information found; report the duration-sum estimate
    return { result: accumulated, precise: false };
  };
2382
2383 /**
2384 * walk forward until we find a duration we can use
2385 * or return a failure
2386 *
2387 * @param {Playlist} playlist the playlist to walk through
2388 * @param {Number} endSequence the mediaSequence to stop walking on
2389 */
var forwardDuration = function forwardDuration(playlist, endSequence) {
    var accumulated = 0;
    var segment = void 0;
    var index = endSequence - playlist.mediaSequence;

    // step forward until the earliest segment carrying timeline
    // information is found
    for (; index < playlist.segments.length; index++) {
      segment = playlist.segments[index];
      if (typeof segment.start !== 'undefined') {
        return {
          result: segment.start - accumulated,
          precise: true
        };
      }

      accumulated += segment.duration;

      if (typeof segment.end !== 'undefined') {
        return {
          result: segment.end - accumulated,
          precise: true
        };
      }
    }
    // indicate we didn't find a useful duration estimate
    return { result: -1, precise: false };
  };
2418
2419 /**
2420 * Calculate the media duration from the segments associated with a
2421 * playlist. The duration of a subinterval of the available segments
2422 * may be calculated by specifying an end index.
2423 *
2424 * @param {Object} playlist a media playlist object
2425 * @param {Number=} endSequence an exclusive upper boundary
2426 * for the playlist. Defaults to playlist length.
2427 * @param {Number} expired the amount of time that has dropped
2428 * off the front of the playlist in a live scenario
2429 * @return {Number} the duration between the first available segment
2430 * and end index.
2431 */
var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
    // default to measuring the entire playlist
    if (typeof endSequence === 'undefined') {
      endSequence = playlist.mediaSequence + playlist.segments.length;
    }

    // nothing to measure before the start of the window
    if (endSequence < playlist.mediaSequence) {
      return 0;
    }

    // A backward walk may find timing information supplied directly from
    // the Media Source; such an estimate is exact, so use it as-is.
    var backward = backwardDuration(playlist, endSequence);

    if (backward.precise) {
      return backward.result;
    }

    // A forward walk can also yield an exact position when a later
    // segment's position is known precisely.
    var forward = forwardDuration(playlist, endSequence);

    if (forward.precise) {
      return forward.result;
    }

    // fall back to the less-precise playlist-based estimate, offset by
    // the expired time
    return backward.result + expired;
  };
2465
2466 /**
2467 * Calculates the duration of a playlist. If a start and end index
2468 * are specified, the duration will be for the subset of the media
2469 * timeline between those two indices. The total duration for live
2470 * playlists is always Infinity.
2471 *
2472 * @param {Object} playlist a media playlist object
2473 * @param {Number=} endSequence an exclusive upper
2474 * boundary for the playlist. Defaults to the playlist media
2475 * sequence number plus its length.
2476 * @param {Number=} expired the amount of time that has
2477 * dropped off the front of the playlist in a live scenario
2478 * @return {Number} the duration between the start index and end
2479 * index.
2480 */
var duration = function duration(playlist, endSequence, expired) {
    if (!playlist) {
      return 0;
    }

    var expiredTime = typeof expired === 'number' ? expired : 0;
    var wholePlaylist = typeof endSequence === 'undefined';

    // When the whole playlist is requested, playlist-level duration
    // indicators take precedence over summing segment durations.
    if (wholePlaylist && playlist.totalDuration) {
      // an explicitly provided total wins outright
      return playlist.totalDuration;
    }

    // a live playlist (no EXT-X-ENDLIST) has unbounded duration
    if (wholePlaylist && !playlist.endList) {
      return window_1.Infinity;
    }

    // otherwise derive the duration from the segments themselves
    return intervalDuration(playlist, endSequence, expiredTime);
  };
2507
2508 /**
2509 * Calculate the time between two indexes in the current playlist
 * Neither the start- nor the end-index need to be within the current
2511 * playlist in which case, the targetDuration of the playlist is used
2512 * to approximate the durations of the segments
2513 *
2514 * @param {Object} playlist a media playlist object
2515 * @param {Number} startIndex
2516 * @param {Number} endIndex
2517 * @return {Number} the number of seconds between startIndex and endIndex
2518 */
var sumDurations = function sumDurations(playlist, startIndex, endIndex) {
    // normalize so we always walk from the lower index to the higher one
    if (startIndex > endIndex) {
      var swap = startIndex;

      startIndex = endIndex;
      endIndex = swap;
    }

    var total = 0;

    // indices before the start of the playlist are approximated with the
    // playlist's target duration
    if (startIndex < 0) {
      var stop = Math.min(0, endIndex);

      for (var before = startIndex; before < stop; before++) {
        total += playlist.targetDuration;
      }
      startIndex = 0;
    }

    for (var index = startIndex; index < endIndex; index++) {
      total += playlist.segments[index].duration;
    }

    return total;
  };
2541
2542 /**
2543 * Determines the media index of the segment corresponding to the safe edge of the live
2544 * window which is the duration of the last segment plus 2 target durations from the end
2545 * of the playlist.
2546 *
2547 * A liveEdgePadding can be provided which will be used instead of calculating the safe live edge.
2548 * This corresponds to suggestedPresentationDelay in DASH manifests.
2549 *
2550 * @param {Object} playlist
2551 * a media playlist object
2552 * @param {Number} [liveEdgePadding]
2553 * A number in seconds indicating how far from the end we want to be.
2554 * If provided, this value is used instead of calculating the safe live index from the target durations.
2555 * Corresponds to suggestedPresentationDelay in DASH manifests.
2556 * @return {Number}
2557 * The media index of the segment at the safe live point. 0 if there is no "safe"
2558 * point.
2559 * @function safeLiveIndex
2560 */
var safeLiveIndex = function safeLiveIndex(playlist, liveEdgePadding) {
    if (!playlist.segments.length) {
      return 0;
    }

    var index = playlist.segments.length;
    var lastDuration = playlist.segments[index - 1].duration || playlist.targetDuration;
    // Without an explicit padding, stay the last segment's duration plus
    // two target durations away from the live edge.
    var safeDistance = typeof liveEdgePadding === 'number' ? liveEdgePadding : lastDuration + playlist.targetDuration * 2;

    // a zero padding means the live edge itself is considered safe
    if (safeDistance === 0) {
      return index;
    }

    var distanceFromEnd = 0;

    // walk backwards until we are at least safeDistance from the end
    while (index--) {
      distanceFromEnd += playlist.segments[index].duration;

      if (distanceFromEnd >= safeDistance) {
        break;
      }
    }

    return Math.max(0, index);
  };
2586
2587 /**
2588 * Calculates the playlist end time
2589 *
2590 * @param {Object} playlist a media playlist object
2591 * @param {Number=} expired the amount of time that has
2592 * dropped off the front of the playlist in a live scenario
2593 * @param {Boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
2594 * playlist end calculation should consider the safe live end
2595 * (truncate the playlist end by three segments). This is normally
2596 * used for calculating the end of the playlist's seekable range.
2597 * This takes into account the value of liveEdgePadding.
2598 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
2599 * @param {Number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
2600 * If this is provided, it is used in the safe live end calculation.
2601 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
2602 * Corresponds to suggestedPresentationDelay in DASH manifests.
2603 * @returns {Number} the end time of playlist
2604 * @function playlistEnd
2605 */
var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
    if (!playlist || !playlist.segments) {
      return null;
    }

    // a finished (VOD) playlist ends at its total duration
    if (playlist.endList) {
      return duration(playlist);
    }

    // an explicitly-null expired time means the end cannot be computed yet
    if (expired === null) {
      return null;
    }

    var expiredTime = expired || 0;

    // live: optionally back off from the raw edge to a safe segment index
    var endSequence = playlist.segments.length;

    if (useSafeLiveEnd) {
      endSequence = safeLiveIndex(playlist, liveEdgePadding);
    }

    return intervalDuration(playlist, playlist.mediaSequence + endSequence, expiredTime);
  };
2624
2625 /**
2626 * Calculates the interval of time that is currently seekable in a
2627 * playlist. The returned time ranges are relative to the earliest
2628 * moment in the specified playlist that is still available. A full
2629 * seekable implementation for live streams would need to offset
2630 * these values by the duration of content that has expired from the
2631 * stream.
2632 *
 * @param {Object} playlist a media playlist object
2635 * @param {Number=} expired the amount of time that has
2636 * dropped off the front of the playlist in a live scenario
2637 * @param {Number} liveEdgePadding how far from the end of the playlist we should be in seconds.
2638 * Corresponds to suggestedPresentationDelay in DASH manifests.
2639 * @return {TimeRanges} the periods of time that are valid targets
2640 * for seeking
2641 */
var seekable = function seekable(playlist, expired, liveEdgePadding) {
    // always respect the safe live point when computing the seekable end
    var end = playlistEnd(playlist, expired, true, liveEdgePadding);

    if (end === null) {
      // no usable end time means nothing is seekable
      return createTimeRange();
    }

    var start = expired || 0;

    return createTimeRange(start, end);
  };
2652
// true when num has no fractional component
  var isWholeNumber = function isWholeNumber(num) {
    var fractional = num - Math.floor(num);

    return fractional === 0;
  };

  // Nudge the least significant decimal digit of num by increment
  // (+1 or -1); whole numbers are nudged by a tenth instead.
  var roundSignificantDigit = function roundSignificantDigit(increment, num) {
    // whole numbers have no least significant decimal digit
    if (isWholeNumber(num)) {
      return num + increment * 0.1;
    }

    var digits = num.toString().split('.')[1].length;

    for (var place = 1; place <= digits; place++) {
      var scale = Math.pow(10, place);
      var scaled = num * scale;

      // stop once scaling produced a whole number or every digit is consumed
      if (isWholeNumber(scaled) || place === digits) {
        return (scaled + increment) / scale;
      }
    }
  };

  // round the least significant digit up…
  var ceilLeastSignificantDigit = roundSignificantDigit.bind(null, 1);
  // …or down
  var floorLeastSignificantDigit = roundSignificantDigit.bind(null, -1);
2677
2678 /**
2679 * Determine the index and estimated starting time of the segment that
2680 * contains a specified playback position in a media playlist.
2681 *
2682 * @param {Object} playlist the media playlist to query
2683 * @param {Number} currentTime The number of seconds since the earliest
2684 * possible position to determine the containing segment for
2685 * @param {Number} startIndex
2686 * @param {Number} startTime
2687 * @return {Object}
2688 */
var getMediaInfoForTime = function getMediaInfoForTime(playlist, currentTime, startIndex, startTime) {
    var i = void 0;
    var segment = void 0;
    var numSegments = playlist.segments.length;

    // offset of currentTime from the known start of segment startIndex
    var time = currentTime - startTime;

    if (time < 0) {
      // Walk backward from startIndex in the playlist, adding durations
      // until we find a segment that contains `time` and return it
      if (startIndex > 0) {
        for (i = startIndex - 1; i >= 0; i--) {
          segment = playlist.segments[i];
          // floor the least-significant digit of the duration — presumably
          // to absorb tiny floating-point error in summed durations
          // (NOTE(review): confirm intent)
          time += floorLeastSignificantDigit(segment.duration);
          if (time > 0) {
            return {
              mediaIndex: i,
              startTime: startTime - sumDurations(playlist, startIndex, i)
            };
          }
        }
      }
      // We were unable to find a good segment within the playlist
      // so select the first segment
      return {
        mediaIndex: 0,
        startTime: currentTime
      };
    }

    // When startIndex is negative, we first walk forward to first segment
    // adding target durations. If we "run out of time" before getting to
    // the first segment, return the first segment
    if (startIndex < 0) {
      for (i = startIndex; i < 0; i++) {
        time -= playlist.targetDuration;
        if (time < 0) {
          return {
            mediaIndex: 0,
            startTime: currentTime
          };
        }
      }
      startIndex = 0;
    }

    // Walk forward from startIndex in the playlist, subtracting durations
    // until we find a segment that contains `time` and return it
    for (i = startIndex; i < numSegments; i++) {
      segment = playlist.segments[i];
      // ceil mirrors the floor used in the backward walk above
      time -= ceilLeastSignificantDigit(segment.duration);
      if (time < 0) {
        return {
          mediaIndex: i,
          startTime: startTime + sumDurations(playlist, startIndex, i)
        };
      }
    }

    // We are out of possible candidates so load the last one...
    return {
      mediaIndex: numSegments - 1,
      startTime: currentTime
    };
  };
2754
2755 /**
2756 * Check whether the playlist is blacklisted or not.
2757 *
2758 * @param {Object} playlist the media playlist object
2759 * @return {boolean} whether the playlist is blacklisted or not
2760 * @function isBlacklisted
2761 */
var isBlacklisted = function isBlacklisted(playlist) {
    if (!playlist.excludeUntil) {
      // preserve the original falsy value (undefined/null/0)
      return playlist.excludeUntil;
    }

    // blacklisted while the exclusion deadline is still in the future
    return playlist.excludeUntil > Date.now();
  };
2765
2766 /**
2767 * Check whether the playlist is compatible with current playback configuration or has
2768 * been blacklisted permanently for being incompatible.
2769 *
2770 * @param {Object} playlist the media playlist object
2771 * @return {boolean} whether the playlist is incompatible or not
2772 * @function isIncompatible
2773 */
var isIncompatible = function isIncompatible(playlist) {
    if (!playlist.excludeUntil) {
      // preserve the original falsy value (undefined/null/0)
      return playlist.excludeUntil;
    }

    // a permanent blacklist is flagged with an Infinite excludeUntil
    return playlist.excludeUntil === Infinity;
  };
2777
2778 /**
2779 * Check whether the playlist is enabled or not.
2780 *
2781 * @param {Object} playlist the media playlist object
2782 * @return {boolean} whether the playlist is enabled or not
2783 * @function isEnabled
2784 */
var isEnabled = function isEnabled(playlist) {
    // enabled means neither manually disabled nor currently blacklisted
    return !playlist.disabled && !isBlacklisted(playlist);
  };
2790
2791 /**
2792 * Check whether the playlist has been manually disabled through the representations api.
2793 *
2794 * @param {Object} playlist the media playlist object
2795 * @return {boolean} whether the playlist is disabled manually or not
2796 * @function isDisabled
2797 */
var isDisabled = function isDisabled(playlist) {
    // returns the raw `disabled` flag (may be undefined), not a coerced boolean
    return playlist.disabled;
  };
2801
2802 /**
2803 * Returns whether the current playlist is an AES encrypted HLS stream
2804 *
2805 * @return {Boolean} true if it's an AES encrypted HLS stream
2806 */
var isAes = function isAes(media) {
    // the stream is AES encrypted when any segment carries key information
    return media.segments.some(function (segment) {
      return Boolean(segment.key);
    });
  };
2815
2816 /**
2817 * Returns whether the current playlist contains fMP4
2818 *
2819 * @return {Boolean} true if the playlist contains fMP4
2820 */
var isFmp4 = function isFmp4(media) {
    // fMP4 segments reference an init segment via their map property
    return media.segments.some(function (segment) {
      return Boolean(segment.map);
    });
  };
2829
2830 /**
2831 * Checks if the playlist has a value for the specified attribute
2832 *
2833 * @param {String} attr
2834 * Attribute to check for
2835 * @param {Object} playlist
2836 * The media playlist object
2837 * @return {Boolean}
2838 * Whether the playlist contains a value for the attribute or not
2839 * @function hasAttribute
2840 */
var hasAttribute = function hasAttribute(attr, playlist) {
    if (!playlist.attributes) {
      // preserve the original falsy value when attributes is absent
      return playlist.attributes;
    }

    return playlist.attributes[attr];
  };
2844
2845 /**
2846 * Estimates the time required to complete a segment download from the specified playlist
2847 *
2848 * @param {Number} segmentDuration
2849 * Duration of requested segment
2850 * @param {Number} bandwidth
2851 * Current measured bandwidth of the player
2852 * @param {Object} playlist
2853 * The media playlist object
2854 * @param {Number=} bytesReceived
2855 * Number of bytes already received for the request. Defaults to 0
2856 * @return {Number|NaN}
2857 * The estimated time to request the segment. NaN if bandwidth information for
2858 * the given playlist is unavailable
2859 * @function estimateSegmentRequestTime
2860 */
var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist) {
    var bytesReceived = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 0;

    // without a BANDWIDTH attribute no size estimate is possible
    if (!hasAttribute('BANDWIDTH', playlist)) {
      return NaN;
    }

    // estimated segment size in bits, less what has already arrived
    var bitsRemaining = segmentDuration * playlist.attributes.BANDWIDTH - bytesReceived * 8;

    return bitsRemaining / bandwidth;
  };
2872
2873 /*
2874 * Returns whether the current playlist is the lowest rendition
2875 *
2876 * @return {Boolean} true if on lowest rendition
2877 */
/*
   * Returns whether the current playlist is the lowest-bandwidth enabled
   * rendition.
   *
   * @param {Object} master the master playlist object
   * @param {Object} media the currently selected media playlist object
   * @return {Boolean} true if no other enabled playlist has a lower bandwidth
   */
  var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
    // a single rendition is trivially the lowest
    if (master.playlists.length === 1) {
      return true;
    }

    // Guard the attributes access: media playlists are not guaranteed to
    // carry an attributes object (m3u8-parser omits it), so fall back
    // safely instead of throwing. Missing/zero bandwidth is treated as
    // "highest possible" for the current playlist.
    var currentBandwidth = media.attributes && media.attributes.BANDWIDTH || Number.MAX_VALUE;

    // lowest iff no other enabled playlist advertises a smaller bandwidth
    return master.playlists.filter(function (playlist) {
      if (!isEnabled(playlist)) {
        return false;
      }

      return (playlist.attributes && playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
    }).length === 0;
  };
2893
// exports
  // Playlist: the utility surface for working with media playlist objects —
  // duration math, seekable ranges, rendition state checks, and segment
  // request-time estimation. All members are defined above in this module.
  var Playlist = {
    duration: duration,
    seekable: seekable,
    safeLiveIndex: safeLiveIndex,
    getMediaInfoForTime: getMediaInfoForTime,
    isEnabled: isEnabled,
    isDisabled: isDisabled,
    isBlacklisted: isBlacklisted,
    isIncompatible: isIncompatible,
    playlistEnd: playlistEnd,
    isAes: isAes,
    isFmp4: isFmp4,
    hasAttribute: hasAttribute,
    estimateSegmentRequestTime: estimateSegmentRequestTime,
    isLowestEnabledRendition: isLowestEnabledRendition
  };
2911
2912 /**
2913 * @file xhr.js
2914 */
2915
2916 var videojsXHR = videojs.xhr,
2917 mergeOptions$1 = videojs.mergeOptions;
2918
2919
// Builds the XHR wrapper used for all HLS requests: applies a default
  // timeout, runs the optional beforeRequest hook, records timing/bandwidth
  // statistics on the request, and normalizes error/abort behavior.
  var xhrFactory = function xhrFactory() {
    var xhr = function XhrFunction(options, callback) {
      // Add a default timeout for all hls requests
      options = mergeOptions$1({
        timeout: 45e3
      }, options);

      // Allow an optional user-specified function to modify the option
      // object before we construct the xhr request
      var beforeRequest = XhrFunction.beforeRequest || videojs.Hls.xhr.beforeRequest;

      if (beforeRequest && typeof beforeRequest === 'function') {
        var newOptions = beforeRequest(options);

        // only adopt the hook's result when it actually returned something
        if (newOptions) {
          options = newOptions;
        }
      }

      var request = videojsXHR(options, function (error, response) {
        var reqResponse = request.response;

        // annotate the request with the timing/size statistics consumed by
        // bandwidth estimation elsewhere in the library
        if (!error && reqResponse) {
          request.responseTime = Date.now();
          request.roundTripTime = request.responseTime - request.requestTime;
          request.bytesReceived = reqResponse.byteLength || reqResponse.length;
          if (!request.bandwidth) {
            // bits per second: bytes / ms * 8 bits * 1000 ms/s
            request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
          }
        }

        if (response.headers) {
          request.responseHeaders = response.headers;
        }

        // videojs.xhr now uses a specific code on the error
        // object to signal that a request has timed out instead
        // of setting a boolean on the request object
        if (error && error.code === 'ETIMEDOUT') {
          request.timedout = true;
        }

        // videojs.xhr no longer considers status codes outside of 200 and 0
        // (for file uris) to be errors, but the old XHR did, so emulate that
        // behavior. Status 206 may be used in response to byterange requests.
        if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
          error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
        }

        callback(error, request);
      });
      var originalAbort = request.abort;

      // wrap abort so aborted requests can be distinguished from failures
      request.abort = function () {
        request.aborted = true;
        return originalAbort.apply(request, arguments);
      };
      request.uri = options.uri;
      request.requestTime = Date.now();
      return request;
    };

    return xhr;
  };
2984
2985 /**
2986 * Turns segment byterange into a string suitable for use in
2987 * HTTP Range requests
2988 *
2989 * @param {Object} byterange - an object with two values defining the start and end
2990 * of a byte-range
2991 */
var byterangeStr = function byterangeStr(byterange) {
    // the HTTP Range header uses inclusive bounds, so the last byte is
    // offset + length - 1
    var firstByte = byterange.offset;
    var lastByte = byterange.offset + byterange.length - 1;

    return 'bytes=' + firstByte + '-' + lastByte;
  };
3002
3003 /**
3004 * Defines headers for use in the xhr request for a particular segment.
3005 *
3006 * @param {Object} segment - a simplified copy of the segmentInfo object
3007 * from SegmentLoader
3008 */
var segmentXhrHeaders = function segmentXhrHeaders(segment) {
    // only byterange segments need an explicit Range header
    if (!segment.byterange) {
      return {};
    }

    return { Range: byterangeStr(segment.byterange) };
  };
3017
3018 /*
3019 * pkcs7.pad
3020 * https://github.com/brightcove/pkcs7
3021 *
3022 * Copyright (c) 2014 Brightcove
3023 * Licensed under the apache2 license.
3024 */
3025
3026 /**
3027 * Returns the subarray of a Uint8Array without PKCS#7 padding.
3028 * @param padded {Uint8Array} unencrypted bytes that have been padded
3029 * @return {Uint8Array} the unpadded bytes
3030 * @see http://tools.ietf.org/html/rfc5652
3031 */
function unpad(padded) {
    // PKCS#7: the final byte records how many padding bytes were appended
    var paddingLength = padded[padded.byteLength - 1];

    return padded.subarray(0, padded.byteLength - paddingLength);
  }
3035
var classCallCheck$1 = function classCallCheck(instance, Constructor) {
    // guard against a class constructor being invoked without `new`
    if (instance instanceof Constructor) {
      return;
    }

    throw new TypeError("Cannot call a class as a function");
  };
3041
var createClass$1 = function () {
    // normalize a { key, ... } descriptor and install it on the target
    function defineProperty(target, descriptor) {
      descriptor.enumerable = descriptor.enumerable || false;
      descriptor.configurable = true;
      // data descriptors (but not accessors) are made writable
      if ("value" in descriptor) {
        descriptor.writable = true;
      }
      Object.defineProperty(target, descriptor.key, descriptor);
    }

    function defineProperties(target, props) {
      for (var idx = 0; idx < props.length; idx++) {
        defineProperty(target, props[idx]);
      }
    }

    // install prototype (instance) and static members on a constructor
    return function (Constructor, protoProps, staticProps) {
      if (protoProps) {
        defineProperties(Constructor.prototype, protoProps);
      }
      if (staticProps) {
        defineProperties(Constructor, staticProps);
      }
      return Constructor;
    };
  }();
3059
var inherits$1 = function inherits(subClass, superClass) {
    // the parent must be a constructor function (or explicitly null)
    if (typeof superClass !== "function" && superClass !== null) {
      throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
    }

    // wire up the prototype chain with a non-enumerable constructor back-reference
    subClass.prototype = Object.create(superClass && superClass.prototype, {
      constructor: {
        value: subClass,
        enumerable: false,
        writable: true,
        configurable: true
      }
    });

    // also link static members by pointing the constructor at its parent
    if (superClass) {
      if (Object.setPrototypeOf) {
        Object.setPrototypeOf(subClass, superClass);
      } else {
        subClass.__proto__ = superClass;
      }
    }
  };
3075
var possibleConstructorReturn$1 = function possibleConstructorReturn(self, call) {
    if (!self) {
      throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
    }

    // a constructor may override `this` by returning an object or function
    var overrides = call && (typeof call === "object" || typeof call === "function");

    return overrides ? call : self;
  };
3083
3084 /**
3085 * @file aes.js
3086 *
3087 * This file contains an adaptation of the AES decryption algorithm
3088 * from the Standford Javascript Cryptography Library. That work is
3089 * covered by the following copyright and permissions notice:
3090 *
3091 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
3092 * All rights reserved.
3093 *
3094 * Redistribution and use in source and binary forms, with or without
3095 * modification, are permitted provided that the following conditions are
3096 * met:
3097 *
3098 * 1. Redistributions of source code must retain the above copyright
3099 * notice, this list of conditions and the following disclaimer.
3100 *
3101 * 2. Redistributions in binary form must reproduce the above
3102 * copyright notice, this list of conditions and the following
3103 * disclaimer in the documentation and/or other materials provided
3104 * with the distribution.
3105 *
3106 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
3107 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
3108 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
3109 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
3110 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
3111 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
3112 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
3113 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
3114 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
3115 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
3116 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
3117 *
3118 * The views and conclusions contained in the software and documentation
3119 * are those of the authors and should not be interpreted as representing
3120 * official policies, either expressed or implied, of the authors.
3121 */
3122
3123 /**
3124 * Expand the S-box tables.
3125 *
3126 * @private
3127 */
var precompute = function precompute() {
    // tables[0] holds encryption tables, tables[1] decryption tables;
    // index 4 of each is the (inverse) S-box itself, indices 0-3 are the
    // combined S-box + MixColumns lookup tables (see the AES class below)
    var tables = [[[], [], [], [], []], [[], [], [], [], []]];
    var encTable = tables[0];
    var decTable = tables[1];
    var sbox = encTable[4];
    var sboxInv = decTable[4];
    var i = void 0;
    var x = void 0;
    var xInv = void 0;
    var d = [];
    var th = [];
    var x2 = void 0;
    var x4 = void 0;
    var x8 = void 0;
    var s = void 0;
    var tEnc = void 0;
    var tDec = void 0;

    // Compute double and third tables
    for (i = 0; i < 256; i++) {
      th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
    }

    // iterate x (and its companion xInv) until every S-box entry is filled;
    // the loop-update clause advances both values each pass
    for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
      // Compute sbox
      s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
      s = s >> 8 ^ s & 255 ^ 99;
      sbox[x] = s;
      sboxInv[s] = x;

      // Compute MixColumns
      x8 = d[x4 = d[x2 = d[x]]];
      tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
      tEnc = d[s] * 0x101 ^ s * 0x1010100;

      // each of the four tables is a byte rotation of the previous one
      for (i = 0; i < 4; i++) {
        encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
        decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
      }
    }

    // Compactify. Considerable speedup on Firefox.
    for (i = 0; i < 5; i++) {
      encTable[i] = encTable[i].slice(0);
      decTable[i] = decTable[i].slice(0);
    }
    return tables;
  };
  // lazily-initialized shared copy of the precomputed tables; populated on
  // first AES construction and copied per-instance from then on
  var aesTables = null;
3177
/**
 * Schedule out an AES key for both encryption and decryption. This
 * is a low-level class. Use a cipher mode to do bulk encryption.
 *
 * @class AES
 * @param key {Array} The key as an array of 4, 6 or 8 words.
 */
var AES = function () {
  function AES(key) {
    classCallCheck$1(this, AES);

    /**
     * The expanded S-box and inverse S-box tables, computed on the client
     * so they don't have to be sent down the wire.
     *
     * _tables[0] is for encryption and _tables[1] is for decryption; the
     * first 4 sub-tables of each are the expanded S-box with MixColumns,
     * and the last (_tables[01][4]) is the S-box itself.
     *
     * @private
     */
    // compute the shared tables on first use...
    if (!aesTables) {
      aesTables = precompute();
    }
    // ...then take a private copy so instances can't clobber each other
    this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];

    var sbox = this._tables[0][4];
    var decTable = this._tables[1];
    var keyLen = key.length;
    var rcon = 1;
    var i;
    var j;
    var word;

    if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
      throw new Error('Invalid aes key size');
    }

    var encKey = key.slice(0);
    var decKey = [];

    this._key = [encKey, decKey];

    // expand the encryption key schedule
    for (i = keyLen; i < 4 * keyLen + 28; i++) {
      word = encKey[i - 1];

      // run the word through the S-box at the start of each round (and
      // mid-round for 256-bit keys)
      if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
        word = sbox[word >>> 24] << 24 ^ sbox[word >> 16 & 255] << 16 ^ sbox[word >> 8 & 255] << 8 ^ sbox[word & 255];

        // rotate the word and mix in the round constant
        if (i % keyLen === 0) {
          word = word << 8 ^ word >>> 24 ^ rcon << 24;
          rcon = rcon << 1 ^ (rcon >> 7) * 283;
        }
      }

      encKey[i] = encKey[i - keyLen] ^ word;
    }

    // derive the decryption schedule by walking the encryption schedule
    // backwards, transforming the middle-round words through the inverse
    // tables
    for (j = 0; i; j++, i--) {
      word = encKey[j & 3 ? i : i - 4];
      if (i <= 4 || j < 4) {
        decKey[j] = word;
      } else {
        decKey[j] = decTable[0][sbox[word >>> 24]] ^ decTable[1][sbox[word >> 16 & 255]] ^ decTable[2][sbox[word >> 8 & 255]] ^ decTable[3][sbox[word & 255]];
      }
    }
  }

  /**
   * Decrypt 16 bytes, specified as four 32-bit words.
   *
   * @param {Number} encrypted0 the first word to decrypt
   * @param {Number} encrypted1 the second word to decrypt
   * @param {Number} encrypted2 the third word to decrypt
   * @param {Number} encrypted3 the fourth word to decrypt
   * @param {Int32Array} out the array to write the decrypted words into
   * @param {Number} offset the offset into the output array to start
   * writing results
   * @return {Array} The plaintext.
   */
  AES.prototype.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
    var key = this._key[1];

    // pre-whitened state; words 1 and 3 are swapped to match the column
    // ordering the inverse tables expect
    var a = encrypted0 ^ key[0];
    var b = encrypted3 ^ key[1];
    var c = encrypted2 ^ key[2];
    var d = encrypted1 ^ key[3];

    var nInnerRounds = key.length / 4 - 2;
    var kIndex = 4;
    var table = this._tables[1];

    // load up the tables
    var table0 = table[0];
    var table1 = table[1];
    var table2 = table[2];
    var table3 = table[3];
    var sbox = table[4];

    var a2;
    var b2;
    var c2;
    var i;

    // inner rounds -- cribbed from OpenSSL
    for (i = 0; i < nInnerRounds; i++) {
      a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
      b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
      c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
      d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
      kIndex += 4;
      a = a2;
      b = b2;
      c = c2;
    }

    // last round: plain S-box substitution, with the output words written
    // in reverse order via the (3 & -i) index trick
    for (i = 0; i < 4; i++) {
      out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
      a2 = a;
      a = b;
      b = c;
      c = d;
      d = a2;
    }
  };

  return AES;
}();
3313
/**
 * @file stream.js
 */
/**
 * A lightweight readable stream implemention that handles event dispatching.
 *
 * @class Stream
 */
var Stream$1 = function () {
  function Stream() {
    classCallCheck$1(this, Stream);

    // map of event type -> array of listener callbacks
    this.listeners = {};
  }

  /**
   * Add a listener for a specified event type.
   *
   * @param {String} type the event name
   * @param {Function} listener the callback to be invoked when an event of
   * the specified type occurs
   */
  Stream.prototype.on = function on(type, listener) {
    if (!this.listeners[type]) {
      this.listeners[type] = [];
    }
    this.listeners[type].push(listener);
  };

  /**
   * Remove a listener for a specified event type.
   *
   * @param {String} type the event name
   * @param {Function} listener a function previously registered for this
   * type of event through `on`
   * @return {Boolean} if we could turn it off or not
   */
  Stream.prototype.off = function off(type, listener) {
    if (!this.listeners[type]) {
      return false;
    }

    var index = this.listeners[type].indexOf(listener);

    // Only splice when the listener was actually found. Previously the
    // splice ran unconditionally, so an index of -1 became splice(-1, 1)
    // and incorrectly removed the *last* registered listener.
    if (index > -1) {
      this.listeners[type].splice(index, 1);
    }
    return index > -1;
  };

  /**
   * Trigger an event of the specified type on this stream. Any additional
   * arguments to this function are passed as parameters to event listeners.
   *
   * @param {String} type the event name
   */
  Stream.prototype.trigger = function trigger(type) {
    var callbacks = this.listeners[type];

    if (!callbacks) {
      return;
    }

    // Slicing the arguments on every invocation of this method
    // can add a significant amount of overhead. Avoid the
    // intermediate object creation for the common case of a
    // single callback argument
    if (arguments.length === 2) {
      var length = callbacks.length;

      for (var i = 0; i < length; ++i) {
        callbacks[i].call(this, arguments[1]);
      }
    } else {
      var args = Array.prototype.slice.call(arguments, 1);
      var _length = callbacks.length;

      for (var _i = 0; _i < _length; ++_i) {
        callbacks[_i].apply(this, args);
      }
    }
  };

  /**
   * Destroys the stream and cleans up.
   */
  Stream.prototype.dispose = function dispose() {
    this.listeners = {};
  };

  /**
   * Forwards all `data` events on this stream to the destination stream. The
   * destination stream should provide a method `push` to receive the data
   * events as they arrive.
   *
   * @param {Stream} destination the stream that will receive all `data` events
   * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
   */
  Stream.prototype.pipe = function pipe(destination) {
    this.on('data', function (data) {
      destination.push(data);
    });
  };

  return Stream;
}();
3422
/**
 * @file async-stream.js
 */
/**
 * A wrapper around the Stream class that uses setTimeout to run
 * queued stream "jobs" asynchronously.
 *
 * @class AsyncStream
 * @extends Stream
 */
var AsyncStream = function (_Stream) {
  inherits$1(AsyncStream, _Stream);

  function AsyncStream() {
    classCallCheck$1(this, AsyncStream);

    var _this = possibleConstructorReturn$1(this, _Stream.call(this, Stream$1));

    _this.jobs = [];
    _this.delay = 1;
    _this.timeout_ = null;
    return _this;
  }

  /**
   * Run the next queued job and, if more remain, schedule another tick.
   *
   * @private
   */
  AsyncStream.prototype.processJob_ = function processJob_() {
    this.jobs.shift()();
    this.timeout_ = this.jobs.length ? setTimeout(this.processJob_.bind(this), this.delay) : null;
  };

  /**
   * Queue a job, kicking off processing if the queue was idle.
   *
   * @param {Function} job the job to push into the stream
   */
  AsyncStream.prototype.push = function push(job) {
    this.jobs.push(job);
    if (!this.timeout_) {
      this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
    }
  };

  return AsyncStream;
}(Stream$1);
3478
3479 /**
3480 * @file decrypter.js
3481 *
3482 * An asynchronous implementation of AES-128 CBC decryption with
3483 * PKCS#7 padding.
3484 */
3485
/**
 * Convert a 32-bit word from network-order (big-endian) bytes to the
 * little-endian representation JavaScript bitwise math operates in
 * (and vice versa -- the transform is its own inverse).
 */
var ntoh = function ntoh(word) {
  var byte0 = word << 24;
  var byte1 = (word & 0xff00) << 8;
  var byte2 = (word & 0xff0000) >> 8;
  var byte3 = word >>> 24;

  return byte0 | byte1 | byte2 | byte3;
};
3493
/**
 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
 *
 * @param {Uint8Array} encrypted the encrypted bytes
 * @param {Uint32Array} key the bytes of the decryption key
 * @param {Uint32Array} initVector the initialization vector (IV) to
 * use for the first round of CBC.
 * @return {Uint8Array} the decrypted bytes
 *
 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
 * @see https://tools.ietf.org/html/rfc2315
 */
var decrypt = function decrypt(encrypted, key, initVector) {
  // word-level access to the encrypted bytes
  var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);

  var decipher = new AES(Array.prototype.slice.call(key));

  // byte- and word-level access for the decrypted output
  var decrypted = new Uint8Array(encrypted.byteLength);
  var decrypted32 = new Int32Array(decrypted.buffer);

  // local copies of the IV words, so the caller's array is never modified
  var init0 = initVector[0];
  var init1 = initVector[1];
  var init2 = initVector[2];
  var init3 = initVector[3];

  // scratch space for the current ciphertext block
  var encrypted0;
  var encrypted1;
  var encrypted2;
  var encrypted3;
  var wordIx;

  // decrypt four-word sequences, applying cipher-block chaining (CBC)
  // to each decrypted block
  for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
    // convert big-endian (network order) words into little-endian
    // (javascript order)
    encrypted0 = ntoh(encrypted32[wordIx]);
    encrypted1 = ntoh(encrypted32[wordIx + 1]);
    encrypted2 = ntoh(encrypted32[wordIx + 2]);
    encrypted3 = ntoh(encrypted32[wordIx + 3]);

    // decrypt the block
    decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx);

    // XOR with the IV, and restore network byte-order to obtain the
    // plaintext
    decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
    decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
    decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
    decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3);

    // this block's ciphertext becomes the IV for the next round
    init0 = encrypted0;
    init1 = encrypted1;
    init2 = encrypted2;
    init3 = encrypted3;
  }

  return decrypted;
};
3567
/**
 * The `Decrypter` class that manages decryption of AES
 * data through `AsyncStream` objects and the `decrypt`
 * function.
 *
 * @param {Uint8Array} encrypted the encrypted bytes
 * @param {Uint32Array} key the bytes of the decryption key
 * @param {Uint32Array} initVector the initialization vector (IV)
 * @param {Function} done the function to run when done
 * @class Decrypter
 */
var Decrypter = function () {
  function Decrypter(encrypted, key, initVector, done) {
    classCallCheck$1(this, Decrypter);

    var step = Decrypter.STEP;
    var encrypted32 = new Int32Array(encrypted.buffer);
    var decrypted = new Uint8Array(encrypted.byteLength);
    var i = 0;

    this.asyncStream_ = new AsyncStream();

    // Queue one decryption job per STEP-word chunk. Every chunk after the
    // first uses the final ciphertext block of the previous chunk as its IV,
    // which is what lets the chunks be decrypted independently.
    this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
    for (i = step; i < encrypted32.length; i += step) {
      initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
      this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
    }
    // once every chunk is done, strip the pkcs#7 padding and hand the
    // plaintext to the caller
    this.asyncStream_.push(function () {
      done(null, unpad(decrypted));
    });
  }

  /**
   * Build a job that decrypts one chunk into the shared output buffer.
   *
   * @private
   */
  Decrypter.prototype.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
    return function () {
      var bytes = decrypt(encrypted, key, initVector);

      decrypted.set(bytes, encrypted.byteOffset);
    };
  };

  /**
   * STEP: the maximum number of 32-bit words to process per job.
   *
   * @return {Number} the value of step, 32000
   */
  createClass$1(Decrypter, null, [{
    key: 'STEP',
    get: function get$$1() {
      // 4 * 8000;
      return 32000;
    }
  }]);
  return Decrypter;
}();
3630
3631 /**
3632 * @file bin-utils.js
3633 */
3634
/**
 * convert a TimeRange to text
 *
 * @param {TimeRange} range the timerange to use for conversion
 * @param {Number} i the index of the range to convert
 */
var textRange = function textRange(range, i) {
  var start = range.start(i);
  var end = range.end(i);

  return start + '-' + end;
};
3644
/**
 * format a number as a two-character, zero-padded hex string, appending a
 * trailing space after every second byte for readability
 *
 * @param {Number} e The number
 * @param {Number} i the iterator
 */
var formatHexString = function formatHexString(e, i) {
  var value = e.toString(16);
  var padded = value.length < 2 ? '0' + value : value;

  return padded + (i % 2 ? ' ' : '');
};
// render a byte as its printable ASCII character, or '.' when outside the
// printable range (0x20 inclusive through 0x7e exclusive)
var formatAsciiString = function formatAsciiString(e) {
  var isPrintable = e >= 0x20 && e < 0x7e;

  return isPrintable ? String.fromCharCode(e) : '.';
};
3662
/**
 * Creates an object for sending to a web worker, expanding any properties
 * that are TypedArrays into separate buffer/byteOffset/byteLength fields.
 *
 * @param {Object} message
 *        Object of properties and values to send to the web worker
 * @return {Object}
 *         Modified message with TypedArray values expanded
 * @function createTransferableMessage
 */
var createTransferableMessage = function createTransferableMessage(message) {
  var transferable = {};
  var keys = Object.keys(message);

  for (var i = 0; i < keys.length; i++) {
    var key = keys[i];
    var value = message[key];

    if (ArrayBuffer.isView(value)) {
      // pass the raw buffer along with the view's window into it
      transferable[key] = {
        bytes: value.buffer,
        byteOffset: value.byteOffset,
        byteLength: value.byteLength
      };
    } else {
      transferable[key] = value;
    }
  }

  return transferable;
};
3692
/**
 * Returns a unique string identifier for a media initialization
 * segment, built from its byterange and resolved URI.
 */
var initSegmentId = function initSegmentId(initSegment) {
  // default to a byterange spanning the entire resource
  var byterange = initSegment.byterange || {
    length: Infinity,
    offset: 0
  };
  var parts = [byterange.length, byterange.offset, initSegment.resolvedUri];

  return parts.join(',');
};
3705
/**
 * Returns a unique string identifier for a media segment key.
 */
var segmentKeyId = function segmentKeyId(key) {
  // the resolved URI uniquely identifies the key resource
  var resolvedUri = key.resolvedUri;

  return resolvedUri;
};
3712
/**
 * utils to help dump binary data to the console
 *
 * Produces one line per 16 bytes: the hex rendering of the row followed
 * by its ASCII rendering.
 */
var hexDump = function hexDump(data) {
  var bytes = Array.prototype.slice.call(data);
  var step = 16;
  var result = '';

  for (var j = 0; j < bytes.length / step; j++) {
    var row = bytes.slice(j * step, j * step + step);
    var hex = row.map(formatHexString).join('');
    var ascii = row.map(formatAsciiString).join('');

    result += hex + ' ' + ascii + '\n';
  }

  return result;
};
3731
// dump the `bytes` property of a tag object via hexDump
var tagDump = function tagDump(_ref) {
  return hexDump(_ref.bytes);
};
3736
// render every entry of a TimeRanges object as space-separated
// "start-end" pairs
var textRanges = function textRanges(ranges) {
  var result = '';

  for (var i = 0; i < ranges.length; i++) {
    result += textRange(ranges, i) + ' ';
  }

  return result;
};
3746
// frozen namespace collecting the binary/segment helper functions defined
// above, exposed as a unit (e.g. for debugging and worker messaging)
3747 var utils = /*#__PURE__*/Object.freeze({
3748 createTransferableMessage: createTransferableMessage,
3749 initSegmentId: initSegmentId,
3750 segmentKeyId: segmentKeyId,
3751 hexDump: hexDump,
3752 tagDump: tagDump,
3753 textRanges: textRanges
3754 });
3755
3756 // TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
3757 
3758 // Add 25% to the segment duration to account for small discrepancies in segment timing.
3759 // 25% was arbitrarily chosen, and may need to be refined over time.
3760 var SEGMENT_END_FUDGE_PERCENT = 0.25;
3761
/**
 * Converts a player time (any time that can be gotten/set from player.currentTime(),
 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
 *
 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
 * point" (a point where we have a mapping from program time to player time, with player
 * time being the post transmux start of the segment).
 *
 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
 *
 * @param {Number} playerTime the player time
 * @param {Object} segment the segment which contains the player time
 * @return {Date|null} program time, or null when the segment lacks the
 * required anchor point or timing info
 */
var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
  if (!segment.dateTimeObject) {
    // Can't convert without an "anchor point" for the program time (i.e., a time that can
    // be used to map the start of a segment with a real world time).
    return null;
  }

  if (!segment.videoTimingInfo) {
    // The segment hasn't been transmuxed yet, so there's no reliable mapping from player
    // time to the segment start. Previously this case threw a TypeError on the property
    // accesses below.
    return null;
  }

  var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
  var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart;

  // get the start of the content from before old content is prepended
  var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
  var offsetFromSegmentStart = playerTime - startOfSegment;

  return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
};
3793
// duration of a transmuxed segment's own video content, excluding any
// content the transmuxer prepended from a prior segment
var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
  var fullDuration = videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart;

  return fullDuration - videoTimingInfo.transmuxerPrependedSeconds;
};
3797
/**
 * Finds a segment that contains the time requested given as an ISO-8601 string. The
 * returned segment might be an estimate or an accurate match.
 *
 * @param {String} programTime The ISO-8601 programTime to find a match for
 * @param {Object} playlist A playlist object to search within
 * @return {Object|null} { segment, estimatedStart, type } or null when no match
 */
var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
  // Assumptions:
  // - verifyProgramDateTimeTags has already been run
  // - live streams have been started

  var dateTimeObject = void 0;

  try {
    dateTimeObject = new Date(programTime);
  } catch (e) {
    return null;
  }

  // `new Date` never throws on malformed input -- it yields an Invalid Date whose
  // comparisons are all false, which previously caused an arbitrary segment to be
  // returned. Reject it explicitly.
  if (isNaN(dateTimeObject.getTime())) {
    return null;
  }

  if (!playlist || !playlist.segments || playlist.segments.length === 0) {
    return null;
  }

  var segment = playlist.segments[0];

  if (dateTimeObject < segment.dateTimeObject) {
    // Requested time is before stream start.
    return null;
  }

  // walk forward until the next segment starts after the requested time
  for (var i = 0; i < playlist.segments.length - 1; i++) {
    segment = playlist.segments[i];

    var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;

    if (dateTimeObject < nextSegmentStart) {
      break;
    }
  }

  var lastSegment = playlist.segments[playlist.segments.length - 1];
  var lastSegmentStart = lastSegment.dateTimeObject;
  // Use accurate post-transmux timing when available; otherwise pad the manifest
  // duration by SEGMENT_END_FUDGE_PERCENT to allow for timing discrepancies.
  var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
  var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);

  if (dateTimeObject > lastSegmentEnd) {
    // Beyond the end of the stream, or our best guess of the end of the stream.
    return null;
  }

  if (dateTimeObject > lastSegmentStart) {
    segment = lastSegment;
  }

  return {
    segment: segment,
    estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
    // Although, given that all segments have accurate date time objects, the segment
    // selected should be accurate, unless the video has been transmuxed at some point
    // (determined by the presence of the videoTimingInfo object), the segment's "player
    // time" (the start time in the player) can't be considered accurate.
    type: segment.videoTimingInfo ? 'accurate' : 'estimate'
  };
};
3863
/**
 * Finds a segment that contains the given player time (in seconds).
 *
 * @param {Number} time The player time to find a match for
 * @param {Object} playlist A playlist object to search within
 */
var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
  // Assumptions:
  // - there will always be a segment.duration
  // - we can start from zero
  // - segments are in time order

  if (!playlist || !playlist.segments || playlist.segments.length === 0) {
    return null;
  }

  var segmentEnd = 0;
  var segment = void 0;

  for (var i = 0; i < playlist.segments.length; i++) {
    segment = playlist.segments[i];

    // videoTimingInfo is set after the segment is downloaded and transmuxed and
    // holds the most accurate player-time values we have; otherwise fall back to
    // accumulating the (inaccurate) manifest-derived durations.
    if (segment.videoTimingInfo) {
      segmentEnd = segment.videoTimingInfo.transmuxedPresentationEnd;
    } else {
      segmentEnd += segment.duration;
    }

    if (time <= segmentEnd) {
      break;
    }
  }

  var lastSegment = playlist.segments[playlist.segments.length - 1];

  if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
    // The time requested is beyond the stream end.
    return null;
  }

  if (time > segmentEnd) {
    // The time is within or beyond the last segment.
    //
    // Reject times beyond a reasonable guess of the end of the stream.
    if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
      // Technically, because the duration value is only an estimate, the time may
      // still exist in the last segment, but there isn't enough information to make
      // even a reasonable estimate.
      return null;
    }

    segment = lastSegment;
  }

  return {
    segment: segment,
    estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
    // videoTimingInfo is only set after transmux, so it is the only way to get
    // accurate timing values
    type: segment.videoTimingInfo ? 'accurate' : 'estimate'
  };
};
3928
/**
 * Gives the offset of the comparisonTimestamp from the programTime timestamp in
 * seconds. A positive offset means the programTime occurs after the
 * comparisonTimestamp; a negative offset means it occurs before.
 *
 * @param {String} comparisonTimeStamp An ISO-8601 timestamp to compare against
 * @param {String} programTime The programTime as an ISO-8601 string
 * @return {Number} offset
 */
var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
  var segmentDateTime;
  var programDateTime;

  try {
    segmentDateTime = new Date(comparisonTimeStamp);
    programDateTime = new Date(programTime);
  } catch (e) {
    // TODO handle error
  }

  var millisecondOffset = programDateTime.getTime() - segmentDateTime.getTime();

  return millisecondOffset / 1000;
};
3955
/**
 * Checks that all segments in this playlist have programDateTime tags.
 *
 * @param {Object} playlist A playlist object
 */
var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
  if (!playlist.segments || playlist.segments.length === 0) {
    return false;
  }

  // every segment needs an anchor date for program-time conversions to work
  return playlist.segments.every(function (segment) {
    return Boolean(segment.dateTimeObject);
  });
};
3976
/**
 * Returns the programTime of the media given a playlist and a playerTime.
 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
 * If the segments containing the time requested have not been buffered yet, an estimate
 * may be returned to the callback.
 *
 * @param {Object} args
 * @param {Object} args.playlist A playlist object to search within
 * @param {Number} args.time A playerTime in seconds
 * @param {Function} args.callback callback(err, programTime)
 * @returns {String} err.message A detailed error message
 * @returns {Object} programTime
 * @returns {Number} programTime.mediaSeconds The streamTime in seconds
 * @returns {String} programTime.programDateTime The programTime as an ISO-8601 String
 */
var getProgramTime = function getProgramTime(_ref) {
  var playlist = _ref.playlist;
  var time = _ref.time;
  var callback = _ref.callback;

  if (!callback) {
    throw new Error('getProgramTime: callback must be provided');
  }

  if (!playlist || time === undefined) {
    return callback({
      message: 'getProgramTime: playlist and time must be provided'
    });
  }

  var matchedSegment = findSegmentForPlayerTime(time, playlist);

  if (!matchedSegment) {
    return callback({
      message: 'valid programTime was not found'
    });
  }

  if (matchedSegment.type === 'estimate') {
    // the segment hasn't been transmuxed yet, so only a rough position is known
    return callback({
      message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
      seekTime: matchedSegment.estimatedStart
    });
  }

  var result = {
    mediaSeconds: time
  };
  var programTime = playerTimeToProgramTime(time, matchedSegment.segment);

  if (programTime) {
    result.programDateTime = programTime.toISOString();
  }

  return callback(null, result);
};
4035
/**
 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
 *
 * @param {Object} args
 * @param {String} args.programTime A programTime to seek to as an ISO-8601 String
 * @param {Object} args.playlist A playlist to look within
 * @param {Number} args.retryCount The number of times to try for an accurate seek. Default is 2.
 * @param {Function} args.seekTo A method to perform a seek
 * @param {Boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
 * @param {Object} args.tech The tech to seek on
 * @param {Function} args.callback callback(err, newTime) to return the new time to
 * @returns {String} err.message A detailed error message
 * @returns {Number} newTime The exact time that was seeked to in seconds
 */
var seekToProgramTime = function seekToProgramTime(_ref2) {
  var programTime = _ref2.programTime;
  var playlist = _ref2.playlist;
  var retryCount = _ref2.retryCount === undefined ? 2 : _ref2.retryCount;
  var seekTo = _ref2.seekTo;
  var pauseAfterSeek = _ref2.pauseAfterSeek === undefined ? true : _ref2.pauseAfterSeek;
  var tech = _ref2.tech;
  var callback = _ref2.callback;

  if (!callback) {
    throw new Error('seekToProgramTime: callback must be provided');
  }

  if (typeof programTime === 'undefined' || !playlist || !seekTo) {
    return callback({
      message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
    });
  }

  if (!playlist.endList && !tech.hasStarted_) {
    return callback({
      message: 'player must be playing a live stream to start buffering'
    });
  }

  if (!verifyProgramDateTimeTags(playlist)) {
    return callback({
      message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
    });
  }

  var matchedSegment = findSegmentForProgramTime(programTime, playlist);

  if (!matchedSegment) {
    return callback({
      message: programTime + ' was not found in the stream'
    });
  }

  var segment = matchedSegment.segment;
  var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);

  if (matchedSegment.type === 'estimate') {
    // The match is based only on manifest durations: seek near the estimate,
    // wait for the seek to land, then retry with (hopefully) transmuxed timing.
    if (retryCount === 0) {
      // we've run out of retries
      return callback({
        message: programTime + ' is not buffered yet. Try again'
      });
    }

    seekTo(matchedSegment.estimatedStart + mediaOffset);

    tech.one('seeked', function () {
      seekToProgramTime({
        programTime: programTime,
        playlist: playlist,
        retryCount: retryCount - 1,
        seekTo: seekTo,
        pauseAfterSeek: pauseAfterSeek,
        tech: tech,
        callback: callback
      });
    });

    return;
  }

  // Since the segment.start value is determined from the buffered end or ending time
  // of the prior segment, the seekToTime doesn't need to account for any transmuxer
  // modifications.
  var seekToTime = segment.start + mediaOffset;
  var seekedCallback = function seekedCallback() {
    return callback(null, tech.currentTime());
  };

  // listen for seeked event
  tech.one('seeked', seekedCallback);
  // pause before seeking as video.js will restore this state
  if (pauseAfterSeek) {
    tech.pause();
  }
  seekTo(seekToTime);
};
4137
4138 /**
4139 * ranges
4140 *
4141 * Utilities for working with TimeRanges.
4142 *
4143 */
4144 
4145 // Fudge factor to account for TimeRanges rounding
// (1/30 s -- presumably about one frame at 30fps; confirm before relying on it)
4146 var TIME_FUDGE_FACTOR = 1 / 30;
4147 // Comparisons between time values such as current time and the end of the buffered range
4148 // can be misleading because of precision differences or when the current media has poorly
4149 // aligned audio and video, which can cause values to be slightly off from what you would
4150 // expect. This value is what we consider to be safe to use in such comparisons to account
4151 // for these scenarios.
4152 var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
// collect the [start, end] pairs of `timeRanges` that satisfy `predicate`
// into a new TimeRanges object
var filterRanges = function filterRanges(timeRanges, predicate) {
  var results = [];

  if (timeRanges && timeRanges.length) {
    // Search for ranges that match the predicate
    for (var i = 0; i < timeRanges.length; i++) {
      var start = timeRanges.start(i);
      var end = timeRanges.end(i);

      if (predicate(start, end)) {
        results.push([start, end]);
      }
    }
  }

  return videojs.createTimeRanges(results);
};
4168
/**
 * Attempts to find the buffered TimeRange that contains the specified
 * time, allowing SAFE_TIME_DELTA of slop on either edge.
 * @param {TimeRanges} buffered - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @returns {TimeRanges} a new TimeRanges object
 */
var findRange = function findRange(buffered, time) {
  var containsTime = function containsTime(start, end) {
    return start - SAFE_TIME_DELTA <= time && time <= end + SAFE_TIME_DELTA;
  };

  return filterRanges(buffered, containsTime);
};
4181
/**
 * Returns the TimeRanges that begin later than the specified time
 * (with TIME_FUDGE_FACTOR of tolerance).
 * @param {TimeRanges} timeRanges - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @returns {TimeRanges} a new TimeRanges object.
 */
var findNextRange = function findNextRange(timeRanges, time) {
  var startsAfter = function startsAfter(start) {
    return start - TIME_FUDGE_FACTOR >= time;
  };

  return filterRanges(timeRanges, startsAfter);
};
4193
/**
 * Returns gaps within a list of TimeRanges: for each adjacent pair of
 * buffered ranges, the span from the earlier range's end to the later
 * range's start.
 * @param {TimeRanges} buffered - the TimeRanges object
 * @return {TimeRanges} a TimeRanges object of gaps
 */
var findGaps = function findGaps(buffered) {
  // fewer than two ranges means there can be no gap between ranges
  if (buffered.length < 2) {
    return videojs.createTimeRanges();
  }

  var gaps = [];

  for (var idx = 1; idx < buffered.length; idx++) {
    gaps.push([buffered.end(idx - 1), buffered.start(idx)]);
  }

  return videojs.createTimeRanges(gaps);
};
4215
/**
 * Gets a human readable string for a TimeRange, e.g. "0 => 4, 10 => 12".
 *
 * @param {TimeRange} range
 * @returns {String} a human readable string; empty when the range is
 *                   missing or has no entries
 */
var printableRange = function printableRange(range) {
  if (!range || !range.length) {
    return '';
  }

  var pieces = [];

  for (var idx = 0; idx < range.length; idx++) {
    pieces.push(range.start(idx) + ' => ' + range.end(idx));
  }

  return pieces.join(', ');
};
4235
/**
 * Calculates the amount of time left in seconds until the player hits the end of the
 * buffer and causes a rebuffer
 *
 * @param {TimeRange} buffered
 *        The state of the buffer
 * @param {Numnber} currentTime
 *        The current time of the player
 * @param {Number} playbackRate
 *        The current playback rate of the player. Defaults to 1.
 * @return {Number}
 *         Time until the player has to start rebuffering in seconds.
 * @function timeUntilRebuffer
 */
var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime) {
  var playbackRate = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 1;

  // with nothing buffered, treat the buffer as ending at time 0
  var lastBufferedEnd = 0;

  if (buffered.length) {
    lastBufferedEnd = buffered.end(buffered.length - 1);
  }

  // wall-clock seconds of content remaining at the current rate
  return (lastBufferedEnd - currentTime) / playbackRate;
};
4257
/**
 * Converts a TimeRanges object into an array of plain
 * `{ start, end }` objects.
 * @param {TimeRanges} timeRanges
 * @returns {Array}
 */
var timeRangesToArray = function timeRangesToArray(timeRanges) {
  var asArray = [];

  for (var idx = 0; idx < timeRanges.length; idx++) {
    asArray.push({
      start: timeRanges.start(idx),
      end: timeRanges.end(idx)
    });
  }

  return asArray;
};
4275
/**
 * @file create-text-tracks-if-necessary.js
 */

/**
 * Create text tracks on video.js if they exist on a segment.
 *
 * Lazily creates remote text tracks on the player for in-band 608 captions
 * and timed ID3 metadata found in the demuxed segment, caching them on the
 * source buffer (`inbandTextTracks_`, `metadataTrack_`) so each track is
 * only created once.
 *
 * @param {Object} sourceBuffer the VSB or FSB; created tracks are cached on it
 * @param {Object} mediaSource the HTML media source; the player is read from
 *                             its `player_` property
 * @param {Object} segment the segment that may contain the text track
 * @private
 */
var createTextTracksIfNecessary = function createTextTracksIfNecessary(sourceBuffer, mediaSource, segment) {
  var player = mediaSource.player_;

  // create an in-band caption track if one is present in the segment
  if (segment.captions && segment.captions.length) {
    if (!sourceBuffer.inbandTextTracks_) {
      sourceBuffer.inbandTextTracks_ = {};
    }

    // NOTE(review): the guard above checks segment.captions but iteration is
    // over segment.captionStreams — presumably both are populated together by
    // the transmuxer; confirm against the caption parser.
    for (var trackId in segment.captionStreams) {
      if (!sourceBuffer.inbandTextTracks_[trackId]) {
        // report usage of in-band 608 captions for analytics
        player.tech_.trigger({ type: 'usage', name: 'hls-608' });
        var track = player.textTracks().getTrackById(trackId);

        if (track) {
          // Reuse an existing track with a CC# id because this was
          // very likely created by videojs-contrib-hls from information
          // in the m3u8 for us to use
          sourceBuffer.inbandTextTracks_[trackId] = track;
        } else {
          // Otherwise, create a track with the default `CC#` label and
          // without a language
          sourceBuffer.inbandTextTracks_[trackId] = player.addRemoteTextTrack({
            kind: 'captions',
            id: trackId,
            label: trackId
          }, false).track;
        }
      }
    }
  }

  // create a single metadata track for timed ID3 metadata, once
  if (segment.metadata && segment.metadata.length && !sourceBuffer.metadataTrack_) {
    sourceBuffer.metadataTrack_ = player.addRemoteTextTrack({
      kind: 'metadata',
      label: 'Timed Metadata'
    }, false).track;
    // expose the ID3 dispatch type for in-band metadata consumers
    sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType = segment.metadata.dispatchType;
  }
};
4328
/**
 * @file remove-cues-from-track.js
 */

/**
 * Remove cues from a track on video.js. Any cue that overlaps the
 * [start, end] window (inclusive on both edges) is removed.
 *
 * @param {Double} start start of where we should remove the cue
 * @param {Double} end end of where the we should remove the cue
 * @param {Object} track the text track to remove the cues from
 * @private
 */
var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
  // nothing to do without a track or a cue list
  if (!track || !track.cues) {
    return;
  }

  // iterate backwards so removals don't disturb unvisited indices
  var index = track.cues.length;

  while (index--) {
    var cue = track.cues[index];

    // Remove any overlapping cue
    if (cue.startTime <= end && cue.endTime >= start) {
      track.removeCue(cue);
    }
  }
};
4364
/**
 * @file add-text-track-data.js
 */
/**
 * Define properties on a cue for backwards compatability,
 * but warn the user that the way that they are using it
 * is depricated and will be removed at a later date.
 *
 * @param {Cue} cue the cue to add the properties on
 * @private
 */
var deprecateOldCue = function deprecateOldCue(cue) {
  // install a warning getter for one legacy cue.frame property
  var defineDeprecated = function defineDeprecated(prop, warning, getValue) {
    Object.defineProperty(cue.frame, prop, {
      get: function get() {
        videojs.log.warn(warning);
        return getValue();
      }
    });
  };

  defineDeprecated('id', 'cue.frame.id is deprecated. Use cue.value.key instead.', function () {
    return cue.value.key;
  });
  defineDeprecated('value', 'cue.frame.value is deprecated. Use cue.value.data instead.', function () {
    return cue.value.data;
  });
  defineDeprecated('privateData', 'cue.frame.privateData is deprecated. Use cue.value.data instead.', function () {
    return cue.value.data;
  });
};
4398
/**
 * Clamp a media duration to a finite value: NaN and +/-Infinity map to
 * Number.MAX_VALUE; any other value passes through unchanged.
 *
 * @param {Number} duration the duration reported by the media source
 * @return {Number} a finite duration
 */
var durationOfVideo = function durationOfVideo(duration) {
  // global isNaN intentionally coerces non-numeric input
  if (isNaN(duration) || Math.abs(duration) === Infinity) {
    return Number.MAX_VALUE;
  }

  return duration;
};
/**
 * Add text track data to a source handler given the captions and
 * metadata from the buffer.
 *
 * Caption cues are appended to the per-stream in-band text tracks; metadata
 * (ID3) cues are appended to the metadata track and then re-spanned so each
 * cue ends where the next one starts (the last ends at the video duration).
 *
 * @param {Object} sourceHandler the virtual source buffer; provides
 *                 `inbandTextTracks_`, `metadataTrack_`, `timestampOffset`
 *                 and `mediaSource_` (read for its duration)
 * @param {Array} captionArray an array of caption data
 * @param {Array} metadataArray an array of meta data
 * @private
 */
var addTextTrackData = function addTextTrackData(sourceHandler, captionArray, metadataArray) {
  // prefer WebKitDataCue so arbitrary data can be attached to cues
  var Cue = window_1.WebKitDataCue || window_1.VTTCue;

  if (captionArray) {
    captionArray.forEach(function (caption) {
      var track = caption.stream;

      // `this` is sourceHandler (forEach thisArg); cue times are shifted by
      // the source buffer's timestampOffset
      this.inbandTextTracks_[track].addCue(new Cue(caption.startTime + this.timestampOffset, caption.endTime + this.timestampOffset, caption.text));
    }, sourceHandler);
  }

  if (metadataArray) {
    var videoDuration = durationOfVideo(sourceHandler.mediaSource_.duration);

    metadataArray.forEach(function (metadata) {
      var time = metadata.cueTime + this.timestampOffset;

      // if time isn't a finite number between 0 and Infinity, like NaN,
      // ignore this bit of metadata.
      // This likely occurs when you have an non-timed ID3 tag like TIT2,
      // which is the "Title/Songname/Content description" frame
      if (typeof time !== 'number' || window_1.isNaN(time) || time < 0 || !(time < Infinity)) {
        return;
      }

      metadata.frames.forEach(function (frame) {
        // zero-length cue at the tag's time; endTime is fixed up below
        var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');

        cue.frame = frame;
        cue.value = frame;
        // install deprecated cue.frame.id/value/privateData getters
        deprecateOldCue(cue);

        this.metadataTrack_.addCue(cue);
      }, this);
    }, sourceHandler);

    // Updating the metadata cues so that
    // the endTime of each cue is the startTime of the next cue
    // the endTime of last cue is the duration of the video
    if (sourceHandler.metadataTrack_ && sourceHandler.metadataTrack_.cues && sourceHandler.metadataTrack_.cues.length) {
      var cues = sourceHandler.metadataTrack_.cues;
      var cuesArray = [];

      // Create a copy of the TextTrackCueList...
      // ...disregarding cues with a falsey value
      for (var i = 0; i < cues.length; i++) {
        if (cues[i]) {
          cuesArray.push(cues[i]);
        }
      }

      // Group cues by their startTime value
      var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
        var timeSlot = obj[cue.startTime] || [];

        timeSlot.push(cue);
        obj[cue.startTime] = timeSlot;

        return obj;
      }, {});

      // Sort startTimes by ascending order
      var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
        return Number(a) - Number(b);
      });

      // Map each cue group's endTime to the next group's startTime
      sortedStartTimes.forEach(function (startTime, idx) {
        var cueGroup = cuesGroupedByStartTime[startTime];
        // last group falls back to the (finite) video duration
        var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration;

        // Map each cue's endTime the next group's startTime
        cueGroup.forEach(function (cue) {
          cue.endTime = nextTime;
        });
      });
    }
  }
};
4497
// Environment handles for the inlined web-worker shim (webworkify-style):
// TARGET keys the real Worker instance on the shim wrapper, SCRIPT_TYPE is
// the MIME type used for worker blob URLs, and BlobBuilder/URL/Worker are
// feature-detected with vendor-prefixed fallbacks.
var win$1 = typeof window !== 'undefined' ? window : {},
      TARGET = typeof Symbol === 'undefined' ? '__target' : Symbol(),
      SCRIPT_TYPE = 'application/javascript',
      BlobBuilder = win$1.BlobBuilder || win$1.WebKitBlobBuilder || win$1.MozBlobBuilder || win$1.MSBlobBuilder,
      // NOTE(review): `URL && URL.msURL` reads the hoisted-but-still-undefined
      // local `URL` var, so this third fallback always yields undefined;
      // presumably it was meant to probe a global — confirm against upstream
      // webworkify. Harmless when win$1.URL or win$1.webkitURL exists.
      URL = win$1.URL || win$1.webkitURL || URL && URL.msURL,
      Worker = win$1.Worker;
4504
/**
 * Returns a wrapper around Web Worker code that is constructible.
 *
 * When real Workers are available (and no fallback is forced) the worker
 * function's source is stringified into a blob URL and run in a true Worker;
 * otherwise the function is executed on the main thread against an emulated
 * `self` whose postMessage/onmessage pair is bridged asynchronously.
 *
 * @function shimWorker
 *
 * @param { String } filename The name of the file
 * @param { Function } fn Function wrapping the code of the worker
 */
function shimWorker(filename, fn) {
  return function ShimWorker(forceFallback) {
    var instance = this;

    if (!fn) {
      // no inlined code: just load the worker file directly
      return new Worker(filename);
    }

    if (Worker && !forceFallback) {
      // Convert the function's inner code to a string to construct the worker
      var workerSource = fn.toString().replace(/^function.+?{/, '').slice(0, -1);
      var objURL = createSourceObject(workerSource);

      instance[TARGET] = new Worker(objURL);
      wrapTerminate(instance[TARGET], objURL);
      return instance[TARGET];
    }

    // Main-thread fallback: emulate the worker's global `self`; messages in
    // both directions are delivered asynchronously via setTimeout.
    var selfShim = {
      postMessage: function postMessage(m) {
        if (instance.onmessage) {
          setTimeout(function () {
            instance.onmessage({ data: m, target: selfShim });
          });
        }
      }
    };

    fn.call(selfShim);
    this.postMessage = function (m) {
      setTimeout(function () {
        selfShim.onmessage({ data: m, target: instance });
      });
    };
    this.isThisThread = true;
  };
}
// Test Worker capabilities: spin up a trivial worker and post a transferable
// to it. If construction or the transferable post throws, disable Worker
// support entirely (Worker = null) so the main-thread fallback is used.
if (Worker) {
  var testWorker,
      objURL = createSourceObject('self.onmessage = function () {}'),
      testArray = new Uint8Array(1);

  try {
    testWorker = new Worker(objURL);

    // Native browser on some Samsung devices throws for transferables, let's detect it
    testWorker.postMessage(testArray, [testArray.buffer]);
  } catch (e) {
    Worker = null;
  } finally {
    // always release the probe's blob URL and worker
    URL.revokeObjectURL(objURL);
    if (testWorker) {
      testWorker.terminate();
    }
  }
}
4568
/**
 * Create a blob object URL for a string of JavaScript source, suitable for
 * constructing a Worker.
 *
 * @param {String} str the JavaScript source to wrap
 * @return {String} an object URL referencing the source
 */
function createSourceObject(str) {
  try {
    return URL.createObjectURL(new Blob([str], { type: SCRIPT_TYPE }));
  } catch (e) {
    // Legacy fallback for environments without the Blob constructor.
    // Fix: the original passed an undefined identifier `type` to getBlob,
    // which made this path throw a ReferenceError; use the module-level
    // SCRIPT_TYPE MIME type instead.
    var blob = new BlobBuilder();
    blob.append(str);
    return URL.createObjectURL(blob.getBlob(SCRIPT_TYPE));
  }
}
4578
/**
 * Patch a Worker's terminate() so the blob URL backing it is revoked when
 * the worker is shut down.
 *
 * @param {Worker} worker the worker to patch
 * @param {String} objURL the blob URL to revoke on terminate
 */
function wrapTerminate(worker, objURL) {
  if (!worker || !objURL) {
    return;
  }

  var originalTerminate = worker.terminate;

  worker.objURL = objURL;
  worker.terminate = function () {
    // release the blob URL before shutting the worker down
    if (worker.objURL) {
      URL.revokeObjectURL(worker.objURL);
    }
    originalTerminate.call(worker);
  };
}
4588
4589 var TransmuxWorker = new shimWorker("./transmuxer-worker.worker.js", function (window, document) {
4590 var self = this;
4591 var transmuxerWorker = function () {
4592
4593 /**
4594 * mux.js
4595 *
4596 * Copyright (c) Brightcove
4597 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4598 *
4599 * A lightweight readable stream implemention that handles event dispatching.
4600 * Objects that inherit from streams should call init in their constructors.
4601 */
4602
/**
 * A lightweight readable stream: per-instance event registry with
 * on/off/trigger/dispose. Call init() in inheriting constructors to set up
 * the listener table.
 */
var Stream = function Stream() {
  this.init = function () {
    var listeners = {};
    /**
     * Add a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} the callback to be invoked when an event of
     * the specified type occurs
     */
    this.on = function (type, listener) {
      if (!listeners[type]) {
        listeners[type] = [];
      }
      // concat produces a fresh array so in-flight trigger() loops over the
      // old one are unaffected
      listeners[type] = listeners[type].concat(listener);
    };
    /**
     * Remove a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} a function previously registered for this
     * type of event through `on`
     * @return {boolean} true when the listener was registered and removed
     */
    this.off = function (type, listener) {
      var index;
      if (!listeners[type]) {
        return false;
      }
      index = listeners[type].indexOf(listener);
      // copy-on-write, mirroring on()
      listeners[type] = listeners[type].slice();
      // Fix: only splice when the listener was actually registered.
      // Splicing with index === -1 would incorrectly remove the most
      // recently added listener for this type.
      if (index > -1) {
        listeners[type].splice(index, 1);
      }
      return index > -1;
    };
    /**
     * Trigger an event of the specified type on this stream. Any additional
     * arguments to this function are passed as parameters to event listeners.
     * @param type {string} the event name
     */
    this.trigger = function (type) {
      var callbacks, i, length, args;
      callbacks = listeners[type];
      if (!callbacks) {
        return;
      }
      // Slicing the arguments on every invocation of this method
      // can add a significant amount of overhead. Avoid the
      // intermediate object creation for the common case of a
      // single callback argument
      if (arguments.length === 2) {
        length = callbacks.length;
        for (i = 0; i < length; ++i) {
          callbacks[i].call(this, arguments[1]);
        }
      } else {
        args = [];
        i = arguments.length;
        for (i = 1; i < arguments.length; ++i) {
          args.push(arguments[i]);
        }
        length = callbacks.length;
        for (i = 0; i < length; ++i) {
          callbacks[i].apply(this, args);
        }
      }
    };
    /**
     * Destroys the stream and cleans up.
     */
    this.dispose = function () {
      listeners = {};
    };
  };
};
4674
/**
 * Forwards all `data` events on this stream to the destination stream. The
 * destination stream should provide a method `push` to receive the data
 * events as they arrive.
 * @param destination {stream} the stream that will receive all `data` events
 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
 *                  when the current stream emits a 'done' event
 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
 */
Stream.prototype.pipe = function (destination) {
  // each [event, method] pair maps an upstream event to the downstream call
  var wiring = [
    ['data', 'push'],
    ['done', 'flush'],
    ['partialdone', 'partialFlush'],
    ['endedtimeline', 'endTimeline'],
    ['reset', 'reset']
  ];

  wiring.forEach(function (pair) {
    var eventName = pair[0];
    var methodName = pair[1];

    this.on(eventName, function (payload) {
      destination[methodName](payload);
    });
  }, this);

  return destination;
};
4707
// Default stream functions that are expected to be overridden to perform
// actual work. These are provided by the prototype as a sort of no-op
// implementation so that we don't have to check for their existence in the
// `pipe` function above.

// Default push: re-emit incoming data as a 'data' event.
Stream.prototype.push = function (data) {
  this.trigger('data', data);
};

// Default flush: signal that this stream is done producing output.
Stream.prototype.flush = function (flushSource) {
  this.trigger('done', flushSource);
};

// Default partialFlush: signal a partial (low-latency) flush point.
Stream.prototype.partialFlush = function (flushSource) {
  this.trigger('partialdone', flushSource);
};

// Default endTimeline: signal that the current timeline has ended.
Stream.prototype.endTimeline = function (flushSource) {
  this.trigger('endedtimeline', flushSource);
};

// Default reset: signal that downstream state should be discarded.
Stream.prototype.reset = function (flushSource) {
  this.trigger('reset', flushSource);
};

// module export alias
var stream = Stream;
4733
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 *
 * Functions that generate fragmented MP4s suitable for use with Media
 * Source Extensions.
 */

// Largest unsigned 32-bit value; used to split 64-bit decode times into
// upper/lower 32-bit words (see traf below).
var UINT32_MAX = Math.pow(2, 32) - 1;

// Box generator functions and shared byte constants, assigned below.
var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS;

// pre-calculate constants
(function () {
  var i;
  // four-character box type codes; each entry is replaced below with its
  // byte representation
  types = {
    avc1: [], // codingname
    avcC: [],
    btrt: [],
    dinf: [],
    dref: [],
    esds: [],
    ftyp: [],
    hdlr: [],
    mdat: [],
    mdhd: [],
    mdia: [],
    mfhd: [],
    minf: [],
    moof: [],
    moov: [],
    mp4a: [], // codingname
    mvex: [],
    mvhd: [],
    pasp: [],
    sdtp: [],
    smhd: [],
    stbl: [],
    stco: [],
    stsc: [],
    stsd: [],
    stsz: [],
    stts: [],
    styp: [],
    tfdt: [],
    tfhd: [],
    traf: [],
    trak: [],
    trun: [],
    trex: [],
    tkhd: [],
    vmhd: []
  };

  // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
  // don't throw an error
  if (typeof Uint8Array === 'undefined') {
    return;
  }

  // convert each four-character code to its byte values
  for (i in types) {
    if (types.hasOwnProperty(i)) {
      types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
    }
  }

  MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
  AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
  MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
  VIDEO_HDLR = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00, // pre_defined
  0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
  ]);
  AUDIO_HDLR = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00, // pre_defined
  0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
  ]);
  HDLR_TYPES = {
    video: VIDEO_HDLR,
    audio: AUDIO_HDLR
  };
  DREF = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x01, // entry_count
  0x00, 0x00, 0x00, 0x0c, // entry_size
  0x75, 0x72, 0x6c, 0x20, // 'url' type
  0x00, // version 0
  0x00, 0x00, 0x01 // entry_flags
  ]);
  SMHD = new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, // balance, 0 means centered
  0x00, 0x00 // reserved
  ]);
  STCO = new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00 // entry_count
  ]);
  // stsc and stts share stco's empty full-box layout (version/flags/count)
  STSC = STCO;
  STSZ = new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00, // sample_size
  0x00, 0x00, 0x00, 0x00 // sample_count
  ]);
  STTS = STCO;
  VMHD = new Uint8Array([0x00, // version
  0x00, 0x00, 0x01, // flags
  0x00, 0x00, // graphicsmode
  0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
  ]);
})();
4857
/**
 * Assemble an MP4 box: a 32-bit big-endian size, the 4-byte type code, then
 * every payload in order. Remaining arguments after `type` are the payloads.
 *
 * @param type {Array} four byte values identifying the box type
 * @return {Uint8Array} the serialized box
 */
box = function box(type) {
  var payloads = [];
  var totalSize = 8; // size field (4) + type code (4)
  var i;

  for (i = 1; i < arguments.length; i++) {
    payloads.push(arguments[i]);
  }
  for (i = 0; i < payloads.length; i++) {
    totalSize += payloads[i].byteLength;
  }

  var result = new Uint8Array(totalSize);
  var view = new DataView(result.buffer, result.byteOffset, result.byteLength);

  // header: big-endian size followed by the type code
  view.setUint32(0, result.byteLength);
  result.set(type, 4);

  // append each payload after the header
  var offset = 8;

  for (i = 0; i < payloads.length; i++) {
    result.set(payloads[i], offset);
    offset += payloads[i].byteLength;
  }

  return result;
};
4887
// Data information box: declares via dref/url that sample data is in-file.
dinf = function dinf() {
  return box(types.dinf, box(types.dref, DREF));
};

// Elementary stream descriptor box for AAC audio; encodes the
// AudioSpecificConfig from the track's parsed ADTS parameters.
esds = function esds(track) {
  return box(types.esds, new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags

  // ES_Descriptor
  0x03, // tag, ES_DescrTag
  0x19, // length
  0x00, 0x00, // ES_ID
  0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority

  // DecoderConfigDescriptor
  0x04, // tag, DecoderConfigDescrTag
  0x11, // length
  0x40, // object type
  0x15, // streamType
  0x00, 0x06, 0x00, // bufferSizeDB
  0x00, 0x00, 0xda, 0xc0, // maxBitrate
  0x00, 0x00, 0xda, 0xc0, // avgBitrate

  // DecoderSpecificInfo
  0x05, // tag, DecoderSpecificInfoTag
  0x02, // length
  // ISO/IEC 14496-3, AudioSpecificConfig
  // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
  track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
  ]));
};

// File type box: isom major brand, avc1 compatible brand.
ftyp = function ftyp() {
  return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
};

// Handler reference box: 'vide' or 'soun' depending on track type.
hdlr = function hdlr(type) {
  return box(types.hdlr, HDLR_TYPES[type]);
};
// Media data box: raw sample payload.
mdat = function mdat(data) {
  return box(types.mdat, data);
};
// Media header box: 90kHz timescale (overridden by the audio sample rate
// when available) and the track duration.
mdhd = function mdhd(track) {
  var result = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x02, // creation_time
  0x00, 0x00, 0x00, 0x03, // modification_time
  0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second

  track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
  0x55, 0xc4, // 'und' language (undetermined)
  0x00, 0x00]);

  // Use the sample rate from the track metadata, when it is
  // defined. The sample rate can be parsed out of an ADTS header, for
  // instance.
  if (track.samplerate) {
    result[12] = track.samplerate >>> 24 & 0xFF;
    result[13] = track.samplerate >>> 16 & 0xFF;
    result[14] = track.samplerate >>> 8 & 0xFF;
    result[15] = track.samplerate & 0xFF;
  }

  return box(types.mdhd, result);
};
// Media box: wraps the media header, handler and media information boxes.
mdia = function mdia(track) {
  return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
};
// Movie fragment header box: carries the fragment sequence number.
mfhd = function mfhd(sequenceNumber) {
  return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
  (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
  ]));
};
// Media information box: video or sound media header plus dinf and stbl.
minf = function minf(track) {
  return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
};
// Movie fragment box: mfhd followed by one traf per track.
moof = function moof(sequenceNumber, tracks) {
  var trackFragments = [],
      i = tracks.length;
  // build traf boxes for each track fragment
  while (i--) {
    trackFragments[i] = traf(tracks[i]);
  }
  return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
};
/**
 * Returns a movie box.
 * @param tracks {array} the tracks associated with this movie
 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
 */
moov = function moov(tracks) {
  var i = tracks.length,
      boxes = [];

  while (i--) {
    boxes[i] = trak(tracks[i]);
  }

  return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
};
// Movie extends box: one trex per track, enabling fragmented playback.
mvex = function mvex(tracks) {
  var i = tracks.length,
      boxes = [];

  while (i--) {
    boxes[i] = trex(tracks[i]);
  }
  return box.apply(null, [types.mvex].concat(boxes));
};
// Movie header box: overall presentation metadata (90kHz timescale,
// identity transform, next_track_ID pinned to 0xffffffff).
mvhd = function mvhd(duration) {
  var bytes = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x01, // creation_time
  0x00, 0x00, 0x00, 0x02, // modification_time
  0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
  (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
  0x00, 0x01, 0x00, 0x00, // 1.0 rate
  0x01, 0x00, // 1.0 volume
  0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
  0xff, 0xff, 0xff, 0xff // next_track_ID
  ]);
  return box(types.mvhd, bytes);
};

// Independent and disposable samples box: one flag byte per sample encoding
// its dependency information.
sdtp = function sdtp(track) {
  var samples = track.samples || [],
      bytes = new Uint8Array(4 + samples.length),
      flags,
      i;

  // leave the full box header (4 bytes) all zero

  // write the sample table
  for (i = 0; i < samples.length; i++) {
    flags = samples[i].flags;

    bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
  }

  return box(types.sdtp, bytes);
};

// Sample table box: sample description plus empty stts/stsc/stsz/stco
// tables (sample data lives in movie fragments).
stbl = function stbl(track) {
  return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
};
5037
// stsd and its per-track-type sample description helpers share this closure.
(function () {
  var videoSample, audioSample;

  // Sample description box: a single entry describing how samples are coded,
  // dispatched on the track type.
  stsd = function stsd(track) {

    return box(types.stsd, new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
  };

  // AVC (avc1) visual sample entry: dimensions, codec configuration (avcC
  // with the track's SPS/PPS), bitrate info, and optional pixel aspect ratio.
  videoSample = function videoSample(track) {
    var sps = track.sps || [],
        pps = track.pps || [],
        sequenceParameterSets = [],
        pictureParameterSets = [],
        i,
        avc1Box;

    // assemble the SPSs
    for (i = 0; i < sps.length; i++) {
      sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
      sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
      sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
    }

    // assemble the PPSs
    for (i = 0; i < pps.length; i++) {
      pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
      pictureParameterSets.push(pps[i].byteLength & 0xFF);
      pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
    }

    avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x01, // data_reference_index
    0x00, 0x00, // pre_defined
    0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
    (track.width & 0xff00) >> 8, track.width & 0xff, // width
    (track.height & 0xff00) >> 8, track.height & 0xff, // height
    0x00, 0x48, 0x00, 0x00, // horizresolution
    0x00, 0x48, 0x00, 0x00, // vertresolution
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x01, // frame_count
    0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
    0x00, 0x18, // depth = 24
    0x11, 0x11 // pre_defined = -1
    ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
    track.profileIdc, // AVCProfileIndication
    track.profileCompatibility, // profile_compatibility
    track.levelIdc, // AVCLevelIndication
    0xff // lengthSizeMinusOne, hard-coded to 4 bytes
    ].concat([sps.length], // numOfSequenceParameterSets
    sequenceParameterSets, // "SPS"
    [pps.length], // numOfPictureParameterSets
    pictureParameterSets // "PPS"
    ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
    0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
    0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
    ]))];

    // append a pasp box when the track carries a sample aspect ratio
    if (track.sarRatio) {
      var hSpacing = track.sarRatio[0],
          vSpacing = track.sarRatio[1];

      avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
    }

    return box.apply(null, avc1Box);
  };

  // AAC (mp4a) audio sample entry: channel count, sample size/rate, plus the
  // esds decoder configuration.
  audioSample = function audioSample(track) {
    return box(types.mp4a, new Uint8Array([

    // SampleEntry, ISO/IEC 14496-12
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x01, // data_reference_index

    // AudioSampleEntry, ISO/IEC 14496-12
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount

    (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
    0x00, 0x00, // pre_defined
    0x00, 0x00, // reserved

    (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16

    // MP4AudioSampleEntry, ISO/IEC 14496-14
    ]), esds(track));
  };
})();
5130
// Track header box: track id, duration, dimensions and an identity
// transform; flags 0x7 mark the track enabled, in-movie and in-preview.
tkhd = function tkhd(track) {
  var result = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x07, // flags
  0x00, 0x00, 0x00, 0x00, // creation_time
  0x00, 0x00, 0x00, 0x00, // modification_time
  (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
  0x00, 0x00, 0x00, 0x00, // reserved
  (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, // layer
  0x00, 0x00, // alternate_group
  0x01, 0x00, // non-audio track volume
  0x00, 0x00, // reserved
  0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
  (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
  (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
  ]);

  return box(types.tkhd, result);
};
5151
5152 /**
5153 * Generate a track fragment (traf) box. A traf box collects metadata
5154 * about tracks in a movie fragment (moof) box.
5155 */
// Generate a track fragment (traf) box: a tfhd, a tfdt carrying the
// 64-bit baseMediaDecodeTime, a trun, and (for video) an sdtp.
traf = function traf(track) {
  var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;

  trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x3a, // flags
  (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
  0x00, 0x00, 0x00, 0x01, // sample_description_index
  0x00, 0x00, 0x00, 0x00, // default_sample_duration
  0x00, 0x00, 0x00, 0x00, // default_sample_size
  0x00, 0x00, 0x00, 0x00 // default_sample_flags
  ]));

  // split the (possibly > 32-bit) baseMediaDecodeTime into two 32-bit
  // words for the version-1 tfdt box
  upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
  lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));

  trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
  0x00, 0x00, 0x00, // flags
  // baseMediaDecodeTime
  upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF]));

  // the data offset specifies the number of bytes from the start of
  // the containing moof to the first payload byte of the associated
  // mdat
  dataOffset = 32 + // tfhd
  20 + // tfdt
  8 + // traf header
  16 + // mfhd
  8 + // moof header
  8; // mdat header

  // audio tracks require less metadata
  if (track.type === 'audio') {
    trackFragmentRun = trun(track, dataOffset);
    return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
  }

  // video tracks should contain an independent and disposable samples
  // box (sdtp)
  // generate one and adjust offsets to match
  sampleDependencyTable = sdtp(track);
  trackFragmentRun = trun(track, sampleDependencyTable.length + dataOffset);
  return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
};
5199
/**
 * Generate a track box.
 * @param track {object} a track definition
 * @return {Uint8Array} the track box
 */
trak = function trak(track) {
  // default to the maximum unsigned 32-bit duration when none is set
  if (!track.duration) {
    track.duration = 0xffffffff;
  }

  return box(types.trak, tkhd(track), mdia(track));
};
5209
// Generate a track extends (trex) box with the track's default sample
// settings.
trex = function trex(track) {
  // the last two bytes of default_sample_flags hold the sample
  // degradation priority, a hint about the importance of this sample
  // relative to others; only video keeps the higher priority (0x01)
  var degradationPriorityByte = track.type === 'video' ? 0x01 : 0x00;
  var result = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
  0x00, 0x00, 0x00, 0x01, // default_sample_description_index
  0x00, 0x00, 0x00, 0x00, // default_sample_duration
  0x00, 0x00, 0x00, 0x00, // default_sample_size
  0x00, 0x01, 0x00, degradationPriorityByte // default_sample_flags
  ]);

  return box(types.trex, result);
};
5229
5230 (function () {
5231 var audioTrun, videoTrun, trunHeader;
5232
// This method assumes all samples are uniform. That is, if a
// duration is present for the first sample, it will be present for
// all subsequent samples.
// see ISO/IEC 14496-12:2012, Section 8.8.8.1
var trunHeader = function trunHeader(samples, offset) {
  var presentFlags = 0;

  // derive the optional-field flags from the first sample
  if (samples.length) {
    var first = samples[0];

    if (first.duration !== undefined) {
      presentFlags |= 0x1; // sample-duration-present
    }
    if (first.size !== undefined) {
      presentFlags |= 0x2; // sample-size-present
    }
    if (first.flags !== undefined) {
      presentFlags |= 0x4; // sample-flags-present
    }
    if (first.compositionTimeOffset !== undefined) {
      presentFlags |= 0x8; // sample-composition-time-offsets-present
    }
  }

  return [0x00, // version 0
  0x00, presentFlags, 0x01, // flags (data-offset-present plus the bits above)
  (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
  (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
  ];
};
5265
// Generate a video trun box: the trun header followed by 16 bytes of
// per-sample data (duration, size, flags, composition time offset).
var videoTrun = function videoTrun(track, offset) {
  var bytesOffset, bytes, header, samples, sample, i;

  samples = track.samples || [];
  // data_offset must point past this trun box: 8 (box header) +
  // 12 (trun version/flags/sample_count/data_offset) + 16 per sample
  offset += 8 + 12 + 16 * samples.length;
  header = trunHeader(samples, offset);
  bytes = new Uint8Array(header.length + samples.length * 16);
  bytes.set(header);
  bytesOffset = header.length;

  for (i = 0; i < samples.length; i++) {
    sample = samples[i];

    bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
    bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
    bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
    bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
    bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
    bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
    bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
    bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
    bytes[bytesOffset++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
    bytes[bytesOffset++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
    // sample_degradation_priority is a 16-bit field (ISO/IEC 14496-12,
    // section 8.8.3.1). The previous code wrote
    // `degradationPriority & 0xF0 << 8` — which, by operator precedence,
    // is `& 0xF000` and always truncates to 0 in a Uint8Array — and
    // masked the low byte with 0x0F, dropping its top nibble. Split the
    // value into its real high and low bytes instead. No behavior change
    // for the default pipeline, which always uses degradationPriority 0.
    bytes[bytesOffset++] = (sample.flags.degradationPriority & 0xFF00) >>> 8;
    bytes[bytesOffset++] = sample.flags.degradationPriority & 0xFF; // sample_flags
    bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
    bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
    bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
    bytes[bytesOffset++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
  }
  return box(types.trun, bytes);
};
5298
// Generate an audio trun box: the trun header followed by 8 bytes of
// per-sample data (duration and size only).
var audioTrun = function audioTrun(track, offset) {
  var samples = track.samples || [];

  // data_offset must point past this trun box: 8 (box header) +
  // 12 (trun version/flags/sample_count/data_offset) + 8 per sample
  offset += 8 + 12 + 8 * samples.length;

  var header = trunHeader(samples, offset);
  var bytes = new Uint8Array(header.length + samples.length * 8);
  bytes.set(header);
  var pos = header.length;

  // append a big-endian 32-bit value at the current write position
  var writeUint32 = function (value) {
    bytes[pos++] = (value & 0xFF000000) >>> 24;
    bytes[pos++] = (value & 0xFF0000) >>> 16;
    bytes[pos++] = (value & 0xFF00) >>> 8;
    bytes[pos++] = value & 0xFF;
  };

  samples.forEach(function (sample) {
    writeUint32(sample.duration); // sample_duration
    writeUint32(sample.size); // sample_size
  });

  return box(types.trun, bytes);
};
5324
// Generate a track fragment run (trun) box; audio and video use
// different per-sample layouts.
var trun = function trun(track, offset) {
  var generate = track.type === 'audio' ? audioTrun : videoTrun;

  return generate(track, offset);
};
5332 })();
5333
// public API of the mp4 box generator
var mp4Generator = {
  ftyp: ftyp,
  mdat: mdat,
  moof: moof,
  moov: moov,
  /**
   * Build a complete initialization segment: an ftyp box immediately
   * followed by a moov box for the given tracks.
   */
  initSegment: function initSegment(tracks) {
    var fileType = ftyp();
    var movie = moov(tracks);
    var result = new Uint8Array(fileType.byteLength + movie.byteLength);

    result.set(fileType, 0);
    result.set(movie, fileType.byteLength);
    return result;
  }
};
5350
5351 /**
5352 * mux.js
5353 *
5354 * Copyright (c) Brightcove
5355 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5356 */
5357 // Convert an array of nal units into an array of frames with each frame being
5358 // composed of the nal units that make up that frame
// Also keep track of cumulative data about the frame from the nal units such
5360 // as the frame duration, starting pts, etc.
// Split a flat list of nal units into frames at each
// access-unit-delimiter nal, accumulating per-frame and per-list
// byteLength / nalCount / duration totals as array properties.
var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
  var i,
      currentNal,
      currentFrame = [],
      frames = [];

  // TODO added for LHLS, make sure this is OK
  frames.byteLength = 0;
  frames.nalCount = 0;
  frames.duration = 0;

  currentFrame.byteLength = 0;

  for (i = 0; i < nalUnits.length; i++) {
    currentNal = nalUnits[i];

    // Split on 'aud'-type nal units
    if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
      // Since the very first nal unit is expected to be an AUD
      // only push to the frames array when currentFrame is not empty
      if (currentFrame.length) {
        currentFrame.duration = currentNal.dts - currentFrame.dts;
        // TODO added for LHLS, make sure this is OK
        frames.byteLength += currentFrame.byteLength;
        frames.nalCount += currentFrame.length;
        frames.duration += currentFrame.duration;
        frames.push(currentFrame);
      }
      // begin a new frame starting with this AUD; the frame's pts/dts
      // come from its first nal unit
      currentFrame = [currentNal];
      currentFrame.byteLength = currentNal.data.byteLength;
      currentFrame.pts = currentNal.pts;
      currentFrame.dts = currentNal.dts;
    } else {
      // Specifically flag key frames for ease of use later
      if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
        currentFrame.keyFrame = true;
      }
      currentFrame.duration = currentNal.dts - currentFrame.dts;
      currentFrame.byteLength += currentNal.data.byteLength;
      currentFrame.push(currentNal);
    }
  }

  // For the last frame, use the duration of the previous frame if we
  // have nothing better to go on
  if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
    currentFrame.duration = frames[frames.length - 1].duration;
  }

  // Push the final frame
  // TODO added for LHLS, make sure this is OK
  frames.byteLength += currentFrame.byteLength;
  frames.nalCount += currentFrame.length;
  frames.duration += currentFrame.duration;

  frames.push(currentFrame);
  return frames;
};
5419
5420 // Convert an array of frames into an array of Gop with each Gop being composed
5421 // of the frames that make up that Gop
// Also keep track of cumulative data about the Gop from the frames such as the
5423 // Gop duration, starting pts, etc.
// Split a list of frames into GOPs at each keyframe, accumulating
// per-GOP and per-list byteLength / nalCount / duration totals as
// array properties. Assumes `frames` is non-empty (frames[0] is read
// unconditionally).
var groupFramesIntoGops = function groupFramesIntoGops(frames) {
  var i,
      currentFrame,
      currentGop = [],
      gops = [];

  // We must pre-set some of the values on the Gop since we
  // keep running totals of these values
  currentGop.byteLength = 0;
  currentGop.nalCount = 0;
  currentGop.duration = 0;
  currentGop.pts = frames[0].pts;
  currentGop.dts = frames[0].dts;

  // store some metadata about all the Gops
  gops.byteLength = 0;
  gops.nalCount = 0;
  gops.duration = 0;
  gops.pts = frames[0].pts;
  gops.dts = frames[0].dts;

  for (i = 0; i < frames.length; i++) {
    currentFrame = frames[i];

    if (currentFrame.keyFrame) {
      // Since the very first frame is expected to be an keyframe
      // only push to the gops array when currentGop is not empty
      if (currentGop.length) {
        gops.push(currentGop);
        gops.byteLength += currentGop.byteLength;
        gops.nalCount += currentGop.nalCount;
        gops.duration += currentGop.duration;
      }

      // begin a new GOP starting with this keyframe
      currentGop = [currentFrame];
      currentGop.nalCount = currentFrame.length;
      currentGop.byteLength = currentFrame.byteLength;
      currentGop.pts = currentFrame.pts;
      currentGop.dts = currentFrame.dts;
      currentGop.duration = currentFrame.duration;
    } else {
      currentGop.duration += currentFrame.duration;
      currentGop.nalCount += currentFrame.length;
      currentGop.byteLength += currentFrame.byteLength;
      currentGop.push(currentFrame);
    }
  }

  // if the final GOP's duration is unusable, borrow the previous
  // GOP's duration as a best guess
  if (gops.length && currentGop.duration <= 0) {
    currentGop.duration = gops[gops.length - 1].duration;
  }
  gops.byteLength += currentGop.byteLength;
  gops.nalCount += currentGop.nalCount;
  gops.duration += currentGop.duration;

  // push the final Gop
  gops.push(currentGop);
  return gops;
};
5483
/*
 * Search for the first keyframe in the GOPs and throw away all frames
 * until that keyframe. Then extend the duration of the pulled keyframe
 * and pull the PTS and DTS of the keyframe so that it covers the time
 * range of the frames that were disposed.
 *
 * @param {Array} gops video GOPs
 * @returns {Array} modified video GOPs
 */
var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
  if (!gops[0][0].keyFrame && gops.length > 1) {
    // the leading GOP does not begin with a keyframe; drop it...
    var droppedGop = gops.shift();

    gops.byteLength -= droppedGop.byteLength;
    gops.nalCount -= droppedGop.nalCount;

    // ...and stretch the first remaining frame backwards so it covers
    // the time range of the frames that were just removed
    var firstFrame = gops[0][0];
    firstFrame.dts = droppedGop.dts;
    firstFrame.pts = droppedGop.pts;
    firstFrame.duration += droppedGop.duration;
  }

  return gops;
};
5513
/**
 * Default sample object
 * see ISO/IEC 14496-12:2012, section 8.6.4.3
 */
var createDefaultSample = function createDefaultSample() {
  // a fresh object is built on every call so callers may mutate freely
  var flags = {
    isLeading: 0,
    dependsOn: 1,
    isDependedOn: 0,
    hasRedundancy: 0,
    degradationPriority: 0,
    isNonSyncSample: 1
  };

  return {
    size: 0,
    flags: flags
  };
};
5531
/*
 * Collates information from a video frame into an object for eventual
 * entry into an MP4 sample table.
 *
 * @param {Object} frame the video frame
 * @param {Number} dataOffset the byte offset to position the sample
 * @return {Object} object containing sample table info for a frame
 */
var sampleForFrame = function sampleForFrame(frame, dataOffset) {
  var isKeyFrame = !!frame.keyFrame;

  return {
    // 4 bytes of nal-unit-length prefix per nal, plus the nal payloads
    size: 4 * frame.length + frame.byteLength,
    dataOffset: dataOffset,
    compositionTimeOffset: frame.pts - frame.dts,
    duration: frame.duration,
    // defaults per ISO/IEC 14496-12:2012 section 8.6.4.3, with
    // keyframes marked as sync samples that others depend on
    flags: {
      isLeading: 0,
      dependsOn: isKeyFrame ? 2 : 1,
      isDependedOn: 0,
      hasRedundancy: 0,
      degradationPriority: 0,
      isNonSyncSample: isKeyFrame ? 0 : 1
    }
  };
};
5556
// generate the track's sample table from an array of gops
var generateSampleTable = function generateSampleTable(gops, baseDataOffset) {
  var dataOffset = baseDataOffset || 0;
  var samples = [];

  // samples are laid out back-to-back in the mdat, so each frame's
  // offset is the running total of the sizes before it
  gops.forEach(function (gop) {
    gop.forEach(function (frame) {
      var sample = sampleForFrame(frame, dataOffset);

      dataOffset += sample.size;
      samples.push(sample);
    });
  });
  return samples;
};
5582
// generate the track's raw mdat data from an array of gops
var concatenateNalData = function concatenateNalData(gops) {
  // each nal unit is stored as a 4-byte big-endian length prefix
  // followed by its payload
  var totalByteLength = gops.byteLength + 4 * gops.nalCount;
  var data = new Uint8Array(totalByteLength);
  var view = new DataView(data.buffer);
  var offset = 0;

  gops.forEach(function (gop) {
    gop.forEach(function (frame) {
      frame.forEach(function (nal) {
        view.setUint32(offset, nal.data.byteLength);
        offset += 4;
        data.set(nal.data, offset);
        offset += nal.data.byteLength;
      });
    });
  });
  return data;
};
5619
// generate the track's sample table from a frame
var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
  // a single frame yields a single-entry sample table
  return [sampleForFrame(frame, baseDataOffset || 0)];
};
5631
// generate the track's raw mdat data from a frame
var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
  // each nal unit is stored as a 4-byte big-endian length prefix
  // followed by its payload
  var data = new Uint8Array(frame.byteLength + 4 * frame.length);
  var view = new DataView(data.buffer);
  var offset = 0;

  frame.forEach(function (nal) {
    view.setUint32(offset, nal.data.byteLength);
    offset += 4;
    data.set(nal.data, offset);
    offset += nal.data.byteLength;
  });

  return data;
};
5655
// public API for the nal/frame/GOP helpers above
var frameUtils = {
  groupNalsIntoFrames: groupNalsIntoFrames,
  groupFramesIntoGops: groupFramesIntoGops,
  extendFirstKeyFrame: extendFirstKeyFrame,
  generateSampleTable: generateSampleTable,
  concatenateNalData: concatenateNalData,
  generateSampleTableForFrame: generateSampleTableForFrame,
  concatenateNalDataForFrame: concatenateNalDataForFrame
};
5665
5666 /**
5667 * mux.js
5668 *
5669 * Copyright (c) Brightcove
5670 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5671 */
// byte-run prefixes shared by the pregenerated frames-of-silence below
var highPrefix = [33, 16, 5, 32, 164, 27];
var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];

// produce an array containing `count` zeros
var zeroFill = function zeroFill(count) {
  var zeros = [];

  for (var i = 0; i < count; i++) {
    zeros.push(0);
  }
  return zeros;
};
5681
// flatten each entry of `metaTable` (an object whose values are arrays
// of byte-arrays) into a single Uint8Array per key
var makeTable = function makeTable(metaTable) {
  var table = {};

  Object.keys(metaTable).forEach(function (key) {
    var flattened = [];

    metaTable[key].forEach(function (part) {
      flattened = flattened.concat(part);
    });
    table[key] = new Uint8Array(flattened);
  });
  return table;
};
5690
// cache for the generated table of silent AAC frames (built on first use)
var silence;

// Lazily build and return a table of pregenerated AAC
// frames-of-silence, keyed by sample rate. Used to fill gaps in the
// audio timeline without an audible artifact.
var silence_1 = function silence_1() {
  if (!silence) {
    // Frames-of-silence to use for filling in missing AAC frames
    var coneOfSilence = {
      96000: [highPrefix, [227, 64], zeroFill(154), [56]],
      88200: [highPrefix, [231], zeroFill(170), [56]],
      64000: [highPrefix, [248, 192], zeroFill(240), [56]],
      48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
      44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
      32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
      24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
      16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
      12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
      11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
      8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
    };
    silence = makeTable(coneOfSilence);
  }
  return silence;
};
5713
5714 /**
5715 * mux.js
5716 *
5717 * Copyright (c) Brightcove
5718 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5719 */
// Clock conversion helpers between seconds, the 90kHz MPEG-TS
// timestamp clock, and sample-rate-based audio clocks.
var ONE_SECOND_IN_TS = 90000,
    // 90kHz clock
    secondsToVideoTs,
    secondsToAudioTs,
    videoTsToSeconds,
    audioTsToSeconds,
    audioTsToVideoTs,
    videoTsToAudioTs,
    metadataTsToSeconds;

// seconds -> 90kHz clock ticks
secondsToVideoTs = function secondsToVideoTs(seconds) {
  return seconds * ONE_SECOND_IN_TS;
};

// seconds -> audio clock ticks (the audio clock runs at the sample rate)
secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
  return seconds * sampleRate;
};

// 90kHz clock ticks -> seconds
videoTsToSeconds = function videoTsToSeconds(timestamp) {
  return timestamp / ONE_SECOND_IN_TS;
};

// audio clock ticks -> seconds
audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
  return timestamp / sampleRate;
};

// audio clock ticks -> 90kHz clock ticks
audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
  return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
};

// 90kHz clock ticks -> audio clock ticks
videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
  return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
};

/**
 * Adjust ID3 tag or caption timing information by the timeline pts values
 * (if keepOriginalTimestamps is false) and convert to seconds
 */
metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
  return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
};

// public API
var clock = {
  ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,
  secondsToVideoTs: secondsToVideoTs,
  secondsToAudioTs: secondsToAudioTs,
  videoTsToSeconds: videoTsToSeconds,
  audioTsToSeconds: audioTsToSeconds,
  audioTsToVideoTs: audioTsToVideoTs,
  videoTsToAudioTs: videoTsToAudioTs,
  metadataTsToSeconds: metadataTsToSeconds
};
5773
5774 /**
5775 * mux.js
5776 *
5777 * Copyright (c) Brightcove
5778 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5779 */
5780
/**
 * Sum the `byteLength` properties of the data in each AAC frame
 */
var sumFrameByteLengths = function sumFrameByteLengths(array) {
  // total payload size, in bytes, across every frame in the list
  return array.reduce(function (total, frame) {
    return total + frame.data.byteLength;
  }, 0);
};
5797
// Possibly pad (prefix) the audio track with silence if appending this track
// would lead to the introduction of a gap in the audio buffer
var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
  var baseMediaDecodeTimeTs,
      frameDuration = 0,
      audioGapDuration = 0,
      audioFillFrameCount = 0,
      audioFillDuration = 0,
      silentFrame,
      i,
      firstFrame;

  // nothing to prefix when there are no frames
  if (!frames.length) {
    return;
  }

  // track start time converted to the 90kHz clock
  baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate);
  // determine frame clock duration based on sample rate, round up to avoid overfills
  frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));

  if (audioAppendStartTs && videoBaseMediaDecodeTime) {
    // insert the shortest possible amount (audio gap or audio to video gap)
    audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime);
    // number of full frames in the audio gap
    audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
    audioFillDuration = audioFillFrameCount * frameDuration;
  }

  // don't attempt to fill gaps smaller than a single frame or larger
  // than a half second
  if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
    return;
  }

  // look up a pregenerated silent frame for this sample rate
  silentFrame = silence_1()[track.samplerate];

  if (!silentFrame) {
    // we don't have a silent frame pregenerated for the sample rate, so use a frame
    // from the content instead
    silentFrame = frames[0].data;
  }

  // prepend the fill frames, each one frame-duration earlier than the
  // current first frame
  for (i = 0; i < audioFillFrameCount; i++) {
    firstFrame = frames[0];

    frames.splice(0, 0, {
      data: silentFrame,
      dts: firstFrame.dts - frameDuration,
      pts: firstFrame.pts - frameDuration
    });
  }

  // move the track's start time back to cover the prefixed silence
  track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
};
5852
// If the audio segment extends before the earliest allowed dts
// value, remove AAC frames until it starts at or after the earliest
// allowed DTS so that we don't end up with a negative
// baseMediaDecodeTime for the audio track
var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
  // nothing starts too early; keep every frame as-is
  if (track.minSegmentDts >= earliestAllowedDts) {
    return adtsFrames;
  }

  // the minimum segment dts must be recomputed from the surviving frames
  track.minSegmentDts = Infinity;

  return adtsFrames.filter(function (frame) {
    if (frame.dts < earliestAllowedDts) {
      // frame begins before the cutoff; discard it
      return false;
    }

    // keep the frame and record its dts
    track.minSegmentDts = Math.min(track.minSegmentDts, frame.dts);
    track.minSegmentPts = track.minSegmentDts;
    return true;
  });
};
5876
// generate the track's sample table from an array of AAC frames
var generateSampleTable$1 = function generateSampleTable(frames) {
  // every AAC frame decodes to 1024 samples, so only the size varies
  return frames.map(function (frame) {
    return {
      size: frame.data.byteLength,
      duration: 1024 // For AAC audio, all samples contain 1024 samples
    };
  });
};
5892
// generate the track's raw mdat data by concatenating the payload
// bytes of an array of AAC frames
var concatenateFrameData = function concatenateFrameData(frames) {
  var total = 0;
  var i;

  // first pass: measure the total payload size
  for (i = 0; i < frames.length; i++) {
    total += frames[i].data.byteLength;
  }

  // second pass: copy each frame's bytes into place
  var data = new Uint8Array(total);
  var offset = 0;

  for (i = 0; i < frames.length; i++) {
    data.set(frames[i].data, offset);
    offset += frames[i].data.byteLength;
  }
  return data;
};
5908
// public API for the AAC frame helpers above
var audioFrameUtils = {
  prefixWithSilence: prefixWithSilence,
  trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
  generateSampleTable: generateSampleTable$1,
  concatenateFrameData: concatenateFrameData
};
5915
5916 /**
5917 * mux.js
5918 *
5919 * Copyright (c) Brightcove
5920 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5921 */
5922 var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS;
5923
/**
 * Store information about the start and end of the track and the
 * duration for each frame/sample we process in order to calculate
 * the baseMediaDecodeTime
 */
var collectDtsInfo = function collectDtsInfo(track, data) {
  if (typeof data.pts === 'number') {
    // the first pts we ever see marks the start of the timeline
    if (track.timelineStartInfo.pts === undefined) {
      track.timelineStartInfo.pts = data.pts;
    }

    // running min/max pts for the current segment
    track.minSegmentPts = track.minSegmentPts === undefined ? data.pts : Math.min(track.minSegmentPts, data.pts);
    track.maxSegmentPts = track.maxSegmentPts === undefined ? data.pts : Math.max(track.maxSegmentPts, data.pts);
  }

  if (typeof data.dts === 'number') {
    // the first dts we ever see marks the start of the timeline
    if (track.timelineStartInfo.dts === undefined) {
      track.timelineStartInfo.dts = data.dts;
    }

    // running min/max dts for the current segment
    track.minSegmentDts = track.minSegmentDts === undefined ? data.dts : Math.min(track.minSegmentDts, data.dts);
    track.maxSegmentDts = track.maxSegmentDts === undefined ? data.dts : Math.max(track.maxSegmentDts, data.dts);
  }
};
5966
/**
 * Clear values used to calculate the baseMediaDecodeTime between
 * tracks
 */
var clearDtsInfo = function clearDtsInfo(track) {
  ['minSegmentDts', 'maxSegmentDts', 'minSegmentPts', 'maxSegmentPts'].forEach(function (prop) {
    delete track[prop];
  });
};
5977
/**
 * Calculate the track's baseMediaDecodeTime based on the earliest
 * DTS the transmuxer has ever seen and the minimum DTS for the
 * current track
 * @param track {object} track metadata configuration
 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
 *        in the source; false to adjust the first segment to start at 0.
 */
var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
  var minSegmentDts = track.minSegmentDts;

  // shift so the very first segment of the timeline starts at zero,
  // unless told to keep the source timestamps
  if (!keepOriginalTimestamps) {
    minSegmentDts -= track.timelineStartInfo.dts;
  }

  // start from where the first segment should be placed in time, then
  // add the distance this segment is from the very first
  var baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime + minSegmentDts;

  // baseMediaDecodeTime must not become negative
  baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);

  if (track.type === 'audio') {
    // Audio has a different clock equal to the sampling_rate so we need to
    // scale the PTS values into the clock rate of the track
    baseMediaDecodeTime *= track.samplerate / ONE_SECOND_IN_TS$1;
    baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
  }

  return baseMediaDecodeTime;
};
6016
// public API for the dts/pts bookkeeping helpers above
var trackDecodeInfo = {
  clearDtsInfo: clearDtsInfo,
  calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
  collectDtsInfo: collectDtsInfo
};
6022
6023 /**
6024 * mux.js
6025 *
6026 * Copyright (c) Brightcove
6027 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
6028 *
6029 * Reads in-band caption information from a video elementary
6030 * stream. Captions must follow the CEA-708 standard for injection
6031 * into an MPEG-2 transport streams.
6032 * @see https://en.wikipedia.org/wiki/CEA-708
6033 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
6034 */
6035
6036 // Supplemental enhancement information (SEI) NAL units have a
6037 // payload type field to indicate how they are to be
6038 // interpreted. CEAS-708 caption content is always transmitted with
6039 // payload type 0x04.
6040
6041 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
6042 RBSP_TRAILING_BITS = 128;
6043
6044 /**
6045 * Parse a supplemental enhancement information (SEI) NAL unit.
6046 * Stops parsing once a message of type ITU T T35 has been found.
6047 *
6048 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
6049 * @return {object} the parsed SEI payload
6050 * @see Rec. ITU-T H.264, 7.3.2.3.1
6051 */
6052 var parseSei = function parseSei(bytes) {
6053 var i = 0,
6054 result = {
6055 payloadType: -1,
6056 payloadSize: 0
6057 },
6058 payloadType = 0,
6059 payloadSize = 0;
6060
6061 // go through the sei_rbsp parsing each each individual sei_message
6062 while (i < bytes.byteLength) {
6063 // stop once we have hit the end of the sei_rbsp
6064 if (bytes[i] === RBSP_TRAILING_BITS) {
6065 break;
6066 }
6067
6068 // Parse payload type
6069 while (bytes[i] === 0xFF) {
6070 payloadType += 255;
6071 i++;
6072 }
6073 payloadType += bytes[i++];
6074
6075 // Parse payload size
6076 while (bytes[i] === 0xFF) {
6077 payloadSize += 255;
6078 i++;
6079 }
6080 payloadSize += bytes[i++];
6081
6082 // this sei_message is a 608/708 caption so save it and break
6083 // there can only ever be one caption message in a frame's sei
6084 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
6085 result.payloadType = payloadType;
6086 result.payloadSize = payloadSize;
6087 result.payload = bytes.subarray(i, i + payloadSize);
6088 break;
6089 }
6090
6091 // skip the payload and parse the next message
6092 i += payloadSize;
6093 payloadType = 0;
6094 payloadSize = 0;
6095 }
6096
6097 return result;
6098 };
6099
6100 // see ANSI/SCTE 128-1 (2013), section 8.1
6101 var parseUserData = function parseUserData(sei) {
6102 // itu_t_t35_contry_code must be 181 (United States) for
6103 // captions
6104 if (sei.payload[0] !== 181) {
6105 return null;
6106 }
6107
6108 // itu_t_t35_provider_code should be 49 (ATSC) for captions
6109 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
6110 return null;
6111 }
6112
6113 // the user_identifier should be "GA94" to indicate ATSC1 data
6114 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
6115 return null;
6116 }
6117
6118 // finally, user_data_type_code should be 0x03 for caption data
6119 if (sei.payload[7] !== 0x03) {
6120 return null;
6121 }
6122
6123 // return the user_data_type_structure and strip the trailing
6124 // marker bits
6125 return sei.payload.subarray(8, sei.payload.length - 1);
6126 };
6127
6128 // see CEA-708-D, section 4.4
6129 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
6130 var results = [],
6131 i,
6132 count,
6133 offset,
6134 data;
6135
6136 // if this is just filler, return immediately
6137 if (!(userData[0] & 0x40)) {
6138 return results;
6139 }
6140
6141 // parse out the cc_data_1 and cc_data_2 fields
6142 count = userData[0] & 0x1f;
6143 for (i = 0; i < count; i++) {
6144 offset = i * 3;
6145 data = {
6146 type: userData[offset + 2] & 0x03,
6147 pts: pts
6148 };
6149
6150 // capture cc data when cc_valid is 1
6151 if (userData[offset + 2] & 0x04) {
6152 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
6153 results.push(data);
6154 }
6155 }
6156 return results;
6157 };
6158
6159 var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
6160 var length = data.byteLength,
6161 emulationPreventionBytesPositions = [],
6162 i = 1,
6163 newLength,
6164 newData;
6165
6166 // Find all `Emulation Prevention Bytes`
6167 while (i < length - 2) {
6168 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
6169 emulationPreventionBytesPositions.push(i + 2);
6170 i += 2;
6171 } else {
6172 i++;
6173 }
6174 }
6175
6176 // If no Emulation Prevention Bytes were found just return the original
6177 // array
6178 if (emulationPreventionBytesPositions.length === 0) {
6179 return data;
6180 }
6181
6182 // Create a new array to hold the NAL unit data
6183 newLength = length - emulationPreventionBytesPositions.length;
6184 newData = new Uint8Array(newLength);
6185 var sourceIndex = 0;
6186
6187 for (i = 0; i < newLength; sourceIndex++, i++) {
6188 if (sourceIndex === emulationPreventionBytesPositions[0]) {
6189 // Skip this byte
6190 sourceIndex++;
6191 // Remove this position index
6192 emulationPreventionBytesPositions.shift();
6193 }
6194 newData[i] = data[sourceIndex];
6195 }
6196
6197 return newData;
6198 };
6199
  // exports
  // Low-level CEA-608/708 parsing helpers, grouped for consumption by the
  // CaptionStream below.
  var captionPacketParser = {
    parseSei: parseSei,
    parseUserData: parseUserData,
    parseCaptionPackets: parseCaptionPackets,
    discardEmulationPreventionBytes: discardEmulationPreventionBytes,
    USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
  };
6208
6209 // -----------------
6210 // Link To Transport
6211 // -----------------
6212
6213
  /**
   * Accepts SEI NAL units from a video elementary stream, extracts any
   * CEA-608 caption byte pairs found in them, and fans those pairs out to
   * four Cea608Stream instances (CC1-CC4).
   */
  var CaptionStream = function CaptionStream() {

    // inherited Stream setup (event listener machinery)
    CaptionStream.prototype.init.call(this);

    // caption byte pairs collected from SEI NALs; sorted and dispatched on flush
    this.captionPackets_ = [];

    // one stream per (field, data channel) pair; dispatchCea608Packet indexes
    // this array as (field << 1) + channel, so this order is load-bearing
    this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
    new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
    new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
    new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
    ];

    this.reset();

    // forward data and done events from CCs to this CaptionStream
    this.ccStreams_.forEach(function (cc) {
      cc.on('data', this.trigger.bind(this, 'data'));
      cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
      cc.on('done', this.trigger.bind(this, 'done'));
    }, this);
  };
6235
  CaptionStream.prototype = new stream();

  /**
   * Receive a parsed NAL unit. SEI NALs are scanned for ATSC caption user
   * data; any CEA-608 byte pairs found are buffered in captionPackets_
   * until the next flush. Non-SEI NALs and non-caption SEI payloads are
   * ignored, as are NALs that duplicate data already seen (by dts).
   *
   * @param {Object} event parsed NAL unit carrying nalUnitType, escapedRBSP,
   *        pts and dts
   */
  CaptionStream.prototype.push = function (event) {
    var sei, userData, newCaptionPackets;

    // only examine SEI NALs
    if (event.nalUnitType !== 'sei_rbsp') {
      return;
    }

    // parse the sei
    sei = captionPacketParser.parseSei(event.escapedRBSP);

    // ignore everything but user_data_registered_itu_t_t35
    if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
      return;
    }

    // parse out the user data payload
    userData = captionPacketParser.parseUserData(sei);

    // ignore unrecognized userData
    if (!userData) {
      return;
    }

    // Sometimes, the same segment # will be downloaded twice. To stop the
    // caption data from being processed twice, we track the latest dts we've
    // received and ignore everything with a dts before that. However, since
    // data for a specific dts can be split across packets on either side of
    // a segment boundary, we need to make sure we *don't* ignore the packets
    // from the *next* segment that have dts === this.latestDts_. By constantly
    // tracking the number of packets received with dts === this.latestDts_, we
    // know how many should be ignored once we start receiving duplicates.
    if (event.dts < this.latestDts_) {
      // We've started getting older data, so set the flag.
      this.ignoreNextEqualDts_ = true;
      return;
    } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
      this.numSameDts_--;
      if (!this.numSameDts_) {
        // We've received the last duplicate packet, time to start processing again
        this.ignoreNextEqualDts_ = false;
      }
      return;
    }

    // parse out CC data packets and save them for later
    newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
    this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
    if (this.latestDts_ !== event.dts) {
      this.numSameDts_ = 0;
    }
    this.numSameDts_++;
    this.latestDts_ = event.dts;
  };
6291
6292 CaptionStream.prototype.flushCCStreams = function (flushType) {
6293 this.ccStreams_.forEach(function (cc) {
6294 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
6295 }, this);
6296 };
6297
6298 CaptionStream.prototype.flushStream = function (flushType) {
6299 // make sure we actually parsed captions before proceeding
6300 if (!this.captionPackets_.length) {
6301 this.flushCCStreams(flushType);
6302 return;
6303 }
6304
6305 // In Chrome, the Array#sort function is not stable so add a
6306 // presortIndex that we can use to ensure we get a stable-sort
6307 this.captionPackets_.forEach(function (elem, idx) {
6308 elem.presortIndex = idx;
6309 });
6310
6311 // sort caption byte-pairs based on their PTS values
6312 this.captionPackets_.sort(function (a, b) {
6313 if (a.pts === b.pts) {
6314 return a.presortIndex - b.presortIndex;
6315 }
6316 return a.pts - b.pts;
6317 });
6318
6319 this.captionPackets_.forEach(function (packet) {
6320 if (packet.type < 2) {
6321 // Dispatch packet to the right Cea608Stream
6322 this.dispatchCea608Packet(packet);
6323 }
6324 // this is where an 'else' would go for a dispatching packets
6325 // to a theoretical Cea708Stream that handles SERVICEn data
6326 }, this);
6327
6328 this.captionPackets_.length = 0;
6329 this.flushCCStreams(flushType);
6330 };
6331
  // Push all buffered caption data through the pipeline, ending the stream.
  CaptionStream.prototype.flush = function () {
    return this.flushStream('flush');
  };

  // Only called if handling partial data; pushes buffered captions through
  // without signaling end-of-stream.
  CaptionStream.prototype.partialFlush = function () {
    return this.flushStream('partialFlush');
  };
6340
  // Return this stream (and its four Cea608Streams) to a pristine state;
  // called from the constructor and externally on discontinuities/seeks.
  CaptionStream.prototype.reset = function () {
    this.latestDts_ = null;
    this.ignoreNextEqualDts_ = false;
    this.numSameDts_ = 0;
    // currently selected data channel per field: 0, 1, or null when unknown
    this.activeCea608Channel_ = [null, null];
    this.ccStreams_.forEach(function (ccStream) {
      ccStream.reset();
    });
  };
6350
  // From the CEA-608 spec:
  /*
   * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
   * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
   * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
   * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
   * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
   * to switch to captioning or Text.
   */
  // With that in mind, we ignore any data between an XDS control code and a
  // subsequent closed-captioning control code.
  CaptionStream.prototype.dispatchCea608Packet = function (packet) {
    // NOTE: packet.type is the CEA608 field
    // Text/XDS control codes deselect the channel for this field; channel
    // preamble control codes select data channel 1 or 2.
    if (this.setsTextOrXDSActive(packet)) {
      this.activeCea608Channel_[packet.type] = null;
    } else if (this.setsChannel1Active(packet)) {
      this.activeCea608Channel_[packet.type] = 0;
    } else if (this.setsChannel2Active(packet)) {
      this.activeCea608Channel_[packet.type] = 1;
    }
    if (this.activeCea608Channel_[packet.type] === null) {
      // If we haven't received anything to set the active channel, or the
      // packets are Text/XDS data, discard the data; we don't want jumbled
      // captions
      return;
    }
    // ccStreams_ is ordered CC1..CC4, so the target index is
    // (field << 1) + channel
    this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
  };
6379
  // Control codes whose first byte (parity stripped) is 0x10-0x17 select
  // data channel 1 for this field.
  CaptionStream.prototype.setsChannel1Active = function (packet) {
    return (packet.ccData & 0x7800) === 0x1000;
  };
  // Control codes whose first byte (parity stripped) is 0x18-0x1f select
  // data channel 2 for this field.
  CaptionStream.prototype.setsChannel2Active = function (packet) {
    return (packet.ccData & 0x7800) === 0x1800;
  };
  // Matches byte pairs with an odd first byte in 0x01-0x0f (the XDS control
  // code range) or with second byte 0x2a/0x2b in the 0x10-0x17 / 0x18-0x1f
  // first-byte groups -- presumably the Text-mode TR/RTD commands; verify
  // against CEA-608. All such data is discarded by dispatchCea608Packet.
  CaptionStream.prototype.setsTextOrXDSActive = function (packet) {
    return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
  };
6389
6390 // ----------------------
6391 // Session to Application
6392 // ----------------------
6393
6394 // This hash maps non-ASCII, special, and extended character codes to their
6395 // proper Unicode equivalent. The first keys that are only a single byte
6396 // are the non-standard ASCII characters, which simply map the CEA608 byte
6397 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
6398 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
6399 // can be performed regardless of the field and data channel on which the
6400 // character code was received.
6401 var CHARACTER_TRANSLATION = {
6402 0x2a: 0xe1, // á
6403 0x5c: 0xe9, // é
6404 0x5e: 0xed, // í
6405 0x5f: 0xf3, // ó
6406 0x60: 0xfa, // ú
6407 0x7b: 0xe7, // ç
6408 0x7c: 0xf7, // ÷
6409 0x7d: 0xd1, // Ñ
6410 0x7e: 0xf1, // ñ
6411 0x7f: 0x2588, // █
6412 0x0130: 0xae, // ®
6413 0x0131: 0xb0, // °
6414 0x0132: 0xbd, // ½
6415 0x0133: 0xbf, // ¿
6416 0x0134: 0x2122, // ™
6417 0x0135: 0xa2, // ¢
6418 0x0136: 0xa3, // £
6419 0x0137: 0x266a, // ♪
6420 0x0138: 0xe0, // à
6421 0x0139: 0xa0, //
6422 0x013a: 0xe8, // è
6423 0x013b: 0xe2, // â
6424 0x013c: 0xea, // ê
6425 0x013d: 0xee, // î
6426 0x013e: 0xf4, // ô
6427 0x013f: 0xfb, // û
6428 0x0220: 0xc1, // Á
6429 0x0221: 0xc9, // É
6430 0x0222: 0xd3, // Ó
6431 0x0223: 0xda, // Ú
6432 0x0224: 0xdc, // Ü
6433 0x0225: 0xfc, // ü
6434 0x0226: 0x2018, // ‘
6435 0x0227: 0xa1, // ¡
6436 0x0228: 0x2a, // *
6437 0x0229: 0x27, // '
6438 0x022a: 0x2014, // —
6439 0x022b: 0xa9, // ©
6440 0x022c: 0x2120, // ℠
6441 0x022d: 0x2022, // •
6442 0x022e: 0x201c, // “
6443 0x022f: 0x201d, // ”
6444 0x0230: 0xc0, // À
6445 0x0231: 0xc2, // Â
6446 0x0232: 0xc7, // Ç
6447 0x0233: 0xc8, // È
6448 0x0234: 0xca, // Ê
6449 0x0235: 0xcb, // Ë
6450 0x0236: 0xeb, // ë
6451 0x0237: 0xce, // Î
6452 0x0238: 0xcf, // Ï
6453 0x0239: 0xef, // ï
6454 0x023a: 0xd4, // Ô
6455 0x023b: 0xd9, // Ù
6456 0x023c: 0xf9, // ù
6457 0x023d: 0xdb, // Û
6458 0x023e: 0xab, // «
6459 0x023f: 0xbb, // »
6460 0x0320: 0xc3, // Ã
6461 0x0321: 0xe3, // ã
6462 0x0322: 0xcd, // Í
6463 0x0323: 0xcc, // Ì
6464 0x0324: 0xec, // ì
6465 0x0325: 0xd2, // Ò
6466 0x0326: 0xf2, // ò
6467 0x0327: 0xd5, // Õ
6468 0x0328: 0xf5, // õ
6469 0x0329: 0x7b, // {
6470 0x032a: 0x7d, // }
6471 0x032b: 0x5c, // \
6472 0x032c: 0x5e, // ^
6473 0x032d: 0x5f, // _
6474 0x032e: 0x7c, // |
6475 0x032f: 0x7e, // ~
6476 0x0330: 0xc4, // Ä
6477 0x0331: 0xe4, // ä
6478 0x0332: 0xd6, // Ö
6479 0x0333: 0xf6, // ö
6480 0x0334: 0xdf, // ß
6481 0x0335: 0xa5, // ¥
6482 0x0336: 0xa4, // ¤
6483 0x0337: 0x2502, // │
6484 0x0338: 0xc5, // Å
6485 0x0339: 0xe5, // å
6486 0x033a: 0xd8, // Ø
6487 0x033b: 0xf8, // ø
6488 0x033c: 0x250c, // ┌
6489 0x033d: 0x2510, // ┐
6490 0x033e: 0x2514, // └
6491 0x033f: 0x2518 // ┘
6492 };
6493
6494 var getCharFromCode = function getCharFromCode(code) {
6495 if (code === null) {
6496 return '';
6497 }
6498 code = CHARACTER_TRANSLATION[code] || code;
6499 return String.fromCharCode(code);
6500 };
6501
6502 // the index of the last row in a CEA-608 display buffer
6503 var BOTTOM_ROW = 14;
6504
6505 // This array is used for mapping PACs -> row #, since there's no way of
6506 // getting it through bit logic.
6507 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420];
6508
6509 // CEA-608 captions are rendered onto a 34x15 matrix of character
6510 // cells. The "bottom" row is the last element in the outer array.
6511 var createDisplayBuffer = function createDisplayBuffer() {
6512 var result = [],
6513 i = BOTTOM_ROW + 1;
6514 while (i--) {
6515 result.push('');
6516 }
6517 return result;
6518 };
6519
  /**
   * A stream of CEA-608 captions for one field/data-channel pair (one of
   * CC1-CC4). Decodes 2-byte caption packets into rows of text, tracking
   * the current caption mode (pop-on, roll-up, paint-on), and emits cues
   * through flushDisplayed.
   *
   * @param {Number} field the 608 field (0 or 1)
   * @param {Number} dataChannel the data channel within the field (0 or 1)
   */
  var Cea608Stream = function Cea608Stream(field, dataChannel) {
    Cea608Stream.prototype.init.call(this);

    this.field_ = field || 0;
    this.dataChannel_ = dataChannel || 0;

    // human-readable stream name: CC1, CC2, CC3, or CC4
    this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);

    this.setConstants();
    this.reset();

    /**
     * Decode one caption packet: interpret control codes (mode switches,
     * erasures, PACs, mid-row and offset codes) or append printable
     * characters to the buffer selected by the current mode.
     *
     * @param {Object} packet caption packet with pts and 2-byte ccData
     */
    this.push = function (packet) {
      var data, swap, char0, char1, text;
      // remove the parity bits
      data = packet.ccData & 0x7f7f;

      // ignore duplicate control codes; the spec demands they're sent twice
      if (data === this.lastControlCode_) {
        this.lastControlCode_ = null;
        return;
      }

      // Store control codes (first byte 0x10-0x1f after parity stripping)
      if ((data & 0xf000) === 0x1000) {
        this.lastControlCode_ = data;
      } else if (data !== this.PADDING_) {
        this.lastControlCode_ = null;
      }

      char0 = data >>> 8;
      char1 = data & 0xff;

      if (data === this.PADDING_) {
        return;
      } else if (data === this.RESUME_CAPTION_LOADING_) {
        this.mode_ = 'popOn';
      } else if (data === this.END_OF_CAPTION_) {
        // If an EOC is received while in paint-on mode, the displayed caption
        // text should be swapped to non-displayed memory as if it was a pop-on
        // caption. Because of that, we should explicitly switch back to pop-on
        // mode
        this.mode_ = 'popOn';
        this.clearFormatting(packet.pts);
        // if a caption was being displayed, it's gone now
        this.flushDisplayed(packet.pts);

        // flip memory
        swap = this.displayed_;
        this.displayed_ = this.nonDisplayed_;
        this.nonDisplayed_ = swap;

        // start measuring the time to display the caption
        this.startPts_ = packet.pts;
      } else if (data === this.ROLL_UP_2_ROWS_) {
        this.rollUpRows_ = 2;
        this.setRollUp(packet.pts);
      } else if (data === this.ROLL_UP_3_ROWS_) {
        this.rollUpRows_ = 3;
        this.setRollUp(packet.pts);
      } else if (data === this.ROLL_UP_4_ROWS_) {
        this.rollUpRows_ = 4;
        this.setRollUp(packet.pts);
      } else if (data === this.CARRIAGE_RETURN_) {
        // emit the current roll-up window, then scroll it up a row
        this.clearFormatting(packet.pts);
        this.flushDisplayed(packet.pts);
        this.shiftRowsUp_();
        this.startPts_ = packet.pts;
      } else if (data === this.BACKSPACE_) {
        // remove the last character from the buffer the current mode writes to
        if (this.mode_ === 'popOn') {
          this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
        } else {
          this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
        }
      } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
        this.flushDisplayed(packet.pts);
        this.displayed_ = createDisplayBuffer();
      } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
        this.nonDisplayed_ = createDisplayBuffer();
      } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
        if (this.mode_ !== 'paintOn') {
          // NOTE: This should be removed when proper caption positioning is
          // implemented
          this.flushDisplayed(packet.pts);
          this.displayed_ = createDisplayBuffer();
        }
        this.mode_ = 'paintOn';
        this.startPts_ = packet.pts;

        // Append special characters to caption text
      } else if (this.isSpecialCharacter(char0, char1)) {
        // Bitmask char0 so that we can apply character transformations
        // regardless of field and data channel.
        // Then byte-shift to the left and OR with char1 so we can pass the
        // entire character code to `getCharFromCode`.
        char0 = (char0 & 0x03) << 8;
        text = getCharFromCode(char0 | char1);
        this[this.mode_](packet.pts, text);
        this.column_++;

        // Append extended characters to caption text
      } else if (this.isExtCharacter(char0, char1)) {
        // Extended characters always follow their "non-extended" equivalents.
        // IE if a "è" is desired, you'll always receive "eè"; non-compliant
        // decoders are supposed to drop the "è", while compliant decoders
        // backspace the "e" and insert "è".

        // Delete the previous character
        if (this.mode_ === 'popOn') {
          this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
        } else {
          this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
        }

        // Bitmask char0 so that we can apply character transformations
        // regardless of field and data channel.
        // Then byte-shift to the left and OR with char1 so we can pass the
        // entire character code to `getCharFromCode`.
        char0 = (char0 & 0x03) << 8;
        text = getCharFromCode(char0 | char1);
        this[this.mode_](packet.pts, text);
        this.column_++;

        // Process mid-row codes
      } else if (this.isMidRowCode(char0, char1)) {
        // Attributes are not additive, so clear all formatting
        this.clearFormatting(packet.pts);

        // According to the standard, mid-row codes
        // should be replaced with spaces, so add one now
        this[this.mode_](packet.pts, ' ');
        this.column_++;

        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }

        if ((char1 & 0x1) === 0x1) {
          this.addFormatting(packet.pts, ['u']);
        }

        // Detect offset control codes and adjust cursor
      } else if (this.isOffsetControlCode(char0, char1)) {
        // Cursor position is set by indent PAC (see below) in 4-column
        // increments, with an additional offset code of 1-3 to reach any
        // of the 32 columns specified by CEA-608. So all we need to do
        // here is increment the column cursor by the given offset.
        this.column_ += char1 & 0x03;

        // Detect PACs (Preamble Address Codes)
      } else if (this.isPAC(char0, char1)) {

        // There's no logic for PAC -> row mapping, so we have to just
        // find the row code in an array and use its index :(
        var row = ROWS.indexOf(data & 0x1f20);

        // Configure the caption window if we're in roll-up mode
        if (this.mode_ === 'rollUp') {
          // This implies that the base row is incorrectly set.
          // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
          // of roll-up rows set.
          if (row - this.rollUpRows_ + 1 < 0) {
            row = this.rollUpRows_ - 1;
          }

          this.setRollUp(packet.pts, row);
        }

        if (row !== this.row_) {
          // formatting is only persistent for current row
          this.clearFormatting(packet.pts);
          this.row_ = row;
        }
        // All PACs can apply underline, so detect and apply
        // (All odd-numbered second bytes set underline)
        if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
          this.addFormatting(packet.pts, ['u']);
        }

        if ((data & 0x10) === 0x10) {
          // We've got an indent level code. Each successive even number
          // increments the column cursor by 4, so we can get the desired
          // column position by bit-shifting to the right (to get n/2)
          // and multiplying by 4.
          this.column_ = ((data & 0xe) >> 1) * 4;
        }

        if (this.isColorPAC(char1)) {
          // it's a color code, though we only support white, which
          // can be either normal or italicized. white italics can be
          // either 0x4e or 0x6e depending on the row, so we just
          // bitwise-and with 0xe to see if italics should be turned on
          if ((char1 & 0xe) === 0xe) {
            this.addFormatting(packet.pts, ['i']);
          }
        }

        // We have a normal character in char0, and possibly one in char1
      } else if (this.isNormalChar(char0)) {
        if (char1 === 0x00) {
          char1 = null;
        }
        text = getCharFromCode(char0);
        text += getCharFromCode(char1);
        this[this.mode_](packet.pts, text);
        this.column_ += text.length;
      } // finish data processing
    };
  };
  Cea608Stream.prototype = new stream();
  // Trigger a cue point that captures the current state of the
  // display buffer
  Cea608Stream.prototype.flushDisplayed = function (pts) {
    var content = this.displayed_
    // remove spaces from the start and end of the string
    .map(function (row) {
      try {
        return row.trim();
      } catch (e) {
        // Ordinarily, this shouldn't happen. However, caption
        // parsing errors should not throw exceptions and
        // break playback.
        // eslint-disable-next-line no-console
        console.error('Skipping malformed caption.');
        return '';
      }
    })
    // combine all text rows to display in one cue
    .join('\n')
    // and remove blank rows from the start and end, but not the middle
    .replace(/^\n+|\n+$/g, '');

    if (content.length) {
      // the cue spans from when the caption started accumulating
      // (startPts_) to the current packet's pts
      this.trigger('data', {
        startPts: this.startPts_,
        endPts: pts,
        text: content,
        stream: this.name_
      });
    }
  };
6760
  /**
   * Zero out the data, used for startup and on seek
   */
  Cea608Stream.prototype.reset = function () {
    this.mode_ = 'popOn';
    // When in roll-up mode, the index of the last row that will
    // actually display captions. If a caption is shifted to a row
    // with a lower index than this, it is cleared from the display
    // buffer
    this.topRow_ = 0;
    this.startPts_ = 0;
    this.displayed_ = createDisplayBuffer();
    this.nonDisplayed_ = createDisplayBuffer();
    // last control code seen; used by push to drop the mandatory duplicate
    this.lastControlCode_ = null;

    // Track row and column for proper line-breaking and spacing
    this.column_ = 0;
    this.row_ = BOTTOM_ROW;
    this.rollUpRows_ = 2;

    // This variable holds currently-applied formatting
    this.formatting_ = [];
  };
6784
  /**
   * Sets up control code and related constants for this instance
   */
  Cea608Stream.prototype.setConstants = function () {
    // The following attributes have these uses:
    // ext_ : char0 for mid-row codes, and the base for extended
    // chars (ext_+0, ext_+1, and ext_+2 are char0s for
    // extended codes)
    // control_: char0 for control codes, except byte-shifted to the
    // left so that we can do this.control_ | CONTROL_CODE
    // offset_: char0 for tab offset codes
    //
    // It's also worth noting that control codes, and _only_ control codes,
    // differ between field 1 and field2. Field 2 control codes are always
    // their field 1 value plus 1. That's why there's the "| field" on the
    // control value.
    if (this.dataChannel_ === 0) {
      this.BASE_ = 0x10;
      this.EXT_ = 0x11;
      this.CONTROL_ = (0x14 | this.field_) << 8;
      this.OFFSET_ = 0x17;
    } else if (this.dataChannel_ === 1) {
      // data channel 2 uses the same layout shifted up by 8
      this.BASE_ = 0x18;
      this.EXT_ = 0x19;
      this.CONTROL_ = (0x1c | this.field_) << 8;
      this.OFFSET_ = 0x1f;
    }

    // Constants for the LSByte command codes recognized by Cea608Stream. This
    // list is not exhaustive. For a more comprehensive listing and semantics see
    // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
    // Padding
    this.PADDING_ = 0x0000;
    // Pop-on Mode
    this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
    this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f;
    // Roll-up Mode
    this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
    this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
    this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
    this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d;
    // paint-on mode
    this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29;
    // Erasure
    this.BACKSPACE_ = this.CONTROL_ | 0x21;
    this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
    this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
  };
6833
  /**
   * Detects if the 2-byte packet data is a special character
   *
   * Special characters have a second byte in the range 0x30 to 0x3f,
   * with the first byte being 0x11 (for data channel 1) or 0x19 (for
   * data channel 2).
   *
   * @param {Integer} char0 The first byte
   * @param {Integer} char1 The second byte
   * @return {Boolean} Whether the 2 bytes are an special character
   */
  Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
    return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
  };

  /**
   * Detects if the 2-byte packet data is an extended character
   *
   * Extended characters have a second byte in the range 0x20 to 0x3f,
   * with the first byte being 0x12 or 0x13 (for data channel 1) or
   * 0x1a or 0x1b (for data channel 2).
   *
   * @param {Integer} char0 The first byte
   * @param {Integer} char1 The second byte
   * @return {Boolean} Whether the 2 bytes are an extended character
   */
  Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
    return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
  };

  /**
   * Detects if the 2-byte packet is a mid-row code
   *
   * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
   * the first byte being 0x11 (for data channel 1) or 0x19 (for data
   * channel 2).
   *
   * @param {Integer} char0 The first byte
   * @param {Integer} char1 The second byte
   * @return {Boolean} Whether the 2 bytes are a mid-row code
   */
  Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
    return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
  };

  /**
   * Detects if the 2-byte packet is an offset control code
   *
   * Offset control codes have a second byte in the range 0x21 to 0x23,
   * with the first byte being 0x17 (for data channel 1) or 0x1f (for
   * data channel 2).
   *
   * @param {Integer} char0 The first byte
   * @param {Integer} char1 The second byte
   * @return {Boolean} Whether the 2 bytes are an offset control code
   */
  Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
    return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
  };

  /**
   * Detects if the 2-byte packet is a Preamble Address Code
   *
   * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
   * or 0x18 to 0x1f (for data channel 2), with the second byte in the
   * range 0x40 to 0x7f.
   *
   * @param {Integer} char0 The first byte
   * @param {Integer} char1 The second byte
   * @return {Boolean} Whether the 2 bytes are a PAC
   */
  Cea608Stream.prototype.isPAC = function (char0, char1) {
    return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
  };

  /**
   * Detects if a packet's second byte is in the range of a PAC color code
   *
   * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
   * 0x60 to 0x7f.
   * (NOTE(review): an earlier comment said 0x60 to 0x6f, but the code
   * accepts through 0x7f; the comment has been aligned with the code.)
   *
   * @param {Integer} char1 The second byte
   * @return {Boolean} Whether the byte is a color PAC
   */
  Cea608Stream.prototype.isColorPAC = function (char1) {
    return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
  };

  /**
   * Detects if a single byte is in the range of a normal character
   *
   * Normal text bytes are in the range 0x20 to 0x7f.
   *
   * @param {Integer} char The byte
   * @return {Boolean} Whether the byte is a normal character
   */
  Cea608Stream.prototype.isNormalChar = function (char) {
    return char >= 0x20 && char <= 0x7f;
  };
6933
  /**
   * Configures roll-up
   *
   * @param {Integer} pts Current PTS
   * @param {Integer} newBaseRow Used by PACs to slide the current window to
   * a new position
   */
  Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
    // Reset the base row to the bottom row when switching modes
    if (this.mode_ !== 'rollUp') {
      this.row_ = BOTTOM_ROW;
      this.mode_ = 'rollUp';
      // Spec says to wipe memories when switching to roll-up
      this.flushDisplayed(pts);
      this.nonDisplayed_ = createDisplayBuffer();
      this.displayed_ = createDisplayBuffer();
    }

    if (newBaseRow !== undefined && newBaseRow !== this.row_) {
      // move currently displayed captions (up or down) to the new base row,
      // clearing the rows they came from
      for (var i = 0; i < this.rollUpRows_; i++) {
        this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
        this.displayed_[this.row_ - i] = '';
      }
    }

    // no explicit base row supplied (e.g. RU2/RU3/RU4 control codes): keep
    // the current one
    if (newBaseRow === undefined) {
      newBaseRow = this.row_;
    }

    // topRow_ marks the highest row the roll-up window may occupy
    this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
  };
6966
6967 // Adds the opening HTML tag for the passed character to the caption text,
6968 // and keeps track of it for later closing
6969 Cea608Stream.prototype.addFormatting = function (pts, format) {
6970 this.formatting_ = this.formatting_.concat(format);
6971 var text = format.reduce(function (text, format) {
6972 return text + '<' + format + '>';
6973 }, '');
6974 this[this.mode_](pts, text);
6975 };
6976
6977 // Adds HTML closing tags for current formatting to caption text and
6978 // clears remembered formatting
6979 Cea608Stream.prototype.clearFormatting = function (pts) {
6980 if (!this.formatting_.length) {
6981 return;
6982 }
6983 var text = this.formatting_.reverse().reduce(function (text, format) {
6984 return text + '</' + format + '>';
6985 }, '');
6986 this.formatting_ = [];
6987 this[this.mode_](pts, text);
6988 };
6989
6990 // Mode Implementations
6991 Cea608Stream.prototype.popOn = function (pts, text) {
6992 var baseRow = this.nonDisplayed_[this.row_];
6993
6994 // buffer characters
6995 baseRow += text;
6996 this.nonDisplayed_[this.row_] = baseRow;
6997 };
6998
6999 Cea608Stream.prototype.rollUp = function (pts, text) {
7000 var baseRow = this.displayed_[this.row_];
7001
7002 baseRow += text;
7003 this.displayed_[this.row_] = baseRow;
7004 };
7005
7006 Cea608Stream.prototype.shiftRowsUp_ = function () {
7007 var i;
7008 // clear out inactive rows
7009 for (i = 0; i < this.topRow_; i++) {
7010 this.displayed_[i] = '';
7011 }
7012 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
7013 this.displayed_[i] = '';
7014 }
7015 // shift displayed rows up
7016 for (i = this.topRow_; i < this.row_; i++) {
7017 this.displayed_[i] = this.displayed_[i + 1];
7018 }
7019 // clear out the bottom row
7020 this.displayed_[this.row_] = '';
7021 };
7022
7023 Cea608Stream.prototype.paintOn = function (pts, text) {
7024 var baseRow = this.displayed_[this.row_];
7025
7026 baseRow += text;
7027 this.displayed_[this.row_] = baseRow;
7028 };
7029
  // exports
  // Public API of the caption-parsing pipeline: the top-level
  // CaptionStream demuxer and the per-channel CEA-608 decoder.
  var captionStream = {
    CaptionStream: CaptionStream,
    Cea608Stream: Cea608Stream
  };
7035
7036 /**
7037 * mux.js
7038 *
7039 * Copyright (c) Brightcove
7040 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
7041 */
7042
  // stream_type identifiers (as carried in the PMT) for the elementary
  // streams this pipeline understands: H.264 video, ADTS audio, and
  // timed (ID3) metadata
  var streamTypes = {
    H264_STREAM_TYPE: 0x1B,
    ADTS_STREAM_TYPE: 0x0F,
    METADATA_STREAM_TYPE: 0x15
  };
7048
7049 var MAX_TS = 8589934592;
7050
7051 var RO_THRESH = 4294967296;
7052
7053 var TYPE_SHARED = 'shared';
7054
7055 var handleRollover = function handleRollover(value, reference) {
7056 var direction = 1;
7057
7058 if (value > reference) {
7059 // If the current timestamp value is greater than our reference timestamp and we detect a
7060 // timestamp rollover, this means the roll over is happening in the opposite direction.
7061 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
7062 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
7063 // rollover point. In loading this segment, the timestamp values will be very large,
7064 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
7065 // the time stamp to be `value - 2^33`.
7066 direction = -1;
7067 }
7068
7069 // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
7070 // cause an incorrect adjustment.
7071 while (Math.abs(reference - value) > RO_THRESH) {
7072 value += direction * MAX_TS;
7073 }
7074
7075 return value;
7076 };
7077
7078 var TimestampRolloverStream = function TimestampRolloverStream(type) {
7079 var lastDTS, referenceDTS;
7080
7081 TimestampRolloverStream.prototype.init.call(this);
7082
7083 // The "shared" type is used in cases where a stream will contain muxed
7084 // video and audio. We could use `undefined` here, but having a string
7085 // makes debugging a little clearer.
7086 this.type_ = type || TYPE_SHARED;
7087
7088 this.push = function (data) {
7089
7090 // Any "shared" rollover streams will accept _all_ data. Otherwise,
7091 // streams will only accept data that matches their type.
7092 if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
7093 return;
7094 }
7095
7096 if (referenceDTS === undefined) {
7097 referenceDTS = data.dts;
7098 }
7099
7100 data.dts = handleRollover(data.dts, referenceDTS);
7101 data.pts = handleRollover(data.pts, referenceDTS);
7102
7103 lastDTS = data.dts;
7104
7105 this.trigger('data', data);
7106 };
7107
7108 this.flush = function () {
7109 referenceDTS = lastDTS;
7110 this.trigger('done');
7111 };
7112
7113 this.endTimeline = function () {
7114 this.flush();
7115 this.trigger('endedtimeline');
7116 };
7117
7118 this.discontinuity = function () {
7119 referenceDTS = void 0;
7120 lastDTS = void 0;
7121 };
7122
7123 this.reset = function () {
7124 this.discontinuity();
7125 this.trigger('reset');
7126 };
7127 };
7128
7129 TimestampRolloverStream.prototype = new stream();
7130
7131 var timestampRolloverStream = {
7132 TimestampRolloverStream: TimestampRolloverStream,
7133 handleRollover: handleRollover
7134 };
7135
7136 var percentEncode = function percentEncode(bytes, start, end) {
7137 var i,
7138 result = '';
7139 for (i = start; i < end; i++) {
7140 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
7141 }
7142 return result;
7143 },
7144
7145
7146 // return the string representation of the specified byte range,
7147 // interpreted as UTf-8.
7148 parseUtf8 = function parseUtf8(bytes, start, end) {
7149 return decodeURIComponent(percentEncode(bytes, start, end));
7150 },
7151
7152
7153 // return the string representation of the specified byte range,
7154 // interpreted as ISO-8859-1.
7155 parseIso88591 = function parseIso88591(bytes, start, end) {
7156 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
7157 },
7158 parseSyncSafeInteger = function parseSyncSafeInteger(data) {
7159 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
7160 },
7161 tagParsers = {
7162 TXXX: function TXXX(tag) {
7163 var i;
7164 if (tag.data[0] !== 3) {
7165 // ignore frames with unrecognized character encodings
7166 return;
7167 }
7168
7169 for (i = 1; i < tag.data.length; i++) {
7170 if (tag.data[i] === 0) {
7171 // parse the text fields
7172 tag.description = parseUtf8(tag.data, 1, i);
7173 // do not include the null terminator in the tag value
7174 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
7175 break;
7176 }
7177 }
7178 tag.data = tag.value;
7179 },
7180 WXXX: function WXXX(tag) {
7181 var i;
7182 if (tag.data[0] !== 3) {
7183 // ignore frames with unrecognized character encodings
7184 return;
7185 }
7186
7187 for (i = 1; i < tag.data.length; i++) {
7188 if (tag.data[i] === 0) {
7189 // parse the description and URL fields
7190 tag.description = parseUtf8(tag.data, 1, i);
7191 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
7192 break;
7193 }
7194 }
7195 },
7196 PRIV: function PRIV(tag) {
7197 var i;
7198
7199 for (i = 0; i < tag.data.length; i++) {
7200 if (tag.data[i] === 0) {
7201 // parse the description and URL fields
7202 tag.owner = parseIso88591(tag.data, 0, i);
7203 break;
7204 }
7205 }
7206 tag.privateData = tag.data.subarray(i + 1);
7207 tag.data = tag.privateData;
7208 }
7209 },
7210 _MetadataStream;
7211
  /**
   * A stream that reassembles ID3 tags from "timed-metadata" PES chunks
   * and emits one 'data' event per complete tag, with the tag's frames
   * parsed by the `tagParsers` table above.
   *
   * @param {Object} [options]
   * @param {boolean} [options.debug] log skipped/unrecognized packets
   * @param {Uint8Array} [options.descriptor] the bytes of the
   *        program-level descriptor field in MP2T, folded into the
   *        text-track dispatch type
   */
  _MetadataStream = function MetadataStream(options) {
    var settings = {
      debug: !!(options && options.debug),

      // the bytes of the program-level descriptor field in MP2T
      // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
      // program element descriptors"
      descriptor: options && options.descriptor
    },

    // the total size in bytes of the ID3 tag being parsed
    tagSize = 0,

    // tag data that is not complete enough to be parsed
    buffer = [],

    // the total number of bytes currently in the buffer
    bufferSize = 0,
        i;

    _MetadataStream.prototype.init.call(this);

    // calculate the text track in-band metadata track dispatch type
    // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
    this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
    if (settings.descriptor) {
      for (i = 0; i < settings.descriptor.length; i++) {
        this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
      }
    }

    /**
     * Buffer "timed-metadata" PES chunks until a whole ID3 tag has
     * arrived, then parse it and emit it as a 'data' event. Chunks of
     * other types are ignored.
     *
     * @param {Object} chunk a PES packet fragment; must carry `type`,
     *        `data` (Uint8Array), `pts`/`dts`, and
     *        `dataAlignmentIndicator` fields
     */
    this.push = function (chunk) {
      var tag, frameStart, frameSize, frame, i, frameHeader;
      if (chunk.type !== 'timed-metadata') {
        return;
      }

      // if data_alignment_indicator is set in the PES header,
      // we must have the start of a new ID3 tag. Assume anything
      // remaining in the buffer was malformed and throw it out
      if (chunk.dataAlignmentIndicator) {
        bufferSize = 0;
        buffer.length = 0;
      }

      // ignore events that don't look like ID3 data (a fresh tag must
      // begin with the "ID3" magic and a full 10-byte header)
      if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
        if (settings.debug) {
          // eslint-disable-next-line no-console
          console.log('Skipping unrecognized metadata packet');
        }
        return;
      }

      // add this chunk to the data we've collected so far
      buffer.push(chunk);
      bufferSize += chunk.data.byteLength;

      // grab the size of the entire frame from the ID3 header
      if (buffer.length === 1) {
        // the frame size is transmitted as a 28-bit integer in the
        // last four bytes of the ID3 header.
        // The most significant bit of each byte is dropped and the
        // results concatenated to recover the actual value.
        tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));

        // ID3 reports the tag size excluding the header but it's more
        // convenient for our comparisons to include it
        tagSize += 10;
      }

      // if the entire frame has not arrived, wait for more data
      if (bufferSize < tagSize) {
        return;
      }

      // collect the entire frame so it can be parsed; the pts/dts of
      // the tag are those of its first chunk
      tag = {
        data: new Uint8Array(tagSize),
        frames: [],
        pts: buffer[0].pts,
        dts: buffer[0].dts
      };
      // drain buffered chunks into the contiguous tag.data array
      for (i = 0; i < tagSize;) {
        tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
        i += buffer[0].data.byteLength;
        bufferSize -= buffer[0].data.byteLength;
        buffer.shift();
      }

      // find the start of the first frame and the end of the tag
      frameStart = 10;
      // bit 0x40 of byte 5 flags an ID3v2 extended header
      if (tag.data[5] & 0x40) {
        // advance the frame start past the extended header
        frameStart += 4; // header size field
        frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));

        // clip any padding off the end
        tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
      }

      // parse one or more ID3 frames
      // http://id3.org/id3v2.3.0#ID3v2_frame_overview
      do {
        // determine the number of bytes in this frame
        frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
        if (frameSize < 1) {
          // eslint-disable-next-line no-console
          return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
        }
        // the frame id is the four ASCII characters at the frame start
        frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);

        frame = {
          id: frameHeader,
          data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
        };
        frame.key = frame.id;
        if (tagParsers[frame.id]) {
          tagParsers[frame.id](frame);

          // handle the special PRIV frame used to indicate the start
          // time for raw AAC data
          if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
            var d = frame.data,
                size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;

            // reconstruct the 33-bit timestamp: shift the upper 31 bits
            // left by 2 via multiplication, then OR in the low 2 bits
            size *= 4;
            size += d[7] & 0x03;
            frame.timeStamp = size;
            // in raw AAC, all subsequent data will be timestamped based
            // on the value of this frame
            // we couldn't have known the appropriate pts and dts before
            // parsing this ID3 tag so set those values now
            if (tag.pts === undefined && tag.dts === undefined) {
              tag.pts = frame.timeStamp;
              tag.dts = frame.timeStamp;
            }
            this.trigger('timestamp', frame);
          }
        }
        tag.frames.push(frame);

        frameStart += 10; // advance past the frame header
        frameStart += frameSize; // advance past the frame body
      } while (frameStart < tagSize);
      this.trigger('data', tag);
    };
  };
  _MetadataStream.prototype = new stream();
7365
  var metadataStream = _MetadataStream;

  var TimestampRolloverStream$1 = timestampRolloverStream.TimestampRolloverStream;

  // object types (assigned below)
  var _TransportPacketStream, _TransportParseStream, _ElementaryStream;

  // constants
  // every MPEG-2 transport stream packet is exactly 188 bytes
  var MP2T_PACKET_LENGTH = 188,

  // bytes: each TS packet begins with this sync byte ('G')
  SYNC_BYTE = 0x47;
7378
7379 /**
7380 * Splits an incoming stream of binary data into MPEG-2 Transport
7381 * Stream packets.
7382 */
7383 _TransportPacketStream = function TransportPacketStream() {
7384 var buffer = new Uint8Array(MP2T_PACKET_LENGTH),
7385 bytesInBuffer = 0;
7386
7387 _TransportPacketStream.prototype.init.call(this);
7388
7389 // Deliver new bytes to the stream.
7390
7391 /**
7392 * Split a stream of data into M2TS packets
7393 **/
7394 this.push = function (bytes) {
7395 var startIndex = 0,
7396 endIndex = MP2T_PACKET_LENGTH,
7397 everything;
7398
7399 // If there are bytes remaining from the last segment, prepend them to the
7400 // bytes that were pushed in
7401 if (bytesInBuffer) {
7402 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
7403 everything.set(buffer.subarray(0, bytesInBuffer));
7404 everything.set(bytes, bytesInBuffer);
7405 bytesInBuffer = 0;
7406 } else {
7407 everything = bytes;
7408 }
7409
7410 // While we have enough data for a packet
7411 while (endIndex < everything.byteLength) {
7412 // Look for a pair of start and end sync bytes in the data..
7413 if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
7414 // We found a packet so emit it and jump one whole packet forward in
7415 // the stream
7416 this.trigger('data', everything.subarray(startIndex, endIndex));
7417 startIndex += MP2T_PACKET_LENGTH;
7418 endIndex += MP2T_PACKET_LENGTH;
7419 continue;
7420 }
7421 // If we get here, we have somehow become de-synchronized and we need to step
7422 // forward one byte at a time until we find a pair of sync bytes that denote
7423 // a packet
7424 startIndex++;
7425 endIndex++;
7426 }
7427
7428 // If there was some data left over at the end of the segment that couldn't
7429 // possibly be a whole packet, keep it because it might be the start of a packet
7430 // that continues in the next segment
7431 if (startIndex < everything.byteLength) {
7432 buffer.set(everything.subarray(startIndex), 0);
7433 bytesInBuffer = everything.byteLength - startIndex;
7434 }
7435 };
7436
7437 /**
7438 * Passes identified M2TS packets to the TransportParseStream to be parsed
7439 **/
7440 this.flush = function () {
7441 // If the buffer contains a whole packet when we are being flushed, emit it
7442 // and empty the buffer. Otherwise hold onto the data because it may be
7443 // important for decoding the next segment
7444 if (bytesInBuffer === MP2T_PACKET_LENGTH && buffer[0] === SYNC_BYTE) {
7445 this.trigger('data', buffer);
7446 bytesInBuffer = 0;
7447 }
7448 this.trigger('done');
7449 };
7450
7451 this.endTimeline = function () {
7452 this.flush();
7453 this.trigger('endedtimeline');
7454 };
7455
7456 this.reset = function () {
7457 bytesInBuffer = 0;
7458 this.trigger('reset');
7459 };
7460 };
7461 _TransportPacketStream.prototype = new stream();
7462
  /**
   * Accepts an MP2T TransportPacketStream and emits data events with parsed
   * forms of the individual transport stream packets: 'pat', 'pmt', and
   * 'pes' results. PES packets seen before any PMT are queued and
   * replayed once the PMT arrives.
   */
  _TransportParseStream = function TransportParseStream() {
    var parsePsi, parsePat, parsePmt, self;
    _TransportParseStream.prototype.init.call(this);
    self = this;

    this.packetsWaitingForPmt = [];
    this.programMapTable = undefined;

    // Dispatch a program-specific-information payload to the PAT or PMT
    // parser, skipping the pointer field when a new section starts here.
    parsePsi = function parsePsi(payload, psi) {
      var offset = 0;

      // PSI packets may be split into multiple sections and those
      // sections may be split into multiple packets. If a PSI
      // section starts in this packet, the payload_unit_start_indicator
      // will be true and the first byte of the payload will indicate
      // the offset from the current position to the start of the
      // section.
      if (psi.payloadUnitStartIndicator) {
        offset += payload[offset] + 1;
      }

      if (psi.type === 'pat') {
        parsePat(payload.subarray(offset), psi);
      } else {
        parsePmt(payload.subarray(offset), psi);
      }
    };

    // Parse a Program Association Table to find the PID of the first
    // program's PMT. Only the first PMT entry is used.
    parsePat = function parsePat(payload, pat) {
      pat.section_number = payload[7]; // eslint-disable-line camelcase
      pat.last_section_number = payload[8]; // eslint-disable-line camelcase

      // skip the PSI header and parse the first PMT entry
      self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
      pat.pmtPid = self.pmtPid;
    };

    /**
     * Parse out the relevant fields of a Program Map Table (PMT).
     * @param payload {Uint8Array} the PMT-specific portion of an MP2T
     * packet. The first byte in this array should be the table_id
     * field.
     * @param pmt {object} the object that should be decorated with
     * fields parsed from the PMT.
     */
    parsePmt = function parsePmt(payload, pmt) {
      var sectionLength, tableEnd, programInfoLength, offset;

      // PMTs can be sent ahead of the time when they should actually
      // take effect. We don't believe this should ever be the case
      // for HLS but we'll ignore "forward" PMT declarations if we see
      // them. Future PMT declarations have the current_next_indicator
      // set to zero.
      if (!(payload[5] & 0x01)) {
        return;
      }

      // overwrite any existing program map table
      self.programMapTable = {
        video: null,
        audio: null,
        'timed-metadata': {}
      };

      // the mapping table ends at the end of the current section
      // (3-byte table header + section_length, minus the 4-byte CRC)
      sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
      tableEnd = 3 + sectionLength - 4;

      // to determine where the table is, we have to figure out how
      // long the program info descriptors are
      programInfoLength = (payload[10] & 0x0f) << 8 | payload[11];

      // advance the offset to the first entry in the mapping table
      offset = 12 + programInfoLength;
      while (offset < tableEnd) {
        var streamType = payload[offset];
        var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2];

        // only map a single elementary_pid for audio and video stream types
        // TODO: should this be done for metadata too? for now maintain behavior of
        // multiple metadata streams
        if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
          self.programMapTable.video = pid;
        } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
          self.programMapTable.audio = pid;
        } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
          // map pid to stream type for metadata streams
          self.programMapTable['timed-metadata'][pid] = streamType;
        }

        // move to the next table entry
        // skip past the elementary stream descriptors, if present
        offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
      }

      // record the map on the packet as well
      pmt.programMapTable = self.programMapTable;
    };

    /**
     * Deliver a new MP2T packet to the next stream in the pipeline.
     *
     * @param {Uint8Array} packet a single 188-byte transport stream packet
     */
    this.push = function (packet) {
      var result = {},
          offset = 4;

      result.payloadUnitStartIndicator = !!(packet[1] & 0x40);

      // pid is a 13-bit field starting at the last bit of packet[1]
      result.pid = packet[1] & 0x1f;
      result.pid <<= 8;
      result.pid |= packet[2];

      // if an adaption field is present, its length is specified by the
      // fifth byte of the TS packet header. The adaptation field is
      // used to add stuffing to PES packets that don't fill a complete
      // TS packet, and to specify some forms of timing and control data
      // that we do not currently use.
      if ((packet[3] & 0x30) >>> 4 > 0x01) {
        offset += packet[offset] + 1;
      }

      // parse the rest of the packet based on the type
      if (result.pid === 0) {
        result.type = 'pat';
        parsePsi(packet.subarray(offset), result);
        this.trigger('data', result);
      } else if (result.pid === this.pmtPid) {
        result.type = 'pmt';
        parsePsi(packet.subarray(offset), result);
        this.trigger('data', result);

        // if there are any packets waiting for a PMT to be found, process them now
        while (this.packetsWaitingForPmt.length) {
          this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
        }
      } else if (this.programMapTable === undefined) {
        // When we have not seen a PMT yet, defer further processing of
        // PES packets until one has been parsed
        this.packetsWaitingForPmt.push([packet, offset, result]);
      } else {
        this.processPes_(packet, offset, result);
      }
    };

    // Tag a PES packet with its stream type (looked up by PID in the
    // current PMT) and forward it downstream.
    this.processPes_ = function (packet, offset, result) {
      // set the appropriate stream type
      if (result.pid === this.programMapTable.video) {
        result.streamType = streamTypes.H264_STREAM_TYPE;
      } else if (result.pid === this.programMapTable.audio) {
        result.streamType = streamTypes.ADTS_STREAM_TYPE;
      } else {
        // if not video or audio, it is timed-metadata or unknown
        // if unknown, streamType will be undefined
        result.streamType = this.programMapTable['timed-metadata'][result.pid];
      }

      result.type = 'pes';
      result.data = packet.subarray(offset);
      this.trigger('data', result);
    };
  };
  _TransportParseStream.prototype = new stream();
  _TransportParseStream.STREAM_TYPES = {
    h264: 0x1b,
    adts: 0x0f
  };
7634
  /**
   * Reconsistutes program elementary stream (PES) packets from parsed
   * transport stream packets. That is, if you pipe an
   * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
   * events will be events which capture the bytes for individual PES
   * packets plus relevant metadata that has been extracted from the
   * container.
   */
  _ElementaryStream = function ElementaryStream() {
    var self = this,

    // PES packet fragments, accumulated per stream type until a
    // complete packet can be emitted
    video = {
      data: [],
      size: 0
    },
        audio = {
      data: [],
      size: 0
    },
        timedMetadata = {
      data: [],
      size: 0
    },
        programMapTable,

    // Decorate `pes` with the fields parsed from an assembled PES
    // packet payload: packetLength, dataAlignmentIndicator, pts/dts
    // (when present), and the payload data itself.
    parsePes = function parsePes(payload, pes) {
      var ptsDtsFlags;

      // get the packet length, this will be 0 for video
      pes.packetLength = 6 + (payload[4] << 8 | payload[5]);

      // find out if this packets starts a new keyframe
      pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0;
      // PES packets may be annotated with a PTS value, or a PTS value
      // and a DTS value. Determine what combination of values is
      // available to work with.
      ptsDtsFlags = payload[7];

      // PTS and DTS are normally stored as a 33-bit number. Javascript
      // performs all bitwise operations on 32-bit integers but javascript
      // supports a much greater range (52-bits) of integer using standard
      // mathematical operations.
      // We construct a 31-bit value using bitwise operators over the 31
      // most significant bits and then multiply by 4 (equal to a left-shift
      // of 2) before we add the final 2 least significant bits of the
      // timestamp (equal to an OR.)
      if (ptsDtsFlags & 0xC0) {
        // the PTS and DTS are not written out directly. For information
        // on how they are encoded, see
        // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
        pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
        pes.pts *= 4; // Left shift by 2
        pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
        pes.dts = pes.pts;
        if (ptsDtsFlags & 0x40) {
          pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
          pes.dts *= 4; // Left shift by 2
          pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
        }
      }
      // the data section starts immediately after the PES header.
      // pes_header_data_length specifies the number of header bytes
      // that follow the last byte of the field.
      pes.data = payload.subarray(9 + payload[8]);
    },


    /**
     * Pass completely parsed PES packets to the next stream in the pipeline
     **/
    flushStream = function flushStream(stream$$1, type, forceFlush) {
      var packetData = new Uint8Array(stream$$1.size),
          event = {
        type: type
      },
          i = 0,
          offset = 0,
          packetFlushable = false,
          fragment;

      // do nothing if there is not enough buffered data for a complete
      // PES header
      if (!stream$$1.data.length || stream$$1.size < 9) {
        return;
      }
      event.trackId = stream$$1.data[0].pid;

      // reassemble the packet
      for (i = 0; i < stream$$1.data.length; i++) {
        fragment = stream$$1.data[i];

        packetData.set(fragment.data, offset);
        offset += fragment.data.byteLength;
      }

      // parse assembled packet's PES header
      parsePes(packetData, event);

      // non-video PES packets MUST have a non-zero PES_packet_length
      // check that there is enough stream data to fill the packet
      packetFlushable = type === 'video' || event.packetLength <= stream$$1.size;

      // flush pending packets if the conditions are right
      if (forceFlush || packetFlushable) {
        stream$$1.size = 0;
        stream$$1.data.length = 0;
      }

      // only emit packets that are complete. this is to avoid assembling
      // incomplete PES packets due to poor segmentation
      if (packetFlushable) {
        self.trigger('data', event);
      }
    };

    _ElementaryStream.prototype.init.call(this);

    /**
     * Identifies M2TS packet types and parses PES packets using metadata
     * parsed from the PMT
     **/
    this.push = function (data) {
      ({
        pat: function pat() {
          // we have to wait for the PMT to arrive as well before we
          // have any meaningful metadata
        },
        pes: function pes() {
          var stream$$1, streamType;

          switch (data.streamType) {
            case streamTypes.H264_STREAM_TYPE:
              stream$$1 = video;
              streamType = 'video';
              break;
            case streamTypes.ADTS_STREAM_TYPE:
              stream$$1 = audio;
              streamType = 'audio';
              break;
            case streamTypes.METADATA_STREAM_TYPE:
              stream$$1 = timedMetadata;
              streamType = 'timed-metadata';
              break;
            default:
              // ignore unknown stream types
              return;
          }

          // if a new packet is starting, we can flush the completed
          // packet
          if (data.payloadUnitStartIndicator) {
            flushStream(stream$$1, streamType, true);
          }

          // buffer this fragment until we are sure we've received the
          // complete payload
          stream$$1.data.push(data);
          stream$$1.size += data.data.byteLength;
        },
        pmt: function pmt() {
          var event = {
            type: 'metadata',
            tracks: []
          };

          programMapTable = data.programMapTable;

          // translate audio and video streams to tracks
          if (programMapTable.video !== null) {
            event.tracks.push({
              timelineStartInfo: {
                baseMediaDecodeTime: 0
              },
              id: +programMapTable.video,
              codec: 'avc',
              type: 'video'
            });
          }
          if (programMapTable.audio !== null) {
            event.tracks.push({
              timelineStartInfo: {
                baseMediaDecodeTime: 0
              },
              id: +programMapTable.audio,
              codec: 'adts',
              type: 'audio'
            });
          }

          self.trigger('data', event);
        }
      })[data.type]();
    };

    // NOTE(review): timedMetadata buffers are not cleared here, unlike
    // video and audio — confirm whether that is intentional.
    this.reset = function () {
      video.size = 0;
      video.data.length = 0;
      audio.size = 0;
      audio.data.length = 0;
      this.trigger('reset');
    };

    /**
     * Flush any remaining input. Video PES packets may be of variable
     * length. Normally, the start of a new video packet can trigger the
     * finalization of the previous packet. That is not possible if no
     * more video is forthcoming, however. In that case, some other
     * mechanism (like the end of the file) has to be employed. When it is
     * clear that no additional data is forthcoming, calling this method
     * will flush the buffered packets.
     */
    this.flushStreams_ = function () {
      // !!THIS ORDER IS IMPORTANT!!
      // video first then audio
      flushStream(video, 'video');
      flushStream(audio, 'audio');
      flushStream(timedMetadata, 'timed-metadata');
    };

    this.flush = function () {
      this.flushStreams_();
      this.trigger('done');
    };
  };
  _ElementaryStream.prototype = new stream();
7861
  // composite m2ts module API: the packet, PSI, and elementary stream
  // parsers plus the caption and metadata consumers defined above
  var m2ts = {
    PAT_PID: 0x0000,
    MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
    TransportPacketStream: _TransportPacketStream,
    TransportParseStream: _TransportParseStream,
    ElementaryStream: _ElementaryStream,
    TimestampRolloverStream: TimestampRolloverStream$1,
    CaptionStream: captionStream.CaptionStream,
    Cea608Stream: captionStream.Cea608Stream,
    MetadataStream: metadataStream
  };

  // re-export the numeric stream-type constants (e.g. H264_STREAM_TYPE)
  // directly on the module object as well
  for (var type in streamTypes) {
    if (streamTypes.hasOwnProperty(type)) {
      m2ts[type] = streamTypes[type];
    }
  }

  var m2ts_1 = m2ts;
7881
  var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;

  // assigned below
  var _AdtsStream;

  // sampling frequencies in Hz, indexed by the 4-bit
  // sampling_frequency_index field of the ADTS header
  var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
7887
  /*
   * Accepts a ElementaryStream and emits data events with parsed
   * AAC Audio Frames of the individual packets. Input audio in ADTS
   * format is unpacked and re-emitted as AAC frames.
   *
   * @see http://wiki.multimedia.cx/index.php?title=ADTS
   * @see http://wiki.multimedia.cx/?title=Understanding_AAC
   *
   * @param handlePartialSegments when truthy, the running frame count
   *        (used to offset pts/dts per frame) persists across pushes
   *        instead of being reset on every packet
   */
  _AdtsStream = function AdtsStream(handlePartialSegments) {
    var buffer,
        frameNum = 0;

    _AdtsStream.prototype.init.call(this);

    this.push = function (packet) {
      var i = 0,
          frameLength,
          protectionSkipBytes,
          frameEnd,
          oldBuffer,
          sampleCount,
          adtsFrameDuration;

      // without partial-segment handling each packet's frames are
      // timestamped relative to that packet's own pts/dts
      if (!handlePartialSegments) {
        frameNum = 0;
      }

      if (packet.type !== 'audio') {
        // ignore non-audio data
        return;
      }

      // Prepend any data in the buffer to the input data so that we can parse
      // aac frames the cross a PES packet boundary
      if (buffer) {
        oldBuffer = buffer;
        buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
        buffer.set(oldBuffer);
        buffer.set(packet.data, oldBuffer.byteLength);
      } else {
        buffer = packet.data;
      }

      // unpack any ADTS frames which have been fully received
      // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
      while (i + 5 < buffer.length) {

        // Look for the start of an ADTS header (12-bit syncword 0xFFF;
        // the 0xF6 mask ignores the MPEG-version and protection bits)
        if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
          // If a valid header was not found, jump one forward and attempt to
          // find a valid ADTS header starting at the next byte
          i++;
          continue;
        }

        // The protection skip bit tells us if we have 2 bytes of CRC data at the
        // end of the ADTS header
        protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2;

        // Frame length is a 13 bit integer starting 16 bits from the
        // end of the sync sequence
        frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;

        // each AAC frame carries (raw_data_blocks + 1) * 1024 samples;
        // the duration is derived from the header's sampling frequency
        sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
        adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2];

        frameEnd = i + frameLength;

        // If we don't have enough data to actually finish this ADTS frame, return
        // and wait for more data
        if (buffer.byteLength < frameEnd) {
          return;
        }

        // Otherwise, deliver the complete AAC frame
        this.trigger('data', {
          pts: packet.pts + frameNum * adtsFrameDuration,
          dts: packet.dts + frameNum * adtsFrameDuration,
          sampleCount: sampleCount,
          audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
          channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
          samplerate: ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2],
          samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
          // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
          samplesize: 16,
          data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
        });

        frameNum++;

        // If the buffer is empty, clear it and return
        if (buffer.byteLength === frameEnd) {
          buffer = undefined;
          return;
        }

        // Remove the finished frame from the buffer and start the process again
        buffer = buffer.subarray(frameEnd);
      }
    };

    this.flush = function () {
      frameNum = 0;
      this.trigger('done');
    };

    this.reset = function () {
      buffer = void 0;
      this.trigger('reset');
    };

    this.endTimeline = function () {
      buffer = void 0;
      this.trigger('endedtimeline');
    };
  };

  _AdtsStream.prototype = new stream();

  var adts = _AdtsStream;
8008
8009 /**
8010 * mux.js
8011 *
8012 * Copyright (c) Brightcove
8013 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
8014 */
8015
var ExpGolomb;

/**
 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
 * scheme used by h264.
 */
ExpGolomb = function ExpGolomb(workingData) {
  // the number of bytes of workingData not yet loaded into the working word
  var bytesLeft = workingData.byteLength;

  // the 32-bit word currently being examined, left-aligned :uint
  var word = 0;

  // the number of unread bits remaining in `word` :uint
  var bitsLeft = 0;

  // ():uint -- total size of the remaining input, in bits
  this.length = function () {
    return 8 * bytesLeft;
  };

  // ():uint -- bits that have not yet been consumed
  this.bitsAvailable = function () {
    return 8 * bytesLeft + bitsLeft;
  };

  // ():void -- refill `word` with the next (up to) four input bytes
  this.loadWord = function () {
    var position = workingData.byteLength - bytesLeft;
    var chunk = new Uint8Array(4);
    var count = Math.min(4, bytesLeft);

    if (count === 0) {
      throw new Error('no bytes available');
    }

    chunk.set(workingData.subarray(position, position + count));
    word = new DataView(chunk.buffer).getUint32(0);

    // track the amount of workingData that has been processed
    bitsLeft = count * 8;
    bytesLeft -= count;
  };

  // (count:int):void -- discard the next `count` bits
  this.skipBits = function (count) {
    if (bitsLeft > count) {
      word <<= count;
      bitsLeft -= count;
      return;
    }

    count -= bitsLeft;
    var wholeBytes = Math.floor(count / 8); // :int

    count -= wholeBytes * 8;
    bytesLeft -= wholeBytes;

    this.loadWord();

    word <<= count;
    bitsLeft -= count;
  };

  // (size:int):uint -- read `size` bits as an unsigned integer
  this.readBits = function (size) {
    var consumed = Math.min(bitsLeft, size); // :uint
    var result = word >>> 32 - consumed; // :uint
    // if size > 31, handle error

    bitsLeft -= consumed;
    if (bitsLeft > 0) {
      word <<= consumed;
    } else if (bytesLeft > 0) {
      this.loadWord();
    }

    // recurse for any bits that straddled the word boundary
    var remaining = size - consumed;
    if (remaining > 0) {
      return result << remaining | this.readBits(remaining);
    }
    return result;
  };

  // ():uint -- advance to the next set bit, returning the count of zeros
  this.skipLeadingZeros = function () {
    var zeros; // :uint
    for (zeros = 0; zeros < bitsLeft; ++zeros) {
      if ((word & 0x80000000 >>> zeros) !== 0) {
        // the first bit of working word is 1
        word <<= zeros;
        bitsLeft -= zeros;
        return zeros;
      }
    }

    // we exhausted the current word and still have not found a 1
    this.loadWord();
    return zeros + this.skipLeadingZeros();
  };

  // ():void
  this.skipUnsignedExpGolomb = function () {
    this.skipBits(1 + this.skipLeadingZeros());
  };

  // ():void
  this.skipExpGolomb = function () {
    this.skipBits(1 + this.skipLeadingZeros());
  };

  // ():uint -- decode an unsigned Exp-Golomb value, ue(v)
  this.readUnsignedExpGolomb = function () {
    var zeros = this.skipLeadingZeros(); // :uint
    return this.readBits(zeros + 1) - 1;
  };

  // ():int -- decode a signed Exp-Golomb value, se(v)
  this.readExpGolomb = function () {
    var coded = this.readUnsignedExpGolomb(); // :int
    if (0x01 & coded) {
      // odd code words map to positive values: add 1, divide by 2
      return coded + 1 >>> 1;
    }
    // even code words map to non-positive values: divide by 2, negate
    return -1 * (coded >>> 1);
  };

  // Some convenience functions
  // :Boolean
  this.readBoolean = function () {
    return this.readBits(1) === 1;
  };

  // ():int
  this.readUnsignedByte = function () {
    return this.readBits(8);
  };

  this.loadWord();
};

var expGolomb = ExpGolomb;
8163
var _H264Stream, _NalByteStream;
var PROFILES_WITH_OPTIONAL_SPS_DATA;

/**
 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
 */
_NalByteStream = function NalByteStream() {
  // syncPoint indexes the most recently seen start-code prefix, i is the
  // scan cursor, and buffer holds unconsumed bytes; all three persist
  // across calls to push() so NAL units may span packet boundaries
  var syncPoint = 0,
      i,
      buffer;
  _NalByteStream.prototype.init.call(this);

  /*
   * Scans a byte stream and triggers a data event with the NAL units found.
   * @param {Object} data Event received from H264Stream
   * @param {Uint8Array} data.data The h264 byte stream to be scanned
   *
   * @see H264Stream.push
   */
  this.push = function (data) {
    var swapBuffer;

    if (!buffer) {
      buffer = data.data;
    } else {
      // append the new bytes to any leftover from the previous push
      swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
      swapBuffer.set(buffer);
      swapBuffer.set(data.data, buffer.byteLength);
      buffer = swapBuffer;
    }
    var len = buffer.byteLength;

    // Rec. ITU-T H.264, Annex B
    // scan for NAL unit boundaries

    // a match looks like this:
    // 0 0 1 .. NAL .. 0 0 1
    // ^ sync point ^ i
    // or this:
    // 0 0 1 .. NAL .. 0 0 0
    // ^ sync point ^ i

    // advance the sync point to a NAL start, if necessary
    for (; syncPoint < len - 3; syncPoint++) {
      if (buffer[syncPoint + 2] === 1) {
        // the sync point is properly aligned
        i = syncPoint + 5;
        break;
      }
    }

    while (i < len) {
      // look at the current byte to determine if we've hit the end of
      // a NAL unit boundary
      switch (buffer[i]) {
        case 0:
          // skip past non-sync sequences
          if (buffer[i - 1] !== 0) {
            i += 2;
            break;
          } else if (buffer[i - 2] !== 0) {
            i++;
            break;
          }

          // deliver the NAL unit if it isn't empty
          if (syncPoint + 3 !== i - 2) {
            this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
          }

          // drop trailing zeroes
          do {
            i++;
          } while (buffer[i] !== 1 && i < len);
          syncPoint = i - 2;
          i += 3;
          break;
        case 1:
          // skip past non-sync sequences
          if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
            i += 3;
            break;
          }

          // deliver the NAL unit
          this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
          syncPoint = i - 2;
          i += 3;
          break;
        default:
          // the current byte isn't a one or zero, so it cannot be part
          // of a sync sequence
          i += 3;
          break;
      }
    }
    // filter out the NAL units that were delivered; keep the tail (which
    // may contain the start of the next NAL unit) for the next push
    buffer = buffer.subarray(syncPoint);
    i -= syncPoint;
    syncPoint = 0;
  };

  this.reset = function () {
    buffer = null;
    syncPoint = 0;
    this.trigger('reset');
  };

  this.flush = function () {
    // deliver the last buffered NAL unit
    if (buffer && buffer.byteLength > 3) {
      this.trigger('data', buffer.subarray(syncPoint + 3));
    }
    // reset the stream state
    buffer = null;
    syncPoint = 0;
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };
};
_NalByteStream.prototype = new stream();
8289
// values of profile_idc that indicate additional fields are included in the SPS
// see Recommendation ITU-T H.264 (4/2013),
// 7.3.2.1.1 Sequence parameter set data syntax
PROFILES_WITH_OPTIONAL_SPS_DATA = {
  100: true,
  110: true,
  122: true,
  244: true,
  44: true,
  83: true,
  86: true,
  118: true,
  128: true,
  138: true,
  139: true,
  134: true
};

/**
 * Accepts input from a ElementaryStream and produces H.264 NAL unit data
 * events.
 */
_H264Stream = function H264Stream() {
  var nalByteStream = new _NalByteStream(),
      self,
      trackId,
      currentPts,
      currentDts,
      discardEmulationPreventionBytes,
      readSequenceParameterSet,
      skipScalingList;

  _H264Stream.prototype.init.call(this);
  self = this;

  /*
   * Pushes a packet from a stream onto the NalByteStream
   *
   * @param {Object} packet - A packet received from a stream
   * @param {Uint8Array} packet.data - The raw bytes of the packet
   * @param {Number} packet.dts - Decode timestamp of the packet
   * @param {Number} packet.pts - Presentation timestamp of the packet
   * @param {Number} packet.trackId - The id of the h264 track this packet came from
   * @param {('video'|'audio')} packet.type - The type of packet
   *
   */
  this.push = function (packet) {
    if (packet.type !== 'video') {
      return;
    }
    // remember the most recent timing info so NAL units emitted by the
    // byte stream below can be stamped with it
    trackId = packet.trackId;
    currentPts = packet.pts;
    currentDts = packet.dts;

    nalByteStream.push(packet);
  };

  /*
   * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
   * for the NALUs to the next stream component.
   * Also, preprocess caption and sequence parameter NALUs.
   *
   * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
   * @see NalByteStream.push
   */
  nalByteStream.on('data', function (data) {
    var event = {
      trackId: trackId,
      pts: currentPts,
      dts: currentDts,
      data: data
    };

    // the low five bits of the first NAL byte are nal_unit_type
    switch (data[0] & 0x1f) {
      case 0x05:
        event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
        break;
      case 0x06:
        event.nalUnitType = 'sei_rbsp';
        event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
        break;
      case 0x07:
        event.nalUnitType = 'seq_parameter_set_rbsp';
        event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
        event.config = readSequenceParameterSet(event.escapedRBSP);
        break;
      case 0x08:
        event.nalUnitType = 'pic_parameter_set_rbsp';
        break;
      case 0x09:
        event.nalUnitType = 'access_unit_delimiter_rbsp';
        break;

      default:
        break;
    }
    // This triggers data on the H264Stream
    self.trigger('data', event);
  });
  nalByteStream.on('done', function () {
    self.trigger('done');
  });
  nalByteStream.on('partialdone', function () {
    self.trigger('partialdone');
  });
  nalByteStream.on('reset', function () {
    self.trigger('reset');
  });
  nalByteStream.on('endedtimeline', function () {
    self.trigger('endedtimeline');
  });

  this.flush = function () {
    nalByteStream.flush();
  };

  // NOTE(review): NalByteStream in this bundle does not define a
  // partialFlush method, so calling this would throw -- presumably it is
  // only reachable when partial-segment handling is enabled; confirm
  // against the transmuxer that owns this stream.
  this.partialFlush = function () {
    nalByteStream.partialFlush();
  };

  this.reset = function () {
    nalByteStream.reset();
  };

  this.endTimeline = function () {
    nalByteStream.endTimeline();
  };

  /**
   * Advance the ExpGolomb decoder past a scaling list. The scaling
   * list is optionally transmitted as part of a sequence parameter
   * set and is not relevant to transmuxing.
   * @param count {number} the number of entries in this scaling list
   * @param expGolombDecoder {object} an ExpGolomb pointed to the
   * start of a scaling list
   * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
   */
  skipScalingList = function skipScalingList(count, expGolombDecoder) {
    var lastScale = 8,
        nextScale = 8,
        j,
        deltaScale;

    for (j = 0; j < count; j++) {
      if (nextScale !== 0) {
        deltaScale = expGolombDecoder.readExpGolomb();
        nextScale = (lastScale + deltaScale + 256) % 256;
      }

      lastScale = nextScale === 0 ? lastScale : nextScale;
    }
  };

  /**
   * Expunge any "Emulation Prevention" bytes from a "Raw Byte
   * Sequence Payload"
   * @param data {Uint8Array} the bytes of a RBSP from a NAL
   * unit
   * @return {Uint8Array} the RBSP without any Emulation
   * Prevention Bytes
   */
  discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
    var length = data.byteLength,
        emulationPreventionBytesPositions = [],
        i = 1,
        newLength,
        newData;

    // Find all `Emulation Prevention Bytes` (the 0x03 in a 0x000003 run)
    while (i < length - 2) {
      if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
        emulationPreventionBytesPositions.push(i + 2);
        i += 2;
      } else {
        i++;
      }
    }

    // If no Emulation Prevention Bytes were found just return the original
    // array
    if (emulationPreventionBytesPositions.length === 0) {
      return data;
    }

    // Create a new array to hold the NAL unit data
    newLength = length - emulationPreventionBytesPositions.length;
    newData = new Uint8Array(newLength);
    var sourceIndex = 0;

    // copy every byte except the recorded emulation prevention positions
    for (i = 0; i < newLength; sourceIndex++, i++) {
      if (sourceIndex === emulationPreventionBytesPositions[0]) {
        // Skip this byte
        sourceIndex++;
        // Remove this position index
        emulationPreventionBytesPositions.shift();
      }
      newData[i] = data[sourceIndex];
    }

    return newData;
  };

  /**
   * Read a sequence parameter set and return some interesting video
   * properties. A sequence parameter set is the H264 metadata that
   * describes the properties of upcoming video frames.
   * @param data {Uint8Array} the bytes of a sequence parameter set
   * @return {object} an object with configuration parsed from the
   * sequence parameter set, including the dimensions of the
   * associated video frames.
   */
  readSequenceParameterSet = function readSequenceParameterSet(data) {
    var frameCropLeftOffset = 0,
        frameCropRightOffset = 0,
        frameCropTopOffset = 0,
        frameCropBottomOffset = 0,
        sarScale = 1,
        expGolombDecoder,
        profileIdc,
        levelIdc,
        profileCompatibility,
        chromaFormatIdc,
        picOrderCntType,
        numRefFramesInPicOrderCntCycle,
        picWidthInMbsMinus1,
        picHeightInMapUnitsMinus1,
        frameMbsOnlyFlag,
        scalingListCount,
        sarRatio,
        aspectRatioIdc,
        i;

    expGolombDecoder = new expGolomb(data);
    profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
    profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
    levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
    expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id

    // some profiles have more optional data we don't need
    if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
      chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
      if (chromaFormatIdc === 3) {
        expGolombDecoder.skipBits(1); // separate_colour_plane_flag
      }
      expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
      expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
      expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
      if (expGolombDecoder.readBoolean()) {
        // seq_scaling_matrix_present_flag
        scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
        for (i = 0; i < scalingListCount; i++) {
          if (expGolombDecoder.readBoolean()) {
            // seq_scaling_list_present_flag[ i ]
            if (i < 6) {
              skipScalingList(16, expGolombDecoder);
            } else {
              skipScalingList(64, expGolombDecoder);
            }
          }
        }
      }
    }

    expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
    picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();

    if (picOrderCntType === 0) {
      expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
    } else if (picOrderCntType === 1) {
      expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
      expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
      expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
      numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
      for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
        expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
      }
    }

    expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
    expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag

    picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
    picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();

    frameMbsOnlyFlag = expGolombDecoder.readBits(1);
    if (frameMbsOnlyFlag === 0) {
      expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
    }

    expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
    if (expGolombDecoder.readBoolean()) {
      // frame_cropping_flag
      frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
    }
    if (expGolombDecoder.readBoolean()) {
      // vui_parameters_present_flag
      if (expGolombDecoder.readBoolean()) {
        // aspect_ratio_info_present_flag
        // map aspect_ratio_idc to a sample aspect ratio, per Table E-1
        aspectRatioIdc = expGolombDecoder.readUnsignedByte();
        switch (aspectRatioIdc) {
          case 1:
            sarRatio = [1, 1];break;
          case 2:
            sarRatio = [12, 11];break;
          case 3:
            sarRatio = [10, 11];break;
          case 4:
            sarRatio = [16, 11];break;
          case 5:
            sarRatio = [40, 33];break;
          case 6:
            sarRatio = [24, 11];break;
          case 7:
            sarRatio = [20, 11];break;
          case 8:
            sarRatio = [32, 11];break;
          case 9:
            sarRatio = [80, 33];break;
          case 10:
            sarRatio = [18, 11];break;
          case 11:
            sarRatio = [15, 11];break;
          case 12:
            sarRatio = [64, 33];break;
          case 13:
            sarRatio = [160, 99];break;
          case 14:
            sarRatio = [4, 3];break;
          case 15:
            sarRatio = [3, 2];break;
          case 16:
            sarRatio = [2, 1];break;
          case 255:
            {
              // Extended_SAR: explicit 16-bit width and height
              sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
              break;
            }
        }
        if (sarRatio) {
          sarScale = sarRatio[0] / sarRatio[1];
        }
      }
    }
    return {
      profileIdc: profileIdc,
      levelIdc: levelIdc,
      profileCompatibility: profileCompatibility,
      // dimensions in pixels: macroblocks are 16x16, minus crop offsets,
      // with width scaled by the sample aspect ratio
      width: Math.ceil(((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
      height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
      sarRatio: sarRatio
    };
  };
};
_H264Stream.prototype = new stream();

var h264 = {
  H264Stream: _H264Stream,
  NalByteStream: _NalByteStream
};
8652
8653 /**
8654 * mux.js
8655 *
8656 * Copyright (c) Brightcove
8657 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
8658 *
8659 * Utilities to detect basic properties and metadata about Aac data.
8660 */
8661
// sampling frequencies in Hz, indexed by the ADTS sampling_frequency_index
var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];

// Determine whether the data begins with an ID3 tag ('I', 'D', '3'),
// which marks it as raw AAC rather than an MPEG-2 transport stream.
var isLikelyAacData = function isLikelyAacData(data) {
  // 0x49 0x44 0x33 === 'I' 'D' '3'
  return data[0] === 0x49 && data[1] === 0x44 && data[2] === 0x33;
};
8670
// Decode an ID3v2 "syncsafe" integer: four bytes, seven significant bits each.
var parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
  var value = 0;
  for (var idx = 0; idx < 4; idx++) {
    value = value << 7 | data[idx];
  }
  return value;
};
8674
// return a percent-encoded representation of the specified byte range
// @see http://en.wikipedia.org/wiki/Percent-encoding
var percentEncode$1 = function percentEncode(bytes, start, end) {
  var pieces = [];
  for (var idx = start; idx < end; idx++) {
    // two lowercase hex digits, zero-padded
    pieces.push('%' + ('00' + bytes[idx].toString(16)).slice(-2));
  }
  return pieces.join('');
};
8685
// return the string representation of the specified byte range,
// interpreted as ISO-8859-1.
var parseIso88591$1 = function parseIso88591(bytes, start, end) {
  var escaped = percentEncode$1(bytes, start, end);
  return unescape(escaped); // jshint ignore:line
};
8691
// Compute the total byte length of an ID3 tag starting at byteIndex:
// the syncsafe size field plus the 10-byte header, plus a 10-byte
// footer when the footer flag is set.
var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
  var flags = header[byteIndex + 5];
  var footerPresent = (flags & 16) >> 4;
  var tagSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9];

  return footerPresent ? tagSize + 20 : tagSize + 10;
};
8702
/**
 * Compute the 13-bit frame_length of the ADTS frame whose header starts at
 * byteIndex: two low bits of byte 3, all of byte 4, and the top three bits
 * of byte 5 (see the sync AdtsStream frameLength computation).
 *
 * Fix: the high two bits were previously computed as
 * `header[byteIndex + 3] & 0x3 << 11`, which JavaScript evaluates as
 * `header[byteIndex + 3] & 0x1800` — always 0 for a byte — so frames
 * larger than 2047 bytes were mis-sized. Parenthesize the mask before
 * shifting.
 *
 * @param {Uint8Array} header - buffer containing an ADTS header
 * @param {number} byteIndex - offset of the 0xFFFx sync word
 * @return {number} the frame length in bytes, including the header
 */
var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
  var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
      middle = header[byteIndex + 4] << 3,
      highTwo = (header[byteIndex + 3] & 0x3) << 11;

  return highTwo | middle | lowThree;
};
8710
/**
 * Classify the bytes at byteIndex as the start of an ID3 tag
 * ('timed-metadata'), an ADTS frame ('audio'), or neither (null).
 *
 * Fix: the audio check was written `header[byteIndex] & 0xff === 0xff`,
 * which JavaScript evaluates as `header[byteIndex] & (0xff === 0xff)`,
 * i.e. `header[byteIndex] & 1` — any odd byte followed by a 0xF0 nibble
 * was misclassified as audio. Parenthesize the mask so the full sync
 * byte is compared.
 *
 * @param {Uint8Array} header - buffer to inspect
 * @param {number} byteIndex - offset to classify
 * @return {('timed-metadata'|'audio'|null)} the detected packet type
 */
var parseType = function parseType(header, byteIndex) {
  if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
    return 'timed-metadata';
  } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
    return 'audio';
  }
  return null;
};
8719
// Scan for the first ADTS sync word in the packet and return the sample
// rate (Hz) indicated by its sampling_frequency_index, or null when no
// ADTS header is found.
var parseSampleRate = function parseSampleRate(packet) {
  var offset = 0;

  while (offset + 5 < packet.length) {
    if (packet[offset] === 0xFF && (packet[offset + 1] & 0xF6) === 0xF0) {
      // found an ADTS header; the frequency index lives in bits 2-5 of
      // the third header byte
      return ADTS_SAMPLING_FREQUENCIES$1[(packet[offset + 2] & 0x3c) >>> 2];
    }
    // not a header; advance one byte and keep looking
    offset++;
  }

  return null;
};
8735
// Extract the stream timestamp for raw AAC from the
// 'com.apple.streaming.transportStreamTimestamp' PRIV frame of the
// leading ID3 tag. Returns null when no such frame is present.
var parseAacTimestamp = function parseAacTimestamp(packet) {
  var frameStart, frameSize, frame, frameHeader;

  // find the start of the first frame and the end of the tag
  frameStart = 10;
  if (packet[5] & 0x40) {
    // advance the frame start past the extended header
    frameStart += 4; // header size field
    frameStart += parseSyncSafeInteger$1(packet.subarray(10, 14));
  }

  // parse one or more ID3 frames
  // http://id3.org/id3v2.3.0#ID3v2_frame_overview
  do {
    // determine the number of bytes in this frame
    frameSize = parseSyncSafeInteger$1(packet.subarray(frameStart + 4, frameStart + 8));
    if (frameSize < 1) {
      return null;
    }
    frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);

    if (frameHeader === 'PRIV') {
      frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);

      // the PRIV payload is a NUL-terminated owner string followed by data
      for (var i = 0; i < frame.byteLength; i++) {
        if (frame[i] === 0) {
          var owner = parseIso88591$1(frame, 0, i);
          if (owner === 'com.apple.streaming.transportStreamTimestamp') {
            var d = frame.subarray(i + 1);
            // reassemble the 33-bit timestamp: build the top 31 bits with
            // shifts, then multiply by 4 and add the low two bits so the
            // value is not truncated by 32-bit bitwise operators
            var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
            size *= 4;
            size += d[7] & 0x03;

            return size;
          }
          break;
        }
      }
    }

    frameStart += 10; // advance past the frame header
    frameStart += frameSize; // advance past the frame body
  } while (frameStart < packet.byteLength);
  return null;
};

// helpers for probing raw AAC (ADTS + ID3) byte streams
var utils = {
  isLikelyAacData: isLikelyAacData,
  parseId3TagSize: parseId3TagSize,
  parseAdtsSize: parseAdtsSize,
  parseType: parseType,
  parseSampleRate: parseSampleRate,
  parseAacTimestamp: parseAacTimestamp
};
8790
// Constants
var _AacStream;

/**
 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
 */

_AacStream = function AacStream() {
  // everything: bytes that have not yet formed a complete ID3 tag or ADTS
  // frame; timeStamp: the pts/dts applied to emitted audio packets
  var everything = new Uint8Array(),
      timeStamp = 0;

  _AacStream.prototype.init.call(this);

  /**
   * Record the timestamp to attach to subsequently emitted audio packets.
   * @param {number} timestamp - the new pts/dts value
   */
  this.setTimestamp = function (timestamp) {
    timeStamp = timestamp;
  };

  /**
   * Scan the accumulated bytes and emit a 'data' event for every complete
   * ID3 tag ('timed-metadata') or ADTS frame ('audio') found; any
   * incomplete tail is retained for the next push.
   * @param {Uint8Array} bytes - raw AAC/ID3 bytes
   */
  this.push = function (bytes) {
    var frameSize = 0,
        byteIndex = 0,
        bytesLeft,
        chunk,
        packet,
        leftover;

    // If there are bytes remaining from the last segment, prepend them to the
    // bytes that were pushed in
    if (everything.length) {
      // Fix: keep a reference to the old buffer and copy from it. The
      // previous code reassigned `everything` to a new zero-filled array
      // first and then copied `everything.subarray(0, tempLength)` from
      // that NEW array, silently replacing the leftover bytes with zeros.
      leftover = everything;
      everything = new Uint8Array(bytes.byteLength + leftover.length);
      everything.set(leftover);
      everything.set(bytes, leftover.length);
    } else {
      everything = bytes;
    }

    while (everything.length - byteIndex >= 3) {
      if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {

        // Exit early because we don't have enough to parse
        // the ID3 tag header
        if (everything.length - byteIndex < 10) {
          break;
        }

        // check framesize
        frameSize = utils.parseId3TagSize(everything, byteIndex);

        // Exit early if we don't have enough in the buffer
        // to emit a full packet
        // Add to byteIndex to support multiple ID3 tags in sequence
        if (byteIndex + frameSize > everything.length) {
          break;
        }
        chunk = {
          type: 'timed-metadata',
          data: everything.subarray(byteIndex, byteIndex + frameSize)
        };
        this.trigger('data', chunk);
        byteIndex += frameSize;
        continue;
      } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {

        // Exit early because we don't have enough to parse
        // the ADTS frame header
        if (everything.length - byteIndex < 7) {
          break;
        }

        frameSize = utils.parseAdtsSize(everything, byteIndex);

        // Exit early if we don't have enough in the buffer
        // to emit a full packet
        if (byteIndex + frameSize > everything.length) {
          break;
        }

        packet = {
          type: 'audio',
          data: everything.subarray(byteIndex, byteIndex + frameSize),
          pts: timeStamp,
          dts: timeStamp
        };
        this.trigger('data', packet);
        byteIndex += frameSize;
        continue;
      }
      byteIndex++;
    }
    bytesLeft = everything.length - byteIndex;

    // retain any unconsumed tail for the next push
    if (bytesLeft > 0) {
      everything = everything.subarray(byteIndex);
    } else {
      everything = new Uint8Array();
    }
  };

  this.reset = function () {
    everything = new Uint8Array();
    this.trigger('reset');
  };

  this.endTimeline = function () {
    everything = new Uint8Array();
    this.trigger('endedtimeline');
  };
};

_AacStream.prototype = new stream();

var aac = _AacStream;
8903
// constants
// AAC frame properties copied from parsed ADTS frames onto the audio track
var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];

var audioProperties = AUDIO_PROPERTIES;

// h264 SPS-derived properties copied onto the video track
var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];

var videoProperties = VIDEO_PROPERTIES;

var H264Stream = h264.H264Stream;

var isLikelyAacData$1 = utils.isLikelyAacData;
var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;

// object types
var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
8920
/**
 * Compare two arrays (even typed) for same-ness
 */
var arrayEquals = function arrayEquals(a, b) {
  if (a.length !== b.length) {
    return false;
  }

  // compare element-wise with strict equality
  var idx = a.length;
  while (idx--) {
    if (a[idx] !== b[idx]) {
      return false;
    }
  }

  return true;
};
8940
/**
 * Build segment timing info expressed in player time rather than raw
 * stream time: the PTS and DTS values are based on the actual stream times
 * from the segment, but player time starts from the baseMediaDecodeTime,
 * so the start/end values are derived from the base time plus the
 * segment's DTS and PTS durations.
 */
var generateVideoSegmentTimingInfo = function generateVideoSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
  var startPtsOffset = startPts - startDts;

  return {
    start: {
      dts: baseMediaDecodeTime,
      // shift pts by the segment's original pts/dts offset
      pts: baseMediaDecodeTime + startPtsOffset
    },
    end: {
      // decode duration of the segment
      dts: baseMediaDecodeTime + (endDts - startDts),
      // presentation duration of the segment
      pts: baseMediaDecodeTime + (endPts - startPts)
    },
    prependedContentDuration: prependedContentDuration,
    baseMediaDecodeTime: baseMediaDecodeTime
  };
};
8963
8964 /**
8965 * Constructs a single-track, ISO BMFF media segment from AAC data
8966 * events. The output of this stream can be fed to a SourceBuffer
8967 * configured with a suitable initialization segment.
8968 * @param track {object} track metadata configuration
8969 * @param options {object} transmuxer options object
8970 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
8971 * in the source; false to adjust the first segment to start at 0.
8972 */
8973 _AudioSegmentStream = function AudioSegmentStream(track, options) {
8974 var adtsFrames = [],
8975 sequenceNumber = 0,
8976 earliestAllowedDts = 0,
8977 audioAppendStartTs = 0,
8978 videoBaseMediaDecodeTime = Infinity;
8979
8980 options = options || {};
8981
8982 _AudioSegmentStream.prototype.init.call(this);
8983
8984 this.push = function (data) {
8985 trackDecodeInfo.collectDtsInfo(track, data);
8986
8987 if (track) {
8988 audioProperties.forEach(function (prop) {
8989 track[prop] = data[prop];
8990 });
8991 }
8992
8993 // buffer audio data until end() is called
8994 adtsFrames.push(data);
8995 };
8996
8997 this.setEarliestDts = function (earliestDts) {
8998 earliestAllowedDts = earliestDts;
8999 };
9000
9001 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
9002 videoBaseMediaDecodeTime = baseMediaDecodeTime;
9003 };
9004
9005 this.setAudioAppendStart = function (timestamp) {
9006 audioAppendStartTs = timestamp;
9007 };
9008
9009 this.flush = function () {
9010 var frames, moof, mdat, boxes, frameDuration;
9011
9012 // return early if no audio data has been observed
9013 if (adtsFrames.length === 0) {
9014 this.trigger('done', 'AudioSegmentStream');
9015 return;
9016 }
9017
9018 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
9019 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
9020
9021 audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime);
9022
9023 // we have to build the index from byte locations to
9024 // samples (that is, adts frames) in the audio data
9025 track.samples = audioFrameUtils.generateSampleTable(frames);
9026
9027 // concatenate the audio data to constuct the mdat
9028 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
9029
9030 adtsFrames = [];
9031
9032 moof = mp4Generator.moof(sequenceNumber, [track]);
9033 boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
9034
9035 // bump the sequence number for next time
9036 sequenceNumber++;
9037
9038 boxes.set(moof);
9039 boxes.set(mdat, moof.byteLength);
9040
9041 trackDecodeInfo.clearDtsInfo(track);
9042
9043 frameDuration = Math.ceil(ONE_SECOND_IN_TS$3 * 1024 / track.samplerate);
9044
9045 // TODO this check was added to maintain backwards compatibility (particularly with
9046 // tests) on adding the timingInfo event. However, it seems unlikely that there's a
9047 // valid use-case where an init segment/data should be triggered without associated
9048 // frames. Leaving for now, but should be looked into.
9049 if (frames.length) {
9050 this.trigger('timingInfo', {
9051 start: frames[0].pts,
9052 end: frames[0].pts + frames.length * frameDuration
9053 });
9054 }
9055 this.trigger('data', { track: track, boxes: boxes });
9056 this.trigger('done', 'AudioSegmentStream');
9057 };
9058
9059 this.reset = function () {
9060 trackDecodeInfo.clearDtsInfo(track);
9061 adtsFrames = [];
9062 this.trigger('reset');
9063 };
9064 };
9065
9066 _AudioSegmentStream.prototype = new stream();
9067
  /**
   * Constructs a single-track, ISO BMFF media segment from H264 data
   * events. The output of this stream can be fed to a SourceBuffer
   * configured with a suitable initialization segment.
   * @param track {object} track metadata configuration
   * @param options {object} transmuxer options object
   * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
   *        gopsToAlignWith list when attempting to align gop pts
   * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
   *        in the source; false to adjust the first segment to start at 0.
   */
  _VideoSegmentStream = function VideoSegmentStream(track, options) {
    var sequenceNumber = 0,
        nalUnits = [],
        gopsToAlignWith = [],
        config,
        pps;

    options = options || {};

    _VideoSegmentStream.prototype.init.call(this);

    // minPTS is recollected from scratch for each new segment
    delete track.minPTS;

    // cache of recently flushed GOPs, used for gop-fusion (see flush below)
    this.gopCache_ = [];

    /**
     * Constructs a ISO BMFF segment given H264 nalUnits
     * @param {Object} nalUnit A data event representing a nalUnit
     * @param {String} nalUnit.nalUnitType
     * @param {Object} nalUnit.config Properties for a mp4 track
     * @param {Uint8Array} nalUnit.data The nalUnit bytes
     * @see lib/codecs/h264.js
     **/
    this.push = function (nalUnit) {
      trackDecodeInfo.collectDtsInfo(track, nalUnit);

      // record the track config from the first SPS seen
      if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
        config = nalUnit.config;
        track.sps = [nalUnit.data];

        videoProperties.forEach(function (prop) {
          track[prop] = config[prop];
        }, this);
      }

      // record the first PPS seen
      if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
        pps = nalUnit.data;
        track.pps = [nalUnit.data];
      }

      // buffer video until flush() is called
      nalUnits.push(nalUnit);
    };

    /**
     * Pass constructed ISO BMFF track and boxes on to the
     * next stream in the pipeline
     **/
    this.flush = function () {
      var frames,
          gopForFusion,
          gops,
          moof,
          mdat,
          boxes,
          prependedContentDuration = 0,
          firstGop,
          lastGop;

      // Throw away nalUnits at the start of the byte stream until
      // we find the first AUD
      while (nalUnits.length) {
        if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
          break;
        }
        nalUnits.shift();
      }

      // Return early if no video data has been observed
      if (nalUnits.length === 0) {
        this.resetStream_();
        this.trigger('done', 'VideoSegmentStream');
        return;
      }

      // Organize the raw nal-units into arrays that represent
      // higher-level constructs such as frames and gops
      // (group-of-pictures)
      frames = frameUtils.groupNalsIntoFrames(nalUnits);
      gops = frameUtils.groupFramesIntoGops(frames);

      // If the first frame of this fragment is not a keyframe we have
      // a problem since MSE (on Chrome) requires a leading keyframe.
      //
      // We have two approaches to repairing this situation:
      // 1) GOP-FUSION:
      //    This is where we keep track of the GOPS (group-of-pictures)
      //    from previous fragments and attempt to find one that we can
      //    prepend to the current fragment in order to create a valid
      //    fragment.
      // 2) KEYFRAME-PULLING:
      //    Here we search for the first keyframe in the fragment and
      //    throw away all the frames between the start of the fragment
      //    and that keyframe. We then extend the duration and pull the
      //    PTS of the keyframe forward so that it covers the time range
      //    of the frames that were disposed of.
      //
      // #1 is far preferable over #2 which can cause "stuttering" but
      // requires more things to be just right.
      if (!gops[0][0].keyFrame) {
        // Search for a gop for fusion from our gopCache
        gopForFusion = this.getGopForFusion_(nalUnits[0], track);

        if (gopForFusion) {
          // in order to provide more accurate timing information about the segment, save
          // the number of seconds prepended to the original segment due to GOP fusion
          prependedContentDuration = gopForFusion.duration;

          gops.unshift(gopForFusion);
          // Adjust Gops' metadata to account for the inclusion of the
          // new gop at the beginning
          gops.byteLength += gopForFusion.byteLength;
          gops.nalCount += gopForFusion.nalCount;
          gops.pts = gopForFusion.pts;
          gops.dts = gopForFusion.dts;
          gops.duration += gopForFusion.duration;
        } else {
          // If we didn't find a candidate gop fall back to keyframe-pulling
          gops = frameUtils.extendFirstKeyFrame(gops);
        }
      }

      // Trim gops to align with gopsToAlignWith
      if (gopsToAlignWith.length) {
        var alignedGops;

        if (options.alignGopsAtEnd) {
          alignedGops = this.alignGopsAtEnd_(gops);
        } else {
          alignedGops = this.alignGopsAtStart_(gops);
        }

        if (!alignedGops) {
          // save all the nals in the last GOP into the gop cache
          this.gopCache_.unshift({
            gop: gops.pop(),
            pps: track.pps,
            sps: track.sps
          });

          // Keep a maximum of 6 GOPs in the cache
          this.gopCache_.length = Math.min(6, this.gopCache_.length);

          // Clear nalUnits
          nalUnits = [];

          // return early; no gops can be aligned with desired gopsToAlignWith
          this.resetStream_();
          this.trigger('done', 'VideoSegmentStream');
          return;
        }

        // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
        // when recalculated before sending off to CoalesceStream
        trackDecodeInfo.clearDtsInfo(track);

        gops = alignedGops;
      }

      trackDecodeInfo.collectDtsInfo(track, gops);

      // First, we have to build the index from byte locations to
      // samples (that is, frames) in the video data
      track.samples = frameUtils.generateSampleTable(gops);

      // Concatenate the video data and construct the mdat
      mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));

      track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);

      this.trigger('processedGopsInfo', gops.map(function (gop) {
        return {
          pts: gop.pts,
          dts: gop.dts,
          byteLength: gop.byteLength
        };
      }));

      firstGop = gops[0];
      lastGop = gops[gops.length - 1];

      this.trigger('segmentTimingInfo', generateVideoSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));

      this.trigger('timingInfo', {
        start: gops[0].pts,
        end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
      });

      // save all the nals in the last GOP into the gop cache
      this.gopCache_.unshift({
        gop: gops.pop(),
        pps: track.pps,
        sps: track.sps
      });

      // Keep a maximum of 6 GOPs in the cache
      this.gopCache_.length = Math.min(6, this.gopCache_.length);

      // Clear nalUnits
      nalUnits = [];

      this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
      this.trigger('timelineStartInfo', track.timelineStartInfo);

      moof = mp4Generator.moof(sequenceNumber, [track]);

      // it would be great to allocate this array up front instead of
      // throwing away hundreds of media segment fragments
      boxes = new Uint8Array(moof.byteLength + mdat.byteLength);

      // Bump the sequence number for next time
      sequenceNumber++;

      boxes.set(moof);
      boxes.set(mdat, moof.byteLength);

      this.trigger('data', { track: track, boxes: boxes });

      this.resetStream_();

      // Continue with the flush process now
      this.trigger('done', 'VideoSegmentStream');
    };

    /**
     * Drop all buffered nal units and cached GOPs, clear the alignment
     * list, and reset timing state.
     */
    this.reset = function () {
      this.resetStream_();
      nalUnits = [];
      this.gopCache_.length = 0;
      gopsToAlignWith.length = 0;
      this.trigger('reset');
    };

    /**
     * Clear collected DTS/PTS info and forget the cached SPS config and
     * PPS so they are re-read from the next segment's parameter sets.
     */
    this.resetStream_ = function () {
      trackDecodeInfo.clearDtsInfo(track);

      // reset config and pps because they may differ across segments
      // for instance, when we are rendition switching
      config = undefined;
      pps = undefined;
    };

    // Search for a candidate Gop for gop-fusion from the gop cache and
    // return it or return null if no good candidate was found
    // NOTE(review): flush() calls this as getGopForFusion_(nalUnits[0], track);
    // the second argument is unused here — `track` is read from the closure.
    this.getGopForFusion_ = function (nalUnit) {
      var halfSecond = 45000,

          // Half-a-second in a 90khz clock
          allowableOverlap = 10000,

          // About 3 frames @ 30fps
          nearestDistance = Infinity,
          dtsDistance,
          nearestGopObj,
          currentGop,
          currentGopObj,
          i;

      // Search for the GOP nearest to the beginning of this nal unit
      for (i = 0; i < this.gopCache_.length; i++) {
        currentGopObj = this.gopCache_[i];
        currentGop = currentGopObj.gop;

        // Reject Gops with different SPS or PPS
        if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
          continue;
        }

        // Reject Gops that would require a negative baseMediaDecodeTime
        if (currentGop.dts < track.timelineStartInfo.dts) {
          continue;
        }

        // The distance between the end of the gop and the start of the nalUnit
        dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration;

        // Only consider GOPS that start before the nal unit and end within
        // a half-second of the nal unit
        if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {

          // Always use the closest GOP we found if there is more than
          // one candidate
          if (!nearestGopObj || nearestDistance > dtsDistance) {
            nearestGopObj = currentGopObj;
            nearestDistance = dtsDistance;
          }
        }
      }

      if (nearestGopObj) {
        return nearestGopObj.gop;
      }
      return null;
    };

    // trim gop list to the first gop found that has a matching pts with a gop in the list
    // of gopsToAlignWith starting from the START of the list
    this.alignGopsAtStart_ = function (gops) {
      var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;

      byteLength = gops.byteLength;
      nalCount = gops.nalCount;
      duration = gops.duration;
      alignIndex = gopIndex = 0;

      // walk both lists (sorted by pts) in parallel looking for a pts match
      while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
        align = gopsToAlignWith[alignIndex];
        gop = gops[gopIndex];

        if (align.pts === gop.pts) {
          break;
        }

        if (gop.pts > align.pts) {
          // this current gop starts after the current gop we want to align on, so increment
          // align index
          alignIndex++;
          continue;
        }

        // current gop starts before the current gop we want to align on. so increment gop
        // index
        gopIndex++;
        byteLength -= gop.byteLength;
        nalCount -= gop.nalCount;
        duration -= gop.duration;
      }

      if (gopIndex === 0) {
        // no gops to trim
        return gops;
      }

      if (gopIndex === gops.length) {
        // all gops trimmed, skip appending all gops
        return null;
      }

      // keep only the gops from the match onward and fix up the aggregate
      // metadata that groupFramesIntoGops attaches to the gop list
      alignedGops = gops.slice(gopIndex);
      alignedGops.byteLength = byteLength;
      alignedGops.duration = duration;
      alignedGops.nalCount = nalCount;
      alignedGops.pts = alignedGops[0].pts;
      alignedGops.dts = alignedGops[0].dts;

      return alignedGops;
    };

    // trim gop list to the first gop found that has a matching pts with a gop in the list
    // of gopsToAlignWith starting from the END of the list
    this.alignGopsAtEnd_ = function (gops) {
      var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;

      alignIndex = gopsToAlignWith.length - 1;
      gopIndex = gops.length - 1;
      alignEndIndex = null;
      matchFound = false;

      // walk both lists backwards looking for a pts match
      while (alignIndex >= 0 && gopIndex >= 0) {
        align = gopsToAlignWith[alignIndex];
        gop = gops[gopIndex];

        if (align.pts === gop.pts) {
          matchFound = true;
          break;
        }

        if (align.pts > gop.pts) {
          alignIndex--;
          continue;
        }

        if (alignIndex === gopsToAlignWith.length - 1) {
          // gop.pts is greater than the last alignment candidate. If no match is found
          // by the end of this loop, we still want to append gops that come after this
          // point
          alignEndIndex = gopIndex;
        }

        gopIndex--;
      }

      if (!matchFound && alignEndIndex === null) {
        return null;
      }

      var trimIndex;

      if (matchFound) {
        trimIndex = gopIndex;
      } else {
        trimIndex = alignEndIndex;
      }

      if (trimIndex === 0) {
        return gops;
      }

      // keep only the gops from the trim point onward and recompute the
      // aggregate metadata for the shortened list
      var alignedGops = gops.slice(trimIndex);
      var metadata = alignedGops.reduce(function (total, gop) {
        total.byteLength += gop.byteLength;
        total.duration += gop.duration;
        total.nalCount += gop.nalCount;
        return total;
      }, { byteLength: 0, duration: 0, nalCount: 0 });

      alignedGops.byteLength = metadata.byteLength;
      alignedGops.duration = metadata.duration;
      alignedGops.nalCount = metadata.nalCount;
      alignedGops.pts = alignedGops[0].pts;
      alignedGops.dts = alignedGops[0].dts;

      return alignedGops;
    };

    // Provide the list of reference gops that flushed gops should be
    // aligned against (see alignGopsAtStart_ / alignGopsAtEnd_)
    this.alignGopsWith = function (newGopsToAlignWith) {
      gopsToAlignWith = newGopsToAlignWith;
    };
  };

  _VideoSegmentStream.prototype = new stream();
9500
9501 /**
9502 * A Stream that can combine multiple streams (ie. audio & video)
9503 * into a single output segment for MSE. Also supports audio-only
9504 * and video-only streams.
9505 * @param options {object} transmuxer options object
9506 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9507 * in the source; false to adjust the first segment to start at media timeline start.
9508 */
9509 _CoalesceStream = function CoalesceStream(options, metadataStream) {
9510 // Number of Tracks per output segment
9511 // If greater than 1, we combine multiple
9512 // tracks into a single segment
9513 this.numberOfTracks = 0;
9514 this.metadataStream = metadataStream;
9515
9516 options = options || {};
9517
9518 if (typeof options.remux !== 'undefined') {
9519 this.remuxTracks = !!options.remux;
9520 } else {
9521 this.remuxTracks = true;
9522 }
9523
9524 if (typeof options.keepOriginalTimestamps === 'boolean') {
9525 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
9526 } else {
9527 this.keepOriginalTimestamps = false;
9528 }
9529
9530 this.pendingTracks = [];
9531 this.videoTrack = null;
9532 this.pendingBoxes = [];
9533 this.pendingCaptions = [];
9534 this.pendingMetadata = [];
9535 this.pendingBytes = 0;
9536 this.emittedTracks = 0;
9537
9538 _CoalesceStream.prototype.init.call(this);
9539
9540 // Take output from multiple
9541 this.push = function (output) {
9542 // buffer incoming captions until the associated video segment
9543 // finishes
9544 if (output.text) {
9545 return this.pendingCaptions.push(output);
9546 }
9547 // buffer incoming id3 tags until the final flush
9548 if (output.frames) {
9549 return this.pendingMetadata.push(output);
9550 }
9551
9552 // Add this track to the list of pending tracks and store
9553 // important information required for the construction of
9554 // the final segment
9555 this.pendingTracks.push(output.track);
9556 this.pendingBytes += output.boxes.byteLength;
9557
9558 // TODO: is there an issue for this against chrome?
9559 // We unshift audio and push video because
9560 // as of Chrome 75 when switching from
9561 // one init segment to another if the video
9562 // mdat does not appear after the audio mdat
9563 // only audio will play for the duration of our transmux.
9564 if (output.track.type === 'video') {
9565 this.videoTrack = output.track;
9566 this.pendingBoxes.push(output.boxes);
9567 }
9568 if (output.track.type === 'audio') {
9569 this.audioTrack = output.track;
9570 this.pendingBoxes.unshift(output.boxes);
9571 }
9572 };
9573 };
9574
9575 _CoalesceStream.prototype = new stream();
  /**
   * Combine the buffered per-track moof/mdat pairs, captions, and id3
   * metadata into a single 'data' event, then re-emit each caption and
   * id3 tag individually. Emits 'done' once all expected tracks have
   * been flushed and emitted.
   * @param flushSource {string} name of the upstream stream that triggered
   *        this flush (e.g. 'VideoSegmentStream', 'AudioSegmentStream')
   */
  _CoalesceStream.prototype.flush = function (flushSource) {
    var offset = 0,
        event = {
          captions: [],
          captionStreams: {},
          metadata: [],
          info: {}
        },
        caption,
        id3,
        initSegment,
        timelineStartPts = 0,
        i;

    if (this.pendingTracks.length < this.numberOfTracks) {
      if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
        // Return because we haven't received a flush from a data-generating
        // portion of the segment (meaning that we have only received meta-data
        // or captions.)
        return;
      } else if (this.remuxTracks) {
        // Return until we have enough tracks from the pipeline to remux (if we
        // are remuxing audio and video into a single MP4)
        return;
      } else if (this.pendingTracks.length === 0) {
        // In the case where we receive a flush without any data having been
        // received we consider it an emitted track for the purposes of coalescing
        // `done` events.
        // We do this for the case where there is an audio and video track in the
        // segment but no audio data. (seen in several playlists with alternate
        // audio tracks and no audio present in the main TS segments.)
        this.emittedTracks++;

        if (this.emittedTracks >= this.numberOfTracks) {
          this.trigger('done');
          this.emittedTracks = 0;
        }
        return;
      }
    }

    // base the segment's timeline start on video when present, otherwise
    // audio, and copy the track's properties into the event info
    if (this.videoTrack) {
      timelineStartPts = this.videoTrack.timelineStartInfo.pts;
      videoProperties.forEach(function (prop) {
        event.info[prop] = this.videoTrack[prop];
      }, this);
    } else if (this.audioTrack) {
      timelineStartPts = this.audioTrack.timelineStartInfo.pts;
      audioProperties.forEach(function (prop) {
        event.info[prop] = this.audioTrack[prop];
      }, this);
    }

    if (this.videoTrack || this.audioTrack) {
      if (this.pendingTracks.length === 1) {
        event.type = this.pendingTracks[0].type;
      } else {
        event.type = 'combined';
      }

      this.emittedTracks += this.pendingTracks.length;

      initSegment = mp4Generator.initSegment(this.pendingTracks);

      // Create a new typed array to hold the init segment
      event.initSegment = new Uint8Array(initSegment.byteLength);

      // Create an init segment containing a moov
      // and track definitions
      event.initSegment.set(initSegment);

      // Create a new typed array to hold the moof+mdats
      event.data = new Uint8Array(this.pendingBytes);

      // Append each moof+mdat (one per track) together
      for (i = 0; i < this.pendingBoxes.length; i++) {
        event.data.set(this.pendingBoxes[i], offset);
        offset += this.pendingBoxes[i].byteLength;
      }

      // Translate caption PTS times into second offsets to match the
      // video timeline for the segment, and add track info
      for (i = 0; i < this.pendingCaptions.length; i++) {
        caption = this.pendingCaptions[i];
        caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
        caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);

        event.captionStreams[caption.stream] = true;
        event.captions.push(caption);
      }

      // Translate ID3 frame PTS times into second offsets to match the
      // video timeline for the segment
      for (i = 0; i < this.pendingMetadata.length; i++) {
        id3 = this.pendingMetadata[i];
        id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);

        event.metadata.push(id3);
      }

      // We add this to every single emitted segment even though we only need
      // it for the first
      event.metadata.dispatchType = this.metadataStream.dispatchType;

      // Reset stream state
      // NOTE(review): this.audioTrack is not cleared here while videoTrack
      // is — confirm whether that asymmetry is intentional
      this.pendingTracks.length = 0;
      this.videoTrack = null;
      this.pendingBoxes.length = 0;
      this.pendingCaptions.length = 0;
      this.pendingBytes = 0;
      this.pendingMetadata.length = 0;

      // Emit the built segment
      // We include captions and ID3 tags for backwards compatibility,
      // ideally we should send only video and audio in the data event
      this.trigger('data', event);
      // Emit each caption to the outside world
      // Ideally, this would happen immediately on parsing captions,
      // but we need to ensure that video data is sent back first
      // so that caption timing can be adjusted to match video timing
      for (i = 0; i < event.captions.length; i++) {
        caption = event.captions[i];

        this.trigger('caption', caption);
      }
      // Emit each id3 tag to the outside world
      // Ideally, this would happen immediately on parsing the tag,
      // but we need to ensure that video data is sent back first
      // so that ID3 frame timing can be adjusted to match video timing
      for (i = 0; i < event.metadata.length; i++) {
        id3 = event.metadata[i];

        this.trigger('id3Frame', id3);
      }
    }

    // Only emit `done` if all tracks have been flushed and emitted
    if (this.emittedTracks >= this.numberOfTracks) {
      this.trigger('done');
      this.emittedTracks = 0;
    }
  };
9718
  /**
   * Enable or disable remuxing of audio and video into a single output
   * segment (see the remuxTracks check in flush).
   * @param val {boolean} true to combine audio and video into one segment
   */
  _CoalesceStream.prototype.setRemux = function (val) {
    this.remuxTracks = val;
  };
9722 /**
9723 * A Stream that expects MP2T binary data as input and produces
9724 * corresponding media segments, suitable for use with Media Source
9725 * Extension (MSE) implementations that support the ISO BMFF byte
9726 * stream format, like Chrome.
9727 */
9728 _Transmuxer = function Transmuxer(options) {
9729 var self = this,
9730 hasFlushed = true,
9731 videoTrack,
9732 audioTrack;
9733
9734 _Transmuxer.prototype.init.call(this);
9735
9736 options = options || {};
9737 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
9738 this.transmuxPipeline_ = {};
9739
9740 this.setupAacPipeline = function () {
9741 var pipeline = {};
9742 this.transmuxPipeline_ = pipeline;
9743
9744 pipeline.type = 'aac';
9745 pipeline.metadataStream = new m2ts_1.MetadataStream();
9746
9747 // set up the parsing pipeline
9748 pipeline.aacStream = new aac();
9749 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
9750 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
9751 pipeline.adtsStream = new adts();
9752 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
9753 pipeline.headOfPipeline = pipeline.aacStream;
9754
9755 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
9756 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
9757
9758 pipeline.metadataStream.on('timestamp', function (frame) {
9759 pipeline.aacStream.setTimestamp(frame.timeStamp);
9760 });
9761
9762 pipeline.aacStream.on('data', function (data) {
9763 if (data.type === 'timed-metadata' && !pipeline.audioSegmentStream) {
9764 audioTrack = audioTrack || {
9765 timelineStartInfo: {
9766 baseMediaDecodeTime: self.baseMediaDecodeTime
9767 },
9768 codec: 'adts',
9769 type: 'audio'
9770 };
9771 // hook up the audio segment stream to the first track with aac data
9772 pipeline.coalesceStream.numberOfTracks++;
9773 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
9774
9775 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
9776
9777 // Set up the final part of the audio pipeline
9778 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
9779 }
9780
9781 // emit pmt info
9782 self.trigger('trackinfo', {
9783 hasAudio: !!audioTrack,
9784 hasVideo: !!videoTrack
9785 });
9786 });
9787
9788 // Re-emit any data coming from the coalesce stream to the outside world
9789 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
9790 // Let the consumer know we have finished flushing the entire pipeline
9791 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
9792 };
9793
9794 this.setupTsPipeline = function () {
9795 var pipeline = {};
9796 this.transmuxPipeline_ = pipeline;
9797
9798 pipeline.type = 'ts';
9799 pipeline.metadataStream = new m2ts_1.MetadataStream();
9800
9801 // set up the parsing pipeline
9802 pipeline.packetStream = new m2ts_1.TransportPacketStream();
9803 pipeline.parseStream = new m2ts_1.TransportParseStream();
9804 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
9805 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
9806 pipeline.adtsStream = new adts();
9807 pipeline.h264Stream = new H264Stream();
9808 pipeline.captionStream = new m2ts_1.CaptionStream();
9809 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
9810 pipeline.headOfPipeline = pipeline.packetStream;
9811
9812 // disassemble MPEG2-TS packets into elementary streams
9813 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream);
9814
9815 // !!THIS ORDER IS IMPORTANT!!
9816 // demux the streams
9817 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
9818
9819 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
9820
9821 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
9822
9823 // Hook up CEA-608/708 caption stream
9824 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
9825
9826 pipeline.elementaryStream.on('data', function (data) {
9827 var i;
9828
9829 var baseMediaDecodeTime = !options.keepOriginalTimestamps ? self.baseMediaDecodeTime : 0;
9830
9831 if (data.type === 'metadata') {
9832 i = data.tracks.length;
9833
9834 // scan the tracks listed in the metadata
9835 while (i--) {
9836 if (!videoTrack && data.tracks[i].type === 'video') {
9837 videoTrack = data.tracks[i];
9838 videoTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
9839 } else if (!audioTrack && data.tracks[i].type === 'audio') {
9840 audioTrack = data.tracks[i];
9841 audioTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
9842 }
9843 }
9844
9845 // hook up the video segment stream to the first track with h264 data
9846 if (videoTrack && !pipeline.videoSegmentStream) {
9847 pipeline.coalesceStream.numberOfTracks++;
9848 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
9849
9850 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
9851 // When video emits timelineStartInfo data after a flush, we forward that
9852 // info to the AudioSegmentStream, if it exists, because video timeline
9853 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
9854 // because this is a particularly subtle form of timestamp alteration.
9855 if (audioTrack && !options.keepOriginalTimestamps) {
9856 audioTrack.timelineStartInfo = timelineStartInfo;
9857 // On the first segment we trim AAC frames that exist before the
9858 // very earliest DTS we have seen in video because Chrome will
9859 // interpret any video track with a baseMediaDecodeTime that is
9860 // non-zero as a gap.
9861 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
9862 }
9863 });
9864
9865 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
9866 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
9867
9868 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
9869 if (audioTrack) {
9870 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
9871 }
9872 });
9873
9874 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo'));
9875
9876 // Set up the final part of the video pipeline
9877 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
9878 }
9879
9880 if (audioTrack && !pipeline.audioSegmentStream) {
9881 // hook up the audio segment stream to the first track with aac data
9882 pipeline.coalesceStream.numberOfTracks++;
9883 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
9884
9885 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
9886
9887 // Set up the final part of the audio pipeline
9888 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
9889 }
9890
9891 // emit pmt info
9892 self.trigger('trackinfo', {
9893 hasAudio: !!audioTrack,
9894 hasVideo: !!videoTrack
9895 });
9896 }
9897 });
9898
9899 // Re-emit any data coming from the coalesce stream to the outside world
9900 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
9901 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
9902 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
9903
9904 self.trigger('id3Frame', id3Frame);
9905 });
9906 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption'));
9907 // Let the consumer know we have finished flushing the entire pipeline
9908 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
9909 };
9910
9911 // hook up the segment streams once track metadata is delivered
  // Reset per-track timing state and tell the pipeline a new base decode
  // time is in effect (used when the timestamp offset changes, e.g. on seek
  // or discontinuity). Order matters: timelineStartInfo is cleared before
  // the rollover streams are signalled.
  this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
    var pipeline = this.transmuxPipeline_;

    this.baseMediaDecodeTime = baseMediaDecodeTime;

    if (audioTrack) {
      // forget the previous timeline anchor so the next segment re-anchors
      audioTrack.timelineStartInfo.dts = undefined;
      audioTrack.timelineStartInfo.pts = undefined;
      trackDecodeInfo.clearDtsInfo(audioTrack);
      // aac pipelines keep a separate audio rollover stream — see push()
      if (pipeline.audioTimestampRolloverStream) {
        pipeline.audioTimestampRolloverStream.discontinuity();
      }
    }
    if (videoTrack) {
      if (pipeline.videoSegmentStream) {
        // cached gops belong to the old timeline; drop them
        pipeline.videoSegmentStream.gopCache_ = [];
      }
      videoTrack.timelineStartInfo.dts = undefined;
      videoTrack.timelineStartInfo.pts = undefined;
      trackDecodeInfo.clearDtsInfo(videoTrack);
      pipeline.captionStream.reset();
    }

    if (pipeline.timestampRolloverStream) {
      pipeline.timestampRolloverStream.discontinuity();
    }
  };
9939
9940 this.setAudioAppendStart = function (timestamp) {
9941 if (audioTrack) {
9942 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
9943 }
9944 };
9945
9946 this.setRemux = function (val) {
9947 var pipeline = this.transmuxPipeline_;
9948
9949 options.remux = val;
9950
9951 if (pipeline && pipeline.coalesceStream) {
9952 pipeline.coalesceStream.setRemux(val);
9953 }
9954 };
9955
9956 this.alignGopsWith = function (gopsToAlignWith) {
9957 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
9958 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
9959 }
9960 };
9961
9962 // feed incoming data to the front of the parsing pipeline
9963 this.push = function (data) {
9964 if (hasFlushed) {
9965 var isAac = isLikelyAacData$1(data);
9966
9967 if (isAac && this.transmuxPipeline_.type !== 'aac') {
9968 this.setupAacPipeline();
9969 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
9970 this.setupTsPipeline();
9971 }
9972 hasFlushed = false;
9973 }
9974 this.transmuxPipeline_.headOfPipeline.push(data);
9975 };
9976
9977 // flush any buffered data
  this.flush = function () {
    // remember that a flush happened so the next push() may rebuild the
    // pipeline if the container format changes
    hasFlushed = true;
    // Start at the top of the pipeline and flush all pending work
    this.transmuxPipeline_.headOfPipeline.flush();
  };
9983
  this.endTimeline = function () {
    // signal end-of-timeline from the head so it propagates down the pipeline
    this.transmuxPipeline_.headOfPipeline.endTimeline();
  };
9987
9988 this.reset = function () {
9989 if (this.transmuxPipeline_.headOfPipeline) {
9990 this.transmuxPipeline_.headOfPipeline.reset();
9991 }
9992 };
9993
9994 // Caption data has to be reset when seeking outside buffered range
9995 this.resetCaptions = function () {
9996 if (this.transmuxPipeline_.captionStream) {
9997 this.transmuxPipeline_.captionStream.reset();
9998 }
9999 };
10000 };
  // give _Transmuxer the stream event API (trigger/pipe are used above)
  _Transmuxer.prototype = new stream();

  // public interface of the transmuxer module
  var transmuxer = {
    Transmuxer: _Transmuxer,
    VideoSegmentStream: _VideoSegmentStream,
    AudioSegmentStream: _AudioSegmentStream,
    AUDIO_PROPERTIES: audioProperties,
    VIDEO_PROPERTIES: videoProperties,
    // exported for testing
    generateVideoSegmentTimingInfo: generateVideoSegmentTimingInfo
  };
10012
10013 var classCallCheck = function classCallCheck(instance, Constructor) {
10014 if (!(instance instanceof Constructor)) {
10015 throw new TypeError("Cannot call a class as a function");
10016 }
10017 };
10018
10019 var createClass = function () {
10020 function defineProperties(target, props) {
10021 for (var i = 0; i < props.length; i++) {
10022 var descriptor = props[i];
10023 descriptor.enumerable = descriptor.enumerable || false;
10024 descriptor.configurable = true;
10025 if ("value" in descriptor) descriptor.writable = true;
10026 Object.defineProperty(target, descriptor.key, descriptor);
10027 }
10028 }
10029
10030 return function (Constructor, protoProps, staticProps) {
10031 if (protoProps) defineProperties(Constructor.prototype, protoProps);
10032 if (staticProps) defineProperties(Constructor, staticProps);
10033 return Constructor;
10034 };
10035 }();
10036
10037 /**
10038 * @file transmuxer-worker.js
10039 */
10040
10041 /**
10042 * Re-emits transmuxer events by converting them into messages to the
10043 * world outside the worker.
10044 *
10045 * @param {Object} transmuxer the transmuxer to wire events on
10046 * @private
10047 */
10048 var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer$$1) {
10049 transmuxer$$1.on('data', function (segment) {
10050 // transfer ownership of the underlying ArrayBuffer
10051 // instead of doing a copy to save memory
10052 // ArrayBuffers are transferable but generic TypedArrays are not
10053 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
10054 var initArray = segment.initSegment;
10055
10056 segment.initSegment = {
10057 data: initArray.buffer,
10058 byteOffset: initArray.byteOffset,
10059 byteLength: initArray.byteLength
10060 };
10061
10062 var typedArray = segment.data;
10063
10064 segment.data = typedArray.buffer;
10065 self.postMessage({
10066 action: 'data',
10067 segment: segment,
10068 byteOffset: typedArray.byteOffset,
10069 byteLength: typedArray.byteLength
10070 }, [segment.data]);
10071 });
10072
10073 if (transmuxer$$1.captionStream) {
10074 transmuxer$$1.captionStream.on('data', function (caption) {
10075 self.postMessage({
10076 action: 'caption',
10077 data: caption
10078 });
10079 });
10080 }
10081
10082 transmuxer$$1.on('done', function (data) {
10083 self.postMessage({ action: 'done' });
10084 });
10085
10086 transmuxer$$1.on('gopInfo', function (gopInfo) {
10087 self.postMessage({
10088 action: 'gopInfo',
10089 gopInfo: gopInfo
10090 });
10091 });
10092
10093 transmuxer$$1.on('videoSegmentTimingInfo', function (videoSegmentTimingInfo) {
10094 self.postMessage({
10095 action: 'videoSegmentTimingInfo',
10096 videoSegmentTimingInfo: videoSegmentTimingInfo
10097 });
10098 });
10099 };
10100
10101 /**
10102 * All incoming messages route through this hash. If no function exists
10103 * to handle an incoming message, then we ignore the message.
10104 *
10105 * @class MessageHandlers
10106 * @param {Object} options the options to initialize with
10107 */
10108
  var MessageHandlers = function () {
    // self: the worker scope to post replies to; options: passed through to
    // the Transmuxer constructor
    function MessageHandlers(self, options) {
      classCallCheck(this, MessageHandlers);

      this.options = options || {};
      this.self = self;
      this.init();
    }

    /**
     * initialize our web worker and wire all the events.
     */

    createClass(MessageHandlers, [{
      key: 'init',
      value: function init() {
        // dispose of any transmuxer from a previous init/reset so its
        // state and listeners are released before building a fresh one
        if (this.transmuxer) {
          this.transmuxer.dispose();
        }
        this.transmuxer = new transmuxer.Transmuxer(this.options);
        wireTransmuxerEvents(this.self, this.transmuxer);
      }

      /**
       * Adds data (a ts segment) to the start of the transmuxer pipeline for
       * processing.
       *
       * @param {ArrayBuffer} data data to push into the muxer
       */

    }, {
      key: 'push',
      value: function push(data) {
        // Cast array buffer to correct type for transmuxer
        var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);

        this.transmuxer.push(segment);
      }

      /**
       * Recreate the transmuxer so that the next segment added via `push`
       * start with a fresh transmuxer.
       */

    }, {
      key: 'reset',
      value: function reset() {
        this.init();
      }

      /**
       * Set the value that will be used as the `baseMediaDecodeTime` time for the
       * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
       * set relative to the first based on the PTS values.
       *
       * @param {Object} data used to set the timestamp offset in the muxer
       */

    }, {
      key: 'setTimestampOffset',
      value: function setTimestampOffset(data) {
        var timestampOffset = data.timestampOffset || 0;

        // convert seconds to 90kHz MPEG-TS clock ticks
        this.transmuxer.setBaseMediaDecodeTime(Math.round(timestampOffset * 90000));
      }
    }, {
      key: 'setAudioAppendStart',
      value: function setAudioAppendStart(data) {
        // convert seconds to 90kHz MPEG-TS clock ticks
        this.transmuxer.setAudioAppendStart(Math.ceil(data.appendStart * 90000));
      }

      /**
       * Forces the pipeline to finish processing the last segment and emit it's
       * results.
       *
       * @param {Object} data event data, not really used
       */

    }, {
      key: 'flush',
      value: function flush(data) {
        this.transmuxer.flush();
      }
    }, {
      key: 'resetCaptions',
      value: function resetCaptions() {
        // caption state must be cleared when seeking outside the buffered range
        this.transmuxer.resetCaptions();
      }
    }, {
      key: 'alignGopsWith',
      value: function alignGopsWith(data) {
        // copy the list so the worker does not share mutable state with the caller
        this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
      }
    }]);
    return MessageHandlers;
  }();
10205
10206 /**
10207 * Our web wroker interface so that things can talk to mux.js
10208 * that will be running in a web worker. the scope is passed to this by
10209 * webworkify.
10210 *
10211 * @param {Object} self the scope for the web worker
10212 */
10213
10214 var TransmuxerWorker = function TransmuxerWorker(self) {
10215 self.onmessage = function (event) {
10216 if (event.data.action === 'init' && event.data.options) {
10217 this.messageHandlers = new MessageHandlers(self, event.data.options);
10218 return;
10219 }
10220
10221 if (!this.messageHandlers) {
10222 this.messageHandlers = new MessageHandlers(self);
10223 }
10224
10225 if (event.data && event.data.action && event.data.action !== 'init') {
10226 if (this.messageHandlers[event.data.action]) {
10227 this.messageHandlers[event.data.action](event.data);
10228 }
10229 }
10230 };
10231 };
10232
10233 var transmuxerWorker = new TransmuxerWorker(self);
10234
10235 return transmuxerWorker;
10236 }();
10237 });
10238
10239 /**
10240 * @file - codecs.js - Handles tasks regarding codec strings such as translating them to
10241 * codec strings, or translating codec strings into objects that can be examined.
10242 */
10243
10244 // Default codec parameters if none were provided for video and/or audio
  var defaultCodecs = {
    // combined with the object type indicator this yields "avc1.4d400d"
    // (presumably H.264 Main profile — the hex pair is profile/level)
    videoCodec: 'avc1',
    videoObjectTypeIndicator: '.4d400d',
    // AAC-LC
    audioProfile: '2'
  };
10251
10252 /**
10253 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
10254 * `avc1.<hhhhhh>`
10255 *
10256 * @param {Array} codecs an array of codec strings to fix
10257 * @return {Array} the translated codec array
10258 * @private
10259 */
10260 var translateLegacyCodecs = function translateLegacyCodecs(codecs) {
10261 return codecs.map(function (codec) {
10262 return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
10263 var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
10264 var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
10265
10266 return 'avc1.' + profileHex + '00' + avcLevelHex;
10267 });
10268 });
10269 };
10270
10271 /**
10272 * Parses a codec string to retrieve the number of codecs specified,
10273 * the video codec and object type indicator, and the audio profile.
10274 */
10275
10276 var parseCodecs = function parseCodecs() {
10277 var codecs = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : '';
10278
10279 var result = {
10280 codecCount: 0
10281 };
10282 var parsed = void 0;
10283
10284 result.codecCount = codecs.split(',').length;
10285 result.codecCount = result.codecCount || 2;
10286
10287 // parse the video codec
10288 parsed = /(^|\s|,)+(avc[13])([^ ,]*)/i.exec(codecs);
10289 if (parsed) {
10290 result.videoCodec = parsed[2];
10291 result.videoObjectTypeIndicator = parsed[3];
10292 }
10293
10294 // parse the last field of the audio codec
10295 result.audioProfile = /(^|\s|,)+mp4a.[0-9A-Fa-f]+\.([0-9A-Fa-f]+)/i.exec(codecs);
10296 result.audioProfile = result.audioProfile && result.audioProfile[2];
10297
10298 return result;
10299 };
10300
10301 /**
10302 * Replace codecs in the codec string with the old apple-style `avc1.<dd>.<dd>` to the
10303 * standard `avc1.<hhhhhh>`.
10304 *
10305 * @param codecString {String} the codec string
10306 * @return {String} the codec string with old apple-style codecs replaced
10307 *
10308 * @private
10309 */
10310 var mapLegacyAvcCodecs = function mapLegacyAvcCodecs(codecString) {
10311 return codecString.replace(/avc1\.(\d+)\.(\d+)/i, function (match) {
10312 return translateLegacyCodecs([match])[0];
10313 });
10314 };
10315
10316 /**
10317 * Build a media mime-type string from a set of parameters
10318 * @param {String} type either 'audio' or 'video'
10319 * @param {String} container either 'mp2t' or 'mp4'
10320 * @param {Array} codecs an array of codec strings to add
10321 * @return {String} a valid media mime-type
10322 */
10323 var makeMimeTypeString = function makeMimeTypeString(type, container, codecs) {
10324 // The codecs array is filtered so that falsey values are
10325 // dropped and don't cause Array#join to create spurious
10326 // commas
10327 return type + '/' + container + '; codecs="' + codecs.filter(function (c) {
10328 return !!c;
10329 }).join(', ') + '"';
10330 };
10331
10332 /**
10333 * Returns the type container based on information in the playlist
10334 * @param {Playlist} media the current media playlist
10335 * @return {String} a valid media container type
10336 */
10337 var getContainerType = function getContainerType(media) {
10338 // An initialization segment means the media playlist is an iframe
10339 // playlist or is using the mp4 container. We don't currently
10340 // support iframe playlists, so assume this is signalling mp4
10341 // fragments.
10342 if (media.segments && media.segments.length && media.segments[0].map) {
10343 return 'mp4';
10344 }
10345 return 'mp2t';
10346 };
10347
10348 /**
10349 * Returns a set of codec strings parsed from the playlist or the default
10350 * codec strings if no codecs were specified in the playlist
10351 * @param {Playlist} media the current media playlist
10352 * @return {Object} an object with the video and audio codecs
10353 */
10354 var getCodecs = function getCodecs(media) {
10355 // if the codecs were explicitly specified, use them instead of the
10356 // defaults
10357 var mediaAttributes = media.attributes || {};
10358
10359 if (mediaAttributes.CODECS) {
10360 return parseCodecs(mediaAttributes.CODECS);
10361 }
10362 return defaultCodecs;
10363 };
10364
10365 var audioProfileFromDefault = function audioProfileFromDefault(master, audioGroupId) {
10366 if (!master.mediaGroups.AUDIO || !audioGroupId) {
10367 return null;
10368 }
10369
10370 var audioGroup = master.mediaGroups.AUDIO[audioGroupId];
10371
10372 if (!audioGroup) {
10373 return null;
10374 }
10375
10376 for (var name in audioGroup) {
10377 var audioType = audioGroup[name];
10378
10379 if (audioType.default && audioType.playlists) {
10380 // codec should be the same for all playlists within the audio type
10381 return parseCodecs(audioType.playlists[0].attributes.CODECS).audioProfile;
10382 }
10383 }
10384
10385 return null;
10386 };
10387
10388 /**
10389 * Calculates the MIME type strings for a working configuration of
10390 * SourceBuffers to play variant streams in a master playlist. If
10391 * there is no possible working configuration, an empty array will be
10392 * returned.
10393 *
10394 * @param master {Object} the m3u8 object for the master playlist
10395 * @param media {Object} the m3u8 object for the variant playlist
10396 * @return {Array} the MIME type strings. If the array has more than
10397 * one entry, the first element should be applied to the video
10398 * SourceBuffer and the second to the audio SourceBuffer.
10399 *
10400 * @private
10401 */
10402 var mimeTypesForPlaylist = function mimeTypesForPlaylist(master, media) {
10403 var containerType = getContainerType(media);
10404 var codecInfo = getCodecs(media);
10405 var mediaAttributes = media.attributes || {};
10406 // Default condition for a traditional HLS (no demuxed audio/video)
10407 var isMuxed = true;
10408 var isMaat = false;
10409
10410 if (!media) {
10411 // Not enough information
10412 return [];
10413 }
10414
10415 if (master.mediaGroups.AUDIO && mediaAttributes.AUDIO) {
10416 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
10417
10418 // Handle the case where we are in a multiple-audio track scenario
10419 if (audioGroup) {
10420 isMaat = true;
10421 // Start with the everything demuxed then...
10422 isMuxed = false;
10423 // ...check to see if any audio group tracks are muxed (ie. lacking a uri)
10424 for (var groupId in audioGroup) {
10425 // either a uri is present (if the case of HLS and an external playlist), or
10426 // playlists is present (in the case of DASH where we don't have external audio
10427 // playlists)
10428 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
10429 isMuxed = true;
10430 break;
10431 }
10432 }
10433 }
10434 }
10435
10436 // HLS with multiple-audio tracks must always get an audio codec.
10437 // Put another way, there is no way to have a video-only multiple-audio HLS!
10438 if (isMaat && !codecInfo.audioProfile) {
10439 if (!isMuxed) {
10440 // It is possible for codecs to be specified on the audio media group playlist but
10441 // not on the rendition playlist. This is mostly the case for DASH, where audio and
10442 // video are always separate (and separately specified).
10443 codecInfo.audioProfile = audioProfileFromDefault(master, mediaAttributes.AUDIO);
10444 }
10445
10446 if (!codecInfo.audioProfile) {
10447 videojs.log.warn('Multiple audio tracks present but no audio codec string is specified. ' + 'Attempting to use the default audio codec (mp4a.40.2)');
10448 codecInfo.audioProfile = defaultCodecs.audioProfile;
10449 }
10450 }
10451
10452 // Generate the final codec strings from the codec object generated above
10453 var codecStrings = {};
10454
10455 if (codecInfo.videoCodec) {
10456 codecStrings.video = '' + codecInfo.videoCodec + codecInfo.videoObjectTypeIndicator;
10457 }
10458
10459 if (codecInfo.audioProfile) {
10460 codecStrings.audio = 'mp4a.40.' + codecInfo.audioProfile;
10461 }
10462
10463 // Finally, make and return an array with proper mime-types depending on
10464 // the configuration
10465 var justAudio = makeMimeTypeString('audio', containerType, [codecStrings.audio]);
10466 var justVideo = makeMimeTypeString('video', containerType, [codecStrings.video]);
10467 var bothVideoAudio = makeMimeTypeString('video', containerType, [codecStrings.video, codecStrings.audio]);
10468
10469 if (isMaat) {
10470 if (!isMuxed && codecStrings.video) {
10471 return [justVideo, justAudio];
10472 }
10473
10474 if (!isMuxed && !codecStrings.video) {
10475 // There is no muxed content and no video codec string, so this is an audio only
10476 // stream with alternate audio.
10477 return [justAudio, justAudio];
10478 }
10479
10480 // There exists the possiblity that this will return a `video/container`
10481 // mime-type for the first entry in the array even when there is only audio.
10482 // This doesn't appear to be a problem and simplifies the code.
10483 return [bothVideoAudio, justAudio];
10484 }
10485
10486 // If there is no video codec at all, always just return a single
10487 // audio/<container> mime-type
10488 if (!codecStrings.video) {
10489 return [justAudio];
10490 }
10491
10492 // When not using separate audio media groups, audio and video is
10493 // *always* muxed
10494 return [bothVideoAudio];
10495 };
10496
10497 /**
10498 * Parse a content type header into a type and parameters
10499 * object
10500 *
10501 * @param {String} type the content type header
10502 * @return {Object} the parsed content-type
10503 * @private
10504 */
10505 var parseContentType = function parseContentType(type) {
10506 var object = { type: '', parameters: {} };
10507 var parameters = type.trim().split(';');
10508
10509 // first parameter should always be content-type
10510 object.type = parameters.shift().trim();
10511 parameters.forEach(function (parameter) {
10512 var pair = parameter.trim().split('=');
10513
10514 if (pair.length > 1) {
10515 var name = pair[0].replace(/"/g, '').trim();
10516 var value = pair[1].replace(/"/g, '').trim();
10517
10518 object.parameters[name] = value;
10519 }
10520 });
10521
10522 return object;
10523 };
10524
10525 /**
10526 * Check if a codec string refers to an audio codec.
10527 *
10528 * @param {String} codec codec string to check
10529 * @return {Boolean} if this is an audio codec
10530 * @private
10531 */
10532 var isAudioCodec = function isAudioCodec(codec) {
10533 return (/mp4a\.\d+.\d+/i.test(codec)
10534 );
10535 };
10536
10537 /**
10538 * Check if a codec string refers to a video codec.
10539 *
10540 * @param {String} codec codec string to check
10541 * @return {Boolean} if this is a video codec
10542 * @private
10543 */
10544 var isVideoCodec = function isVideoCodec(codec) {
10545 return (/avc1\.[\da-f]+/i.test(codec)
10546 );
10547 };
10548
10549 /**
10550 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
10551 * front of current time.
10552 *
10553 * @param {Array} buffer
10554 * The current buffer of gop information
10555 * @param {Number} currentTime
10556 * The current time
10557 * @param {Double} mapping
10558 * Offset to map display time to stream presentation time
10559 * @return {Array}
10560 * List of gops considered safe to append over
10561 */
10562 var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
10563 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
10564 return [];
10565 }
10566
10567 // pts value for current time + 3 seconds to give a bit more wiggle room
10568 var currentTimePts = Math.ceil((currentTime - mapping + 3) * 90000);
10569
10570 var i = void 0;
10571
10572 for (i = 0; i < buffer.length; i++) {
10573 if (buffer[i].pts > currentTimePts) {
10574 break;
10575 }
10576 }
10577
10578 return buffer.slice(i);
10579 };
10580
10581 /**
10582 * Appends gop information (timing and byteLength) received by the transmuxer for the
10583 * gops appended in the last call to appendBuffer
10584 *
10585 * @param {Array} buffer
10586 * The current buffer of gop information
10587 * @param {Array} gops
10588 * List of new gop information
10589 * @param {boolean} replace
10590 * If true, replace the buffer with the new gop information. If false, append the
10591 * new gop information to the buffer in the right location of time.
10592 * @return {Array}
10593 * Updated list of gop information
10594 */
10595 var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
10596 if (!gops.length) {
10597 return buffer;
10598 }
10599
10600 if (replace) {
10601 // If we are in safe append mode, then completely overwrite the gop buffer
10602 // with the most recent appeneded data. This will make sure that when appending
10603 // future segments, we only try to align with gops that are both ahead of current
10604 // time and in the last segment appended.
10605 return gops.slice();
10606 }
10607
10608 var start = gops[0].pts;
10609
10610 var i = 0;
10611
10612 for (i; i < buffer.length; i++) {
10613 if (buffer[i].pts >= start) {
10614 break;
10615 }
10616 }
10617
10618 return buffer.slice(0, i).concat(gops);
10619 };
10620
10621 /**
10622 * Removes gop information in buffer that overlaps with provided start and end
10623 *
10624 * @param {Array} buffer
10625 * The current buffer of gop information
10626 * @param {Double} start
10627 * position to start the remove at
10628 * @param {Double} end
10629 * position to end the remove at
10630 * @param {Double} mapping
10631 * Offset to map display time to stream presentation time
10632 */
10633 var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
10634 var startPts = Math.ceil((start - mapping) * 90000);
10635 var endPts = Math.ceil((end - mapping) * 90000);
10636 var updatedBuffer = buffer.slice();
10637
10638 var i = buffer.length;
10639
10640 while (i--) {
10641 if (buffer[i].pts <= endPts) {
10642 break;
10643 }
10644 }
10645
10646 if (i === -1) {
10647 // no removal because end of remove range is before start of buffer
10648 return updatedBuffer;
10649 }
10650
10651 var j = i + 1;
10652
10653 while (j--) {
10654 if (buffer[j].pts <= startPts) {
10655 break;
10656 }
10657 }
10658
10659 // clamp remove range start to 0 index
10660 j = Math.max(j, 0);
10661
10662 updatedBuffer.splice(j, i - j + 1);
10663
10664 return updatedBuffer;
10665 };
10666
10667 var buffered = function buffered(videoBuffer, audioBuffer, audioDisabled) {
10668 var start = null;
10669 var end = null;
10670 var arity = 0;
10671 var extents = [];
10672 var ranges = [];
10673
10674 // neither buffer has been created yet
10675 if (!videoBuffer && !audioBuffer) {
10676 return videojs.createTimeRange();
10677 }
10678
10679 // only one buffer is configured
10680 if (!videoBuffer) {
10681 return audioBuffer.buffered;
10682 }
10683 if (!audioBuffer) {
10684 return videoBuffer.buffered;
10685 }
10686
10687 // both buffers are configured
10688 if (audioDisabled) {
10689 return videoBuffer.buffered;
10690 }
10691
10692 // both buffers are empty
10693 if (videoBuffer.buffered.length === 0 && audioBuffer.buffered.length === 0) {
10694 return videojs.createTimeRange();
10695 }
10696
10697 // Handle the case where we have both buffers and create an
10698 // intersection of the two
10699 var videoBuffered = videoBuffer.buffered;
10700 var audioBuffered = audioBuffer.buffered;
10701 var count = videoBuffered.length;
10702
10703 // A) Gather up all start and end times
10704 while (count--) {
10705 extents.push({ time: videoBuffered.start(count), type: 'start' });
10706 extents.push({ time: videoBuffered.end(count), type: 'end' });
10707 }
10708 count = audioBuffered.length;
10709 while (count--) {
10710 extents.push({ time: audioBuffered.start(count), type: 'start' });
10711 extents.push({ time: audioBuffered.end(count), type: 'end' });
10712 }
10713 // B) Sort them by time
10714 extents.sort(function (a, b) {
10715 return a.time - b.time;
10716 });
10717
10718 // C) Go along one by one incrementing arity for start and decrementing
10719 // arity for ends
10720 for (count = 0; count < extents.length; count++) {
10721 if (extents[count].type === 'start') {
10722 arity++;
10723
10724 // D) If arity is ever incremented to 2 we are entering an
10725 // overlapping range
10726 if (arity === 2) {
10727 start = extents[count].time;
10728 }
10729 } else if (extents[count].type === 'end') {
10730 arity--;
10731
10732 // E) If arity is ever decremented to 1 we leaving an
10733 // overlapping range
10734 if (arity === 1) {
10735 end = extents[count].time;
10736 }
10737 }
10738
10739 // F) Record overlapping ranges
10740 if (start !== null && end !== null) {
10741 ranges.push([start, end]);
10742 start = null;
10743 end = null;
10744 }
10745 }
10746
10747 return videojs.createTimeRanges(ranges);
10748 };
10749
10750 /**
10751 * @file virtual-source-buffer.js
10752 */
10753
  // one second expressed in 90kHz MPEG-TS clock ticks
  var ONE_SECOND_IN_TS = 90000;
10755
10756 // We create a wrapper around the SourceBuffer so that we can manage the
10757 // state of the `updating` property manually. We have to do this because
10758 // Firefox changes `updating` to false long before triggering `updateend`
10759 // events and that was causing strange problems in videojs-contrib-hls
10760 var makeWrappedSourceBuffer = function makeWrappedSourceBuffer(mediaSource, mimeType) {
10761 var sourceBuffer = mediaSource.addSourceBuffer(mimeType);
10762 var wrapper = Object.create(null);
10763
10764 wrapper.updating = false;
10765 wrapper.realBuffer_ = sourceBuffer;
10766
10767 var _loop = function _loop(key) {
10768 if (typeof sourceBuffer[key] === 'function') {
10769 wrapper[key] = function () {
10770 return sourceBuffer[key].apply(sourceBuffer, arguments);
10771 };
10772 } else if (typeof wrapper[key] === 'undefined') {
10773 Object.defineProperty(wrapper, key, {
10774 get: function get$$1() {
10775 return sourceBuffer[key];
10776 },
10777 set: function set$$1(v) {
10778 return sourceBuffer[key] = v;
10779 }
10780 });
10781 }
10782 };
10783
10784 for (var key in sourceBuffer) {
10785 _loop(key);
10786 }
10787
10788 return wrapper;
10789 };
10790
10791 /**
10792 * VirtualSourceBuffers exist so that we can transmux non native formats
10793 * into a native format, but keep the same api as a native source buffer.
10794 * It creates a transmuxer, that works in its own thread (a web worker) and
10795 * that transmuxer muxes the data into a native format. VirtualSourceBuffer will
10796 * then send all of that data to the naive sourcebuffer so that it is
10797 * indestinguishable from a natively supported format.
10798 *
10799 * @param {HtmlMediaSource} mediaSource the parent mediaSource
10800 * @param {Array} codecs array of codecs that we will be dealing with
10801 * @class VirtualSourceBuffer
10802 * @extends video.js.EventTarget
10803 */
10804
10805 var VirtualSourceBuffer = function (_videojs$EventTarget) {
10806 inherits(VirtualSourceBuffer, _videojs$EventTarget);
10807
  // mediaSource: the parent HtmlMediaSource; codecs: array of codec strings
  // used to decide which native buffers (audio/video) this virtual buffer
  // will manage
  function VirtualSourceBuffer(mediaSource, codecs) {
    classCallCheck(this, VirtualSourceBuffer);

    var _this = possibleConstructorReturn(this, (VirtualSourceBuffer.__proto__ || Object.getPrototypeOf(VirtualSourceBuffer)).call(this, videojs.EventTarget));

    _this.timestampOffset_ = 0;
    // segments received from the transmuxer but not yet appended
    _this.pendingBuffers_ = [];
    _this.bufferUpdating_ = false;

    _this.mediaSource_ = mediaSource;
    _this.codecs_ = codecs;
    _this.audioCodec_ = null;
    _this.videoCodec_ = null;
    _this.audioDisabled_ = false;
    _this.appendAudioInitSegment_ = true;
    _this.gopBuffer_ = [];
    _this.timeMapping_ = 0;
    // IE11+ requires the safe-append path (gop alignment at the end)
    _this.safeAppend_ = videojs.browser.IE_VERSION >= 11;

    var options = {
      remux: false,
      alignGopsAtEnd: _this.safeAppend_
    };

    // classify the configured codecs into audio and video
    _this.codecs_.forEach(function (codec) {
      if (isAudioCodec(codec)) {
        _this.audioCodec_ = codec;
      } else if (isVideoCodec(codec)) {
        _this.videoCodec_ = codec;
      }
    });

    // append muxed segments to their respective native buffers as
    // soon as they are available
    _this.transmuxer_ = new TransmuxWorker();
    _this.transmuxer_.postMessage({ action: 'init', options: options });

    // route worker messages to the matching handler method
    _this.transmuxer_.onmessage = function (event) {
      if (event.data.action === 'data') {
        return _this.data_(event);
      }

      if (event.data.action === 'done') {
        return _this.done_(event);
      }

      if (event.data.action === 'gopInfo') {
        return _this.appendGopInfo_(event);
      }

      if (event.data.action === 'videoSegmentTimingInfo') {
        return _this.videoSegmentTimingInfo_(event.data.videoSegmentTimingInfo);
      }
    };

    // this timestampOffset is a property with the side-effect of resetting
    // baseMediaDecodeTime in the transmuxer on the setter
    Object.defineProperty(_this, 'timestampOffset', {
      get: function get$$1() {
        return this.timestampOffset_;
      },
      set: function set$$1(val) {
        if (typeof val === 'number' && val >= 0) {
          this.timestampOffset_ = val;
          this.appendAudioInitSegment_ = true;

          // reset gop buffer on timestampoffset as this signals a change in timeline
          this.gopBuffer_.length = 0;
          this.timeMapping_ = 0;

          // We have to tell the transmuxer to set the baseMediaDecodeTime to
          // the desired timestampOffset for the next segment
          this.transmuxer_.postMessage({
            action: 'setTimestampOffset',
            timestampOffset: val
          });
        }
      }
    });

    // setting the append window affects both source buffers
    Object.defineProperty(_this, 'appendWindowStart', {
      get: function get$$1() {
        return (this.videoBuffer_ || this.audioBuffer_).appendWindowStart;
      },
      set: function set$$1(start) {
        if (this.videoBuffer_) {
          this.videoBuffer_.appendWindowStart = start;
        }
        if (this.audioBuffer_) {
          this.audioBuffer_.appendWindowStart = start;
        }
      }
    });

    // this buffer is "updating" if either of its native buffers are
    Object.defineProperty(_this, 'updating', {
      get: function get$$1() {
        return !!(this.bufferUpdating_ || !this.audioDisabled_ && this.audioBuffer_ && this.audioBuffer_.updating || this.videoBuffer_ && this.videoBuffer_.updating);
      }
    });

    // the buffered property is the intersection of the buffered
    // ranges of the native source buffers
    Object.defineProperty(_this, 'buffered', {
      get: function get$$1() {
        return buffered(this.videoBuffer_, this.audioBuffer_, this.audioDisabled_);
      }
    });
    return _this;
  }
10919
10920 /**
10921 * When we get a data event from the transmuxer
10922 * we call this function and handle the data that
10923 * was sent to us
10924 *
10925 * @private
10926 * @param {Event} event the data event from the transmuxer
10927 */
10928
10929
10930 createClass(VirtualSourceBuffer, [{
10931 key: 'data_',
10932 value: function data_(event) {
10933 var segment = event.data.segment;
10934
10935 // Cast ArrayBuffer to TypedArray
10936 segment.data = new Uint8Array(segment.data, event.data.byteOffset, event.data.byteLength);
10937
10938 segment.initSegment = new Uint8Array(segment.initSegment.data, segment.initSegment.byteOffset, segment.initSegment.byteLength);
10939
10940 createTextTracksIfNecessary(this, this.mediaSource_, segment);
10941
10942 // Add the segments to the pendingBuffers array
10943 this.pendingBuffers_.push(segment);
10944 return;
10945 }
10946
10947 /**
10948 * When we get a done event from the transmuxer
10949 * we call this function and we process all
10950 * of the pending data that we have been saving in the
10951 * data_ function
10952 *
10953 * @private
10954 * @param {Event} event the done event from the transmuxer
10955 */
10956
10957 }, {
10958 key: 'done_',
10959 value: function done_(event) {
10960 // Don't process and append data if the mediaSource is closed
10961 if (this.mediaSource_.readyState === 'closed') {
10962 this.pendingBuffers_.length = 0;
10963 return;
10964 }
10965
10966 // All buffers should have been flushed from the muxer
10967 // start processing anything we have received
10968 this.processPendingSegments_();
10969 return;
10970 }
10971 }, {
10972 key: 'videoSegmentTimingInfo_',
10973 value: function videoSegmentTimingInfo_(timingInfo) {
10974 var timingInfoInSeconds = {
10975 start: {
10976 decode: timingInfo.start.dts / ONE_SECOND_IN_TS,
10977 presentation: timingInfo.start.pts / ONE_SECOND_IN_TS
10978 },
10979 end: {
10980 decode: timingInfo.end.dts / ONE_SECOND_IN_TS,
10981 presentation: timingInfo.end.pts / ONE_SECOND_IN_TS
10982 },
10983 baseMediaDecodeTime: timingInfo.baseMediaDecodeTime / ONE_SECOND_IN_TS
10984 };
10985
10986 if (timingInfo.prependedContentDuration) {
10987 timingInfoInSeconds.prependedContentDuration = timingInfo.prependedContentDuration / ONE_SECOND_IN_TS;
10988 }
10989
10990 this.trigger({
10991 type: 'videoSegmentTimingInfo',
10992 videoSegmentTimingInfo: timingInfoInSeconds
10993 });
10994 }
10995
10996 /**
10997 * Create our internal native audio/video source buffers and add
10998 * event handlers to them with the following conditions:
10999 * 1. they do not already exist on the mediaSource
11000 * 2. this VSB has a codec for them
11001 *
11002 * @private
11003 */
11004
}, {
  key: 'createRealSourceBuffers_',
  value: function createRealSourceBuffers_() {
    var _this2 = this;

    var types = ['audio', 'video'];

    types.forEach(function (type) {
      // Don't create a SourceBuffer of this type if we don't have a
      // codec for it
      if (!_this2[type + 'Codec_']) {
        return;
      }

      // Do nothing if a SourceBuffer of this type already exists
      if (_this2[type + 'Buffer_']) {
        return;
      }

      var buffer = null;

      // If the mediasource already has a SourceBuffer for the codec
      // use that
      if (_this2.mediaSource_[type + 'Buffer_']) {
        buffer = _this2.mediaSource_[type + 'Buffer_'];
        // In multiple audio track cases, the audio source buffer is disabled
        // on the main VirtualSourceBuffer by the HTMLMediaSource much earlier
        // than createRealSourceBuffers_ is called to create the second
        // VirtualSourceBuffer because that happens as a side-effect of
        // videojs-contrib-hls starting the audioSegmentLoader. As a result,
        // the audioBuffer is essentially "ownerless" and no one will toggle
        // the `updating` state back to false once the `updateend` event is received
        //
        // Setting `updating` to false manually will work around this
        // situation and allow work to continue
        buffer.updating = false;
      } else {
        // No shared buffer exists yet; create a wrapped native
        // SourceBuffer for this type's codec and cache it on the
        // media source so a later VirtualSourceBuffer can reuse it.
        var codecProperty = type + 'Codec_';
        var mimeType = type + '/mp4;codecs="' + _this2[codecProperty] + '"';

        buffer = makeWrappedSourceBuffer(_this2.mediaSource_.nativeMediaSource_, mimeType);

        _this2.mediaSource_[type + 'Buffer_'] = buffer;
      }

      _this2[type + 'Buffer_'] = buffer;

      // Wire up the events to the SourceBuffer
      ['update', 'updatestart', 'updateend'].forEach(function (event) {
        buffer.addEventListener(event, function () {
          // if audio is disabled
          if (type === 'audio' && _this2.audioDisabled_) {
            return;
          }

          // manually clear the emulated `updating` flag once the native
          // buffer has finished its operation
          if (event === 'updateend') {
            _this2[type + 'Buffer_'].updating = false;
          }

          // only re-emit the event when no other (enabled) native
          // buffer is still mid-operation, so listeners see a single
          // combined event stream
          var shouldTrigger = types.every(function (t) {
            // skip checking audio's updating status if audio
            // is not enabled
            if (t === 'audio' && _this2.audioDisabled_) {
              return true;
            }
            // if the other type is updating we don't trigger
            if (type !== t && _this2[t + 'Buffer_'] && _this2[t + 'Buffer_'].updating) {
              return false;
            }
            return true;
          });

          if (shouldTrigger) {
            return _this2.trigger(event);
          }
        });
      });
    });
  }
11084
11085 /**
11086 * Emulate the native mediasource function, but our function will
11087 * send all of the proposed segments to the transmuxer so that we
11088 * can transmux them before we append them to our internal
11089 * native source buffers in the correct format.
11090 *
11091 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
11092 * @param {Uint8Array} segment the segment to append to the buffer
11093 */
11094
11095 }, {
11096 key: 'appendBuffer',
11097 value: function appendBuffer(segment) {
11098 // Start the internal "updating" state
11099 this.bufferUpdating_ = true;
11100
11101 if (this.audioBuffer_ && this.audioBuffer_.buffered.length) {
11102 var audioBuffered = this.audioBuffer_.buffered;
11103
11104 this.transmuxer_.postMessage({
11105 action: 'setAudioAppendStart',
11106 appendStart: audioBuffered.end(audioBuffered.length - 1)
11107 });
11108 }
11109
11110 if (this.videoBuffer_) {
11111 this.transmuxer_.postMessage({
11112 action: 'alignGopsWith',
11113 gopsToAlignWith: gopsSafeToAlignWith(this.gopBuffer_, this.mediaSource_.player_ ? this.mediaSource_.player_.currentTime() : null, this.timeMapping_)
11114 });
11115 }
11116
11117 this.transmuxer_.postMessage({
11118 action: 'push',
11119 // Send the typed-array of data as an ArrayBuffer so that
11120 // it can be sent as a "Transferable" and avoid the costly
11121 // memory copy
11122 data: segment.buffer,
11123
11124 // To recreate the original typed-array, we need information
11125 // about what portion of the ArrayBuffer it was a view into
11126 byteOffset: segment.byteOffset,
11127 byteLength: segment.byteLength
11128 }, [segment.buffer]);
11129 this.transmuxer_.postMessage({ action: 'flush' });
11130 }
11131
11132 /**
11133 * Appends gop information (timing and byteLength) received by the transmuxer for the
11134 * gops appended in the last call to appendBuffer
11135 *
11136 * @param {Event} event
11137 * The gopInfo event from the transmuxer
11138 * @param {Array} event.data.gopInfo
11139 * List of gop info to append
11140 */
11141
11142 }, {
11143 key: 'appendGopInfo_',
11144 value: function appendGopInfo_(event) {
11145 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, event.data.gopInfo, this.safeAppend_);
11146 }
11147
11148 /**
11149 * Emulate the native mediasource function and remove parts
11150 * of the buffer from any of our internal buffers that exist
11151 *
11152 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
11153 * @param {Double} start position to start the remove at
11154 * @param {Double} end position to end the remove at
11155 */
11156
11157 }, {
11158 key: 'remove',
11159 value: function remove(start, end) {
11160 if (this.videoBuffer_) {
11161 this.videoBuffer_.updating = true;
11162 this.videoBuffer_.remove(start, end);
11163 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
11164 }
11165 if (!this.audioDisabled_ && this.audioBuffer_) {
11166 this.audioBuffer_.updating = true;
11167 this.audioBuffer_.remove(start, end);
11168 }
11169
11170 // Remove Metadata Cues (id3)
11171 removeCuesFromTrack(start, end, this.metadataTrack_);
11172
11173 // Remove Any Captions
11174 if (this.inbandTextTracks_) {
11175 for (var track in this.inbandTextTracks_) {
11176 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
11177 }
11178 }
11179 }
11180
11181 /**
11182 * Process any segments that the muxer has output
11183 * Concatenate segments together based on type and append them into
11184 * their respective sourceBuffers
11185 *
11186 * @private
11187 */
11188
}, {
  key: 'processPendingSegments_',
  value: function processPendingSegments_() {
    // Accumulator for the transmuxed output: per-type segment lists and
    // byte totals, plus flattened caption and id3-metadata arrays.
    var sortedSegments = {
      video: {
        segments: [],
        bytes: 0
      },
      audio: {
        segments: [],
        bytes: 0
      },
      captions: [],
      metadata: []
    };

    if (!this.pendingBuffers_.length) {
      // Nothing was produced for this append.
      // We are no longer in the internal "updating" state
      this.trigger('updateend');
      this.bufferUpdating_ = false;
      return;
    }

    // Sort segments into separate video/audio arrays and
    // keep track of their total byte lengths
    sortedSegments = this.pendingBuffers_.reduce(function (segmentObj, segment) {
      var type = segment.type;
      var data = segment.data;
      var initSegment = segment.initSegment;

      segmentObj[type].segments.push(data);
      segmentObj[type].bytes += data.byteLength;

      // the last init segment seen for the type wins
      segmentObj[type].initSegment = initSegment;

      // Gather any captions into a single array
      if (segment.captions) {
        segmentObj.captions = segmentObj.captions.concat(segment.captions);
      }

      if (segment.info) {
        segmentObj[type].info = segment.info;
      }

      // Gather any metadata into a single array
      if (segment.metadata) {
        segmentObj.metadata = segmentObj.metadata.concat(segment.metadata);
      }

      return segmentObj;
    }, sortedSegments);

    // Create the real source buffers if they don't exist by now since we
    // finally are sure what tracks are contained in the source
    if (!this.videoBuffer_ && !this.audioBuffer_) {
      // Remove any codecs that may have been specified by default but
      // are no longer applicable now
      if (sortedSegments.video.bytes === 0) {
        this.videoCodec_ = null;
      }
      if (sortedSegments.audio.bytes === 0) {
        this.audioCodec_ = null;
      }

      this.createRealSourceBuffers_();
    }

    // surface stream information discovered by the transmuxer
    if (sortedSegments.audio.info) {
      this.mediaSource_.trigger({ type: 'audioinfo', info: sortedSegments.audio.info });
    }
    if (sortedSegments.video.info) {
      this.mediaSource_.trigger({ type: 'videoinfo', info: sortedSegments.video.info });
    }

    // the audio init segment is only prepended when flagged (e.g. after
    // a timestampOffset change or rendition switch)
    if (this.appendAudioInitSegment_) {
      if (!this.audioDisabled_ && this.audioBuffer_) {
        sortedSegments.audio.segments.unshift(sortedSegments.audio.initSegment);
        sortedSegments.audio.bytes += sortedSegments.audio.initSegment.byteLength;
      }
      this.appendAudioInitSegment_ = false;
    }

    var triggerUpdateend = false;

    // Merge multiple video and audio segments into one and append
    if (this.videoBuffer_ && sortedSegments.video.bytes) {
      // the video init segment is prepended on every append
      sortedSegments.video.segments.unshift(sortedSegments.video.initSegment);
      sortedSegments.video.bytes += sortedSegments.video.initSegment.byteLength;
      this.concatAndAppendSegments_(sortedSegments.video, this.videoBuffer_);
    } else if (this.videoBuffer_ && (this.audioDisabled_ || !this.audioBuffer_)) {
      // The transmuxer did not return any bytes of video, meaning it was all trimmed
      // for gop alignment. Since we have a video buffer and audio is disabled, updateend
      // will never be triggered by this source buffer, which will cause contrib-hls
      // to be stuck forever waiting for updateend. If audio is not disabled, updateend
      // will be triggered by the audio buffer, which will be sent upwards since the video
      // buffer will not be in an updating state.
      triggerUpdateend = true;
    }

    // Add text-track data for all
    addTextTrackData(this, sortedSegments.captions, sortedSegments.metadata);

    if (!this.audioDisabled_ && this.audioBuffer_) {
      this.concatAndAppendSegments_(sortedSegments.audio, this.audioBuffer_);
    }

    this.pendingBuffers_.length = 0;

    if (triggerUpdateend) {
      this.trigger('updateend');
    }

    // We are no longer in the internal "updating" state
    this.bufferUpdating_ = false;
  }
11304
11305 /**
11306 * Combine all segments into a single Uint8Array and then append them
11307 * to the destination buffer
11308 *
11309 * @param {Object} segmentObj
11310 * @param {SourceBuffer} destinationBuffer native source buffer to append data to
11311 * @private
11312 */
11313
11314 }, {
11315 key: 'concatAndAppendSegments_',
11316 value: function concatAndAppendSegments_(segmentObj, destinationBuffer) {
11317 var offset = 0;
11318 var tempBuffer = void 0;
11319
11320 if (segmentObj.bytes) {
11321 tempBuffer = new Uint8Array(segmentObj.bytes);
11322
11323 // Combine the individual segments into one large typed-array
11324 segmentObj.segments.forEach(function (segment) {
11325 tempBuffer.set(segment, offset);
11326 offset += segment.byteLength;
11327 });
11328
11329 try {
11330 destinationBuffer.updating = true;
11331 destinationBuffer.appendBuffer(tempBuffer);
11332 } catch (error) {
11333 if (this.mediaSource_.player_) {
11334 this.mediaSource_.player_.error({
11335 code: -3,
11336 type: 'APPEND_BUFFER_ERR',
11337 message: error.message,
11338 originalError: error
11339 });
11340 }
11341 }
11342 }
11343 }
11344
11345 /**
11346 * Emulate the native mediasource function. abort any soureBuffer
11347 * actions and throw out any un-appended data.
11348 *
11349 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
11350 */
11351
11352 }, {
11353 key: 'abort',
11354 value: function abort() {
11355 if (this.videoBuffer_) {
11356 this.videoBuffer_.abort();
11357 }
11358 if (!this.audioDisabled_ && this.audioBuffer_) {
11359 this.audioBuffer_.abort();
11360 }
11361 if (this.transmuxer_) {
11362 this.transmuxer_.postMessage({ action: 'reset' });
11363 }
11364 this.pendingBuffers_.length = 0;
11365 this.bufferUpdating_ = false;
11366 }
11367 }, {
11368 key: 'dispose',
11369 value: function dispose() {
11370 if (this.transmuxer_) {
11371 this.transmuxer_.terminate();
11372 }
11373 this.trigger('dispose');
11374 this.off();
11375 }
11376 }]);
11377 return VirtualSourceBuffer;
11378 }(videojs.EventTarget);
11379
11380 /**
11381 * @file html-media-source.js
11382 */
11383
11384 /**
11385 * Our MediaSource implementation in HTML, mimics native
11386 * MediaSource where/if possible.
11387 *
11388 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
11389 * @class HtmlMediaSource
11390 * @extends videojs.EventTarget
11391 */
11392
var HtmlMediaSource = function (_videojs$EventTarget) {
  inherits(HtmlMediaSource, _videojs$EventTarget);

  function HtmlMediaSource() {
    classCallCheck(this, HtmlMediaSource);

    var _this = possibleConstructorReturn(this, (HtmlMediaSource.__proto__ || Object.getPrototypeOf(HtmlMediaSource)).call(this));

    var property = void 0;

    _this.nativeMediaSource_ = new window_1.MediaSource();
    // delegate to the native MediaSource's methods by default
    for (property in _this.nativeMediaSource_) {
      if (!(property in HtmlMediaSource.prototype) && typeof _this.nativeMediaSource_[property] === 'function') {
        _this[property] = _this.nativeMediaSource_[property].bind(_this.nativeMediaSource_);
      }
    }

    // emulate `duration` and `seekable` until seeking can be
    // handled uniformly for live streams
    // see https://github.com/w3c/media-source/issues/5
    _this.duration_ = NaN;
    Object.defineProperty(_this, 'duration', {
      get: function get$$1() {
        // an Infinity (live) duration is only tracked on this emulation
        // layer, never pushed to the native MediaSource
        if (this.duration_ === Infinity) {
          return this.duration_;
        }
        return this.nativeMediaSource_.duration;
      },
      set: function set$$1(duration) {
        this.duration_ = duration;
        if (duration !== Infinity) {
          this.nativeMediaSource_.duration = duration;
          return;
        }
      }
    });
    Object.defineProperty(_this, 'seekable', {
      get: function get$$1() {
        // for live streams, seekable spans [0, native duration]; the
        // native duration is grown via addSeekableRange_
        if (this.duration_ === Infinity) {
          return videojs.createTimeRanges([[0, this.nativeMediaSource_.duration]]);
        }
        return this.nativeMediaSource_.seekable;
      }
    });

    Object.defineProperty(_this, 'readyState', {
      get: function get$$1() {
        return this.nativeMediaSource_.readyState;
      }
    });

    Object.defineProperty(_this, 'activeSourceBuffers', {
      get: function get$$1() {
        return this.activeSourceBuffers_;
      }
    });

    // the list of virtual and native SourceBuffers created by this
    // MediaSource
    _this.sourceBuffers = [];

    _this.activeSourceBuffers_ = [];

    /**
     * update the list of active source buffers based upon various
     * information from HLS and video.js
     *
     * @private
     */
    _this.updateActiveSourceBuffers_ = function () {
      // Retain the reference but empty the array
      _this.activeSourceBuffers_.length = 0;

      // If there is only one source buffer, then it will always be active and audio will
      // be disabled based on the codec of the source buffer
      if (_this.sourceBuffers.length === 1) {
        var sourceBuffer = _this.sourceBuffers[0];

        sourceBuffer.appendAudioInitSegment_ = true;
        sourceBuffer.audioDisabled_ = !sourceBuffer.audioCodec_;
        _this.activeSourceBuffers_.push(sourceBuffer);
        return;
      }

      // There are 2 source buffers, a combined (possibly video only) source buffer and
      // and an audio only source buffer.
      // By default, the audio in the combined virtual source buffer is enabled
      // and the audio-only source buffer (if it exists) is disabled.
      var disableCombined = false;
      var disableAudioOnly = true;

      // TODO: maybe we can store the sourcebuffers on the track objects?
      // safari may do something like this
      for (var i = 0; i < _this.player_.audioTracks().length; i++) {
        var track = _this.player_.audioTracks()[i];

        if (track.enabled && track.kind !== 'main') {
          // The enabled track is an alternate audio track so disable the audio in
          // the combined source buffer and enable the audio-only source buffer.
          disableCombined = true;
          disableAudioOnly = false;
          break;
        }
      }

      _this.sourceBuffers.forEach(function (sourceBuffer, index) {
        /* eslinst-disable */
        // TODO once codecs are required, we can switch to using the codecs to determine
        // what stream is the video stream, rather than relying on videoTracks
        /* eslinst-enable */

        sourceBuffer.appendAudioInitSegment_ = true;

        if (sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
          // combined
          sourceBuffer.audioDisabled_ = disableCombined;
        } else if (sourceBuffer.videoCodec_ && !sourceBuffer.audioCodec_) {
          // If the "combined" source buffer is video only, then we do not want
          // disable the audio-only source buffer (this is mostly for demuxed
          // audio and video hls)
          sourceBuffer.audioDisabled_ = true;
          disableAudioOnly = false;
        } else if (!sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
          // audio only
          // In the case of audio only with alternate audio and disableAudioOnly is true
          // this means we want to disable the audio on the alternate audio sourcebuffer
          // but not the main "combined" source buffer. The "combined" source buffer is
          // always at index 0, so this ensures audio won't be disabled in both source
          // buffers.
          sourceBuffer.audioDisabled_ = index ? disableAudioOnly : !disableAudioOnly;
          if (sourceBuffer.audioDisabled_) {
            return;
          }
        }

        _this.activeSourceBuffers_.push(sourceBuffer);
      });
    };

    // a rendition change invalidates the audio init segment, so every
    // virtual buffer must re-append it before its next media segment
    _this.onPlayerMediachange_ = function () {
      _this.sourceBuffers.forEach(function (sourceBuffer) {
        sourceBuffer.appendAudioInitSegment_ = true;
      });
    };

    // after the main SegmentLoader flushes, caption parsing state in
    // each transmuxer worker is reset as well
    _this.onHlsReset_ = function () {
      _this.sourceBuffers.forEach(function (sourceBuffer) {
        if (sourceBuffer.transmuxer_) {
          sourceBuffer.transmuxer_.postMessage({ action: 'resetCaptions' });
        }
      });
    };

    // propagate the latest stream-to-display time mapping to every buffer
    _this.onHlsSegmentTimeMapping_ = function (event) {
      _this.sourceBuffers.forEach(function (buffer) {
        return buffer.timeMapping_ = event.mapping;
      });
    };

    // Re-emit MediaSource events on the polyfill
    ['sourceopen', 'sourceclose', 'sourceended'].forEach(function (eventName) {
      this.nativeMediaSource_.addEventListener(eventName, this.trigger.bind(this));
    }, _this);

    // capture the associated player when the MediaSource is
    // successfully attached
    _this.on('sourceopen', function (event) {
      // Get the player this MediaSource is attached to by locating the
      // video element whose src is this MediaSource's object URL
      var video = document_1.querySelector('[src="' + _this.url_ + '"]');

      if (!video) {
        return;
      }

      _this.player_ = videojs(video.parentNode);

      if (!_this.player_) {
        return;
      }

      // hls-reset is fired by videojs.Hls on to the tech after the main SegmentLoader
      // resets its state and flushes the buffer
      _this.player_.tech_.on('hls-reset', _this.onHlsReset_);
      // hls-segment-time-mapping is fired by videojs.Hls on to the tech after the main
      // SegmentLoader inspects an MTS segment and has an accurate stream to display
      // time mapping
      _this.player_.tech_.on('hls-segment-time-mapping', _this.onHlsSegmentTimeMapping_);

      if (_this.player_.audioTracks && _this.player_.audioTracks()) {
        _this.player_.audioTracks().on('change', _this.updateActiveSourceBuffers_);
        _this.player_.audioTracks().on('addtrack', _this.updateActiveSourceBuffers_);
        _this.player_.audioTracks().on('removetrack', _this.updateActiveSourceBuffers_);
      }

      _this.player_.on('mediachange', _this.onPlayerMediachange_);
    });

    // when the stream ends, stretch the last id3 metadata cue out to
    // the final duration so it remains active until playback finishes
    _this.on('sourceended', function (event) {
      var duration = durationOfVideo(_this.duration);

      for (var i = 0; i < _this.sourceBuffers.length; i++) {
        var sourcebuffer = _this.sourceBuffers[i];
        var cues = sourcebuffer.metadataTrack_ && sourcebuffer.metadataTrack_.cues;

        if (cues && cues.length) {
          cues[cues.length - 1].endTime = duration;
        }
      }
    });

    // explicitly terminate any WebWorkers that were created
    // by SourceHandlers
    _this.on('sourceclose', function (event) {
      this.sourceBuffers.forEach(function (sourceBuffer) {
        if (sourceBuffer.transmuxer_) {
          sourceBuffer.transmuxer_.terminate();
        }
      });

      this.sourceBuffers.length = 0;
      if (!this.player_) {
        return;
      }

      if (this.player_.audioTracks && this.player_.audioTracks()) {
        this.player_.audioTracks().off('change', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().off('addtrack', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().off('removetrack', this.updateActiveSourceBuffers_);
      }

      // We can only change this if the player hasn't been disposed of yet
      // because `off` eventually tries to use the el_ property. If it has
      // been disposed of, then don't worry about it because there are no
      // event handlers left to unbind anyway
      if (this.player_.el_) {
        this.player_.off('mediachange', this.onPlayerMediachange_);
      }

      if (this.player_.tech_ && this.player_.tech_.el_) {
        this.player_.tech_.off('hls-reset', this.onHlsReset_);
        this.player_.tech_.off('hls-segment-time-mapping', this.onHlsSegmentTimeMapping_);
      }
    });
    return _this;
  }

  /**
   * Add a range that can now be seeked to.
   *
   * @param {Double} start where to start the addition
   * @param {Double} end where to end the addition
   * @private
   */


  createClass(HtmlMediaSource, [{
    key: 'addSeekableRange_',
    value: function addSeekableRange_(start, end) {
      var error = void 0;

      if (this.duration !== Infinity) {
        error = new Error('MediaSource.addSeekableRange() can only be invoked ' + 'when the duration is Infinity');
        error.name = 'InvalidStateError';
        error.code = 11;
        throw error;
      }

      // grow the native duration so the emulated `seekable` (see the
      // constructor) covers the new range
      if (end > this.nativeMediaSource_.duration || isNaN(this.nativeMediaSource_.duration)) {
        this.nativeMediaSource_.duration = end;
      }
    }

    /**
     * Add a source buffer to the media source.
     *
     * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
     * @param {String} type the content-type of the content
     * @return {Object} the created source buffer
     */

  }, {
    key: 'addSourceBuffer',
    value: function addSourceBuffer(type) {
      var buffer = void 0;
      var parsedType = parseContentType(type);

      // Create a VirtualSourceBuffer to transmux MPEG-2 transport
      // stream segments into fragmented MP4s
      if (/^(video|audio)\/mp2t$/i.test(parsedType.type)) {
        var codecs = [];

        if (parsedType.parameters && parsedType.parameters.codecs) {
          codecs = parsedType.parameters.codecs.split(',');
          codecs = translateLegacyCodecs(codecs);
          codecs = codecs.filter(function (codec) {
            return isAudioCodec(codec) || isVideoCodec(codec);
          });
        }

        // fall back to a default H.264/AAC codec pair when no usable
        // codecs were declared
        if (codecs.length === 0) {
          codecs = ['avc1.4d400d', 'mp4a.40.2'];
        }

        buffer = new VirtualSourceBuffer(this, codecs);

        if (this.sourceBuffers.length !== 0) {
          // If another VirtualSourceBuffer already exists, then we are creating a
          // SourceBuffer for an alternate audio track and therefore we know that
          // the source has both an audio and video track.
          // That means we should trigger the manual creation of the real
          // SourceBuffers instead of waiting for the transmuxer to return data
          this.sourceBuffers[0].createRealSourceBuffers_();
          buffer.createRealSourceBuffers_();

          // Automatically disable the audio on the first source buffer if
          // a second source buffer is ever created
          this.sourceBuffers[0].audioDisabled_ = true;
        }
      } else {
        // delegate to the native implementation
        buffer = this.nativeMediaSource_.addSourceBuffer(type);
      }

      this.sourceBuffers.push(buffer);
      return buffer;
    }
  }, {
    key: 'dispose',
    value: function dispose() {
      // announce disposal before unbinding so listeners can react
      this.trigger('dispose');
      this.off();

      this.sourceBuffers.forEach(function (buffer) {
        if (buffer.dispose) {
          buffer.dispose();
        }
      });

      this.sourceBuffers.length = 0;
    }
  }]);
  return HtmlMediaSource;
}(videojs.EventTarget);
11737
11738 /**
11739 * @file videojs-contrib-media-sources.js
11740 */
// module-level counter historically used to mint unique emulated
// MediaSource object URLs
var urlCount = 0;

// ------------
// Media Source
// ------------

// store references to the media sources so they can be connected
// to a video element (a swf object)
// TODO: can we store this somewhere local to this module?
videojs.mediaSources = {};
11751
11752 /**
11753 * Provide a method for a swf object to notify JS that a
11754 * media source is now open.
11755 *
11756 * @param {String} msObjectURL string referencing the MSE Object URL
11757 * @param {String} swfId the swf id
11758 */
var open = function open(msObjectURL, swfId) {
  // Look up the emulated MediaSource registered under this object URL
  // and fire 'sourceopen' on it, passing the swf id along.
  var mediaSource = videojs.mediaSources[msObjectURL];

  if (!mediaSource) {
    throw new Error('Media Source not found (Video.js)');
  }

  mediaSource.trigger({ type: 'sourceopen', swfId: swfId });
};
11768
11769 /**
11770 * Check to see if the native MediaSource object exists and supports
11771 * an MP4 container with both H.264 video and AAC-LC audio.
11772 *
11773 * @return {Boolean} if native media sources are supported
11774 */
var supportsNativeMediaSources = function supportsNativeMediaSources() {
  // Native MSE is usable only when MediaSource exists, exposes
  // isTypeSupported, and accepts an H.264/AAC-LC fMP4 mime type.
  var nativeMediaSource = window_1.MediaSource;

  return !!nativeMediaSource && !!nativeMediaSource.isTypeSupported && nativeMediaSource.isTypeSupported('video/mp4;codecs="avc1.4d400d,mp4a.40.2"');
};
11778
11779 /**
11780 * An emulation of the MediaSource API so that we can support
11781 * native and non-native functionality. returns an instance of
11782 * HtmlMediaSource.
11783 *
11784 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/MediaSource
11785 */
var MediaSource = function MediaSource() {
  // Expose the helpers on the instance as well as statically (below)
  // for consumers that reach them through an instance reference.
  this.MediaSource = {
    open: open,
    supportsNativeMediaSources: supportsNativeMediaSources
  };

  if (supportsNativeMediaSources()) {
    return new HtmlMediaSource();
  }

  // wording fixed: was "Cannot use create a virtual MediaSource..."
  throw new Error('Cannot create a virtual MediaSource for this video');
};

MediaSource.open = open;
MediaSource.supportsNativeMediaSources = supportsNativeMediaSources;
11801
11802 /**
11803 * A wrapper around the native URL for our MSE object
11804 * implementation, this object is exposed under videojs.URL
11805 *
11806 * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/URL
11807 */
var URL$1 = {
  /**
   * A wrapper around the native createObjectURL for our objects.
   * This function maps a native or emulated mediaSource to a blob
   * url so that it can be loaded into video.js
   *
   * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL
   * @param {MediaSource} object the object to create a blob url to
   * @return {String} the blob url
   */
  createObjectURL: function createObjectURL(object) {
    var url = void 0;

    // For our emulated MediaSource, the blob URL must point at the
    // underlying native MediaSource so the media element can attach.
    if (object instanceof HtmlMediaSource) {
      url = window_1.URL.createObjectURL(object.nativeMediaSource_);
      object.url_ = url;
      return url;
    }

    // Anything else delegates straight to the native implementation.
    //
    // NOTE: the original code also contained a 'blob:vjs-media-source/'
    // fallback that registered the object in `videojs.mediaSources`,
    // but the two `instanceof HtmlMediaSource` branches were exhaustive
    // so that fallback was unreachable dead code and has been removed.
    url = window_1.URL.createObjectURL(object);
    object.url_ = url;
    return url;
  }
};
11847
// expose the MSE shims on the video.js namespace
videojs.MediaSource = MediaSource;
videojs.URL = URL$1;
11850
/**
 * Unwrap a transpiled ES-module namespace object to its default export;
 * any non-module value is passed straight through.
 *
 * @param {*} ex possibly a module namespace object
 * @return {*} the default export when present, otherwise `ex`
 */
function _interopDefault$1(ex) {
  var isModuleLike = ex && typeof ex === 'object' && 'default' in ex;
  return isModuleLike ? ex['default'] : ex;
}
11854
// local handles on the interop-unwrapped dependencies
var URLToolkit = _interopDefault$1(urlToolkit);
var window$1 = _interopDefault$1(window_1);
11857
/**
 * Resolve `relativeUrl` against `baseUrl`, first absolutizing a relative
 * base against the current page location.
 *
 * @param {string} baseUrl url to resolve against
 * @param {string} relativeUrl url to resolve
 * @return {string} the absolute url
 */
var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
  // already absolute (has a scheme)? nothing to do
  if (/^[a-z]+:/i.test(relativeUrl)) {
    return relativeUrl;
  }

  // a base without "//" is itself relative; anchor it to the page first
  if (!/\/\//i.test(baseUrl)) {
    var pageHref = window$1.location && window$1.location.href || '';
    baseUrl = URLToolkit.buildAbsoluteURL(pageHref, baseUrl);
  }

  return URLToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
};

var resolveUrl_1 = resolveUrl$1;
11873
/**
 * Pick the `default` export off an interop module object; non-objects and
 * objects without a `default` key are returned unchanged.
 */
function _interopDefault$2(ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
11877
var window$2 = _interopDefault$2(window_1);

/**
 * Base64-decode to a binary string, preferring the browser's native atob
 * and falling back to Node's Buffer.
 *
 * @param {string} s base64-encoded text
 * @return {string} decoded binary string
 */
var atob = function atob(s) {
  if (window$2.atob) {
    return window$2.atob(s);
  }
  return Buffer.from(s, 'base64').toString('binary');
};
11883
/**
 * Decode a base64 string into a Uint8Array of its raw bytes.
 *
 * @param {string} b64Text base64-encoded input
 * @return {Uint8Array} decoded bytes
 */
function decodeB64ToUint8Array$1(b64Text) {
  var binary = atob(b64Text);
  var bytes = new Uint8Array(binary.length);

  for (var idx = 0; idx < binary.length; idx++) {
    bytes[idx] = binary.charCodeAt(idx);
  }

  return bytes;
}

var decodeB64ToUint8Array_1 = decodeB64ToUint8Array$1;
11896
// XML name grammar (productions [4], [4a], [5] of the XML spec):
//[4] NameStartChar ::= ":" | [A-Z] | "_" | [a-z] | [#xC0-#xD6] | [#xD8-#xF6] | [#xF8-#x2FF] | [#x370-#x37D] | [#x37F-#x1FFF] | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] | [#x3001-#xD7FF] | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] | [#x10000-#xEFFFF]
//[4a] NameChar ::= NameStartChar | "-" | "." | [0-9] | #xB7 | [#x0300-#x036F] | [#x203F-#x2040]
//[5] Name ::= NameStartChar (NameChar)*
var nameStartChar = /[A-Z_a-z\xC0-\xD6\xD8-\xF6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/; //\u10000-\uEFFFF
// NameChar: NameStartChar plus "-", ".", digits, middle dot and combining marks
// (built by splicing nameStartChar's character class between fresh brackets)
var nameChar = new RegExp("[\\-\\.0-9" + nameStartChar.source.slice(1, -1) + "\\u00B7\\u0300-\\u036F\\u203F-\\u2040]");
// Full QName: Name, optionally "prefix:localName" with a single colon
var tagNamePattern = new RegExp('^' + nameStartChar.source + nameChar.source + '*(?:\:' + nameStartChar.source + nameChar.source + '*)?$');
//var tagNamePattern = /^[a-zA-Z_][\w\-\.]*(?:\:[a-zA-Z_][\w\-\.]*)?$/
//var handlers = 'resolveEntity,getExternalSubset,characters,endDocument,endElement,endPrefixMapping,ignorableWhitespace,processingInstruction,setDocumentLocator,skippedEntity,startDocument,startElement,startPrefixMapping,notationDecl,unparsedEntityDecl,error,fatalError,warning,attributeDecl,elementDecl,externalEntityDecl,internalEntityDecl,comment,endCDATA,endDTD,endEntity,startCDATA,startDTD,startEntity'.split(',')
11905
// Scanner states used by parseElementStartPart's start-tag state machine:
//S_TAG, S_ATTR, S_EQ, S_ATTR_NOQUOT_VALUE
//S_ATTR_SPACE, S_ATTR_END, S_TAG_SPACE, S_TAG_CLOSE
var S_TAG = 0; //tag name offerring
var S_ATTR = 1; //attr name offerring
var S_ATTR_SPACE = 2; //attr name end and space offer
var S_EQ = 3; //=space?
var S_ATTR_NOQUOT_VALUE = 4; //attr value(no quot value only)
var S_ATTR_END = 5; //attr value end and no space(quot end)
var S_TAG_SPACE = 6; //(attr value end || tag end ) && (space offer)
var S_TAG_CLOSE = 7; //closed el<el />
11916
/**
 * Minimal SAX-style XML reader. Assign `domBuilder` (and optionally
 * `errorHandler`) on an instance before calling `parse`.
 */
function XMLReader() {}

XMLReader.prototype = {
  /**
   * Parse `source`, streaming SAX events into this.domBuilder.
   *
   * @param {string} source XML text
   * @param {Object} defaultNSMap prefix -> namespace-uri defaults (copied, never mutated)
   * @param {Object} entityMap entity name -> replacement text
   */
  parse: function parse(source, defaultNSMap, entityMap) {
    var builder = this.domBuilder;
    builder.startDocument();
    // work on a private copy so the caller's map is never mutated
    var nsMapCopy = {};
    _copy(defaultNSMap, nsMapCopy);
    _parse(source, nsMapCopy, entityMap, builder, this.errorHandler);
    builder.endDocument();
  }
};
/**
 * Core SAX scan loop: walks `source` from '<' to '<', dispatching on the
 * character after each '<' (end tag '/', processing instruction '?',
 * comment/CDATA/DOCTYPE '!', or a start tag) and emitting events on
 * `domBuilder`. Recoverable problems go through `errorHandler`; the loop
 * only returns when no further '<' is found.
 *
 * @param {string} source XML text
 * @param {Object} defaultNSMapCopy private prefix->uri map (owned by this call)
 * @param {Object} entityMap entity name -> replacement text
 * @param {Object} domBuilder SAX content-handler sink
 * @param {Object} errorHandler receives warning/error/fatalError
 */
function _parse(source, defaultNSMapCopy, entityMap, domBuilder, errorHandler) {
  // Expand a numeric code point, using a surrogate pair above the BMP.
  function fixedFromCharCode(code) {
    // String.prototype.fromCharCode does not supports
    // > 2 bytes unicode chars directly
    if (code > 0xffff) {
      code -= 0x10000;
      var surrogate1 = 0xd800 + (code >> 10),
          surrogate2 = 0xdc00 + (code & 0x3ff);

      return String.fromCharCode(surrogate1, surrogate2);
    } else {
      return String.fromCharCode(code);
    }
  }
  // Replace a matched "&name;" / "&#nnn;" / "&#xhh;" with its expansion;
  // unknown named entities are reported and left as-is.
  function entityReplacer(a) {
    var k = a.slice(1, -1);
    if (k in entityMap) {
      return entityMap[k];
    } else if (k.charAt(0) === '#') {
      return fixedFromCharCode(parseInt(k.substr(1).replace('x', '0x')));
    } else {
      errorHandler.error('entity not found:' + a);
      return a;
    }
  }
  // Emit the text between `start` and `end` (entities expanded) and advance.
  function appendText(end) {
    //has some bugs
    if (end > start) {
      var xt = source.substring(start, end).replace(/&#?\w+;/g, entityReplacer);
      locator && position(start);
      domBuilder.characters(xt, 0, end - start);
      start = end;
    }
  }
  // Update locator.lineNumber/columnNumber for source offset `p`; relies on
  // linePattern's sticky lastIndex, so positions must be requested in order.
  function position(p, m) {
    while (p >= lineEnd && (m = linePattern.exec(source))) {
      lineStart = m.index;
      lineEnd = lineStart + m[0].length;
      locator.lineNumber++;
      //console.log('line++:',locator,startPos,endPos)
    }
    locator.columnNumber = p - lineStart + 1;
  }
  var lineStart = 0;
  var lineEnd = 0;
  var linePattern = /.*(?:\r\n?|\n)|.*$/g;
  var locator = domBuilder.locator;

  // stack of open elements; the sentinel carries the default namespace map
  var parseStack = [{ currentNSMap: defaultNSMapCopy }];
  var closeMap = {}; // memo for fixSelfClosed lookups, keyed by tag name
  var start = 0;
  while (true) {
    try {
      var tagStart = source.indexOf('<', start);
      if (tagStart < 0) {
        // no more markup: flush any trailing non-whitespace as a text node
        if (!source.substr(start).match(/^\s*$/)) {
          var doc = domBuilder.doc;
          var text = doc.createTextNode(source.substr(start));
          doc.appendChild(text);
          domBuilder.currentElement = text;
        }
        return;
      }
      if (tagStart > start) {
        appendText(tagStart);
      }
      switch (source.charAt(tagStart + 1)) {
        case '/':
          // end tag: pop the matching open element and emit endElement
          var end = source.indexOf('>', tagStart + 3);
          var tagName = source.substring(tagStart + 2, end);
          var config = parseStack.pop();
          if (end < 0) {

            tagName = source.substring(tagStart + 2).replace(/[\s<].*/, '');
            //console.error('#@@@@@@'+tagName)
            errorHandler.error("end tag name: " + tagName + ' is not complete:' + config.tagName);
            end = tagStart + 1 + tagName.length;
          } else if (tagName.match(/\s</)) {
            tagName = tagName.replace(/[\s<].*/, '');
            errorHandler.error("end tag name: " + tagName + ' maybe not complete');
            end = tagStart + 1 + tagName.length;
          }
          //console.error(parseStack.length,parseStack)
          //console.error(config);
          var localNSMap = config.localNSMap;
          var endMatch = config.tagName == tagName;
          var endIgnoreCaseMach = endMatch || config.tagName && config.tagName.toLowerCase() == tagName.toLowerCase();
          if (endIgnoreCaseMach) {
            domBuilder.endElement(config.uri, config.localName, tagName);
            if (localNSMap) {
              for (var prefix in localNSMap) {
                domBuilder.endPrefixMapping(prefix);
              }
            }
            if (!endMatch) {
              errorHandler.fatalError("end tag name: " + tagName + ' is not match the current start tagName:' + config.tagName);
            }
          } else {
            // mismatched end tag: put the element back and keep scanning
            parseStack.push(config);
          }

          end++;
          break;
        // end elment
        case '?':
          // <?...?>
          locator && position(tagStart);
          end = parseInstruction(source, tagStart, domBuilder);
          break;
        case '!':
          // <!doctype,<![CDATA,<!--
          locator && position(tagStart);
          end = parseDCC(source, tagStart, domBuilder, errorHandler);
          break;
        default:
          // start tag
          locator && position(tagStart);
          var el = new ElementAttributes();
          var currentNSMap = parseStack[parseStack.length - 1].currentNSMap;
          //elStartEnd
          var end = parseElementStartPart(source, tagStart, el, currentNSMap, entityReplacer, errorHandler);
          var len = el.length;

          // treat a tag with no later matching close tag as self-closed
          if (!el.closed && fixSelfClosed(source, end, el.tagName, closeMap)) {
            el.closed = true;
            if (!entityMap.nbsp) {
              errorHandler.warning('unclosed xml attribute');
            }
          }
          if (locator && len) {
            var locator2 = copyLocator(locator, {});
            //try{//attribute position fixed
            for (var i = 0; i < len; i++) {
              var a = el[i];
              position(a.offset);
              a.locator = copyLocator(locator, {});
            }
            //}catch(e){console.error('@@@@@'+e)}
            domBuilder.locator = locator2;
            if (appendElement(el, domBuilder, currentNSMap)) {
              parseStack.push(el);
            }
            domBuilder.locator = locator;
          } else {
            if (appendElement(el, domBuilder, currentNSMap)) {
              parseStack.push(el);
            }
          }

          if (el.uri === 'http://www.w3.org/1999/xhtml' && !el.closed) {
            end = parseHtmlSpecialContent(source, end, el.tagName, entityReplacer, domBuilder);
          } else {
            end++;
          }
      }
    } catch (e) {
      errorHandler.error('element parse error: ' + e);
      //errorHandler.error('element parse error: '+e);
      end = -1;
      //throw e;
    }
    if (end > start) {
      start = end;
    } else {
      //TODO: the sax scan may fall back here; risk of wrong position info
      // recovery path: skip one character past the '<' and re-emit as text
      appendText(Math.max(tagStart, start) + 1);
    }
  }
}
/**
 * Copy the line/column position from locator `f` onto `t`.
 *
 * @return {Object} `t`, for chaining
 */
function copyLocator(f, t) {
  var target = t;
  target.lineNumber = f.lineNumber;
  target.columnNumber = f.columnNumber;
  return target;
}
12101
/**
 * Scan the start tag beginning at `start` (which points at '<'), collecting
 * the tag name and attributes into `el`. Implemented as a character-driven
 * state machine over the S_* states; several switch cases fall through on
 * purpose.
 * @see #appendElement(source,elStartEnd,el,selfClosed,entityReplacer,domBuilder,parseStack);
 * @return end of the elementStartPart(end of elementEndPart for selfClosed el)
 */
function parseElementStartPart(source, start, el, currentNSMap, entityReplacer, errorHandler) {
  var attrName;
  var value;
  var p = ++start;
  var s = S_TAG; //status
  while (true) {
    var c = source.charAt(p);
    switch (c) {
      case '=':
        if (s === S_ATTR) {
          //attrName
          attrName = source.slice(start, p);
          s = S_EQ;
        } else if (s === S_ATTR_SPACE) {
          s = S_EQ;
        } else {
          //fatalError: equal must after attrName or space after attrName
          throw new Error('attribute equal must after attrName');
        }
        break;
      case '\'':
      case '"':
        if (s === S_EQ || s === S_ATTR //|| s == S_ATTR_SPACE
        ) {
          //equal
          if (s === S_ATTR) {
            errorHandler.warning('attribute value must after "="');
            attrName = source.slice(start, p);
          }
          start = p + 1;
          p = source.indexOf(c, start); // jump to the closing quote
          if (p > 0) {
            value = source.slice(start, p).replace(/&#?\w+;/g, entityReplacer);
            el.add(attrName, value, start - 1);
            s = S_ATTR_END;
          } else {
            //fatalError: no end quot match
            throw new Error('attribute value no end \'' + c + '\' match');
          }
        } else if (s == S_ATTR_NOQUOT_VALUE) {
          value = source.slice(start, p).replace(/&#?\w+;/g, entityReplacer);
          //console.log(attrName,value,start,p)
          el.add(attrName, value, start);
          //console.dir(el)
          errorHandler.warning('attribute "' + attrName + '" missed start quot(' + c + ')!!');
          start = p + 1;
          s = S_ATTR_END;
        } else {
          //fatalError: no equal before
          throw new Error('attribute value must after "="');
        }
        break;
      case '/':
        switch (s) {
          case S_TAG:
            el.setTagName(source.slice(start, p));
          // falls through: '/' right after the name also self-closes the tag
          case S_ATTR_END:
          case S_TAG_SPACE:
          case S_TAG_CLOSE:
            s = S_TAG_CLOSE;
            el.closed = true;
          // falls through: a '/' inside these states is simply ignored
          case S_ATTR_NOQUOT_VALUE:
          case S_ATTR:
          case S_ATTR_SPACE:
            break;
          //case S_EQ:
          default:
            throw new Error("attribute invalid close char('/')");
        }
        break;
      case '':
        //end document
        //throw new Error('unexpected end of input')
        errorHandler.error('unexpected end of input');
        if (s == S_TAG) {
          el.setTagName(source.slice(start, p));
        }
        return p;
      case '>':
        switch (s) {
          case S_TAG:
            el.setTagName(source.slice(start, p));
          // falls through
          case S_ATTR_END:
          case S_TAG_SPACE:
          case S_TAG_CLOSE:
            break; //normal
          case S_ATTR_NOQUOT_VALUE: //Compatible state
          case S_ATTR:
            value = source.slice(start, p);
            if (value.slice(-1) === '/') {
              el.closed = true;
              value = value.slice(0, -1);
            }
          // falls through
          case S_ATTR_SPACE:
            if (s === S_ATTR_SPACE) {
              value = attrName;
            }
            if (s == S_ATTR_NOQUOT_VALUE) {
              errorHandler.warning('attribute "' + value + '" missed quot(")!!');
              el.add(attrName, value.replace(/&#?\w+;/g, entityReplacer), start);
            } else {
              // bare attribute (HTML-style boolean); warn unless it is a
              // known HTML boolean attribute in the xhtml default namespace
              if (currentNSMap[''] !== 'http://www.w3.org/1999/xhtml' || !value.match(/^(?:disabled|checked|selected)$/i)) {
                errorHandler.warning('attribute "' + value + '" missed value!! "' + value + '" instead!!');
              }
              el.add(value, value, start);
            }
            break;
          case S_EQ:
            throw new Error('attribute value missed!!');
        }
        // console.log(tagName,tagNamePattern,tagNamePattern.test(tagName))
        return p;
      /*xml space '\x20' | #x9 | #xD | #xA; */
      case "\x80":
        c = ' ';
      // falls through: treat \x80 as a space in the default branch
      default:
        if (c <= ' ') {
          //space
          switch (s) {
            case S_TAG:
              el.setTagName(source.slice(start, p)); //tagName
              s = S_TAG_SPACE;
              break;
            case S_ATTR:
              attrName = source.slice(start, p);
              s = S_ATTR_SPACE;
              break;
            case S_ATTR_NOQUOT_VALUE:
              var value = source.slice(start, p).replace(/&#?\w+;/g, entityReplacer);
              errorHandler.warning('attribute "' + value + '" missed quot(")!!');
              el.add(attrName, value, start);
            // falls through
            case S_ATTR_END:
              s = S_TAG_SPACE;
              break;
            //case S_TAG_SPACE:
            //case S_EQ:
            //case S_ATTR_SPACE:
            //	void();break;
            //case S_TAG_CLOSE:
            //ignore warning
          }
        } else {
          //not space
          //S_TAG, S_ATTR, S_EQ, S_ATTR_NOQUOT_VALUE
          //S_ATTR_SPACE, S_ATTR_END, S_TAG_SPACE, S_TAG_CLOSE
          switch (s) {
            //case S_TAG:void();break;
            //case S_ATTR:void();break;
            //case S_ATTR_NOQUOT_VALUE:void();break;
            case S_ATTR_SPACE:
              // previous attribute had no value: record it as name=name
              var tagName = el.tagName;
              if (currentNSMap[''] !== 'http://www.w3.org/1999/xhtml' || !attrName.match(/^(?:disabled|checked|selected)$/i)) {
                errorHandler.warning('attribute "' + attrName + '" missed value!! "' + attrName + '" instead2!!');
              }
              el.add(attrName, attrName, start);
              start = p;
              s = S_ATTR;
              break;
            case S_ATTR_END:
              errorHandler.warning('attribute space is required"' + attrName + '"!!');
            // falls through
            case S_TAG_SPACE:
              s = S_ATTR;
              start = p;
              break;
            case S_EQ:
              s = S_ATTR_NOQUOT_VALUE;
              start = p;
              break;
            case S_TAG_CLOSE:
              throw new Error("elements closed character '/' and '>' must be connected to");
          }
        }
    } //end outer switch
    //console.log('p++',p)
    p++;
  }
}
/**
 * Resolve namespaces for a completed start tag and emit startElement (plus
 * endElement immediately, for self-closed elements). Note: this function
 * deliberately leans on `var` hoisting — `localName`, `prefix` and
 * `nsPrefix` are declared inside the first branch but assigned in both.
 * @return true if has new namespace define
 */
function appendElement(el, domBuilder, currentNSMap) {
  var tagName = el.tagName;
  var localNSMap = null;
  //var currentNSMap = parseStack[parseStack.length-1].currentNSMap;
  var i = el.length;
  // first pass: split each qName into prefix/localName and fold xmlns /
  // xmlns:* declarations into a fresh localNSMap + copied currentNSMap
  while (i--) {
    var a = el[i];
    var qName = a.qName;
    var value = a.value;
    var nsp = qName.indexOf(':');
    if (nsp > 0) {
      var prefix = a.prefix = qName.slice(0, nsp);
      var localName = qName.slice(nsp + 1);
      var nsPrefix = prefix === 'xmlns' && localName;
    } else {
      localName = qName;
      prefix = null;
      nsPrefix = qName === 'xmlns' && '';
    }
    //can not set prefix,because prefix !== ''
    a.localName = localName;
    //prefix == null for no ns prefix attribute
    if (nsPrefix !== false) {
      //hack!!
      if (localNSMap == null) {
        localNSMap = {};
        //console.log(currentNSMap,0)
        // copy-on-write: shadow the inherited map before adding declarations
        _copy(currentNSMap, currentNSMap = {});
        //console.log(currentNSMap,1)
      }
      currentNSMap[nsPrefix] = localNSMap[nsPrefix] = value;
      a.uri = 'http://www.w3.org/2000/xmlns/';
      domBuilder.startPrefixMapping(nsPrefix, value);
    }
  }
  var i = el.length;
  // second pass: assign every prefixed (non-xmlns) attribute its uri
  while (i--) {
    a = el[i];
    var prefix = a.prefix;
    if (prefix) {
      //no prefix attribute has no namespace
      if (prefix === 'xml') {
        a.uri = 'http://www.w3.org/XML/1998/namespace';
      }if (prefix !== 'xmlns') {
        a.uri = currentNSMap[prefix || ''];

        //{console.log('###'+a.qName,domBuilder.locator.systemId+'',currentNSMap,a.uri)}
      }
    }
  }
  var nsp = tagName.indexOf(':');
  if (nsp > 0) {
    prefix = el.prefix = tagName.slice(0, nsp);
    localName = el.localName = tagName.slice(nsp + 1);
  } else {
    prefix = null; //important!!
    localName = el.localName = tagName;
  }
  //no prefix element has default namespace
  var ns = el.uri = currentNSMap[prefix || ''];
  domBuilder.startElement(ns, localName, tagName, el);
  //endPrefixMapping and startPrefixMapping have not any help for dom builder
  //localNSMap = null
  if (el.closed) {
    domBuilder.endElement(ns, localName, tagName);
    if (localNSMap) {
      for (prefix in localNSMap) {
        domBuilder.endPrefixMapping(prefix);
      }
    }
  } else {
    // still open: stash the maps so the matching end tag can unwind them
    el.currentNSMap = currentNSMap;
    el.localNSMap = localNSMap;
    //parseStack.push(el);
    return true;
  }
}
/**
 * Handle the raw-text content of HTML script/textarea elements, emitting it
 * as character data instead of parsing it as markup.
 *
 * @param {string} source full document text
 * @param {number} elStartEnd index of the start tag's '>'
 * @param {string} tagName element name (case-insensitive match)
 * @param {Function} entityReplacer expands &entity; references
 * @param {Object} domBuilder receives characters() events
 * @return {number} index at which the caller should resume scanning
 */
function parseHtmlSpecialContent(source, elStartEnd, tagName, entityReplacer, domBuilder) {
  // only script/textarea carry raw-text content in HTML
  if (!/^(?:script|textarea)$/i.test(tagName)) {
    return elStartEnd + 1;
  }

  var closeStart = source.indexOf('</' + tagName + '>', elStartEnd);
  var text = source.substring(elStartEnd + 1, closeStart);

  // content without '&' or '<' is handled fine by the normal text path
  if (!/[&<]/.test(text)) {
    return elStartEnd + 1;
  }

  if (/^script$/i.test(tagName)) {
    // script bodies are emitted verbatim, entities untouched
    domBuilder.characters(text, 0, text.length);
    return closeStart;
  }

  // textarea: expand entities before emitting
  text = text.replace(/&#?\w+;/g, entityReplacer);
  domBuilder.characters(text, 0, text.length);
  return closeStart;
}
/**
 * Decide whether an unclosed element should be treated as self-closed:
 * true when no matching end tag appears at or after `elStartEnd`. The
 * last-seen end-tag position is memoized per tag name in `closeMap`.
 *
 * @param {string} source full document text
 * @param {number} elStartEnd index where the start tag ended
 * @param {string} tagName element name
 * @param {Object} closeMap per-parse memo of end-tag positions
 * @return {boolean} true when the element has no later close tag
 */
function fixSelfClosed(source, elStartEnd, tagName, closeMap) {
  var pos = closeMap[tagName];

  if (pos == null) {
    // look for a complete end tag first, then a forgotten/truncated one
    pos = source.lastIndexOf('</' + tagName + '>');
    if (pos < elStartEnd) {
      pos = source.lastIndexOf('</' + tagName);
    }
    closeMap[tagName] = pos;
  }

  return pos < elStartEnd;
}
/**
 * Shallow-copy every enumerable property (own and inherited) of `source`
 * onto `target`. Uses for-in on purpose so inherited entries carry over.
 */
function _copy(source, target) {
  for (var key in source) {
    target[key] = source[key];
  }
}
/**
 * Parse a "<!" construct starting at `start`: comment (<!--),
 * CDATA section (<![CDATA[), or DOCTYPE declaration.
 *
 * @return {number} index just past the construct, or -1 on error
 */
function parseDCC(source, start, domBuilder, errorHandler) {
  //sure start with '<!'
  var next = source.charAt(start + 2);
  switch (next) {
    case '-':
      if (source.charAt(start + 3) === '-') {
        var end = source.indexOf('-->', start + 4);
        //append comment source.substring(4,end)//<!--
        if (end > start) {
          domBuilder.comment(source, start + 4, end - start - 4);
          return end + 3;
        } else {
          errorHandler.error("Unclosed comment");
          return -1;
        }
      } else {
        //error
        return -1;
      }
    default:
      if (source.substr(start + 3, 6) == 'CDATA[') {
        // NOTE(review): end is -1 for an unterminated CDATA section, making
        // the characters() length negative and the return value 2 — confirm
        // how the caller's recovery path handles this
        var end = source.indexOf(']]>', start + 9);
        domBuilder.startCDATA();
        domBuilder.characters(source, start + 9, end - start - 9);
        domBuilder.endCDATA();
        return end + 3;
      }
      //<!DOCTYPE
      //startDTD(java.lang.String name, java.lang.String publicId, java.lang.String systemId)
      var matchs = split(source, start);
      var len = matchs.length;
      if (len > 1 && /!doctype/i.test(matchs[0][0])) {
        var name = matchs[1][0];
        // pubid/sysid are quoted tokens; quotes are stripped below
        var pubid = len > 3 && /^public$/i.test(matchs[2][0]) && matchs[3][0];
        var sysid = len > 4 && matchs[4][0];
        var lastMatch = matchs[len - 1];
        domBuilder.startDTD(name, pubid && pubid.replace(/^(['"])(.*?)\1$/, '$2'), sysid && sysid.replace(/^(['"])(.*?)\1$/, '$2'));
        domBuilder.endDTD();

        return lastMatch.index + lastMatch[0].length;
      }
  }
  return -1;
}
12448
/**
 * Parse a processing instruction ("<?target data?>") starting at `start`
 * (which points at '<'), firing domBuilder.processingInstruction(target, data).
 *
 * @return {number} index just past the closing "?>", or -1 on malformed input
 */
function parseInstruction(source, start, domBuilder) {
  var end = source.indexOf('?>', start);

  // `start` points at '<', so a real terminator always satisfies end > start.
  // The previous truthiness check (`if (end)`) also let end === -1 through
  // and only produced -1 later by accident (substring(start, -1) === '').
  if (end > start) {
    var match = source.substring(start, end).match(/^<\?(\S*)\s*([\s\S]*?)\s*$/);
    if (match) {
      domBuilder.processingInstruction(match[1], match[2]);
      return end + 2;
    }
  }
  //error: unterminated or malformed PI
  return -1;
}
12464
/**
 * Lightweight attribute collection populated while scanning a start tag.
 * Entries live at numeric indices 0..length-1 as {qName, value, offset}.
 *
 * @param source unused; kept for signature compatibility
 */
function ElementAttributes(source) {}
ElementAttributes.prototype = {
  length: 0,
  /**
   * Record the element's tag name after validating it as an XML QName.
   * @throws {Error} on an invalid tag name
   */
  setTagName: function setTagName(tagName) {
    if (!tagNamePattern.test(tagName)) {
      throw new Error('invalid tagName:' + tagName);
    }
    this.tagName = tagName;
  },
  /**
   * Append an attribute entry.
   * @throws {Error} on an invalid attribute name
   */
  add: function add(qName, value, offset) {
    if (!tagNamePattern.test(qName)) {
      throw new Error('invalid attribute:' + qName);
    }
    this[this.length++] = { qName: qName, value: value, offset: offset };
  },
  // index-based accessors over the recorded entries
  getLocalName: function getLocalName(i) {
    return this[i].localName;
  },
  getLocator: function getLocator(i) {
    return this[i].locator;
  },
  getQName: function getQName(i) {
    return this[i].qName;
  },
  getURI: function getURI(i) {
    return this[i].uri;
  },
  getValue: function getValue(i) {
    return this[i].value;
  }
};
12509
/**
 * Install `parent` as the prototype of `thiz` and return the resulting
 * object. On engines without a writable __proto__ the definition is swapped
 * (below) for a fallback that builds a fresh object inheriting from
 * `parent` and copies `thiz`'s properties onto it.
 */
function _set_proto_(thiz, parent) {
  thiz.__proto__ = parent;
  return thiz;
}
// feature-detect: if the __proto__ assignment had no effect, use the fallback
if (!(_set_proto_({}, _set_proto_.prototype) instanceof _set_proto_)) {
  _set_proto_ = function _set_proto_(thiz, parent) {
    var Temp = function Temp() {};
    Temp.prototype = parent;
    var proxy = new Temp();
    for (var key in thiz) {
      proxy[key] = thiz[key];
    }
    return proxy;
  };
}
12524
/**
 * Tokenize a tag starting at `start`: quoted strings, name / name= tokens,
 * and the terminating ">" / "/>" / "<" (capture group 1). Returns the
 * collected regexp matches once a terminator is seen; implicitly returns
 * undefined if the source ends first. The first match (the leading '<') is
 * consumed and discarded.
 */
function split(source, start) {
  var tokens = [];
  var reg = /'[^']+'|"[^"]+"|[^\s<>\/=]+=?|(\/?\s*>|<)/g;
  reg.lastIndex = start;
  reg.exec(source); //skip the leading '<'
  var match;
  while (match = reg.exec(source)) {
    tokens.push(match);
    if (match[1]) {
      return tokens;
    }
  }
}
12536
// public surface of the SAX layer
var XMLReader_1 = XMLReader;

var sax = {
  XMLReader: XMLReader_1
};
12542
12543 /*
12544 * DOM Level 2
12545 * Object DOMException
12546 * @see http://www.w3.org/TR/REC-DOM-Level-1/ecma-script-language-binding.html
12547 * @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/ecma-script-binding.html
12548 */
12549
/**
 * Shallow-copy all enumerable properties (own and inherited) of `src`
 * onto `dest`.
 */
function copy(src, dest) {
  for (var prop in src) {
    dest[prop] = src[prop];
  }
}
12555 /**
12556 ^\w+\.prototype\.([_\w]+)\s*=\s*((?:.*\{\s*?[\r\n][\s\S]*?^})|\S.*?(?=[;\r\n]));?
12557 ^\w+\.prototype\.([_\w]+)\s*=\s*(\S.*?(?=[;\r\n]));?
12558 */
/**
 * Wire `Class` to inherit from `Super`: prefer an Object.create-based
 * prototype chain, fall back to copying methods through a temporary
 * constructor, and ensure `Class.prototype.constructor` points at `Class`.
 */
function _extends$2(Class, Super) {
  var proto = Class.prototype;

  if (Object.create) {
    proto.__proto__ = Object.create(Super.prototype);
  }

  if (!(proto instanceof Super)) {
    // engines where __proto__ assignment failed: rebuild the chain by hand
    var Bridge = function Bridge() {};
    Bridge.prototype = Super.prototype;
    var merged = new Bridge();
    for (var key in proto) {
      merged[key] = proto[key];
    }
    Class.prototype = proto = merged;
  }

  if (proto.constructor != Class) {
    if (typeof Class != 'function') {
      console.error("unknow Class:" + Class);
    }
    proto.constructor = Class;
  }
}
var htmlns = 'http://www.w3.org/1999/xhtml';
// Node Types — the DOM nodeType codes (DOM Level 1/2), exposed both as
// standalone vars and on the NodeType map
var NodeType = {};
var ELEMENT_NODE = NodeType.ELEMENT_NODE = 1;
var ATTRIBUTE_NODE = NodeType.ATTRIBUTE_NODE = 2;
var TEXT_NODE = NodeType.TEXT_NODE = 3;
var CDATA_SECTION_NODE = NodeType.CDATA_SECTION_NODE = 4;
var ENTITY_REFERENCE_NODE = NodeType.ENTITY_REFERENCE_NODE = 5;
var ENTITY_NODE = NodeType.ENTITY_NODE = 6;
var PROCESSING_INSTRUCTION_NODE = NodeType.PROCESSING_INSTRUCTION_NODE = 7;
var COMMENT_NODE = NodeType.COMMENT_NODE = 8;
var DOCUMENT_NODE = NodeType.DOCUMENT_NODE = 9;
var DOCUMENT_TYPE_NODE = NodeType.DOCUMENT_TYPE_NODE = 10;
var DOCUMENT_FRAGMENT_NODE = NodeType.DOCUMENT_FRAGMENT_NODE = 11;
var NOTATION_NODE = NodeType.NOTATION_NODE = 12;
12594
// ExceptionCode — DOMException codes. Each comma expression registers the
// human-readable message under the numeric code in ExceptionMessage, then
// yields the code itself for the var / ExceptionCode entry.
var ExceptionCode = {};
var ExceptionMessage = {};
var INDEX_SIZE_ERR = ExceptionCode.INDEX_SIZE_ERR = (ExceptionMessage[1] = "Index size error", 1);
var DOMSTRING_SIZE_ERR = ExceptionCode.DOMSTRING_SIZE_ERR = (ExceptionMessage[2] = "DOMString size error", 2);
var HIERARCHY_REQUEST_ERR = ExceptionCode.HIERARCHY_REQUEST_ERR = (ExceptionMessage[3] = "Hierarchy request error", 3);
var WRONG_DOCUMENT_ERR = ExceptionCode.WRONG_DOCUMENT_ERR = (ExceptionMessage[4] = "Wrong document", 4);
var INVALID_CHARACTER_ERR = ExceptionCode.INVALID_CHARACTER_ERR = (ExceptionMessage[5] = "Invalid character", 5);
var NO_DATA_ALLOWED_ERR = ExceptionCode.NO_DATA_ALLOWED_ERR = (ExceptionMessage[6] = "No data allowed", 6);
var NO_MODIFICATION_ALLOWED_ERR = ExceptionCode.NO_MODIFICATION_ALLOWED_ERR = (ExceptionMessage[7] = "No modification allowed", 7);
var NOT_FOUND_ERR = ExceptionCode.NOT_FOUND_ERR = (ExceptionMessage[8] = "Not found", 8);
var NOT_SUPPORTED_ERR = ExceptionCode.NOT_SUPPORTED_ERR = (ExceptionMessage[9] = "Not supported", 9);
var INUSE_ATTRIBUTE_ERR = ExceptionCode.INUSE_ATTRIBUTE_ERR = (ExceptionMessage[10] = "Attribute in use", 10);
//level2
var INVALID_STATE_ERR = ExceptionCode.INVALID_STATE_ERR = (ExceptionMessage[11] = "Invalid state", 11);
var SYNTAX_ERR = ExceptionCode.SYNTAX_ERR = (ExceptionMessage[12] = "Syntax error", 12);
var INVALID_MODIFICATION_ERR = ExceptionCode.INVALID_MODIFICATION_ERR = (ExceptionMessage[13] = "Invalid modification", 13);
var NAMESPACE_ERR = ExceptionCode.NAMESPACE_ERR = (ExceptionMessage[14] = "Invalid namespace", 14);
var INVALID_ACCESS_ERR = ExceptionCode.INVALID_ACCESS_ERR = (ExceptionMessage[15] = "Invalid access", 15);
12614
/**
 * DOM Level 2 DOMException. May be invoked with or without `new`: when
 * `message` is already an Error instance it is reused as the returned/
 * thrown object (only `code` is attached); otherwise `this` is initialised
 * as the error. Note the hoisted `var error` shared by both branches.
 *
 * @param {number} code one of the ExceptionCode values
 * @param {string|Error} [message] extra detail, or an Error to decorate
 */
function DOMException(code, message) {
  if (message instanceof Error) {
    var error = message;
  } else {
    error = this;
    Error.call(this, ExceptionMessage[code]);
    this.message = ExceptionMessage[code];
    if (Error.captureStackTrace) Error.captureStackTrace(this, DOMException);
  }
  error.code = code;
  if (message) this.message = this.message + ": " + message;
  return error;
}DOMException.prototype = Error.prototype;
// also expose all numeric codes as static properties of DOMException
copy(ExceptionCode, DOMException);
/**
 * @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-536297177
 * Ordered collection of nodes, accessible by integral index starting at 0.
 * NodeList objects in the DOM are live.
 */
function NodeList() {}NodeList.prototype = {
  /**
   * The number of nodes in the list (valid indices: 0 .. length-1).
   * @standard level1
   */
  length: 0,
  /**
   * Return the node at `index`, or null when the index is out of range.
   * @standard level1
   * @param index unsigned long position in the collection
   * @return Node|null
   */
  item: function item(index) {
    return this[index] || null;
  },
  /**
   * Serialize every member and concatenate the results.
   */
  toString: function toString(isHTML, nodeFilter) {
    var buf = [];
    for (var i = 0; i < this.length; i++) {
      serializeToString(this[i], buf, isHTML, nodeFilter);
    }
    return buf.join('');
  }
};
/**
 * A NodeList that recomputes its contents from `node` (via `refresh`)
 * whenever the owning document's mutation counter (_inc) has changed.
 */
function LiveNodeList(node, refresh) {
  this._node = node;
  this._refresh = refresh;
  _updateLiveList(this);
}
/**
 * Re-run the refresh callback when the document changed since the last
 * snapshot, then cache the fresh contents and counter on the list.
 */
function _updateLiveList(list) {
  var inc = list._node._inc || list._node.ownerDocument._inc;
  if (list._inc != inc) {
    var ls = list._refresh(list._node);
    __set__(list, 'length', ls.length);
    copy(ls, list);
    list._inc = inc;
  }
}
LiveNodeList.prototype.item = function (i) {
  _updateLiveList(this);
  return this[i];
};

_extends$2(LiveNodeList, NodeList);
/**
 * Live collection of nodes accessed by name (attributes or DocumentType
 * entities). Unlike NodeList it is unordered, but members may also be read
 * by ordinal index purely for enumeration convenience.
 */
function NamedNodeMap() {}

/**
 * Index of `node` in the array-like `list`, or undefined when absent.
 */
function _findNodeIndex(list, node) {
  for (var i = list.length - 1; i >= 0; i--) {
    if (list[i] === node) {
      return i;
    }
  }
}
12694
/**
 * Insert `newAttr` into `list` — replacing `oldAttr` in place when given,
 * so ordering is preserved — and keep ownerElement/document bookkeeping in
 * sync via the attribute-mutation hooks.
 */
function _addNamedNode(el, list, newAttr, oldAttr) {
  if (oldAttr) {
    list[_findNodeIndex(list, oldAttr)] = newAttr;
  } else {
    list[list.length++] = newAttr;
  }

  if (!el) {
    return;
  }

  newAttr.ownerElement = el;
  var doc = el.ownerDocument;
  if (doc) {
    if (oldAttr) {
      _onRemoveAttribute(doc, el, oldAttr);
    }
    _onAddAttribute(doc, el, newAttr);
  }
}
/**
 * Remove `attr` from `list`, shifting later entries down one slot, then
 * update document bookkeeping and clear attr.ownerElement.
 * @throws DOMException(NOT_FOUND_ERR) when attr is not in the list
 */
function _removeNamedNode(el, list, attr) {
  var idx = _findNodeIndex(list, attr);

  // _findNodeIndex yields undefined when absent; !(undefined >= 0) is true
  if (!(idx >= 0)) {
    throw DOMException(NOT_FOUND_ERR, new Error(el.tagName + '@' + attr));
  }

  var lastIndex = list.length - 1;
  for (var j = idx; j < lastIndex; j++) {
    list[j] = list[j + 1];
  }
  list.length = lastIndex;

  if (el) {
    var doc = el.ownerDocument;
    if (doc) {
      _onRemoveAttribute(doc, el, attr);
      attr.ownerElement = null;
    }
  }
}
NamedNodeMap.prototype = {
  length: 0,
  item: NodeList.prototype.item,
  /**
   * Return the attribute whose nodeName equals `key`, or undefined.
   */
  getNamedItem: function getNamedItem(key) {
    for (var i = this.length - 1; i >= 0; i--) {
      var attr = this[i];
      if (attr.nodeName == key) {
        return attr;
      }
    }
  },
  /**
   * Add `attr`, keyed by nodeName; returns the attribute it replaced.
   * @throws DOMException(INUSE_ATTRIBUTE_ERR) when attr belongs to another element
   */
  setNamedItem: function setNamedItem(attr) {
    var owner = attr.ownerElement;
    if (owner && owner != this._ownerElement) {
      throw new DOMException(INUSE_ATTRIBUTE_ERR);
    }
    var oldAttr = this.getNamedItem(attr.nodeName);
    _addNamedNode(this._ownerElement, this, attr, oldAttr);
    return oldAttr;
  },
  /**
   * Namespace-aware variant: keys on (namespaceURI, localName).
   * @throws DOMException(INUSE_ATTRIBUTE_ERR) when attr belongs to another element
   */
  setNamedItemNS: function setNamedItemNS(attr) {
    var owner = attr.ownerElement;
    if (owner && owner != this._ownerElement) {
      throw new DOMException(INUSE_ATTRIBUTE_ERR);
    }
    var oldAttr = this.getNamedItemNS(attr.namespaceURI, attr.localName);
    _addNamedNode(this._ownerElement, this, attr, oldAttr);
    return oldAttr;
  },
  /**
   * Remove and return the attribute named `key`.
   * Raises NOT_FOUND_ERR (via _removeNamedNode) when absent.
   */
  removeNamedItem: function removeNamedItem(key) {
    var attr = this.getNamedItem(key);
    _removeNamedNode(this._ownerElement, this, attr);
    return attr;
  },
  /**
   * Namespace-aware removal (DOM level 2).
   */
  removeNamedItemNS: function removeNamedItemNS(namespaceURI, localName) {
    var attr = this.getNamedItemNS(namespaceURI, localName);
    _removeNamedNode(this._ownerElement, this, attr);
    return attr;
  },
  /**
   * Return the attribute matching (namespaceURI, localName), or null.
   */
  getNamedItemNS: function getNamedItemNS(namespaceURI, localName) {
    for (var i = this.length - 1; i >= 0; i--) {
      var node = this[i];
      if (node.localName == localName && node.namespaceURI == namespaceURI) {
        return node;
      }
    }
    return null;
  }
};
12793 /**
12794 * @see http://www.w3.org/TR/REC-DOM-Level-1/level-one-core.html#ID-102161490
12795 */
12796 function DOMImplementation( /* Object */features) {
12797 this._features = {};
12798 if (features) {
12799 for (var feature in features) {
12800 this._features = features[feature];
12801 }
12802 }
12803 }
  DOMImplementation.prototype = {
    // True when `feature` (case-insensitive) was registered at construction
    // time and, if `version` is given, that version key exists for it.
    hasFeature: function hasFeature( /* string */feature, /* string */version) {
      var versions = this._features[feature.toLowerCase()];
      if (versions && (!version || version in versions)) {
        return true;
      } else {
        return false;
      }
    },
    // Introduced in DOM Level 2:
    // Builds an empty Document, attaches the doctype (when given) and, when a
    // root name is given, creates and appends the document element.
    createDocument: function createDocument(namespaceURI, qualifiedName, doctype) {
      // raises:INVALID_CHARACTER_ERR,NAMESPACE_ERR,WRONG_DOCUMENT_ERR
      var doc = new Document();
      doc.implementation = this;
      doc.childNodes = new NodeList();
      doc.doctype = doctype;
      if (doctype) {
        doc.appendChild(doctype);
      }
      if (qualifiedName) {
        var root = doc.createElementNS(namespaceURI, qualifiedName);
        doc.appendChild(root);
      }
      return doc;
    },
    // Introduced in DOM Level 2:
    // Creates a detached DocumentType node; entities/notations are not
    // implemented (see TODO below).
    createDocumentType: function createDocumentType(qualifiedName, publicId, systemId) {
      // raises:INVALID_CHARACTER_ERR,NAMESPACE_ERR
      var node = new DocumentType();
      node.name = qualifiedName;
      node.nodeName = qualifiedName;
      node.publicId = publicId;
      node.systemId = systemId;
      // Introduced in DOM Level 2:
      //readonly attribute DOMString internalSubset;

      //TODO:..
      //  readonly attribute NamedNodeMap     entities;
      //  readonly attribute NamedNodeMap     notations;
      return node;
    }
  };
12846
12847 /**
12848 * @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-1950641247
12849 */
12850
  // Base class for every DOM node type in this implementation. Tree links
  // (firstChild/lastChild/siblings/parentNode) are maintained by the
  // _insertBefore/_removeChild helpers below.
  function Node() {}
  Node.prototype = {
    firstChild: null,
    lastChild: null,
    previousSibling: null,
    nextSibling: null,
    attributes: null,
    parentNode: null,
    childNodes: null,
    ownerDocument: null,
    nodeValue: null,
    namespaceURI: null,
    prefix: null,
    localName: null,
    // Modified in DOM Level 2:
    insertBefore: function insertBefore(newChild, refChild) {
      //raises
      return _insertBefore(this, newChild, refChild);
    },
    // NOTE(review): returns undefined rather than the replaced node.
    replaceChild: function replaceChild(newChild, oldChild) {
      //raises
      this.insertBefore(newChild, oldChild);
      if (oldChild) {
        this.removeChild(oldChild);
      }
    },
    removeChild: function removeChild(oldChild) {
      return _removeChild(this, oldChild);
    },
    appendChild: function appendChild(newChild) {
      return this.insertBefore(newChild, null);
    },
    hasChildNodes: function hasChildNodes() {
      return this.firstChild != null;
    },
    cloneNode: function cloneNode(deep) {
      return _cloneNode(this.ownerDocument || this, this, deep);
    },
    // Modified in DOM Level 2:
    // Merge runs of adjacent text nodes; recurse into non-text children.
    normalize: function normalize() {
      var child = this.firstChild;
      while (child) {
        var next = child.nextSibling;
        if (next && next.nodeType == TEXT_NODE && child.nodeType == TEXT_NODE) {
          this.removeChild(next);
          child.appendData(next.data);
        } else {
          child.normalize();
          child = next;
        }
      }
    },
    // Introduced in DOM Level 2:
    isSupported: function isSupported(feature, version) {
      return this.ownerDocument.implementation.hasFeature(feature, version);
    },
    // Introduced in DOM Level 2:
    hasAttributes: function hasAttributes() {
      return this.attributes.length > 0;
    },
    // Walk up the tree looking for a prefix bound to `namespaceURI` in the
    // per-element _nsMap tables; attributes delegate to their document.
    lookupPrefix: function lookupPrefix(namespaceURI) {
      var el = this;
      while (el) {
        var map = el._nsMap;
        //console.dir(map)
        if (map) {
          for (var n in map) {
            if (map[n] == namespaceURI) {
              return n;
            }
          }
        }
        el = el.nodeType == ATTRIBUTE_NODE ? el.ownerDocument : el.parentNode;
      }
      return null;
    },
    // Introduced in DOM Level 3:
    // Inverse lookup: nearest namespace URI bound to `prefix`.
    lookupNamespaceURI: function lookupNamespaceURI(prefix) {
      var el = this;
      while (el) {
        var map = el._nsMap;
        //console.dir(map)
        if (map) {
          if (prefix in map) {
            return map[prefix];
          }
        }
        el = el.nodeType == ATTRIBUTE_NODE ? el.ownerDocument : el.parentNode;
      }
      return null;
    },
    // Introduced in DOM Level 3:
    isDefaultNamespace: function isDefaultNamespace(namespaceURI) {
      var prefix = this.lookupPrefix(namespaceURI);
      return prefix == null;
    }
  };
12948
12949 function _xmlEncoder(c) {
12950 return c == '<' && '&lt;' || c == '>' && '&gt;' || c == '&' && '&amp;' || c == '"' && '&quot;' || '&#' + c.charCodeAt() + ';';
12951 }
12952
  // Mirror the NodeType constant table onto both the Node constructor and its
  // prototype (copy/NodeType are defined elsewhere in this module), so code
  // can read e.g. Node.ELEMENT_NODE or instance.ELEMENT_NODE.
  copy(NodeType, Node);
  copy(NodeType, Node.prototype);
12955
12956 /**
12957 * @param callback return true for continue,false for break
12958 * @return boolean true: break visit;
12959 */
12960 function _visitNode(node, callback) {
12961 if (callback(node)) {
12962 return true;
12963 }
12964 if (node = node.firstChild) {
12965 do {
12966 if (_visitNode(node, callback)) {
12967 return true;
12968 }
12969 } while (node = node.nextSibling);
12970 }
12971 }
12972
  // Document node constructor; state (childNodes, documentElement, _inc) is
  // attached by DOMImplementation.createDocument and Document.prototype below.
  function Document() {}
12974 function _onAddAttribute(doc, el, newAttr) {
12975 doc && doc._inc++;
12976 var ns = newAttr.namespaceURI;
12977 if (ns == 'http://www.w3.org/2000/xmlns/') {
12978 //update namespace
12979 el._nsMap[newAttr.prefix ? newAttr.localName : ''] = newAttr.value;
12980 }
12981 }
12982 function _onRemoveAttribute(doc, el, newAttr, remove) {
12983 doc && doc._inc++;
12984 var ns = newAttr.namespaceURI;
12985 if (ns == 'http://www.w3.org/2000/xmlns/') {
12986 //update namespace
12987 delete el._nsMap[newAttr.prefix ? newAttr.localName : ''];
12988 }
12989 }
12990 function _onUpdateChild(doc, el, newChild) {
12991 if (doc && doc._inc) {
12992 doc._inc++;
12993 //update childNodes
12994 var cs = el.childNodes;
12995 if (newChild) {
12996 cs[cs.length++] = newChild;
12997 } else {
12998 //console.log(1)
12999 var child = el.firstChild;
13000 var i = 0;
13001 while (child) {
13002 cs[i++] = child;
13003 child = child.nextSibling;
13004 }
13005 cs.length = i;
13006 }
13007 }
13008 }
13009
13010 /**
13011 * attributes;
13012 * children;
13013 *
13014 * writeable properties:
13015 * nodeValue,Attr:value,CharacterData:data
13016 * prefix
13017 */
13018 function _removeChild(parentNode, child) {
13019 var previous = child.previousSibling;
13020 var next = child.nextSibling;
13021 if (previous) {
13022 previous.nextSibling = next;
13023 } else {
13024 parentNode.firstChild = next;
13025 }
13026 if (next) {
13027 next.previousSibling = previous;
13028 } else {
13029 parentNode.lastChild = previous;
13030 }
13031 _onUpdateChild(parentNode.ownerDocument, parentNode);
13032 return child;
13033 }
  /**
   * performance-critical path: the common case is refChild == null (plain append)
   */
  // Core insertion primitive shared by insertBefore/appendChild. Handles
  // document fragments by splicing their whole child range in at once.
  // NOTE: `newFirst`/`newLast` are declared with `var` inside the `if` branch
  // and assigned bare in the `else` — both rely on var hoisting to share one
  // binding; do not "fix" the declarations.
  function _insertBefore(parentNode, newChild, nextChild) {
    var cp = newChild.parentNode;
    if (cp) {
      cp.removeChild(newChild); //remove and update
    }
    if (newChild.nodeType === DOCUMENT_FRAGMENT_NODE) {
      // Inserting a fragment splices [newFirst..newLast] as a range.
      var newFirst = newChild.firstChild;
      if (newFirst == null) {
        // Empty fragment: nothing to do.
        return newChild;
      }
      var newLast = newChild.lastChild;
    } else {
      newFirst = newLast = newChild;
    }
    // Node that will precede the inserted range (null when inserting first).
    var pre = nextChild ? nextChild.previousSibling : parentNode.lastChild;

    newFirst.previousSibling = pre;
    newLast.nextSibling = nextChild;

    if (pre) {
      pre.nextSibling = newFirst;
    } else {
      parentNode.firstChild = newFirst;
    }
    if (nextChild == null) {
      parentNode.lastChild = newLast;
    } else {
      nextChild.previousSibling = newLast;
    }
    // Re-parent every node in the inserted range.
    do {
      newFirst.parentNode = parentNode;
    } while (newFirst !== newLast && (newFirst = newFirst.nextSibling));
    _onUpdateChild(parentNode.ownerDocument || parentNode, parentNode);
    //console.log(parentNode.lastChild.nextSibling == null)
    if (newChild.nodeType == DOCUMENT_FRAGMENT_NODE) {
      // The fragment's children now live in the tree; empty the fragment.
      newChild.firstChild = newChild.lastChild = null;
    }
    return newChild;
  }
13076 function _appendSingleChild(parentNode, newChild) {
13077 var cp = newChild.parentNode;
13078 if (cp) {
13079 var pre = parentNode.lastChild;
13080 cp.removeChild(newChild); //remove and update
13081 var pre = parentNode.lastChild;
13082 }
13083 var pre = parentNode.lastChild;
13084 newChild.parentNode = parentNode;
13085 newChild.previousSibling = pre;
13086 newChild.nextSibling = null;
13087 if (pre) {
13088 pre.nextSibling = newChild;
13089 } else {
13090 parentNode.firstChild = newChild;
13091 }
13092 parentNode.lastChild = newChild;
13093 _onUpdateChild(parentNode.ownerDocument, parentNode, newChild);
13094 return newChild;
13095 //console.log("__aa",parentNode.lastChild.nextSibling == null)
13096 }
  Document.prototype = {
    //implementation : null,
    nodeName: '#document',
    nodeType: DOCUMENT_NODE,
    doctype: null,
    documentElement: null,
    // Mutation counter: bumped on every tree/attribute change so cached live
    // lists can detect staleness.
    _inc: 1,

    insertBefore: function insertBefore(newChild, refChild) {
      //raises
      // Document fragments are spliced in one child at a time.
      if (newChild.nodeType == DOCUMENT_FRAGMENT_NODE) {
        var child = newChild.firstChild;
        while (child) {
          var next = child.nextSibling;
          this.insertBefore(child, refChild);
          child = next;
        }
        return newChild;
      }
      // The first element inserted becomes the document element.
      if (this.documentElement == null && newChild.nodeType == ELEMENT_NODE) {
        this.documentElement = newChild;
      }

      return _insertBefore(this, newChild, refChild), newChild.ownerDocument = this, newChild;
    },
    removeChild: function removeChild(oldChild) {
      if (this.documentElement == oldChild) {
        this.documentElement = null;
      }
      return _removeChild(this, oldChild);
    },
    // Introduced in DOM Level 2:
    importNode: function importNode(importedNode, deep) {
      return _importNode(this, importedNode, deep);
    },
    // Introduced in DOM Level 2:
    // Depth-first search for the first element whose literal 'id' attribute
    // matches (no DTD-driven ID semantics).
    getElementById: function getElementById(id) {
      var rtv = null;
      _visitNode(this.documentElement, function (node) {
        if (node.nodeType == ELEMENT_NODE) {
          if (node.getAttribute('id') == id) {
            rtv = node;
            return true;
          }
        }
      });
      return rtv;
    },

    //document factory method:
    createElement: function createElement(tagName) {
      var node = new Element();
      node.ownerDocument = this;
      node.nodeName = tagName;
      node.tagName = tagName;
      node.childNodes = new NodeList();
      var attrs = node.attributes = new NamedNodeMap();
      attrs._ownerElement = node;
      return node;
    },
    createDocumentFragment: function createDocumentFragment() {
      var node = new DocumentFragment();
      node.ownerDocument = this;
      node.childNodes = new NodeList();
      return node;
    },
    createTextNode: function createTextNode(data) {
      var node = new Text();
      node.ownerDocument = this;
      node.appendData(data);
      return node;
    },
    createComment: function createComment(data) {
      var node = new Comment();
      node.ownerDocument = this;
      node.appendData(data);
      return node;
    },
    createCDATASection: function createCDATASection(data) {
      var node = new CDATASection();
      node.ownerDocument = this;
      node.appendData(data);
      return node;
    },
    createProcessingInstruction: function createProcessingInstruction(target, data) {
      var node = new ProcessingInstruction();
      node.ownerDocument = this;
      node.tagName = node.target = target;
      node.nodeValue = node.data = data;
      return node;
    },
    createAttribute: function createAttribute(name) {
      var node = new Attr();
      node.ownerDocument = this;
      node.name = name;
      node.nodeName = name;
      node.localName = name;
      node.specified = true;
      return node;
    },
    createEntityReference: function createEntityReference(name) {
      var node = new EntityReference();
      node.ownerDocument = this;
      node.nodeName = name;
      return node;
    },
    // Introduced in DOM Level 2:
    // Like createElement, but splits 'prefix:local' qualified names and
    // records the namespace.
    createElementNS: function createElementNS(namespaceURI, qualifiedName) {
      var node = new Element();
      var pl = qualifiedName.split(':');
      var attrs = node.attributes = new NamedNodeMap();
      node.childNodes = new NodeList();
      node.ownerDocument = this;
      node.nodeName = qualifiedName;
      node.tagName = qualifiedName;
      node.namespaceURI = namespaceURI;
      if (pl.length == 2) {
        node.prefix = pl[0];
        node.localName = pl[1];
      } else {
        //el.prefix = null;
        node.localName = qualifiedName;
      }
      attrs._ownerElement = node;
      return node;
    },
    // Introduced in DOM Level 2:
    createAttributeNS: function createAttributeNS(namespaceURI, qualifiedName) {
      var node = new Attr();
      var pl = qualifiedName.split(':');
      node.ownerDocument = this;
      node.nodeName = qualifiedName;
      node.name = qualifiedName;
      node.namespaceURI = namespaceURI;
      node.specified = true;
      if (pl.length == 2) {
        node.prefix = pl[0];
        node.localName = pl[1];
      } else {
        //el.prefix = null;
        node.localName = qualifiedName;
      }
      return node;
    }
  };
  _extends$2(Document, Node);
13243
  // Element node. `_nsMap` tracks xmlns declarations made on this element
  // (maintained by _onAddAttribute/_onRemoveAttribute).
  function Element() {
    this._nsMap = {};
  }Element.prototype = {
    nodeType: ELEMENT_NODE,
    hasAttribute: function hasAttribute(name) {
      return this.getAttributeNode(name) != null;
    },
    // Returns '' (not null) when the attribute is absent.
    getAttribute: function getAttribute(name) {
      var attr = this.getAttributeNode(name);
      return attr && attr.value || '';
    },
    getAttributeNode: function getAttributeNode(name) {
      return this.attributes.getNamedItem(name);
    },
    setAttribute: function setAttribute(name, value) {
      var attr = this.ownerDocument.createAttribute(name);
      attr.value = attr.nodeValue = "" + value;
      this.setAttributeNode(attr);
    },
    removeAttribute: function removeAttribute(name) {
      var attr = this.getAttributeNode(name);
      attr && this.removeAttributeNode(attr);
    },

    // The four primitive operations below back the convenience methods above.
    appendChild: function appendChild(newChild) {
      if (newChild.nodeType === DOCUMENT_FRAGMENT_NODE) {
        return this.insertBefore(newChild, null);
      } else {
        return _appendSingleChild(this, newChild);
      }
    },
    setAttributeNode: function setAttributeNode(newAttr) {
      return this.attributes.setNamedItem(newAttr);
    },
    setAttributeNodeNS: function setAttributeNodeNS(newAttr) {
      return this.attributes.setNamedItemNS(newAttr);
    },
    removeAttributeNode: function removeAttributeNode(oldAttr) {
      //console.log(this == oldAttr.ownerElement)
      return this.attributes.removeNamedItem(oldAttr.nodeName);
    },
    //get real attribute name,and remove it by removeAttributeNode
    removeAttributeNS: function removeAttributeNS(namespaceURI, localName) {
      var old = this.getAttributeNodeNS(namespaceURI, localName);
      old && this.removeAttributeNode(old);
    },

    hasAttributeNS: function hasAttributeNS(namespaceURI, localName) {
      return this.getAttributeNodeNS(namespaceURI, localName) != null;
    },
    // Returns '' (not null) when the attribute is absent.
    getAttributeNS: function getAttributeNS(namespaceURI, localName) {
      var attr = this.getAttributeNodeNS(namespaceURI, localName);
      return attr && attr.value || '';
    },
    setAttributeNS: function setAttributeNS(namespaceURI, qualifiedName, value) {
      var attr = this.ownerDocument.createAttributeNS(namespaceURI, qualifiedName);
      attr.value = attr.nodeValue = "" + value;
      this.setAttributeNode(attr);
    },
    getAttributeNodeNS: function getAttributeNodeNS(namespaceURI, localName) {
      return this.attributes.getNamedItemNS(namespaceURI, localName);
    },

    // Live list of descendant elements; '*' matches any tag.
    getElementsByTagName: function getElementsByTagName(tagName) {
      return new LiveNodeList(this, function (base) {
        var ls = [];
        _visitNode(base, function (node) {
          if (node !== base && node.nodeType == ELEMENT_NODE && (tagName === '*' || node.tagName == tagName)) {
            ls.push(node);
          }
        });
        return ls;
      });
    },
    // Namespace-aware variant; '*' wildcards either component.
    getElementsByTagNameNS: function getElementsByTagNameNS(namespaceURI, localName) {
      return new LiveNodeList(this, function (base) {
        var ls = [];
        _visitNode(base, function (node) {
          if (node !== base && node.nodeType === ELEMENT_NODE && (namespaceURI === '*' || node.namespaceURI === namespaceURI) && (localName === '*' || node.localName == localName)) {
            ls.push(node);
          }
        });
        return ls;
      });
    }
  };
  // Documents share the element search implementations.
  Document.prototype.getElementsByTagName = Element.prototype.getElementsByTagName;
  Document.prototype.getElementsByTagNameNS = Element.prototype.getElementsByTagNameNS;

  _extends$2(Element, Node);
  // Attr node; name/value/ownerElement are populated by
  // Document.createAttribute(NS) and the NamedNodeMap helpers above.
  function Attr() {}Attr.prototype.nodeType = ATTRIBUTE_NODE;
  _extends$2(Attr, Node);
13337
13338 function CharacterData() {}CharacterData.prototype = {
13339 data: '',
13340 substringData: function substringData(offset, count) {
13341 return this.data.substring(offset, offset + count);
13342 },
13343 appendData: function appendData(text) {
13344 text = this.data + text;
13345 this.nodeValue = this.data = text;
13346 this.length = text.length;
13347 },
13348 insertData: function insertData(offset, text) {
13349 this.replaceData(offset, 0, text);
13350 },
13351 appendChild: function appendChild(newChild) {
13352 throw new Error(ExceptionMessage[HIERARCHY_REQUEST_ERR]);
13353 },
13354 deleteData: function deleteData(offset, count) {
13355 this.replaceData(offset, count, "");
13356 },
13357 replaceData: function replaceData(offset, count, text) {
13358 var start = this.data.substring(0, offset);
13359 var end = this.data.substring(offset + count);
13360 text = start + text + end;
13361 this.nodeValue = this.data = text;
13362 this.length = text.length;
13363 }
13364 };
13365 _extends$2(CharacterData, Node);
13366 function Text() {}Text.prototype = {
13367 nodeName: "#text",
13368 nodeType: TEXT_NODE,
13369 splitText: function splitText(offset) {
13370 var text = this.data;
13371 var newText = text.substring(offset);
13372 text = text.substring(0, offset);
13373 this.data = this.nodeValue = text;
13374 this.length = text.length;
13375 var newNode = this.ownerDocument.createTextNode(newText);
13376 if (this.parentNode) {
13377 this.parentNode.insertBefore(newNode, this.nextSibling);
13378 }
13379 return newNode;
13380 }
13381 };
13382 _extends$2(Text, CharacterData);
13383 function Comment() {}Comment.prototype = {
13384 nodeName: "#comment",
13385 nodeType: COMMENT_NODE
13386 };
13387 _extends$2(Comment, CharacterData);
13388
13389 function CDATASection() {}CDATASection.prototype = {
13390 nodeName: "#cdata-section",
13391 nodeType: CDATA_SECTION_NODE
13392 };
13393 _extends$2(CDATASection, CharacterData);
13394
13395 function DocumentType() {}DocumentType.prototype.nodeType = DOCUMENT_TYPE_NODE;
13396 _extends$2(DocumentType, Node);
13397
13398 function Notation() {}Notation.prototype.nodeType = NOTATION_NODE;
13399 _extends$2(Notation, Node);
13400
13401 function Entity() {}Entity.prototype.nodeType = ENTITY_NODE;
13402 _extends$2(Entity, Node);
13403
13404 function EntityReference() {}EntityReference.prototype.nodeType = ENTITY_REFERENCE_NODE;
13405 _extends$2(EntityReference, Node);
13406
13407 function DocumentFragment() {}DocumentFragment.prototype.nodeName = "#document-fragment";
13408 DocumentFragment.prototype.nodeType = DOCUMENT_FRAGMENT_NODE;
13409 _extends$2(DocumentFragment, Node);
13410
13411 function ProcessingInstruction() {}
13412 ProcessingInstruction.prototype.nodeType = PROCESSING_INSTRUCTION_NODE;
13413 _extends$2(ProcessingInstruction, Node);
  // Public serializer facade: delegates to nodeSerializeToString with the
  // node bound as `this`, mirroring Node.prototype.toString below.
  function XMLSerializer() {}
  XMLSerializer.prototype.serializeToString = function (node, isHtml, nodeFilter) {
    return nodeSerializeToString.call(node, isHtml, nodeFilter);
  };
  Node.prototype.toString = nodeSerializeToString;
  // Serialize `this` node to an XML string (entry point for toString and
  // XMLSerializer). NOTE: `prefix` is re-declared with var below — both
  // declarations hoist to one binding; kept as-is. `visibleNamespaces` is
  // var-hoisted out of the inner if and may be undefined when passed on.
  function nodeSerializeToString(isHtml, nodeFilter) {
    var buf = [];
    // nodeType 9 = DOCUMENT_NODE: serialize from the document element.
    var refNode = this.nodeType == 9 ? this.documentElement : this;
    var prefix = refNode.prefix;
    var uri = refNode.namespaceURI;

    if (uri && prefix == null) {
      //console.log(prefix)
      var prefix = refNode.lookupPrefix(uri);
      if (prefix == null) {
        //isHTML = true;
        // Root is in a namespace with no resolvable prefix: pre-seed the
        // visible-namespace list so serializeToString can emit xmlns once.
        var visibleNamespaces = [{ namespace: uri, prefix: null
          //{namespace:uri,prefix:''}
        }];
      }
    }
    serializeToString(this, buf, isHtml, nodeFilter, visibleNamespaces);
    //console.log('###',this.nodeType,uri,prefix,buf.join(''))
    return buf.join('');
  }
13439 function needNamespaceDefine(node, isHTML, visibleNamespaces) {
13440 var prefix = node.prefix || '';
13441 var uri = node.namespaceURI;
13442 if (!prefix && !uri) {
13443 return false;
13444 }
13445 if (prefix === "xml" && uri === "http://www.w3.org/XML/1998/namespace" || uri == 'http://www.w3.org/2000/xmlns/') {
13446 return false;
13447 }
13448
13449 var i = visibleNamespaces.length;
13450 //console.log('@@@@',node.tagName,prefix,uri,visibleNamespaces)
13451 while (i--) {
13452 var ns = visibleNamespaces[i];
13453 // get namespace prefix
13454 //console.log(node.nodeType,node.tagName,ns.prefix,prefix)
13455 if (ns.prefix == prefix) {
13456 return ns.namespace != uri;
13457 }
13458 }
13459 //console.log(isHTML,uri,prefix=='')
13460 //if(isHTML && prefix ==null && uri == 'http://www.w3.org/1999/xhtml'){
13461 // return false;
13462 //}
13463 //node.flag = '11111'
13464 //console.error(3,true,node.flag,node.prefix,node.namespaceURI)
13465 return true;
13466 }
  // Recursive XML serializer. `visibleNamespaces` accumulates xmlns
  // declarations already in scope so they are not re-emitted for descendants.
  function serializeToString(node, buf, isHTML, nodeFilter, visibleNamespaces) {
    if (nodeFilter) {
      // A filter may replace the node, substitute raw text, or (by returning
      // a falsy value) drop the whole subtree.
      node = nodeFilter(node);
      if (node) {
        if (typeof node == 'string') {
          buf.push(node);
          return;
        }
      } else {
        return;
      }
      //buf.sort.apply(attrs, attributeSorter);
    }
    switch (node.nodeType) {
      case ELEMENT_NODE:
        if (!visibleNamespaces) visibleNamespaces = [];
        // NOTE(review): startVisibleNamespaces is unused because the
        // scope-trim at the end of this case is commented out.
        var startVisibleNamespaces = visibleNamespaces.length;
        var attrs = node.attributes;
        var len = attrs.length;
        var child = node.firstChild;
        var nodeName = node.tagName;

        isHTML = htmlns === node.namespaceURI || isHTML;
        buf.push('<', nodeName);

        // First pass: record xmlns declarations present on this element.
        for (var i = 0; i < len; i++) {
          // add namespaces for attributes
          var attr = attrs.item(i);
          if (attr.prefix == 'xmlns') {
            visibleNamespaces.push({ prefix: attr.localName, namespace: attr.value });
          } else if (attr.nodeName == 'xmlns') {
            visibleNamespaces.push({ prefix: '', namespace: attr.value });
          }
        }
        // Second pass: serialize attributes, declaring namespaces on demand.
        for (var i = 0; i < len; i++) {
          var attr = attrs.item(i);
          if (needNamespaceDefine(attr, isHTML, visibleNamespaces)) {
            var prefix = attr.prefix || '';
            var uri = attr.namespaceURI;
            var ns = prefix ? ' xmlns:' + prefix : " xmlns";
            buf.push(ns, '="', uri, '"');
            visibleNamespaces.push({ prefix: prefix, namespace: uri });
          }
          serializeToString(attr, buf, isHTML, nodeFilter, visibleNamespaces);
        }
        // add namespace for current node
        if (needNamespaceDefine(node, isHTML, visibleNamespaces)) {
          var prefix = node.prefix || '';
          var uri = node.namespaceURI;
          var ns = prefix ? ' xmlns:' + prefix : " xmlns";
          buf.push(ns, '="', uri, '"');
          visibleNamespaces.push({ prefix: prefix, namespace: uri });
        }

        // HTML void elements (the whitelist below) self-close; anything with
        // children, or any non-void HTML element, gets an explicit end tag.
        if (child || isHTML && !/^(?:meta|link|img|br|hr|input)$/i.test(nodeName)) {
          buf.push('>');
          //if is cdata child node
          if (isHTML && /^script$/i.test(nodeName)) {
            // Script bodies are emitted raw, without entity escaping.
            while (child) {
              if (child.data) {
                buf.push(child.data);
              } else {
                serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces);
              }
              child = child.nextSibling;
            }
          } else {
            while (child) {
              serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces);
              child = child.nextSibling;
            }
          }
          buf.push('</', nodeName, '>');
        } else {
          buf.push('/>');
        }
        // remove added visible namespaces
        //visibleNamespaces.length = startVisibleNamespaces;
        return;
      case DOCUMENT_NODE:
      case DOCUMENT_FRAGMENT_NODE:
        // Serialize children only; documents/fragments have no tag of their own.
        var child = node.firstChild;
        while (child) {
          serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces);
          child = child.nextSibling;
        }
        return;
      case ATTRIBUTE_NODE:
        // Escape <, & and " inside attribute values.
        return buf.push(' ', node.name, '="', node.value.replace(/[<&"]/g, _xmlEncoder), '"');
      case TEXT_NODE:
        // Escape < and & in character data.
        return buf.push(node.data.replace(/[<&]/g, _xmlEncoder));
      case CDATA_SECTION_NODE:
        return buf.push('<![CDATA[', node.data, ']]>');
      case COMMENT_NODE:
        return buf.push("<!--", node.data, "-->");
      case DOCUMENT_TYPE_NODE:
        var pubid = node.publicId;
        var sysid = node.systemId;
        buf.push('<!DOCTYPE ', node.name);
        if (pubid) {
          buf.push(' PUBLIC "', pubid);
          if (sysid && sysid != '.') {
            buf.push('" "', sysid);
          }
          buf.push('">');
        } else if (sysid && sysid != '.') {
          buf.push(' SYSTEM "', sysid, '">');
        } else {
          var sub = node.internalSubset;
          if (sub) {
            buf.push(" [", sub, "]");
          }
          buf.push(">");
        }
        return;
      case PROCESSING_INSTRUCTION_NODE:
        return buf.push("<?", node.target, " ", node.data, "?>");
      case ENTITY_REFERENCE_NODE:
        return buf.push('&', node.nodeName, ';');
      //case ENTITY_NODE:
      //case NOTATION_NODE:
      default:
        // Unknown node kinds are flagged inline rather than throwing.
        buf.push('??', node.nodeName);
    }
  }
  // Copy `node` (and, when `deep`, its subtree) into document `doc`
  // (backs Document.importNode, DOM Level 2).
  function _importNode(doc, node, deep) {
    var node2;
    switch (node.nodeType) {
      case ELEMENT_NODE:
        node2 = node.cloneNode(false);
        node2.ownerDocument = doc;
        //var attrs = node2.attributes;
        //var len = attrs.length;
        //for(var i=0;i<len;i++){
        //node2.setAttributeNodeNS(importNode(doc,attrs.item(i),deep));
        //}
        // Intentional fallthrough to the shared `break` below.
      case DOCUMENT_FRAGMENT_NODE:
        break;
      case ATTRIBUTE_NODE:
        // Attributes always import their children (the value text nodes).
        deep = true;
        break;
      //case ENTITY_REFERENCE_NODE:
      //case PROCESSING_INSTRUCTION_NODE:
      ////case TEXT_NODE:
      //case CDATA_SECTION_NODE:
      //case COMMENT_NODE:
      //	deep = false;
      //	break;
      //case DOCUMENT_NODE:
      //case DOCUMENT_TYPE_NODE:
      //cannot be imported.
      //case ENTITY_NODE:
      //case NOTATION_NODE:
      //can not hit in level3
      //default:throw e;
    }
    if (!node2) {
      // Every node type not handled above gets a shallow clone here.
      node2 = node.cloneNode(false); //false
    }
    node2.ownerDocument = doc;
    node2.parentNode = null;
    if (deep) {
      var child = node.firstChild;
      while (child) {
        node2.appendChild(_importNode(doc, child, deep));
        child = child.nextSibling;
      }
    }
    return node2;
  }
13637 //
13638 //var _relationMap = {firstChild:1,lastChild:1,previousSibling:1,nextSibling:1,
13639 // attributes:1,childNodes:1,parentNode:1,documentElement:1,doctype,};
  // Clone helper behind Node.cloneNode: copies every primitive (non-object)
  // property, then rebuilds attributes (elements always clone attributes
  // deeply) and, when `deep`, the child list.
  function _cloneNode(doc, node, deep) {
    var node2 = new node.constructor();
    for (var n in node) {
      var v = node[n];
      // Object-valued relations (children, siblings, maps) are rebuilt below,
      // not copied by reference.
      if (typeof v != 'object') {
        if (v != node2[n]) {
          node2[n] = v;
        }
      }
    }
    if (node.childNodes) {
      node2.childNodes = new NodeList();
    }
    node2.ownerDocument = doc;
    switch (node2.nodeType) {
      case ELEMENT_NODE:
        var attrs = node.attributes;
        var attrs2 = node2.attributes = new NamedNodeMap();
        var len = attrs.length;
        attrs2._ownerElement = node2;
        for (var i = 0; i < len; i++) {
          node2.setAttributeNode(_cloneNode(doc, attrs.item(i), true));
        }
        break;
      case ATTRIBUTE_NODE:
        // Attribute clones always carry their value children.
        deep = true;
    }
    if (deep) {
      var child = node.firstChild;
      while (child) {
        node2.appendChild(_cloneNode(doc, child, deep));
        child = child.nextSibling;
      }
    }
    return node2;
  }
13676
  // Fallback property setter: plain assignment. Re-bound inside the try block
  // below to write to a '$$'-prefixed slot when Object.defineProperty works,
  // so live getters (length/textContent) can sit on the public names.
  function __set__(object, key, value) {
    object[key] = value;
  }
13680 //do dynamic
  // Install live accessors when the runtime supports Object.defineProperty;
  // any failure (old IE) falls back to the plain __set__ above.
  try {
    if (Object.defineProperty) {
      // Concatenated text of all descendants, skipping processing
      // instructions (nodeType 7) and comments (nodeType 8).
      var getTextContent = function getTextContent(node) {
        switch (node.nodeType) {
          case ELEMENT_NODE:
          case DOCUMENT_FRAGMENT_NODE:
            var buf = [];
            node = node.firstChild;
            while (node) {
              if (node.nodeType !== 7 && node.nodeType !== 8) {
                buf.push(getTextContent(node));
              }
              node = node.nextSibling;
            }
            return buf.join('');
          default:
            return node.nodeValue;
        }
      };

      // LiveNodeList.length re-syncs against the document on every read.
      Object.defineProperty(LiveNodeList.prototype, 'length', {
        get: function get() {
          _updateLiveList(this);
          return this.$$length;
        }
      });
      Object.defineProperty(Node.prototype, 'textContent', {
        get: function get() {
          return getTextContent(this);
        },
        set: function set(data) {
          switch (this.nodeType) {
            case ELEMENT_NODE:
            case DOCUMENT_FRAGMENT_NODE:
              // Replace all children with a single text node.
              while (this.firstChild) {
                this.removeChild(this.firstChild);
              }
              if (data || String(data)) {
                this.appendChild(this.ownerDocument.createTextNode(data));
              }
              break;
            default:
              //TODO:
              this.data = data;
              this.value = data;
              this.nodeValue = data;
          }
        }
      });

      // Getters work: route raw writes into '$$'-prefixed backing slots.
      __set__ = function __set__(object, key, value) {
        //console.log(value)
        object['$$' + key] = value;
      };
    }
  } catch (e) {} //ie8 — defineProperty fails there; silently keep the fallback.
13737
13738
  //if(typeof require == 'function'){
  // Public surface of this embedded xmldom build.
  var DOMImplementation_1 = DOMImplementation;
  var XMLSerializer_1 = XMLSerializer;
  //}

  var dom = {
    DOMImplementation: DOMImplementation_1,
    XMLSerializer: XMLSerializer_1
  };
13748
13749 var domParser = createCommonjsModule(function (module, exports) {
13750 function DOMParser(options) {
13751 this.options = options || { locator: {} };
13752 }
13753 DOMParser.prototype.parseFromString = function (source, mimeType) {
13754 var options = this.options;
13755 var sax$$1 = new XMLReader();
13756 var domBuilder = options.domBuilder || new DOMHandler(); //contentHandler and LexicalHandler
13757 var errorHandler = options.errorHandler;
13758 var locator = options.locator;
13759 var defaultNSMap = options.xmlns || {};
13760 var entityMap = { 'lt': '<', 'gt': '>', 'amp': '&', 'quot': '"', 'apos': "'" };
13761 if (locator) {
13762 domBuilder.setDocumentLocator(locator);
13763 }
13764
13765 sax$$1.errorHandler = buildErrorHandler(errorHandler, domBuilder, locator);
13766 sax$$1.domBuilder = options.domBuilder || domBuilder;
13767 if (/\/x?html?$/.test(mimeType)) {
13768 entityMap.nbsp = '\xa0';
13769 entityMap.copy = '\xa9';
13770 defaultNSMap[''] = 'http://www.w3.org/1999/xhtml';
13771 }
13772 defaultNSMap.xml = defaultNSMap.xml || 'http://www.w3.org/XML/1998/namespace';
13773 if (source) {
13774 sax$$1.parse(source, defaultNSMap, entityMap);
13775 } else {
13776 sax$$1.errorHandler.error("invalid doc source");
13777 }
13778 return domBuilder.doc;
13779 };
13780 function buildErrorHandler(errorImpl, domBuilder, locator) {
13781 if (!errorImpl) {
13782 if (domBuilder instanceof DOMHandler) {
13783 return domBuilder;
13784 }
13785 errorImpl = domBuilder;
13786 }
13787 var errorHandler = {};
13788 var isCallback = errorImpl instanceof Function;
13789 locator = locator || {};
13790 function build(key) {
13791 var fn = errorImpl[key];
13792 if (!fn && isCallback) {
13793 fn = errorImpl.length == 2 ? function (msg) {
13794 errorImpl(key, msg);
13795 } : errorImpl;
13796 }
13797 errorHandler[key] = fn && function (msg) {
13798 fn('[xmldom ' + key + ']\t' + msg + _locator(locator));
13799 } || function () {};
13800 }
13801 build('warning');
13802 build('error');
13803 build('fatalError');
13804 return errorHandler;
13805 }
13806
13807 //console.log('#\n\n\n\n\n\n\n####')
13808 /**
13809 * +ContentHandler+ErrorHandler
13810 * +LexicalHandler+EntityResolver2
13811 * -DeclHandler-DTDHandler
13812 *
13813 * DefaultHandler:EntityResolver, DTDHandler, ContentHandler, ErrorHandler
13814 * DefaultHandler2:DefaultHandler,LexicalHandler, DeclHandler, EntityResolver2
13815 * @link http://www.saxproject.org/apidoc/org/xml/sax/helpers/DefaultHandler.html
13816 */
  /**
   * SAX handler that assembles a DOM Document from parse events
   * (ContentHandler + LexicalHandler + ErrorHandler duties; see the
   * class comment above).
   */
  function DOMHandler() {
    // whether characters() is currently inside a CDATA section
    this.cdata = false;
  }
13820 function position(locator, node) {
13821 node.lineNumber = locator.lineNumber;
13822 node.columnNumber = locator.columnNumber;
13823 }
13824 /**
13825 * @see org.xml.sax.ContentHandler#startDocument
13826 * @link http://www.saxproject.org/apidoc/org/xml/sax/ContentHandler.html
13827 */
  DOMHandler.prototype = {
    // ContentHandler: create the document all subsequent events build into
    startDocument: function startDocument() {
      this.doc = new DOMImplementation().createDocument(null, null, null);
      if (this.locator) {
        this.doc.documentURI = this.locator.systemId;
      }
    },
    // ContentHandler: open an element, copy its attributes, make it current
    startElement: function startElement(namespaceURI, localName, qName, attrs) {
      var doc = this.doc;
      var el = doc.createElementNS(namespaceURI, qName || localName);
      var len = attrs.length;
      appendElement(this, el);
      this.currentElement = el;

      this.locator && position(this.locator, el);
      for (var i = 0; i < len; i++) {
        // NOTE: these vars intentionally shadow the parameters of the same name
        var namespaceURI = attrs.getURI(i);
        var value = attrs.getValue(i);
        var qName = attrs.getQName(i);
        var attr = doc.createAttributeNS(namespaceURI, qName);
        this.locator && position(attrs.getLocator(i), attr);
        attr.value = attr.nodeValue = value;
        el.setAttributeNode(attr);
      }
    },
    // ContentHandler: close the current element, pop back to its parent
    endElement: function endElement(namespaceURI, localName, qName) {
      var current = this.currentElement;
      var tagName = current.tagName;
      this.currentElement = current.parentNode;
    },
    startPrefixMapping: function startPrefixMapping(prefix, uri) {},
    endPrefixMapping: function endPrefixMapping(prefix) {},
    processingInstruction: function processingInstruction(target, data) {
      var ins = this.doc.createProcessingInstruction(target, data);
      this.locator && position(this.locator, ins);
      appendElement(this, ins);
    },
    ignorableWhitespace: function ignorableWhitespace(ch, start, length) {},
    // ContentHandler: text content; emitted as CDATA while inside a CDATA section
    characters: function characters(chars, start, length) {
      chars = _toString.apply(this, arguments);
      //console.log(chars)
      if (chars) {
        if (this.cdata) {
          var charNode = this.doc.createCDATASection(chars);
        } else {
          var charNode = this.doc.createTextNode(chars);
        }
        if (this.currentElement) {
          this.currentElement.appendChild(charNode);
        } else if (/^\s*$/.test(chars)) {
          // whitespace-only text outside the root element goes on the document
          this.doc.appendChild(charNode);
          //process xml
        }
        this.locator && position(this.locator, charNode);
      }
    },
    skippedEntity: function skippedEntity(name) {},
    endDocument: function endDocument() {
      this.doc.normalize();
    },
    // Remember the SAX locator so created nodes can record line/column info
    setDocumentLocator: function setDocumentLocator(locator) {
      if (this.locator = locator) {
        // && !('lineNumber' in locator)){
        locator.lineNumber = 0;
      }
    },
    //LexicalHandler
    comment: function comment(chars, start, length) {
      chars = _toString.apply(this, arguments);
      var comm = this.doc.createComment(chars);
      this.locator && position(this.locator, comm);
      appendElement(this, comm);
    },

    startCDATA: function startCDATA() {
      //used in characters() methods
      this.cdata = true;
    },
    endCDATA: function endCDATA() {
      this.cdata = false;
    },

    // LexicalHandler: record the doctype when the implementation supports it
    startDTD: function startDTD(name, publicId, systemId) {
      var impl = this.doc.implementation;
      if (impl && impl.createDocumentType) {
        var dt = impl.createDocumentType(name, publicId, systemId);
        this.locator && position(this.locator, dt);
        appendElement(this, dt);
      }
    },
    /**
     * @see org.xml.sax.ErrorHandler
     * @link http://www.saxproject.org/apidoc/org/xml/sax/ErrorHandler.html
     */
    warning: function warning(error) {
      console.warn('[xmldom warning]\t' + error, _locator(this.locator));
    },
    error: function error(_error) {
      console.error('[xmldom error]\t' + _error, _locator(this.locator));
    },
    // fatal errors are logged and rethrown to abort the parse
    fatalError: function fatalError(error) {
      console.error('[xmldom fatalError]\t' + error, _locator(this.locator));
      throw error;
    }
  };
13933 function _locator(l) {
13934 if (l) {
13935 return '\n@' + (l.systemId || '') + '#[line:' + l.lineNumber + ',col:' + l.columnNumber + ']';
13936 }
13937 }
13938 function _toString(chars, start, length) {
13939 if (typeof chars == 'string') {
13940 return chars.substr(start, length);
13941 } else {
13942 //java sax connect width xmldom on rhino(what about: "? && !(chars instanceof String)")
13943 if (chars.length >= start + length || start) {
13944 return new java.lang.String(chars, start, length) + '';
13945 }
13946 return chars;
13947 }
13948 }
13949
13950 /*
13951 * @link http://www.saxproject.org/apidoc/org/xml/sax/ext/LexicalHandler.html
13952 * used method of org.xml.sax.ext.LexicalHandler:
13953 * #comment(chars, start, length)
13954 * #startCDATA()
13955 * #endCDATA()
13956 * #startDTD(name, publicId, systemId)
13957 *
13958 *
13959 * IGNORED method of org.xml.sax.ext.LexicalHandler:
13960 * #endDTD()
13961 * #startEntity(name)
13962 * #endEntity(name)
13963 *
13964 *
13965 * @link http://www.saxproject.org/apidoc/org/xml/sax/ext/DeclHandler.html
13966 * IGNORED method of org.xml.sax.ext.DeclHandler
13967 * #attributeDecl(eName, aName, type, mode, value)
13968 * #elementDecl(name, model)
13969 * #externalEntityDecl(name, publicId, systemId)
13970 * #internalEntityDecl(name, value)
13971 * @link http://www.saxproject.org/apidoc/org/xml/sax/ext/EntityResolver2.html
13972 * IGNORED method of org.xml.sax.EntityResolver2
13973 * #resolveEntity(String name,String publicId,String baseURI,String systemId)
13974 * #resolveEntity(publicId, systemId)
13975 * #getExternalSubset(name, baseURI)
13976 * @link http://www.saxproject.org/apidoc/org/xml/sax/DTDHandler.html
13977 * IGNORED method of org.xml.sax.DTDHandler
13978 * #notationDecl(name, publicId, systemId) {};
13979 * #unparsedEntityDecl(name, publicId, systemId, notationName) {};
13980 */
13981 "endDTD,startEntity,endEntity,attributeDecl,elementDecl,externalEntityDecl,internalEntityDecl,resolveEntity,getExternalSubset,notationDecl,unparsedEntityDecl".replace(/\w+/g, function (key) {
13982 DOMHandler.prototype[key] = function () {
13983 return null;
13984 };
13985 });
13986
13987 /* Private static helpers treated below as private instance methods, so don't need to add these to the public API; we might use a Relator to also get rid of non-standard public properties */
13988 function appendElement(hander, node) {
13989 if (!hander.currentElement) {
13990 hander.doc.appendChild(node);
13991 } else {
13992 hander.currentElement.appendChild(node);
13993 }
13994 } //appendChild and setAttributeNS are preformance key
13995
  // Module wiring: XMLReader comes from the sax module, the DOM classes from
  // the dom module; DOMParser is defined above and exported alongside them.
  //if(typeof require == 'function'){
  var XMLReader = sax.XMLReader;
  var DOMImplementation = exports.DOMImplementation = dom.DOMImplementation;
  exports.XMLSerializer = dom.XMLSerializer;
  exports.DOMParser = DOMParser;
  //}
14002 });
  // Named re-exports of the xmldom commonjs module, used by the DASH parser.
  var domParser_1 = domParser.DOMImplementation;
  var domParser_2 = domParser.XMLSerializer;
  var domParser_3 = domParser.DOMParser;
14006
14007 /*! @name mpd-parser @version 0.10.0 @license Apache-2.0 */
14008
14009 var isObject = function isObject(obj) {
14010 return !!obj && typeof obj === 'object';
14011 };
14012
14013 var merge = function merge() {
14014 for (var _len = arguments.length, objects = new Array(_len), _key = 0; _key < _len; _key++) {
14015 objects[_key] = arguments[_key];
14016 }
14017
14018 return objects.reduce(function (result, source) {
14019 Object.keys(source).forEach(function (key) {
14020 if (Array.isArray(result[key]) && Array.isArray(source[key])) {
14021 result[key] = result[key].concat(source[key]);
14022 } else if (isObject(result[key]) && isObject(source[key])) {
14023 result[key] = merge(result[key], source[key]);
14024 } else {
14025 result[key] = source[key];
14026 }
14027 });
14028 return result;
14029 }, {});
14030 };
14031 var values = function values(o) {
14032 return Object.keys(o).map(function (k) {
14033 return o[k];
14034 });
14035 };
14036
14037 var range = function range(start, end) {
14038 var result = [];
14039
14040 for (var i = start; i < end; i++) {
14041 result.push(i);
14042 }
14043
14044 return result;
14045 };
14046 var flatten = function flatten(lists) {
14047 return lists.reduce(function (x, y) {
14048 return x.concat(y);
14049 }, []);
14050 };
14051 var from = function from(list) {
14052 if (!list.length) {
14053 return [];
14054 }
14055
14056 var result = [];
14057
14058 for (var i = 0; i < list.length; i++) {
14059 result.push(list[i]);
14060 }
14061
14062 return result;
14063 };
14064 var findIndexes = function findIndexes(l, key) {
14065 return l.reduce(function (a, e, i) {
14066 if (e[key]) {
14067 a.push(i);
14068 }
14069
14070 return a;
14071 }, []);
14072 };
14073
  // Error codes surfaced by the mpd-parser while converting a DASH manifest
  // into the m3u8-style object model (thrown via `new Error(errors.X)`).
  var errors = {
    INVALID_NUMBER_OF_PERIOD: 'INVALID_NUMBER_OF_PERIOD',
    DASH_EMPTY_MANIFEST: 'DASH_EMPTY_MANIFEST',
    DASH_INVALID_XML: 'DASH_INVALID_XML',
    NO_BASE_URL: 'NO_BASE_URL',
    MISSING_SEGMENT_INFORMATION: 'MISSING_SEGMENT_INFORMATION',
    SEGMENT_TIME_UNSPECIFIED: 'SEGMENT_TIME_UNSPECIFIED',
    UNSUPPORTED_UTC_TIMING_SCHEME: 'UNSUPPORTED_UTC_TIMING_SCHEME'
  };
14083
14084 /**
14085 * @typedef {Object} SingleUri
14086 * @property {string} uri - relative location of segment
14087 * @property {string} resolvedUri - resolved location of segment
14088 * @property {Object} byterange - Object containing information on how to make byte range
14089 * requests following byte-range-spec per RFC2616.
14090 * @property {String} byterange.length - length of range request
14091 * @property {String} byterange.offset - byte offset of range request
14092 *
14093 * @see https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1
14094 */
14095
14096 /**
14097 * Converts a URLType node (5.3.9.2.3 Table 13) to a segment object
14098 * that conforms to how m3u8-parser is structured
14099 *
14100 * @see https://github.com/videojs/m3u8-parser
14101 *
14102 * @param {string} baseUrl - baseUrl provided by <BaseUrl> nodes
14103 * @param {string} source - source url for segment
14104 * @param {string} range - optional range used for range calls,
14105 * follows RFC 2616, Clause 14.35.1
14106 * @return {SingleUri} full segment information transformed into a format similar
14107 * to m3u8-parser
14108 */
14109
14110 var urlTypeToSegment = function urlTypeToSegment(_ref) {
14111 var _ref$baseUrl = _ref.baseUrl,
14112 baseUrl = _ref$baseUrl === void 0 ? '' : _ref$baseUrl,
14113 _ref$source = _ref.source,
14114 source = _ref$source === void 0 ? '' : _ref$source,
14115 _ref$range = _ref.range,
14116 range = _ref$range === void 0 ? '' : _ref$range,
14117 _ref$indexRange = _ref.indexRange,
14118 indexRange = _ref$indexRange === void 0 ? '' : _ref$indexRange;
14119 var segment = {
14120 uri: source,
14121 resolvedUri: resolveUrl_1(baseUrl || '', source)
14122 };
14123
14124 if (range || indexRange) {
14125 var rangeStr = range ? range : indexRange;
14126 var ranges = rangeStr.split('-');
14127 var startRange = parseInt(ranges[0], 10);
14128 var endRange = parseInt(ranges[1], 10); // byterange should be inclusive according to
14129 // RFC 2616, Clause 14.35.1
14130
14131 segment.byterange = {
14132 length: endRange - startRange + 1,
14133 offset: startRange
14134 };
14135 }
14136
14137 return segment;
14138 };
14139 var byteRangeToString = function byteRangeToString(byterange) {
14140 // `endRange` is one less than `offset + length` because the HTTP range
14141 // header uses inclusive ranges
14142 var endRange = byterange.offset + byterange.length - 1;
14143 return byterange.offset + "-" + endRange;
14144 };
14145
14146 /**
14147 * Functions for calculating the range of available segments in static and dynamic
14148 * manifests.
14149 */
14150
14151 var segmentRange = {
14152 /**
14153 * Returns the entire range of available segments for a static MPD
14154 *
14155 * @param {Object} attributes
14156 * Inheritied MPD attributes
14157 * @return {{ start: number, end: number }}
14158 * The start and end numbers for available segments
14159 */
14160 static: function _static(attributes) {
14161 var duration = attributes.duration,
14162 _attributes$timescale = attributes.timescale,
14163 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
14164 sourceDuration = attributes.sourceDuration;
14165 return {
14166 start: 0,
14167 end: Math.ceil(sourceDuration / (duration / timescale))
14168 };
14169 },
14170
14171 /**
14172 * Returns the current live window range of available segments for a dynamic MPD
14173 *
14174 * @param {Object} attributes
14175 * Inheritied MPD attributes
14176 * @return {{ start: number, end: number }}
14177 * The start and end numbers for available segments
14178 */
14179 dynamic: function dynamic(attributes) {
14180 var NOW = attributes.NOW,
14181 clientOffset = attributes.clientOffset,
14182 availabilityStartTime = attributes.availabilityStartTime,
14183 _attributes$timescale2 = attributes.timescale,
14184 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
14185 duration = attributes.duration,
14186 _attributes$start = attributes.start,
14187 start = _attributes$start === void 0 ? 0 : _attributes$start,
14188 _attributes$minimumUp = attributes.minimumUpdatePeriod,
14189 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp,
14190 _attributes$timeShift = attributes.timeShiftBufferDepth,
14191 timeShiftBufferDepth = _attributes$timeShift === void 0 ? Infinity : _attributes$timeShift;
14192 var now = (NOW + clientOffset) / 1000;
14193 var periodStartWC = availabilityStartTime + start;
14194 var periodEndWC = now + minimumUpdatePeriod;
14195 var periodDuration = periodEndWC - periodStartWC;
14196 var segmentCount = Math.ceil(periodDuration * timescale / duration);
14197 var availableStart = Math.floor((now - periodStartWC - timeShiftBufferDepth) * timescale / duration);
14198 var availableEnd = Math.floor((now - periodStartWC) * timescale / duration);
14199 return {
14200 start: Math.max(0, availableStart),
14201 end: Math.min(segmentCount, availableEnd)
14202 };
14203 }
14204 };
14205 /**
14206 * Maps a range of numbers to objects with information needed to build the corresponding
14207 * segment list
14208 *
14209 * @name toSegmentsCallback
14210 * @function
14211 * @param {number} number
14212 * Number of the segment
14213 * @param {number} index
14214 * Index of the number in the range list
14215 * @return {{ number: Number, duration: Number, timeline: Number, time: Number }}
14216 * Object with segment timing and duration info
14217 */
14218
14219 /**
14220 * Returns a callback for Array.prototype.map for mapping a range of numbers to
14221 * information needed to build the segment list.
14222 *
14223 * @param {Object} attributes
14224 * Inherited MPD attributes
14225 * @return {toSegmentsCallback}
14226 * Callback map function
14227 */
14228
14229 var toSegments = function toSegments(attributes) {
14230 return function (number, index) {
14231 var duration = attributes.duration,
14232 _attributes$timescale3 = attributes.timescale,
14233 timescale = _attributes$timescale3 === void 0 ? 1 : _attributes$timescale3,
14234 periodIndex = attributes.periodIndex,
14235 _attributes$startNumb = attributes.startNumber,
14236 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb;
14237 return {
14238 number: startNumber + number,
14239 duration: duration / timescale,
14240 timeline: periodIndex,
14241 time: index * duration
14242 };
14243 };
14244 };
14245 /**
14246 * Returns a list of objects containing segment timing and duration info used for
14247 * building the list of segments. This uses the @duration attribute specified
14248 * in the MPD manifest to derive the range of segments.
14249 *
14250 * @param {Object} attributes
14251 * Inherited MPD attributes
14252 * @return {{number: number, duration: number, time: number, timeline: number}[]}
14253 * List of Objects with segment timing and duration info
14254 */
14255
14256 var parseByDuration = function parseByDuration(attributes) {
14257 var _attributes$type = attributes.type,
14258 type = _attributes$type === void 0 ? 'static' : _attributes$type,
14259 duration = attributes.duration,
14260 _attributes$timescale4 = attributes.timescale,
14261 timescale = _attributes$timescale4 === void 0 ? 1 : _attributes$timescale4,
14262 sourceDuration = attributes.sourceDuration;
14263
14264 var _segmentRange$type = segmentRange[type](attributes),
14265 start = _segmentRange$type.start,
14266 end = _segmentRange$type.end;
14267
14268 var segments = range(start, end).map(toSegments(attributes));
14269
14270 if (type === 'static') {
14271 var index = segments.length - 1; // final segment may be less than full segment duration
14272
14273 segments[index].duration = sourceDuration - duration / timescale * index;
14274 }
14275
14276 return segments;
14277 };
14278
14279 /**
14280 * Translates SegmentBase into a set of segments.
14281 * (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
14282 * node should be translated into segment.
14283 *
14284 * @param {Object} attributes
14285 * Object containing all inherited attributes from parent elements with attribute
14286 * names as keys
14287 * @return {Object.<Array>} list of segments
14288 */
14289
14290 var segmentsFromBase = function segmentsFromBase(attributes) {
14291 var baseUrl = attributes.baseUrl,
14292 _attributes$initializ = attributes.initialization,
14293 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ,
14294 sourceDuration = attributes.sourceDuration,
14295 _attributes$timescale = attributes.timescale,
14296 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
14297 _attributes$indexRang = attributes.indexRange,
14298 indexRange = _attributes$indexRang === void 0 ? '' : _attributes$indexRang,
14299 duration = attributes.duration; // base url is required for SegmentBase to work, per spec (Section 5.3.9.2.1)
14300
14301 if (!baseUrl) {
14302 throw new Error(errors.NO_BASE_URL);
14303 }
14304
14305 var initSegment = urlTypeToSegment({
14306 baseUrl: baseUrl,
14307 source: initialization.sourceURL,
14308 range: initialization.range
14309 });
14310 var segment = urlTypeToSegment({
14311 baseUrl: baseUrl,
14312 source: baseUrl,
14313 indexRange: indexRange
14314 });
14315 segment.map = initSegment; // If there is a duration, use it, otherwise use the given duration of the source
14316 // (since SegmentBase is only for one total segment)
14317
14318 if (duration) {
14319 var segmentTimeInfo = parseByDuration(attributes);
14320
14321 if (segmentTimeInfo.length) {
14322 segment.duration = segmentTimeInfo[0].duration;
14323 segment.timeline = segmentTimeInfo[0].timeline;
14324 }
14325 } else if (sourceDuration) {
14326 segment.duration = sourceDuration / timescale;
14327 segment.timeline = 0;
14328 } // This is used for mediaSequence
14329
14330
14331 segment.number = 0;
14332 return [segment];
14333 };
14334 /**
14335 * Given a playlist, a sidx box, and a baseUrl, update the segment list of the playlist
14336 * according to the sidx information given.
14337 *
   * playlist.sidx has metadata about the sidx, whereas the sidx param
14339 * is the parsed sidx box itself.
14340 *
14341 * @param {Object} playlist the playlist to update the sidx information for
14342 * @param {Object} sidx the parsed sidx box
14343 * @return {Object} the playlist object with the updated sidx information
14344 */
14345
14346 var addSegmentsToPlaylist = function addSegmentsToPlaylist(playlist, sidx, baseUrl) {
14347 // Retain init segment information
14348 var initSegment = playlist.sidx.map ? playlist.sidx.map : null; // Retain source duration from initial master manifest parsing
14349
14350 var sourceDuration = playlist.sidx.duration; // Retain source timeline
14351
14352 var timeline = playlist.timeline || 0;
14353 var sidxByteRange = playlist.sidx.byterange;
14354 var sidxEnd = sidxByteRange.offset + sidxByteRange.length; // Retain timescale of the parsed sidx
14355
14356 var timescale = sidx.timescale; // referenceType 1 refers to other sidx boxes
14357
14358 var mediaReferences = sidx.references.filter(function (r) {
14359 return r.referenceType !== 1;
14360 });
14361 var segments = []; // firstOffset is the offset from the end of the sidx box
14362
14363 var startIndex = sidxEnd + sidx.firstOffset;
14364
14365 for (var i = 0; i < mediaReferences.length; i++) {
14366 var reference = sidx.references[i]; // size of the referenced (sub)segment
14367
14368 var size = reference.referencedSize; // duration of the referenced (sub)segment, in the timescale
14369 // this will be converted to seconds when generating segments
14370
14371 var duration = reference.subsegmentDuration; // should be an inclusive range
14372
14373 var endIndex = startIndex + size - 1;
14374 var indexRange = startIndex + "-" + endIndex;
14375 var attributes = {
14376 baseUrl: baseUrl,
14377 timescale: timescale,
14378 timeline: timeline,
14379 // this is used in parseByDuration
14380 periodIndex: timeline,
14381 duration: duration,
14382 sourceDuration: sourceDuration,
14383 indexRange: indexRange
14384 };
14385 var segment = segmentsFromBase(attributes)[0];
14386
14387 if (initSegment) {
14388 segment.map = initSegment;
14389 }
14390
14391 segments.push(segment);
14392 startIndex += size;
14393 }
14394
14395 playlist.segments = segments;
14396 return playlist;
14397 };
14398
14399 var mergeDiscontiguousPlaylists = function mergeDiscontiguousPlaylists(playlists) {
14400 var mergedPlaylists = values(playlists.reduce(function (acc, playlist) {
14401 // assuming playlist IDs are the same across periods
14402 // TODO: handle multiperiod where representation sets are not the same
14403 // across periods
14404 var name = playlist.attributes.id + (playlist.attributes.lang || ''); // Periods after first
14405
14406 if (acc[name]) {
14407 var _acc$name$segments;
14408
14409 // first segment of subsequent periods signal a discontinuity
14410 if (playlist.segments[0]) {
14411 playlist.segments[0].discontinuity = true;
14412 }
14413
14414 (_acc$name$segments = acc[name].segments).push.apply(_acc$name$segments, playlist.segments); // bubble up contentProtection, this assumes all DRM content
14415 // has the same contentProtection
14416
14417
14418 if (playlist.attributes.contentProtection) {
14419 acc[name].attributes.contentProtection = playlist.attributes.contentProtection;
14420 }
14421 } else {
14422 // first Period
14423 acc[name] = playlist;
14424 }
14425
14426 return acc;
14427 }, {}));
14428 return mergedPlaylists.map(function (playlist) {
14429 playlist.discontinuityStarts = findIndexes(playlist.segments, 'discontinuity');
14430 return playlist;
14431 });
14432 };
14433
14434 var addSegmentInfoFromSidx = function addSegmentInfoFromSidx(playlists, sidxMapping) {
14435 if (sidxMapping === void 0) {
14436 sidxMapping = {};
14437 }
14438
14439 if (!Object.keys(sidxMapping).length) {
14440 return playlists;
14441 }
14442
14443 for (var i in playlists) {
14444 var playlist = playlists[i];
14445
14446 if (!playlist.sidx) {
14447 continue;
14448 }
14449
14450 var sidxKey = playlist.sidx.uri + '-' + byteRangeToString(playlist.sidx.byterange);
14451 var sidxMatch = sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx;
14452
14453 if (playlist.sidx && sidxMatch) {
14454 addSegmentsToPlaylist(playlist, sidxMatch, playlist.sidx.resolvedUri);
14455 }
14456 }
14457
14458 return playlists;
14459 };
14460
14461 var formatAudioPlaylist = function formatAudioPlaylist(_ref) {
14462 var _attributes;
14463
14464 var attributes = _ref.attributes,
14465 segments = _ref.segments,
14466 sidx = _ref.sidx;
14467 var playlist = {
14468 attributes: (_attributes = {
14469 NAME: attributes.id,
14470 BANDWIDTH: attributes.bandwidth,
14471 CODECS: attributes.codecs
14472 }, _attributes['PROGRAM-ID'] = 1, _attributes),
14473 uri: '',
14474 endList: (attributes.type || 'static') === 'static',
14475 timeline: attributes.periodIndex,
14476 resolvedUri: '',
14477 targetDuration: attributes.duration,
14478 segments: segments,
14479 mediaSequence: segments.length ? segments[0].number : 1
14480 };
14481
14482 if (attributes.contentProtection) {
14483 playlist.contentProtection = attributes.contentProtection;
14484 }
14485
14486 if (sidx) {
14487 playlist.sidx = sidx;
14488 }
14489
14490 return playlist;
14491 };
14492 var formatVttPlaylist = function formatVttPlaylist(_ref2) {
14493 var _attributes2;
14494
14495 var attributes = _ref2.attributes,
14496 segments = _ref2.segments;
14497
14498 if (typeof segments === 'undefined') {
14499 // vtt tracks may use single file in BaseURL
14500 segments = [{
14501 uri: attributes.baseUrl,
14502 timeline: attributes.periodIndex,
14503 resolvedUri: attributes.baseUrl || '',
14504 duration: attributes.sourceDuration,
14505 number: 0
14506 }]; // targetDuration should be the same duration as the only segment
14507
14508 attributes.duration = attributes.sourceDuration;
14509 }
14510
14511 return {
14512 attributes: (_attributes2 = {
14513 NAME: attributes.id,
14514 BANDWIDTH: attributes.bandwidth
14515 }, _attributes2['PROGRAM-ID'] = 1, _attributes2),
14516 uri: '',
14517 endList: (attributes.type || 'static') === 'static',
14518 timeline: attributes.periodIndex,
14519 resolvedUri: attributes.baseUrl || '',
14520 targetDuration: attributes.duration,
14521 segments: segments,
14522 mediaSequence: segments.length ? segments[0].number : 1
14523 };
14524 };
14525 var organizeAudioPlaylists = function organizeAudioPlaylists(playlists, sidxMapping) {
14526 if (sidxMapping === void 0) {
14527 sidxMapping = {};
14528 }
14529
14530 var mainPlaylist;
14531 var formattedPlaylists = playlists.reduce(function (a, playlist) {
14532 var role = playlist.attributes.role && playlist.attributes.role.value || '';
14533 var language = playlist.attributes.lang || '';
14534 var label = 'main';
14535
14536 if (language) {
14537 var roleLabel = role ? " (" + role + ")" : '';
14538 label = "" + playlist.attributes.lang + roleLabel;
14539 } // skip if we already have the highest quality audio for a language
14540
14541
14542 if (a[label] && a[label].playlists[0].attributes.BANDWIDTH > playlist.attributes.bandwidth) {
14543 return a;
14544 }
14545
14546 a[label] = {
14547 language: language,
14548 autoselect: true,
14549 default: role === 'main',
14550 playlists: addSegmentInfoFromSidx([formatAudioPlaylist(playlist)], sidxMapping),
14551 uri: ''
14552 };
14553
14554 if (typeof mainPlaylist === 'undefined' && role === 'main') {
14555 mainPlaylist = playlist;
14556 mainPlaylist.default = true;
14557 }
14558
14559 return a;
14560 }, {}); // if no playlists have role "main", mark the first as main
14561
14562 if (!mainPlaylist) {
14563 var firstLabel = Object.keys(formattedPlaylists)[0];
14564 formattedPlaylists[firstLabel].default = true;
14565 }
14566
14567 return formattedPlaylists;
14568 };
14569 var organizeVttPlaylists = function organizeVttPlaylists(playlists, sidxMapping) {
14570 if (sidxMapping === void 0) {
14571 sidxMapping = {};
14572 }
14573
14574 return playlists.reduce(function (a, playlist) {
14575 var label = playlist.attributes.lang || 'text'; // skip if we already have subtitles
14576
14577 if (a[label]) {
14578 return a;
14579 }
14580
14581 a[label] = {
14582 language: label,
14583 default: false,
14584 autoselect: false,
14585 playlists: addSegmentInfoFromSidx([formatVttPlaylist(playlist)], sidxMapping),
14586 uri: ''
14587 };
14588 return a;
14589 }, {});
14590 };
14591 var formatVideoPlaylist = function formatVideoPlaylist(_ref3) {
14592 var _attributes3;
14593
14594 var attributes = _ref3.attributes,
14595 segments = _ref3.segments,
14596 sidx = _ref3.sidx;
14597 var playlist = {
14598 attributes: (_attributes3 = {
14599 NAME: attributes.id,
14600 AUDIO: 'audio',
14601 SUBTITLES: 'subs',
14602 RESOLUTION: {
14603 width: attributes.width,
14604 height: attributes.height
14605 },
14606 CODECS: attributes.codecs,
14607 BANDWIDTH: attributes.bandwidth
14608 }, _attributes3['PROGRAM-ID'] = 1, _attributes3),
14609 uri: '',
14610 endList: (attributes.type || 'static') === 'static',
14611 timeline: attributes.periodIndex,
14612 resolvedUri: '',
14613 targetDuration: attributes.duration,
14614 segments: segments,
14615 mediaSequence: segments.length ? segments[0].number : 1
14616 };
14617
14618 if (attributes.contentProtection) {
14619 playlist.contentProtection = attributes.contentProtection;
14620 }
14621
14622 if (sidx) {
14623 playlist.sidx = sidx;
14624 }
14625
14626 return playlist;
14627 };
  /**
   * Converts the flat list of DASH playlists into an m3u8-parser style master
   * playlist object: video renditions become `playlists`, audio and vtt
   * renditions become AUDIO / SUBTITLES media groups.
   *
   * @param {Object[]} dashPlaylists - playlists derived from the MPD
   * @param {Object} [sidxMapping] - parsed sidx boxes keyed by "uri-byterange"
   * @return {Object} m3u8-style master object ({} when no playlists)
   */
  var toM3u8 = function toM3u8(dashPlaylists, sidxMapping) {
    var _mediaGroups;

    if (sidxMapping === void 0) {
      sidxMapping = {};
    }

    if (!dashPlaylists.length) {
      return {};
    } // grab all master attributes


    var _dashPlaylists$0$attr = dashPlaylists[0].attributes,
        duration = _dashPlaylists$0$attr.sourceDuration,
        _dashPlaylists$0$attr2 = _dashPlaylists$0$attr.type,
        type = _dashPlaylists$0$attr2 === void 0 ? 'static' : _dashPlaylists$0$attr2,
        suggestedPresentationDelay = _dashPlaylists$0$attr.suggestedPresentationDelay,
        _dashPlaylists$0$attr3 = _dashPlaylists$0$attr.minimumUpdatePeriod,
        minimumUpdatePeriod = _dashPlaylists$0$attr3 === void 0 ? 0 : _dashPlaylists$0$attr3;

    // content-type predicates: match on mimeType first, contentType second
    var videoOnly = function videoOnly(_ref4) {
      var attributes = _ref4.attributes;
      return attributes.mimeType === 'video/mp4' || attributes.contentType === 'video';
    };

    var audioOnly = function audioOnly(_ref5) {
      var attributes = _ref5.attributes;
      return attributes.mimeType === 'audio/mp4' || attributes.contentType === 'audio';
    };

    var vttOnly = function vttOnly(_ref6) {
      var attributes = _ref6.attributes;
      return attributes.mimeType === 'text/vtt' || attributes.contentType === 'text';
    };

    var videoPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(videoOnly)).map(formatVideoPlaylist);
    var audioPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(audioOnly));
    var vttPlaylists = dashPlaylists.filter(vttOnly);
    var master = {
      allowCache: true,
      discontinuityStarts: [],
      segments: [],
      endList: true,
      mediaGroups: (_mediaGroups = {
        AUDIO: {},
        VIDEO: {}
      }, _mediaGroups['CLOSED-CAPTIONS'] = {}, _mediaGroups.SUBTITLES = {}, _mediaGroups),
      uri: '',
      duration: duration,
      playlists: addSegmentInfoFromSidx(videoPlaylists, sidxMapping),
      // seconds in the MPD, milliseconds in the master object
      minimumUpdatePeriod: minimumUpdatePeriod * 1000
    };

    // only dynamic (live) presentations carry a suggestedPresentationDelay
    if (type === 'dynamic') {
      master.suggestedPresentationDelay = suggestedPresentationDelay;
    }

    if (audioPlaylists.length) {
      master.mediaGroups.AUDIO.audio = organizeAudioPlaylists(audioPlaylists, sidxMapping);
    }

    if (vttPlaylists.length) {
      master.mediaGroups.SUBTITLES.subs = organizeVttPlaylists(vttPlaylists, sidxMapping);
    }

    return master;
  };
14695
14696 /**
14697 * Calculates the R (repetition) value for a live stream (for the final segment
14698 * in a manifest where the r value is negative 1)
14699 *
14700 * @param {Object} attributes
14701 * Object containing all inherited attributes from parent elements with attribute
14702 * names as keys
14703 * @param {number} time
14704 * current time (typically the total time up until the final segment)
14705 * @param {number} duration
14706 * duration property for the given <S />
14707 *
14708 * @return {number}
14709 * R value to reach the end of the given period
14710 */
14711 var getLiveRValue = function getLiveRValue(attributes, time, duration) {
14712 var NOW = attributes.NOW,
14713 clientOffset = attributes.clientOffset,
14714 availabilityStartTime = attributes.availabilityStartTime,
14715 _attributes$timescale = attributes.timescale,
14716 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
14717 _attributes$start = attributes.start,
14718 start = _attributes$start === void 0 ? 0 : _attributes$start,
14719 _attributes$minimumUp = attributes.minimumUpdatePeriod,
14720 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp;
14721 var now = (NOW + clientOffset) / 1000;
14722 var periodStartWC = availabilityStartTime + start;
14723 var periodEndWC = now + minimumUpdatePeriod;
14724 var periodDuration = periodEndWC - periodStartWC;
14725 return Math.ceil((periodDuration * timescale - time) / duration);
14726 };
14727 /**
14728 * Uses information provided by SegmentTemplate.SegmentTimeline to determine segment
14729 * timing and duration
14730 *
14731 * @param {Object} attributes
14732 * Object containing all inherited attributes from parent elements with attribute
14733 * names as keys
14734 * @param {Object[]} segmentTimeline
14735 * List of objects representing the attributes of each S element contained within
14736 *
14737 * @return {{number: number, duration: number, time: number, timeline: number}[]}
14738 * List of Objects with segment timing and duration info
14739 */
14740
14741 var parseByTimeline = function parseByTimeline(attributes, segmentTimeline) {
14742 var _attributes$type = attributes.type,
14743 type = _attributes$type === void 0 ? 'static' : _attributes$type,
14744 _attributes$minimumUp2 = attributes.minimumUpdatePeriod,
14745 minimumUpdatePeriod = _attributes$minimumUp2 === void 0 ? 0 : _attributes$minimumUp2,
14746 _attributes$media = attributes.media,
14747 media = _attributes$media === void 0 ? '' : _attributes$media,
14748 sourceDuration = attributes.sourceDuration,
14749 _attributes$timescale2 = attributes.timescale,
14750 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
14751 _attributes$startNumb = attributes.startNumber,
14752 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb,
14753 timeline = attributes.periodIndex;
14754 var segments = [];
14755 var time = -1;
14756
14757 for (var sIndex = 0; sIndex < segmentTimeline.length; sIndex++) {
14758 var S = segmentTimeline[sIndex];
14759 var duration = S.d;
14760 var repeat = S.r || 0;
14761 var segmentTime = S.t || 0;
14762
14763 if (time < 0) {
14764 // first segment
14765 time = segmentTime;
14766 }
14767
14768 if (segmentTime && segmentTime > time) {
14769 // discontinuity
14770 // TODO: How to handle this type of discontinuity
14771 // timeline++ here would treat it like HLS discontuity and content would
14772 // get appended without gap
14773 // E.G.
14774 // <S t="0" d="1" />
14775 // <S d="1" />
14776 // <S d="1" />
14777 // <S t="5" d="1" />
14778 // would have $Time$ values of [0, 1, 2, 5]
14779 // should this be appened at time positions [0, 1, 2, 3],(#EXT-X-DISCONTINUITY)
14780 // or [0, 1, 2, gap, gap, 5]? (#EXT-X-GAP)
14781 // does the value of sourceDuration consider this when calculating arbitrary
14782 // negative @r repeat value?
14783 // E.G. Same elements as above with this added at the end
14784 // <S d="1" r="-1" />
14785 // with a sourceDuration of 10
14786 // Would the 2 gaps be included in the time duration calculations resulting in
14787 // 8 segments with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9] or 10 segments
14788 // with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9, 10, 11] ?
14789 time = segmentTime;
14790 }
14791
14792 var count = void 0;
14793
14794 if (repeat < 0) {
14795 var nextS = sIndex + 1;
14796
14797 if (nextS === segmentTimeline.length) {
14798 // last segment
14799 if (type === 'dynamic' && minimumUpdatePeriod > 0 && media.indexOf('$Number$') > 0) {
14800 count = getLiveRValue(attributes, time, duration);
14801 } else {
14802 // TODO: This may be incorrect depending on conclusion of TODO above
14803 count = (sourceDuration * timescale - time) / duration;
14804 }
14805 } else {
14806 count = (segmentTimeline[nextS].t - time) / duration;
14807 }
14808 } else {
14809 count = repeat + 1;
14810 }
14811
14812 var end = startNumber + segments.length + count;
14813 var number = startNumber + segments.length;
14814
14815 while (number < end) {
14816 segments.push({
14817 number: number,
14818 duration: duration / timescale,
14819 time: time,
14820 timeline: timeline
14821 });
14822 time += duration;
14823 number++;
14824 }
14825 }
14826
14827 return segments;
14828 };
14829
14830 var identifierPattern = /\$([A-z]*)(?:(%0)([0-9]+)d)?\$/g;
14831 /**
14832 * Replaces template identifiers with corresponding values. To be used as the callback
14833 * for String.prototype.replace
14834 *
14835 * @name replaceCallback
14836 * @function
14837 * @param {string} match
14838 * Entire match of identifier
14839 * @param {string} identifier
14840 * Name of matched identifier
14841 * @param {string} format
14842 * Format tag string. Its presence indicates that padding is expected
14843 * @param {string} width
14844 * Desired length of the replaced value. Values less than this width shall be left
14845 * zero padded
14846 * @return {string}
14847 * Replacement for the matched identifier
14848 */
14849
14850 /**
14851 * Returns a function to be used as a callback for String.prototype.replace to replace
14852 * template identifiers
14853 *
14854 * @param {Obect} values
14855 * Object containing values that shall be used to replace known identifiers
14856 * @param {number} values.RepresentationID
14857 * Value of the Representation@id attribute
14858 * @param {number} values.Number
14859 * Number of the corresponding segment
14860 * @param {number} values.Bandwidth
14861 * Value of the Representation@bandwidth attribute.
14862 * @param {number} values.Time
14863 * Timestamp value of the corresponding segment
14864 * @return {replaceCallback}
14865 * Callback to be used with String.prototype.replace to replace identifiers
14866 */
14867
14868 var identifierReplacement = function identifierReplacement(values) {
14869 return function (match, identifier, format, width) {
14870 if (match === '$$') {
14871 // escape sequence
14872 return '$';
14873 }
14874
14875 if (typeof values[identifier] === 'undefined') {
14876 return match;
14877 }
14878
14879 var value = '' + values[identifier];
14880
14881 if (identifier === 'RepresentationID') {
14882 // Format tag shall not be present with RepresentationID
14883 return value;
14884 }
14885
14886 if (!format) {
14887 width = 1;
14888 } else {
14889 width = parseInt(width, 10);
14890 }
14891
14892 if (value.length >= width) {
14893 return value;
14894 }
14895
14896 return "" + new Array(width - value.length + 1).join('0') + value;
14897 };
14898 };
14899 /**
14900 * Constructs a segment url from a template string
14901 *
14902 * @param {string} url
14903 * Template string to construct url from
14904 * @param {Obect} values
14905 * Object containing values that shall be used to replace known identifiers
14906 * @param {number} values.RepresentationID
14907 * Value of the Representation@id attribute
14908 * @param {number} values.Number
14909 * Number of the corresponding segment
14910 * @param {number} values.Bandwidth
14911 * Value of the Representation@bandwidth attribute.
14912 * @param {number} values.Time
14913 * Timestamp value of the corresponding segment
14914 * @return {string}
14915 * Segment url with identifiers replaced
14916 */
14917
14918 var constructTemplateUrl = function constructTemplateUrl(url, values) {
14919 return url.replace(identifierPattern, identifierReplacement(values));
14920 };
14921 /**
14922 * Generates a list of objects containing timing and duration information about each
14923 * segment needed to generate segment uris and the complete segment object
14924 *
14925 * @param {Object} attributes
14926 * Object containing all inherited attributes from parent elements with attribute
14927 * names as keys
14928 * @param {Object[]|undefined} segmentTimeline
14929 * List of objects representing the attributes of each S element contained within
14930 * the SegmentTimeline element
14931 * @return {{number: number, duration: number, time: number, timeline: number}[]}
14932 * List of Objects with segment timing and duration info
14933 */
14934
14935 var parseTemplateInfo = function parseTemplateInfo(attributes, segmentTimeline) {
14936 if (!attributes.duration && !segmentTimeline) {
14937 // if neither @duration or SegmentTimeline are present, then there shall be exactly
14938 // one media segment
14939 return [{
14940 number: attributes.startNumber || 1,
14941 duration: attributes.sourceDuration,
14942 time: 0,
14943 timeline: attributes.periodIndex
14944 }];
14945 }
14946
14947 if (attributes.duration) {
14948 return parseByDuration(attributes);
14949 }
14950
14951 return parseByTimeline(attributes, segmentTimeline);
14952 };
14953 /**
14954 * Generates a list of segments using information provided by the SegmentTemplate element
14955 *
14956 * @param {Object} attributes
14957 * Object containing all inherited attributes from parent elements with attribute
14958 * names as keys
14959 * @param {Object[]|undefined} segmentTimeline
14960 * List of objects representing the attributes of each S element contained within
14961 * the SegmentTimeline element
14962 * @return {Object[]}
14963 * List of segment objects
14964 */
14965
14966 var segmentsFromTemplate = function segmentsFromTemplate(attributes, segmentTimeline) {
14967 var templateValues = {
14968 RepresentationID: attributes.id,
14969 Bandwidth: attributes.bandwidth || 0
14970 };
14971 var _attributes$initializ = attributes.initialization,
14972 initialization = _attributes$initializ === void 0 ? {
14973 sourceURL: '',
14974 range: ''
14975 } : _attributes$initializ;
14976 var mapSegment = urlTypeToSegment({
14977 baseUrl: attributes.baseUrl,
14978 source: constructTemplateUrl(initialization.sourceURL, templateValues),
14979 range: initialization.range
14980 });
14981 var segments = parseTemplateInfo(attributes, segmentTimeline);
14982 return segments.map(function (segment) {
14983 templateValues.Number = segment.number;
14984 templateValues.Time = segment.time;
14985 var uri = constructTemplateUrl(attributes.media || '', templateValues);
14986 return {
14987 uri: uri,
14988 timeline: segment.timeline,
14989 duration: segment.duration,
14990 resolvedUri: resolveUrl_1(attributes.baseUrl || '', uri),
14991 map: mapSegment,
14992 number: segment.number
14993 };
14994 });
14995 };
14996
14997 /**
14998 * Converts a <SegmentUrl> (of type URLType from the DASH spec 5.3.9.2 Table 14)
14999 * to an object that matches the output of a segment in videojs/mpd-parser
15000 *
15001 * @param {Object} attributes
15002 * Object containing all inherited attributes from parent elements with attribute
15003 * names as keys
15004 * @param {Object} segmentUrl
15005 * <SegmentURL> node to translate into a segment object
15006 * @return {Object} translated segment object
15007 */
15008
15009 var SegmentURLToSegmentObject = function SegmentURLToSegmentObject(attributes, segmentUrl) {
15010 var baseUrl = attributes.baseUrl,
15011 _attributes$initializ = attributes.initialization,
15012 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ;
15013 var initSegment = urlTypeToSegment({
15014 baseUrl: baseUrl,
15015 source: initialization.sourceURL,
15016 range: initialization.range
15017 });
15018 var segment = urlTypeToSegment({
15019 baseUrl: baseUrl,
15020 source: segmentUrl.media,
15021 range: segmentUrl.mediaRange
15022 });
15023 segment.map = initSegment;
15024 return segment;
15025 };
15026 /**
15027 * Generates a list of segments using information provided by the SegmentList element
15028 * SegmentList (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
15029 * node should be translated into segment.
15030 *
15031 * @param {Object} attributes
15032 * Object containing all inherited attributes from parent elements with attribute
15033 * names as keys
15034 * @param {Object[]|undefined} segmentTimeline
15035 * List of objects representing the attributes of each S element contained within
15036 * the SegmentTimeline element
15037 * @return {Object.<Array>} list of segments
15038 */
15039
15040 var segmentsFromList = function segmentsFromList(attributes, segmentTimeline) {
15041 var duration = attributes.duration,
15042 _attributes$segmentUr = attributes.segmentUrls,
15043 segmentUrls = _attributes$segmentUr === void 0 ? [] : _attributes$segmentUr; // Per spec (5.3.9.2.1) no way to determine segment duration OR
15044 // if both SegmentTimeline and @duration are defined, it is outside of spec.
15045
15046 if (!duration && !segmentTimeline || duration && segmentTimeline) {
15047 throw new Error(errors.SEGMENT_TIME_UNSPECIFIED);
15048 }
15049
15050 var segmentUrlMap = segmentUrls.map(function (segmentUrlObject) {
15051 return SegmentURLToSegmentObject(attributes, segmentUrlObject);
15052 });
15053 var segmentTimeInfo;
15054
15055 if (duration) {
15056 segmentTimeInfo = parseByDuration(attributes);
15057 }
15058
15059 if (segmentTimeline) {
15060 segmentTimeInfo = parseByTimeline(attributes, segmentTimeline);
15061 }
15062
15063 var segments = segmentTimeInfo.map(function (segmentTime, index) {
15064 if (segmentUrlMap[index]) {
15065 var segment = segmentUrlMap[index];
15066 segment.timeline = segmentTime.timeline;
15067 segment.duration = segmentTime.duration;
15068 segment.number = segmentTime.number;
15069 return segment;
15070 } // Since we're mapping we should get rid of any blank segments (in case
15071 // the given SegmentTimeline is handling for more elements than we have
15072 // SegmentURLs for).
15073 }).filter(function (segment) {
15074 return segment;
15075 });
15076 return segments;
15077 };
15078
15079 var generateSegments = function generateSegments(_ref) {
15080 var attributes = _ref.attributes,
15081 segmentInfo = _ref.segmentInfo;
15082 var segmentAttributes;
15083 var segmentsFn;
15084
15085 if (segmentInfo.template) {
15086 segmentsFn = segmentsFromTemplate;
15087 segmentAttributes = merge(attributes, segmentInfo.template);
15088 } else if (segmentInfo.base) {
15089 segmentsFn = segmentsFromBase;
15090 segmentAttributes = merge(attributes, segmentInfo.base);
15091 } else if (segmentInfo.list) {
15092 segmentsFn = segmentsFromList;
15093 segmentAttributes = merge(attributes, segmentInfo.list);
15094 }
15095
15096 var segmentsInfo = {
15097 attributes: attributes
15098 };
15099
15100 if (!segmentsFn) {
15101 return segmentsInfo;
15102 }
15103
15104 var segments = segmentsFn(segmentAttributes, segmentInfo.timeline); // The @duration attribute will be used to determin the playlist's targetDuration which
15105 // must be in seconds. Since we've generated the segment list, we no longer need
15106 // @duration to be in @timescale units, so we can convert it here.
15107
15108 if (segmentAttributes.duration) {
15109 var _segmentAttributes = segmentAttributes,
15110 duration = _segmentAttributes.duration,
15111 _segmentAttributes$ti = _segmentAttributes.timescale,
15112 timescale = _segmentAttributes$ti === void 0 ? 1 : _segmentAttributes$ti;
15113 segmentAttributes.duration = duration / timescale;
15114 } else if (segments.length) {
15115 // if there is no @duration attribute, use the largest segment duration as
15116 // as target duration
15117 segmentAttributes.duration = segments.reduce(function (max, segment) {
15118 return Math.max(max, Math.ceil(segment.duration));
15119 }, 0);
15120 } else {
15121 segmentAttributes.duration = 0;
15122 }
15123
15124 segmentsInfo.attributes = segmentAttributes;
15125 segmentsInfo.segments = segments; // This is a sidx box without actual segment information
15126
15127 if (segmentInfo.base && segmentAttributes.indexRange) {
15128 segmentsInfo.sidx = segments[0];
15129 segmentsInfo.segments = [];
15130 }
15131
15132 return segmentsInfo;
15133 };
15134 var toPlaylists = function toPlaylists(representations) {
15135 return representations.map(generateSegments);
15136 };
15137
15138 var findChildren = function findChildren(element, name) {
15139 return from(element.childNodes).filter(function (_ref) {
15140 var tagName = _ref.tagName;
15141 return tagName === name;
15142 });
15143 };
15144 var getContent = function getContent(element) {
15145 return element.textContent.trim();
15146 };
15147
15148 var parseDuration = function parseDuration(str) {
15149 var SECONDS_IN_YEAR = 365 * 24 * 60 * 60;
15150 var SECONDS_IN_MONTH = 30 * 24 * 60 * 60;
15151 var SECONDS_IN_DAY = 24 * 60 * 60;
15152 var SECONDS_IN_HOUR = 60 * 60;
15153 var SECONDS_IN_MIN = 60; // P10Y10M10DT10H10M10.1S
15154
15155 var durationRegex = /P(?:(\d*)Y)?(?:(\d*)M)?(?:(\d*)D)?(?:T(?:(\d*)H)?(?:(\d*)M)?(?:([\d.]*)S)?)?/;
15156 var match = durationRegex.exec(str);
15157
15158 if (!match) {
15159 return 0;
15160 }
15161
15162 var _match$slice = match.slice(1),
15163 year = _match$slice[0],
15164 month = _match$slice[1],
15165 day = _match$slice[2],
15166 hour = _match$slice[3],
15167 minute = _match$slice[4],
15168 second = _match$slice[5];
15169
15170 return parseFloat(year || 0) * SECONDS_IN_YEAR + parseFloat(month || 0) * SECONDS_IN_MONTH + parseFloat(day || 0) * SECONDS_IN_DAY + parseFloat(hour || 0) * SECONDS_IN_HOUR + parseFloat(minute || 0) * SECONDS_IN_MIN + parseFloat(second || 0);
15171 };
15172 var parseDate = function parseDate(str) {
15173 // Date format without timezone according to ISO 8601
15174 // YYY-MM-DDThh:mm:ss.ssssss
15175 var dateRegex = /^\d+-\d+-\d+T\d+:\d+:\d+(\.\d+)?$/; // If the date string does not specifiy a timezone, we must specifiy UTC. This is
15176 // expressed by ending with 'Z'
15177
15178 if (dateRegex.test(str)) {
15179 str += 'Z';
15180 }
15181
15182 return Date.parse(str);
15183 };
15184
15185 var parsers = {
15186 /**
15187 * Specifies the duration of the entire Media Presentation. Format is a duration string
15188 * as specified in ISO 8601
15189 *
15190 * @param {string} value
15191 * value of attribute as a string
15192 * @return {number}
15193 * The duration in seconds
15194 */
15195 mediaPresentationDuration: function mediaPresentationDuration(value) {
15196 return parseDuration(value);
15197 },
15198
15199 /**
15200 * Specifies the Segment availability start time for all Segments referred to in this
15201 * MPD. For a dynamic manifest, it specifies the anchor for the earliest availability
15202 * time. Format is a date string as specified in ISO 8601
15203 *
15204 * @param {string} value
15205 * value of attribute as a string
15206 * @return {number}
15207 * The date as seconds from unix epoch
15208 */
15209 availabilityStartTime: function availabilityStartTime(value) {
15210 return parseDate(value) / 1000;
15211 },
15212
15213 /**
15214 * Specifies the smallest period between potential changes to the MPD. Format is a
15215 * duration string as specified in ISO 8601
15216 *
15217 * @param {string} value
15218 * value of attribute as a string
15219 * @return {number}
15220 * The duration in seconds
15221 */
15222 minimumUpdatePeriod: function minimumUpdatePeriod(value) {
15223 return parseDuration(value);
15224 },
15225
15226 /**
15227 * Specifies the suggested presentation delay. Format is a
15228 * duration string as specified in ISO 8601
15229 *
15230 * @param {string} value
15231 * value of attribute as a string
15232 * @return {number}
15233 * The duration in seconds
15234 */
15235 suggestedPresentationDelay: function suggestedPresentationDelay(value) {
15236 return parseDuration(value);
15237 },
15238
15239 /**
15240 * specifices the type of mpd. Can be either "static" or "dynamic"
15241 *
15242 * @param {string} value
15243 * value of attribute as a string
15244 *
15245 * @return {string}
15246 * The type as a string
15247 */
15248 type: function type(value) {
15249 return value;
15250 },
15251
15252 /**
15253 * Specifies the duration of the smallest time shifting buffer for any Representation
15254 * in the MPD. Format is a duration string as specified in ISO 8601
15255 *
15256 * @param {string} value
15257 * value of attribute as a string
15258 * @return {number}
15259 * The duration in seconds
15260 */
15261 timeShiftBufferDepth: function timeShiftBufferDepth(value) {
15262 return parseDuration(value);
15263 },
15264
15265 /**
15266 * Specifies the PeriodStart time of the Period relative to the availabilityStarttime.
15267 * Format is a duration string as specified in ISO 8601
15268 *
15269 * @param {string} value
15270 * value of attribute as a string
15271 * @return {number}
15272 * The duration in seconds
15273 */
15274 start: function start(value) {
15275 return parseDuration(value);
15276 },
15277
15278 /**
15279 * Specifies the width of the visual presentation
15280 *
15281 * @param {string} value
15282 * value of attribute as a string
15283 * @return {number}
15284 * The parsed width
15285 */
15286 width: function width(value) {
15287 return parseInt(value, 10);
15288 },
15289
15290 /**
15291 * Specifies the height of the visual presentation
15292 *
15293 * @param {string} value
15294 * value of attribute as a string
15295 * @return {number}
15296 * The parsed height
15297 */
15298 height: function height(value) {
15299 return parseInt(value, 10);
15300 },
15301
15302 /**
15303 * Specifies the bitrate of the representation
15304 *
15305 * @param {string} value
15306 * value of attribute as a string
15307 * @return {number}
15308 * The parsed bandwidth
15309 */
15310 bandwidth: function bandwidth(value) {
15311 return parseInt(value, 10);
15312 },
15313
15314 /**
15315 * Specifies the number of the first Media Segment in this Representation in the Period
15316 *
15317 * @param {string} value
15318 * value of attribute as a string
15319 * @return {number}
15320 * The parsed number
15321 */
15322 startNumber: function startNumber(value) {
15323 return parseInt(value, 10);
15324 },
15325
15326 /**
15327 * Specifies the timescale in units per seconds
15328 *
15329 * @param {string} value
15330 * value of attribute as a string
15331 * @return {number}
15332 * The aprsed timescale
15333 */
15334 timescale: function timescale(value) {
15335 return parseInt(value, 10);
15336 },
15337
15338 /**
15339 * Specifies the constant approximate Segment duration
15340 * NOTE: The <Period> element also contains an @duration attribute. This duration
15341 * specifies the duration of the Period. This attribute is currently not
15342 * supported by the rest of the parser, however we still check for it to prevent
15343 * errors.
15344 *
15345 * @param {string} value
15346 * value of attribute as a string
15347 * @return {number}
15348 * The parsed duration
15349 */
15350 duration: function duration(value) {
15351 var parsedValue = parseInt(value, 10);
15352
15353 if (isNaN(parsedValue)) {
15354 return parseDuration(value);
15355 }
15356
15357 return parsedValue;
15358 },
15359
15360 /**
15361 * Specifies the Segment duration, in units of the value of the @timescale.
15362 *
15363 * @param {string} value
15364 * value of attribute as a string
15365 * @return {number}
15366 * The parsed duration
15367 */
15368 d: function d(value) {
15369 return parseInt(value, 10);
15370 },
15371
15372 /**
15373 * Specifies the MPD start time, in @timescale units, the first Segment in the series
15374 * starts relative to the beginning of the Period
15375 *
15376 * @param {string} value
15377 * value of attribute as a string
15378 * @return {number}
15379 * The parsed time
15380 */
15381 t: function t(value) {
15382 return parseInt(value, 10);
15383 },
15384
15385 /**
15386 * Specifies the repeat count of the number of following contiguous Segments with the
15387 * same duration expressed by the value of @d
15388 *
15389 * @param {string} value
15390 * value of attribute as a string
15391 * @return {number}
15392 * The parsed number
15393 */
15394 r: function r(value) {
15395 return parseInt(value, 10);
15396 },
15397
15398 /**
15399 * Default parser for all other attributes. Acts as a no-op and just returns the value
15400 * as a string
15401 *
15402 * @param {string} value
15403 * value of attribute as a string
15404 * @return {string}
15405 * Unparsed value
15406 */
15407 DEFAULT: function DEFAULT(value) {
15408 return value;
15409 }
15410 };
15411 /**
15412 * Gets all the attributes and values of the provided node, parses attributes with known
15413 * types, and returns an object with attribute names mapped to values.
15414 *
15415 * @param {Node} el
15416 * The node to parse attributes from
15417 * @return {Object}
15418 * Object with all attributes of el parsed
15419 */
15420
15421 var parseAttributes$1 = function parseAttributes(el) {
15422 if (!(el && el.attributes)) {
15423 return {};
15424 }
15425
15426 return from(el.attributes).reduce(function (a, e) {
15427 var parseFn = parsers[e.name] || parsers.DEFAULT;
15428 a[e.name] = parseFn(e.value);
15429 return a;
15430 }, {});
15431 };
15432
  // Maps DASH ContentProtection scheme id URNs to EME-style key-system
  // identifier strings (presumably consumed when translating <ContentProtection>
  // elements — confirm at call sites outside this view).
  var keySystemsMap = {
    'urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b': 'org.w3.clearkey',
    'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed': 'com.widevine.alpha',
    'urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95': 'com.microsoft.playready',
    'urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb': 'com.adobe.primetime'
  };
15439 /**
15440 * Builds a list of urls that is the product of the reference urls and BaseURL values
15441 *
15442 * @param {string[]} referenceUrls
15443 * List of reference urls to resolve to
15444 * @param {Node[]} baseUrlElements
15445 * List of BaseURL nodes from the mpd
15446 * @return {string[]}
15447 * List of resolved urls
15448 */
15449
15450 var buildBaseUrls = function buildBaseUrls(referenceUrls, baseUrlElements) {
15451 if (!baseUrlElements.length) {
15452 return referenceUrls;
15453 }
15454
15455 return flatten(referenceUrls.map(function (reference) {
15456 return baseUrlElements.map(function (baseUrlElement) {
15457 return resolveUrl_1(reference, getContent(baseUrlElement));
15458 });
15459 }));
15460 };
  /**
   * Contains all Segment information for its containing AdaptationSet
   *
   * @typedef {Object} SegmentInformation
   * @property {Object|undefined} template
   *           Contains the attributes for the SegmentTemplate node
   * @property {Object[]|undefined} timeline
   *           Contains a list of attributes for each S node within the SegmentTimeline node
   * @property {Object|undefined} list
   *           Contains the attributes for the SegmentList node
   * @property {Object|undefined} base
   *           Contains the attributes for the SegmentBase node
   */

  /**
   * Returns all available Segment information contained within the AdaptationSet node
   *
   * @param {Node} adaptationSet
   *        The AdaptationSet node to get Segment information from
   * @return {SegmentInformation}
   *         The Segment information contained within the provided AdaptationSet
   */

  var getSegmentInformation = function getSegmentInformation(adaptationSet) {
    // first of each segment-description node type, if present
    var segmentTemplate = findChildren(adaptationSet, 'SegmentTemplate')[0];
    var segmentList = findChildren(adaptationSet, 'SegmentList')[0];
    // attributes of every <SegmentURL> under the SegmentList, tagged with its node type
    var segmentUrls = segmentList && findChildren(segmentList, 'SegmentURL').map(function (s) {
      return merge({
        tag: 'SegmentURL'
      }, parseAttributes$1(s));
    });
    var segmentBase = findChildren(adaptationSet, 'SegmentBase')[0];
    // a SegmentTimeline may appear under either SegmentList or SegmentTemplate
    var segmentTimelineParentNode = segmentList || segmentTemplate;
    var segmentTimeline = segmentTimelineParentNode && findChildren(segmentTimelineParentNode, 'SegmentTimeline')[0];
    // an <Initialization> may appear under SegmentList, SegmentBase or SegmentTemplate
    var segmentInitializationParentNode = segmentList || segmentBase || segmentTemplate;
    var segmentInitialization = segmentInitializationParentNode && findChildren(segmentInitializationParentNode, 'Initialization')[0]; // SegmentTemplate is handled slightly differently, since it can have both
    // @initialization and an <Initialization> node. @initialization can be templated,
    // while the node can have a url and range specified. If the <SegmentTemplate> has
    // both @initialization and an <Initialization> subelement we opt to override with
    // the node, as this interaction is not defined in the spec.

    var template = segmentTemplate && parseAttributes$1(segmentTemplate);

    if (template && segmentInitialization) {
      // <Initialization> node wins over @initialization (see note above)
      template.initialization = segmentInitialization && parseAttributes$1(segmentInitialization);
    } else if (template && template.initialization) {
      // If it is @initialization we convert it to an object since this is the format that
      // later functions will rely on for the initialization segment. This is only valid
      // for <SegmentTemplate>
      template.initialization = {
        sourceURL: template.initialization
      };
    }

    var segmentInfo = {
      template: template,
      timeline: segmentTimeline && findChildren(segmentTimeline, 'S').map(function (s) {
        return parseAttributes$1(s);
      }),
      list: segmentList && merge(parseAttributes$1(segmentList), {
        segmentUrls: segmentUrls,
        initialization: parseAttributes$1(segmentInitialization)
      }),
      base: segmentBase && merge(parseAttributes$1(segmentBase), {
        initialization: parseAttributes$1(segmentInitialization)
      })
    };
    // drop keys for node types that were not present in this AdaptationSet
    Object.keys(segmentInfo).forEach(function (key) {
      if (!segmentInfo[key]) {
        delete segmentInfo[key];
      }
    });
    return segmentInfo;
  };
15535 /**
15536 * Contains Segment information and attributes needed to construct a Playlist object
15537 * from a Representation
15538 *
15539 * @typedef {Object} RepresentationInformation
15540 * @property {SegmentInformation} segmentInfo
15541 * Segment information for this Representation
15542 * @property {Object} attributes
15543 * Inherited attributes for this Representation
15544 */
15545
15546 /**
15547 * Maps a Representation node to an object containing Segment information and attributes
15548 *
15549 * @name inheritBaseUrlsCallback
15550 * @function
15551 * @param {Node} representation
15552 * Representation node from the mpd
15553 * @return {RepresentationInformation}
15554 * Representation information needed to construct a Playlist object
15555 */
15556
15557 /**
15558 * Returns a callback for Array.prototype.map for mapping Representation nodes to
15559 * Segment information and attributes using inherited BaseURL nodes.
15560 *
15561 * @param {Object} adaptationSetAttributes
15562 * Contains attributes inherited by the AdaptationSet
15563 * @param {string[]} adaptationSetBaseUrls
15564 * Contains list of resolved base urls inherited by the AdaptationSet
15565 * @param {SegmentInformation} adaptationSetSegmentInfo
15566 * Contains Segment information for the AdaptationSet
15567 * @return {inheritBaseUrlsCallback}
15568 * Callback map function
15569 */
15570
/**
 * Builds an Array.prototype.map callback that expands a single Representation
 * node into one entry per resolved BaseURL, combining the AdaptationSet's
 * inherited attributes and segment information with the Representation's own.
 *
 * @param {Object} adaptationSetAttributes attributes inherited from the AdaptationSet
 * @param {string[]} adaptationSetBaseUrls resolved base urls inherited from the AdaptationSet
 * @param {SegmentInformation} adaptationSetSegmentInfo segment info inherited from the AdaptationSet
 * @return {inheritBaseUrlsCallback} the map callback
 */
var inheritBaseUrls = function inheritBaseUrls(adaptationSetAttributes, adaptationSetBaseUrls, adaptationSetSegmentInfo) {
  return function (representation) {
    var baseUrlNodes = findChildren(representation, 'BaseURL');
    var resolvedUrls = buildBaseUrls(adaptationSetBaseUrls, baseUrlNodes);
    var combinedAttributes = merge(adaptationSetAttributes, parseAttributes$1(representation));
    var repSegmentInfo = getSegmentInformation(representation);

    return resolvedUrls.map(function (baseUrl) {
      return {
        // merged per entry so each result owns a fresh object
        segmentInfo: merge(adaptationSetSegmentInfo, repSegmentInfo),
        attributes: merge(combinedAttributes, {
          baseUrl: baseUrl
        })
      };
    });
  };
};
15587 /**
 * Transforms a series of content protection nodes to
15589 * an object containing pssh data by key system
15590 *
15591 * @param {Node[]} contentProtectionNodes
15592 * Content protection nodes
15593 * @return {Object}
15594 * Object containing pssh data by key system
15595 */
15596
/**
 * Collects ContentProtection nodes into an object keyed by recognized key
 * system (via keySystemsMap), attaching decoded cenc:pssh data when present.
 *
 * @param {Node[]} contentProtectionNodes ContentProtection nodes from the mpd
 * @return {Object} attributes (and optionally pssh bytes) indexed by key system
 */
var generateKeySystemInformation = function generateKeySystemInformation(contentProtectionNodes) {
  var keySystemInfo = {};

  contentProtectionNodes.forEach(function (node) {
    var attributes = parseAttributes$1(node);
    var keySystem = keySystemsMap[attributes.schemeIdUri];

    // schemes we don't recognize are skipped entirely
    if (!keySystem) {
      return;
    }

    keySystemInfo[keySystem] = {
      attributes: attributes
    };
    var psshNode = findChildren(node, 'cenc:pssh')[0];

    if (psshNode) {
      var pssh = getContent(psshNode);

      keySystemInfo[keySystem].pssh = pssh && decodeB64ToUint8Array_1(pssh);
    }
  });

  return keySystemInfo;
};
15618 /**
15619 * Maps an AdaptationSet node to a list of Representation information objects
15620 *
15621 * @name toRepresentationsCallback
15622 * @function
15623 * @param {Node} adaptationSet
15624 * AdaptationSet node from the mpd
15625 * @return {RepresentationInformation[]}
 *         List of objects containing Representation information
15627 */
15628
15629 /**
15630 * Returns a callback for Array.prototype.map for mapping AdaptationSet nodes to a list of
15631 * Representation information objects
15632 *
15633 * @param {Object} periodAttributes
15634 * Contains attributes inherited by the Period
15635 * @param {string[]} periodBaseUrls
15636 * Contains list of resolved base urls inherited by the Period
15637 * @param {string[]} periodSegmentInfo
15638 * Contains Segment Information at the period level
15639 * @return {toRepresentationsCallback}
15640 * Callback map function
15641 */
15642
/**
 * Builds an Array.prototype.map callback that expands an AdaptationSet node
 * into the Representation information for all of its Representations,
 * folding in Period-level attributes, base urls and segment information.
 *
 * @param {Object} periodAttributes attributes inherited from the Period
 * @param {string[]} periodBaseUrls resolved base urls inherited from the Period
 * @param {string[]} periodSegmentInfo segment information at the Period level
 * @return {toRepresentationsCallback} the map callback
 */
var toRepresentations = function toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo) {
  return function (adaptationSet) {
    var baseUrls = buildBaseUrls(periodBaseUrls, findChildren(adaptationSet, 'BaseURL'));
    var role = findChildren(adaptationSet, 'Role')[0];
    var attrs = merge(periodAttributes, parseAttributes$1(adaptationSet), {
      role: parseAttributes$1(role)
    });
    var contentProtection = generateKeySystemInformation(findChildren(adaptationSet, 'ContentProtection'));

    // only attach contentProtection when at least one scheme was recognized
    if (Object.keys(contentProtection).length) {
      attrs = merge(attrs, {
        contentProtection: contentProtection
      });
    }

    var segmentInfo = merge(periodSegmentInfo, getSegmentInformation(adaptationSet));
    var representations = findChildren(adaptationSet, 'Representation');

    return flatten(representations.map(inheritBaseUrls(attrs, baseUrls, segmentInfo)));
  };
};
15666 /**
 * Maps a Period node to a list of Representation information objects for all
15668 * AdaptationSet nodes contained within the Period
15669 *
15670 * @name toAdaptationSetsCallback
15671 * @function
15672 * @param {Node} period
15673 * Period node from the mpd
15674 * @param {number} periodIndex
15675 * Index of the Period within the mpd
15676 * @return {RepresentationInformation[]}
 *         List of objects containing Representation information
15678 */
15679
15680 /**
15681 * Returns a callback for Array.prototype.map for mapping Period nodes to a list of
15682 * Representation information objects
15683 *
15684 * @param {Object} mpdAttributes
15685 * Contains attributes inherited by the mpd
15686 * @param {string[]} mpdBaseUrls
15687 * Contains list of resolved base urls inherited by the mpd
15688 * @return {toAdaptationSetsCallback}
15689 * Callback map function
15690 */
15691
/**
 * Builds an Array.prototype.map callback that expands a Period node into the
 * Representation information for all AdaptationSets it contains.
 *
 * @param {Object} mpdAttributes attributes inherited from the mpd
 * @param {string[]} mpdBaseUrls resolved base urls inherited from the mpd
 * @return {toAdaptationSetsCallback} the map callback
 */
var toAdaptationSets = function toAdaptationSets(mpdAttributes, mpdBaseUrls) {
  return function (period, index) {
    var periodBaseUrls = buildBaseUrls(mpdBaseUrls, findChildren(period, 'BaseURL'));
    var parsedPeriodId = parseInt(parseAttributes$1(period).id, 10);

    // Period@id is optional and need not be numeric; fall back to the
    // Period's position within the mpd when it isn't a number
    var periodAttributes = merge(mpdAttributes, {
      periodIndex: window_1.isNaN(parsedPeriodId) ? index : parsedPeriodId
    });
    var periodSegmentInfo = getSegmentInformation(period);
    var adaptationSets = findChildren(period, 'AdaptationSet');

    return flatten(adaptationSets.map(toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo)));
  };
};
15707 /**
15708 * Traverses the mpd xml tree to generate a list of Representation information objects
15709 * that have inherited attributes from parent nodes
15710 *
15711 * @param {Node} mpd
15712 * The root node of the mpd
15713 * @param {Object} options
15714 * Available options for inheritAttributes
15715 * @param {string} options.manifestUri
15716 * The uri source of the mpd
15717 * @param {number} options.NOW
15718 * Current time per DASH IOP. Default is current time in ms since epoch
15719 * @param {number} options.clientOffset
15720 * Client time difference from NOW (in milliseconds)
15721 * @return {RepresentationInformation[]}
15722 * List of objects containing Representation information
15723 */
15724
/**
 * Traverses the mpd xml tree to generate a list of Representation information
 * objects that have inherited attributes from their parent nodes.
 *
 * @param {Node} mpd the root node of the mpd
 * @param {Object} [options] available options
 * @param {string} [options.manifestUri=''] the uri source of the mpd
 * @param {number} [options.NOW=Date.now()] current time per DASH IOP, ms since epoch
 * @param {number} [options.clientOffset=0] client time difference from NOW (ms)
 * @return {RepresentationInformation[]} list of Representation information objects
 * @throws {Error} when the mpd contains no Period nodes
 */
var inheritAttributes = function inheritAttributes(mpd, options) {
  if (options === void 0) {
    options = {};
  }

  var manifestUri = options.manifestUri === void 0 ? '' : options.manifestUri;
  var NOW = options.NOW === void 0 ? Date.now() : options.NOW;
  var clientOffset = options.clientOffset === void 0 ? 0 : options.clientOffset;
  var periods = findChildren(mpd, 'Period');

  // a valid mpd must contain at least one Period
  if (!periods.length) {
    throw new Error(errors.INVALID_NUMBER_OF_PERIOD);
  }

  var mpdAttributes = parseAttributes$1(mpd);

  mpdAttributes.sourceDuration = mpdAttributes.mediaPresentationDuration || 0;
  mpdAttributes.NOW = NOW;
  mpdAttributes.clientOffset = clientOffset;

  var mpdBaseUrls = buildBaseUrls([manifestUri], findChildren(mpd, 'BaseURL'));

  return flatten(periods.map(toAdaptationSets(mpdAttributes, mpdBaseUrls)));
};
15750
/**
 * Parses an MPD manifest string into its root MPD element.
 *
 * @param {string} manifestString XML string of the MPD manifest
 * @return {Element} the root MPD element
 * @throws {Error} when the string is empty or does not parse as a valid MPD
 */
var stringToMpdXml = function stringToMpdXml(manifestString) {
  if (manifestString === '') {
    throw new Error(errors.DASH_EMPTY_MANIFEST);
  }

  var xml = new domParser_3().parseFromString(manifestString, 'application/xml');
  var mpd = null;

  if (xml && xml.documentElement.tagName === 'MPD') {
    mpd = xml.documentElement;
  }

  // DOMParser reports XML errors via an embedded parsererror element
  // rather than throwing
  if (!mpd || mpd.getElementsByTagName('parsererror').length > 0) {
    throw new Error(errors.DASH_INVALID_XML);
  }

  return mpd;
};
15766
15767 /**
15768 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
15769 *
15770 * @param {string} mpd
15771 * XML string of the MPD manifest
15772 * @return {Object|null}
15773 * Attributes of UTCTiming node specified in the manifest. Null if none found
15774 */
15775
/**
 * Looks up the first UTCTiming node of the mpd and returns its attributes,
 * annotated with the sync method ('HEAD', 'GET' or 'DIRECT') implied by its
 * schemeIdUri.
 *
 * @param {Node} mpd the root node of the mpd
 * @return {Object|null} UTCTiming attributes, or null when no node is present
 * @throws {Error} when the scheme is unsupported (ntp/sntp/http-ntp or unknown)
 */
var parseUTCTimingScheme = function parseUTCTimingScheme(mpd) {
  var UTCTimingNode = findChildren(mpd, 'UTCTiming')[0];

  if (!UTCTimingNode) {
    return null;
  }

  var attributes = parseAttributes$1(UTCTimingNode);
  var scheme = attributes.schemeIdUri;

  if (scheme === 'urn:mpeg:dash:utc:http-head:2014' || scheme === 'urn:mpeg:dash:utc:http-head:2012') {
    attributes.method = 'HEAD';
  } else if (scheme === 'urn:mpeg:dash:utc:http-xsdate:2014' || scheme === 'urn:mpeg:dash:utc:http-iso:2014' || scheme === 'urn:mpeg:dash:utc:http-xsdate:2012' || scheme === 'urn:mpeg:dash:utc:http-iso:2012') {
    attributes.method = 'GET';
  } else if (scheme === 'urn:mpeg:dash:utc:direct:2014' || scheme === 'urn:mpeg:dash:utc:direct:2012') {
    attributes.method = 'DIRECT';
    attributes.value = Date.parse(attributes.value);
  } else {
    // ntp, sntp and http-ntp schemes (and anything unrecognized) are
    // not supported
    throw new Error(errors.UNSUPPORTED_UTC_TIMING_SCHEME);
  }

  return attributes;
};
15813
/**
 * Parses an MPD manifest string into the m3u8-style manifest object used by
 * the rest of the pipeline.
 *
 * @param {string} manifestString XML string of the MPD manifest
 * @param {Object} [options] parser options (manifestUri, NOW, clientOffset, sidxMapping)
 * @return {Object} the parsed manifest object
 */
var parse = function parse(manifestString, options) {
  if (options === void 0) {
    options = {};
  }

  var representationInfo = inheritAttributes(stringToMpdXml(manifestString), options);
  return toM3u8(toPlaylists(representationInfo), options.sidxMapping);
};
15821 /**
15822 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
15823 *
15824 * @param {string} manifestString
15825 * XML string of the MPD manifest
15826 * @return {Object|null}
15827 * Attributes of UTCTiming node specified in the manifest. Null if none found
15828 */
15829
/**
 * Parses the manifest for a UTCTiming node and returns its attributes.
 *
 * @param {string} manifestString XML string of the MPD manifest
 * @return {Object|null} UTCTiming attributes, or null when none is found
 */
var parseUTCTiming = function parseUTCTiming(manifestString) {
  var mpd = stringToMpdXml(manifestString);
  return parseUTCTimingScheme(mpd);
};
15833
15834 /**
15835 * mux.js
15836 *
15837 * Copyright (c) Brightcove
15838 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
15839 */
/**
 * Reinterprets a number as an unsigned 32-bit integer.
 *
 * @param {number} value the value to coerce
 * @return {number} the unsigned 32-bit interpretation of value
 */
var toUnsigned = function toUnsigned(value) {
  // the unsigned right-shift operator coerces its left operand to uint32
  return value >>> 0;
};
15843
/**
 * Formats a byte value as a two-character, zero-padded hex string.
 *
 * @param {number} value the byte to format
 * @return {string} two lowercase hex digits
 */
var toHexString = function toHexString(value) {
  var padded = '00' + value.toString(16);
  return padded.slice(-2);
};
15847
// grouped export of the unsigned-coercion and hex-formatting helpers above
var bin = {
  toUnsigned: toUnsigned,
  toHexString: toHexString
};
15852
var inspectMp4,
    _textifyMp,
    toUnsigned$1 = bin.toUnsigned,
    // mp4 timestamps count seconds since 1904-01-01T00:00:00Z;
    // 2082844800000 ms is the offset from that epoch to the Unix epoch
    parseMp4Date = function parseMp4Date(seconds) {
  return new Date(seconds * 1000 - 2082844800000);
},
    // unpacks a four-byte sample-flags field into its individual bit fields
    parseSampleFlags = function parseSampleFlags(flags) {
  return {
    isLeading: (flags[0] & 0x0c) >>> 2,
    dependsOn: flags[0] & 0x03,
    isDependedOn: (flags[1] & 0xc0) >>> 6,
    hasRedundancy: (flags[1] & 0x30) >>> 4,
    paddingValue: (flags[1] & 0x0e) >>> 1,
    isNonSyncSample: flags[1] & 0x01,
    degradationPriority: flags[2] << 8 | flags[3]
  };
},

/**
 * Returns the string representation of an ASCII encoded four byte buffer.
 * @param buffer {Uint8Array} a four-byte buffer to translate
 * @return {string} the corresponding string
 */
parseType = function parseType(buffer) {
  var result = '';
  result += String.fromCharCode(buffer[0]);
  result += String.fromCharCode(buffer[1]);
  result += String.fromCharCode(buffer[2]);
  result += String.fromCharCode(buffer[3]);
  return result;
},
15884
// Find the data for a box specified by its path, e.g.
// findBox(data, ['moov', 'trak']) collects the payload of every trak box
// nested under a moov. Returned subarrays exclude the 8-byte box header.
findBox = function findBox(data, path) {
  var results = [],
      i,
      size,
      type,
      end,
      subresults;

  if (!path.length) {
    // short-circuit the search for empty paths
    return null;
  }

  for (i = 0; i < data.byteLength;) {
    // 32-bit big-endian box size, forced unsigned
    size = toUnsigned$1(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);

    type = parseType(data.subarray(i + 4, i + 8));

    // a size <= 1 is treated as "box extends to the end of the buffer"
    // NOTE(review): size === 1 conventionally signals a 64-bit largesize
    // field, which this parser never reads -- confirm inputs do not use
    // largesize boxes
    end = size > 1 ? i + size : data.byteLength;

    if (type === path[0]) {
      if (path.length === 1) {
        // this is the end of the path and we've found the box we were
        // looking for
        results.push(data.subarray(i + 8, end));
      } else {
        // recursively search for the next box along the path
        subresults = findBox(data.subarray(i + 8, end), path.slice(1));
        if (subresults.length) {
          results = results.concat(subresults);
        }
      }
    }
    i = end;
  }

  // we've finished searching all of data
  return results;
},
15925 nalParse = function nalParse(avcStream) {
15926 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
15927 result = [],
15928 i,
15929 length;
15930 for (i = 0; i + 4 < avcStream.length; i += length) {
15931 length = avcView.getUint32(i);
15932 i += 4;
15933
15934 // bail if this doesn't appear to be an H264 stream
15935 if (length <= 0) {
15936 result.push('<span style=\'color:red;\'>MALFORMED DATA</span>');
15937 continue;
15938 }
15939
15940 switch (avcStream[i] & 0x1F) {
15941 case 0x01:
15942 result.push('slice_layer_without_partitioning_rbsp');
15943 break;
15944 case 0x05:
15945 result.push('slice_layer_without_partitioning_rbsp_idr');
15946 break;
15947 case 0x06:
15948 result.push('sei_rbsp');
15949 break;
15950 case 0x07:
15951 result.push('seq_parameter_set_rbsp');
15952 break;
15953 case 0x08:
15954 result.push('pic_parameter_set_rbsp');
15955 break;
15956 case 0x09:
15957 result.push('access_unit_delimiter_rbsp');
15958 break;
15959 default:
15960 result.push('UNKNOWN NAL - ' + avcStream[i] & 0x1F);
15961 break;
15962 }
15963 }
15964 return result;
15965 },
15966
15967
// registry of handlers for individual mp4 box types
parse$1 = {
  // codingname, not a first-class box type. stsd entries share the
  // same format as real boxes so the parsing infrastructure can be
  // shared
  avc1: function avc1(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
    return {
      dataReferenceIndex: view.getUint16(6),
      width: view.getUint16(24),
      height: view.getUint16(26),
      // NOTE(review): resolution fields are 16.16 fixed point, whose
      // fractional half is usually divided by 65536 rather than 16 --
      // confirm this divisor is intentional
      horizresolution: view.getUint16(28) + view.getUint16(30) / 16,
      vertresolution: view.getUint16(32) + view.getUint16(34) / 16,
      frameCount: view.getUint16(40),
      depth: view.getUint16(74),
      // child boxes (avcC, btrt, ...) follow the fixed-size entry fields
      config: inspectMp4(data.subarray(78, data.byteLength))
    };
  },
  // AVCDecoderConfigurationRecord: profile/level info plus raw SPS/PPS NALs
  avcC: function avcC(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        result = {
      configurationVersion: data[0],
      avcProfileIndication: data[1],
      profileCompatibility: data[2],
      avcLevelIndication: data[3],
      lengthSizeMinusOne: data[4] & 0x03,
      sps: [],
      pps: []
    },
        numOfSequenceParameterSets = data[5] & 0x1f,
        numOfPictureParameterSets,
        nalSize,
        offset,
        i;

    // iterate past any SPSs
    offset = 6;
    for (i = 0; i < numOfSequenceParameterSets; i++) {
      nalSize = view.getUint16(offset);
      offset += 2;
      result.sps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
      offset += nalSize;
    }
    // iterate past any PPSs
    numOfPictureParameterSets = data[offset];
    offset++;
    for (i = 0; i < numOfPictureParameterSets; i++) {
      nalSize = view.getUint16(offset);
      offset += 2;
      result.pps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
      offset += nalSize;
    }
    return result;
  },
  // bitrate box: decoder buffer size plus max/average bitrates
  btrt: function btrt(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
    return {
      bufferSizeDB: view.getUint32(0),
      maxBitrate: view.getUint32(4),
      avgBitrate: view.getUint32(8)
    };
  },
  // elementary stream descriptor box; offsets assume the common fixed
  // layout of the nested descriptors rather than walking them generically
  esds: function esds(data) {
    return {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      esId: data[6] << 8 | data[7],
      streamPriority: data[8] & 0x1f,
      decoderConfig: {
        objectProfileIndication: data[11],
        streamType: data[12] >>> 2 & 0x3f,
        bufferSize: data[13] << 16 | data[14] << 8 | data[15],
        maxBitrate: data[16] << 24 | data[17] << 16 | data[18] << 8 | data[19],
        avgBitrate: data[20] << 24 | data[21] << 16 | data[22] << 8 | data[23],
        decoderConfigDescriptor: {
          tag: data[24],
          length: data[25],
          audioObjectType: data[26] >>> 3 & 0x1f,
          samplingFrequencyIndex: (data[26] & 0x07) << 1 | data[27] >>> 7 & 0x01,
          channelConfiguration: data[27] >>> 3 & 0x0f
        }
      }
    };
  },
  // file type box: major brand plus the list of compatible brands
  ftyp: function ftyp(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        result = {
      majorBrand: parseType(data.subarray(0, 4)),
      minorVersion: view.getUint32(4),
      compatibleBrands: []
    },
        i = 8;
    while (i < data.byteLength) {
      result.compatibleBrands.push(parseType(data.subarray(i, i + 4)));
      i += 4;
    }
    return result;
  },
  // data information box: just a container for child boxes
  dinf: function dinf(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  // data reference box; the 4-byte entry_count is skipped because the
  // entries themselves are parsed as nested boxes
  dref: function dref(data) {
    return {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      dataReferences: inspectMp4(data.subarray(8))
    };
  },
  // handler reference box: declares the media type of the track
  hdlr: function hdlr(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        result = {
      version: view.getUint8(0),
      flags: new Uint8Array(data.subarray(1, 4)),
      handlerType: parseType(data.subarray(8, 12)),
      name: ''
    },
        i = 8;

    // parse out the name field
    for (i = 24; i < data.byteLength; i++) {
      if (data[i] === 0x00) {
        // the name field is null-terminated
        i++;
        break;
      }
      result.name += String.fromCharCode(data[i]);
    }
    // decode UTF-8 to javascript's internal representation
    // see http://ecmanaut.blogspot.com/2006/07/encoding-decoding-utf8-in-javascript.html
    result.name = decodeURIComponent(escape(result.name));

    return result;
  },
  // media data box: summarized rather than dumped, since the payload can
  // be arbitrarily large; NALs are labeled assuming an AVC stream
  mdat: function mdat(data) {
    return {
      byteLength: data.byteLength,
      nals: nalParse(data)
    };
  },
  // media header box: timescale/duration plus the packed ISO-639-2/T language
  mdhd: function mdhd(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        i = 4,
        language,
        result = {
      version: view.getUint8(0),
      flags: new Uint8Array(data.subarray(1, 4)),
      language: ''
    };
    if (result.version === 1) {
      // version 1 stores 64-bit times; only the low 32 bits are read
      i += 4;
      result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
      i += 8;
      result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
      i += 4;
      result.timescale = view.getUint32(i);
      i += 8;
      result.duration = view.getUint32(i); // truncating top 4 bytes
    } else {
      result.creationTime = parseMp4Date(view.getUint32(i));
      i += 4;
      result.modificationTime = parseMp4Date(view.getUint32(i));
      i += 4;
      result.timescale = view.getUint32(i);
      i += 4;
      result.duration = view.getUint32(i);
    }
    i += 4;
    // language is stored as an ISO-639-2/T code in an array of three 5-bit fields
    // each field is the packed difference between its ASCII value and 0x60
    language = view.getUint16(i);
    result.language += String.fromCharCode((language >> 10) + 0x60);
    result.language += String.fromCharCode(((language & 0x03e0) >> 5) + 0x60);
    result.language += String.fromCharCode((language & 0x1f) + 0x60);

    return result;
  },
  // media box: container for child boxes
  mdia: function mdia(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  // movie fragment header box: the fragment's sequence number
  mfhd: function mfhd(data) {
    return {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      sequenceNumber: data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]
    };
  },
  // media information box: container for child boxes
  minf: function minf(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  // codingname, not a first-class box type. stsd entries share the
  // same format as real boxes so the parsing infrastructure can be
  // shared
  mp4a: function mp4a(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        result = {
      // 6 bytes reserved
      dataReferenceIndex: view.getUint16(6),
      // 4 + 4 bytes reserved
      channelcount: view.getUint16(16),
      samplesize: view.getUint16(18),
      // 2 bytes pre_defined
      // 2 bytes reserved
      // 16.16 fixed-point sample rate
      samplerate: view.getUint16(24) + view.getUint16(26) / 65536
    };

    // if there are more bytes to process, assume this is an ISO/IEC
    // 14496-14 MP4AudioSampleEntry and parse the ESDBox
    if (data.byteLength > 28) {
      result.streamDescriptor = inspectMp4(data.subarray(28))[0];
    }
    return result;
  },
  // movie fragment box: container for child boxes
  moof: function moof(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  // movie box: container for child boxes
  moov: function moov(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  // movie extends box: container for child boxes
  mvex: function mvex(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  // movie header box: presentation-wide information
  mvhd: function mvhd(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        i = 4,
        result = {
      version: view.getUint8(0),
      flags: new Uint8Array(data.subarray(1, 4))
    };

    if (result.version === 1) {
      // version 1 stores 64-bit times; only the low 32 bits are read
      i += 4;
      result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
      i += 8;
      result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
      i += 4;
      result.timescale = view.getUint32(i);
      i += 8;
      result.duration = view.getUint32(i); // truncating top 4 bytes
    } else {
      result.creationTime = parseMp4Date(view.getUint32(i));
      i += 4;
      result.modificationTime = parseMp4Date(view.getUint32(i));
      i += 4;
      result.timescale = view.getUint32(i);
      i += 4;
      result.duration = view.getUint32(i);
    }
    i += 4;

    // convert fixed-point, base 16 back to a number
    // NOTE(review): rate is 16.16 and volume is 8.8 fixed point; the
    // fractional halves are usually divided by 65536 and 256 rather than
    // 16 and 8 -- confirm these divisors are intentional
    result.rate = view.getUint16(i) + view.getUint16(i + 2) / 16;
    i += 4;
    result.volume = view.getUint8(i) + view.getUint8(i + 1) / 8;
    i += 2;
    i += 2; // reserved
    i += 2 * 4; // reserved
    result.matrix = new Uint32Array(data.subarray(i, i + 9 * 4));
    i += 9 * 4;
    i += 6 * 4; // pre_defined
    result.nextTrackId = view.getUint32(i);
    return result;
  },
  // progressive download information box
  pdin: function pdin(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
    return {
      version: view.getUint8(0),
      flags: new Uint8Array(data.subarray(1, 4)),
      rate: view.getUint32(4),
      initialDelay: view.getUint32(8)
    };
  },
  // independent and disposable samples box: one entry per sample
  sdtp: function sdtp(data) {
    var result = {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      samples: []
    },
        i;

    for (i = 4; i < data.byteLength; i++) {
      result.samples.push({
        dependsOn: (data[i] & 0x30) >> 4,
        isDependedOn: (data[i] & 0x0c) >> 2,
        hasRedundancy: data[i] & 0x03
      });
    }
    return result;
  },
  // segment index box: a list of subsegment references
  // NOTE(review): only the version 0 (32-bit time/offset) layout is
  // handled; a version 1 sidx would be misread from these offsets --
  // confirm inputs are version 0
  sidx: function sidx(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        result = {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      references: [],
      referenceId: view.getUint32(4),
      timescale: view.getUint32(8),
      earliestPresentationTime: view.getUint32(12),
      firstOffset: view.getUint32(16)
    },
        referenceCount = view.getUint16(22),
        i;

    for (i = 24; referenceCount; i += 12, referenceCount--) {
      result.references.push({
        // top bit of the first word selects box vs media reference
        referenceType: (data[i] & 0x80) >>> 7,
        referencedSize: view.getUint32(i) & 0x7FFFFFFF,
        subsegmentDuration: view.getUint32(i + 4),
        startsWithSap: !!(data[i + 8] & 0x80),
        sapType: (data[i + 8] & 0x70) >>> 4,
        sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
      });
    }

    return result;
  },
  // sound media header box: 8.8 fixed-point stereo balance
  smhd: function smhd(data) {
    return {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      balance: data[4] + data[5] / 256
    };
  },
  // sample table box: container for child boxes
  stbl: function stbl(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  // chunk offset box
  stco: function stco(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        result = {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      chunkOffsets: []
    },
        entryCount = view.getUint32(4),
        i;
    for (i = 8; entryCount; i += 4, entryCount--) {
      result.chunkOffsets.push(view.getUint32(i));
    }
    return result;
  },
  // sample-to-chunk box
  stsc: function stsc(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        entryCount = view.getUint32(4),
        result = {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      sampleToChunks: []
    },
        i;
    for (i = 8; entryCount; i += 12, entryCount--) {
      result.sampleToChunks.push({
        firstChunk: view.getUint32(i),
        samplesPerChunk: view.getUint32(i + 4),
        sampleDescriptionIndex: view.getUint32(i + 8)
      });
    }
    return result;
  },
  // sample description box: entries are parsed as nested boxes (avc1, mp4a, ...)
  stsd: function stsd(data) {
    return {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      sampleDescriptions: inspectMp4(data.subarray(8))
    };
  },
  // sample size box: per-sample entries are present only when sizes vary
  stsz: function stsz(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        result = {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      sampleSize: view.getUint32(4),
      entries: []
    },
        i;
    for (i = 12; i < data.byteLength; i += 4) {
      result.entries.push(view.getUint32(i));
    }
    return result;
  },
  // decoding time-to-sample box
  stts: function stts(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        result = {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      timeToSamples: []
    },
        entryCount = view.getUint32(4),
        i;

    for (i = 8; entryCount; i += 8, entryCount--) {
      result.timeToSamples.push({
        sampleCount: view.getUint32(i),
        sampleDelta: view.getUint32(i + 4)
      });
    }
    return result;
  },
  // segment type box: identical layout to ftyp
  styp: function styp(data) {
    return parse$1.ftyp(data);
  },
  // track fragment decode time box
  tfdt: function tfdt(data) {
    var result = {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      baseMediaDecodeTime: toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7])
    };
    if (result.version === 1) {
      // version 1 is a 64-bit value; the halves are combined with
      // arithmetic because bitwise operators only handle 32 bits
      // (precision is limited to Number.MAX_SAFE_INTEGER)
      result.baseMediaDecodeTime *= Math.pow(2, 32);
      result.baseMediaDecodeTime += toUnsigned$1(data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11]);
    }
    return result;
  },
16393 tfhd: function tfhd(data) {
16394 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
16395 result = {
16396 version: data[0],
16397 flags: new Uint8Array(data.subarray(1, 4)),
16398 trackId: view.getUint32(4)
16399 },
16400 baseDataOffsetPresent = result.flags[2] & 0x01,
16401 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
16402 defaultSampleDurationPresent = result.flags[2] & 0x08,
16403 defaultSampleSizePresent = result.flags[2] & 0x10,
16404 defaultSampleFlagsPresent = result.flags[2] & 0x20,
16405 durationIsEmpty = result.flags[0] & 0x010000,
16406 defaultBaseIsMoof = result.flags[0] & 0x020000,
16407 i;
16408
16409 i = 8;
16410 if (baseDataOffsetPresent) {
16411 i += 4; // truncate top 4 bytes
16412 // FIXME: should we read the full 64 bits?
16413 result.baseDataOffset = view.getUint32(12);
16414 i += 4;
16415 }
16416 if (sampleDescriptionIndexPresent) {
16417 result.sampleDescriptionIndex = view.getUint32(i);
16418 i += 4;
16419 }
16420 if (defaultSampleDurationPresent) {
16421 result.defaultSampleDuration = view.getUint32(i);
16422 i += 4;
16423 }
16424 if (defaultSampleSizePresent) {
16425 result.defaultSampleSize = view.getUint32(i);
16426 i += 4;
16427 }
16428 if (defaultSampleFlagsPresent) {
16429 result.defaultSampleFlags = view.getUint32(i);
16430 }
16431 if (durationIsEmpty) {
16432 result.durationIsEmpty = true;
16433 }
16434 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
16435 result.baseDataOffsetIsMoof = true;
16436 }
16437 return result;
16438 },
  // track header box
  tkhd: function tkhd(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        i = 4,
        result = {
      version: view.getUint8(0),
      flags: new Uint8Array(data.subarray(1, 4))
    };
    if (result.version === 1) {
      // version 1 stores 64-bit times; only the low 32 bits are read
      i += 4;
      result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
      i += 8;
      result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
      i += 4;
      result.trackId = view.getUint32(i);
      i += 4;
      i += 8; // reserved + upper half of duration
      result.duration = view.getUint32(i); // truncating top 4 bytes
    } else {
      result.creationTime = parseMp4Date(view.getUint32(i));
      i += 4;
      result.modificationTime = parseMp4Date(view.getUint32(i));
      i += 4;
      result.trackId = view.getUint32(i);
      i += 4;
      i += 4; // reserved
      result.duration = view.getUint32(i);
    }
    i += 4;
    i += 2 * 4; // reserved
    result.layer = view.getUint16(i);
    i += 2;
    result.alternateGroup = view.getUint16(i);
    i += 2;
    // convert fixed-point, base 16 back to a number
    // NOTE(review): volume is 8.8 fixed point, usually divided by 256
    // rather than 8 -- confirm this divisor is intentional
    result.volume = view.getUint8(i) + view.getUint8(i + 1) / 8;
    i += 2;
    i += 2; // reserved
    result.matrix = new Uint32Array(data.subarray(i, i + 9 * 4));
    i += 9 * 4;
    // 16.16 fixed-point width and height
    result.width = view.getUint16(i) + view.getUint16(i + 2) / 65536;
    i += 4;
    result.height = view.getUint16(i) + view.getUint16(i + 2) / 65536;
    return result;
  },
  // track fragment box: container for child boxes
  traf: function traf(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  // track box: container for child boxes
  trak: function trak(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  // track extends box: per-track defaults used by movie fragments
  trex: function trex(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
    return {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      trackId: view.getUint32(4),
      defaultSampleDescriptionIndex: view.getUint32(8),
      defaultSampleDuration: view.getUint32(12),
      defaultSampleSize: view.getUint32(16),
      sampleDependsOn: data[20] & 0x03,
      sampleIsDependedOn: (data[21] & 0xc0) >> 6,
      sampleHasRedundancy: (data[21] & 0x30) >> 4,
      samplePaddingValue: (data[21] & 0x0e) >> 1,
      sampleIsDifferenceSample: !!(data[21] & 0x01),
      sampleDegradationPriority: view.getUint16(22)
    };
  },
    // Parse a TrackRunBox (trun): a table of per-sample durations, sizes,
    // flags and composition time offsets. Which columns are present is
    // signaled by the box flags; the first sample may additionally carry its
    // own flag override (first-sample-flags-present).
    trun: function trun(data) {
      var result = {
        version: data[0],
        flags: new Uint8Array(data.subarray(1, 4)),
        samples: []
      },
          view = new DataView(data.buffer, data.byteOffset, data.byteLength),

      // Flag interpretation
      dataOffsetPresent = result.flags[2] & 0x01,
      // compare with 2nd byte of 0x1
      firstSampleFlagsPresent = result.flags[2] & 0x04,
      // compare with 2nd byte of 0x4
      sampleDurationPresent = result.flags[1] & 0x01,
      // compare with 2nd byte of 0x100
      sampleSizePresent = result.flags[1] & 0x02,
      // compare with 2nd byte of 0x200
      sampleFlagsPresent = result.flags[1] & 0x04,
      // compare with 2nd byte of 0x400
      sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
      // compare with 2nd byte of 0x800
      sampleCount = view.getUint32(4),
          offset = 8,
          sample;

      if (dataOffsetPresent) {
        // 32 bit signed integer
        result.dataOffset = view.getInt32(offset);
        offset += 4;
      }

      // Overrides the flags for the first sample only. The order of
      // optional values will be: duration, size, compositionTimeOffset
      if (firstSampleFlagsPresent && sampleCount) {
        sample = {
          flags: parseSampleFlags(data.subarray(offset, offset + 4))
        };
        offset += 4;
        if (sampleDurationPresent) {
          sample.duration = view.getUint32(offset);
          offset += 4;
        }
        if (sampleSizePresent) {
          sample.size = view.getUint32(offset);
          offset += 4;
        }
        if (sampleCompositionTimeOffsetPresent) {
          // Note: this should be a signed int if version is 1
          sample.compositionTimeOffset = view.getUint32(offset);
          offset += 4;
        }
        result.samples.push(sample);
        // the first sample has been consumed by the override branch
        sampleCount--;
      }

      // read the remaining fixed-layout sample records
      while (sampleCount--) {
        sample = {};
        if (sampleDurationPresent) {
          sample.duration = view.getUint32(offset);
          offset += 4;
        }
        if (sampleSizePresent) {
          sample.size = view.getUint32(offset);
          offset += 4;
        }
        if (sampleFlagsPresent) {
          sample.flags = parseSampleFlags(data.subarray(offset, offset + 4));
          offset += 4;
        }
        if (sampleCompositionTimeOffsetPresent) {
          // Note: this should be a signed int if version is 1
          sample.compositionTimeOffset = view.getUint32(offset);
          offset += 4;
        }
        result.samples.push(sample);
      }
      return result;
    },
    // Parse a DataEntryUrlBox ('url '): version and flags only.
    'url ': function url(data) {
      return {
        version: data[0],
        flags: new Uint8Array(data.subarray(1, 4))
      };
    },
    // Parse a VideoMediaHeaderBox (vmhd): graphics transfer mode plus the
    // three 16-bit opcolor components.
    vmhd: function vmhd(data) {
      var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
      return {
        version: data[0],
        flags: new Uint8Array(data.subarray(1, 4)),
        graphicsmode: view.getUint16(4),
        opcolor: new Uint16Array([view.getUint16(6), view.getUint16(8), view.getUint16(10)])
      };
    }
16603 };
16604
16605 /**
16606 * Return a javascript array of box objects parsed from an ISO base
16607 * media file.
16608 * @param data {Uint8Array} the binary data of the media to be inspected
16609 * @return {array} a javascript array of potentially nested box objects
16610 */
16611 inspectMp4 = function inspectMp4(data) {
16612 var i = 0,
16613 result = [],
16614 view,
16615 size,
16616 type,
16617 end,
16618 box;
16619
16620 // Convert data from Uint8Array to ArrayBuffer, to follow Dataview API
16621 var ab = new ArrayBuffer(data.length);
16622 var v = new Uint8Array(ab);
16623 for (var z = 0; z < data.length; ++z) {
16624 v[z] = data[z];
16625 }
16626 view = new DataView(ab);
16627
16628 while (i < data.byteLength) {
16629 // parse box data
16630 size = view.getUint32(i);
16631 type = parseType(data.subarray(i + 4, i + 8));
16632 end = size > 1 ? i + size : data.byteLength;
16633
16634 // parse type-specific data
16635 box = (parse$1[type] || function (data) {
16636 return {
16637 data: data
16638 };
16639 })(data.subarray(i + 8, end));
16640 box.size = size;
16641 box.type = type;
16642
16643 // store this box and move to the next
16644 result.push(box);
16645 i = end;
16646 }
16647 return result;
16648 };
16649
16650 /**
16651 * Returns a textual representation of the javascript represtentation
16652 * of an MP4 file. You can use it as an alternative to
16653 * JSON.stringify() to compare inspected MP4s.
16654 * @param inspectedMp4 {array} the parsed array of boxes in an MP4
16655 * file
16656 * @param depth {number} (optional) the number of ancestor boxes of
16657 * the elements of inspectedMp4. Assumed to be zero if unspecified.
16658 * @return {string} a text representation of the parsed MP4
16659 */
  _textifyMp = function textifyMp4(inspectedMp4, depth) {
    var indent;
    depth = depth || 0;
    // two spaces of indentation per nesting level
    indent = new Array(depth * 2 + 1).join(' ');

    // iterate over all the boxes
    return inspectedMp4.map(function (box, index) {

      // list the box type first at the current indentation level
      return indent + box.type + '\n' +

      // the type is already included and handle child boxes separately
      Object.keys(box).filter(function (key) {
        return key !== 'type' && key !== 'boxes';

        // output all the box properties
      }).map(function (key) {
        var prefix = indent + ' ' + key + ': ',
            value = box[key];

        // print out raw bytes as hexademical
        if (value instanceof Uint8Array || value instanceof Uint32Array) {
          // format as space-separated hex pairs, wrapped to 24-char rows
          var bytes = Array.prototype.slice.call(new Uint8Array(value.buffer, value.byteOffset, value.byteLength)).map(function (byte) {
            return ' ' + ('00' + byte.toString(16)).slice(-2);
          }).join('').match(/.{1,24}/g);
          if (!bytes) {
            // zero-length byte arrays render as an empty angle-bracket pair
            return prefix + '<>';
          }
          if (bytes.length === 1) {
            return prefix + '<' + bytes.join('').slice(1) + '>';
          }
          return prefix + '<\n' + bytes.map(function (line) {
            return indent + ' ' + line;
          }).join('\n') + '\n' + indent + ' >';
        }

        // stringify generic objects
        return prefix + JSON.stringify(value, null, 2).split('\n').map(function (line, index) {
          if (index === 0) {
            return line;
          }
          return indent + ' ' + line;
        }).join('\n');
      }).join('\n') + (

      // recursively textify the child boxes
      box.boxes ? '\n' + _textifyMp(box.boxes, depth + 1) : '');
    }).join('\n');
  };
16709
  // Public surface of the MP4 inspector: the generic inspector/textifier
  // plus direct access to a handful of individual box parsers.
  var mp4Inspector = {
    inspect: inspectMp4,
    textify: _textifyMp,
    parseType: parseType,
    findBox: findBox,
    parseTraf: parse$1.traf,
    parseTfdt: parse$1.tfdt,
    parseHdlr: parse$1.hdlr,
    parseTfhd: parse$1.tfhd,
    parseTrun: parse$1.trun,
    parseSidx: parse$1.sidx
  };
16722
16723 var EventTarget$1 = videojs.EventTarget,
16724 mergeOptions$2 = videojs.mergeOptions;
16725
16726 /**
16727 * Returns a new master manifest that is the result of merging an updated master manifest
16728 * into the original version.
16729 *
16730 * @param {Object} oldMaster
16731 * The old parsed mpd object
16732 * @param {Object} newMaster
16733 * The updated parsed mpd object
16734 * @return {Object}
16735 * A new object representing the original master manifest with the updated media
16736 * playlists merged in
16737 */
16738
  var updateMaster$1 = function updateMaster$$1(oldMaster, newMaster) {
    var noChanges = void 0;
    var update = mergeOptions$2(oldMaster, {
      // These are top level properties that can be updated
      duration: newMaster.duration,
      minimumUpdatePeriod: newMaster.minimumUpdatePeriod
    });

    // First update the playlists in playlist list
    for (var i = 0; i < newMaster.playlists.length; i++) {
      var playlistUpdate = updateMaster(update, newMaster.playlists[i]);

      if (playlistUpdate) {
        update = playlistUpdate;
      } else {
        // NOTE(review): a single unchanged playlist sets noChanges even when
        // other playlists DID update, which can discard the whole merge below
        // unless a media-group update resets it — confirm this is intended.
        noChanges = true;
      }
    }

    // Then update media group playlists
    forEachMediaGroup(newMaster, function (properties, type, group, label) {
      if (properties.playlists && properties.playlists.length) {
        var id = properties.playlists[0].id;
        var _playlistUpdate = updateMaster(update, properties.playlists[0]);

        if (_playlistUpdate) {
          update = _playlistUpdate;
          // update the playlist reference within media groups
          update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
          noChanges = false;
        }
      }
    });

    // null signals to the caller that nothing needed refreshing
    if (noChanges) {
      return null;
    }

    return update;
  };
16779
16780 var generateSidxKey = function generateSidxKey(sidxInfo) {
16781 // should be non-inclusive
16782 var sidxByteRangeEnd = sidxInfo.byterange.offset + sidxInfo.byterange.length - 1;
16783
16784 return sidxInfo.uri + '-' + sidxInfo.byterange.offset + '-' + sidxByteRangeEnd;
16785 };
16786
16787 // SIDX should be equivalent if the URI and byteranges of the SIDX match.
16788 // If the SIDXs have maps, the two maps should match,
16789 // both `a` and `b` missing SIDXs is considered matching.
16790 // If `a` or `b` but not both have a map, they aren't matching.
16791 var equivalentSidx = function equivalentSidx(a, b) {
16792 var neitherMap = Boolean(!a.map && !b.map);
16793
16794 var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
16795
16796 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
16797 };
16798
16799 // exported for testing
16800 var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
16801 var newSidxMapping = {};
16802
16803 for (var id in playlists) {
16804 var playlist = playlists[id];
16805 var currentSidxInfo = playlist.sidx;
16806
16807 if (currentSidxInfo) {
16808 var key = generateSidxKey(currentSidxInfo);
16809
16810 if (!oldSidxMapping[key]) {
16811 break;
16812 }
16813
16814 var savedSidxInfo = oldSidxMapping[key].sidxInfo;
16815
16816 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
16817 newSidxMapping[key] = oldSidxMapping[key];
16818 }
16819 }
16820 }
16821
16822 return newSidxMapping;
16823 };
16824
16825 /**
16826 * A function that filters out changed items as they need to be requested separately.
16827 *
16828 * The method is exported for testing
16829 *
16830 * @param {Object} masterXml the mpd XML
16831 * @param {string} srcUrl the mpd url
16832 * @param {Date} clientOffset a time difference between server and client (passed through and not used)
16833 * @param {Object} oldSidxMapping the SIDX to compare against
16834 */
  var filterChangedSidxMappings = function filterChangedSidxMappings(masterXml, srcUrl, clientOffset, oldSidxMapping) {
    // Don't pass current sidx mapping
    var master = parse(masterXml, {
      manifestUri: srcUrl,
      clientOffset: clientOffset
    });

    // start from the main (video) playlists...
    var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
    var mediaGroupSidx = videoSidx;

    // ...then merge in the unchanged entries from every media group playlist
    forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
      if (properties.playlists && properties.playlists.length) {
        var playlists = properties.playlists;

        mediaGroupSidx = mergeOptions$2(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
      }
    });

    return mediaGroupSidx;
  };
16855
16856 // exported for testing
  // exported for testing
  // Issue an arraybuffer XHR for a playlist's SIDX byte range and return the
  // in-flight request; finishProcessingFn receives (error, request).
  var requestSidx_ = function requestSidx_(sidxRange, playlist, xhr, options, finishProcessingFn) {
    var sidxInfo = {
      // resolve the segment URL relative to the playlist
      uri: resolveManifestRedirect(options.handleManifestRedirects, sidxRange.resolvedUri),
      // resolvedUri: sidxRange.resolvedUri,
      byterange: sidxRange.byterange,
      // the segment's playlist
      playlist: playlist
    };

    // byterange requests need Range headers and a binary response type
    var sidxRequestOptions = videojs.mergeOptions(sidxInfo, {
      responseType: 'arraybuffer',
      headers: segmentXhrHeaders(sidxInfo)
    });

    return xhr(sidxRequestOptions, finishProcessingFn);
  };
16874
16875 var DashPlaylistLoader = function (_EventTarget) {
16876 inherits(DashPlaylistLoader, _EventTarget);
16877
16878 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
16879 // playlist loader setups from media groups will expect to be able to pass a playlist
16880 // (since there aren't external URLs to media playlists with DASH)
    /**
     * Create a loader for a DASH source. A master loader is constructed with
     * the manifest URL (string); a child loader for a media group is
     * constructed with an already-parsed playlist object plus a reference to
     * the master loader (4th argument).
     *
     * @param {string|Object} srcUrlOrPlaylist manifest URL or playlist object
     * @param {Object} hls the tech instance (provides `xhr`)
     * @param {Object} [options] supports withCredentials and
     *        handleManifestRedirects (both default false)
     * @throws {Error} when srcUrlOrPlaylist is empty
     */
    function DashPlaylistLoader(srcUrlOrPlaylist, hls) {
      var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
      var masterPlaylistLoader = arguments[3];
      classCallCheck(this, DashPlaylistLoader);

      var _this = possibleConstructorReturn(this, (DashPlaylistLoader.__proto__ || Object.getPrototypeOf(DashPlaylistLoader)).call(this));

      var _options$withCredenti = options.withCredentials,
          withCredentials = _options$withCredenti === undefined ? false : _options$withCredenti,
          _options$handleManife = options.handleManifestRedirects,
          handleManifestRedirects = _options$handleManife === undefined ? false : _options$handleManife;


      _this.hls_ = hls;
      _this.withCredentials = withCredentials;
      _this.handleManifestRedirects = handleManifestRedirects;

      if (!srcUrlOrPlaylist) {
        throw new Error('A non-empty playlist URL or playlist is required');
      }

      // event naming?
      _this.on('minimumUpdatePeriod', function () {
        _this.refreshXml_();
      });

      // live playlist staleness timeout
      _this.on('mediaupdatetimeout', function () {
        _this.refreshMedia_(_this.media().id);
      });

      _this.state = 'HAVE_NOTHING';
      _this.loadedPlaylists_ = {};

      // initialize the loader state
      // The masterPlaylistLoader will be created with a string
      if (typeof srcUrlOrPlaylist === 'string') {
        _this.srcUrl = srcUrlOrPlaylist;
        // TODO: reset sidxMapping between period changes
        // once multi-period is refactored
        _this.sidxMapping_ = {};
        return possibleConstructorReturn(_this);
      }

      _this.setupChildLoader(masterPlaylistLoader, srcUrlOrPlaylist);
      return _this;
    }
16928
16929 createClass(DashPlaylistLoader, [{
16930 key: 'setupChildLoader',
    // Point this (child) loader at its parent master loader and the playlist
    // object it manages.
    value: function setupChildLoader(masterPlaylistLoader, playlist) {
      this.masterPlaylistLoader_ = masterPlaylistLoader;
      this.childPlaylist_ = playlist;
    }
16935 }, {
16936 key: 'dispose',
    // Tear down the loader: abort any in-flight request, clear all pending
    // timers, drop the playlist cache and remove every event listener.
    value: function dispose() {
      this.trigger('dispose');
      this.stopRequest();
      this.loadedPlaylists_ = {};
      window_1.clearTimeout(this.minimumUpdatePeriodTimeout_);
      window_1.clearTimeout(this.mediaRequest_);
      window_1.clearTimeout(this.mediaUpdateTimeout);

      this.off();
    }
16947 }, {
16948 key: 'hasPendingRequest',
    // Truthy when either an XHR or a deferred (setTimeout-based) media
    // request is outstanding.
    value: function hasPendingRequest() {
      return this.request || this.mediaRequest_;
    }
16952 }, {
16953 key: 'stopRequest',
    // Abort the in-flight XHR, detaching its handler first so the abort
    // itself does not surface as an error.
    value: function stopRequest() {
      if (this.request) {
        var oldRequest = this.request;

        this.request = null;
        oldRequest.onreadystatechange = null;
        oldRequest.abort();
      }
    }
16963 }, {
16964 key: 'sidxRequestFinished_',
    // Build the (error, request) callback that finishes a SIDX byte-range
    // request. On error the loader state is restored to startingState and
    // doneFn receives (master, null); on success the sidx box is parsed from
    // the response (skipping the 8-byte box header) and passed to doneFn.
    value: function sidxRequestFinished_(playlist, master, startingState, doneFn) {
      var _this2 = this;

      return function (err, request) {
        // disposed
        if (!_this2.request) {
          return;
        }

        // pending request is cleared
        _this2.request = null;

        if (err) {
          _this2.error = {
            status: request.status,
            message: 'DASH playlist request error at URL: ' + playlist.uri,
            response: request.response,
            // MEDIA_ERR_NETWORK
            code: 2
          };
          if (startingState) {
            _this2.state = startingState;
          }

          _this2.trigger('error');
          return doneFn(master, null);
        }

        var bytes = new Uint8Array(request.response);
        // skip the size/type header and parse the sidx payload
        var sidx = mp4Inspector.parseSidx(bytes.subarray(8));

        return doneFn(master, sidx);
      };
    }
16999 }, {
17000 key: 'media',
    /**
     * Getter/setter for the active media playlist. With no argument, returns
     * the current playlist. With a playlist (or playlist URI string), switches
     * to it: cached endList playlists switch synchronously; otherwise
     * haveMetadata is deferred a tick, after an optional SIDX byte-range
     * request when the playlist declares one.
     *
     * @param {Object|string} [playlist] playlist object or URI to switch to
     * @throws {Error} when called before any data loaded, or with an unknown URI
     */
    value: function media(playlist) {
      var _this3 = this;

      // getter
      if (!playlist) {
        return this.media_;
      }

      // setter
      if (this.state === 'HAVE_NOTHING') {
        throw new Error('Cannot switch media playlist from ' + this.state);
      }

      var startingState = this.state;

      // find the playlist object if the target playlist has been specified by URI
      if (typeof playlist === 'string') {
        if (!this.master.playlists[playlist]) {
          throw new Error('Unknown playlist URI: ' + playlist);
        }
        playlist = this.master.playlists[playlist];
      }

      var mediaChange = !this.media_ || playlist.id !== this.media_.id;

      // switch to previously loaded playlists immediately
      if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
        this.state = 'HAVE_METADATA';
        this.media_ = playlist;

        // trigger media change if the active media has been updated
        // (mediaChange is always true in this branch, so this check is redundant)
        if (mediaChange) {
          this.trigger('mediachanging');
          this.trigger('mediachange');
        }
        return;
      }

      // switching to the active playlist is a no-op
      if (!mediaChange) {
        return;
      }

      // switching from an already loaded playlist
      if (this.media_) {
        this.trigger('mediachanging');
      }

      if (!playlist.sidx) {
        // Continue asynchronously if there is no sidx
        // wait one tick to allow haveMaster to run first on a child loader
        this.mediaRequest_ = window_1.setTimeout(this.haveMetadata.bind(this, { startingState: startingState, playlist: playlist }), 0);

        // exit early and don't do sidx work
        return;
      }

      // we have sidx mappings
      var oldMaster = void 0;
      var sidxMapping = void 0;

      // sidxMapping is used when parsing the masterXml, so store
      // it on the masterPlaylistLoader
      if (this.masterPlaylistLoader_) {
        oldMaster = this.masterPlaylistLoader_.master;
        sidxMapping = this.masterPlaylistLoader_.sidxMapping_;
      } else {
        oldMaster = this.master;
        sidxMapping = this.sidxMapping_;
      }

      var sidxKey = generateSidxKey(playlist.sidx);

      sidxMapping[sidxKey] = {
        sidxInfo: playlist.sidx
      };

      this.request = requestSidx_(playlist.sidx, playlist, this.hls_.xhr, { handleManifestRedirects: this.handleManifestRedirects }, this.sidxRequestFinished_(playlist, oldMaster, startingState, function (newMaster, sidx) {
        if (!newMaster || !sidx) {
          throw new Error('failed to request sidx');
        }

        // update loader's sidxMapping with parsed sidx box
        sidxMapping[sidxKey].sidx = sidx;

        // everything is ready just continue to haveMetadata
        _this3.haveMetadata({
          startingState: startingState,
          playlist: newMaster.playlists[playlist.id]
        });
      }));
    }
17093 }, {
17094 key: 'haveMetadata',
    // Finish a media switch: record the playlist as loaded, refresh it (which
    // triggers loadedplaylist), and fire loadedmetadata on the first load or
    // mediachange on subsequent switches.
    value: function haveMetadata(_ref) {
      var startingState = _ref.startingState,
          playlist = _ref.playlist;

      this.state = 'HAVE_METADATA';
      this.loadedPlaylists_[playlist.id] = playlist;
      this.mediaRequest_ = null;

      // This will trigger loadedplaylist
      this.refreshMedia_(playlist.id);

      // fire loadedmetadata the first time a media playlist is loaded
      // to resolve setup of media groups
      if (startingState === 'HAVE_MASTER') {
        this.trigger('loadedmetadata');
      } else {
        // trigger media change if the active media has been updated
        this.trigger('mediachange');
      }
    }
17115 }, {
17116 key: 'pause',
    // Halt the loader: abort requests and cancel refresh timers. Pausing
    // before any data arrived resets the loader to an unstarted state.
    value: function pause() {
      this.stopRequest();
      window_1.clearTimeout(this.mediaUpdateTimeout);
      window_1.clearTimeout(this.minimumUpdatePeriodTimeout_);
      if (this.state === 'HAVE_NOTHING') {
        // If we pause the loader before any data has been retrieved, its as if we never
        // started, so reset to an unstarted state.
        this.started = false;
      }
    }
17127 }, {
17128 key: 'load',
    /**
     * (Re)start loading. For a final rendition, retries after half a target
     * duration (or 5s when no media is selected). Otherwise starts the loader
     * if needed, refreshes live playlists, or just re-announces the playlist.
     *
     * @param {boolean} [isFinalRendition] delay and retry instead of loading now
     */
    value: function load(isFinalRendition) {
      var _this4 = this;

      window_1.clearTimeout(this.mediaUpdateTimeout);
      window_1.clearTimeout(this.minimumUpdatePeriodTimeout_);

      var media = this.media();

      if (isFinalRendition) {
        var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;

        this.mediaUpdateTimeout = window_1.setTimeout(function () {
          return _this4.load();
        }, delay);
        return;
      }

      // because the playlists are internal to the manifest, load should either load the
      // main manifest, or do nothing but trigger an event
      if (!this.started) {
        this.start();
        return;
      }

      if (media && !media.endList) {
        this.trigger('mediaupdatetimeout');
      } else {
        this.trigger('loadedplaylist');
      }
    }
17159
17160 /**
17161 * Parses the master xml string and updates playlist uri references
17162 *
17163 * @return {Object}
17164 * The parsed mpd manifest object
17165 */
17166
17167 }, {
17168 key: 'parseMasterXml',
    value: function parseMasterXml() {
      // parse with the current clock offset and any cached sidx boxes
      var master = parse(this.masterXml_, {
        manifestUri: this.srcUrl,
        clientOffset: this.clientOffset_,
        sidxMapping: this.sidxMapping_
      });

      master.uri = this.srcUrl;

      // Set up phony URIs for the playlists since we won't have external URIs for DASH
      // but reference playlists by their URI throughout the project
      // TODO: Should we create the dummy uris in mpd-parser as well (leaning towards yes).
      for (var i = 0; i < master.playlists.length; i++) {
        var phonyUri = 'placeholder-uri-' + i;

        master.playlists[i].uri = phonyUri;
      }

      // set up phony URIs for the media group playlists since we won't have external
      // URIs for DASH but reference playlists by their URI throughout the project
      forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
        if (properties.playlists && properties.playlists.length) {
          var _phonyUri = 'placeholder-uri-' + mediaType + '-' + groupKey + '-' + labelKey;
          var id = createPlaylistID(0, _phonyUri);

          properties.playlists[0].uri = _phonyUri;
          properties.playlists[0].id = id;
          // setup ID and URI references (URI for backwards compatibility)
          master.playlists[id] = properties.playlists[0];
          master.playlists[_phonyUri] = properties.playlists[0];
        }
      });

      setupMediaPlaylists(master);
      resolveMediaGroupUris(master);

      return master;
    }
17207 }, {
17208 key: 'start',
    // Begin loading: a child loader defers straight to haveMaster_; a master
    // loader requests the MPD, records it, resolves redirects and kicks off
    // client/server clock synchronization.
    value: function start() {
      var _this5 = this;

      this.started = true;

      // We don't need to request the master manifest again
      // Call this asynchronously to match the xhr request behavior below
      if (this.masterPlaylistLoader_) {
        this.mediaRequest_ = window_1.setTimeout(this.haveMaster_.bind(this), 0);
        return;
      }

      // request the specified URL
      this.request = this.hls_.xhr({
        uri: this.srcUrl,
        withCredentials: this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this5.request) {
          return;
        }

        // clear the loader's request reference
        _this5.request = null;

        if (error) {
          _this5.error = {
            status: req.status,
            message: 'DASH playlist request error at URL: ' + _this5.srcUrl,
            responseText: req.responseText,
            // MEDIA_ERR_NETWORK
            code: 2
          };
          if (_this5.state === 'HAVE_NOTHING') {
            _this5.started = false;
          }
          return _this5.trigger('error');
        }

        _this5.masterXml_ = req.responseText;

        // prefer the server's Date header as the manifest load time
        if (req.responseHeaders && req.responseHeaders.date) {
          _this5.masterLoaded_ = Date.parse(req.responseHeaders.date);
        } else {
          _this5.masterLoaded_ = Date.now();
        }

        _this5.srcUrl = resolveManifestRedirect(_this5.handleManifestRedirects, _this5.srcUrl, req);

        _this5.syncClientServerClock_(_this5.onClientServerClockSync_.bind(_this5));
      });
    }
17261
17262 /**
17263 * Parses the master xml for UTCTiming node to sync the client clock to the server
17264 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
17265 *
17266 * @param {Function} done
17267 * Function to call when clock sync has completed
17268 */
17269
17270 }, {
17271 key: 'syncClientServerClock_',
    value: function syncClientServerClock_(done) {
      var _this6 = this;

      var utcTiming = parseUTCTiming(this.masterXml_);

      // No UTCTiming element found in the mpd. Use Date header from mpd request as the
      // server clock
      if (utcTiming === null) {
        this.clientOffset_ = this.masterLoaded_ - Date.now();
        return done();
      }

      // DIRECT supplies the server time inline, no extra request needed
      if (utcTiming.method === 'DIRECT') {
        this.clientOffset_ = utcTiming.value - Date.now();
        return done();
      }

      // HEAD/GET methods need a request to the timing server
      this.request = this.hls_.xhr({
        uri: resolveUrl(this.srcUrl, utcTiming.value),
        method: utcTiming.method,
        withCredentials: this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this6.request) {
          return;
        }

        if (error) {
          // sync request failed, fall back to using date header from mpd
          // TODO: log warning
          _this6.clientOffset_ = _this6.masterLoaded_ - Date.now();
          return done();
        }

        var serverTime = void 0;

        if (utcTiming.method === 'HEAD') {
          if (!req.responseHeaders || !req.responseHeaders.date) {
            // expected date header not preset, fall back to using date header from mpd
            // TODO: log warning
            serverTime = _this6.masterLoaded_;
          } else {
            serverTime = Date.parse(req.responseHeaders.date);
          }
        } else {
          serverTime = Date.parse(req.responseText);
        }

        _this6.clientOffset_ = serverTime - Date.now();

        done();
      });
    }
17325 }, {
17326 key: 'haveMaster_',
    // Transition to HAVE_MASTER. A master loader parses the MPD and announces
    // it; a child loader instead selects the playlist it was created with.
    value: function haveMaster_() {
      this.state = 'HAVE_MASTER';
      // clear media request
      this.mediaRequest_ = null;

      if (!this.masterPlaylistLoader_) {
        this.master = this.parseMasterXml();
        // We have the master playlist at this point, so
        // trigger this to allow MasterPlaylistController
        // to make an initial playlist selection
        this.trigger('loadedplaylist');
      } else if (!this.media_) {
        // no media playlist was specifically selected so select
        // the one the child playlist loader was created with
        this.media(this.childPlaylist_);
      }
    }
17344
17345 /**
17346 * Handler for after client/server clock synchronization has happened. Sets up
17347 * xml refresh timer if specificed by the manifest.
17348 */
17349
17350 }, {
17351 key: 'onClientServerClockSync_',
    value: function onClientServerClockSync_() {
      var _this7 = this;

      this.haveMaster_();

      // auto-select the first playlist unless a selection is already in flight
      if (!this.hasPendingRequest() && !this.media_) {
        this.media(this.master.playlists[0]);
      }

      // TODO: minimumUpdatePeriod can have a value of 0. Currently the manifest will not
      // be refreshed when this is the case. The inter-op guide says that when the
      // minimumUpdatePeriod is 0, the manifest should outline all currently available
      // segments, but future segments may require an update. I think a good solution
      // would be to update the manifest at the same rate that the media playlists
      // are "refreshed", i.e. every targetDuration.
      if (this.master && this.master.minimumUpdatePeriod) {
        this.minimumUpdatePeriodTimeout_ = window_1.setTimeout(function () {
          _this7.trigger('minimumUpdatePeriod');
        }, this.master.minimumUpdatePeriod);
      }
    }
17373
17374 /**
17375 * Sends request to refresh the master xml and updates the parsed master manifest
17376 * TODO: Does the client offset need to be recalculated when the xml is refreshed?
17377 */
17378
17379 }, {
17380 key: 'refreshXml_',
    value: function refreshXml_() {
      var _this8 = this;

      // The srcUrl here *may* need to pass through handleManifestsRedirects when
      // sidx is implemented
      this.request = this.hls_.xhr({
        uri: this.srcUrl,
        withCredentials: this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this8.request) {
          return;
        }

        // clear the loader's request reference
        _this8.request = null;

        if (error) {
          _this8.error = {
            status: req.status,
            message: 'DASH playlist request error at URL: ' + _this8.srcUrl,
            responseText: req.responseText,
            // MEDIA_ERR_NETWORK
            code: 2
          };
          if (_this8.state === 'HAVE_NOTHING') {
            _this8.started = false;
          }
          return _this8.trigger('error');
        }

        _this8.masterXml_ = req.responseText;

        // This will filter out updated sidx info from the mapping
        _this8.sidxMapping_ = filterChangedSidxMappings(_this8.masterXml_, _this8.srcUrl, _this8.clientOffset_, _this8.sidxMapping_);

        var master = _this8.parseMasterXml();
        var updatedMaster = updateMaster$1(_this8.master, master);
        // NOTE(review): assumes a media playlist is already selected; media()
        // returning undefined here would throw — confirm callers guarantee this.
        var currentSidxInfo = _this8.media().sidx;

        if (updatedMaster) {
          if (currentSidxInfo) {
            // NOTE(review): in this branch updatedMaster is never assigned to
            // _this8.master, so a manifest update with an unchanged sidx is
            // effectively dropped — confirm this is intended.
            var sidxKey = generateSidxKey(currentSidxInfo);

            // the sidx was updated, so the previous mapping was removed
            if (!_this8.sidxMapping_[sidxKey]) {
              var playlist = _this8.media();

              _this8.request = requestSidx_(playlist.sidx, playlist, _this8.hls_.xhr, { handleManifestRedirects: _this8.handleManifestRedirects }, _this8.sidxRequestFinished_(playlist, master, _this8.state, function (newMaster, sidx) {
                if (!newMaster || !sidx) {
                  throw new Error('failed to request sidx on minimumUpdatePeriod');
                }

                // update loader's sidxMapping with parsed sidx box
                _this8.sidxMapping_[sidxKey].sidx = sidx;

                // NOTE(review): the trailing setTimeout below also fires, so a
                // sidx refetch may schedule two minimumUpdatePeriod timers —
                // only the later handle is retained for clearTimeout. Verify.
                _this8.minimumUpdatePeriodTimeout_ = window_1.setTimeout(function () {
                  _this8.trigger('minimumUpdatePeriod');
                }, _this8.master.minimumUpdatePeriod);

                // TODO: do we need to reload the current playlist?
                _this8.refreshMedia_(_this8.media().id);

                return;
              }));
            }
          } else {

            _this8.master = updatedMaster;
          }
        }

        // schedule the next refresh
        _this8.minimumUpdatePeriodTimeout_ = window_1.setTimeout(function () {
          _this8.trigger('minimumUpdatePeriod');
        }, _this8.master.minimumUpdatePeriod);
      });
    }
17458
17459 /**
17460 * Refreshes the media playlist by re-parsing the master xml and updating playlist
17461 * references. If this is an alternate loader, the updated parsed manifest is retrieved
17462 * from the master loader.
17463 */
17464
17465 }, {
17466 key: 'refreshMedia_',
    /**
     * Re-parse the master XML (via the master loader for children), merge it
     * into the current master and repoint media_ at the refreshed playlist.
     * Live playlists schedule the next mediaupdatetimeout tick.
     *
     * @param {string} mediaID id of the playlist to refresh
     * @throws {Error} when no media id is given
     */
    value: function refreshMedia_(mediaID) {
      var _this9 = this;

      if (!mediaID) {
        throw new Error('refreshMedia_ must take a media id');
      }

      var oldMaster = void 0;
      var newMaster = void 0;

      if (this.masterPlaylistLoader_) {
        oldMaster = this.masterPlaylistLoader_.master;
        newMaster = this.masterPlaylistLoader_.parseMasterXml();
      } else {
        oldMaster = this.master;
        newMaster = this.parseMasterXml();
      }

      var updatedMaster = updateMaster$1(oldMaster, newMaster);

      if (updatedMaster) {
        if (this.masterPlaylistLoader_) {
          this.masterPlaylistLoader_.master = updatedMaster;
        } else {
          this.master = updatedMaster;
        }
        this.media_ = updatedMaster.playlists[mediaID];
      } else {
        this.media_ = newMaster.playlists[mediaID];
        this.trigger('playlistunchanged');
      }

      // live playlists poll again; refreshDelay backs off when unchanged
      if (!this.media().endList) {
        this.mediaUpdateTimeout = window_1.setTimeout(function () {
          _this9.trigger('mediaupdatetimeout');
        }, refreshDelay(this.media(), !!updatedMaster));
      }

      this.trigger('loadedplaylist');
    }
17507 }]);
17508 return DashPlaylistLoader;
17509 }(EventTarget$1);
17510
var logger = function logger(source) {
  // without a debug logger available, logging is a no-op
  if (!videojs.log.debug) {
    return function () {};
  }

  // prefix every message with the component that produced it
  return videojs.log.debug.bind(videojs, 'VHS:', source + ' >');
};
17518
// Shared no-op used as a default callback placeholder throughout this module.
function noop() {}
17520
17521 /**
17522 * @file source-updater.js
17523 */
17524
17525 /**
17526 * A queue of callbacks to be serialized and applied when a
17527 * MediaSource and its associated SourceBuffers are not in the
17528 * updating state. It is used by the segment loader to update the
17529 * underlying SourceBuffers when new data is loaded, for instance.
17530 *
17531 * @class SourceUpdater
17532 * @param {MediaSource} mediaSource the MediaSource to create the
17533 * SourceBuffer from
 * @param {String} mimeType the desired MIME type of the underlying
 *        SourceBuffer
 * @param {String} type the loader type this updater serves; used to label log output
 * @param {Object} sourceBufferEmitter an event emitter that fires when a source buffer is
 *        added to the media source
17538 */
17539
var SourceUpdater = function () {
  function SourceUpdater(mediaSource, mimeType, type, sourceBufferEmitter) {
    classCallCheck(this, SourceUpdater);

    this.callbacks_ = [];
    this.pendingCallback_ = null;
    this.timestampOffset_ = 0;
    this.mediaSource = mediaSource;
    this.processedAppend_ = false;
    this.type_ = type;
    this.mimeType_ = mimeType;
    this.logger_ = logger('SourceUpdater[' + type + '][' + mimeType + ']');

    // a SourceBuffer can only be created once the MediaSource has opened
    if (mediaSource.readyState === 'closed') {
      mediaSource.addEventListener('sourceopen', this.createSourceBuffer_.bind(this, mimeType, sourceBufferEmitter));
    } else {
      this.createSourceBuffer_(mimeType, sourceBufferEmitter);
    }
  }

  createClass(SourceUpdater, [{
    key: 'createSourceBuffer_',
    value: function createSourceBuffer_(mimeType, sourceBufferEmitter) {
      var _this = this;

      this.sourceBuffer_ = this.mediaSource.addSourceBuffer(mimeType);

      this.logger_('created SourceBuffer');

      if (sourceBufferEmitter) {
        sourceBufferEmitter.trigger('sourcebufferadded');

        if (this.mediaSource.sourceBuffers.length < 2) {
          // There's another source buffer we must wait for before we can start updating
          // our own (or else we can get into a bad state, i.e., appending video/audio data
          // before the other video/audio source buffer is available and leading to a video
          // or audio only buffer).
          sourceBufferEmitter.on('sourcebufferadded', function () {
            _this.start_();
          });
          return;
        }
      }

      this.start_();
    }
  }, {
    key: 'start_',
    value: function start_() {
      var _this2 = this;

      this.started_ = true;

      // run completion handlers and process callbacks as updateend
      // events fire
      this.onUpdateendCallback_ = function () {
        var pendingCallback = _this2.pendingCallback_;

        _this2.pendingCallback_ = null;
        _this2.sourceBuffer_.removing = false;

        _this2.logger_('buffered [' + printableRange(_this2.buffered()) + ']');

        if (pendingCallback) {
          pendingCallback();
        }

        _this2.runCallback_();
      };

      this.sourceBuffer_.addEventListener('updateend', this.onUpdateendCallback_);

      this.runCallback_();
    }

    /**
     * Aborts the current segment and resets the segment parser.
     *
     * @param {Function} done function to call when done
     * @see http://w3c.github.io/media-source/#widl-SourceBuffer-abort-void
     */

  }, {
    key: 'abort',
    value: function abort(done) {
      var _this3 = this;

      // aborting before anything was appended is a no-op
      if (this.processedAppend_) {
        this.queueCallback_(function () {
          _this3.sourceBuffer_.abort();
        }, done);
      }
    }

    /**
     * Queue an update to append an ArrayBuffer.
     *
     * @param {Object} config object containing `bytes` to append and an optional
     *        `videoSegmentTimingInfoCallback` listener attached for the duration
     *        of the append
     * @param {Function} done the function to call when done
     * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
     */

  }, {
    key: 'appendBuffer',
    value: function appendBuffer(config, done) {
      var _this4 = this;

      this.processedAppend_ = true;
      this.queueCallback_(function () {
        if (config.videoSegmentTimingInfoCallback) {
          _this4.sourceBuffer_.addEventListener('videoSegmentTimingInfo', config.videoSegmentTimingInfoCallback);
        }
        _this4.sourceBuffer_.appendBuffer(config.bytes);
      }, function () {
        if (config.videoSegmentTimingInfoCallback) {
          _this4.sourceBuffer_.removeEventListener('videoSegmentTimingInfo', config.videoSegmentTimingInfoCallback);
        }
        done();
      });
    }

    /**
     * Indicates what TimeRanges are buffered in the managed SourceBuffer.
     *
     * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-buffered
     */

  }, {
    key: 'buffered',
    value: function buffered() {
      // before the source buffer exists there is nothing buffered
      if (!this.sourceBuffer_) {
        return videojs.createTimeRanges();
      }
      return this.sourceBuffer_.buffered;
    }

    /**
     * Queue an update to remove a time range from the buffer.
     *
     * @param {Number} start where to start the removal
     * @param {Number} end where to end the removal
     * @param {Function} [done=noop] optional callback to be executed when the remove
     * operation is complete
     * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
     */

  }, {
    key: 'remove',
    value: function remove(start, end) {
      var _this5 = this;

      var done = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : noop;

      if (this.processedAppend_) {
        this.queueCallback_(function () {
          _this5.logger_('remove [' + start + ' => ' + end + ']');
          // flag checked by dispose so it can wait for the in-flight remove
          _this5.sourceBuffer_.removing = true;
          _this5.sourceBuffer_.remove(start, end);
        }, done);
      }
    }

    /**
     * Whether the underlying sourceBuffer is updating or not
     *
     * @return {Boolean} the updating status of the SourceBuffer
     */

  }, {
    key: 'updating',
    value: function updating() {
      // we are updating if the sourcebuffer is updating or
      return !this.sourceBuffer_ || this.sourceBuffer_.updating ||
      // if we have a pending callback that is not our internal noop
      !!this.pendingCallback_ && this.pendingCallback_ !== noop;
    }

    /**
     * Set/get the timestampoffset on the SourceBuffer
     *
     * @return {Number} the timestamp offset
     */

  }, {
    key: 'timestampOffset',
    value: function timestampOffset(offset) {
      var _this6 = this;

      if (typeof offset !== 'undefined') {
        this.queueCallback_(function () {
          _this6.sourceBuffer_.timestampOffset = offset;
          _this6.runCallback_();
        });
        this.timestampOffset_ = offset;
      }
      return this.timestampOffset_;
    }

    /**
     * Queue a callback to run
     */

  }, {
    key: 'queueCallback_',
    value: function queueCallback_(callback, done) {
      this.callbacks_.push([callback.bind(this), done]);
      this.runCallback_();
    }

    /**
     * Run a queued callback
     */

  }, {
    key: 'runCallback_',
    value: function runCallback_() {
      var callbacks = void 0;

      // callbacks are only run serially, once the buffer is idle and started
      if (!this.updating() && this.callbacks_.length && this.started_) {
        callbacks = this.callbacks_.shift();
        this.pendingCallback_ = callbacks[1];
        callbacks[0]();
      }
    }

    /**
     * dispose of the source updater and the underlying sourceBuffer
     */

  }, {
    key: 'dispose',
    value: function dispose() {
      var _this7 = this;

      var disposeFn = function disposeFn() {
        if (_this7.sourceBuffer_ && _this7.mediaSource.readyState === 'open') {
          _this7.sourceBuffer_.abort();
        }
        _this7.sourceBuffer_.removeEventListener('updateend', disposeFn);
      };

      // The source buffer is only created once the media source has opened, so
      // it may not exist yet when dispose is called early; there is nothing to
      // detach or abort in that case.
      if (!this.sourceBuffer_) {
        return;
      }

      this.sourceBuffer_.removeEventListener('updateend', this.onUpdateendCallback_);
      if (this.sourceBuffer_.removing) {
        // wait for the in-flight remove to finish before aborting
        this.sourceBuffer_.addEventListener('updateend', disposeFn);
      } else {
        disposeFn();
      }
    }
  }]);
  return SourceUpdater;
}();
17791
// Tunable constants used by the adaptive bitrate and buffering logic.
var Config = {
  // buffer goals, presumably in seconds of media — TODO confirm against consumers
  GOAL_BUFFER_LENGTH: 30,
  MAX_GOAL_BUFFER_LENGTH: 60,
  GOAL_BUFFER_LENGTH_RATE: 1,
  // 0.5 MB/s, expressed in bits per second
  INITIAL_BANDWIDTH: 4194304,
  // A fudge factor to apply to advertised playlist bitrates to account for
  // temporary fluctuations in client bandwidth
  BANDWIDTH_VARIANCE: 1.2,
  // How much of the buffer must be filled before we consider upswitching
  BUFFER_LOW_WATER_LINE: 0,
  MAX_BUFFER_LOW_WATER_LINE: 30,
  BUFFER_LOW_WATER_LINE_RATE: 1
};
17806
// local aliases for the mux.js `bin` byte helpers
var toUnsigned$2 = bin.toUnsigned;
var toHexString$1 = bin.toHexString;

// forward declarations; assigned below and exported together via `probe`
var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks;
17811
17812 /**
17813 * Parses an MP4 initialization segment and extracts the timescale
17814 * values for any declared tracks. Timescale values indicate the
17815 * number of clock ticks per second to assume for time-based values
17816 * elsewhere in the MP4.
17817 *
17818 * To determine the start time of an MP4, you need two pieces of
17819 * information: the timescale unit and the earliest base media decode
17820 * time. Multiple timescales can be specified within an MP4 but the
17821 * base media decode time is always expressed in the timescale from
17822 * the media header box for the track:
17823 * ```
17824 * moov > trak > mdia > mdhd.timescale
17825 * ```
17826 * @param init {Uint8Array} the bytes of the init segment
17827 * @return {object} a hash of track ids to timescale values or null if
17828 * the init segment is malformed.
17829 */
timescale = function timescale(init) {
  var result = {},
      traks = mp4Inspector.findBox(init, ['moov', 'trak']);

  // mdhd timescale
  return traks.reduce(function (result, trak) {
    var tkhd, version, index, id, mdhd;

    // once a malformed trak has produced null, keep propagating null instead
    // of attempting `result[id] = ...` on it (which would throw a TypeError)
    if (result === null) {
      return null;
    }

    tkhd = mp4Inspector.findBox(trak, ['tkhd'])[0];
    if (!tkhd) {
      return null;
    }
    version = tkhd[0];
    // field offsets differ between version 0 and version 1 boxes
    index = version === 0 ? 12 : 20;
    id = toUnsigned$2(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);

    mdhd = mp4Inspector.findBox(trak, ['mdia', 'mdhd'])[0];
    if (!mdhd) {
      return null;
    }
    version = mdhd[0];
    index = version === 0 ? 12 : 20;
    result[id] = toUnsigned$2(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
    return result;
  }, result);
};
17856
17857 /**
17858 * Determine the base media decode start time, in seconds, for an MP4
17859 * fragment. If multiple fragments are specified, the earliest time is
17860 * returned.
17861 *
17862 * The base media decode time can be parsed from track fragment
17863 * metadata:
17864 * ```
17865 * moof > traf > tfdt.baseMediaDecodeTime
17866 * ```
17867 * It requires the timescale value from the mdhd to interpret.
17868 *
17869 * @param timescale {object} a hash of track ids to timescale values.
17870 * @return {number} the earliest base media decode start time for the
17871 * fragment, in seconds
17872 */
startTime = function startTime(timescale, fragment) {
  // we need info from two children of each track fragment box
  var trafs = mp4Inspector.findBox(fragment, ['moof', 'traf']);

  // collect the base media decode start time, in seconds, for every track
  var baseTimes = [];

  trafs.forEach(function (traf) {
    mp4Inspector.findBox(traf, ['tfhd']).forEach(function (tfhd) {
      // get the track id from the tfhd
      var id = toUnsigned$2(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]);
      // assume a 90kHz clock if no timescale was specified
      var scale = timescale[id] || 90e3;

      // get the base media decode time from the tfdt
      var baseTime = mp4Inspector.findBox(traf, ['tfdt']).map(function (tfdt) {
        var version = tfdt[0];
        var result = toUnsigned$2(tfdt[4] << 24 | tfdt[5] << 16 | tfdt[6] << 8 | tfdt[7]);

        // version 1 tfdt boxes carry a 64-bit value split across two words
        if (version === 1) {
          result *= Math.pow(2, 32);
          result += toUnsigned$2(tfdt[8] << 24 | tfdt[9] << 16 | tfdt[10] << 8 | tfdt[11]);
        }
        return result;
      })[0];

      // convert to seconds; a missing tfdt is treated as Infinity so it
      // never wins the minimum below
      baseTimes.push((baseTime || Infinity) / scale);
    });
  });

  // return the minimum across tracks, or 0 when nothing usable was found
  var result = Math.min.apply(null, baseTimes);

  return isFinite(result) ? result : 0;
};
17912
17913 /**
17914 * Determine the composition start, in seconds, for an MP4
17915 * fragment.
17916 *
17917 * The composition start time of a fragment can be calculated using the base
17918 * media decode time, composition time offset, and timescale, as follows:
17919 *
17920 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
17921 *
17922 * All of the aforementioned information is contained within a media fragment's
17923 * `traf` box, except for timescale info, which comes from the initialization
17924 * segment, so a track id (also contained within a `traf`) is also necessary to
17925 * associate it with a timescale
17926 *
17927 *
17928 * @param timescales {object} - a hash of track ids to timescale values.
17929 * @param fragment {Unit8Array} - the bytes of a media segment
17930 * @return {number} the composition start time for the fragment, in seconds
17931 **/
compositionStartTime = function compositionStartTime(timescales, fragment) {
  var trafBoxes = mp4Inspector.findBox(fragment, ['moof', 'traf']);
  var baseMediaDecodeTime = 0;
  var compositionTimeOffset = 0;
  var trackId;

  if (trafBoxes && trafBoxes.length) {
    // The spec states that track run samples contained within a `traf` box are
    // contiguous, but it does not explicitly state whether the `traf` boxes
    // themselves are contiguous. We assume they are, so only the first is
    // needed to calculate the start time.
    var parsedTraf = mp4Inspector.parseTraf(trafBoxes[0]);

    parsedTraf.boxes.forEach(function (box) {
      if (box.type === 'tfhd') {
        trackId = box.trackId;
      } else if (box.type === 'tfdt') {
        baseMediaDecodeTime = box.baseMediaDecodeTime;
      } else if (box.type === 'trun' && box.samples.length) {
        compositionTimeOffset = box.samples[0].compositionTimeOffset || 0;
      }
    });
  }

  // Get timescale for this specific track. Assume a 90kHz clock if no
  // timescale was specified.
  var timescale = timescales[trackId] || 90e3;

  // return the composition start time, in seconds
  return (baseMediaDecodeTime + compositionTimeOffset) / timescale;
};
17962
17963 /**
17964 * Find the trackIds of the video tracks in this source.
17965 * Found by parsing the Handler Reference and Track Header Boxes:
17966 * moov > trak > mdia > hdlr
17967 * moov > trak > tkhd
17968 *
17969 * @param {Uint8Array} init - The bytes of the init segment for this source
17970 * @return {Number[]} A list of trackIds
17971 *
17972 * @see ISO-BMFF-12/2015, Section 8.4.3
17973 **/
getVideoTrackIds = function getVideoTrackIds(init) {
  var videoTrackIds = [];
  var traks = mp4Inspector.findBox(init, ['moov', 'trak']);

  traks.forEach(function (trak) {
    var hdlrs = mp4Inspector.findBox(trak, ['mdia', 'hdlr']);
    var tkhds = mp4Inspector.findBox(trak, ['tkhd']);

    hdlrs.forEach(function (hdlr, index) {
      // only video tracks carry the 'vide' handler type
      if (mp4Inspector.parseType(hdlr.subarray(8, 12)) !== 'vide') {
        return;
      }

      var tkhd = tkhds[index];
      var view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      var version = view.getUint8(0);
      // the track id offset depends on the tkhd box version
      var trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);

      videoTrackIds.push(trackId);
    });
  });

  return videoTrackIds;
};
18001
18002 /**
18003 * Get all the video, audio, and hint tracks from a non fragmented
18004 * mp4 segment
18005 */
getTracks = function getTracks(init) {
  var traks = mp4Inspector.findBox(init, ['moov', 'trak']);
  var tracks = [];

  traks.forEach(function (trak) {
    var track = {};
    var tkhd = mp4Inspector.findBox(trak, ['tkhd'])[0];
    var view, version;

    // id
    if (tkhd) {
      view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      version = view.getUint8(0);

      // the track id offset depends on the tkhd box version
      track.id = version === 0 ? view.getUint32(12) : view.getUint32(20);
    }

    var hdlr = mp4Inspector.findBox(trak, ['mdia', 'hdlr'])[0];

    // type
    if (hdlr) {
      var type = mp4Inspector.parseType(hdlr.subarray(8, 12));

      if (type === 'vide') {
        track.type = 'video';
      } else if (type === 'soun') {
        track.type = 'audio';
      } else {
        track.type = type;
      }
    }

    // codec
    var stsd = mp4Inspector.findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];

    if (stsd) {
      var sampleDescriptions = stsd.subarray(8);
      // gives the codec type string
      track.codec = mp4Inspector.parseType(sampleDescriptions.subarray(4, 8));

      var codecBox = mp4Inspector.findBox(sampleDescriptions, [track.codec])[0];
      var codecConfig, codecConfigType;

      if (codecBox) {
        // https://tools.ietf.org/html/rfc6381#section-3.3
        if (/^[a-z]vc[1-9]$/i.test(track.codec)) {
          // we don't need anything but the "config" parameter of the
          // avc1 codecBox
          codecConfig = codecBox.subarray(78);
          codecConfigType = mp4Inspector.parseType(codecConfig.subarray(4, 8));

          if (codecConfigType === 'avcC' && codecConfig.length > 11) {
            track.codec += '.';

            // left padded with zeroes for single digit hex
            // profile idc
            track.codec += toHexString$1(codecConfig[9]);
            // the byte containing the constraint_set flags
            track.codec += toHexString$1(codecConfig[10]);
            // level idc
            track.codec += toHexString$1(codecConfig[11]);
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'avc1.4d400d';
          }
        } else if (/^mp4[av]$/i.test(track.codec)) {
          // note: the character class previously contained a stray comma
          // ([a,v]) which also matched the bogus type 'mp4,'; only
          // mp4a/mp4v are intended (RFC 6381)
          // we do not need anything but the streamDescriptor of the mp4a codecBox
          codecConfig = codecBox.subarray(28);
          codecConfigType = mp4Inspector.parseType(codecConfig.subarray(4, 8));

          if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
            track.codec += '.' + toHexString$1(codecConfig[19]);
            // this value is only a single digit
            track.codec += '.' + toHexString$1(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'mp4a.40.2';
          }
        } else {
          // TODO: show a warning? for unknown codec type
        }
      }
    }

    var mdhd = mp4Inspector.findBox(trak, ['mdia', 'mdhd'])[0];

    if (mdhd && tkhd) {
      // NOTE(review): this reuses the tkhd version to pick the mdhd field
      // offset; mdhd has its own version byte (mdhd[0]) — confirm against
      // ISO BMFF whether tkhd/mdhd versions can differ in practice
      var index = version === 0 ? 12 : 20;

      track.timescale = toUnsigned$2(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
    }

    tracks.push(track);
  });

  return tracks;
};
18105
// Grouped export of the MP4 probing helpers defined above.
var probe = {
  // export mp4 inspector's findBox and parseType for backwards compatibility
  findBox: mp4Inspector.findBox,
  parseType: mp4Inspector.parseType,
  timescale: timescale,
  startTime: startTime,
  compositionStartTime: compositionStartTime,
  videoTrackIds: getVideoTrackIds,
  tracks: getTracks
};
18116
// Error codes attached to request error objects so callers can distinguish
// failure modes. NOTE(review): FAILURE is positive while TIMEOUT/ABORTED are
// negative — confirm before relying on sign checks anywhere.
var REQUEST_ERRORS = {
  FAILURE: 2,
  TIMEOUT: -101,
  ABORTED: -102
};
18122
18123 /**
18124 * Abort all requests
18125 *
18126 * @param {Object} activeXhrs - an object that tracks all XHR requests
18127 */
var abortAll = function abortAll(activeXhrs) {
  for (var i = 0; i < activeXhrs.length; i++) {
    activeXhrs[i].abort();
  }
};
18133
18134 /**
18135 * Gather important bandwidth stats once a request has completed
18136 *
18137 * @param {Object} request - the XHR request from which to gather stats
18138 */
var getRequestStats = function getRequestStats(request) {
  // default the optional counters to zero
  var bytesReceived = request.bytesReceived || 0;
  var roundTripTime = request.roundTripTime || 0;

  return {
    bandwidth: request.bandwidth,
    bytesReceived: bytesReceived,
    roundTripTime: roundTripTime
  };
};
18146
18147 /**
18148 * If possible gather bandwidth stats as a request is in
18149 * progress
18150 *
18151 * @param {Event} progressEvent - an event object from an XHR's progress event
18152 */
var getProgressStats = function getProgressStats(progressEvent) {
  var request = progressEvent.target;
  var roundTripTime = (Date.now() - request.requestTime) || 0;
  var bytesReceived = progressEvent.loaded;

  // This can be Infinity when roundTripTime is 0, which is fine: progress
  // bandwidth is only used mid-request to decide whether to abort a request
  // early due to insufficient bandwidth.
  var bandwidth = Math.floor(bytesReceived / roundTripTime * 8 * 1000);

  return {
    bandwidth: bandwidth,
    bytesReceived: bytesReceived,
    roundTripTime: roundTripTime
  };
};
18170
18171 /**
18172 * Handle all error conditions in one place and return an object
18173 * with all the information
18174 *
 * @param {Error|null} error - if non-null signals an error occurred with the XHR
18176 * @param {Object} request - the XHR request that possibly generated the error
18177 */
var handleErrors = function handleErrors(error, request) {
  // all failure modes share the same error object shape
  var requestError = function requestError(message, code) {
    return {
      status: request.status,
      message: message + request.uri,
      code: code,
      xhr: request
    };
  };

  if (request.timedout) {
    return requestError('HLS request timed-out at URL: ', REQUEST_ERRORS.TIMEOUT);
  }

  if (request.aborted) {
    return requestError('HLS request aborted at URL: ', REQUEST_ERRORS.ABORTED);
  }

  if (error) {
    return requestError('HLS request errored at URL: ', REQUEST_ERRORS.FAILURE);
  }

  // no error condition detected
  return null;
};
18208
18209 /**
18210 * Handle responses for key data and convert the key data to the correct format
18211 * for the decryption step later
18212 *
18213 * @param {Object} segment - a simplified copy of the segmentInfo object
18214 * from SegmentLoader
18215 * @param {Function} finishProcessingFn - a callback to execute to continue processing
18216 * this request
18217 */
var handleKeyResponse = function handleKeyResponse(segment, finishProcessingFn) {
  return function (error, request) {
    var response = request.response;
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    // an AES-128 key is always exactly 16 bytes
    if (response.byteLength !== 16) {
      return finishProcessingFn({
        status: request.status,
        message: 'Invalid HLS key at URL: ' + request.uri,
        code: REQUEST_ERRORS.FAILURE,
        xhr: request
      }, segment);
    }

    // repack the key as four big-endian 32-bit words for the decrypter
    var view = new DataView(response);

    segment.key.bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);

    return finishProcessingFn(null, segment);
  };
};
18242
18243 /**
18244 * Handle init-segment responses
18245 *
18246 * @param {Object} segment - a simplified copy of the segmentInfo object
18247 * from SegmentLoader
18248 * @param {Function} finishProcessingFn - a callback to execute to continue processing
18249 * this request
18250 */
var handleInitSegmentResponse = function handleInitSegmentResponse(segment, captionParser, finishProcessingFn) {
  return function (error, request) {
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    // stop processing if received empty content
    if (request.response.byteLength === 0) {
      return finishProcessingFn({
        status: request.status,
        message: 'Empty HLS segment content at URL: ' + request.uri,
        code: REQUEST_ERRORS.FAILURE,
        xhr: request
      }, segment);
    }

    segment.map.bytes = new Uint8Array(request.response);

    // Initialize CaptionParser if it hasn't been yet
    if (captionParser && !captionParser.isInitialized()) {
      captionParser.init();
    }

    // probe the init segment for per-track timescales and video track ids so
    // caption parsing of later media segments has what it needs
    segment.map.timescales = probe.timescale(segment.map.bytes);
    segment.map.videoTrackIds = probe.videoTrackIds(segment.map.bytes);

    return finishProcessingFn(null, segment);
  };
};
18283
18284 /**
18285 * Response handler for segment-requests being sure to set the correct
18286 * property depending on whether the segment is encryped or not
18287 * Also records and keeps track of stats that are used for ABR purposes
18288 *
18289 * @param {Object} segment - a simplified copy of the segmentInfo object
18290 * from SegmentLoader
18291 * @param {Function} finishProcessingFn - a callback to execute to continue processing
18292 * this request
18293 */
var handleSegmentResponse = function handleSegmentResponse(segment, captionParser, finishProcessingFn) {
  return function (error, request) {
    var response = request.response;
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    // stop processing if received empty content
    if (response.byteLength === 0) {
      return finishProcessingFn({
        status: request.status,
        message: 'Empty HLS segment content at URL: ' + request.uri,
        code: REQUEST_ERRORS.FAILURE,
        xhr: request
      }, segment);
    }

    segment.stats = getRequestStats(request);

    // keep encrypted payloads separate so the decryption step knows to run
    var property = segment.key ? 'encryptedBytes' : 'bytes';

    segment[property] = new Uint8Array(request.response);

    // This is likely an FMP4 and has the init segment.
    // Run through the CaptionParser in case there are captions.
    if (captionParser && segment.map && segment.map.bytes) {
      // Initialize CaptionParser if it hasn't been yet
      if (!captionParser.isInitialized()) {
        captionParser.init();
      }

      var parsed = captionParser.parse(segment.bytes, segment.map.videoTrackIds, segment.map.timescales);

      if (parsed && parsed.captions) {
        segment.captionStreams = parsed.captionStreams;
        segment.fmp4Captions = parsed.captions;
      }
    }

    return finishProcessingFn(null, segment);
  };
};
18341
18342 /**
18343 * Decrypt the segment via the decryption web worker
18344 *
18345 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
18346 * @param {Object} segment - a simplified copy of the segmentInfo object
18347 * from SegmentLoader
18348 * @param {Function} doneFn - a callback that is executed after decryption has completed
18349 */
var decryptSegment = function decryptSegment(decrypter, segment, doneFn) {
  var decryptionHandler = function decryptionHandler(event) {
    // only react to the decrypter's response for this specific request
    if (event.data.source !== segment.requestId) {
      return;
    }

    decrypter.removeEventListener('message', decryptionHandler);

    var decrypted = event.data.decrypted;

    segment.bytes = new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength);
    return doneFn(null, segment);
  };

  decrypter.addEventListener('message', decryptionHandler);

  // copy the key bytes so the underlying buffer can be transferred to the
  // worker without detaching the caller's view
  var keyBytes = segment.key.bytes.slice ? segment.key.bytes.slice() : new Uint32Array(Array.prototype.slice.call(segment.key.bytes));

  // this is an encrypted segment
  // incrementally decrypt the segment
  decrypter.postMessage(createTransferableMessage({
    source: segment.requestId,
    encrypted: segment.encryptedBytes,
    key: keyBytes,
    iv: segment.key.iv
  }), [segment.encryptedBytes.buffer, keyBytes.buffer]);
};
18380
18381 /**
18382 * This function waits for all XHRs to finish (with either success or failure)
 * before continuing processing via its callback. The function gathers errors
18384 * from each request into a single errors array so that the error status for
18385 * each request can be examined later.
18386 *
18387 * @param {Object} activeXhrs - an object that tracks all XHR requests
18388 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
18389 * @param {Function} doneFn - a callback that is executed after all resources have been
18390 * downloaded and any decryption completed
18391 */
var waitForCompletion = function waitForCompletion(activeXhrs, decrypter, doneFn) {
  var count = 0;
  var didError = false;

  return function (error, segment) {
    // after the first error everything has been aborted and reported; ignore
    // the resulting callbacks from the other requests
    if (didError) {
      return;
    }

    if (error) {
      didError = true;
      // If there are errors, we have to abort any outstanding requests
      abortAll(activeXhrs);

      // Even though the requests above are aborted, and in theory we could wait until we
      // handle the aborted events from those requests, there are some cases where we may
      // never get an aborted event. For instance, if the network connection is lost and
      // there were two requests, the first may have triggered an error immediately, while
      // the second request remains unsent. In that case, the aborted algorithm will not
      // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
      //
      // We also can't rely on the ready state of the XHR, since the request that
      // triggered the connection error may also show as a ready state of 0 (unsent).
      // Therefore, we have to finish this group of requests immediately after the first
      // seen error.
      return doneFn(error, segment);
    }

    count += 1;

    // still waiting on at least one request
    if (count < activeXhrs.length) {
      return;
    }

    // Keep track of when *all* of the requests have completed
    segment.endOfAllRequests = Date.now();

    if (segment.encryptedBytes) {
      return decryptSegment(decrypter, segment, doneFn);
    }
    // Otherwise, everything is ready just continue
    return doneFn(null, segment);
  };
};
18434
/**
 * Simple progress event callback handler that gathers some transfer stats
 * before executing a provided callback with the `segment` object.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @param {Function} progressFn - a callback that is executed each time a
 * progress event is received
 * @return {Function} an XMLHttpRequest progress-event handler
 */
var handleProgress = function handleProgress(segment, progressFn) {
  return function (event) {
    // fold the latest transfer stats from this progress event into the
    // segment's running stats object
    var updatedStats = videojs.mergeOptions(segment.stats, getProgressStats(event));

    // record the time that we receive the first byte of data
    if (updatedStats.bytesReceived && !updatedStats.firstBytesReceivedAt) {
      updatedStats.firstBytesReceivedAt = Date.now();
    }

    segment.stats = updatedStats;

    return progressFn(event, segment);
  };
};
18457
/**
 * Load all resources and does any processing necessary for a media-segment
 *
 * Features:
 *   decrypts the media-segment if it has a key uri and an iv
 *   aborts *all* requests if *any* one request fails
 *
 * The segment object, at minimum, has the following format:
 * {
 *   resolvedUri: String,
 *   [byterange]: {
 *     offset: Number,
 *     length: Number
 *   },
 *   [key]: {
 *     resolvedUri: String
 *     [byterange]: {
 *       offset: Number,
 *       length: Number
 *     },
 *     iv: {
 *       bytes: Uint32Array
 *     }
 *   },
 *   [map]: {
 *     resolvedUri: String,
 *     [byterange]: {
 *       offset: Number,
 *       length: Number
 *     },
 *     [bytes]: Uint8Array
 *   }
 * }
 * ...where [name] denotes optional properties
 *
 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
 * @param {Object} xhrOptions - the base options to provide to all xhr requests
 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
 *                                       decryption routines
 * @param {Object} captionParser - parser for in-band (608/708) captions,
 *                                 forwarded to the init/segment response handlers
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *                           from SegmentLoader
 * @param {Function} progressFn - a callback that receives progress events from
 *                                the main segment's xhr request
 * @param {Function} doneFn - a callback that is executed only once all requests
 *                            have succeeded or failed
 * @returns {Function} a function that, when invoked, immediately aborts all
 *                     outstanding requests
 */
var mediaSegmentRequest = function mediaSegmentRequest(xhr, xhrOptions, decryptionWorker, captionParser, segment, progressFn, doneFn) {
  var activeXhrs = [];
  // shared callback that fires doneFn once every request (and any decryption)
  // has finished, or immediately after the first error
  var finishProcessingFn = waitForCompletion(activeXhrs, decryptionWorker, doneFn);

  // optionally, request the decryption key
  if (segment.key && !segment.key.bytes) {
    activeXhrs.push(xhr(videojs.mergeOptions(xhrOptions, {
      uri: segment.key.resolvedUri,
      responseType: 'arraybuffer'
    }), handleKeyResponse(segment, finishProcessingFn)));
  }

  // optionally, request the associated media init segment
  if (segment.map && !segment.map.bytes) {
    activeXhrs.push(xhr(videojs.mergeOptions(xhrOptions, {
      uri: segment.map.resolvedUri,
      responseType: 'arraybuffer',
      headers: segmentXhrHeaders(segment.map)
    }), handleInitSegmentResponse(segment, captionParser, finishProcessingFn)));
  }

  // the media segment itself is always requested
  var segmentXhr = xhr(videojs.mergeOptions(xhrOptions, {
    uri: segment.resolvedUri,
    responseType: 'arraybuffer',
    headers: segmentXhrHeaders(segment)
  }), handleSegmentResponse(segment, captionParser, finishProcessingFn));

  segmentXhr.addEventListener('progress', handleProgress(segment, progressFn));
  activeXhrs.push(segmentXhr);

  // allow the caller to cancel the entire group of requests at once
  return function () {
    return abortAll(activeXhrs);
  };
};
18550
18551 // Utilities
18552
/**
 * Returns the computed CSS value for the specified property on an element
 * using `getComputedStyle`. Firefox has a long-standing issue where
 * getComputedStyle() may return null when running in an iframe with
 * `display: none`, so both the element and the computed result are
 * null-checked before indexing.
 *
 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
 * @param {HTMLElement} el the element to inspect
 * @param {string} property the property to get the style for
 * @return {string} the computed value, or '' when it cannot be determined
 */
var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
  if (!el) {
    return '';
  }

  var computed = window_1.getComputedStyle(el);

  return computed ? computed[property] : '';
};
18577
/**
 * Reusable stable sort: sorts `array` in place with `sortFn`, using each
 * element's original position to break ties (Array#sort alone is not
 * guaranteed to be stable in older engines).
 *
 * @param {Playlists} array the array to sort in place
 * @param {Function} sortFn comparator returning <0, 0, or >0
 * @function stableSort
 */
var stableSort = function stableSort(array, sortFn) {
  // snapshot of the original ordering, used to break ties deterministically
  var originalOrder = array.slice();

  array.sort(function (a, b) {
    var comparison = sortFn(a, b);

    return comparison === 0 ? originalOrder.indexOf(a) - originalOrder.indexOf(b) : comparison;
  });
};
18597
/**
 * A comparator function to sort two playlist objects by bandwidth. Playlists
 * without a (truthy) BANDWIDTH attribute are treated as Number.MAX_VALUE so
 * they sort to the end.
 *
 * @param {Object} left a media playlist object
 * @param {Object} right a media playlist object
 * @return {Number} Greater than zero if the bandwidth attribute of
 * left is greater than the corresponding attribute of right. Less
 * than zero if the bandwidth of right is greater than left and
 * exactly zero if the two are equal.
 */
var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
  var leftBandwidth = left.attributes.BANDWIDTH || window_1.Number.MAX_VALUE;
  var rightBandwidth = right.attributes.BANDWIDTH || window_1.Number.MAX_VALUE;

  return leftBandwidth - rightBandwidth;
};
18623
/**
 * A comparator function to sort two playlist objects by resolution (width),
 * falling back to a bandwidth comparison when the widths are identical.
 * Playlists without a (truthy) RESOLUTION.width are treated as
 * Number.MAX_VALUE so they sort to the end.
 *
 * @param {Object} left a media playlist object
 * @param {Object} right a media playlist object
 * @return {Number} Greater than zero if the resolution.width attribute of
 * left is greater than the corresponding attribute of right. Less
 * than zero if the resolution.width of right is greater than left and
 * exactly zero if the two are equal.
 */
var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
  var leftAttributes = left.attributes;
  var rightAttributes = right.attributes;
  var leftWidth = (leftAttributes.RESOLUTION && leftAttributes.RESOLUTION.width) || window_1.Number.MAX_VALUE;
  var rightWidth = (rightAttributes.RESOLUTION && rightAttributes.RESOLUTION.width) || window_1.Number.MAX_VALUE;

  // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple
  // renditions have the same media dimensions/resolution
  if (leftWidth === rightWidth && leftAttributes.BANDWIDTH && rightAttributes.BANDWIDTH) {
    return leftAttributes.BANDWIDTH - rightAttributes.BANDWIDTH;
  }

  return leftWidth - rightWidth;
};
18656
/**
 * Chooses the appropriate media playlist based on bandwidth and player size
 *
 * @param {Object} master
 *        Object representation of the master manifest
 * @param {Number} playerBandwidth
 *        Current calculated bandwidth of the player
 * @param {Number} playerWidth
 *        Current width of the player element (should account for the device pixel ratio)
 * @param {Number} playerHeight
 *        Current height of the player element (should account for the device pixel ratio)
 * @param {Boolean} limitRenditionByPlayerDimensions
 *        True if the player width and height should be used during the selection, false otherwise
 * @return {Playlist} the highest bitrate playlist less than the
 * currently detected bandwidth, accounting for some amount of
 * bandwidth variance
 */
var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions) {
  // convert the playlists to an intermediary representation to make comparisons easier
  var sortedPlaylistReps = master.playlists.map(function (playlist) {
    var width = void 0;
    var height = void 0;
    var bandwidth = void 0;

    width = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
    height = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
    bandwidth = playlist.attributes.BANDWIDTH;

    // playlists without a BANDWIDTH attribute sort to the very end
    bandwidth = bandwidth || window_1.Number.MAX_VALUE;

    return {
      bandwidth: bandwidth,
      width: width,
      height: height,
      playlist: playlist
    };
  });

  // order ascending by bandwidth; stableSort keeps manifest order for ties
  stableSort(sortedPlaylistReps, function (left, right) {
    return left.bandwidth - right.bandwidth;
  });

  // filter out any playlists that have been excluded due to
  // incompatible configurations
  sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
    return !Playlist.isIncompatible(rep.playlist);
  });

  // filter out any playlists that have been disabled manually through the representations
  // api or blacklisted temporarily due to playback errors.
  var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
    return Playlist.isEnabled(rep.playlist);
  });

  if (!enabledPlaylistReps.length) {
    // if there are no enabled playlists, then they have all been blacklisted or disabled
    // by the user through the representations api. In this case, ignore blacklisting and
    // fallback to what the user wants by using playlists the user has not disabled.
    enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
      return !Playlist.isDisabled(rep.playlist);
    });
  }

  // filter out any variant that has greater effective bitrate
  // than the current estimated bandwidth
  var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
    return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
  });

  // may be undefined when nothing fits in the measured bandwidth; the
  // following filter callback is then never invoked, so the `.bandwidth`
  // access below is safe
  var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1];

  // get all of the renditions with the same (highest) bandwidth
  // and then taking the very first element
  var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
    return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
  })[0];

  // if we're not going to limit renditions by player size, make an early decision.
  if (limitRenditionByPlayerDimensions === false) {
    var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

    return _chosenRep ? _chosenRep.playlist : null;
  }

  // filter out playlists without resolution information
  var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
    return rep.width && rep.height;
  });

  // sort variants by resolution
  stableSort(haveResolution, function (left, right) {
    return left.width - right.width;
  });

  // if we have the exact resolution as the player use it
  var resolutionBestRepList = haveResolution.filter(function (rep) {
    return rep.width === playerWidth && rep.height === playerHeight;
  });

  highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1];
  // ensure that we pick the highest bandwidth variant that have exact resolution
  var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
    return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
  })[0];

  var resolutionPlusOneList = void 0;
  var resolutionPlusOneSmallest = void 0;
  var resolutionPlusOneRep = void 0;

  // find the smallest variant that is larger than the player
  // if there is no match of exact resolution
  if (!resolutionBestRep) {
    resolutionPlusOneList = haveResolution.filter(function (rep) {
      return rep.width > playerWidth || rep.height > playerHeight;
    });

    // find all the variants have the same smallest resolution
    resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
      return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
    });

    // ensure that we also pick the highest bandwidth variant that
    // is just-larger-than the video player
    highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
    resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
      return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
    })[0];
  }

  // fallback chain of variants: prefer a size-appropriate rendition, then the
  // best bandwidth match, then any enabled playlist, then anything at all
  var chosenRep = resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

  return chosenRep ? chosenRep.playlist : null;
};
18791
18792 // Playlist Selectors
18793
/**
 * Chooses the appropriate media playlist based on the most recent
 * bandwidth estimate and the player size.
 *
 * Expects to be called within the context of an instance of HlsHandler
 *
 * @return {Playlist} the highest bitrate playlist less than the
 * currently detected bandwidth, accounting for some amount of
 * bandwidth variance
 */
var lastBandwidthSelector = function lastBandwidthSelector() {
  // scale the player dimensions by devicePixelRatio only when opted in
  var pixelRatio = this.useDevicePixelRatio ? window_1.devicePixelRatio || 1 : 1;
  var playerWidth = parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio;
  var playerHeight = parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio;

  return simpleSelector(this.playlists.master, this.systemBandwidth, playerWidth, playerHeight, this.limitRenditionByPlayerDimensions);
};
18809
/**
 * Chooses the appropriate media playlist based on the potential to rebuffer
 *
 * @param {Object} settings
 *        Object of information required to use this selector
 * @param {Object} settings.master
 *        Object representation of the master manifest
 * @param {Number} settings.currentTime
 *        The current time of the player
 * @param {Number} settings.bandwidth
 *        Current measured bandwidth
 * @param {Number} settings.duration
 *        Duration of the media
 * @param {Number} settings.segmentDuration
 *        Segment duration to be used in round trip time calculations
 * @param {Number} settings.timeUntilRebuffer
 *        Time left in seconds until the player has to rebuffer
 * @param {Number} settings.currentTimeline
 *        The current timeline segments are being loaded from
 * @param {SyncController} settings.syncController
 *        SyncController for determining if we have a sync point for a given playlist
 * @return {Object|null}
 *         {Object} return.playlist
 *         The highest bandwidth playlist with the least amount of rebuffering
 *         {Number} return.rebufferingImpact
 *         The amount of time in seconds switching to this playlist will rebuffer. A
 *         negative value means that switching will cause zero rebuffering.
 */
var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
  var syncController = settings.syncController;
  var bandwidth = settings.bandwidth;

  // filter out any playlists that have been excluded due to
  // incompatible configurations
  var compatiblePlaylists = settings.master.playlists.filter(function (playlist) {
    return !Playlist.isIncompatible(playlist);
  });

  // drop playlists disabled manually through the representations api or
  // blacklisted temporarily due to playback errors
  var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);

  if (!enabledPlaylists.length) {
    // everything was blacklisted or disabled; ignore blacklisting and fall
    // back to whatever the user has not explicitly disabled
    enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
      return !Playlist.isDisabled(playlist);
    });
  }

  // only playlists that advertise a BANDWIDTH can be estimated against
  var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));

  var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
    var syncPoint = syncController.getSyncPoint(playlist, settings.duration, settings.currentTimeline, settings.currentTime);
    // If there is no sync point for this playlist, switching to it will
    // require a sync request first, doubling the request time
    var numRequests = syncPoint ? 1 : 2;
    var requestTimeEstimate = Playlist.estimateSegmentRequestTime(settings.segmentDuration, bandwidth, playlist);

    return {
      playlist: playlist,
      rebufferingImpact: requestTimeEstimate * numRequests - settings.timeUntilRebuffer
    };
  });

  var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
    return estimate.rebufferingImpact <= 0;
  });

  // Sort by bandwidth DESC
  stableSort(noRebufferingPlaylists, function (a, b) {
    return comparePlaylistBandwidth(b.playlist, a.playlist);
  });

  if (noRebufferingPlaylists.length) {
    return noRebufferingPlaylists[0];
  }

  // nothing avoids rebuffering entirely -- pick the least-bad option
  stableSort(rebufferingEstimates, function (a, b) {
    return a.rebufferingImpact - b.rebufferingImpact;
  });

  return rebufferingEstimates[0] || null;
};
18903
/**
 * Chooses the appropriate media playlist, which in this case is the lowest
 * bitrate one with video. If no renditions with video exist, return the
 * lowest audio rendition.
 *
 * Expects to be called within the context of an instance of HlsHandler
 *
 * @return {Object|null}
 *         {Object} return.playlist
 *         The lowest bitrate playlist that contains a video codec. If no such
 *         rendition exists pick the lowest audio rendition.
 */
var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
  // filter out any playlists that have been excluded due to
  // incompatible configurations or playback errors
  var candidates = this.playlists.master.playlists.filter(Playlist.isEnabled);

  // order the candidates from lowest to highest bandwidth
  stableSort(candidates, function (a, b) {
    return comparePlaylistBandwidth(a, b);
  });

  // Parse and assume that playlists with no video codec have no video
  // (this is not necessarily true, although it is generally true).
  //
  // If an entire manifest has no valid videos everything will get filtered
  // out.
  var withVideo = candidates.filter(function (playlist) {
    return parseCodecs(playlist.attributes.CODECS).videoCodec;
  });

  return withVideo[0] || null;
};
18936
/**
 * Create captions text tracks on video.js if they do not exist
 *
 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
 * @param {Object} tech the video.js tech
 * @param {Object} captionStreams the caption streams to create
 * @private
 */
var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStreams) {
  Object.keys(captionStreams).forEach(function (trackId) {
    if (inbandTextTracks[trackId]) {
      // a track for this caption stream is already registered
      return;
    }

    tech.trigger({ type: 'usage', name: 'hls-608' });

    // Reuse an existing track with a CC# id when one exists -- it was very
    // likely created by videojs-contrib-hls from information in the m3u8
    // for us to use
    var existingTrack = tech.textTracks().getTrackById(trackId);

    if (existingTrack) {
      inbandTextTracks[trackId] = existingTrack;
      return;
    }

    // Otherwise, create a track with the default `CC#` label and
    // without a language
    inbandTextTracks[trackId] = tech.addRemoteTextTrack({
      kind: 'captions',
      id: trackId,
      label: trackId
    }, false).track;
  });
};
18968
/**
 * Adds parsed caption cues to their corresponding text tracks, shifting each
 * cue by the current timestampOffset so times become player-relative. Cues
 * whose stream has no registered track are dropped.
 *
 * @param {Object} _ref
 * @param {Object} _ref.inbandTextTracks map of caption stream name to TextTrack
 * @param {Object[]} _ref.captionArray parsed captions ({stream, startTime, endTime, text})
 * @param {Number} _ref.timestampOffset seconds to shift each cue by
 */
var addCaptionData = function addCaptionData(_ref) {
  var inbandTextTracks = _ref.inbandTextTracks;
  var captionArray = _ref.captionArray;
  var timestampOffset = _ref.timestampOffset;

  if (!captionArray) {
    return;
  }

  // NOTE(review): this reads the bare global `window` rather than the bundled
  // `window_1` alias used elsewhere in this file -- confirm that is intended
  // (it only resolves in browser-like environments)
  var Cue = window.WebKitDataCue || window.VTTCue;

  captionArray.forEach(function (caption) {
    var trackName = caption.stream;

    if (!inbandTextTracks[trackName]) {
      // no track registered for this caption stream
      return;
    }

    inbandTextTracks[trackName].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
  });
};
18995
18996 /**
18997 * mux.js
18998 *
18999 * Copyright (c) Brightcove
19000 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
19001 *
19002 * Reads in-band caption information from a video elementary
19003 * stream. Captions must follow the CEA-708 standard for injection
19004 * into an MPEG-2 transport streams.
19005 * @see https://en.wikipedia.org/wiki/CEA-708
19006 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
19007 */
19008
// Supplemental enhancement information (SEI) NAL units have a
// payload type field to indicate how they are to be
// interpreted. CEAS-708 caption content is always transmitted with
// payload type 0x04.

// 4: the payload_type value identifying ITU-T T.35 registered user data
//    (the carrier for 608/708 captions); 128 (0x80): the first byte of
//    rbsp_trailing_bits, which terminates the SEI RBSP in parseSei below.
var USER_DATA_REGISTERED_ITU_T_T35 = 4,
    RBSP_TRAILING_BITS = 128;
19016
/**
 * Parse a supplemental enhancement information (SEI) NAL unit.
 * Stops parsing once a message of type ITU T T35 has been found.
 *
 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
 * @return {object} the parsed SEI payload (payloadType -1 when no caption
 * message was found)
 * @see Rec. ITU-T H.264, 7.3.2.3.1
 */
var parseSei = function parseSei(bytes) {
  var result = {
    payloadType: -1,
    payloadSize: 0
  };
  var offset = 0;

  // walk the sei_rbsp one sei_message at a time
  while (offset < bytes.byteLength) {
    // rbsp_trailing_bits signal the end of the sei_rbsp
    if (bytes[offset] === RBSP_TRAILING_BITS) {
      break;
    }

    // payload type is encoded as a run of 0xFF bytes plus a terminator byte
    var payloadType = 0;
    while (bytes[offset] === 0xFF) {
      payloadType += 255;
      offset++;
    }
    payloadType += bytes[offset++];

    // payload size uses the same variable-length encoding
    var payloadSize = 0;
    while (bytes[offset] === 0xFF) {
      payloadSize += 255;
      offset++;
    }
    payloadSize += bytes[offset++];

    // this sei_message is a 608/708 caption, so record it and stop scanning;
    // there can only ever be one caption message in a frame's sei
    if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
      result.payloadType = payloadType;
      result.payloadSize = payloadSize;
      result.payload = bytes.subarray(offset, offset + payloadSize);
      break;
    }

    // not a caption message; skip its payload and parse the next message
    offset += payloadSize;
  }

  return result;
};
19072
// Extracts the ATSC user_data_type_structure from a parsed SEI payload,
// returning null unless every header field identifies CEA-708 caption data.
// see ANSI/SCTE 128-1 (2013), section 8.1
var parseUserData = function parseUserData(sei) {
  var payload = sei.payload;

  // itu_t_t35_contry_code must be 181 (United States) for captions
  if (payload[0] !== 181) {
    return null;
  }

  // itu_t_t35_provider_code should be 49 (ATSC) for captions
  if ((payload[1] << 8 | payload[2]) !== 49) {
    return null;
  }

  // the user_identifier should be "GA94" to indicate ATSC1 data
  if (String.fromCharCode(payload[3], payload[4], payload[5], payload[6]) !== 'GA94') {
    return null;
  }

  // finally, user_data_type_code should be 0x03 for caption data
  if (payload[7] !== 0x03) {
    return null;
  }

  // return the user_data_type_structure, stripping the trailing marker bits
  return payload.subarray(8, payload.length - 1);
};
19100
// Splits an ATSC user_data_type_structure into individual two-byte caption
// packets, each tagged with its channel type and the frame's pts.
// see CEA-708-D, section 4.4
var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
  var results = [];

  // process_cc_data_flag unset means this is just filler
  if (!(userData[0] & 0x40)) {
    return results;
  }

  // cc_count lives in the low five bits of the first byte
  var count = userData[0] & 0x1f;

  for (var i = 0; i < count; i++) {
    var offset = i * 3;

    // only capture cc data when cc_valid is 1
    if (userData[offset + 2] & 0x04) {
      results.push({
        type: userData[offset + 2] & 0x03,
        pts: pts,
        ccData: userData[offset + 3] << 8 | userData[offset + 4]
      });
    }
  }

  return results;
};
19131
// Strips H.264 emulation prevention bytes (the 0x03 in every 0x00 0x00 0x03
// run) from RBSP data. Returns the original array untouched when none are
// found; otherwise returns a newly allocated Uint8Array.
var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
  var length = data.byteLength;
  var epbPositions = [];
  var i = 1;

  // Find all `Emulation Prevention Bytes`
  while (i < length - 2) {
    if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
      epbPositions.push(i + 2);
      i += 2;
    } else {
      i++;
    }
  }

  // nothing to strip -- hand back the input array as-is
  if (epbPositions.length === 0) {
    return data;
  }

  // copy everything except the emulation prevention bytes
  var strippedLength = length - epbPositions.length;
  var stripped = new Uint8Array(strippedLength);
  var sourceIndex = 0;

  for (i = 0; i < strippedLength; sourceIndex++, i++) {
    if (sourceIndex === epbPositions[0]) {
      // skip over the emulation prevention byte
      sourceIndex++;
      epbPositions.shift();
    }
    stripped[i] = data[sourceIndex];
  }

  return stripped;
};
19172
// exports
// Public surface of the caption-packet parsing helpers defined above, used
// by the CaptionStream pipeline below.
var captionPacketParser = {
  parseSei: parseSei,
  parseUserData: parseUserData,
  parseCaptionPackets: parseCaptionPackets,
  discardEmulationPreventionBytes: discardEmulationPreventionBytes,
  USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
};
19181
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 *
 * A lightweight readable stream implemention that handles event dispatching.
 * Objects that inherit from streams should call init in their constructors.
 */

var Stream$2 = function Stream() {
  this.init = function () {
    // listeners are held in a closure rather than on the instance
    var listeners = {};
    /**
     * Add a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} the callback to be invoked when an event of
     * the specified type occurs
     */
    this.on = function (type, listener) {
      if (!listeners[type]) {
        listeners[type] = [];
      }
      listeners[type] = listeners[type].concat(listener);
    };
    /**
     * Remove a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} a function previously registered for this
     * type of event through `on`
     * @return {boolean} true if the listener was registered and removed
     */
    this.off = function (type, listener) {
      var index;
      if (!listeners[type]) {
        return false;
      }
      index = listeners[type].indexOf(listener);
      // Fix: only remove when the listener is actually registered. The
      // previous unguarded splice(index, 1) with index === -1 would
      // erroneously remove the *last* registered listener.
      if (index > -1) {
        // copy-on-write so callbacks mid-trigger keep iterating a stable list
        listeners[type] = listeners[type].slice();
        listeners[type].splice(index, 1);
      }
      return index > -1;
    };
    /**
     * Trigger an event of the specified type on this stream. Any additional
     * arguments to this function are passed as parameters to event listeners.
     * @param type {string} the event name
     */
    this.trigger = function (type) {
      var callbacks, i, length, args;
      callbacks = listeners[type];
      if (!callbacks) {
        return;
      }
      // Slicing the arguments on every invocation of this method
      // can add a significant amount of overhead. Avoid the
      // intermediate object creation for the common case of a
      // single callback argument
      if (arguments.length === 2) {
        length = callbacks.length;
        for (i = 0; i < length; ++i) {
          callbacks[i].call(this, arguments[1]);
        }
      } else {
        args = [];
        i = arguments.length;
        for (i = 1; i < arguments.length; ++i) {
          args.push(arguments[i]);
        }
        length = callbacks.length;
        for (i = 0; i < length; ++i) {
          callbacks[i].apply(this, args);
        }
      }
    };
    /**
     * Destroys the stream and cleans up.
     */
    this.dispose = function () {
      listeners = {};
    };
  };
};
19263
/**
 * Forwards all `data` events on this stream to the destination stream. The
 * destination stream should provide a method `push` to receive the data
 * events as they arrive.
 * @param destination {stream} the stream that will receive all `data` events
 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
 * when the current stream emits a 'done' event
 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
 */
Stream$2.prototype.pipe = function (destination) {
  // wire each upstream lifecycle event to the matching downstream method
  var forward = function (event, method) {
    this.on(event, function (flushSource) {
      destination[method](flushSource);
    });
  }.bind(this);

  forward('data', 'push');
  forward('done', 'flush');
  forward('partialdone', 'partialFlush');
  forward('endedtimeline', 'endTimeline');
  forward('reset', 'reset');

  return destination;
};
19296
19297 // Default stream functions that are expected to be overridden to perform
19298 // actual work. These are provided by the prototype as a sort of no-op
19299 // implementation so that we don't have to check for their existence in the
19300 // `pipe` function above.
19301 Stream$2.prototype.push = function (data) {
19302 this.trigger('data', data);
19303 };
19304
19305 Stream$2.prototype.flush = function (flushSource) {
19306 this.trigger('done', flushSource);
19307 };
19308
19309 Stream$2.prototype.partialFlush = function (flushSource) {
19310 this.trigger('partialdone', flushSource);
19311 };
19312
19313 Stream$2.prototype.endTimeline = function (flushSource) {
19314 this.trigger('endedtimeline', flushSource);
19315 };
19316
19317 Stream$2.prototype.reset = function (flushSource) {
19318 this.trigger('reset', flushSource);
19319 };
19320
19321 var stream = Stream$2;
19322
19323 // -----------------
19324 // Link To Transport
19325 // -----------------
19326
19327
19328 var CaptionStream = function CaptionStream() {
19329
19330 CaptionStream.prototype.init.call(this);
19331
19332 this.captionPackets_ = [];
19333
19334 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
19335 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
19336 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
19337 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
19338 ];
19339
19340 this.reset();
19341
19342 // forward data and done events from CCs to this CaptionStream
19343 this.ccStreams_.forEach(function (cc) {
19344 cc.on('data', this.trigger.bind(this, 'data'));
19345 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
19346 cc.on('done', this.trigger.bind(this, 'done'));
19347 }, this);
19348 };
19349
  CaptionStream.prototype = new stream();

  /**
   * Accepts a parsed NAL unit, extracts any CEA-608 caption packets found in
   * SEI user_data_registered_itu_t_t35 payloads, and buffers them in
   * `captionPackets_` until the next flush. Duplicate data from
   * re-downloaded segments is detected via DTS tracking and dropped.
   * @param event {Object} a NAL unit with nalUnitType, escapedRBSP, pts, dts
   */
  CaptionStream.prototype.push = function (event) {
    var sei, userData, newCaptionPackets;

    // only examine SEI NALs
    if (event.nalUnitType !== 'sei_rbsp') {
      return;
    }

    // parse the sei
    sei = captionPacketParser.parseSei(event.escapedRBSP);

    // ignore everything but user_data_registered_itu_t_t35
    if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
      return;
    }

    // parse out the user data payload
    userData = captionPacketParser.parseUserData(sei);

    // ignore unrecognized userData
    if (!userData) {
      return;
    }

    // Sometimes, the same segment # will be downloaded twice. To stop the
    // caption data from being processed twice, we track the latest dts we've
    // received and ignore everything with a dts before that. However, since
    // data for a specific dts can be split across packets on either side of
    // a segment boundary, we need to make sure we *don't* ignore the packets
    // from the *next* segment that have dts === this.latestDts_. By constantly
    // tracking the number of packets received with dts === this.latestDts_, we
    // know how many should be ignored once we start receiving duplicates.
    if (event.dts < this.latestDts_) {
      // We've started getting older data, so set the flag.
      this.ignoreNextEqualDts_ = true;
      return;
    } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
      // count off one duplicate packet at the segment-boundary dts
      this.numSameDts_--;
      if (!this.numSameDts_) {
        // We've received the last duplicate packet, time to start processing again
        this.ignoreNextEqualDts_ = false;
      }
      return;
    }

    // parse out CC data packets and save them for later
    newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
    this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
    // track how many packets share the current dts so duplicates can be
    // counted off when a re-downloaded segment arrives (see above)
    if (this.latestDts_ !== event.dts) {
      this.numSameDts_ = 0;
    }
    this.numSameDts_++;
    this.latestDts_ = event.dts;
  };
19405
19406 CaptionStream.prototype.flushCCStreams = function (flushType) {
19407 this.ccStreams_.forEach(function (cc) {
19408 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
19409 }, this);
19410 };
19411
19412 CaptionStream.prototype.flushStream = function (flushType) {
19413 // make sure we actually parsed captions before proceeding
19414 if (!this.captionPackets_.length) {
19415 this.flushCCStreams(flushType);
19416 return;
19417 }
19418
19419 // In Chrome, the Array#sort function is not stable so add a
19420 // presortIndex that we can use to ensure we get a stable-sort
19421 this.captionPackets_.forEach(function (elem, idx) {
19422 elem.presortIndex = idx;
19423 });
19424
19425 // sort caption byte-pairs based on their PTS values
19426 this.captionPackets_.sort(function (a, b) {
19427 if (a.pts === b.pts) {
19428 return a.presortIndex - b.presortIndex;
19429 }
19430 return a.pts - b.pts;
19431 });
19432
19433 this.captionPackets_.forEach(function (packet) {
19434 if (packet.type < 2) {
19435 // Dispatch packet to the right Cea608Stream
19436 this.dispatchCea608Packet(packet);
19437 }
19438 // this is where an 'else' would go for a dispatching packets
19439 // to a theoretical Cea708Stream that handles SERVICEn data
19440 }, this);
19441
19442 this.captionPackets_.length = 0;
19443 this.flushCCStreams(flushType);
19444 };
19445
19446 CaptionStream.prototype.flush = function () {
19447 return this.flushStream('flush');
19448 };
19449
19450 // Only called if handling partial data
19451 CaptionStream.prototype.partialFlush = function () {
19452 return this.flushStream('partialFlush');
19453 };
19454
19455 CaptionStream.prototype.reset = function () {
19456 this.latestDts_ = null;
19457 this.ignoreNextEqualDts_ = false;
19458 this.numSameDts_ = 0;
19459 this.activeCea608Channel_ = [null, null];
19460 this.ccStreams_.forEach(function (ccStream) {
19461 ccStream.reset();
19462 });
19463 };
19464
19465 // From the CEA-608 spec:
19466 /*
19467 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
19468 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
19469 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
19470 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
19471 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
19472 * to switch to captioning or Text.
19473 */
19474 // With that in mind, we ignore any data between an XDS control code and a
19475 // subsequent closed-captioning control code.
19476 CaptionStream.prototype.dispatchCea608Packet = function (packet) {
19477 // NOTE: packet.type is the CEA608 field
19478 if (this.setsTextOrXDSActive(packet)) {
19479 this.activeCea608Channel_[packet.type] = null;
19480 } else if (this.setsChannel1Active(packet)) {
19481 this.activeCea608Channel_[packet.type] = 0;
19482 } else if (this.setsChannel2Active(packet)) {
19483 this.activeCea608Channel_[packet.type] = 1;
19484 }
19485 if (this.activeCea608Channel_[packet.type] === null) {
19486 // If we haven't received anything to set the active channel, or the
19487 // packets are Text/XDS data, discard the data; we don't want jumbled
19488 // captions
19489 return;
19490 }
19491 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
19492 };
19493
19494 CaptionStream.prototype.setsChannel1Active = function (packet) {
19495 return (packet.ccData & 0x7800) === 0x1000;
19496 };
19497 CaptionStream.prototype.setsChannel2Active = function (packet) {
19498 return (packet.ccData & 0x7800) === 0x1800;
19499 };
19500 CaptionStream.prototype.setsTextOrXDSActive = function (packet) {
19501 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
19502 };
19503
19504 // ----------------------
19505 // Session to Application
19506 // ----------------------
19507
19508 // This hash maps non-ASCII, special, and extended character codes to their
19509 // proper Unicode equivalent. The first keys that are only a single byte
19510 // are the non-standard ASCII characters, which simply map the CEA608 byte
19511 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
19512 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
19513 // can be performed regardless of the field and data channel on which the
19514 // character code was received.
19515 var CHARACTER_TRANSLATION = {
19516 0x2a: 0xe1, // á
19517 0x5c: 0xe9, // é
19518 0x5e: 0xed, // í
19519 0x5f: 0xf3, // ó
19520 0x60: 0xfa, // ú
19521 0x7b: 0xe7, // ç
19522 0x7c: 0xf7, // ÷
19523 0x7d: 0xd1, // Ñ
19524 0x7e: 0xf1, // ñ
19525 0x7f: 0x2588, // █
19526 0x0130: 0xae, // ®
19527 0x0131: 0xb0, // °
19528 0x0132: 0xbd, // ½
19529 0x0133: 0xbf, // ¿
19530 0x0134: 0x2122, // ™
19531 0x0135: 0xa2, // ¢
19532 0x0136: 0xa3, // £
19533 0x0137: 0x266a, // ♪
19534 0x0138: 0xe0, // à
19535 0x0139: 0xa0, //
19536 0x013a: 0xe8, // è
19537 0x013b: 0xe2, // â
19538 0x013c: 0xea, // ê
19539 0x013d: 0xee, // î
19540 0x013e: 0xf4, // ô
19541 0x013f: 0xfb, // û
19542 0x0220: 0xc1, // Á
19543 0x0221: 0xc9, // É
19544 0x0222: 0xd3, // Ó
19545 0x0223: 0xda, // Ú
19546 0x0224: 0xdc, // Ü
19547 0x0225: 0xfc, // ü
19548 0x0226: 0x2018, // ‘
19549 0x0227: 0xa1, // ¡
19550 0x0228: 0x2a, // *
19551 0x0229: 0x27, // '
19552 0x022a: 0x2014, // —
19553 0x022b: 0xa9, // ©
19554 0x022c: 0x2120, // ℠
19555 0x022d: 0x2022, // •
19556 0x022e: 0x201c, // “
19557 0x022f: 0x201d, // ”
19558 0x0230: 0xc0, // À
19559 0x0231: 0xc2, // Â
19560 0x0232: 0xc7, // Ç
19561 0x0233: 0xc8, // È
19562 0x0234: 0xca, // Ê
19563 0x0235: 0xcb, // Ë
19564 0x0236: 0xeb, // ë
19565 0x0237: 0xce, // Î
19566 0x0238: 0xcf, // Ï
19567 0x0239: 0xef, // ï
19568 0x023a: 0xd4, // Ô
19569 0x023b: 0xd9, // Ù
19570 0x023c: 0xf9, // ù
19571 0x023d: 0xdb, // Û
19572 0x023e: 0xab, // «
19573 0x023f: 0xbb, // »
19574 0x0320: 0xc3, // Ã
19575 0x0321: 0xe3, // ã
19576 0x0322: 0xcd, // Í
19577 0x0323: 0xcc, // Ì
19578 0x0324: 0xec, // ì
19579 0x0325: 0xd2, // Ò
19580 0x0326: 0xf2, // ò
19581 0x0327: 0xd5, // Õ
19582 0x0328: 0xf5, // õ
19583 0x0329: 0x7b, // {
19584 0x032a: 0x7d, // }
19585 0x032b: 0x5c, // \
19586 0x032c: 0x5e, // ^
19587 0x032d: 0x5f, // _
19588 0x032e: 0x7c, // |
19589 0x032f: 0x7e, // ~
19590 0x0330: 0xc4, // Ä
19591 0x0331: 0xe4, // ä
19592 0x0332: 0xd6, // Ö
19593 0x0333: 0xf6, // ö
19594 0x0334: 0xdf, // ß
19595 0x0335: 0xa5, // ¥
19596 0x0336: 0xa4, // ¤
19597 0x0337: 0x2502, // │
19598 0x0338: 0xc5, // Å
19599 0x0339: 0xe5, // å
19600 0x033a: 0xd8, // Ø
19601 0x033b: 0xf8, // ø
19602 0x033c: 0x250c, // ┌
19603 0x033d: 0x2510, // ┐
19604 0x033e: 0x2514, // └
19605 0x033f: 0x2518 // ┘
19606 };
19607
19608 var getCharFromCode = function getCharFromCode(code) {
19609 if (code === null) {
19610 return '';
19611 }
19612 code = CHARACTER_TRANSLATION[code] || code;
19613 return String.fromCharCode(code);
19614 };
19615
19616 // the index of the last row in a CEA-608 display buffer
19617 var BOTTOM_ROW = 14;
19618
19619 // This array is used for mapping PACs -> row #, since there's no way of
19620 // getting it through bit logic.
19621 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420];
19622
19623 // CEA-608 captions are rendered onto a 34x15 matrix of character
19624 // cells. The "bottom" row is the last element in the outer array.
19625 var createDisplayBuffer = function createDisplayBuffer() {
19626 var result = [],
19627 i = BOTTOM_ROW + 1;
19628 while (i--) {
19629 result.push('');
19630 }
19631 return result;
19632 };
19633
  /**
   * A decoder for one CEA-608 caption channel (a single field/data-channel
   * combination, i.e. CC1-CC4). `push` accepts caption byte pairs and updates
   * the display state; completed captions are emitted by `flushDisplayed`
   * as 'data' events with { startPts, endPts, text, stream }.
   * Supports pop-on, roll-up, and paint-on caption modes.
   * @param field {Integer} the CEA-608 field (0 or 1) this stream decodes
   * @param dataChannel {Integer} the data channel (0 or 1) this stream decodes
   */
  var Cea608Stream = function Cea608Stream(field, dataChannel) {
    Cea608Stream.prototype.init.call(this);

    this.field_ = field || 0;
    this.dataChannel_ = dataChannel || 0;

    // human-readable channel name: 'CC1', 'CC2', 'CC3', or 'CC4'
    this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);

    this.setConstants();
    this.reset();

    /**
     * Decode a single caption byte pair, either executing a control code or
     * appending character text according to the current caption mode.
     * @param packet {Object} a caption packet with `ccData` and `pts`
     */
    this.push = function (packet) {
      var data, swap, char0, char1, text;
      // remove the parity bits
      data = packet.ccData & 0x7f7f;

      // ignore duplicate control codes; the spec demands they're sent twice
      if (data === this.lastControlCode_) {
        this.lastControlCode_ = null;
        return;
      }

      // Store control codes
      if ((data & 0xf000) === 0x1000) {
        this.lastControlCode_ = data;
      } else if (data !== this.PADDING_) {
        this.lastControlCode_ = null;
      }

      // split the pair into its two bytes
      char0 = data >>> 8;
      char1 = data & 0xff;

      if (data === this.PADDING_) {
        return;
      } else if (data === this.RESUME_CAPTION_LOADING_) {
        this.mode_ = 'popOn';
      } else if (data === this.END_OF_CAPTION_) {
        // If an EOC is received while in paint-on mode, the displayed caption
        // text should be swapped to non-displayed memory as if it was a pop-on
        // caption. Because of that, we should explicitly switch back to pop-on
        // mode
        this.mode_ = 'popOn';
        this.clearFormatting(packet.pts);
        // if a caption was being displayed, it's gone now
        this.flushDisplayed(packet.pts);

        // flip memory
        swap = this.displayed_;
        this.displayed_ = this.nonDisplayed_;
        this.nonDisplayed_ = swap;

        // start measuring the time to display the caption
        this.startPts_ = packet.pts;
      } else if (data === this.ROLL_UP_2_ROWS_) {
        this.rollUpRows_ = 2;
        this.setRollUp(packet.pts);
      } else if (data === this.ROLL_UP_3_ROWS_) {
        this.rollUpRows_ = 3;
        this.setRollUp(packet.pts);
      } else if (data === this.ROLL_UP_4_ROWS_) {
        this.rollUpRows_ = 4;
        this.setRollUp(packet.pts);
      } else if (data === this.CARRIAGE_RETURN_) {
        // end the current roll-up line: emit it, scroll, and start timing anew
        this.clearFormatting(packet.pts);
        this.flushDisplayed(packet.pts);
        this.shiftRowsUp_();
        this.startPts_ = packet.pts;
      } else if (data === this.BACKSPACE_) {
        // remove the last buffered character from the active memory
        if (this.mode_ === 'popOn') {
          this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
        } else {
          this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
        }
      } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
        this.flushDisplayed(packet.pts);
        this.displayed_ = createDisplayBuffer();
      } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
        this.nonDisplayed_ = createDisplayBuffer();
      } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
        if (this.mode_ !== 'paintOn') {
          // NOTE: This should be removed when proper caption positioning is
          // implemented
          this.flushDisplayed(packet.pts);
          this.displayed_ = createDisplayBuffer();
        }
        this.mode_ = 'paintOn';
        this.startPts_ = packet.pts;

        // Append special characters to caption text
      } else if (this.isSpecialCharacter(char0, char1)) {
        // Bitmask char0 so that we can apply character transformations
        // regardless of field and data channel.
        // Then byte-shift to the left and OR with char1 so we can pass the
        // entire character code to `getCharFromCode`.
        char0 = (char0 & 0x03) << 8;
        text = getCharFromCode(char0 | char1);
        this[this.mode_](packet.pts, text);
        this.column_++;

        // Append extended characters to caption text
      } else if (this.isExtCharacter(char0, char1)) {
        // Extended characters always follow their "non-extended" equivalents.
        // IE if a "è" is desired, you'll always receive "eè"; non-compliant
        // decoders are supposed to drop the "è", while compliant decoders
        // backspace the "e" and insert "è".

        // Delete the previous character
        if (this.mode_ === 'popOn') {
          this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
        } else {
          this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
        }

        // Bitmask char0 so that we can apply character transformations
        // regardless of field and data channel.
        // Then byte-shift to the left and OR with char1 so we can pass the
        // entire character code to `getCharFromCode`.
        char0 = (char0 & 0x03) << 8;
        text = getCharFromCode(char0 | char1);
        this[this.mode_](packet.pts, text);
        this.column_++;

        // Process mid-row codes
      } else if (this.isMidRowCode(char0, char1)) {
        // Attributes are not additive, so clear all formatting
        this.clearFormatting(packet.pts);

        // According to the standard, mid-row codes
        // should be replaced with spaces, so add one now
        this[this.mode_](packet.pts, ' ');
        this.column_++;

        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }

        if ((char1 & 0x1) === 0x1) {
          this.addFormatting(packet.pts, ['u']);
        }

        // Detect offset control codes and adjust cursor
      } else if (this.isOffsetControlCode(char0, char1)) {
        // Cursor position is set by indent PAC (see below) in 4-column
        // increments, with an additional offset code of 1-3 to reach any
        // of the 32 columns specified by CEA-608. So all we need to do
        // here is increment the column cursor by the given offset.
        this.column_ += char1 & 0x03;

        // Detect PACs (Preamble Address Codes)
      } else if (this.isPAC(char0, char1)) {

        // There's no logic for PAC -> row mapping, so we have to just
        // find the row code in an array and use its index :(
        var row = ROWS.indexOf(data & 0x1f20);

        // Configure the caption window if we're in roll-up mode
        if (this.mode_ === 'rollUp') {
          // This implies that the base row is incorrectly set.
          // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
          // of roll-up rows set.
          if (row - this.rollUpRows_ + 1 < 0) {
            row = this.rollUpRows_ - 1;
          }

          this.setRollUp(packet.pts, row);
        }

        if (row !== this.row_) {
          // formatting is only persistent for current row
          this.clearFormatting(packet.pts);
          this.row_ = row;
        }
        // All PACs can apply underline, so detect and apply
        // (All odd-numbered second bytes set underline)
        if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
          this.addFormatting(packet.pts, ['u']);
        }

        if ((data & 0x10) === 0x10) {
          // We've got an indent level code. Each successive even number
          // increments the column cursor by 4, so we can get the desired
          // column position by bit-shifting to the right (to get n/2)
          // and multiplying by 4.
          this.column_ = ((data & 0xe) >> 1) * 4;
        }

        if (this.isColorPAC(char1)) {
          // it's a color code, though we only support white, which
          // can be either normal or italicized. white italics can be
          // either 0x4e or 0x6e depending on the row, so we just
          // bitwise-and with 0xe to see if italics should be turned on
          if ((char1 & 0xe) === 0xe) {
            this.addFormatting(packet.pts, ['i']);
          }
        }

        // We have a normal character in char0, and possibly one in char1
      } else if (this.isNormalChar(char0)) {
        if (char1 === 0x00) {
          char1 = null;
        }
        text = getCharFromCode(char0);
        text += getCharFromCode(char1);
        this[this.mode_](packet.pts, text);
        this.column_ += text.length;
      } // finish data processing
    };
  };
19842 Cea608Stream.prototype = new stream();
19843 // Trigger a cue point that captures the current state of the
19844 // display buffer
19845 Cea608Stream.prototype.flushDisplayed = function (pts) {
19846 var content = this.displayed_
19847 // remove spaces from the start and end of the string
19848 .map(function (row) {
19849 try {
19850 return row.trim();
19851 } catch (e) {
19852 // Ordinarily, this shouldn't happen. However, caption
19853 // parsing errors should not throw exceptions and
19854 // break playback.
19855 // eslint-disable-next-line no-console
19856 console.error('Skipping malformed caption.');
19857 return '';
19858 }
19859 })
19860 // combine all text rows to display in one cue
19861 .join('\n')
19862 // and remove blank rows from the start and end, but not the middle
19863 .replace(/^\n+|\n+$/g, '');
19864
19865 if (content.length) {
19866 this.trigger('data', {
19867 startPts: this.startPts_,
19868 endPts: pts,
19869 text: content,
19870 stream: this.name_
19871 });
19872 }
19873 };
19874
19875 /**
19876 * Zero out the data, used for startup and on seek
19877 */
19878 Cea608Stream.prototype.reset = function () {
19879 this.mode_ = 'popOn';
19880 // When in roll-up mode, the index of the last row that will
19881 // actually display captions. If a caption is shifted to a row
19882 // with a lower index than this, it is cleared from the display
19883 // buffer
19884 this.topRow_ = 0;
19885 this.startPts_ = 0;
19886 this.displayed_ = createDisplayBuffer();
19887 this.nonDisplayed_ = createDisplayBuffer();
19888 this.lastControlCode_ = null;
19889
19890 // Track row and column for proper line-breaking and spacing
19891 this.column_ = 0;
19892 this.row_ = BOTTOM_ROW;
19893 this.rollUpRows_ = 2;
19894
19895 // This variable holds currently-applied formatting
19896 this.formatting_ = [];
19897 };
19898
19899 /**
19900 * Sets up control code and related constants for this instance
19901 */
19902 Cea608Stream.prototype.setConstants = function () {
19903 // The following attributes have these uses:
19904 // ext_ : char0 for mid-row codes, and the base for extended
19905 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
19906 // extended codes)
19907 // control_: char0 for control codes, except byte-shifted to the
19908 // left so that we can do this.control_ | CONTROL_CODE
19909 // offset_: char0 for tab offset codes
19910 //
19911 // It's also worth noting that control codes, and _only_ control codes,
19912 // differ between field 1 and field2. Field 2 control codes are always
19913 // their field 1 value plus 1. That's why there's the "| field" on the
19914 // control value.
19915 if (this.dataChannel_ === 0) {
19916 this.BASE_ = 0x10;
19917 this.EXT_ = 0x11;
19918 this.CONTROL_ = (0x14 | this.field_) << 8;
19919 this.OFFSET_ = 0x17;
19920 } else if (this.dataChannel_ === 1) {
19921 this.BASE_ = 0x18;
19922 this.EXT_ = 0x19;
19923 this.CONTROL_ = (0x1c | this.field_) << 8;
19924 this.OFFSET_ = 0x1f;
19925 }
19926
19927 // Constants for the LSByte command codes recognized by Cea608Stream. This
19928 // list is not exhaustive. For a more comprehensive listing and semantics see
19929 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
19930 // Padding
19931 this.PADDING_ = 0x0000;
19932 // Pop-on Mode
19933 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
19934 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f;
19935 // Roll-up Mode
19936 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
19937 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
19938 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
19939 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d;
19940 // paint-on mode
19941 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29;
19942 // Erasure
19943 this.BACKSPACE_ = this.CONTROL_ | 0x21;
19944 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
19945 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
19946 };
19947
19948 /**
19949 * Detects if the 2-byte packet data is a special character
19950 *
19951 * Special characters have a second byte in the range 0x30 to 0x3f,
19952 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
19953 * data channel 2).
19954 *
19955 * @param {Integer} char0 The first byte
19956 * @param {Integer} char1 The second byte
19957 * @return {Boolean} Whether the 2 bytes are an special character
19958 */
19959 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
19960 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
19961 };
19962
19963 /**
19964 * Detects if the 2-byte packet data is an extended character
19965 *
19966 * Extended characters have a second byte in the range 0x20 to 0x3f,
19967 * with the first byte being 0x12 or 0x13 (for data channel 1) or
19968 * 0x1a or 0x1b (for data channel 2).
19969 *
19970 * @param {Integer} char0 The first byte
19971 * @param {Integer} char1 The second byte
19972 * @return {Boolean} Whether the 2 bytes are an extended character
19973 */
19974 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
19975 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
19976 };
19977
19978 /**
19979 * Detects if the 2-byte packet is a mid-row code
19980 *
19981 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
19982 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
19983 * channel 2).
19984 *
19985 * @param {Integer} char0 The first byte
19986 * @param {Integer} char1 The second byte
19987 * @return {Boolean} Whether the 2 bytes are a mid-row code
19988 */
19989 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
19990 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
19991 };
19992
19993 /**
19994 * Detects if the 2-byte packet is an offset control code
19995 *
19996 * Offset control codes have a second byte in the range 0x21 to 0x23,
19997 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
19998 * data channel 2).
19999 *
20000 * @param {Integer} char0 The first byte
20001 * @param {Integer} char1 The second byte
20002 * @return {Boolean} Whether the 2 bytes are an offset control code
20003 */
20004 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
20005 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
20006 };
20007
20008 /**
20009 * Detects if the 2-byte packet is a Preamble Address Code
20010 *
20011 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
20012 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
20013 * range 0x40 to 0x7f.
20014 *
20015 * @param {Integer} char0 The first byte
20016 * @param {Integer} char1 The second byte
20017 * @return {Boolean} Whether the 2 bytes are a PAC
20018 */
20019 Cea608Stream.prototype.isPAC = function (char0, char1) {
20020 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
20021 };
20022
20023 /**
20024 * Detects if a packet's second byte is in the range of a PAC color code
20025 *
20026 * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
20027 * 0x60 to 0x6f.
20028 *
20029 * @param {Integer} char1 The second byte
20030 * @return {Boolean} Whether the byte is a color PAC
20031 */
20032 Cea608Stream.prototype.isColorPAC = function (char1) {
20033 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
20034 };
20035
20036 /**
20037 * Detects if a single byte is in the range of a normal character
20038 *
20039 * Normal text bytes are in the range 0x20 to 0x7f.
20040 *
20041 * @param {Integer} char The byte
20042 * @return {Boolean} Whether the byte is a normal character
20043 */
20044 Cea608Stream.prototype.isNormalChar = function (char) {
20045 return char >= 0x20 && char <= 0x7f;
20046 };
20047
  /**
   * Configures roll-up
   *
   * @param {Integer} pts Current PTS
   * @param {Integer} newBaseRow Used by PACs to slide the current window to
   *                             a new position
   */
  Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
    // Reset the base row to the bottom row when switching modes
    if (this.mode_ !== 'rollUp') {
      this.row_ = BOTTOM_ROW;
      this.mode_ = 'rollUp';
      // Spec says to wipe memories when switching to roll-up
      this.flushDisplayed(pts);
      this.nonDisplayed_ = createDisplayBuffer();
      this.displayed_ = createDisplayBuffer();
    }

    if (newBaseRow !== undefined && newBaseRow !== this.row_) {
      // move currently displayed captions (up or down) to the new base row;
      // rows are copied base-row-first, working upward through the window
      for (var i = 0; i < this.rollUpRows_; i++) {
        this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
        this.displayed_[this.row_ - i] = '';
      }
    }

    // when no explicit base row was given, keep the current one
    if (newBaseRow === undefined) {
      newBaseRow = this.row_;
    }

    // the roll-up window spans rollUpRows_ rows ending at the base row
    this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
  };
20080
20081 // Adds the opening HTML tag for the passed character to the caption text,
20082 // and keeps track of it for later closing
20083 Cea608Stream.prototype.addFormatting = function (pts, format) {
20084 this.formatting_ = this.formatting_.concat(format);
20085 var text = format.reduce(function (text, format) {
20086 return text + '<' + format + '>';
20087 }, '');
20088 this[this.mode_](pts, text);
20089 };
20090
20091 // Adds HTML closing tags for current formatting to caption text and
20092 // clears remembered formatting
20093 Cea608Stream.prototype.clearFormatting = function (pts) {
20094 if (!this.formatting_.length) {
20095 return;
20096 }
20097 var text = this.formatting_.reverse().reduce(function (text, format) {
20098 return text + '</' + format + '>';
20099 }, '');
20100 this.formatting_ = [];
20101 this[this.mode_](pts, text);
20102 };
20103
20104 // Mode Implementations
20105 Cea608Stream.prototype.popOn = function (pts, text) {
20106 var baseRow = this.nonDisplayed_[this.row_];
20107
20108 // buffer characters
20109 baseRow += text;
20110 this.nonDisplayed_[this.row_] = baseRow;
20111 };
20112
20113 Cea608Stream.prototype.rollUp = function (pts, text) {
20114 var baseRow = this.displayed_[this.row_];
20115
20116 baseRow += text;
20117 this.displayed_[this.row_] = baseRow;
20118 };
20119
20120 Cea608Stream.prototype.shiftRowsUp_ = function () {
20121 var i;
20122 // clear out inactive rows
20123 for (i = 0; i < this.topRow_; i++) {
20124 this.displayed_[i] = '';
20125 }
20126 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
20127 this.displayed_[i] = '';
20128 }
20129 // shift displayed rows up
20130 for (i = this.topRow_; i < this.row_; i++) {
20131 this.displayed_[i] = this.displayed_[i + 1];
20132 }
20133 // clear out the bottom row
20134 this.displayed_[this.row_] = '';
20135 };
20136
20137 Cea608Stream.prototype.paintOn = function (pts, text) {
20138 var baseRow = this.displayed_[this.row_];
20139
20140 baseRow += text;
20141 this.displayed_[this.row_] = baseRow;
20142 };
20143
20144 // exports
20145 var captionStream = {
20146 CaptionStream: CaptionStream,
20147 Cea608Stream: Cea608Stream
20148 };
20149
  // Local aliases for helpers defined earlier in this bundle, used by the
  // mp4 caption-parsing code below.
  var discardEmulationPreventionBytes$1 = captionPacketParser.discardEmulationPreventionBytes;
  var CaptionStream$1 = captionStream.CaptionStream;
20152
20153 /**
20154 * Maps an offset in the mdat to a sample based on the the size of the samples.
20155 * Assumes that `parseSamples` has been called first.
20156 *
20157 * @param {Number} offset - The offset into the mdat
20158 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
20159 * @return {?Object} The matching sample, or null if no match was found.
20160 *
20161 * @see ISO-BMFF-12/2015, Section 8.8.8
20162 **/
20163 var mapToSample = function mapToSample(offset, samples) {
20164 var approximateOffset = offset;
20165
20166 for (var i = 0; i < samples.length; i++) {
20167 var sample = samples[i];
20168
20169 if (approximateOffset < sample.size) {
20170 return sample;
20171 }
20172
20173 approximateOffset -= sample.size;
20174 }
20175
20176 return null;
20177 };
20178
  /**
   * Finds SEI nal units contained in a Media Data Box.
   * Assumes that `parseSamples` has been called first.
   *
   * @param {Uint8Array} avcStream - The bytes of the mdat
   * @param {Object[]} samples - The samples parsed out by `parseSamples`
   * @param {Number} trackId - The trackId of this video track
   * @return {Object[]} seiNals - the parsed SEI NALUs found.
   *   The contents of the seiNal should match what is expected by
   *   CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
   *
   * @see ISO-BMFF-12/2015, Section 8.1.1
   * @see Rec. ITU-T H.264, 7.3.2.3.1
   **/
  var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
    var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
        result = [],
        seiNal,
        i,
        length,
        lastMatchedSample;

    // NALUs in an mdat are length-prefixed (4-byte big-endian size), so walk
    // the stream one NALU at a time; `i` advances by 4 (prefix) inside the
    // body and by `length` (payload) in the update clause
    for (i = 0; i + 4 < avcStream.length; i += length) {
      length = avcView.getUint32(i);
      i += 4;

      // Bail if this doesn't appear to be an H264 stream
      if (length <= 0) {
        continue;
      }

      // the low 5 bits of the first payload byte are the nal_unit_type
      switch (avcStream[i] & 0x1F) {
        case 0x06:
          // SEI (Supplemental Enhancement Information) NAL unit — may carry
          // CEA-608/708 caption data
          var data = avcStream.subarray(i + 1, i + 1 + length);
          var matchingSample = mapToSample(i, samples);

          seiNal = {
            nalUnitType: 'sei_rbsp',
            size: length,
            data: data,
            escapedRBSP: discardEmulationPreventionBytes$1(data),
            trackId: trackId
          };

          if (matchingSample) {
            seiNal.pts = matchingSample.pts;
            seiNal.dts = matchingSample.dts;
            lastMatchedSample = matchingSample;
          } else if (lastMatchedSample) {
            // If a matching sample cannot be found, use the last
            // sample's values as they should be as close as possible
            seiNal.pts = lastMatchedSample.pts;
            seiNal.dts = lastMatchedSample.dts;
          } else {
            // no sample has ever matched, so there is no usable timestamp;
            // drop this NALU rather than emit one without timing
            // eslint-disable-next-line no-console
            console.log("We've encountered a nal unit without data. See mux.js#233.");
            break;
          }

          result.push(seiNal);
          break;
        default:
          break;
      }
    }

    return result;
  };
20247
20248 /**
20249 * Parses sample information out of Track Run Boxes and calculates
20250 * the absolute presentation and decode timestamps of each sample.
20251 *
20252 * @param {Array<Uint8Array>} truns - The Trun Run boxes to be parsed
20253 * @param {Number} baseMediaDecodeTime - base media decode time from tfdt
20254 @see ISO-BMFF-12/2015, Section 8.8.12
20255 * @param {Object} tfhd - The parsed Track Fragment Header
20256 * @see inspect.parseTfhd
20257 * @return {Object[]} the parsed samples
20258 *
20259 * @see ISO-BMFF-12/2015, Section 8.8.8
20260 **/
20261 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
20262 var currentDts = baseMediaDecodeTime;
20263 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
20264 var defaultSampleSize = tfhd.defaultSampleSize || 0;
20265 var trackId = tfhd.trackId;
20266 var allSamples = [];
20267
20268 truns.forEach(function (trun) {
20269 // Note: We currently do not parse the sample table as well
20270 // as the trun. It's possible some sources will require this.
20271 // moov > trak > mdia > minf > stbl
20272 var trackRun = mp4Inspector.parseTrun(trun);
20273 var samples = trackRun.samples;
20274
20275 samples.forEach(function (sample) {
20276 if (sample.duration === undefined) {
20277 sample.duration = defaultSampleDuration;
20278 }
20279 if (sample.size === undefined) {
20280 sample.size = defaultSampleSize;
20281 }
20282 sample.trackId = trackId;
20283 sample.dts = currentDts;
20284 if (sample.compositionTimeOffset === undefined) {
20285 sample.compositionTimeOffset = 0;
20286 }
20287 sample.pts = currentDts + sample.compositionTimeOffset;
20288
20289 currentDts += sample.duration;
20290 });
20291
20292 allSamples = allSamples.concat(samples);
20293 });
20294
20295 return allSamples;
20296 };
20297
20298 /**
20299 * Parses out caption nals from an FMP4 segment's video tracks.
20300 *
20301 * @param {Uint8Array} segment - The bytes of a single segment
20302 * @param {Number} videoTrackId - The trackId of a video track in the segment
20303 * @return {Object.<Number, Object[]>} A mapping of video trackId to
20304 * a list of seiNals found in that track
20305 **/
20306 var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
20307 // To get the samples
20308 var trafs = probe.findBox(segment, ['moof', 'traf']);
20309 // To get SEI NAL units
20310 var mdats = probe.findBox(segment, ['mdat']);
20311 var captionNals = {};
20312 var mdatTrafPairs = [];
20313
20314 // Pair up each traf with a mdat as moofs and mdats are in pairs
20315 mdats.forEach(function (mdat, index) {
20316 var matchingTraf = trafs[index];
20317 mdatTrafPairs.push({
20318 mdat: mdat,
20319 traf: matchingTraf
20320 });
20321 });
20322
20323 mdatTrafPairs.forEach(function (pair) {
20324 var mdat = pair.mdat;
20325 var traf = pair.traf;
20326 var tfhd = probe.findBox(traf, ['tfhd']);
20327 // Exactly 1 tfhd per traf
20328 var headerInfo = mp4Inspector.parseTfhd(tfhd[0]);
20329 var trackId = headerInfo.trackId;
20330 var tfdt = probe.findBox(traf, ['tfdt']);
20331 // Either 0 or 1 tfdt per traf
20332 var baseMediaDecodeTime = tfdt.length > 0 ? mp4Inspector.parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
20333 var truns = probe.findBox(traf, ['trun']);
20334 var samples;
20335 var seiNals;
20336
20337 // Only parse video data for the chosen video track
20338 if (videoTrackId === trackId && truns.length > 0) {
20339 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
20340
20341 seiNals = findSeiNals(mdat, samples, trackId);
20342
20343 if (!captionNals[trackId]) {
20344 captionNals[trackId] = [];
20345 }
20346
20347 captionNals[trackId] = captionNals[trackId].concat(seiNals);
20348 }
20349 });
20350
20351 return captionNals;
20352 };
20353
20354 /**
20355 * Parses out inband captions from an MP4 container and returns
20356 * caption objects that can be used by WebVTT and the TextTrack API.
20357 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
20358 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
20359 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
20360 *
20361 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
20362 * @param {Number} trackId - The id of the video track to parse
20363 * @param {Number} timescale - The timescale for the video track from the init segment
20364 *
20365 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
20366 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
20367 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
20368 * @return {String} parsedCaptions[].text - The visible content of the caption
20369 **/
20370 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
20371 var seiNals;
20372
20373 // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
20374 if (trackId === null) {
20375 return null;
20376 }
20377
20378 seiNals = parseCaptionNals(segment, trackId);
20379
20380 return {
20381 seiNals: seiNals[trackId],
20382 timescale: timescale
20383 };
20384 };
20385
  /**
   * Converts SEI NALUs into captions that can be used by video.js.
   *
   * Stateful: an instance must be `init()`-ed before `parse()` produces
   * output, and segments seen before a trackId/timescale are known are
   * cached and replayed.
   **/
  var CaptionParser = function CaptionParser() {
    var isInitialized = false;
    var captionStream$$1;

    // Stores segments seen before trackId and timescale are set
    var segmentCache;
    // Stores video track ID of the track being parsed
    var trackId;
    // Stores the timescale of the track being parsed
    var timescale;
    // Stores captions parsed so far
    var parsedCaptions;
    // Stores whether we are receiving partial data or not
    var parsingPartial;

    /**
     * A method to indicate whether a CaptionParser has been initialized
     * @returns {Boolean}
     **/
    this.isInitialized = function () {
      return isInitialized;
    };

    /**
     * Initializes the underlying CaptionStream, SEI NAL parsing
     * and management, and caption collection
     *
     * @param {Object} [options]
     * @param {Boolean} [options.isPartial] - true when data arrives in
     *   partial (progressive) chunks; selects partialFlush over flush
     **/
    this.init = function (options) {
      captionStream$$1 = new CaptionStream$1();
      isInitialized = true;
      parsingPartial = options ? options.isPartial : false;

      // Collect dispatched captions
      captionStream$$1.on('data', function (event) {
        // Convert to seconds in the source's timescale
        event.startTime = event.startPts / timescale;
        event.endTime = event.endPts / timescale;

        parsedCaptions.captions.push(event);
        parsedCaptions.captionStreams[event.stream] = true;
      });
    };

    /**
     * Determines if a new video track will be selected
     * or if the timescale changed
     * @return {Boolean}
     **/
    this.isNewInit = function (videoTrackIds, timescales) {
      // empty arguments are treated as "no new init seen"
      if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
        return false;
      }

      return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
    };

    /**
     * Parses out SEI captions and interacts with underlying
     * CaptionStream to return dispatched captions
     *
     * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
     * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
     * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
     * @return {?Object} the accumulated parsedCaptions, or null when nothing
     *   could be parsed yet
     * @see parseEmbeddedCaptions
     * @see m2ts/caption-stream.js
     **/
    this.parse = function (segment, videoTrackIds, timescales) {
      var parsedData;

      if (!this.isInitialized()) {
        return null;

        // This is not likely to be a video segment
      } else if (!videoTrackIds || !timescales) {
        return null;
      } else if (this.isNewInit(videoTrackIds, timescales)) {
        // Use the first video track only as there is no
        // mechanism to switch to other video tracks
        trackId = videoTrackIds[0];
        timescale = timescales[trackId];

        // If an init segment has not been seen yet, hold onto segment
        // data until we have one.
        // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
      } else if (trackId === null || !timescale) {
        segmentCache.push(segment);
        return null;
      }

      // Now that a timescale and trackId is set, parse cached segments
      // (recursive call; the cache is drained before the current segment)
      while (segmentCache.length > 0) {
        var cachedSegment = segmentCache.shift();

        this.parse(cachedSegment, videoTrackIds, timescales);
      }

      parsedData = parseEmbeddedCaptions(segment, trackId, timescale);

      if (parsedData === null || !parsedData.seiNals) {
        return null;
      }

      this.pushNals(parsedData.seiNals);
      // Force the parsed captions to be dispatched
      this.flushStream();

      return parsedCaptions;
    };

    /**
     * Pushes SEI NALUs onto CaptionStream
     * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
     * Assumes that `parseCaptionNals` has been called first
     * @see m2ts/caption-stream.js
     **/
    this.pushNals = function (nals) {
      if (!this.isInitialized() || !nals || nals.length === 0) {
        return null;
      }

      nals.forEach(function (nal) {
        captionStream$$1.push(nal);
      });
    };

    /**
     * Flushes underlying CaptionStream to dispatch processed, displayable captions
     * @see m2ts/caption-stream.js
     **/
    this.flushStream = function () {
      if (!this.isInitialized()) {
        return null;
      }

      // partial data must not be fully flushed or in-progress captions would
      // be cut short
      if (!parsingPartial) {
        captionStream$$1.flush();
      } else {
        captionStream$$1.partialFlush();
      }
    };

    /**
     * Reset caption buckets for new data
     **/
    this.clearParsedCaptions = function () {
      parsedCaptions.captions = [];
      parsedCaptions.captionStreams = {};
    };

    /**
     * Resets underlying CaptionStream
     * @see m2ts/caption-stream.js
     **/
    this.resetCaptionStream = function () {
      if (!this.isInitialized()) {
        return null;
      }

      captionStream$$1.reset();
    };

    /**
     * Convenience method to clear all captions flushed from the
     * CaptionStream and still being parsed
     * @see m2ts/caption-stream.js
     **/
    this.clearAllCaptions = function () {
      this.clearParsedCaptions();
      this.resetCaptionStream();
    };

    /**
     * Reset caption parser to its initial (pre-init-segment) state
     **/
    this.reset = function () {
      segmentCache = [];
      trackId = null;
      timescale = null;

      if (!parsedCaptions) {
        parsedCaptions = {
          captions: [],
          // CC1, CC2, CC3, CC4
          captionStreams: {}
        };
      } else {
        this.clearParsedCaptions();
      }

      this.resetCaptionStream();
    };

    // establish the initial state on construction
    this.reset();
  };
20583
  // export alias for the CaptionParser constructor
  var captionParser = CaptionParser;
20585
  /**
   * @file segment-loader.js
   */

  // delay between buffer checks, in milliseconds
  var CHECK_BUFFER_DELAY = 500;
20592
20593 /**
20594 * Determines if we should call endOfStream on the media source based
20595 * on the state of the buffer or if appened segment was the final
20596 * segment in the playlist.
20597 *
20598 * @param {Object} playlist a media playlist object
20599 * @param {Object} mediaSource the MediaSource object
20600 * @param {Number} segmentIndex the index of segment we last appended
20601 * @returns {Boolean} do we need to call endOfStream on the MediaSource
20602 */
20603 var detectEndOfStream = function detectEndOfStream(playlist, mediaSource, segmentIndex) {
20604 if (!playlist || !mediaSource) {
20605 return false;
20606 }
20607
20608 var segments = playlist.segments;
20609
20610 // determine a few boolean values to help make the branch below easier
20611 // to read
20612 var appendedLastSegment = segmentIndex === segments.length;
20613
20614 // if we've buffered to the end of the video, we need to call endOfStream
20615 // so that MediaSources can trigger the `ended` event when it runs out of
20616 // buffered data instead of waiting for me
20617 return playlist.endList && mediaSource.readyState === 'open' && appendedLastSegment;
20618 };
20619
20620 var finite = function finite(num) {
20621 return typeof num === 'number' && isFinite(num);
20622 };
20623
20624 var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, newSegmentMedia) {
20625 // Although these checks should most likely cover non 'main' types, for now it narrows
20626 // the scope of our checks.
20627 if (loaderType !== 'main' || !startingMedia || !newSegmentMedia) {
20628 return null;
20629 }
20630
20631 if (!newSegmentMedia.containsAudio && !newSegmentMedia.containsVideo) {
20632 return 'Neither audio nor video found in segment.';
20633 }
20634
20635 if (startingMedia.containsVideo && !newSegmentMedia.containsVideo) {
20636 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
20637 }
20638
20639 if (!startingMedia.containsVideo && newSegmentMedia.containsVideo) {
20640 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
20641 }
20642
20643 return null;
20644 };
20645
20646 /**
20647 * Calculates a time value that is safe to remove from the back buffer without interupting
20648 * playback.
20649 *
20650 * @param {TimeRange} seekable
20651 * The current seekable range
20652 * @param {Number} currentTime
20653 * The current time of the player
20654 * @param {Number} targetDuration
20655 * The target duration of the current playlist
20656 * @return {Number}
20657 * Time that is safe to remove from the back buffer without interupting playback
20658 */
20659 var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable$$1, currentTime, targetDuration) {
20660 // 30 seconds before the playhead provides a safe default for trimming.
20661 //
20662 // Choosing a reasonable default is particularly important for high bitrate content and
20663 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
20664 // throw an APPEND_BUFFER_ERR.
20665 var trimTime = currentTime - 30;
20666
20667 if (seekable$$1.length) {
20668 // Some live playlists may have a shorter window of content than the full allowed back
20669 // buffer. For these playlists, don't save content that's no longer within the window.
20670 trimTime = Math.max(trimTime, seekable$$1.start(0));
20671 }
20672
20673 // Don't remove within target duration of the current time to avoid the possibility of
20674 // removing the GOP currently being played, as removing it can cause playback stalls.
20675 var maxTrimTime = currentTime - targetDuration;
20676
20677 return Math.min(maxTrimTime, trimTime);
20678 };
20679
20680 var segmentInfoString = function segmentInfoString(segmentInfo) {
20681 var _segmentInfo$segment = segmentInfo.segment,
20682 start = _segmentInfo$segment.start,
20683 end = _segmentInfo$segment.end,
20684 _segmentInfo$playlist = segmentInfo.playlist,
20685 seq = _segmentInfo$playlist.mediaSequence,
20686 id = _segmentInfo$playlist.id,
20687 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
20688 segments = _segmentInfo$playlist2 === undefined ? [] : _segmentInfo$playlist2,
20689 index = segmentInfo.mediaIndex,
20690 timeline = segmentInfo.timeline;
20691
20692
20693 return ['appending [' + index + '] of [' + seq + ', ' + (seq + segments.length) + '] from playlist [' + id + ']', '[' + start + ' => ' + end + '] in timeline [' + timeline + ']'].join(' ');
20694 };
20695
20696 /**
20697 * An object that manages segment loading and appending.
20698 *
20699 * @class SegmentLoader
20700 * @param {Object} options required and optional options
20701 * @extends videojs.EventTarget
20702 */
20703
20704 var SegmentLoader = function (_videojs$EventTarget) {
20705 inherits(SegmentLoader, _videojs$EventTarget);
20706
    /**
     * Constructs a SegmentLoader.
     *
     * @param {Object} settings - required configuration; must include a
     *   `currentTime` getter function and a `mediaSource`
     * @throws {TypeError} when settings, settings.currentTime or
     *   settings.mediaSource is missing
     */
    function SegmentLoader(settings) {
      classCallCheck(this, SegmentLoader);

      // check pre-conditions
      var _this = possibleConstructorReturn(this, (SegmentLoader.__proto__ || Object.getPrototypeOf(SegmentLoader)).call(this));

      if (!settings) {
        throw new TypeError('Initialization settings are required');
      }
      if (typeof settings.currentTime !== 'function') {
        throw new TypeError('No currentTime getter specified');
      }
      if (!settings.mediaSource) {
        throw new TypeError('No MediaSource specified');
      }
      // public properties
      _this.bandwidth = settings.bandwidth;
      _this.throughput = { rate: 0, count: 0 };
      _this.roundTrip = NaN;
      _this.resetStats_();
      _this.mediaIndex = null;

      // private settings
      _this.hasPlayed_ = settings.hasPlayed;
      _this.currentTime_ = settings.currentTime;
      _this.seekable_ = settings.seekable;
      _this.seeking_ = settings.seeking;
      _this.duration_ = settings.duration;
      _this.mediaSource_ = settings.mediaSource;
      _this.hls_ = settings.hls;
      _this.loaderType_ = settings.loaderType;
      _this.startingMedia_ = void 0;
      _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
      _this.goalBufferLength_ = settings.goalBufferLength;
      _this.sourceType_ = settings.sourceType;
      _this.inbandTextTracks_ = settings.inbandTextTracks;
      _this.state_ = 'INIT';

      // private instance variables
      _this.checkBufferTimeout_ = null;
      _this.error_ = void 0;
      _this.currentTimeline_ = -1;
      _this.pendingSegment_ = null;
      _this.mimeType_ = null;
      _this.sourceUpdater_ = null;
      _this.xhrOptions_ = null;

      // Fragmented mp4 playback
      _this.activeInitSegmentId_ = null;
      _this.initSegments_ = {};

      // HLSe playback
      _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
      _this.keyCache_ = {};

      // Fmp4 CaptionParser: only the main loader parses embedded captions
      if (_this.loaderType_ === 'main') {
        _this.captionParser_ = new captionParser();
      } else {
        _this.captionParser_ = null;
      }

      _this.decrypter_ = settings.decrypter;

      // Manages the tracking and generation of sync-points, mappings
      // between a time in the display time and a segment index within
      // a playlist
      _this.syncController_ = settings.syncController;
      _this.syncPoint_ = {
        segmentIndex: 0,
        time: 0
      };

      // re-emit the sync controller's updates as our own event
      _this.triggerSyncInfoUpdate_ = function () {
        return _this.trigger('syncinfoupdate');
      };
      _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);

      // a (re)opened MediaSource means the stream is no longer ended
      _this.mediaSource_.addEventListener('sourceopen', function () {
        return _this.ended_ = false;
      });

      // ...for determining the fetch location
      _this.fetchAtBuffer_ = false;

      _this.logger_ = logger('SegmentLoader[' + _this.loaderType_ + ']');

      // `state` wraps state_ so every transition is logged
      Object.defineProperty(_this, 'state', {
        get: function get$$1() {
          return this.state_;
        },
        set: function set$$1(newState) {
          if (newState !== this.state_) {
            this.logger_(this.state_ + ' -> ' + newState);
            this.state_ = newState;
          }
        }
      });
      return _this;
    }
20807
20808 /**
20809 * reset all of our media stats
20810 *
20811 * @private
20812 */
20813
20814
20815 createClass(SegmentLoader, [{
20816 key: 'resetStats_',
20817 value: function resetStats_() {
20818 this.mediaBytesTransferred = 0;
20819 this.mediaRequests = 0;
20820 this.mediaRequestsAborted = 0;
20821 this.mediaRequestsTimedout = 0;
20822 this.mediaRequestsErrored = 0;
20823 this.mediaTransferDuration = 0;
20824 this.mediaSecondsLoaded = 0;
20825 }
20826
20827 /**
20828 * dispose of the SegmentLoader and reset to the default state
20829 */
20830
20831 }, {
    key: 'dispose',
    value: function dispose() {
      // notify listeners before tearing anything down
      this.trigger('dispose');
      this.state = 'DISPOSED';
      // stop the buffer-check timer and abort any in-flight request
      this.pause();
      this.abort_();
      if (this.sourceUpdater_) {
        this.sourceUpdater_.dispose();
      }
      this.resetStats_();
      // captionParser_ is only created for the 'main' loader
      if (this.captionParser_) {
        this.captionParser_.reset();
      }

      if (this.checkBufferTimeout_) {
        window_1.clearTimeout(this.checkBufferTimeout_);
      }

      if (this.syncController_ && this.triggerSyncInfoUpdate_) {
        this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
      }

      // remove all event listeners registered on this loader
      this.off();
    }
20856
20857 /**
20858 * abort anything that is currently doing on with the SegmentLoader
20859 * and reset to a default state
20860 */
20861
20862 }, {
    key: 'abort',
    value: function abort() {
      // outside of WAITING there is no in-flight request; just drop any
      // pending segment bookkeeping
      if (this.state !== 'WAITING') {
        if (this.pendingSegment_) {
          this.pendingSegment_ = null;
        }
        return;
      }

      this.abort_();

      // We aborted the requests we were waiting on, so reset the loader's state to READY
      // since we are no longer "waiting" on any requests. XHR callback is not always run
      // when the request is aborted. This will prevent the loader from being stuck in the
      // WAITING state indefinitely.
      this.state = 'READY';

      // don't wait for buffer check timeouts to begin fetching the
      // next segment
      if (!this.paused()) {
        this.monitorBuffer_();
      }
    }
20886
20887 /**
20888 * abort all pending xhr requests and null any pending segements
20889 *
20890 * @private
20891 */
20892
20893 }, {
20894 key: 'abort_',
20895 value: function abort_() {
20896 if (this.pendingSegment_) {
20897 this.pendingSegment_.abortRequests();
20898 }
20899
20900 // clear out the segment being processed
20901 this.pendingSegment_ = null;
20902 }
20903
20904 /**
20905 * set an error on the segment loader and null out any pending segements
20906 *
20907 * @param {Error} error the error to set on the SegmentLoader
20908 * @return {Error} the error that was set or that is currently set
20909 */
20910
20911 }, {
    key: 'error',
    value: function error(_error) {
      // combined getter/setter: with an argument, record the new error
      if (typeof _error !== 'undefined') {
        this.error_ = _error;
      }

      // an errored loader has nothing in flight
      this.pendingSegment_ = null;
      return this.error_;
    }
20921 }, {
    key: 'endOfStream',
    value: function endOfStream() {
      // mark the stream ended, stop fetching, and notify listeners
      this.ended_ = true;
      this.pause();
      this.trigger('ended');
    }
20928
20929 /**
20930 * Indicates which time ranges are buffered
20931 *
20932 * @return {TimeRange}
20933 * TimeRange object representing the current buffered ranges
20934 */
20935
20936 }, {
20937 key: 'buffered_',
20938 value: function buffered_() {
20939 if (!this.sourceUpdater_) {
20940 return videojs.createTimeRanges();
20941 }
20942
20943 return this.sourceUpdater_.buffered();
20944 }
20945
20946 /**
20947 * Gets and sets init segment for the provided map
20948 *
20949 * @param {Object} map
20950 * The map object representing the init segment to get or set
20951 * @param {Boolean=} set
20952 * If true, the init segment for the provided map should be saved
20953 * @return {Object}
20954 * map object for desired init segment
20955 */
20956
20957 }, {
20958 key: 'initSegment',
20959 value: function initSegment(map) {
20960 var set$$1 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
20961
20962 if (!map) {
20963 return null;
20964 }
20965
20966 var id = initSegmentId(map);
20967 var storedMap = this.initSegments_[id];
20968
20969 if (set$$1 && !storedMap && map.bytes) {
20970 this.initSegments_[id] = storedMap = {
20971 resolvedUri: map.resolvedUri,
20972 byterange: map.byterange,
20973 bytes: map.bytes,
20974 timescales: map.timescales,
20975 videoTrackIds: map.videoTrackIds
20976 };
20977 }
20978
20979 return storedMap || map;
20980 }
20981
20982 /**
20983 * Gets and sets key for the provided key
20984 *
20985 * @param {Object} key
20986 * The key object representing the key to get or set
20987 * @param {Boolean=} set
20988 * If true, the key for the provided key should be saved
20989 * @return {Object}
20990 * Key object for desired key
20991 */
20992
20993 }, {
20994 key: 'segmentKey',
20995 value: function segmentKey(key) {
20996 var set$$1 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
20997
20998 if (!key) {
20999 return null;
21000 }
21001
21002 var id = segmentKeyId(key);
21003 var storedKey = this.keyCache_[id];
21004
21005 // TODO: We should use the HTTP Expires header to invalidate our cache per
21006 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
21007 if (this.cacheEncryptionKeys_ && set$$1 && !storedKey && key.bytes) {
21008 this.keyCache_[id] = storedKey = {
21009 resolvedUri: key.resolvedUri,
21010 bytes: key.bytes
21011 };
21012 }
21013
21014 var result = {
21015 resolvedUri: (storedKey || key).resolvedUri
21016 };
21017
21018 if (storedKey) {
21019 result.bytes = storedKey.bytes;
21020 }
21021
21022 return result;
21023 }
21024
21025 /**
21026 * Returns true if all configuration required for loading is present, otherwise false.
21027 *
21028 * @return {Boolean} True if the all configuration is ready for loading
21029 * @private
21030 */
21031
21032 }, {
    key: 'couldBeginLoading_',
    value: function couldBeginLoading_() {
      // requires a playlist, then either a source updater or (INIT state with
      // a known mimeType), and the loader must not be paused
      return this.playlist_ && (
      // the source updater is created when init_ is called, so either having a
      // source updater or being in the INIT state with a mimeType is enough
      // to say we have all the needed configuration to start loading.
      this.sourceUpdater_ || this.mimeType_ && this.state === 'INIT') && !this.paused();
    }
21041
21042 /**
21043 * load a playlist and start to fill the buffer
21044 */
21045
21046 }, {
    key: 'load',
    value: function load() {
      // un-pause (restarts the buffer-check timer)
      this.monitorBuffer_();

      // if we don't have a playlist yet, keep waiting for one to be
      // specified
      if (!this.playlist_) {
        return;
      }

      // not sure if this is the best place for this
      this.syncController_.setDateTimeMapping(this.playlist_);

      // if all the configuration is ready, initialize and begin loading
      if (this.state === 'INIT' && this.couldBeginLoading_()) {
        return this.init_();
      }

      // if we're in the middle of processing a segment already, don't
      // kick off an additional segment request
      if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
        return;
      }

      this.state = 'READY';
    }
21074
21075 /**
21076 * Once all the starting parameters have been specified, begin
21077 * operation. This method should only be invoked from the INIT
21078 * state.
21079 *
21080 * @private
21081 */
21082
21083 }, {
    key: 'init_',
    value: function init_() {
      this.state = 'READY';
      // the source updater manages appends to the underlying SourceBuffer
      this.sourceUpdater_ = new SourceUpdater(this.mediaSource_, this.mimeType_, this.loaderType_, this.sourceBufferEmitter_);
      this.resetEverything();
      return this.monitorBuffer_();
    }
21091
21092 /**
21093 * set a playlist on the segment loader
21094 *
21095 * @param {PlaylistLoader} media the playlist to set on the segment loader
21096 */
21097
21098 }, {
    key: 'playlist',
    value: function playlist(newPlaylist) {
      // options become the xhr options used for subsequent segment requests
      var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

      if (!newPlaylist) {
        return;
      }

      var oldPlaylist = this.playlist_;
      var segmentInfo = this.pendingSegment_;

      this.playlist_ = newPlaylist;
      this.xhrOptions_ = options;

      // when we haven't started playing yet, the start of a live playlist
      // is always our zero-time so force a sync update each time the playlist
      // is refreshed from the server
      //
      // Use the INIT state to determine if playback has started, as the playlist sync info
      // should be fixed once requests begin (as sync points are generated based on sync
      // info), but not before then.
      if (this.state === 'INIT') {
        newPlaylist.syncInfo = {
          mediaSequence: newPlaylist.mediaSequence,
          time: 0
        };
      }

      var oldId = null;

      if (oldPlaylist) {
        if (oldPlaylist.id) {
          oldId = oldPlaylist.id;
        } else if (oldPlaylist.uri) {
          oldId = oldPlaylist.uri;
        }
      }

      this.logger_('playlist update [' + oldId + ' => ' + (newPlaylist.id || newPlaylist.uri) + ']');

      // in VOD, this is always a rendition switch (or we updated our syncInfo above)
      // in LIVE, we always want to update with new playlists (including refreshes)
      this.trigger('syncinfoupdate');

      // if we were unpaused but waiting for a playlist, start
      // buffering now
      if (this.state === 'INIT' && this.couldBeginLoading_()) {
        return this.init_();
      }

      if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
        if (this.mediaIndex !== null) {
          // we must "resync" the segment loader when we switch renditions and
          // the segment loader is already synced to the previous rendition
          this.resyncLoader();
        }

        // the rest of this function depends on `oldPlaylist` being defined
        return;
      }

      // we reloaded the same playlist so we are in a live scenario
      // and we will likely need to adjust the mediaIndex
      var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;

      this.logger_('live window shift [' + mediaSequenceDiff + ']');

      // update the mediaIndex on the SegmentLoader
      // this is important because we can abort a request and this value must be
      // equal to the last appended mediaIndex
      if (this.mediaIndex !== null) {
        this.mediaIndex -= mediaSequenceDiff;
      }

      // update the mediaIndex on the SegmentInfo object
      // this is important because we will update this.mediaIndex with this value
      // in `handleUpdateEnd_` after the segment has been successfully appended
      if (segmentInfo) {
        segmentInfo.mediaIndex -= mediaSequenceDiff;

        // we need to update the referenced segment so that timing information is
        // saved for the new playlist's segment, however, if the segment fell off the
        // playlist, we can leave the old reference and just lose the timing info
        if (segmentInfo.mediaIndex >= 0) {
          segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
        }
      }

      // preserve timing info for segments that slid out of the live window
      this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
    }
21189
21190 /**
21191 * Prevent the loader from fetching additional segments. If there
21192 * is a segment request outstanding, it will finish processing
21193 * before the loader halts. A segment loader can be unpaused by
21194 * calling load().
21195 */
21196
21197 }, {
21198 key: 'pause',
21199 value: function pause() {
21200 if (this.checkBufferTimeout_) {
21201 window_1.clearTimeout(this.checkBufferTimeout_);
21202
21203 this.checkBufferTimeout_ = null;
21204 }
21205 }
21206
21207 /**
21208 * Returns whether the segment loader is fetching additional
21209 * segments when given the opportunity. This property can be
21210 * modified through calls to pause() and load().
21211 */
21212
21213 }, {
21214 key: 'paused',
21215 value: function paused() {
21216 return this.checkBufferTimeout_ === null;
21217 }
21218
21219 /**
21220 * create/set the following mimetype on the SourceBuffer through a
21221 * SourceUpdater
21222 *
21223 * @param {String} mimeType the mime type string to use
21224 * @param {Object} sourceBufferEmitter an event emitter that fires when a source buffer
21225 * is added to the media source
21226 */
21227
21228 }, {
21229 key: 'mimeType',
21230 value: function mimeType(_mimeType, sourceBufferEmitter) {
21231 if (this.mimeType_) {
21232 return;
21233 }
21234
21235 this.mimeType_ = _mimeType;
21236 this.sourceBufferEmitter_ = sourceBufferEmitter;
21237 // if we were unpaused but waiting for a sourceUpdater, start
21238 // buffering now
21239 if (this.state === 'INIT' && this.couldBeginLoading_()) {
21240 this.init_();
21241 }
21242 }
21243
21244 /**
21245 * Delete all the buffered data and reset the SegmentLoader
21246 * @param {Function} [done] an optional callback to be executed when the remove
21247 * operation is complete
21248 */
21249
21250 }, {
21251 key: 'resetEverything',
21252 value: function resetEverything(done) {
21253 this.ended_ = false;
21254 this.resetLoader();
21255
21256 // remove from 0, the earliest point, to Infinity, to signify removal of everything.
21257 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
21258 // we then clamp the value to duration if necessary.
21259 this.remove(0, Infinity, done);
21260
21261 // clears fmp4 captions
21262 if (this.captionParser_) {
21263 this.captionParser_.clearAllCaptions();
21264 }
21265 this.trigger('reseteverything');
21266 }
21267
21268 /**
21269 * Force the SegmentLoader to resync and start loading around the currentTime instead
21270 * of starting at the end of the buffer
21271 *
21272 * Useful for fast quality changes
21273 */
21274
21275 }, {
21276 key: 'resetLoader',
21277 value: function resetLoader() {
21278 this.fetchAtBuffer_ = false;
21279 this.resyncLoader();
21280 }
21281
21282 /**
21283 * Force the SegmentLoader to restart synchronization and make a conservative guess
21284 * before returning to the simple walk-forward method
21285 */
21286
21287 }, {
21288 key: 'resyncLoader',
21289 value: function resyncLoader() {
21290 this.mediaIndex = null;
21291 this.syncPoint_ = null;
21292 this.abort();
21293 }
21294
21295 /**
21296 * Remove any data in the source buffer between start and end times
21297 * @param {Number} start - the start time of the region to remove from the buffer
21298 * @param {Number} end - the end time of the region to remove from the buffer
21299 * @param {Function} [done] - an optional callback to be executed when the remove
21300 * operation is complete
21301 */
21302
21303 }, {
21304 key: 'remove',
21305 value: function remove(start, end, done) {
21306 // clamp end to duration if we need to remove everything.
21307 // This is due to a browser bug that causes issues if we remove to Infinity.
21308 // videojs/videojs-contrib-hls#1225
21309 if (end === Infinity) {
21310 end = this.duration_();
21311 }
21312
21313 if (this.sourceUpdater_) {
21314 this.sourceUpdater_.remove(start, end, done);
21315 }
21316 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
21317
21318 if (this.inbandTextTracks_) {
21319 for (var id in this.inbandTextTracks_) {
21320 removeCuesFromTrack(start, end, this.inbandTextTracks_[id]);
21321 }
21322 }
21323 }
21324
21325 /**
21326 * (re-)schedule monitorBufferTick_ to run as soon as possible
21327 *
21328 * @private
21329 */
21330
21331 }, {
21332 key: 'monitorBuffer_',
21333 value: function monitorBuffer_() {
21334 if (this.checkBufferTimeout_) {
21335 window_1.clearTimeout(this.checkBufferTimeout_);
21336 }
21337
21338 this.checkBufferTimeout_ = window_1.setTimeout(this.monitorBufferTick_.bind(this), 1);
21339 }
21340
21341 /**
21342 * As long as the SegmentLoader is in the READY state, periodically
21343 * invoke fillBuffer_().
21344 *
21345 * @private
21346 */
21347
21348 }, {
21349 key: 'monitorBufferTick_',
21350 value: function monitorBufferTick_() {
21351 if (this.state === 'READY') {
21352 this.fillBuffer_();
21353 }
21354
21355 if (this.checkBufferTimeout_) {
21356 window_1.clearTimeout(this.checkBufferTimeout_);
21357 }
21358
21359 this.checkBufferTimeout_ = window_1.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
21360 }
21361
21362 /**
21363 * fill the buffer with segements unless the sourceBuffers are
21364 * currently updating
21365 *
21366 * Note: this function should only ever be called by monitorBuffer_
21367 * and never directly
21368 *
21369 * @private
21370 */
21371
}, {
  key: 'fillBuffer_',
  value: function fillBuffer_() {
    // appends are serialized through the source updater; wait until the
    // current append finishes before deciding what to load next
    if (this.sourceUpdater_.updating()) {
      return;
    }

    // lazily (re)compute a sync point; resyncLoader() nulls this out
    if (!this.syncPoint_) {
      this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
    }

    // see if we need to begin loading immediately
    var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);

    // nothing to load right now (e.g. enough content is already buffered)
    if (!segmentInfo) {
      return;
    }

    if (this.isEndOfStream_(segmentInfo.mediaIndex)) {
      this.endOfStream();
      return;
    }

    // don't re-request the final segment once the media source has already
    // ended, unless the user is actively seeking
    if (segmentInfo.mediaIndex === this.playlist_.segments.length - 1 && this.mediaSource_.readyState === 'ended' && !this.seeking_()) {
      return;
    }

    // We will need to change timestampOffset of the sourceBuffer if:
    // - The segment.timeline !== this.currentTimeline
    //   (we are crossing a discontinuity somehow)
    // - The "timestampOffset" for the start of this segment is less than
    //   the currently set timestampOffset
    // Also, clear captions if we are crossing a discontinuity boundary
    // Previously, we changed the timestampOffset if the start of this segment
    // is less than the currently set timestampOffset but this isn't wanted
    // as it can produce bad behavior, especially around long running
    // live streams
    if (segmentInfo.timeline !== this.currentTimeline_) {
      this.syncController_.reset();
      segmentInfo.timestampOffset = segmentInfo.startOfSegment;
      if (this.captionParser_) {
        this.captionParser_.clearAllCaptions();
      }
    }

    this.loadSegment_(segmentInfo);
  }
21419
21420 /**
21421 * Determines if this segment loader is at the end of it's stream.
21422 *
21423 * @param {Number} mediaIndex the index of segment we last appended
21424 * @param {Object} [playlist=this.playlist_] a media playlist object
21425 * @returns {Boolean} true if at end of stream, false otherwise.
21426 */
21427
21428 }, {
21429 key: 'isEndOfStream_',
21430 value: function isEndOfStream_(mediaIndex) {
21431 var playlist = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : this.playlist_;
21432
21433 return detectEndOfStream(playlist, this.mediaSource_, mediaIndex) && !this.sourceUpdater_.updating();
21434 }
21435
21436 /**
21437 * Determines what segment request should be made, given current playback
21438 * state.
21439 *
21440 * @param {TimeRanges} buffered - the state of the buffer
21441 * @param {Object} playlist - the playlist object to fetch segments from
21442 * @param {Number} mediaIndex - the previous mediaIndex fetched or null
21443 * @param {Boolean} hasPlayed - a flag indicating whether we have played or not
21444 * @param {Number} currentTime - the playback position in seconds
21445 * @param {Object} syncPoint - a segment info object that describes the
21446 * @returns {Object} a segment request object that describes the segment to load
21447 */
21448
21449 }, {
21450 key: 'checkBuffer_',
21451 value: function checkBuffer_(buffered, playlist, mediaIndex, hasPlayed, currentTime, syncPoint) {
21452 var lastBufferedEnd = 0;
21453 var startOfSegment = void 0;
21454
21455 if (buffered.length) {
21456 lastBufferedEnd = buffered.end(buffered.length - 1);
21457 }
21458
21459 var bufferedTime = Math.max(0, lastBufferedEnd - currentTime);
21460
21461 if (!playlist.segments.length) {
21462 return null;
21463 }
21464
21465 // if there is plenty of content buffered, and the video has
21466 // been played before relax for awhile
21467 if (bufferedTime >= this.goalBufferLength_()) {
21468 return null;
21469 }
21470
21471 // if the video has not yet played once, and we already have
21472 // one segment downloaded do nothing
21473 if (!hasPlayed && bufferedTime >= 1) {
21474 return null;
21475 }
21476
21477 // When the syncPoint is null, there is no way of determining a good
21478 // conservative segment index to fetch from
21479 // The best thing to do here is to get the kind of sync-point data by
21480 // making a request
21481 if (syncPoint === null) {
21482 mediaIndex = this.getSyncSegmentCandidate_(playlist);
21483 return this.generateSegmentInfo_(playlist, mediaIndex, null, true);
21484 }
21485
21486 // Under normal playback conditions fetching is a simple walk forward
21487 if (mediaIndex !== null) {
21488 var segment = playlist.segments[mediaIndex];
21489
21490 startOfSegment = lastBufferedEnd;
21491
21492 return this.generateSegmentInfo_(playlist, mediaIndex + 1, startOfSegment, false);
21493 }
21494
21495 // There is a sync-point but the lack of a mediaIndex indicates that
21496 // we need to make a good conservative guess about which segment to
21497 // fetch
21498 if (this.fetchAtBuffer_) {
21499 // Find the segment containing the end of the buffer
21500 var mediaSourceInfo = Playlist.getMediaInfoForTime(playlist, lastBufferedEnd, syncPoint.segmentIndex, syncPoint.time);
21501
21502 mediaIndex = mediaSourceInfo.mediaIndex;
21503 startOfSegment = mediaSourceInfo.startTime;
21504 } else {
21505 // Find the segment containing currentTime
21506 var _mediaSourceInfo = Playlist.getMediaInfoForTime(playlist, currentTime, syncPoint.segmentIndex, syncPoint.time);
21507
21508 mediaIndex = _mediaSourceInfo.mediaIndex;
21509 startOfSegment = _mediaSourceInfo.startTime;
21510 }
21511
21512 return this.generateSegmentInfo_(playlist, mediaIndex, startOfSegment, false);
21513 }
21514
21515 /**
21516 * The segment loader has no recourse except to fetch a segment in the
21517 * current playlist and use the internal timestamps in that segment to
21518 * generate a syncPoint. This function returns a good candidate index
21519 * for that process.
21520 *
21521 * @param {Object} playlist - the playlist object to look for a
21522 * @returns {Number} An index of a segment from the playlist to load
21523 */
21524
21525 }, {
21526 key: 'getSyncSegmentCandidate_',
21527 value: function getSyncSegmentCandidate_(playlist) {
21528 var _this2 = this;
21529
21530 if (this.currentTimeline_ === -1) {
21531 return 0;
21532 }
21533
21534 var segmentIndexArray = playlist.segments.map(function (s, i) {
21535 return {
21536 timeline: s.timeline,
21537 segmentIndex: i
21538 };
21539 }).filter(function (s) {
21540 return s.timeline === _this2.currentTimeline_;
21541 });
21542
21543 if (segmentIndexArray.length) {
21544 return segmentIndexArray[Math.min(segmentIndexArray.length - 1, 1)].segmentIndex;
21545 }
21546
21547 return Math.max(playlist.segments.length - 1, 0);
21548 }
21549 }, {
21550 key: 'generateSegmentInfo_',
21551 value: function generateSegmentInfo_(playlist, mediaIndex, startOfSegment, isSyncRequest) {
21552 if (mediaIndex < 0 || mediaIndex >= playlist.segments.length) {
21553 return null;
21554 }
21555
21556 var segment = playlist.segments[mediaIndex];
21557
21558 return {
21559 requestId: 'segment-loader-' + Math.random(),
21560 // resolve the segment URL relative to the playlist
21561 uri: segment.resolvedUri,
21562 // the segment's mediaIndex at the time it was requested
21563 mediaIndex: mediaIndex,
21564 // whether or not to update the SegmentLoader's state with this
21565 // segment's mediaIndex
21566 isSyncRequest: isSyncRequest,
21567 startOfSegment: startOfSegment,
21568 // the segment's playlist
21569 playlist: playlist,
21570 // unencrypted bytes of the segment
21571 bytes: null,
21572 // when a key is defined for this segment, the encrypted bytes
21573 encryptedBytes: null,
21574 // The target timestampOffset for this segment when we append it
21575 // to the source buffer
21576 timestampOffset: null,
21577 // The timeline that the segment is in
21578 timeline: segment.timeline,
21579 // The expected duration of the segment in seconds
21580 duration: segment.duration,
21581 // retain the segment in case the playlist updates while doing an async process
21582 segment: segment
21583 };
21584 }
21585
21586 /**
21587 * Determines if the network has enough bandwidth to complete the current segment
21588 * request in a timely manner. If not, the request will be aborted early and bandwidth
21589 * updated to trigger a playlist switch.
21590 *
21591 * @param {Object} stats
21592 * Object containing stats about the request timing and size
21593 * @return {Boolean} True if the request was aborted, false otherwise
21594 * @private
21595 */
21596
21597 }, {
21598 key: 'abortRequestEarly_',
21599 value: function abortRequestEarly_(stats) {
21600 if (this.hls_.tech_.paused() ||
21601 // Don't abort if the current playlist is on the lowestEnabledRendition
21602 // TODO: Replace using timeout with a boolean indicating whether this playlist is
21603 // the lowestEnabledRendition.
21604 !this.xhrOptions_.timeout ||
21605 // Don't abort if we have no bandwidth information to estimate segment sizes
21606 !this.playlist_.attributes.BANDWIDTH) {
21607 return false;
21608 }
21609
21610 // Wait at least 1 second since the first byte of data has been received before
21611 // using the calculated bandwidth from the progress event to allow the bitrate
21612 // to stabilize
21613 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
21614 return false;
21615 }
21616
21617 var currentTime = this.currentTime_();
21618 var measuredBandwidth = stats.bandwidth;
21619 var segmentDuration = this.pendingSegment_.duration;
21620
21621 var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived);
21622
21623 // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
21624 // if we are only left with less than 1 second when the request completes.
21625 // A negative timeUntilRebuffering indicates we are already rebuffering
21626 var timeUntilRebuffer$$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.hls_.tech_.playbackRate()) - 1;
21627
21628 // Only consider aborting early if the estimated time to finish the download
21629 // is larger than the estimated time until the player runs out of forward buffer
21630 if (requestTimeRemaining <= timeUntilRebuffer$$1) {
21631 return false;
21632 }
21633
21634 var switchCandidate = minRebufferMaxBandwidthSelector({
21635 master: this.hls_.playlists.master,
21636 currentTime: currentTime,
21637 bandwidth: measuredBandwidth,
21638 duration: this.duration_(),
21639 segmentDuration: segmentDuration,
21640 timeUntilRebuffer: timeUntilRebuffer$$1,
21641 currentTimeline: this.currentTimeline_,
21642 syncController: this.syncController_
21643 });
21644
21645 if (!switchCandidate) {
21646 return;
21647 }
21648
21649 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$$1;
21650
21651 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
21652
21653 var minimumTimeSaving = 0.5;
21654
21655 // If we are already rebuffering, increase the amount of variance we add to the
21656 // potential round trip time of the new request so that we are not too aggressive
21657 // with switching to a playlist that might save us a fraction of a second.
21658 if (timeUntilRebuffer$$1 <= TIME_FUDGE_FACTOR) {
21659 minimumTimeSaving = 1;
21660 }
21661
21662 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
21663 return false;
21664 }
21665
21666 // set the bandwidth to that of the desired playlist being sure to scale by
21667 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
21668 // don't trigger a bandwidthupdate as the bandwidth is artifial
21669 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
21670 this.abort();
21671 this.trigger('earlyabort');
21672 return true;
21673 }
21674
21675 /**
21676 * XHR `progress` event handler
21677 *
21678 * @param {Event}
21679 * The XHR `progress` event
21680 * @param {Object} simpleSegment
21681 * A simplified segment object copy
21682 * @private
21683 */
21684
21685 }, {
21686 key: 'handleProgress_',
21687 value: function handleProgress_(event, simpleSegment) {
21688 if (!this.pendingSegment_ || simpleSegment.requestId !== this.pendingSegment_.requestId || this.abortRequestEarly_(simpleSegment.stats)) {
21689 return;
21690 }
21691
21692 this.trigger('progress');
21693 }
21694
21695 /**
21696 * load a specific segment from a request into the buffer
21697 *
21698 * @private
21699 */
21700
21701 }, {
21702 key: 'loadSegment_',
21703 value: function loadSegment_(segmentInfo) {
21704 this.state = 'WAITING';
21705 this.pendingSegment_ = segmentInfo;
21706 this.trimBackBuffer_(segmentInfo);
21707
21708 segmentInfo.abortRequests = mediaSegmentRequest(this.hls_.xhr, this.xhrOptions_, this.decrypter_, this.captionParser_, this.createSimplifiedSegmentObj_(segmentInfo),
21709 // progress callback
21710 this.handleProgress_.bind(this), this.segmentRequestFinished_.bind(this));
21711 }
21712
21713 /**
21714 * trim the back buffer so that we don't have too much data
21715 * in the source buffer
21716 *
21717 * @private
21718 *
21719 * @param {Object} segmentInfo - the current segment
21720 */
21721
21722 }, {
21723 key: 'trimBackBuffer_',
21724 value: function trimBackBuffer_(segmentInfo) {
21725 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10);
21726
21727 // Chrome has a hard limit of 150MB of
21728 // buffer and a very conservative "garbage collector"
21729 // We manually clear out the old buffer to ensure
21730 // we don't trigger the QuotaExceeded error
21731 // on the source buffer during subsequent appends
21732
21733 if (removeToTime > 0) {
21734 this.remove(0, removeToTime);
21735 }
21736 }
21737
21738 /**
21739 * created a simplified copy of the segment object with just the
21740 * information necessary to perform the XHR and decryption
21741 *
21742 * @private
21743 *
21744 * @param {Object} segmentInfo - the current segment
21745 * @returns {Object} a simplified segment object copy
21746 */
21747
21748 }, {
21749 key: 'createSimplifiedSegmentObj_',
21750 value: function createSimplifiedSegmentObj_(segmentInfo) {
21751 var segment = segmentInfo.segment;
21752 var simpleSegment = {
21753 resolvedUri: segment.resolvedUri,
21754 byterange: segment.byterange,
21755 requestId: segmentInfo.requestId
21756 };
21757
21758 if (segment.key) {
21759 // if the media sequence is greater than 2^32, the IV will be incorrect
21760 // assuming 10s segments, that would be about 1300 years
21761 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
21762
21763 simpleSegment.key = this.segmentKey(segment.key);
21764 simpleSegment.key.iv = iv;
21765 }
21766
21767 if (segment.map) {
21768 simpleSegment.map = this.initSegment(segment.map);
21769 }
21770
21771 return simpleSegment;
21772 }
21773
21774 /**
21775 * Handle the callback from the segmentRequest function and set the
21776 * associated SegmentLoader state and errors if necessary
21777 *
21778 * @private
21779 */
21780
}, {
  key: 'segmentRequestFinished_',
  value: function segmentRequestFinished_(error, simpleSegment) {
    // every request counts as a media request even if it has been aborted
    // or canceled due to a timeout
    this.mediaRequests += 1;

    // accumulate transfer stats for bandwidth/throughput reporting
    if (simpleSegment.stats) {
      this.mediaBytesTransferred += simpleSegment.stats.bytesReceived;
      this.mediaTransferDuration += simpleSegment.stats.roundTripTime;
    }

    // The request was aborted and the SegmentLoader has already been reset
    if (!this.pendingSegment_) {
      this.mediaRequestsAborted += 1;
      return;
    }

    // the request was aborted and the SegmentLoader has already started
    // another request. this can happen when the timeout for an aborted
    // request triggers due to a limitation in the XHR library
    // do not count this as any sort of request or we risk double-counting
    if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
      return;
    }

    // an error occurred from the active pendingSegment_ so reset everything
    if (error) {
      this.pendingSegment_ = null;
      this.state = 'READY';

      // the requests were aborted just record the aborted stat and exit
      // this is not a true error condition and nothing corrective needs
      // to be done
      if (error.code === REQUEST_ERRORS.ABORTED) {
        this.mediaRequestsAborted += 1;
        return;
      }

      // stop the buffer-check loop while the failure is handled
      this.pause();

      // the error is really just that at least one of the requests timed-out
      // set the bandwidth to a very low value and trigger an ABR switch to
      // take emergency action
      if (error.code === REQUEST_ERRORS.TIMEOUT) {
        this.mediaRequestsTimedout += 1;
        this.bandwidth = 1;
        this.roundTrip = NaN;
        this.trigger('bandwidthupdate');
        return;
      }

      // if control-flow has arrived here, then the error is real
      // emit an error event to blacklist the current playlist
      this.mediaRequestsErrored += 1;
      this.error(error);
      this.trigger('error');
      return;
    }

    // the response was a success so set any bandwidth stats the request
    // generated for ABR purposes
    this.bandwidth = simpleSegment.stats.bandwidth;
    this.roundTrip = simpleSegment.stats.roundTripTime;

    // if this request included an initialization segment, save that data
    // to the initSegment cache
    if (simpleSegment.map) {
      simpleSegment.map = this.initSegment(simpleSegment.map, true);
    }

    // if this request included a segment key, save that data in the cache
    if (simpleSegment.key) {
      this.segmentKey(simpleSegment.key, true);
    }

    this.processSegmentResponse_(simpleSegment);
  }
21859
21860 /**
21861 * Move any important data from the simplified segment object
21862 * back to the real segment object for future phases
21863 *
21864 * @private
21865 */
21866
21867 }, {
21868 key: 'processSegmentResponse_',
21869 value: function processSegmentResponse_(simpleSegment) {
21870 var segmentInfo = this.pendingSegment_;
21871
21872 segmentInfo.bytes = simpleSegment.bytes;
21873 if (simpleSegment.map) {
21874 segmentInfo.segment.map.bytes = simpleSegment.map.bytes;
21875 }
21876
21877 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
21878
21879 // This has fmp4 captions, add them to text tracks
21880 if (simpleSegment.fmp4Captions) {
21881 createCaptionsTrackIfNotExists(this.inbandTextTracks_, this.hls_.tech_, simpleSegment.captionStreams);
21882 addCaptionData({
21883 inbandTextTracks: this.inbandTextTracks_,
21884 captionArray: simpleSegment.fmp4Captions,
21885 // fmp4s will not have a timestamp offset
21886 timestampOffset: 0
21887 });
21888 // Reset stored captions since we added parsed
21889 // captions to a text track at this point
21890 if (this.captionParser_) {
21891 this.captionParser_.clearParsedCaptions();
21892 }
21893 }
21894
21895 this.handleSegment_();
21896 }
21897
21898 /**
21899 * append a decrypted segement to the SourceBuffer through a SourceUpdater
21900 *
21901 * @private
21902 */
21903
}, {
  key: 'handleSegment_',
  value: function handleSegment_() {
    var _this3 = this;

    // the request may have been aborted and loader state reset while the
    // bytes were being processed
    if (!this.pendingSegment_) {
      this.state = 'READY';
      return;
    }

    var segmentInfo = this.pendingSegment_;
    var segment = segmentInfo.segment;
    // probe the downloaded bytes for timestamps and stream composition
    var timingInfo = this.syncController_.probeSegmentInfo(segmentInfo);

    // When we have our first timing info, determine what media types this loader is
    // dealing with. Although we're maintaining extra state, it helps to preserve the
    // separation of segment loader from the actual source buffers.
    if (typeof this.startingMedia_ === 'undefined' && timingInfo && (
    // Guard against cases where we're not getting timing info at all until we are
    // certain that all streams will provide it.
    timingInfo.containsAudio || timingInfo.containsVideo)) {
      this.startingMedia_ = {
        containsAudio: timingInfo.containsAudio,
        containsVideo: timingInfo.containsVideo
      };
    }

    // a rendition switch must not change the set of media types this
    // loader is feeding to its source buffer
    var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.startingMedia_, timingInfo);

    if (illegalMediaSwitchError) {
      this.error({
        message: illegalMediaSwitchError,
        blacklistDuration: Infinity
      });
      this.trigger('error');
      return;
    }

    // sync requests exist only to generate timing info; never append them
    if (segmentInfo.isSyncRequest) {
      this.trigger('syncinfoupdate');
      this.pendingSegment_ = null;
      this.state = 'READY';
      return;
    }

    if (segmentInfo.timestampOffset !== null && segmentInfo.timestampOffset !== this.sourceUpdater_.timestampOffset()) {

      // Subtract any difference between the PTS and DTS times of the first frame
      // from the timeStampOffset (which currently equals the buffered.end) to prevent
      // creating any gaps in the buffer
      if (timingInfo && timingInfo.segmentTimestampInfo) {
        var ptsStartTime = timingInfo.segmentTimestampInfo[0].ptsTime;
        var dtsStartTime = timingInfo.segmentTimestampInfo[0].dtsTime;

        segmentInfo.timestampOffset -= ptsStartTime - dtsStartTime;
      }

      this.sourceUpdater_.timestampOffset(segmentInfo.timestampOffset);
      // fired when a timestamp offset is set in HLS (can also identify discontinuities)
      this.trigger('timestampoffset');
    }

    // announce the display-time mapping for this timeline, if one is known
    var timelineMapping = this.syncController_.mappingForTimeline(segmentInfo.timeline);

    if (timelineMapping !== null) {
      this.trigger({
        type: 'segmenttimemapping',
        mapping: timelineMapping
      });
    }

    this.state = 'APPENDING';

    // if the media initialization segment is changing, append it
    // before the content segment
    if (segment.map) {
      var initId = initSegmentId(segment.map);

      if (!this.activeInitSegmentId_ || this.activeInitSegmentId_ !== initId) {
        var initSegment = this.initSegment(segment.map);

        this.sourceUpdater_.appendBuffer({
          bytes: initSegment.bytes
        }, function () {
          // only mark this init segment active once its append succeeds
          _this3.activeInitSegmentId_ = initId;
        });
      }
    }

    segmentInfo.byteLength = segmentInfo.bytes.byteLength;
    // prefer probed start/end times for the loaded-seconds stat; fall
    // back to the playlist's advertised duration
    if (typeof segment.start === 'number' && typeof segment.end === 'number') {
      this.mediaSecondsLoaded += segment.end - segment.start;
    } else {
      this.mediaSecondsLoaded += segment.duration;
    }

    this.logger_(segmentInfoString(segmentInfo));

    // append the media bytes; handleUpdateEnd_ advances loader state when
    // the source buffer finishes updating
    this.sourceUpdater_.appendBuffer({
      bytes: segmentInfo.bytes,
      videoSegmentTimingInfoCallback: this.handleVideoSegmentTimingInfo_.bind(this, segmentInfo.requestId)
    }, this.handleUpdateEnd_.bind(this));
  }
22007 }, {
22008 key: 'handleVideoSegmentTimingInfo_',
22009 value: function handleVideoSegmentTimingInfo_(requestId, event) {
22010 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
22011 return;
22012 }
22013
22014 var segment = this.pendingSegment_.segment;
22015
22016 if (!segment.videoTimingInfo) {
22017 segment.videoTimingInfo = {};
22018 }
22019
22020 segment.videoTimingInfo.transmuxerPrependedSeconds = event.videoSegmentTimingInfo.prependedContentDuration || 0;
22021 segment.videoTimingInfo.transmuxedPresentationStart = event.videoSegmentTimingInfo.start.presentation;
22022 segment.videoTimingInfo.transmuxedPresentationEnd = event.videoSegmentTimingInfo.end.presentation;
22023 // mainly used as a reference for debugging
22024 segment.videoTimingInfo.baseMediaDecodeTime = event.videoSegmentTimingInfo.baseMediaDecodeTime;
22025 }
22026
22027 /**
22028 * callback to run when appendBuffer is finished. detects if we are
22029 * in a good state to do things with the data we got, or if we need
22030 * to wait for more
22031 *
22032 * @private
22033 */
22034
}, {
  key: 'handleUpdateEnd_',
  value: function handleUpdateEnd_() {
    // the pending segment may have been cleared by an abort mid-append
    if (!this.pendingSegment_) {
      this.state = 'READY';
      if (!this.paused()) {
        this.monitorBuffer_();
      }
      return;
    }

    var segmentInfo = this.pendingSegment_;
    var segment = segmentInfo.segment;
    // a non-null mediaIndex means this append continued a walk-forward
    // fetch rather than a sync-point guess
    var isWalkingForward = this.mediaIndex !== null;

    this.pendingSegment_ = null;
    this.recordThroughput_(segmentInfo);
    this.addSegmentMetadataCue_(segmentInfo);

    this.state = 'READY';

    this.mediaIndex = segmentInfo.mediaIndex;
    this.fetchAtBuffer_ = true;
    this.currentTimeline_ = segmentInfo.timeline;

    // We must update the syncinfo to recalculate the seekable range before
    // the following conditional otherwise it may consider this a bad "guess"
    // and attempt to resync when the post-update seekable window and live
    // point would mean that this was the perfect segment to fetch
    this.trigger('syncinfoupdate');

    // If we previously appended a segment that ends more than 3 targetDurations before
    // the currentTime_ that means that our conservative guess was too conservative.
    // In that case, reset the loader state so that we try to use any information gained
    // from the previous request to create a new, more accurate, sync-point.
    if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
      this.resetEverything();
      return;
    }

    // Don't do a rendition switch unless we have enough time to get a sync segment
    // and conservatively guess
    if (isWalkingForward) {
      this.trigger('bandwidthupdate');
    }
    this.trigger('progress');

    // any time an update finishes and the last segment is in the
    // buffer, end the stream. this ensures the "ended" event will
    // fire if playback reaches that point.
    if (this.isEndOfStream_(segmentInfo.mediaIndex + 1, segmentInfo.playlist)) {
      this.endOfStream();
    }

    if (!this.paused()) {
      this.monitorBuffer_();
    }
  }
22093
22094 /**
22095 * Records the current throughput of the decrypt, transmux, and append
     * portion of the segment pipeline. `throughput.rate` is the cumulative
22097 * moving average of the throughput. `throughput.count` is the number of
22098 * data points in the average.
22099 *
22100 * @private
22101 * @param {Object} segmentInfo the object returned by loadSegment
22102 */
22103
22104 }, {
22105 key: 'recordThroughput_',
22106 value: function recordThroughput_(segmentInfo) {
22107 var rate = this.throughput.rate;
22108 // Add one to the time to ensure that we don't accidentally attempt to divide
22109 // by zero in the case where the throughput is ridiculously high
22110 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1;
22111 // Multiply by 8000 to convert from bytes/millisecond to bits/second
22112 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000);
22113
22114 // This is just a cumulative moving average calculation:
22115 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
22116 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
22117 }
22118
22119 /**
22120 * Adds a cue to the segment-metadata track with some metadata information about the
22121 * segment
22122 *
22123 * @private
22124 * @param {Object} segmentInfo
22125 * the object returned by loadSegment
22126 * @method addSegmentMetadataCue_
22127 */
22128
22129 }, {
22130 key: 'addSegmentMetadataCue_',
22131 value: function addSegmentMetadataCue_(segmentInfo) {
22132 if (!this.segmentMetadataTrack_) {
22133 return;
22134 }
22135
22136 var segment = segmentInfo.segment;
22137 var start = segment.start;
22138 var end = segment.end;
22139
22140 // Do not try adding the cue if the start and end times are invalid.
22141 if (!finite(start) || !finite(end)) {
22142 return;
22143 }
22144
22145 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
22146
22147 var Cue = window_1.WebKitDataCue || window_1.VTTCue;
22148 var value = {
22149 custom: segment.custom,
22150 dateTimeObject: segment.dateTimeObject,
22151 dateTimeString: segment.dateTimeString,
22152 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
22153 resolution: segmentInfo.playlist.attributes.RESOLUTION,
22154 codecs: segmentInfo.playlist.attributes.CODECS,
22155 byteLength: segmentInfo.byteLength,
22156 uri: segmentInfo.uri,
22157 timeline: segmentInfo.timeline,
22158 playlist: segmentInfo.playlist.id,
22159 start: start,
22160 end: end
22161 };
22162 var data = JSON.stringify(value);
22163 var cue = new Cue(start, end, data);
22164
22165 // Attach the metadata to the value property of the cue to keep consistency between
22166 // the differences of WebKitDataCue in safari and VTTCue in other browsers
22167 cue.value = value;
22168
22169 this.segmentMetadataTrack_.addCue(cue);
22170 }
22171 }]);
22172 return SegmentLoader;
22173 }(videojs.EventTarget);
22174
22175 var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
22176 return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
22177 };
22178
22179 /**
22180 * @file vtt-segment-loader.js
22181 */
22182
22183 var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
22184 return char.charCodeAt(0);
22185 }));
22186
22187 /**
22188 * An object that manages segment loading and appending.
22189 *
22190 * @class VTTSegmentLoader
22191 * @param {Object} options required and optional options
22192 * @extends videojs.EventTarget
22193 */
22194
  var VTTSegmentLoader = function (_SegmentLoader) {
    inherits(VTTSegmentLoader, _SegmentLoader);

    function VTTSegmentLoader(settings) {
      var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
      classCallCheck(this, VTTSegmentLoader);

      // SegmentLoader requires a MediaSource be specified or it will throw an error;
      // however, VTTSegmentLoader has no need of a media source, so delete the reference
      var _this = possibleConstructorReturn(this, (VTTSegmentLoader.__proto__ || Object.getPrototypeOf(VTTSegmentLoader)).call(this, settings, options));

      _this.mediaSource_ = null;

      // the TextTrack that parsed cues are appended to; set later via track()
      _this.subtitlesTrack_ = null;

      _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
      return _this;
    }

    /**
     * Indicates which time ranges are buffered
     *
     * @return {TimeRange}
     *         TimeRange object representing the current buffered ranges
     */

    createClass(VTTSegmentLoader, [{
      key: 'buffered_',
      value: function buffered_() {
        if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues.length) {
          return videojs.createTimeRanges();
        }

        var cues = this.subtitlesTrack_.cues;
        var start = cues[0].startTime;
        // NOTE(review): the buffered end is the last cue's *startTime*, not
        // its endTime -- presumably so loading continues until a cue that
        // begins beyond the current time exists; confirm before changing
        var end = cues[cues.length - 1].startTime;

        return videojs.createTimeRanges([[start, end]]);
      }

      /**
       * Gets and sets init segment for the provided map
       *
       * @param {Object} map
       *        The map object representing the init segment to get or set
       * @param {Boolean=} set
       *        If true, the init segment for the provided map should be saved
       * @return {Object}
       *         map object for desired init segment
       */

    }, {
      key: 'initSegment',
      value: function initSegment(map) {
        // `set` is renamed set$$1 by the bundler to avoid shadowing
        var set$$1 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;

        if (!map) {
          return null;
        }

        var id = initSegmentId(map);
        var storedMap = this.initSegments_[id];

        if (set$$1 && !storedMap && map.bytes) {
          // append WebVTT line terminators to the media initialization segment if it exists
          // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
          // requires two or more WebVTT line terminators between the WebVTT header and the
          // rest of the file
          var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
          var combinedSegment = new Uint8Array(combinedByteLength);

          combinedSegment.set(map.bytes);
          combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);

          this.initSegments_[id] = storedMap = {
            resolvedUri: map.resolvedUri,
            byterange: map.byterange,
            bytes: combinedSegment
          };
        }

        // fall back to the unmodified map when nothing was stored
        return storedMap || map;
      }

      /**
       * Returns true if all configuration required for loading is present, otherwise false.
       *
       * @return {Boolean} True if the all configuration is ready for loading
       * @private
       */

    }, {
      key: 'couldBeginLoading_',
      value: function couldBeginLoading_() {
        return this.playlist_ && this.subtitlesTrack_ && !this.paused();
      }

      /**
       * Once all the starting parameters have been specified, begin
       * operation. This method should only be invoked from the INIT
       * state.
       *
       * @private
       */

    }, {
      key: 'init_',
      value: function init_() {
        this.state = 'READY';
        this.resetEverything();
        return this.monitorBuffer_();
      }

      /**
       * Set a subtitle track on the segment loader to add subtitles to
       *
       * @param {TextTrack=} track
       *        The text track to add loaded subtitles to
       * @return {TextTrack}
       *        Returns the subtitles track
       */

    }, {
      key: 'track',
      value: function track(_track) {
        // with no argument, act as a getter
        if (typeof _track === 'undefined') {
          return this.subtitlesTrack_;
        }

        this.subtitlesTrack_ = _track;

        // if we were unpaused but waiting for a sourceUpdater, start
        // buffering now
        if (this.state === 'INIT' && this.couldBeginLoading_()) {
          this.init_();
        }

        return this.subtitlesTrack_;
      }

      /**
       * Remove any data in the source buffer between start and end times
       * @param {Number} start - the start time of the region to remove from the buffer
       * @param {Number} end - the end time of the region to remove from the buffer
       */

    }, {
      key: 'remove',
      value: function remove(start, end) {
        removeCuesFromTrack(start, end, this.subtitlesTrack_);
      }

      /**
       * fill the buffer with segments unless the sourceBuffers are
       * currently updating
       *
       * Note: this function should only ever be called by monitorBuffer_
       * and never directly
       *
       * @private
       */

    }, {
      key: 'fillBuffer_',
      value: function fillBuffer_() {
        var _this2 = this;

        if (!this.syncPoint_) {
          this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
        }

        // see if we need to begin loading immediately
        var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);

        // don't re-request segments already known to contain no cues
        segmentInfo = this.skipEmptySegments_(segmentInfo);

        if (!segmentInfo) {
          return;
        }

        if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
          // We don't have the timestamp offset that we need to sync subtitles.
          // Rerun on a timestamp offset or user interaction.
          var checkTimestampOffset = function checkTimestampOffset() {
            _this2.state = 'READY';
            if (!_this2.paused()) {
              // if not paused, queue a buffer check as soon as possible
              _this2.monitorBuffer_();
            }
          };

          this.syncController_.one('timestampoffset', checkTimestampOffset);
          this.state = 'WAITING_ON_TIMELINE';
          return;
        }

        this.loadSegment_(segmentInfo);
      }

      /**
       * Prevents the segment loader from requesting segments we know contain no subtitles
       * by walking forward until we find the next segment that we don't know whether it is
       * empty or not.
       *
       * @param {Object} segmentInfo
       *        a segment info object that describes the current segment
       * @return {Object}
       *         a segment info object that describes the current segment
       */

    }, {
      key: 'skipEmptySegments_',
      value: function skipEmptySegments_(segmentInfo) {
        // segment.empty is set by updateTimeMapping_ when a parsed segment
        // produced no cues
        while (segmentInfo && segmentInfo.segment.empty) {
          segmentInfo = this.generateSegmentInfo_(segmentInfo.playlist, segmentInfo.mediaIndex + 1, segmentInfo.startOfSegment + segmentInfo.duration, segmentInfo.isSyncRequest);
        }
        return segmentInfo;
      }

      /**
       * append a decrypted segment to the SourceBuffer through a SourceUpdater
       *
       * @private
       */

    }, {
      key: 'handleSegment_',
      value: function handleSegment_() {
        var _this3 = this;

        if (!this.pendingSegment_ || !this.subtitlesTrack_) {
          this.state = 'READY';
          return;
        }

        this.state = 'APPENDING';

        var segmentInfo = this.pendingSegment_;
        var segment = segmentInfo.segment;

        // Make sure that vttjs has loaded, otherwise, wait till it finished loading
        if (typeof window_1.WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {

          // loadHandler and errorHandler unregister each other so only one of
          // the two one-shot listeners below ever runs
          var loadHandler = void 0;
          var errorHandler = function errorHandler() {
            _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
            _this3.error({
              message: 'Error loading vtt.js'
            });
            _this3.state = 'READY';
            _this3.pause();
            _this3.trigger('error');
          };

          loadHandler = function loadHandler() {
            _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
            // re-enter this method now that WebVTT is available
            _this3.handleSegment_();
          };

          this.state = 'WAITING_ON_VTTJS';
          this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
          this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);

          return;
        }

        segment.requested = true;

        try {
          this.parseVTTCues_(segmentInfo);
        } catch (e) {
          // surface parse failures as loader errors and stop loading
          this.error({
            message: e.message
          });
          this.state = 'READY';
          this.pause();
          return this.trigger('error');
        }

        this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);

        if (segmentInfo.isSyncRequest) {
          // a sync request only exists to establish timing; don't append cues
          this.trigger('syncinfoupdate');
          this.pendingSegment_ = null;
          this.state = 'READY';
          return;
        }

        segmentInfo.byteLength = segmentInfo.bytes.byteLength;

        this.mediaSecondsLoaded += segment.duration;

        if (segmentInfo.cues.length) {
          // remove any overlapping cues to prevent doubling
          this.remove(segmentInfo.cues[0].endTime, segmentInfo.cues[segmentInfo.cues.length - 1].endTime);
        }

        segmentInfo.cues.forEach(function (cue) {
          // native text tracks need a fresh VTTCue; non-native tracks can
          // reuse the cue object produced by the parser
          _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window_1.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
        });

        this.handleUpdateEnd_();
      }

      /**
       * Uses the WebVTT parser to parse the segment response
       *
       * @param {Object} segmentInfo
       *        a segment info object that describes the current segment
       * @private
       */

    }, {
      key: 'parseVTTCues_',
      value: function parseVTTCues_(segmentInfo) {
        var decoder = void 0;
        var decodeBytesToString = false;

        if (typeof window_1.TextDecoder === 'function') {
          decoder = new window_1.TextDecoder('utf8');
        } else {
          // fall back to vtt.js's string decoder; it needs string input, so
          // the raw bytes must be converted first (see decodeBytesToString)
          decoder = window_1.WebVTT.StringDecoder();
          decodeBytesToString = true;
        }

        var parser = new window_1.WebVTT.Parser(window_1, window_1.vttjs, decoder);

        segmentInfo.cues = [];
        segmentInfo.timestampmap = { MPEGTS: 0, LOCAL: 0 };

        parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
        parser.ontimestampmap = function (map) {
          return segmentInfo.timestampmap = map;
        };
        parser.onparsingerror = function (error) {
          videojs.log.warn('Error encountered when parsing cues: ' + error.message);
        };

        if (segmentInfo.segment.map) {
          var mapData = segmentInfo.segment.map.bytes;

          if (decodeBytesToString) {
            mapData = uint8ToUtf8(mapData);
          }

          parser.parse(mapData);
        }

        var segmentData = segmentInfo.bytes;

        if (decodeBytesToString) {
          segmentData = uint8ToUtf8(segmentData);
        }

        parser.parse(segmentData);
        parser.flush();
      }

      /**
       * Updates the start and end times of any cues parsed by the WebVTT parser using
       * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
       * from the SyncController
       *
       * @param {Object} segmentInfo
       *        a segment info object that describes the current segment
       * @param {Object} mappingObj
       *        object containing a mapping from TS to media time
       * @param {Object} playlist
       *        the playlist object containing the segment
       * @private
       */

    }, {
      key: 'updateTimeMapping_',
      value: function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
        var segment = segmentInfo.segment;

        if (!mappingObj) {
          // If the sync controller does not have a mapping of TS to Media Time for the
          // timeline, then we don't have enough information to update the cue
          // start/end times
          return;
        }

        if (!segmentInfo.cues.length) {
          // If there are no cues, we also do not have enough information to figure out
          // segment timing. Mark that the segment contains no cues so we don't re-request
          // an empty segment.
          segment.empty = true;
          return;
        }

        var timestampmap = segmentInfo.timestampmap;
        // X-TIMESTAMP-MAP gives MPEGTS (90kHz clock) <-> LOCAL (cue time)
        // anchors; combine with the timeline's TS->media-time mapping to
        // shift cue times into player time
        var diff = timestampmap.MPEGTS / 90000 - timestampmap.LOCAL + mappingObj.mapping;

        segmentInfo.cues.forEach(function (cue) {
          // First convert cue time to TS time using the timestamp-map provided within the vtt
          cue.startTime += diff;
          cue.endTime += diff;
        });

        if (!playlist.syncInfo) {
          var firstStart = segmentInfo.cues[0].startTime;
          var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;

          playlist.syncInfo = {
            mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
            time: Math.min(firstStart, lastStart - segment.duration)
          };
        }
      }
    }]);
    return VTTSegmentLoader;
  }(SegmentLoader);
22610
22611 /**
22612 * @file ad-cue-tags.js
22613 */
22614
22615 /**
22616 * Searches for an ad cue that overlaps with the given mediaTime
22617 */
22618 var findAdCue = function findAdCue(track, mediaTime) {
22619 var cues = track.cues;
22620
22621 for (var i = 0; i < cues.length; i++) {
22622 var cue = cues[i];
22623
22624 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
22625 return cue;
22626 }
22627 }
22628 return null;
22629 };
22630
  // Walks the media playlist's segments, creating/extending/closing ad cues
  // on `track` from EXT-X-CUE-OUT / EXT-X-CUE-OUT-CONT / EXT-X-CUE-IN tags.
  // `offset` is the media time of the first segment in `media`.
  var updateAdCues = function updateAdCues(media, track) {
    var offset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;

    if (!media.segments) {
      return;
    }

    // running media time; advanced by each segment's duration as we walk
    var mediaTime = offset;
    var cue = void 0;

    for (var i = 0; i < media.segments.length; i++) {
      var segment = media.segments[i];

      if (!cue) {
        // Since the cues will span for at least the segment duration, adding a fudge
        // factor of half segment duration will prevent duplicate cues from being
        // created when timing info is not exact (e.g. cue start time initialized
        // at 10.006677, but next call mediaTime is 10.003332 )
        cue = findAdCue(track, mediaTime + segment.duration / 2);
      }

      if (cue) {
        if ('cueIn' in segment) {
          // Found a CUE-IN so end the cue
          cue.endTime = mediaTime;
          cue.adEndTime = mediaTime;
          mediaTime += segment.duration;
          cue = null;
          continue;
        }

        if (mediaTime < cue.endTime) {
          // Already processed this mediaTime for this cue
          mediaTime += segment.duration;
          continue;
        }

        // otherwise extend cue until a CUE-IN is found
        cue.endTime += segment.duration;
      } else {
        if ('cueOut' in segment) {
          cue = new window_1.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
          cue.adStartTime = mediaTime;
          // Assumes tag format to be
          // #EXT-X-CUE-OUT:30
          cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
          track.addCue(cue);
        }

        if ('cueOutCont' in segment) {
          // Entered into the middle of an ad cue
          var adOffset = void 0;
          var adTotal = void 0;

          // Assumes tag format to be
          // #EXT-X-CUE-OUT-CONT:10/30

          var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat);

          var _segment$cueOutCont$s2 = slicedToArray(_segment$cueOutCont$s, 2);

          adOffset = _segment$cueOutCont$s2[0];
          adTotal = _segment$cueOutCont$s2[1];

          // back-date the cue start so adStartTime reflects where the ad
          // actually began (adOffset seconds ago)
          cue = new window_1.VTTCue(mediaTime, mediaTime + segment.duration, '');
          cue.adStartTime = mediaTime - adOffset;
          cue.adEndTime = cue.adStartTime + adTotal;
          track.addCue(cue);
        }
      }
      mediaTime += segment.duration;
    }
  };
22705
22706 /**
22707 * mux.js
22708 *
22709 * Copyright (c) Brightcove
22710 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
22711 */
22712
  // MPEG-TS elementary stream_type identifiers recognized by this demuxer
  var streamTypes = {
    H264_STREAM_TYPE: 0x1B,
    ADTS_STREAM_TYPE: 0x0F,
    METADATA_STREAM_TYPE: 0x15
  };

  // TS timestamps are 33-bit values and wrap (roll over) at 2^33
  var MAX_TS = 8589934592;

  // a jump of more than 2^32 ticks (~13 hours at 90kHz) is treated as a rollover
  var RO_THRESH = 4294967296;

  // stream type used when muxed audio+video flow through one rollover stream
  var TYPE_SHARED = 'shared';
22724
22725 var handleRollover = function handleRollover(value, reference) {
22726 var direction = 1;
22727
22728 if (value > reference) {
22729 // If the current timestamp value is greater than our reference timestamp and we detect a
22730 // timestamp rollover, this means the roll over is happening in the opposite direction.
22731 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
22732 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
22733 // rollover point. In loading this segment, the timestamp values will be very large,
22734 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
22735 // the time stamp to be `value - 2^33`.
22736 direction = -1;
22737 }
22738
22739 // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
22740 // cause an incorrect adjustment.
22741 while (Math.abs(reference - value) > RO_THRESH) {
22742 value += direction * MAX_TS;
22743 }
22744
22745 return value;
22746 };
22747
  // Stream that rewrites pts/dts on pushed data so timestamps stay monotonic
  // across 33-bit TS clock rollovers. State (lastDTS/referenceDTS) lives in
  // this closure, so each instance tracks its own rollover anchor.
  var TimestampRolloverStream = function TimestampRolloverStream(type) {
    var lastDTS, referenceDTS;

    TimestampRolloverStream.prototype.init.call(this);

    // The "shared" type is used in cases where a stream will contain muxed
    // video and audio. We could use `undefined` here, but having a string
    // makes debugging a little clearer.
    this.type_ = type || TYPE_SHARED;

    this.push = function (data) {

      // Any "shared" rollover streams will accept _all_ data. Otherwise,
      // streams will only accept data that matches their type.
      if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
        return;
      }

      // the first timestamp seen becomes the anchor for all adjustments
      if (referenceDTS === undefined) {
        referenceDTS = data.dts;
      }

      data.dts = handleRollover(data.dts, referenceDTS);
      data.pts = handleRollover(data.pts, referenceDTS);

      lastDTS = data.dts;

      this.trigger('data', data);
    };

    this.flush = function () {
      // carry the last DTS forward so the next burst of data is adjusted
      // relative to where this one ended
      referenceDTS = lastDTS;
      this.trigger('done');
    };

    this.endTimeline = function () {
      this.flush();
      this.trigger('endedtimeline');
    };

    this.discontinuity = function () {
      // drop the anchor; the next push() re-establishes it
      referenceDTS = void 0;
      lastDTS = void 0;
    };

    this.reset = function () {
      this.discontinuity();
      this.trigger('reset');
    };
  };
22798
  // share a single Stream base on the prototype; per-instance state lives in
  // the constructor's closure above
  TimestampRolloverStream.prototype = new stream();

  // module-style export object mirroring mux.js's timestamp-rollover-stream
  var timestampRolloverStream = {
    TimestampRolloverStream: TimestampRolloverStream,
    handleRollover: handleRollover
  };
22805
22806 var parsePid = function parsePid(packet) {
22807 var pid = packet[1] & 0x1f;
22808 pid <<= 8;
22809 pid |= packet[2];
22810 return pid;
22811 };
22812
22813 var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
22814 return !!(packet[1] & 0x40);
22815 };
22816
22817 var parseAdaptionField = function parseAdaptionField(packet) {
22818 var offset = 0;
22819 // if an adaption field is present, its length is specified by the
22820 // fifth byte of the TS packet header. The adaptation field is
22821 // used to add stuffing to PES packets that don't fill a complete
22822 // TS packet, and to specify some forms of timing and control data
22823 // that we do not currently use.
22824 if ((packet[3] & 0x30) >>> 4 > 0x01) {
22825 offset += packet[4] + 1;
22826 }
22827 return offset;
22828 };
22829
22830 var parseType$1 = function parseType(packet, pmtPid) {
22831 var pid = parsePid(packet);
22832 if (pid === 0) {
22833 return 'pat';
22834 } else if (pid === pmtPid) {
22835 return 'pmt';
22836 } else if (pmtPid) {
22837 return 'pes';
22838 }
22839 return null;
22840 };
22841
22842 var parsePat = function parsePat(packet) {
22843 var pusi = parsePayloadUnitStartIndicator(packet);
22844 var offset = 4 + parseAdaptionField(packet);
22845
22846 if (pusi) {
22847 offset += packet[offset] + 1;
22848 }
22849
22850 return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
22851 };
22852
  // Parse a program map table packet into an object mapping elementary PID ->
  // stream_type. Returns undefined for "future" PMTs that are not yet active.
  var parsePmt = function parsePmt(packet) {
    var programMapTable = {};
    var pusi = parsePayloadUnitStartIndicator(packet);
    var payloadOffset = 4 + parseAdaptionField(packet);

    // skip the pointer field when this packet starts a payload unit
    if (pusi) {
      payloadOffset += packet[payloadOffset] + 1;
    }

    // PMTs can be sent ahead of the time when they should actually
    // take effect. We don't believe this should ever be the case
    // for HLS but we'll ignore "forward" PMT declarations if we see
    // them. Future PMT declarations have the current_next_indicator
    // set to zero.
    if (!(packet[payloadOffset + 5] & 0x01)) {
      return;
    }

    var sectionLength, tableEnd, programInfoLength;
    // the mapping table ends at the end of the current section
    sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
    tableEnd = 3 + sectionLength - 4;

    // to determine where the table is, we have to figure out how
    // long the program info descriptors are
    programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];

    // advance the offset to the first entry in the mapping table
    var offset = 12 + programInfoLength;
    while (offset < tableEnd) {
      var i = payloadOffset + offset;
      // add an entry that maps the elementary_pid to the stream_type
      programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i];

      // move to the next table entry
      // skip past the elementary stream descriptors, if present
      offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
    }
    return programMapTable;
  };
22893
22894 var parsePesType = function parsePesType(packet, programMapTable) {
22895 var pid = parsePid(packet);
22896 var type = programMapTable[pid];
22897 switch (type) {
22898 case streamTypes.H264_STREAM_TYPE:
22899 return 'video';
22900 case streamTypes.ADTS_STREAM_TYPE:
22901 return 'audio';
22902 case streamTypes.METADATA_STREAM_TYPE:
22903 return 'timed-metadata';
22904 default:
22905 return null;
22906 }
22907 };
22908
  // Extract {pts, dts} (in 90kHz clock ticks) from a TS packet that begins a
  // PES packet. Returns null when the packet carries no usable timing data.
  var parsePesTime = function parsePesTime(packet) {
    var pusi = parsePayloadUnitStartIndicator(packet);
    // only packets that begin a payload unit contain a PES header
    if (!pusi) {
      return null;
    }

    var offset = 4 + parseAdaptionField(packet);

    if (offset >= packet.byteLength) {
      // From the H 222.0 MPEG-TS spec
      // "For transport stream packets carrying PES packets, stuffing is needed when there
      //  is insufficient PES packet data to completely fill the transport stream packet
      //  payload bytes. Stuffing is accomplished by defining an adaptation field longer than
      //  the sum of the lengths of the data elements in it, so that the payload bytes
      //  remaining after the adaptation field exactly accommodates the available PES packet
      //  data."
      //
      // If the offset is >= the length of the packet, then the packet contains no data
      // and instead is just adaption field stuffing bytes
      return null;
    }

    var pes = null;
    var ptsDtsFlags;

    // PES packets may be annotated with a PTS value, or a PTS value
    // and a DTS value. Determine what combination of values is
    // available to work with.
    ptsDtsFlags = packet[offset + 7];

    // PTS and DTS are normally stored as a 33-bit number. Javascript
    // performs all bitwise operations on 32-bit integers but javascript
    // supports a much greater range (52-bits) of integer using standard
    // mathematical operations.
    // We construct a 31-bit value using bitwise operators over the 31
    // most significant bits and then multiply by 4 (equal to a left-shift
    // of 2) before we add the final 2 least significant bits of the
    // timestamp (equal to an OR.)
    if (ptsDtsFlags & 0xC0) {
      pes = {};
      // the PTS and DTS are not written out directly. For information
      // on how they are encoded, see
      // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
      pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
      pes.pts *= 4; // Left shift by 2
      pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
      // when no separate DTS is present, it equals the PTS
      pes.dts = pes.pts;
      if (ptsDtsFlags & 0x40) {
        pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
        pes.dts *= 4; // Left shift by 2
        pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
      }
    }
    return pes;
  };
22964
22965 var parseNalUnitType = function parseNalUnitType(type) {
22966 switch (type) {
22967 case 0x05:
22968 return 'slice_layer_without_partitioning_rbsp_idr';
22969 case 0x06:
22970 return 'sei_rbsp';
22971 case 0x07:
22972 return 'seq_parameter_set_rbsp';
22973 case 0x08:
22974 return 'pic_parameter_set_rbsp';
22975 case 0x09:
22976 return 'access_unit_delimiter_rbsp';
22977 default:
22978 return null;
22979 }
22980 };
22981
  // Scan a video TS packet's payload for H.264 NAL start codes (00 00 01)
  // and report whether any NAL unit is an IDR slice (a keyframe).
  var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
    var offset = 4 + parseAdaptionField(packet);
    var frameBuffer = packet.subarray(offset);
    var frameI = 0;
    var frameSyncPoint = 0;
    var foundKeyFrame = false;
    var nalType;

    // advance the sync point to a NAL start, if necessary
    for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
      if (frameBuffer[frameSyncPoint + 2] === 1) {
        // the sync point is properly aligned
        frameI = frameSyncPoint + 5;
        break;
      }
    }

    // frameI advances 3 bytes at a time; each iteration decides whether the
    // bytes around it form a start code ending the current NAL unit
    while (frameI < frameBuffer.byteLength) {
      // look at the current byte to determine if we've hit the end of
      // a NAL unit boundary
      switch (frameBuffer[frameI]) {
        case 0:
          // skip past non-sync sequences
          if (frameBuffer[frameI - 1] !== 0) {
            frameI += 2;
            break;
          } else if (frameBuffer[frameI - 2] !== 0) {
            frameI++;
            break;
          }

          // inspect the NAL that just ended (unless it was zero-length)
          if (frameSyncPoint + 3 !== frameI - 2) {
            nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
            if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
              foundKeyFrame = true;
            }
          }

          // drop trailing zeroes
          do {
            frameI++;
          } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
          frameSyncPoint = frameI - 2;
          frameI += 3;
          break;
        case 1:
          // skip past non-sync sequences
          if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
            frameI += 3;
            break;
          }

          // found a start code; classify the NAL that just ended
          nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
          if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
            foundKeyFrame = true;
          }
          frameSyncPoint = frameI - 2;
          frameI += 3;
          break;
        default:
          // the current byte isn't a one or zero, so it cannot be part
          // of a sync sequence
          frameI += 3;
          break;
      }
    }
    frameBuffer = frameBuffer.subarray(frameSyncPoint);
    frameI -= frameSyncPoint;
    frameSyncPoint = 0;
    // parse the final nal
    if (frameBuffer && frameBuffer.byteLength > 3) {
      nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
      if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
        foundKeyFrame = true;
      }
    }

    return foundKeyFrame;
  };
23061
  // Collected MPEG2-TS probing helpers (PAT/PMT/PES parsing plus keyframe
  // detection); consumed below as probe$2.ts.
  var probe$1 = {
    parseType: parseType$1,
    parsePat: parsePat,
    parsePmt: parsePmt,
    parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
    parsePesType: parsePesType,
    parsePesTime: parsePesTime,
    videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
  };
23071
23072 /**
23073 * mux.js
23074 *
23075 * Copyright (c) Brightcove
23076 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
23077 *
23078 * Utilities to detect basic properties and metadata about Aac data.
23079 */
23080
  // Sample rates (Hz) indexed by the 4-bit sampling_frequency_index field of
  // an ADTS header; indices 13-15 are reserved (ISO/IEC 14496-3).
  var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
23082
23083 var isLikelyAacData = function isLikelyAacData(data) {
23084 if (data[0] === 'I'.charCodeAt(0) && data[1] === 'D'.charCodeAt(0) && data[2] === '3'.charCodeAt(0)) {
23085 return true;
23086 }
23087 return false;
23088 };
23089
23090 var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
23091 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
23092 };
23093
23094 // return a percent-encoded representation of the specified byte range
23095 // @see http://en.wikipedia.org/wiki/Percent-encoding
23096 var percentEncode = function percentEncode(bytes, start, end) {
23097 var i,
23098 result = '';
23099 for (i = start; i < end; i++) {
23100 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
23101 }
23102 return result;
23103 };
23104
  // return the string representation of the specified byte range,
  // interpreted as ISO-8859-1.
  // unescape() decodes each '%xx' escape as the Latin-1 code point xx,
  // which is exactly the ISO-8859-1 mapping (decodeURIComponent would
  // instead require the bytes to be valid UTF-8).
  var parseIso88591 = function parseIso88591(bytes, start, end) {
    return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
  };
23110
23111 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
23112 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
23113 flags = header[byteIndex + 5],
23114 footerPresent = (flags & 16) >> 4;
23115
23116 if (footerPresent) {
23117 return returnSize + 20;
23118 }
23119 return returnSize + 10;
23120 };
23121
23122 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
23123 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
23124 middle = header[byteIndex + 4] << 3,
23125 highTwo = header[byteIndex + 3] & 0x3 << 11;
23126
23127 return highTwo | middle | lowThree;
23128 };
23129
23130 var parseType$2 = function parseType(header, byteIndex) {
23131 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
23132 return 'timed-metadata';
23133 } else if (header[byteIndex] & 0xff === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
23134 return 'audio';
23135 }
23136 return null;
23137 };
23138
23139 var parseSampleRate = function parseSampleRate(packet) {
23140 var i = 0;
23141
23142 while (i + 5 < packet.length) {
23143 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
23144 // If a valid header was not found, jump one forward and attempt to
23145 // find a valid ADTS header starting at the next byte
23146 i++;
23147 continue;
23148 }
23149 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
23150 }
23151
23152 return null;
23153 };
23154
  /**
   * Extracts the transport stream timestamp carried in an ID3 PRIV frame
   * with owner 'com.apple.streaming.transportStreamTimestamp' from the ID3
   * tag at the start of the packet. Returns the 33-bit 90kHz timestamp, or
   * null when no such frame exists or a frame size is invalid.
   */
  var parseAacTimestamp = function parseAacTimestamp(packet) {
    var frameStart, frameSize, frame, frameHeader;

    // find the start of the first frame and the end of the tag
    frameStart = 10;
    if (packet[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field
      frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
    }

    // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview
    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
      if (frameSize < 1) {
        // a zero-length frame means the tag is malformed (or we have walked
        // into padding); give up
        return null;
      }
      frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);

      if (frameHeader === 'PRIV') {
        frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);

        // the PRIV payload is a NUL-terminated owner string followed by the
        // private data
        for (var i = 0; i < frame.byteLength; i++) {
          if (frame[i] === 0) {
            var owner = parseIso88591(frame, 0, i);
            if (owner === 'com.apple.streaming.transportStreamTimestamp') {
              var d = frame.subarray(i + 1);
              // reassemble the 33-bit PTS: shift in the upper 31 bits, then
              // multiply by 4 and add the final 2 bits so the value never
              // passes through JavaScript's 32-bit bitwise operators whole
              var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
              size *= 4;
              size += d[7] & 0x03;

              return size;
            }
            break;
          }
        }
      }

      frameStart += 10; // advance past the frame header
      frameStart += frameSize; // advance past the frame body
    } while (frameStart < packet.byteLength);
    return null;
  };
23200
  // Collected AAC/ID3 probing helpers; consumed below as probe$2.aac.
  var utils$1 = {
    isLikelyAacData: isLikelyAacData,
    parseId3TagSize: parseId3TagSize,
    parseAdtsSize: parseAdtsSize,
    parseType: parseType$2,
    parseSampleRate: parseSampleRate,
    parseAacTimestamp: parseAacTimestamp
  };
23209
23210 /**
23211 * mux.js
23212 *
23213 * Copyright (c) Brightcove
23214 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
23215 */
23216 var ONE_SECOND_IN_TS$1 = 90000,
23217 // 90kHz clock
23218 secondsToVideoTs,
23219 secondsToAudioTs,
23220 videoTsToSeconds,
23221 audioTsToSeconds,
23222 audioTsToVideoTs,
23223 videoTsToAudioTs,
23224 metadataTsToSeconds;
23225
23226 secondsToVideoTs = function secondsToVideoTs(seconds) {
23227 return seconds * ONE_SECOND_IN_TS$1;
23228 };
23229
23230 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
23231 return seconds * sampleRate;
23232 };
23233
23234 videoTsToSeconds = function videoTsToSeconds(timestamp) {
23235 return timestamp / ONE_SECOND_IN_TS$1;
23236 };
23237
23238 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
23239 return timestamp / sampleRate;
23240 };
23241
23242 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
23243 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
23244 };
23245
23246 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
23247 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
23248 };
23249
23250 /**
23251 * Adjust ID3 tag or caption timing information by the timeline pts values
23252 * (if keepOriginalTimestamps is false) and convert to seconds
23253 */
23254 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
23255 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
23256 };
23257
23258 var clock = {
23259 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$1,
23260 secondsToVideoTs: secondsToVideoTs,
23261 secondsToAudioTs: secondsToAudioTs,
23262 videoTsToSeconds: videoTsToSeconds,
23263 audioTsToSeconds: audioTsToSeconds,
23264 audioTsToVideoTs: audioTsToVideoTs,
23265 videoTsToAudioTs: videoTsToAudioTs,
23266 metadataTsToSeconds: metadataTsToSeconds
23267 };
23268
  // rollover-aware timestamp helper taken from the timestamp rollover stream
  // module defined earlier in this bundle
  var handleRollover$1 = timestampRolloverStream.handleRollover;
  // namespace the container probes: probe$2.ts for MPEG2-TS input and
  // probe$2.aac for raw AAC/ID3 input
  var probe$2 = {};
  probe$2.ts = probe$1;
  probe$2.aac = utils$1;
  var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;

  var MP2T_PACKET_LENGTH = 188,
      // bytes
      SYNC_BYTE = 0x47;
23278
23279 /**
23280 * walks through segment data looking for pat and pmt packets to parse out
23281 * program map table information
23282 */
23283 var parsePsi_ = function parsePsi_(bytes, pmt) {
23284 var startIndex = 0,
23285 endIndex = MP2T_PACKET_LENGTH,
23286 packet,
23287 type;
23288
23289 while (endIndex < bytes.byteLength) {
23290 // Look for a pair of start and end sync bytes in the data..
23291 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
23292 // We found a packet
23293 packet = bytes.subarray(startIndex, endIndex);
23294 type = probe$2.ts.parseType(packet, pmt.pid);
23295
23296 switch (type) {
23297 case 'pat':
23298 if (!pmt.pid) {
23299 pmt.pid = probe$2.ts.parsePat(packet);
23300 }
23301 break;
23302 case 'pmt':
23303 if (!pmt.table) {
23304 pmt.table = probe$2.ts.parsePmt(packet);
23305 }
23306 break;
23307 default:
23308 break;
23309 }
23310
23311 // Found the pat and pmt, we can stop walking the segment
23312 if (pmt.pid && pmt.table) {
23313 return;
23314 }
23315
23316 startIndex += MP2T_PACKET_LENGTH;
23317 endIndex += MP2T_PACKET_LENGTH;
23318 continue;
23319 }
23320
23321 // If we get here, we have somehow become de-synchronized and we need to step
23322 // forward one byte at a time until we find a pair of sync bytes that denote
23323 // a packet
23324 startIndex++;
23325 endIndex++;
23326 }
23327 };
23328
  /**
   * walks through the segment data from the start and end to get timing information
   * for the first and last audio pes packets
   *
   * @param {Uint8Array} bytes segment data (MPEG2-TS packets)
   * @param {Object} pmt parsed program map table ({pid, table})
   * @param {Object} result accumulator; timing entries are pushed onto result.audio
   */
  var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
    var startIndex = 0,
        endIndex = MP2T_PACKET_LENGTH,
        packet,
        type,
        pesType,
        pusi,
        parsed;

    var endLoop = false;

    // Start walking from start of segment to get first audio packet
    while (endIndex <= bytes.byteLength) {
      // Look for a pair of start and end sync bytes in the data..
      // (endIndex === bytes.byteLength accepts a final packet that is not
      // followed by another sync byte)
      if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
        // We found a packet
        packet = bytes.subarray(startIndex, endIndex);
        type = probe$2.ts.parseType(packet, pmt.pid);

        switch (type) {
          case 'pes':
            pesType = probe$2.ts.parsePesType(packet, pmt.table);
            pusi = probe$2.ts.parsePayloadUnitStartIndicator(packet);
            // only packets that begin a payload unit carry a PES header
            // with PTS/DTS timing
            if (pesType === 'audio' && pusi) {
              parsed = probe$2.ts.parsePesTime(packet);
              if (parsed) {
                parsed.type = 'audio';
                result.audio.push(parsed);
                endLoop = true;
              }
            }
            break;
          default:
            break;
        }

        if (endLoop) {
          break;
        }

        startIndex += MP2T_PACKET_LENGTH;
        endIndex += MP2T_PACKET_LENGTH;
        continue;
      }

      // If we get here, we have somehow become de-synchronized and we need to step
      // forward one byte at a time until we find a pair of sync bytes that denote
      // a packet
      startIndex++;
      endIndex++;
    }

    // Start walking from end of segment to get last audio packet
    endIndex = bytes.byteLength;
    startIndex = endIndex - MP2T_PACKET_LENGTH;
    endLoop = false;
    while (startIndex >= 0) {
      // Look for a pair of start and end sync bytes in the data..
      if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
        // We found a packet
        packet = bytes.subarray(startIndex, endIndex);
        type = probe$2.ts.parseType(packet, pmt.pid);

        switch (type) {
          case 'pes':
            pesType = probe$2.ts.parsePesType(packet, pmt.table);
            pusi = probe$2.ts.parsePayloadUnitStartIndicator(packet);
            if (pesType === 'audio' && pusi) {
              parsed = probe$2.ts.parsePesTime(packet);
              if (parsed) {
                parsed.type = 'audio';
                result.audio.push(parsed);
                endLoop = true;
              }
            }
            break;
          default:
            break;
        }

        if (endLoop) {
          break;
        }

        startIndex -= MP2T_PACKET_LENGTH;
        endIndex -= MP2T_PACKET_LENGTH;
        continue;
      }

      // If we get here, we have somehow become de-synchronized and we need to step
      // backward one byte at a time until we find a pair of sync bytes that denote
      // a packet
      startIndex--;
      endIndex--;
    }
  };
23429
  /**
   * walks through the segment data from the start and end to get timing information
   * for the first and last video pes packets as well as timing information for the first
   * key frame.
   *
   * @param {Uint8Array} bytes segment data (MPEG2-TS packets)
   * @param {Object} pmt parsed program map table ({pid, table})
   * @param {Object} result accumulator; timing entries are pushed onto
   *                        result.video and the first keyframe's timing is
   *                        stored at result.firstKeyFrame
   */
  var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
    var startIndex = 0,
        endIndex = MP2T_PACKET_LENGTH,
        packet,
        type,
        pesType,
        pusi,
        parsed,
        frame,
        i,
        pes;

    var endLoop = false;

    // accumulates TS packet payloads until the next payload-unit-start
    // indicator, at which point they form one complete video frame
    var currentFrame = {
      data: [],
      size: 0
    };

    // Start walking from start of segment to get first video packet
    while (endIndex < bytes.byteLength) {
      // Look for a pair of start and end sync bytes in the data..
      // NOTE(review): unlike parseAudioPes_, this loop does not accept
      // endIndex === bytes.byteLength as a packet boundary — confirm whether
      // the final packet of a segment is intentionally skipped here
      if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
        // We found a packet
        packet = bytes.subarray(startIndex, endIndex);
        type = probe$2.ts.parseType(packet, pmt.pid);

        switch (type) {
          case 'pes':
            pesType = probe$2.ts.parsePesType(packet, pmt.table);
            pusi = probe$2.ts.parsePayloadUnitStartIndicator(packet);
            if (pesType === 'video') {
              // record the first video PES that carries parseable timing
              if (pusi && !endLoop) {
                parsed = probe$2.ts.parsePesTime(packet);
                if (parsed) {
                  parsed.type = 'video';
                  result.video.push(parsed);
                  endLoop = true;
                }
              }
              if (!result.firstKeyFrame) {
                // a payload-unit-start means the previously accumulated
                // packets form a complete frame; reassemble and inspect it
                if (pusi) {
                  if (currentFrame.size !== 0) {
                    frame = new Uint8Array(currentFrame.size);
                    i = 0;
                    while (currentFrame.data.length) {
                      pes = currentFrame.data.shift();
                      frame.set(pes, i);
                      i += pes.byteLength;
                    }
                    if (probe$2.ts.videoPacketContainsKeyFrame(frame)) {
                      var firstKeyFrame = probe$2.ts.parsePesTime(frame);

                      // PTS/DTS may not be available. Simply *not* setting
                      // the keyframe seems to work fine with HLS playback
                      // and definitely preferable to a crash with TypeError...
                      if (firstKeyFrame) {
                        result.firstKeyFrame = firstKeyFrame;
                        result.firstKeyFrame.type = 'video';
                      } else {
                        // eslint-disable-next-line
                        console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
                      }
                    }
                    currentFrame.size = 0;
                  }
                }
                currentFrame.data.push(packet);
                currentFrame.size += packet.byteLength;
              }
            }
            break;
          default:
            break;
        }

        // stop once both the first timed packet and the first keyframe
        // have been found
        if (endLoop && result.firstKeyFrame) {
          break;
        }

        startIndex += MP2T_PACKET_LENGTH;
        endIndex += MP2T_PACKET_LENGTH;
        continue;
      }

      // If we get here, we have somehow become de-synchronized and we need to step
      // forward one byte at a time until we find a pair of sync bytes that denote
      // a packet
      startIndex++;
      endIndex++;
    }

    // Start walking from end of segment to get last video packet
    endIndex = bytes.byteLength;
    startIndex = endIndex - MP2T_PACKET_LENGTH;
    endLoop = false;
    while (startIndex >= 0) {
      // Look for a pair of start and end sync bytes in the data..
      if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
        // We found a packet
        packet = bytes.subarray(startIndex, endIndex);
        type = probe$2.ts.parseType(packet, pmt.pid);

        switch (type) {
          case 'pes':
            pesType = probe$2.ts.parsePesType(packet, pmt.table);
            pusi = probe$2.ts.parsePayloadUnitStartIndicator(packet);
            if (pesType === 'video' && pusi) {
              parsed = probe$2.ts.parsePesTime(packet);
              if (parsed) {
                parsed.type = 'video';
                result.video.push(parsed);
                endLoop = true;
              }
            }
            break;
          default:
            break;
        }

        if (endLoop) {
          break;
        }

        startIndex -= MP2T_PACKET_LENGTH;
        endIndex -= MP2T_PACKET_LENGTH;
        continue;
      }

      // If we get here, we have somehow become de-synchronized and we need to step
      // backward one byte at a time until we find a pair of sync bytes that denote
      // a packet
      startIndex--;
      endIndex--;
    }
  };
23571
23572 /**
23573 * Adjusts the timestamp information for the segment to account for
23574 * rollover and convert to seconds based on pes packet timescale (90khz clock)
23575 */
23576 var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
23577 if (segmentInfo.audio && segmentInfo.audio.length) {
23578 var audioBaseTimestamp = baseTimestamp;
23579 if (typeof audioBaseTimestamp === 'undefined') {
23580 audioBaseTimestamp = segmentInfo.audio[0].dts;
23581 }
23582 segmentInfo.audio.forEach(function (info) {
23583 info.dts = handleRollover$1(info.dts, audioBaseTimestamp);
23584 info.pts = handleRollover$1(info.pts, audioBaseTimestamp);
23585 // time in seconds
23586 info.dtsTime = info.dts / ONE_SECOND_IN_TS$2;
23587 info.ptsTime = info.pts / ONE_SECOND_IN_TS$2;
23588 });
23589 }
23590
23591 if (segmentInfo.video && segmentInfo.video.length) {
23592 var videoBaseTimestamp = baseTimestamp;
23593 if (typeof videoBaseTimestamp === 'undefined') {
23594 videoBaseTimestamp = segmentInfo.video[0].dts;
23595 }
23596 segmentInfo.video.forEach(function (info) {
23597 info.dts = handleRollover$1(info.dts, videoBaseTimestamp);
23598 info.pts = handleRollover$1(info.pts, videoBaseTimestamp);
23599 // time in seconds
23600 info.dtsTime = info.dts / ONE_SECOND_IN_TS$2;
23601 info.ptsTime = info.pts / ONE_SECOND_IN_TS$2;
23602 });
23603 if (segmentInfo.firstKeyFrame) {
23604 var frame = segmentInfo.firstKeyFrame;
23605 frame.dts = handleRollover$1(frame.dts, videoBaseTimestamp);
23606 frame.pts = handleRollover$1(frame.pts, videoBaseTimestamp);
23607 // time in seconds
23608 frame.dtsTime = frame.dts / ONE_SECOND_IN_TS$2;
23609 frame.ptsTime = frame.dts / ONE_SECOND_IN_TS$2;
23610 }
23611 }
23612 };
23613
  /**
   * inspects the aac data stream for start and end time information
   *
   * Walks the stream one ID3 tag / ADTS frame at a time, collecting the
   * first ID3 timestamp, the first ADTS sample rate, and a count of audio
   * frames; returns {audio: [start, end]} timing entries or null when the
   * stream is truncated or incomplete.
   */
  var inspectAac_ = function inspectAac_(bytes) {
    var endLoop = false,
        audioCount = 0,
        sampleRate = null,
        timestamp = null,
        frameSize = 0,
        byteIndex = 0,
        packet;

    // 3 bytes is the minimum needed to classify a boundary (the 'ID3' magic)
    while (bytes.length - byteIndex >= 3) {
      var type = probe$2.aac.parseType(bytes, byteIndex);
      switch (type) {
        case 'timed-metadata':
          // Exit early because we don't have enough to parse
          // the ID3 tag header
          if (bytes.length - byteIndex < 10) {
            endLoop = true;
            break;
          }

          frameSize = probe$2.aac.parseId3TagSize(bytes, byteIndex);

          // Exit early if we don't have enough in the buffer
          // to emit a full packet
          // NOTE(review): compares against bytes.length rather than the
          // remaining bytes (bytes.length - byteIndex) — confirm intended
          if (frameSize > bytes.length) {
            endLoop = true;
            break;
          }
          // only the first ID3 tag's timestamp is needed (stream start time)
          if (timestamp === null) {
            packet = bytes.subarray(byteIndex, byteIndex + frameSize);
            timestamp = probe$2.aac.parseAacTimestamp(packet);
          }
          byteIndex += frameSize;
          break;
        case 'audio':
          // Exit early because we don't have enough to parse
          // the ADTS frame header
          if (bytes.length - byteIndex < 7) {
            endLoop = true;
            break;
          }

          frameSize = probe$2.aac.parseAdtsSize(bytes, byteIndex);

          // Exit early if we don't have enough in the buffer
          // to emit a full packet
          if (frameSize > bytes.length) {
            endLoop = true;
            break;
          }
          // only the first frame's sample rate is needed
          if (sampleRate === null) {
            packet = bytes.subarray(byteIndex, byteIndex + frameSize);
            sampleRate = probe$2.aac.parseSampleRate(packet);
          }
          audioCount++;
          byteIndex += frameSize;
          break;
        default:
          // not at a recognizable boundary; resynchronize one byte forward
          byteIndex++;
          break;
      }
      if (endLoop) {
        return null;
      }
    }
    if (sampleRate === null || timestamp === null) {
      return null;
    }

    // 90kHz ticks per audio sample; one AAC frame holds 1024 samples
    var audioTimescale = ONE_SECOND_IN_TS$2 / sampleRate;

    var result = {
      audio: [{
        type: 'audio',
        dts: timestamp,
        pts: timestamp
      }, {
        type: 'audio',
        dts: timestamp + audioCount * 1024 * audioTimescale,
        pts: timestamp + audioCount * 1024 * audioTimescale
      }]
    };

    return result;
  };
23702
  /**
   * inspects the transport stream segment data for start and end time information
   * of the audio and video tracks (when present) as well as the first key frame's
   * start time.
   *
   * @param {Uint8Array} bytes segment data (MPEG2-TS packets)
   * @return {Object} result with optional .audio/.video timing arrays (and
   *                  .firstKeyFrame when video is present and found)
   */
  var inspectTs_ = function inspectTs_(bytes) {
    var pmt = {
      pid: null,
      table: null
    };

    var result = {};

    // locate the PAT/PMT first so PES packets can be attributed to streams
    parsePsi_(bytes, pmt);

    // walk the program map table and probe each known elementary stream type
    for (var pid in pmt.table) {
      if (pmt.table.hasOwnProperty(pid)) {
        var type = pmt.table[pid];
        switch (type) {
          case streamTypes.H264_STREAM_TYPE:
            result.video = [];
            parseVideoPes_(bytes, pmt, result);
            // drop the property entirely when no timed packets were found
            if (result.video.length === 0) {
              delete result.video;
            }
            break;
          case streamTypes.ADTS_STREAM_TYPE:
            result.audio = [];
            parseAudioPes_(bytes, pmt, result);
            if (result.audio.length === 0) {
              delete result.audio;
            }
            break;
          default:
            break;
        }
      }
    }
    return result;
  };
23743
23744 /**
23745 * Inspects segment byte data and returns an object with start and end timing information
23746 *
23747 * @param {Uint8Array} bytes The segment byte data
23748 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
23749 * timestamps for rollover. This value must be in 90khz clock.
23750 * @return {Object} Object containing start and end frame timing info of segment.
23751 */
23752 var inspect = function inspect(bytes, baseTimestamp) {
23753 var isAacData = probe$2.aac.isLikelyAacData(bytes);
23754
23755 var result;
23756
23757 if (isAacData) {
23758 result = inspectAac_(bytes);
23759 } else {
23760 result = inspectTs_(bytes);
23761 }
23762
23763 if (!result || !result.audio && !result.video) {
23764 return null;
23765 }
23766
23767 adjustTimestamp_(result, baseTimestamp);
23768
23769 return result;
23770 };
23771
  // public interface of the TS inspector
  // NOTE(review): parseAudioPes_ appears to be exposed for testing — confirm
  var tsInspector = {
    inspect: inspect,
    parseAudioPes_: parseAudioPes_
  };
23776
23777 /**
23778 * @file sync-controller.js
23779 */
23780
  // convenience alias for tsInspector.inspect
  var tsprobe = tsInspector.inspect;
23782
  // Ordered list of strategies for deriving a sync-point: a known mapping
  // from display-time to a segment index in the current playlist. Each
  // strategy's run() returns {time, segmentIndex} or null.
  var syncPointStrategies = [
  // Strategy "VOD": Handle the VOD-case where the sync-point is *always*
  // the equivalence display-time 0 === segment-index 0
  {
    name: 'VOD',
    run: function run(syncController, playlist, duration$$1, currentTimeline, currentTime) {
      if (duration$$1 !== Infinity) {
        var syncPoint = {
          time: 0,
          segmentIndex: 0
        };

        return syncPoint;
      }
      return null;
    }
  },
  // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
  {
    name: 'ProgramDateTime',
    run: function run(syncController, playlist, duration$$1, currentTimeline, currentTime) {
      // requires a previously established mapping from datetime to display time
      if (!syncController.datetimeToDisplayTime) {
        return null;
      }

      var segments = playlist.segments || [];
      var syncPoint = null;
      var lastDistance = null;

      currentTime = currentTime || 0;

      // find the segment whose EXT-X-PROGRAM-DATE-TIME-derived start is
      // closest to currentTime
      for (var i = 0; i < segments.length; i++) {
        var segment = segments[i];

        if (segment.dateTimeObject) {
          var segmentTime = segment.dateTimeObject.getTime() / 1000;
          var segmentStart = segmentTime + syncController.datetimeToDisplayTime;
          var distance = Math.abs(currentTime - segmentStart);

          // Once the distance begins to increase, or if distance is 0, we have passed
          // currentTime and can stop looking for better candidates
          if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
            break;
          }

          lastDistance = distance;
          syncPoint = {
            time: segmentStart,
            segmentIndex: i
          };
        }
      }
      return syncPoint;
    }
  },
  // Strategy "Segment": We have a known time mapping for a timeline and a
  // segment in the current timeline with timing data
  {
    name: 'Segment',
    run: function run(syncController, playlist, duration$$1, currentTimeline, currentTime) {
      var segments = playlist.segments || [];
      var syncPoint = null;
      var lastDistance = null;

      currentTime = currentTime || 0;

      for (var i = 0; i < segments.length; i++) {
        var segment = segments[i];

        // only segments on the current timeline with a resolved start time
        // can anchor a sync-point
        if (segment.timeline === currentTimeline && typeof segment.start !== 'undefined') {
          var distance = Math.abs(currentTime - segment.start);

          // Once the distance begins to increase, we have passed
          // currentTime and can stop looking for better candidates
          if (lastDistance !== null && lastDistance < distance) {
            break;
          }

          if (!syncPoint || lastDistance === null || lastDistance >= distance) {
            lastDistance = distance;
            syncPoint = {
              time: segment.start,
              segmentIndex: i
            };
          }
        }
      }
      return syncPoint;
    }
  },
  // Strategy "Discontinuity": We have a discontinuity with a known
  // display-time
  {
    name: 'Discontinuity',
    run: function run(syncController, playlist, duration$$1, currentTimeline, currentTime) {
      var syncPoint = null;

      currentTime = currentTime || 0;

      if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
        var lastDistance = null;

        for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
          var segmentIndex = playlist.discontinuityStarts[i];
          // discontinuity numbers are absolute, so offset by the playlist's
          // discontinuity sequence
          var discontinuity = playlist.discontinuitySequence + i + 1;
          var discontinuitySync = syncController.discontinuities[discontinuity];

          if (discontinuitySync) {
            var distance = Math.abs(currentTime - discontinuitySync.time);

            // Once the distance begins to increase, we have passed
            // currentTime and can stop looking for better candidates
            if (lastDistance !== null && lastDistance < distance) {
              break;
            }

            if (!syncPoint || lastDistance === null || lastDistance >= distance) {
              lastDistance = distance;
              syncPoint = {
                time: discontinuitySync.time,
                segmentIndex: segmentIndex
              };
            }
          }
        }
      }
      return syncPoint;
    }
  },
  // Strategy "Playlist": We have a playlist with a known mapping of
  // segment index to display time
  {
    name: 'Playlist',
    run: function run(syncController, playlist, duration$$1, currentTimeline, currentTime) {
      if (playlist.syncInfo) {
        var syncPoint = {
          time: playlist.syncInfo.time,
          segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence
        };

        return syncPoint;
      }
      return null;
    }
  }];
23928
23929 var SyncController = function (_videojs$EventTarget) {
23930 inherits(SyncController, _videojs$EventTarget);
23931
    function SyncController() {
      classCallCheck(this, SyncController);

      // Segment Loader state variables...
      var _this = possibleConstructorReturn(this, (SyncController.__proto__ || Object.getPrototypeOf(SyncController)).call(this));

      // NOTE(review): appears to cache the last segment-inspection result — confirm
      _this.inspectCache_ = undefined;

      // ...for synching across variants
      _this.timelines = [];
      _this.discontinuities = [];
      _this.datetimeToDisplayTime = null;

      _this.logger_ = logger('SyncController');
      return _this;
    }
23949
23950 /**
23951 * Find a sync-point for the playlist specified
23952 *
23953 * A sync-point is defined as a known mapping from display-time to
23954 * a segment-index in the current playlist.
23955 *
23956 * @param {Playlist} playlist
23957 * The playlist that needs a sync-point
23958 * @param {Number} duration
23959 * Duration of the MediaSource (Infinite if playing a live source)
23960 * @param {Number} currentTimeline
23961 * The last timeline from which a segment was loaded
23962 * @returns {Object}
23963 * A sync-point object
23964 */
23965
23966
  createClass(SyncController, [{
    key: 'getSyncPoint',
    value: function getSyncPoint(playlist, duration$$1, currentTimeline, currentTime) {
      // collect candidate sync-points from every strategy
      var syncPoints = this.runStrategies_(playlist, duration$$1, currentTimeline, currentTime);

      if (!syncPoints.length) {
        // Signal that we need to attempt to get a sync-point manually
        // by fetching a segment in the playlist and constructing
        // a sync-point from that information
        return null;
      }

      // Now find the sync-point that is closest to the currentTime because
      // that should result in the most accurate guess about which segment
      // to fetch
      return this.selectSyncPoint_(syncPoints, { key: 'time', value: currentTime });
    }
23984
23985 /**
23986 * Calculate the amount of time that has expired off the playlist during playback
23987 *
23988 * @param {Playlist} playlist
23989 * Playlist object to calculate expired from
23990 * @param {Number} duration
23991 * Duration of the MediaSource (Infinity if playling a live source)
23992 * @returns {Number|null}
23993 * The amount of time that has expired off the playlist during playback. Null
23994 * if no sync-points for the playlist can be found.
23995 */
23996
  }, {
    key: 'getExpiredTime',
    value: function getExpiredTime(playlist, duration$$1) {
      if (!playlist || !playlist.segments) {
        return null;
      }

      var syncPoints = this.runStrategies_(playlist, duration$$1, playlist.discontinuitySequence, 0);

      // Without sync-points, there is not enough information to determine the expired time
      if (!syncPoints.length) {
        return null;
      }

      // target segmentIndex 0: the closest known mapping to the playlist start
      var syncPoint = this.selectSyncPoint_(syncPoints, {
        key: 'segmentIndex',
        value: 0
      });

      // If the sync-point is beyond the start of the playlist, we want to subtract the
      // duration from index 0 to syncPoint.segmentIndex instead of adding.
      if (syncPoint.segmentIndex > 0) {
        syncPoint.time *= -1;
      }

      return Math.abs(syncPoint.time + sumDurations(playlist, syncPoint.segmentIndex, 0));
    }
24024
24025 /**
24026 * Runs each sync-point strategy and returns a list of sync-points returned by the
24027 * strategies
24028 *
24029 * @private
24030 * @param {Playlist} playlist
24031 * The playlist that needs a sync-point
24032 * @param {Number} duration
24033 * Duration of the MediaSource (Infinity if playing a live source)
24034 * @param {Number} currentTimeline
24035 * The last timeline from which a segment was loaded
24036 * @returns {Array}
24037 * A list of sync-point objects
24038 */
24039
  }, {
    key: 'runStrategies_',
    value: function runStrategies_(playlist, duration$$1, currentTimeline, currentTime) {
      var syncPoints = [];

      // Try to find a sync-point by utilizing various strategies...
      for (var i = 0; i < syncPointStrategies.length; i++) {
        var strategy = syncPointStrategies[i];
        var syncPoint = strategy.run(this, playlist, duration$$1, currentTimeline, currentTime);

        if (syncPoint) {
          // record the originating strategy both on the sync-point itself
          // and on the wrapper consumed by selectSyncPoint_
          syncPoint.strategy = strategy.name;
          syncPoints.push({
            strategy: strategy.name,
            syncPoint: syncPoint
          });
        }
      }

      return syncPoints;
    }
24061
24062 /**
24063 * Selects the sync-point nearest the specified target
24064 *
24065 * @private
24066 * @param {Array} syncPoints
24067 * List of sync-points to select from
24068 * @param {Object} target
24069 * Object specifying the property and value we are targeting
24070 * @param {String} target.key
24071 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
24072 * @param {Number} target.value
24073 * The value to target for the specified key.
24074 * @returns {Object}
24075 * The sync-point nearest the target
24076 */
24077
24078 }, {
24079 key: 'selectSyncPoint_',
24080 value: function selectSyncPoint_(syncPoints, target) {
24081 var bestSyncPoint = syncPoints[0].syncPoint;
24082 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
24083 var bestStrategy = syncPoints[0].strategy;
24084
24085 for (var i = 1; i < syncPoints.length; i++) {
24086 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
24087
24088 if (newDistance < bestDistance) {
24089 bestDistance = newDistance;
24090 bestSyncPoint = syncPoints[i].syncPoint;
24091 bestStrategy = syncPoints[i].strategy;
24092 }
24093 }
24094
24095 this.logger_('syncPoint for [' + target.key + ': ' + target.value + '] chosen with strategy' + (' [' + bestStrategy + ']: [time:' + bestSyncPoint.time + ',') + (' segmentIndex:' + bestSyncPoint.segmentIndex + ']'));
24096
24097 return bestSyncPoint;
24098 }
24099
    /**
     * Save any meta-data present on the segments when segments leave
     * the live window to the playlist to allow for synchronization at the
     * playlist level later.
     *
     * @param {Playlist} oldPlaylist - The previous active playlist
     * @param {Playlist} newPlaylist - The updated and most current playlist
     */

  }, {
    key: 'saveExpiredSegmentInfo',
    value: function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
      // number of segments that fell off the start of the playlist
      // between the two refreshes
      var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;

      // When a segment expires from the playlist and it has a start time
      // save that information as a possible sync-point reference in future.
      // Walk backwards so we keep the most recently expired segment that
      // carries timing info.
      for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
        var lastRemovedSegment = oldPlaylist.segments[i];

        if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
          newPlaylist.syncInfo = {
            mediaSequence: oldPlaylist.mediaSequence + i,
            time: lastRemovedSegment.start
          };
          this.logger_('playlist refresh sync: [time:' + newPlaylist.syncInfo.time + ',' + (' mediaSequence: ' + newPlaylist.syncInfo.mediaSequence + ']'));
          this.trigger('syncinfoupdate');
          break;
        }
      }
    }
24130
24131 /**
24132 * Save the mapping from playlist's ProgramDateTime to display. This should
24133 * only ever happen once at the start of playback.
24134 *
24135 * @param {Playlist} playlist - The currently active playlist
24136 */
24137
24138 }, {
24139 key: 'setDateTimeMapping',
24140 value: function setDateTimeMapping(playlist) {
24141 if (!this.datetimeToDisplayTime && playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
24142 var playlistTimestamp = playlist.segments[0].dateTimeObject.getTime() / 1000;
24143
24144 this.datetimeToDisplayTime = -playlistTimestamp;
24145 }
24146 }
24147
    /**
     * Reset the state of the inspection cache when we do a rendition
     * switch, so stale mpeg2-ts probe state does not carry across playlists.
     */

  }, {
    key: 'reset',
    value: function reset() {
      // a cleared cache forces the next TS probe to start from scratch
      this.inspectCache_ = undefined;
    }
24158
    /**
     * Probe or inspect a fmp4 or an mpeg2-ts segment to determine the start
     * and end of the segment in it's internal "media time". Used to generate
     * mappings from that internal "media time" to the display time that is
     * shown on the player.
     *
     * @param {SegmentInfo} segmentInfo - The current active request information
     * @return {Object|null|undefined} timing info from the probe; may be
     *         null/undefined when the segment bytes could not be inspected
     */

  }, {
    key: 'probeSegmentInfo',
    value: function probeSegmentInfo(segmentInfo) {
      var segment = segmentInfo.segment;
      var playlist = segmentInfo.playlist;
      var timingInfo = void 0;

      // segments with an init-segment map are fmp4; everything else is
      // probed as mpeg2-ts
      if (segment.map) {
        timingInfo = this.probeMp4Segment_(segmentInfo);
      } else {
        timingInfo = this.probeTsSegment_(segmentInfo);
      }

      if (timingInfo) {
        if (this.calculateSegmentTimeMapping_(segmentInfo, timingInfo)) {
          this.saveDiscontinuitySyncInfo_(segmentInfo);

          // If the playlist does not have sync information yet, record that information
          // now with segment timing information
          if (!playlist.syncInfo) {
            playlist.syncInfo = {
              mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
              time: segment.start
            };
          }
        }
      }

      return timingInfo;
    }
24198
    /**
     * Probe an fmp4 segment to determine the start of the segment
     * in it's internal "composition time", which is equal to the base
     * media decode time plus the composition time offset value
     *
     * Side effect: reduces the caller's pending `segmentInfo.timestampOffset`
     * by the segment's composition start time.
     *
     * @private
     * @param {SegmentInfo} segmentInfo - The current active request information
     * @return {object} The start and end time of the current segment in "composition time"
     */

  }, {
    key: 'probeMp4Segment_',
    value: function probeMp4Segment_(segmentInfo) {
      var segment = segmentInfo.segment;
      // get timescales from init segment
      var timescales = probe.timescale(segment.map.bytes);
      // calculate composition start time using the timescales and information
      // contained within the media segment
      var compositionStartTime = probe.compositionStartTime(timescales, segmentInfo.bytes);

      if (segmentInfo.timestampOffset !== null) {
        segmentInfo.timestampOffset -= compositionStartTime;
      }

      // the end time is approximated from the playlist-declared duration,
      // not probed from the media itself
      return {
        start: compositionStartTime,
        end: compositionStartTime + segment.duration
      };
    }
24228
24229 /**
24230 * Probe an mpeg2-ts segment to determine the start and end of the segment
24231 * in it's internal "media time".
24232 *
24233 * @private
24234 * @param {SegmentInfo} segmentInfo - The current active request information
24235 * @return {object} The start and end time of the current segment in "media time"
24236 */
24237
24238 }, {
24239 key: 'probeTsSegment_',
24240 value: function probeTsSegment_(segmentInfo) {
24241 var timeInfo = tsprobe(segmentInfo.bytes, this.inspectCache_);
24242 var segmentStartTime = void 0;
24243 var segmentEndTime = void 0;
24244 var segmentTimestampInfo = void 0;
24245
24246 if (!timeInfo) {
24247 return null;
24248 }
24249
24250 if (timeInfo.video && timeInfo.video.length === 2) {
24251 this.inspectCache_ = timeInfo.video[1].dts;
24252 segmentStartTime = timeInfo.video[0].dtsTime;
24253 segmentEndTime = timeInfo.video[1].dtsTime;
24254 segmentTimestampInfo = timeInfo.video;
24255 } else if (timeInfo.audio && timeInfo.audio.length === 2) {
24256 this.inspectCache_ = timeInfo.audio[1].dts;
24257 segmentStartTime = timeInfo.audio[0].dtsTime;
24258 segmentEndTime = timeInfo.audio[1].dtsTime;
24259 segmentTimestampInfo = timeInfo.audio;
24260 }
24261
24262 var probedInfo = {
24263 segmentTimestampInfo: segmentTimestampInfo,
24264 start: segmentStartTime,
24265 end: segmentEndTime,
24266 containsVideo: timeInfo.video && timeInfo.video.length === 2,
24267 containsAudio: timeInfo.audio && timeInfo.audio.length === 2
24268 };
24269
24270 return probedInfo;
24271 }
24272 }, {
24273 key: 'timestampOffsetForTimeline',
24274 value: function timestampOffsetForTimeline(timeline) {
24275 if (typeof this.timelines[timeline] === 'undefined') {
24276 return null;
24277 }
24278 return this.timelines[timeline].time;
24279 }
24280 }, {
24281 key: 'mappingForTimeline',
24282 value: function mappingForTimeline(timeline) {
24283 if (typeof this.timelines[timeline] === 'undefined') {
24284 return null;
24285 }
24286 return this.timelines[timeline].mapping;
24287 }
24288
    /**
     * Use the "media time" for a segment to generate a mapping to "display time" and
     * save that display time to the segment.
     *
     * @private
     * @param {SegmentInfo} segmentInfo
     *        The current active request information
     * @param {object} timingInfo
     *        The start and end time of the current segment in "media time"
     * @returns {Boolean}
     *        Returns false if segment time mapping could not be calculated
     */

  }, {
    key: 'calculateSegmentTimeMapping_',
    value: function calculateSegmentTimeMapping_(segmentInfo, timingInfo) {
      var segment = segmentInfo.segment;
      var mappingObj = this.timelines[segmentInfo.timeline];

      // a non-null timestampOffset marks the start of a new timeline:
      // build a fresh media-time -> display-time mapping anchored at the
      // segment's expected start position
      if (segmentInfo.timestampOffset !== null) {
        mappingObj = {
          time: segmentInfo.startOfSegment,
          mapping: segmentInfo.startOfSegment - timingInfo.start
        };
        this.timelines[segmentInfo.timeline] = mappingObj;
        this.trigger('timestampoffset');

        this.logger_('time mapping for timeline ' + segmentInfo.timeline + ': ' + ('[time: ' + mappingObj.time + '] [mapping: ' + mappingObj.mapping + ']'));

        segment.start = segmentInfo.startOfSegment;
        segment.end = timingInfo.end + mappingObj.mapping;
      } else if (mappingObj) {
        // reuse the mapping already established for this timeline
        segment.start = timingInfo.start + mappingObj.mapping;
        segment.end = timingInfo.end + mappingObj.mapping;
      } else {
        // no mapping is available for this timeline yet
        return false;
      }

      return true;
    }
24329
    /**
     * Each time we have discontinuity in the playlist, attempt to calculate the location
     * in display of the start of the discontinuity and save that. We also save an accuracy
     * value so that we save values with the most accuracy (closest to 0.)
     *
     * @private
     * @param {SegmentInfo} segmentInfo - The current active request information
     */

  }, {
    key: 'saveDiscontinuitySyncInfo_',
    value: function saveDiscontinuitySyncInfo_(segmentInfo) {
      var playlist = segmentInfo.playlist;
      var segment = segmentInfo.segment;

      // If the current segment is a discontinuity then we know exactly where
      // the start of the range and it's accuracy is 0 (greater accuracy values
      // mean more approximation)
      if (segment.discontinuity) {
        this.discontinuities[segment.timeline] = {
          time: segment.start,
          accuracy: 0
        };
      } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
        // Search for future discontinuities that we can provide better timing
        // information for and save that information for sync purposes
        for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
          var segmentIndex = playlist.discontinuityStarts[i];
          // discontinuities are numbered sequentially after the playlist's
          // discontinuitySequence
          var discontinuity = playlist.discontinuitySequence + i + 1;
          var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
          // distance in segments from the probed segment; smaller is better
          var accuracy = Math.abs(mediaIndexDiff);

          if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
            var time = void 0;

            // estimate the discontinuity's start by summing segment durations
            // backward or forward from the probed segment
            if (mediaIndexDiff < 0) {
              time = segment.start - sumDurations(playlist, segmentInfo.mediaIndex, segmentIndex);
            } else {
              time = segment.end + sumDurations(playlist, segmentInfo.mediaIndex + 1, segmentIndex);
            }

            this.discontinuities[discontinuity] = {
              time: time,
              accuracy: accuracy
            };
          }
        }
      }
    }
  }, {
    key: 'dispose',
    value: function dispose() {
      // let listeners perform their own cleanup first
      this.trigger('dispose');
      // this.off() with no arguments - presumably removes all listeners
      // (videojs EventTarget semantics) - confirm
      this.off();
    }
24385 }]);
24386 return SyncController;
24387 }(videojs.EventTarget);
24388
24389 var Decrypter$1 = new shimWorker("./decrypter-worker.worker.js", function (window, document) {
24390 var self = this;
24391 var decrypterWorker = function () {
24392
24393 /*
24394 * pkcs7.pad
24395 * https://github.com/brightcove/pkcs7
24396 *
24397 * Copyright (c) 2014 Brightcove
24398 * Licensed under the apache2 license.
24399 */
24400
24401 /**
24402 * Returns the subarray of a Uint8Array without PKCS#7 padding.
24403 * @param padded {Uint8Array} unencrypted bytes that have been padded
24404 * @return {Uint8Array} the unpadded bytes
24405 * @see http://tools.ietf.org/html/rfc5652
24406 */
24407
24408 function unpad(padded) {
24409 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
24410 }
24411
24412 var classCallCheck = function classCallCheck(instance, Constructor) {
24413 if (!(instance instanceof Constructor)) {
24414 throw new TypeError("Cannot call a class as a function");
24415 }
24416 };
24417
24418 var createClass = function () {
24419 function defineProperties(target, props) {
24420 for (var i = 0; i < props.length; i++) {
24421 var descriptor = props[i];
24422 descriptor.enumerable = descriptor.enumerable || false;
24423 descriptor.configurable = true;
24424 if ("value" in descriptor) descriptor.writable = true;
24425 Object.defineProperty(target, descriptor.key, descriptor);
24426 }
24427 }
24428
24429 return function (Constructor, protoProps, staticProps) {
24430 if (protoProps) defineProperties(Constructor.prototype, protoProps);
24431 if (staticProps) defineProperties(Constructor, staticProps);
24432 return Constructor;
24433 };
24434 }();
24435
24436 var inherits = function inherits(subClass, superClass) {
24437 if (typeof superClass !== "function" && superClass !== null) {
24438 throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
24439 }
24440
24441 subClass.prototype = Object.create(superClass && superClass.prototype, {
24442 constructor: {
24443 value: subClass,
24444 enumerable: false,
24445 writable: true,
24446 configurable: true
24447 }
24448 });
24449 if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;
24450 };
24451
24452 var possibleConstructorReturn = function possibleConstructorReturn(self, call) {
24453 if (!self) {
24454 throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
24455 }
24456
24457 return call && (typeof call === "object" || typeof call === "function") ? call : self;
24458 };
24459
24460 /**
24461 * @file aes.js
24462 *
24463 * This file contains an adaptation of the AES decryption algorithm
24464 * from the Standford Javascript Cryptography Library. That work is
24465 * covered by the following copyright and permissions notice:
24466 *
24467 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
24468 * All rights reserved.
24469 *
24470 * Redistribution and use in source and binary forms, with or without
24471 * modification, are permitted provided that the following conditions are
24472 * met:
24473 *
24474 * 1. Redistributions of source code must retain the above copyright
24475 * notice, this list of conditions and the following disclaimer.
24476 *
24477 * 2. Redistributions in binary form must reproduce the above
24478 * copyright notice, this list of conditions and the following
24479 * disclaimer in the documentation and/or other materials provided
24480 * with the distribution.
24481 *
24482 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
24483 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
24484 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24485 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
24486 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
24487 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24488 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
24489 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24490 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
24491 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
24492 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24493 *
24494 * The views and conclusions contained in the software and documentation
24495 * are those of the authors and should not be interpreted as representing
24496 * official policies, either expressed or implied, of the authors.
24497 */
24498
24499 /**
24500 * Expand the S-box tables.
24501 *
24502 * @private
24503 */
    var precompute = function precompute() {
      // tables[0] = encryption tables, tables[1] = decryption tables;
      // index 4 of each holds the (inverse) S-box itself
      var tables = [[[], [], [], [], []], [[], [], [], [], []]];
      var encTable = tables[0];
      var decTable = tables[1];
      var sbox = encTable[4];
      var sboxInv = decTable[4];
      var i = void 0;
      var x = void 0;
      var xInv = void 0;
      var d = [];
      var th = [];
      var x2 = void 0;
      var x4 = void 0;
      var x8 = void 0;
      var s = void 0;
      var tEnc = void 0;
      var tDec = void 0;

      // Compute double and third tables
      // (283 = 0x11b; presumably the AES GF(2^8) reduction polynomial - see FIPS-197)
      for (i = 0; i < 256; i++) {
        th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
      }

      for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
        // Compute sbox
        s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
        s = s >> 8 ^ s & 255 ^ 99;
        sbox[x] = s;
        sboxInv[s] = x;

        // Compute MixColumns
        x8 = d[x4 = d[x2 = d[x]]];
        tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
        tEnc = d[s] * 0x101 ^ s * 0x1010100;

        // fill all four rotated variants of the combined tables
        for (i = 0; i < 4; i++) {
          encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
          decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
        }
      }

      // Compactify. Considerable speedup on Firefox.
      for (i = 0; i < 5; i++) {
        encTable[i] = encTable[i].slice(0);
        decTable[i] = decTable[i].slice(0);
      }
      return tables;
    };
    // lazily-populated table cache, shared by every AES instance
    var aesTables = null;
24553
24554 /**
24555 * Schedule out an AES key for both encryption and decryption. This
24556 * is a low-level class. Use a cipher mode to do bulk encryption.
24557 *
24558 * @class AES
24559 * @param key {Array} The key as an array of 4, 6 or 8 words.
24560 */
24561
    var AES = function () {
      function AES(key) {
        classCallCheck(this, AES);

        /**
         * The expanded S-box and inverse S-box tables. These will be computed
         * on the client so that we don't have to send them down the wire.
         *
         * There are two tables, _tables[0] is for encryption and
         * _tables[1] is for decryption.
         *
         * The first 4 sub-tables are the expanded S-box with MixColumns. The
         * last (_tables[01][4]) is the S-box itself.
         *
         * @private
         */
        // if we have yet to precompute the S-box tables
        // do so now
        if (!aesTables) {
          aesTables = precompute();
        }
        // then make a copy of that object for use
        this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
        var i = void 0;
        var j = void 0;
        var tmp = void 0;
        var encKey = void 0;
        var decKey = void 0;
        var sbox = this._tables[0][4];
        var decTable = this._tables[1];
        var keyLen = key.length;
        var rcon = 1;

        // keys are 4, 6, or 8 32-bit words (AES-128/192/256)
        if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
          throw new Error('Invalid aes key size');
        }

        encKey = key.slice(0);
        decKey = [];
        this._key = [encKey, decKey];

        // schedule encryption keys
        for (i = keyLen; i < 4 * keyLen + 28; i++) {
          tmp = encKey[i - 1];

          // apply sbox
          if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
            tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255];

            // shift rows and add rcon
            if (i % keyLen === 0) {
              tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
              rcon = rcon << 1 ^ (rcon >> 7) * 283;
            }
          }

          encKey[i] = encKey[i - keyLen] ^ tmp;
        }

        // schedule decryption keys by walking the encryption schedule
        // backwards and applying the inverse MixColumns transform
        for (j = 0; i; j++, i--) {
          tmp = encKey[j & 3 ? i : i - 4];
          if (i <= 4 || j < 4) {
            decKey[j] = tmp;
          } else {
            decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
          }
        }
      }

      /**
       * Decrypt 16 bytes, specified as four 32-bit words.
       *
       * @param {Number} encrypted0 the first word to decrypt
       * @param {Number} encrypted1 the second word to decrypt
       * @param {Number} encrypted2 the third word to decrypt
       * @param {Number} encrypted3 the fourth word to decrypt
       * @param {Int32Array} out the array to write the decrypted words
       * into
       * @param {Number} offset the offset into the output array to start
       * writing results
       * @return {Array} The plaintext.
       */

      AES.prototype.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
        var key = this._key[1];
        // state variables a,b,c,d are loaded with pre-whitened data
        // (note words 1 and 3 are swapped relative to the input order)
        var a = encrypted0 ^ key[0];
        var b = encrypted3 ^ key[1];
        var c = encrypted2 ^ key[2];
        var d = encrypted1 ^ key[3];
        var a2 = void 0;
        var b2 = void 0;
        var c2 = void 0;

        // key.length === 2 ?
        var nInnerRounds = key.length / 4 - 2;
        var i = void 0;
        var kIndex = 4;
        var table = this._tables[1];

        // load up the tables
        var table0 = table[0];
        var table1 = table[1];
        var table2 = table[2];
        var table3 = table[3];
        var sbox = table[4];

        // Inner rounds. Cribbed from OpenSSL.
        for (i = 0; i < nInnerRounds; i++) {
          a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
          b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
          c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
          d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
          kIndex += 4;
          a = a2;b = b2;c = c2;
        }

        // Last round.
        for (i = 0; i < 4; i++) {
          out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
          a2 = a;a = b;b = c;c = d;d = a2;
        }
      };

      return AES;
    }();
24689
24690 /**
24691 * @file stream.js
24692 */
24693 /**
 * A lightweight readable stream implementation that handles event dispatching.
24695 *
24696 * @class Stream
24697 */
24698 var Stream = function () {
24699 function Stream() {
24700 classCallCheck(this, Stream);
24701
24702 this.listeners = {};
24703 }
24704
24705 /**
24706 * Add a listener for a specified event type.
24707 *
24708 * @param {String} type the event name
24709 * @param {Function} listener the callback to be invoked when an event of
24710 * the specified type occurs
24711 */
24712
24713 Stream.prototype.on = function on(type, listener) {
24714 if (!this.listeners[type]) {
24715 this.listeners[type] = [];
24716 }
24717 this.listeners[type].push(listener);
24718 };
24719
24720 /**
24721 * Remove a listener for a specified event type.
24722 *
24723 * @param {String} type the event name
24724 * @param {Function} listener a function previously registered for this
24725 * type of event through `on`
24726 * @return {Boolean} if we could turn it off or not
24727 */
24728
24729 Stream.prototype.off = function off(type, listener) {
24730 if (!this.listeners[type]) {
24731 return false;
24732 }
24733
24734 var index = this.listeners[type].indexOf(listener);
24735
24736 this.listeners[type].splice(index, 1);
24737 return index > -1;
24738 };
24739
24740 /**
24741 * Trigger an event of the specified type on this stream. Any additional
24742 * arguments to this function are passed as parameters to event listeners.
24743 *
24744 * @param {String} type the event name
24745 */
24746
24747 Stream.prototype.trigger = function trigger(type) {
24748 var callbacks = this.listeners[type];
24749
24750 if (!callbacks) {
24751 return;
24752 }
24753
24754 // Slicing the arguments on every invocation of this method
24755 // can add a significant amount of overhead. Avoid the
24756 // intermediate object creation for the common case of a
24757 // single callback argument
24758 if (arguments.length === 2) {
24759 var length = callbacks.length;
24760
24761 for (var i = 0; i < length; ++i) {
24762 callbacks[i].call(this, arguments[1]);
24763 }
24764 } else {
24765 var args = Array.prototype.slice.call(arguments, 1);
24766 var _length = callbacks.length;
24767
24768 for (var _i = 0; _i < _length; ++_i) {
24769 callbacks[_i].apply(this, args);
24770 }
24771 }
24772 };
24773
24774 /**
24775 * Destroys the stream and cleans up.
24776 */
24777
24778 Stream.prototype.dispose = function dispose() {
24779 this.listeners = {};
24780 };
24781 /**
24782 * Forwards all `data` events on this stream to the destination stream. The
24783 * destination stream should provide a method `push` to receive the data
24784 * events as they arrive.
24785 *
24786 * @param {Stream} destination the stream that will receive all `data` events
24787 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
24788 */
24789
24790 Stream.prototype.pipe = function pipe(destination) {
24791 this.on('data', function (data) {
24792 destination.push(data);
24793 });
24794 };
24795
24796 return Stream;
24797 }();
24798
24799 /**
24800 * @file async-stream.js
24801 */
24802 /**
 * A wrapper around the Stream class to use setTimeout
24804 * and run stream "jobs" Asynchronously
24805 *
24806 * @class AsyncStream
24807 * @extends Stream
24808 */
24809
24810 var AsyncStream = function (_Stream) {
24811 inherits(AsyncStream, _Stream);
24812
24813 function AsyncStream() {
24814 classCallCheck(this, AsyncStream);
24815
24816 var _this = possibleConstructorReturn(this, _Stream.call(this, Stream));
24817
24818 _this.jobs = [];
24819 _this.delay = 1;
24820 _this.timeout_ = null;
24821 return _this;
24822 }
24823
24824 /**
24825 * process an async job
24826 *
24827 * @private
24828 */
24829
24830 AsyncStream.prototype.processJob_ = function processJob_() {
24831 this.jobs.shift()();
24832 if (this.jobs.length) {
24833 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
24834 } else {
24835 this.timeout_ = null;
24836 }
24837 };
24838
24839 /**
24840 * push a job into the stream
24841 *
24842 * @param {Function} job the job to push into the stream
24843 */
24844
24845 AsyncStream.prototype.push = function push(job) {
24846 this.jobs.push(job);
24847 if (!this.timeout_) {
24848 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
24849 }
24850 };
24851
24852 return AsyncStream;
24853 }(Stream);
24854
24855 /**
24856 * @file decrypter.js
24857 *
24858 * An asynchronous implementation of AES-128 CBC decryption with
24859 * PKCS#7 padding.
24860 */
24861
24862 /**
24863 * Convert network-order (big-endian) bytes into their little-endian
24864 * representation.
24865 */
24866 var ntoh = function ntoh(word) {
24867 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
24868 };
24869
24870 /**
24871 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
24872 *
24873 * @param {Uint8Array} encrypted the encrypted bytes
24874 * @param {Uint32Array} key the bytes of the decryption key
24875 * @param {Uint32Array} initVector the initialization vector (IV) to
24876 * use for the first round of CBC.
24877 * @return {Uint8Array} the decrypted bytes
24878 *
24879 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
24880 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
24881 * @see https://tools.ietf.org/html/rfc2315
24882 */
    var decrypt = function decrypt(encrypted, key, initVector) {
      // word-level access to the encrypted bytes
      var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);

      var decipher = new AES(Array.prototype.slice.call(key));

      // byte and word-level access for the decrypted output
      var decrypted = new Uint8Array(encrypted.byteLength);
      var decrypted32 = new Int32Array(decrypted.buffer);

      // temporary variables for working with the IV, encrypted, and
      // decrypted data
      var init0 = void 0;
      var init1 = void 0;
      var init2 = void 0;
      var init3 = void 0;
      var encrypted0 = void 0;
      var encrypted1 = void 0;
      var encrypted2 = void 0;
      var encrypted3 = void 0;

      // iteration variable
      var wordIx = void 0;

      // pull out the words of the IV to ensure we don't modify the
      // passed-in reference and easier access
      init0 = initVector[0];
      init1 = initVector[1];
      init2 = initVector[2];
      init3 = initVector[3];

      // decrypt four word sequences (one 16-byte block at a time), applying
      // cipher-block chaining (CBC) to each decrypted block
      for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
        // convert big-endian (network order) words into little-endian
        // (javascript order)
        encrypted0 = ntoh(encrypted32[wordIx]);
        encrypted1 = ntoh(encrypted32[wordIx + 1]);
        encrypted2 = ntoh(encrypted32[wordIx + 2]);
        encrypted3 = ntoh(encrypted32[wordIx + 3]);

        // decrypt the block
        decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx);

        // XOR with the IV, and restore network byte-order to obtain the
        // plaintext
        decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
        decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
        decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
        decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3);

        // setup the IV for the next round: CBC chains each block to the
        // previous block's ciphertext
        init0 = encrypted0;
        init1 = encrypted1;
        init2 = encrypted2;
        init3 = encrypted3;
      }

      return decrypted;
    };
24943
24944 /**
24945 * The `Decrypter` class that manages decryption of AES
24946 * data through `AsyncStream` objects and the `decrypt`
24947 * function
24948 *
24949 * @param {Uint8Array} encrypted the encrypted bytes
24950 * @param {Uint32Array} key the bytes of the decryption key
24951 * @param {Uint32Array} initVector the initialization vector (IV) to
24952 * @param {Function} done the function to run when done
24953 * @class Decrypter
24954 */
24955
    var Decrypter = function () {
      function Decrypter(encrypted, key, initVector, done) {
        classCallCheck(this, Decrypter);

        // NOTE(review): STEP is applied to an Int32Array, so it counts 32-bit
        // words per chunk (32000 words = 128000 bytes), despite the getter's
        // doc saying "bytes"
        var step = Decrypter.STEP;
        var encrypted32 = new Int32Array(encrypted.buffer);
        var decrypted = new Uint8Array(encrypted.byteLength);
        var i = 0;

        this.asyncStream_ = new AsyncStream();

        // split up the encryption job and do the individual chunks asynchronously
        this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
        // each subsequent chunk uses the last 4 ciphertext words of the
        // previous chunk as its IV (CBC chaining across chunk boundaries)
        for (i = step; i < encrypted32.length; i += step) {
          initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
          this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
        }
        // invoke the done() callback when everything is finished
        this.asyncStream_.push(function () {
          // remove pkcs#7 padding from the decrypted bytes
          done(null, unpad(decrypted));
        });
      }

      /**
       * a getter for step the maximum number of bytes to process at one time
       *
       * @return {Number} the value of step 32000
       */

      /**
       * Builds a job that decrypts one chunk into the shared output buffer.
       *
       * @private
       */
      Decrypter.prototype.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
        return function () {
          var bytes = decrypt(encrypted, key, initVector);

          // encrypted is a subarray, so byteOffset places the chunk's
          // plaintext at the matching position in the output
          decrypted.set(bytes, encrypted.byteOffset);
        };
      };

      createClass(Decrypter, null, [{
        key: 'STEP',
        get: function get$$1() {
          // 4 * 8000;
          return 32000;
        }
      }]);
      return Decrypter;
    }();
25006
25007 /**
25008 * @file bin-utils.js
25009 */
25010
25011 /**
25012 * Creates an object for sending to a web worker modifying properties that are TypedArrays
25013 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
25014 *
25015 * @param {Object} message
25016 * Object of properties and values to send to the web worker
25017 * @return {Object}
25018 * Modified message with TypedArray values expanded
25019 * @function createTransferableMessage
25020 */
25021 var createTransferableMessage = function createTransferableMessage(message) {
25022 var transferable = {};
25023
25024 Object.keys(message).forEach(function (key) {
25025 var value = message[key];
25026
25027 if (ArrayBuffer.isView(value)) {
25028 transferable[key] = {
25029 bytes: value.buffer,
25030 byteOffset: value.byteOffset,
25031 byteLength: value.byteLength
25032 };
25033 } else {
25034 transferable[key] = value;
25035 }
25036 });
25037
25038 return transferable;
25039 };
25040
25041 /**
25042 * Our web worker interface so that things can talk to aes-decrypter
25043 * that will be running in a web worker. the scope is passed to this by
25044 * webworkify.
25045 *
25046 * @param {Object} self
25047 * the scope for the web worker
25048 */
25049 var DecrypterWorker = function DecrypterWorker(self) {
25050 self.onmessage = function (event) {
25051 var data = event.data;
25052 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
25053 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
25054 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
25055
25056 /* eslint-disable no-new, handle-callback-err */
25057 new Decrypter(encrypted, key, iv, function (err, bytes) {
25058 self.postMessage(createTransferableMessage({
25059 source: data.source,
25060 decrypted: bytes
25061 }), [bytes.buffer]);
25062 });
25063 /* eslint-enable */
25064 };
25065 };
25066
25067 var decrypterWorker = new DecrypterWorker(self);
25068
25069 return decrypterWorker;
25070 }();
25071 });
25072
25073 /**
25074 * Convert the properties of an HLS track into an audioTrackKind.
25075 *
25076 * @private
25077 */
25078 var audioTrackKind_ = function audioTrackKind_(properties) {
25079 var kind = properties.default ? 'main' : 'alternative';
25080
25081 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
25082 kind = 'main-desc';
25083 }
25084
25085 return kind;
25086 };
25087
25088 /**
25089 * Pause provided segment loader and playlist loader if active
25090 *
25091 * @param {SegmentLoader} segmentLoader
25092 * SegmentLoader to pause
25093 * @param {Object} mediaType
25094 * Active media type
25095 * @function stopLoaders
25096 */
25097 var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
25098 segmentLoader.abort();
25099 segmentLoader.pause();
25100
25101 if (mediaType && mediaType.activePlaylistLoader) {
25102 mediaType.activePlaylistLoader.pause();
25103 mediaType.activePlaylistLoader = null;
25104 }
25105 };
25106
25107 /**
25108 * Start loading provided segment loader and playlist loader
25109 *
25110 * @param {PlaylistLoader} playlistLoader
25111 * PlaylistLoader to start loading
25112 * @param {Object} mediaType
25113 * Active media type
25114 * @function startLoaders
25115 */
25116 var startLoaders = function startLoaders(playlistLoader, mediaType) {
25117 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
25118 // playlist loader
25119 mediaType.activePlaylistLoader = playlistLoader;
25120 playlistLoader.load();
25121 };
25122
25123 /**
25124 * Returns a function to be called when the media group changes. It performs a
25125 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
25126 * change of group is merely a rendition switch of the same content at another encoding,
25127 * rather than a change of content, such as switching audio from English to Spanish.
25128 *
25129 * @param {String} type
25130 * MediaGroup type
25131 * @param {Object} settings
25132 * Object containing required information for media groups
25133 * @return {Function}
25134 * Handler for a non-destructive resync of SegmentLoader when the active media
25135 * group changes.
25136 * @function onGroupChanged
25137 */
25138 var onGroupChanged = function onGroupChanged(type, settings) {
25139 return function () {
25140 var _settings$segmentLoad = settings.segmentLoaders,
25141 segmentLoader = _settings$segmentLoad[type],
25142 mainSegmentLoader = _settings$segmentLoad.main,
25143 mediaType = settings.mediaTypes[type];
25144
25145 var activeTrack = mediaType.activeTrack();
25146 var activeGroup = mediaType.activeGroup(activeTrack);
25147 var previousActiveLoader = mediaType.activePlaylistLoader;
25148
25149 stopLoaders(segmentLoader, mediaType);
25150
25151 if (!activeGroup) {
25152 // there is no group active
25153 return;
25154 }
25155
25156 if (!activeGroup.playlistLoader) {
25157 if (previousActiveLoader) {
25158 // The previous group had a playlist loader but the new active group does not
25159 // this means we are switching from demuxed to muxed audio. In this case we want to
25160 // do a destructive reset of the main segment loader and not restart the audio
25161 // loaders.
25162 mainSegmentLoader.resetEverything();
25163 }
25164 return;
25165 }
25166
25167 // Non-destructive resync
25168 segmentLoader.resyncLoader();
25169
25170 startLoaders(activeGroup.playlistLoader, mediaType);
25171 };
25172 };
25173
25174 /**
25175 * Returns a function to be called when the media track changes. It performs a
25176 * destructive reset of the SegmentLoader to ensure we start loading as close to
25177 * currentTime as possible.
25178 *
25179 * @param {String} type
25180 * MediaGroup type
25181 * @param {Object} settings
25182 * Object containing required information for media groups
25183 * @return {Function}
25184 * Handler for a destructive reset of SegmentLoader when the active media
25185 * track changes.
25186 * @function onTrackChanged
25187 */
25188 var onTrackChanged = function onTrackChanged(type, settings) {
25189 return function () {
25190 var _settings$segmentLoad2 = settings.segmentLoaders,
25191 segmentLoader = _settings$segmentLoad2[type],
25192 mainSegmentLoader = _settings$segmentLoad2.main,
25193 mediaType = settings.mediaTypes[type];
25194
25195 var activeTrack = mediaType.activeTrack();
25196 var activeGroup = mediaType.activeGroup(activeTrack);
25197 var previousActiveLoader = mediaType.activePlaylistLoader;
25198
25199 stopLoaders(segmentLoader, mediaType);
25200
25201 if (!activeGroup) {
25202 // there is no group active so we do not want to restart loaders
25203 return;
25204 }
25205
25206 if (!activeGroup.playlistLoader) {
25207 // when switching from demuxed audio/video to muxed audio/video (noted by no playlist
25208 // loader for the audio group), we want to do a destructive reset of the main segment
25209 // loader and not restart the audio loaders
25210 mainSegmentLoader.resetEverything();
25211 return;
25212 }
25213
25214 if (previousActiveLoader === activeGroup.playlistLoader) {
25215 // Nothing has actually changed. This can happen because track change events can fire
25216 // multiple times for a "single" change. One for enabling the new active track, and
25217 // one for disabling the track that was active
25218 startLoaders(activeGroup.playlistLoader, mediaType);
25219 return;
25220 }
25221
25222 if (segmentLoader.track) {
25223 // For WebVTT, set the new text track in the segmentloader
25224 segmentLoader.track(activeTrack);
25225 }
25226
25227 // destructive reset
25228 segmentLoader.resetEverything();
25229
25230 startLoaders(activeGroup.playlistLoader, mediaType);
25231 };
25232 };
25233
25234 var onError = {
25235 /**
25236 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
25237 * an error.
25238 *
25239 * @param {String} type
25240 * MediaGroup type
25241 * @param {Object} settings
25242 * Object containing required information for media groups
25243 * @return {Function}
25244 * Error handler. Logs warning (or error if the playlist is blacklisted) to
25245 * console and switches back to default audio track.
25246 * @function onError.AUDIO
25247 */
25248 AUDIO: function AUDIO(type, settings) {
25249 return function () {
25250 var segmentLoader = settings.segmentLoaders[type],
25251 mediaType = settings.mediaTypes[type],
25252 blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
25253
25254
25255 stopLoaders(segmentLoader, mediaType);
25256
25257 // switch back to default audio track
25258 var activeTrack = mediaType.activeTrack();
25259 var activeGroup = mediaType.activeGroup();
25260 var id = (activeGroup.filter(function (group) {
25261 return group.default;
25262 })[0] || activeGroup[0]).id;
25263 var defaultTrack = mediaType.tracks[id];
25264
25265 if (activeTrack === defaultTrack) {
25266 // Default track encountered an error. All we can do now is blacklist the current
25267 // rendition and hope another will switch audio groups
25268 blacklistCurrentPlaylist({
25269 message: 'Problem encountered loading the default audio track.'
25270 });
25271 return;
25272 }
25273
25274 videojs.log.warn('Problem encountered loading the alternate audio track.' + 'Switching back to default.');
25275
25276 for (var trackId in mediaType.tracks) {
25277 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
25278 }
25279
25280 mediaType.onTrackChanged();
25281 };
25282 },
25283 /**
25284 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
25285 * an error.
25286 *
25287 * @param {String} type
25288 * MediaGroup type
25289 * @param {Object} settings
25290 * Object containing required information for media groups
25291 * @return {Function}
25292 * Error handler. Logs warning to console and disables the active subtitle track
25293 * @function onError.SUBTITLES
25294 */
25295 SUBTITLES: function SUBTITLES(type, settings) {
25296 return function () {
25297 var segmentLoader = settings.segmentLoaders[type],
25298 mediaType = settings.mediaTypes[type];
25299
25300
25301 videojs.log.warn('Problem encountered loading the subtitle track.' + 'Disabling subtitle track.');
25302
25303 stopLoaders(segmentLoader, mediaType);
25304
25305 var track = mediaType.activeTrack();
25306
25307 if (track) {
25308 track.mode = 'disabled';
25309 }
25310
25311 mediaType.onTrackChanged();
25312 };
25313 }
25314 };
25315
25316 var setupListeners = {
25317 /**
25318 * Setup event listeners for audio playlist loader
25319 *
25320 * @param {String} type
25321 * MediaGroup type
25322 * @param {PlaylistLoader|null} playlistLoader
25323 * PlaylistLoader to register listeners on
25324 * @param {Object} settings
25325 * Object containing required information for media groups
25326 * @function setupListeners.AUDIO
25327 */
25328 AUDIO: function AUDIO(type, playlistLoader, settings) {
25329 if (!playlistLoader) {
25330 // no playlist loader means audio will be muxed with the video
25331 return;
25332 }
25333
25334 var tech = settings.tech,
25335 requestOptions = settings.requestOptions,
25336 segmentLoader = settings.segmentLoaders[type];
25337
25338
25339 playlistLoader.on('loadedmetadata', function () {
25340 var media = playlistLoader.media();
25341
25342 segmentLoader.playlist(media, requestOptions);
25343
25344 // if the video is already playing, or if this isn't a live video and preload
25345 // permits, start downloading segments
25346 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
25347 segmentLoader.load();
25348 }
25349 });
25350
25351 playlistLoader.on('loadedplaylist', function () {
25352 segmentLoader.playlist(playlistLoader.media(), requestOptions);
25353
25354 // If the player isn't paused, ensure that the segment loader is running
25355 if (!tech.paused()) {
25356 segmentLoader.load();
25357 }
25358 });
25359
25360 playlistLoader.on('error', onError[type](type, settings));
25361 },
25362 /**
25363 * Setup event listeners for subtitle playlist loader
25364 *
25365 * @param {String} type
25366 * MediaGroup type
25367 * @param {PlaylistLoader|null} playlistLoader
25368 * PlaylistLoader to register listeners on
25369 * @param {Object} settings
25370 * Object containing required information for media groups
25371 * @function setupListeners.SUBTITLES
25372 */
25373 SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
25374 var tech = settings.tech,
25375 requestOptions = settings.requestOptions,
25376 segmentLoader = settings.segmentLoaders[type],
25377 mediaType = settings.mediaTypes[type];
25378
25379
25380 playlistLoader.on('loadedmetadata', function () {
25381 var media = playlistLoader.media();
25382
25383 segmentLoader.playlist(media, requestOptions);
25384 segmentLoader.track(mediaType.activeTrack());
25385
25386 // if the video is already playing, or if this isn't a live video and preload
25387 // permits, start downloading segments
25388 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
25389 segmentLoader.load();
25390 }
25391 });
25392
25393 playlistLoader.on('loadedplaylist', function () {
25394 segmentLoader.playlist(playlistLoader.media(), requestOptions);
25395
25396 // If the player isn't paused, ensure that the segment loader is running
25397 if (!tech.paused()) {
25398 segmentLoader.load();
25399 }
25400 });
25401
25402 playlistLoader.on('error', onError[type](type, settings));
25403 }
25404 };
25405
  var initialize = {
    /**
     * Setup PlaylistLoaders and AudioTracks for the audio groups
     *
     * @param {String} type
     *        MediaGroup type
     * @param {Object} settings
     *        Object containing required information for media groups
     * @function initialize.AUDIO
     */
    'AUDIO': function AUDIO(type, settings) {
      var hls = settings.hls,
          sourceType = settings.sourceType,
          segmentLoader = settings.segmentLoaders[type],
          requestOptions = settings.requestOptions,
          mediaGroups = settings.master.mediaGroups,
          _settings$mediaTypes$ = settings.mediaTypes[type],
          groups = _settings$mediaTypes$.groups,
          tracks = _settings$mediaTypes$.tracks,
          masterPlaylistLoader = settings.masterPlaylistLoader;

      // force a default if we have none

      if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
        mediaGroups[type] = { main: { default: { default: true } } };
      }

      for (var groupId in mediaGroups[type]) {
        if (!groups[groupId]) {
          groups[groupId] = [];
        }

        // List of playlists that have an AUDIO attribute value matching the current
        // group ID

        for (var variantLabel in mediaGroups[type][groupId]) {
          var properties = mediaGroups[type][groupId][variantLabel];
          var playlistLoader = void 0;

          // choose a playlist loader based on how this variant is delivered
          if (properties.resolvedUri) {
            playlistLoader = new PlaylistLoader(properties.resolvedUri, hls, requestOptions);
          } else if (properties.playlists && sourceType === 'dash') {
            playlistLoader = new DashPlaylistLoader(properties.playlists[0], hls, requestOptions, masterPlaylistLoader);
          } else {
            // no resolvedUri means the audio is muxed with the video when using this
            // audio track
            playlistLoader = null;
          }

          // attach the id and loader to a copy of the variant's properties
          properties = videojs.mergeOptions({ id: variantLabel, playlistLoader: playlistLoader }, properties);

          setupListeners[type](type, properties.playlistLoader, settings);

          groups[groupId].push(properties);

          // create the AudioTrack once per variant label (labels may repeat
          // across groups); tracks start disabled and are enabled later by
          // setupMediaGroups
          if (typeof tracks[variantLabel] === 'undefined') {
            var track = new videojs.AudioTrack({
              id: variantLabel,
              kind: audioTrackKind_(properties),
              enabled: false,
              language: properties.language,
              default: properties.default,
              label: variantLabel
            });

            tracks[variantLabel] = track;
          }
        }
      }

      // setup single error event handler for the segment loader
      segmentLoader.on('error', onError[type](type, settings));
    },
    /**
     * Setup PlaylistLoaders and TextTracks for the subtitle groups
     *
     * @param {String} type
     *        MediaGroup type
     * @param {Object} settings
     *        Object containing required information for media groups
     * @function initialize.SUBTITLES
     */
    'SUBTITLES': function SUBTITLES(type, settings) {
      var tech = settings.tech,
          hls = settings.hls,
          sourceType = settings.sourceType,
          segmentLoader = settings.segmentLoaders[type],
          requestOptions = settings.requestOptions,
          mediaGroups = settings.master.mediaGroups,
          _settings$mediaTypes$2 = settings.mediaTypes[type],
          groups = _settings$mediaTypes$2.groups,
          tracks = _settings$mediaTypes$2.tracks,
          masterPlaylistLoader = settings.masterPlaylistLoader;


      for (var groupId in mediaGroups[type]) {
        if (!groups[groupId]) {
          groups[groupId] = [];
        }

        for (var variantLabel in mediaGroups[type][groupId]) {
          if (mediaGroups[type][groupId][variantLabel].forced) {
            // Subtitle playlists with the forced attribute are not selectable in Safari.
            // According to Apple's HLS Authoring Specification:
            //   If content has forced subtitles and regular subtitles in a given language,
            //   the regular subtitles track in that language MUST contain both the forced
            //   subtitles and the regular subtitles for that language.
            // Because of this requirement and that Safari does not add forced subtitles,
            // forced subtitles are skipped here to maintain consistent experience across
            // all platforms
            continue;
          }

          var properties = mediaGroups[type][groupId][variantLabel];

          var playlistLoader = void 0;

          if (sourceType === 'hls') {
            playlistLoader = new PlaylistLoader(properties.resolvedUri, hls, requestOptions);
          } else if (sourceType === 'dash') {
            playlistLoader = new DashPlaylistLoader(properties.playlists[0], hls, requestOptions, masterPlaylistLoader);
          }

          // attach the id and loader to a copy of the variant's properties
          properties = videojs.mergeOptions({
            id: variantLabel,
            playlistLoader: playlistLoader
          }, properties);

          setupListeners[type](type, properties.playlistLoader, settings);

          groups[groupId].push(properties);

          // register a remote TextTrack with the tech once per variant label
          if (typeof tracks[variantLabel] === 'undefined') {
            var track = tech.addRemoteTextTrack({
              id: variantLabel,
              kind: 'subtitles',
              default: properties.default && properties.autoselect,
              language: properties.language,
              label: variantLabel
            }, false).track;

            tracks[variantLabel] = track;
          }
        }
      }

      // setup single error event handler for the segment loader
      segmentLoader.on('error', onError[type](type, settings));
    },
    /**
     * Setup TextTracks for the closed-caption groups
     *
     * @param {String} type
     *        MediaGroup type
     * @param {Object} settings
     *        Object containing required information for media groups
     * @function initialize['CLOSED-CAPTIONS']
     */
    'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
      var tech = settings.tech,
          mediaGroups = settings.master.mediaGroups,
          _settings$mediaTypes$3 = settings.mediaTypes[type],
          groups = _settings$mediaTypes$3.groups,
          tracks = _settings$mediaTypes$3.tracks;


      for (var groupId in mediaGroups[type]) {
        if (!groups[groupId]) {
          groups[groupId] = [];
        }

        for (var variantLabel in mediaGroups[type][groupId]) {
          var properties = mediaGroups[type][groupId][variantLabel];

          // We only support CEA608 captions for now, so ignore anything that
          // doesn't use a CCx INSTREAM-ID
          if (!properties.instreamId.match(/CC\d/)) {
            continue;
          }

          // No PlaylistLoader is required for Closed-Captions because the captions are
          // embedded within the video stream
          groups[groupId].push(videojs.mergeOptions({ id: variantLabel }, properties));

          if (typeof tracks[variantLabel] === 'undefined') {
            var track = tech.addRemoteTextTrack({
              id: properties.instreamId,
              kind: 'captions',
              default: properties.default && properties.autoselect,
              language: properties.language,
              label: variantLabel
            }, false).track;

            tracks[variantLabel] = track;
          }
        }
      }
    }
  };
25605
25606 /**
25607 * Returns a function used to get the active group of the provided type
25608 *
25609 * @param {String} type
25610 * MediaGroup type
25611 * @param {Object} settings
25612 * Object containing required information for media groups
25613 * @return {Function}
25614 * Function that returns the active media group for the provided type. Takes an
25615 * optional parameter {TextTrack} track. If no track is provided, a list of all
25616 * variants in the group, otherwise the variant corresponding to the provided
25617 * track is returned.
25618 * @function activeGroup
25619 */
25620 var activeGroup = function activeGroup(type, settings) {
25621 return function (track) {
25622 var masterPlaylistLoader = settings.masterPlaylistLoader,
25623 groups = settings.mediaTypes[type].groups;
25624
25625
25626 var media = masterPlaylistLoader.media();
25627
25628 if (!media) {
25629 return null;
25630 }
25631
25632 var variants = null;
25633
25634 if (media.attributes[type]) {
25635 variants = groups[media.attributes[type]];
25636 }
25637
25638 variants = variants || groups.main;
25639
25640 if (typeof track === 'undefined') {
25641 return variants;
25642 }
25643
25644 if (track === null) {
25645 // An active track was specified so a corresponding group is expected. track === null
25646 // means no track is currently active so there is no corresponding group
25647 return null;
25648 }
25649
25650 return variants.filter(function (props) {
25651 return props.id === track.id;
25652 })[0] || null;
25653 };
25654 };
25655
25656 var activeTrack = {
25657 /**
25658 * Returns a function used to get the active track of type provided
25659 *
25660 * @param {String} type
25661 * MediaGroup type
25662 * @param {Object} settings
25663 * Object containing required information for media groups
25664 * @return {Function}
25665 * Function that returns the active media track for the provided type. Returns
25666 * null if no track is active
25667 * @function activeTrack.AUDIO
25668 */
25669 AUDIO: function AUDIO(type, settings) {
25670 return function () {
25671 var tracks = settings.mediaTypes[type].tracks;
25672
25673
25674 for (var id in tracks) {
25675 if (tracks[id].enabled) {
25676 return tracks[id];
25677 }
25678 }
25679
25680 return null;
25681 };
25682 },
25683 /**
25684 * Returns a function used to get the active track of type provided
25685 *
25686 * @param {String} type
25687 * MediaGroup type
25688 * @param {Object} settings
25689 * Object containing required information for media groups
25690 * @return {Function}
25691 * Function that returns the active media track for the provided type. Returns
25692 * null if no track is active
25693 * @function activeTrack.SUBTITLES
25694 */
25695 SUBTITLES: function SUBTITLES(type, settings) {
25696 return function () {
25697 var tracks = settings.mediaTypes[type].tracks;
25698
25699
25700 for (var id in tracks) {
25701 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
25702 return tracks[id];
25703 }
25704 }
25705
25706 return null;
25707 };
25708 }
25709 };
25710
25711 /**
25712 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
25713 * Closed-Captions) specified in the master manifest.
25714 *
25715 * @param {Object} settings
25716 * Object containing required information for setting up the media groups
25717 * @param {SegmentLoader} settings.segmentLoaders.AUDIO
25718 * Audio segment loader
25719 * @param {SegmentLoader} settings.segmentLoaders.SUBTITLES
25720 * Subtitle segment loader
25721 * @param {SegmentLoader} settings.segmentLoaders.main
25722 * Main segment loader
25723 * @param {Tech} settings.tech
25724 * The tech of the player
25725 * @param {Object} settings.requestOptions
25726 * XHR request options used by the segment loaders
25727 * @param {PlaylistLoader} settings.masterPlaylistLoader
25728 * PlaylistLoader for the master source
25729 * @param {HlsHandler} settings.hls
25730 * HLS SourceHandler
25731 * @param {Object} settings.master
25732 * The parsed master manifest
25733 * @param {Object} settings.mediaTypes
25734 * Object to store the loaders, tracks, and utility methods for each media type
25735 * @param {Function} settings.blacklistCurrentPlaylist
25736 * Blacklists the current rendition and forces a rendition switch.
25737 * @function setupMediaGroups
25738 */
25739 var setupMediaGroups = function setupMediaGroups(settings) {
25740 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
25741 initialize[type](type, settings);
25742 });
25743
25744 var mediaTypes = settings.mediaTypes,
25745 masterPlaylistLoader = settings.masterPlaylistLoader,
25746 tech = settings.tech,
25747 hls = settings.hls;
25748
25749 // setup active group and track getters and change event handlers
25750
25751 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
25752 mediaTypes[type].activeGroup = activeGroup(type, settings);
25753 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
25754 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
25755 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
25756 });
25757
25758 // DO NOT enable the default subtitle or caption track.
25759 // DO enable the default audio track
25760 var audioGroup = mediaTypes.AUDIO.activeGroup();
25761 var groupId = (audioGroup.filter(function (group) {
25762 return group.default;
25763 })[0] || audioGroup[0]).id;
25764
25765 mediaTypes.AUDIO.tracks[groupId].enabled = true;
25766 mediaTypes.AUDIO.onTrackChanged();
25767
25768 masterPlaylistLoader.on('mediachange', function () {
25769 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
25770 return mediaTypes[type].onGroupChanged();
25771 });
25772 });
25773
25774 // custom audio track change event handler for usage event
25775 var onAudioTrackChanged = function onAudioTrackChanged() {
25776 mediaTypes.AUDIO.onTrackChanged();
25777 tech.trigger({ type: 'usage', name: 'hls-audio-change' });
25778 };
25779
25780 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
25781 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
25782
25783 hls.on('dispose', function () {
25784 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
25785 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
25786 });
25787
25788 // clear existing audio tracks and add the ones we just created
25789 tech.clearTracks('audio');
25790
25791 for (var id in mediaTypes.AUDIO.tracks) {
25792 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
25793 }
25794 };
25795
25796 /**
25797 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
25798 * media type
25799 *
25800 * @return {Object}
25801 * Object to store the loaders, tracks, and utility methods for each media type
25802 * @function createMediaTypes
25803 */
25804 var createMediaTypes = function createMediaTypes() {
25805 var mediaTypes = {};
25806
25807 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
25808 mediaTypes[type] = {
25809 groups: {},
25810 tracks: {},
25811 activePlaylistLoader: null,
25812 activeGroup: noop,
25813 activeTrack: noop,
25814 onGroupChanged: noop,
25815 onTrackChanged: noop
25816 };
25817 });
25818
25819 return mediaTypes;
25820 };
25821
25822 /**
25823 * @file master-playlist-controller.js
25824 */
25825
  // Blacklist duration (in seconds) applied after aborting early — 2 minutes.
  var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;

  // Module-level reference to the extern Hls object; assigned from
  // options.externHls in the MasterPlaylistController constructor below.
  var Hls = void 0;

  // SegmentLoader stats that need to have each loader's
  // values summed to calculate the final value
  var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred'];
  // Sums one stat across the audio and main segment loaders. Must be invoked
  // with `this` bound to a MasterPlaylistController (reads
  // this.audioSegmentLoader_ and this.mainSegmentLoader_).
  var sumLoaderStat = function sumLoaderStat(stat) {
    return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
  };
25836 var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
25837 var currentPlaylist = _ref.currentPlaylist,
25838 nextPlaylist = _ref.nextPlaylist,
25839 forwardBuffer = _ref.forwardBuffer,
25840 bufferLowWaterLine = _ref.bufferLowWaterLine,
25841 duration$$1 = _ref.duration,
25842 log = _ref.log;
25843
25844 // we have no other playlist to switch to
25845 if (!nextPlaylist) {
25846 videojs.log.warn('We received no playlist to switch to. Please check your stream.');
25847 return false;
25848 }
25849
25850 // If the playlist is live, then we want to not take low water line into account.
25851 // This is because in LIVE, the player plays 3 segments from the end of the
25852 // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration availble
25853 // in those segments, a viewer will never experience a rendition upswitch.
25854 if (!currentPlaylist.endList) {
25855 return true;
25856 }
25857
25858 // For the same reason as LIVE, we ignore the low water line when the VOD
25859 // duration is below the max potential low water line
25860 if (duration$$1 < Config.MAX_BUFFER_LOW_WATER_LINE) {
25861 return true;
25862 }
25863
25864 // we want to switch down to lower resolutions quickly to continue playback, but
25865 if (nextPlaylist.attributes.BANDWIDTH < currentPlaylist.attributes.BANDWIDTH) {
25866 return true;
25867 }
25868
25869 // ensure we have some buffer before we switch up to prevent us running out of
25870 // buffer while loading a higher rendition.
25871 if (forwardBuffer >= bufferLowWaterLine) {
25872 return true;
25873 }
25874
25875 return false;
25876 };
25877
25878 /**
25879 * the master playlist controller controller all interactons
25880 * between playlists and segmentloaders. At this time this mainly
25881 * involves a master playlist and a series of audio playlists
25882 * if they are available
25883 *
25884 * @class MasterPlaylistController
25885 * @extends videojs.EventTarget
25886 */
25887 var MasterPlaylistController = function (_videojs$EventTarget) {
25888 inherits(MasterPlaylistController, _videojs$EventTarget);
25889
  /**
   * Construct the controller: wire up the master playlist loader, the media
   * source, and the three segment loaders (main, alternate audio, subtitles),
   * then start loading the master playlist.
   *
   * @param {Object} options
   * @param {string} options.url URL of the master playlist (required)
   * @param {Object} options.tech the playback tech this controller drives
   * @param {Object} options.externHls the Hls namespace (stored module-wide)
   * @throws {Error} when `options.url` is falsy
   */
  function MasterPlaylistController(options) {
    classCallCheck(this, MasterPlaylistController);

    // transpiled `super()` call into videojs.EventTarget
    var _this = possibleConstructorReturn(this, (MasterPlaylistController.__proto__ || Object.getPrototypeOf(MasterPlaylistController)).call(this));

    var url = options.url,
        handleManifestRedirects = options.handleManifestRedirects,
        withCredentials = options.withCredentials,
        tech = options.tech,
        bandwidth = options.bandwidth,
        externHls = options.externHls,
        useCueTags = options.useCueTags,
        blacklistDuration = options.blacklistDuration,
        enableLowInitialPlaylist = options.enableLowInitialPlaylist,
        cacheEncryptionKeys = options.cacheEncryptionKeys,
        sourceType = options.sourceType;


    if (!url) {
      throw new Error('A non-empty playlist URL is required');
    }

    // stash the Hls namespace module-wide; the other methods on this class
    // reference it via the shared `Hls` binding
    Hls = externHls;

    _this.withCredentials = withCredentials;
    _this.tech_ = tech;
    _this.hls_ = tech.hls;
    _this.sourceType_ = sourceType;
    _this.useCueTags_ = useCueTags;
    _this.blacklistDuration = blacklistDuration;
    _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
    // when cue tags are enabled, playlist ad cues are surfaced on a dedicated
    // metadata text track (see updateAdCues_)
    if (_this.useCueTags_) {
      _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
      _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
    }

    // shared XHR options for playlist/segment requests; `timeout` stays null
    // until a media playlist's target duration is known (see the
    // loadedmetadata/mediachange handlers)
    _this.requestOptions_ = {
      withCredentials: withCredentials,
      handleManifestRedirects: handleManifestRedirects,
      timeout: null
    };

    _this.mediaTypes_ = createMediaTypes();

    _this.mediaSource = new videojs.MediaSource();

    // load the media source into the player
    _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_.bind(_this));

    _this.seekable_ = videojs.createTimeRanges();
    _this.hasPlayed_ = false;

    _this.syncController_ = new SyncController(options);
    // segment timing metadata is exposed to the page on a remote text track
    _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
      kind: 'metadata',
      label: 'segment-metadata'
    }, false).track;

    _this.decrypter_ = new Decrypter$1();
    _this.inbandTextTracks_ = {};

    // settings shared by all three segment loaders; dynamic values are wrapped
    // in functions so each loader always observes current controller state
    var segmentLoaderSettings = {
      hls: _this.hls_,
      mediaSource: _this.mediaSource,
      currentTime: _this.tech_.currentTime.bind(_this.tech_),
      seekable: function seekable$$1() {
        return _this.seekable();
      },
      seeking: function seeking() {
        return _this.tech_.seeking();
      },
      duration: function duration$$1() {
        return _this.mediaSource.duration;
      },
      hasPlayed: function hasPlayed() {
        return _this.hasPlayed_;
      },
      goalBufferLength: function goalBufferLength() {
        return _this.goalBufferLength();
      },
      bandwidth: bandwidth,
      syncController: _this.syncController_,
      decrypter: _this.decrypter_,
      sourceType: _this.sourceType_,
      inbandTextTracks: _this.inbandTextTracks_,
      cacheEncryptionKeys: cacheEncryptionKeys
    };

    // DASH manifests and HLS playlists use different loader implementations
    _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(url, _this.hls_, _this.requestOptions_) : new PlaylistLoader(url, _this.hls_, _this.requestOptions_);
    _this.setupMasterPlaylistLoaderListeners_();

    // setup segment loaders
    // combined audio/video or just video when alternate audio track is selected
    _this.mainSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      segmentMetadataTrack: _this.segmentMetadataTrack_,
      loaderType: 'main'
    }), options);

    // alternate audio track
    _this.audioSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      loaderType: 'audio'
    }), options);

    _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      loaderType: 'vtt',
      featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
    }), options);

    _this.setupSegmentLoaderListeners_();

    // Create SegmentLoader stat-getters
    loaderStats.forEach(function (stat) {
      _this[stat + '_'] = sumLoaderStat.bind(_this, stat);
    });

    _this.logger_ = logger('MPC');

    // begin fetching the master playlist right away
    _this.masterPlaylistLoader_.load();
    return _this;
  }
26010
26011 /**
26012 * Register event handlers on the master playlist loader. A helper
26013 * function for construction time.
26014 *
26015 * @private
26016 */
26017
26018
26019 createClass(MasterPlaylistController, [{
26020 key: 'setupMasterPlaylistLoaderListeners_',
      value: function setupMasterPlaylistLoaderListeners_() {
        var _this2 = this;

        // fired once the initial media playlist has been parsed: configure
        // request timeouts, optionally start preloading, and set up the
        // alternate media groups and source buffers
        this.masterPlaylistLoader_.on('loadedmetadata', function () {
          var media = _this2.masterPlaylistLoader_.media();
          // request timeout scales with segment length (1.5x target duration)
          var requestTimeout = media.targetDuration * 1.5 * 1000;

          // If we don't have any more available playlists, we don't want to
          // timeout the request.
          if (isLowestEnabledRendition(_this2.masterPlaylistLoader_.master, _this2.masterPlaylistLoader_.media())) {
            _this2.requestOptions_.timeout = 0;
          } else {
            _this2.requestOptions_.timeout = requestTimeout;
          }

          // if this isn't a live video and preload permits, start
          // downloading segments
          if (media.endList && _this2.tech_.preload() !== 'none') {
            _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);
            _this2.mainSegmentLoader_.load();
          }

          setupMediaGroups({
            sourceType: _this2.sourceType_,
            segmentLoaders: {
              AUDIO: _this2.audioSegmentLoader_,
              SUBTITLES: _this2.subtitleSegmentLoader_,
              main: _this2.mainSegmentLoader_
            },
            tech: _this2.tech_,
            requestOptions: _this2.requestOptions_,
            masterPlaylistLoader: _this2.masterPlaylistLoader_,
            hls: _this2.hls_,
            master: _this2.master(),
            mediaTypes: _this2.mediaTypes_,
            blacklistCurrentPlaylist: _this2.blacklistCurrentPlaylist.bind(_this2)
          });

          _this2.triggerPresenceUsage_(_this2.master(), media);

          try {
            _this2.setupSourceBuffers_();
          } catch (e) {
            // buffer creation failure is unrecoverable for this source
            videojs.log.warn('Failed to create SourceBuffers', e);
            return _this2.mediaSource.endOfStream('decode');
          }
          _this2.setupFirstPlay();

          if (!_this2.mediaTypes_.AUDIO.activePlaylistLoader || _this2.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
            _this2.trigger('selectedinitialmedia');
          } else {
            // We must wait for the active audio playlist loader to
            // finish setting up before triggering this event so the
            // representations API and EME setup is correct
            _this2.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
              _this2.trigger('selectedinitialmedia');
            });
          }
        });

        // fired each time a playlist finishes loading (master or a media
        // playlist refresh)
        this.masterPlaylistLoader_.on('loadedplaylist', function () {
          var updatedPlaylist = _this2.masterPlaylistLoader_.media();

          if (!updatedPlaylist) {
            // no media selected yet: this was the master playlist load
            // blacklist any variants that are not supported by the browser before selecting
            // an initial media as the playlist selectors do not consider browser support
            _this2.excludeUnsupportedVariants_();

            var selectedMedia = void 0;

            if (_this2.enableLowInitialPlaylist) {
              selectedMedia = _this2.selectInitialPlaylist();
            }

            if (!selectedMedia) {
              selectedMedia = _this2.selectPlaylist();
            }

            _this2.initialMedia_ = selectedMedia;
            _this2.masterPlaylistLoader_.media(_this2.initialMedia_);
            return;
          }

          if (_this2.useCueTags_) {
            _this2.updateAdCues_(updatedPlaylist);
          }

          // TODO: Create a new event on the PlaylistLoader that signals
          // that the segments have changed in some way and use that to
          // update the SegmentLoader instead of doing it twice here and
          // on `mediachange`
          _this2.mainSegmentLoader_.playlist(updatedPlaylist, _this2.requestOptions_);
          _this2.updateDuration();

          // If the player isn't paused, ensure that the segment loader is running,
          // as it is possible that it was temporarily stopped while waiting for
          // a playlist (e.g., in case the playlist errored and we re-requested it).
          if (!_this2.tech_.paused()) {
            _this2.mainSegmentLoader_.load();
            if (_this2.audioSegmentLoader_) {
              _this2.audioSegmentLoader_.load();
            }
          }

          // for live streams, publish the seekable window to the media source
          if (!updatedPlaylist.endList) {
            var addSeekableRange = function addSeekableRange() {
              var seekable$$1 = _this2.seekable();

              if (seekable$$1.length !== 0) {
                _this2.mediaSource.addSeekableRange_(seekable$$1.start(0), seekable$$1.end(0));
              }
            };

            if (_this2.duration() !== Infinity) {
              // the duration hasn't flipped to Infinity (live) yet; re-arm on
              // every durationchange until it does, then add the range
              var onDurationchange = function onDurationchange() {
                if (_this2.duration() === Infinity) {
                  addSeekableRange();
                } else {
                  _this2.tech_.one('durationchange', onDurationchange);
                }
              };

              _this2.tech_.one('durationchange', onDurationchange);
            } else {
              addSeekableRange();
            }
          }
        });

        // a playlist request failed: blacklist the offending playlist
        this.masterPlaylistLoader_.on('error', function () {
          _this2.blacklistCurrentPlaylist(_this2.masterPlaylistLoader_.error);
        });

        // a rendition switch is starting: stop in-flight main segment work
        this.masterPlaylistLoader_.on('mediachanging', function () {
          _this2.mainSegmentLoader_.abort();
          _this2.mainSegmentLoader_.pause();
        });

        // a rendition switch finished: retarget the main segment loader
        this.masterPlaylistLoader_.on('mediachange', function () {
          var media = _this2.masterPlaylistLoader_.media();
          var requestTimeout = media.targetDuration * 1.5 * 1000;

          // If we don't have any more available playlists, we don't want to
          // timeout the request.
          if (isLowestEnabledRendition(_this2.masterPlaylistLoader_.master, _this2.masterPlaylistLoader_.media())) {
            _this2.requestOptions_.timeout = 0;
          } else {
            _this2.requestOptions_.timeout = requestTimeout;
          }

          // TODO: Create a new event on the PlaylistLoader that signals
          // that the segments have changed in some way and use that to
          // update the SegmentLoader instead of doing it twice here and
          // on `loadedplaylist`
          _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);

          _this2.mainSegmentLoader_.load();

          _this2.tech_.trigger({
            type: 'mediachange',
            bubbles: true
          });
        });

        // a live playlist refresh produced no new segments; check whether
        // playback has consequently stalled at the playlist end
        this.masterPlaylistLoader_.on('playlistunchanged', function () {
          var updatedPlaylist = _this2.masterPlaylistLoader_.media();
          var playlistOutdated = _this2.stuckAtPlaylistEnd_(updatedPlaylist);

          if (playlistOutdated) {
            // Playlist has stopped updating and we're stuck at its end. Try to
            // blacklist it and switch to another playlist in the hope that that
            // one is updating (and give the player a chance to re-adjust to the
            // safe live point).
            _this2.blacklistCurrentPlaylist({
              message: 'Playlist no longer updating.'
            });
            // useful for monitoring QoS
            _this2.tech_.trigger('playliststuck');
          }
        });

        // surface rendition enable/disable as usage events for analytics
        this.masterPlaylistLoader_.on('renditiondisabled', function () {
          _this2.tech_.trigger({ type: 'usage', name: 'hls-rendition-disabled' });
        });
        this.masterPlaylistLoader_.on('renditionenabled', function () {
          _this2.tech_.trigger({ type: 'usage', name: 'hls-rendition-enabled' });
        });
      }
26209
26210 /**
26211 * A helper function for triggering presence usage events once per source
26212 *
26213 * @private
26214 */
26215
26216 }, {
26217 key: 'triggerPresenceUsage_',
26218 value: function triggerPresenceUsage_(master, media) {
26219 var mediaGroups = master.mediaGroups || {};
26220 var defaultDemuxed = true;
26221 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
26222
26223 for (var mediaGroup in mediaGroups.AUDIO) {
26224 for (var label in mediaGroups.AUDIO[mediaGroup]) {
26225 var properties = mediaGroups.AUDIO[mediaGroup][label];
26226
26227 if (!properties.uri) {
26228 defaultDemuxed = false;
26229 }
26230 }
26231 }
26232
26233 if (defaultDemuxed) {
26234 this.tech_.trigger({ type: 'usage', name: 'hls-demuxed' });
26235 }
26236
26237 if (Object.keys(mediaGroups.SUBTITLES).length) {
26238 this.tech_.trigger({ type: 'usage', name: 'hls-webvtt' });
26239 }
26240
26241 if (Hls.Playlist.isAes(media)) {
26242 this.tech_.trigger({ type: 'usage', name: 'hls-aes' });
26243 }
26244
26245 if (Hls.Playlist.isFmp4(media)) {
26246 this.tech_.trigger({ type: 'usage', name: 'hls-fmp4' });
26247 }
26248
26249 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
26250 this.tech_.trigger({ type: 'usage', name: 'hls-alternate-audio' });
26251 }
26252
26253 if (this.useCueTags_) {
26254 this.tech_.trigger({ type: 'usage', name: 'hls-playlist-cue-tags' });
26255 }
26256 }
26257 /**
26258 * Register event handlers on the segment loaders. A helper function
26259 * for construction time.
26260 *
26261 * @private
26262 */
26263
26264 }, {
26265 key: 'setupSegmentLoaderListeners_',
      value: function setupSegmentLoaderListeners_() {
        var _this3 = this;

        // after each segment download the bandwidth estimate changes; re-run
        // playlist selection and switch renditions when warranted
        this.mainSegmentLoader_.on('bandwidthupdate', function () {
          var nextPlaylist = _this3.selectPlaylist();
          var currentPlaylist = _this3.masterPlaylistLoader_.media();
          var buffered = _this3.tech_.buffered();
          // seconds of buffered content ahead of the playhead (0 when nothing
          // is buffered)
          var forwardBuffer = buffered.length ? buffered.end(buffered.length - 1) - _this3.tech_.currentTime() : 0;

          var bufferLowWaterLine = _this3.bufferLowWaterLine();

          if (shouldSwitchToMedia({
            currentPlaylist: currentPlaylist,
            nextPlaylist: nextPlaylist,
            forwardBuffer: forwardBuffer,
            bufferLowWaterLine: bufferLowWaterLine,
            duration: _this3.duration(),
            log: _this3.logger_
          })) {
            _this3.masterPlaylistLoader_.media(nextPlaylist);
          }

          _this3.tech_.trigger('bandwidthupdate');
        });
        // re-emit download progress for listeners on the controller
        this.mainSegmentLoader_.on('progress', function () {
          _this3.trigger('progress');
        });

        // a segment-level failure blacklists the playlist it came from
        this.mainSegmentLoader_.on('error', function () {
          _this3.blacklistCurrentPlaylist(_this3.mainSegmentLoader_.error());
        });

        // timing info changed; recompute the seekable window
        this.mainSegmentLoader_.on('syncinfoupdate', function () {
          _this3.onSyncInfoUpdate_();
        });

        this.mainSegmentLoader_.on('timestampoffset', function () {
          _this3.tech_.trigger({ type: 'usage', name: 'hls-timestamp-offset' });
        });
        this.audioSegmentLoader_.on('syncinfoupdate', function () {
          _this3.onSyncInfoUpdate_();
        });

        // either loader finishing may complete the stream; onEndOfStream
        // decides whether all active loaders are done
        this.mainSegmentLoader_.on('ended', function () {
          _this3.onEndOfStream();
        });

        // a request predicted to cause rebuffering was aborted; temporarily
        // blacklist the rendition rather than stall
        this.mainSegmentLoader_.on('earlyabort', function () {
          _this3.blacklistCurrentPlaylist({
            message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
          }, ABORT_EARLY_BLACKLIST_SECONDS);
        });

        this.mainSegmentLoader_.on('reseteverything', function () {
          // If playing an MTS stream, a videojs.MediaSource is listening for
          // hls-reset to reset caption parsing state in the transmuxer
          _this3.tech_.trigger('hls-reset');
        });

        this.mainSegmentLoader_.on('segmenttimemapping', function (event) {
          // If playing an MTS stream in html, a videojs.MediaSource is listening for
          // hls-segment-time-mapping update its internal mapping of stream to display time
          _this3.tech_.trigger({
            type: 'hls-segment-time-mapping',
            mapping: event.mapping
          });
        });

        this.audioSegmentLoader_.on('ended', function () {
          _this3.onEndOfStream();
        });
      }
26338 }, {
26339 key: 'mediaSecondsLoaded_',
26340 value: function mediaSecondsLoaded_() {
26341 return Math.max(this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded);
26342 }
26343
26344 /**
26345 * Call load on our SegmentLoaders
26346 */
26347
26348 }, {
26349 key: 'load',
26350 value: function load() {
26351 this.mainSegmentLoader_.load();
26352 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
26353 this.audioSegmentLoader_.load();
26354 }
26355 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
26356 this.subtitleSegmentLoader_.load();
26357 }
26358 }
26359
26360 /**
26361 * Re-tune playback quality level for the current player
26362 * conditions without performing destructive actions, like
26363 * removing already buffered content
26364 *
26365 * @private
26366 */
26367
26368 }, {
26369 key: 'smoothQualityChange_',
26370 value: function smoothQualityChange_() {
26371 var media = this.selectPlaylist();
26372
26373 if (media !== this.masterPlaylistLoader_.media()) {
26374 this.masterPlaylistLoader_.media(media);
26375
26376 this.mainSegmentLoader_.resetLoader();
26377 // don't need to reset audio as it is reset when media changes
26378 }
26379 }
26380
26381 /**
26382 * Re-tune playback quality level for the current player
26383 * conditions. This method will perform destructive actions like removing
26384 * already buffered content in order to readjust the currently active
26385 * playlist quickly. This is good for manual quality changes
26386 *
26387 * @private
26388 */
26389
26390 }, {
26391 key: 'fastQualityChange_',
      value: function fastQualityChange_() {
        var _this4 = this;

        var media = this.selectPlaylist();

        // the best rendition is already active; nothing to do
        if (media === this.masterPlaylistLoader_.media()) {
          return;
        }

        this.masterPlaylistLoader_.media(media);

        // Delete all buffered data to allow an immediate quality switch, then seek to give
        // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
        // ahead is roughly the minimum that will accomplish this across a variety of content
        // in IE and Edge, but seeking in place is sufficient on all other browsers)
        // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
        // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
        this.mainSegmentLoader_.resetEverything(function () {
          // Since this is not a typical seek, we avoid the seekTo method which can cause segments
          // from the previously enabled rendition to load before the new playlist has finished loading
          if (videojs.browser.IE_VERSION || videojs.browser.IS_EDGE) {
            _this4.tech_.setCurrentTime(_this4.tech_.currentTime() + 0.04);
          } else {
            _this4.tech_.setCurrentTime(_this4.tech_.currentTime());
          }
        });

        // don't need to reset audio as it is reset when media changes
      }
26421
26422 /**
26423 * Begin playback.
26424 */
26425
26426 }, {
26427 key: 'play',
26428 value: function play() {
26429 if (this.setupFirstPlay()) {
26430 return;
26431 }
26432
26433 if (this.tech_.ended()) {
26434 this.tech_.setCurrentTime(0);
26435 }
26436
26437 if (this.hasPlayed_) {
26438 this.load();
26439 }
26440
26441 var seekable$$1 = this.tech_.seekable();
26442
26443 // if the viewer has paused and we fell out of the live window,
26444 // seek forward to the live point
26445 if (this.tech_.duration() === Infinity) {
26446 if (this.tech_.currentTime() < seekable$$1.start(0)) {
26447 return this.tech_.setCurrentTime(seekable$$1.end(seekable$$1.length - 1));
26448 }
26449 }
26450 }
26451
26452 /**
26453 * Seek to the latest media position if this is a live video and the
26454 * player and video are loaded and initialized.
26455 */
26456
26457 }, {
26458 key: 'setupFirstPlay',
      value: function setupFirstPlay() {
        // Prepare the first play of a source: for live streams, seek to the
        // live point, then start the segment loaders. Returns true when
        // first-play setup was performed here, false otherwise.
        var _this5 = this;

        var media = this.masterPlaylistLoader_.media();

        // Check that everything is ready to begin buffering for the first call to play
        // If 1) there is no active media
        // 2) the player is paused
        // 3) the first play has already been setup
        // then exit early
        if (!media || this.tech_.paused() || this.hasPlayed_) {
          return false;
        }

        // when the video is a live stream
        if (!media.endList) {
          var seekable$$1 = this.seekable();

          if (!seekable$$1.length) {
            // without a seekable range, the player cannot seek to begin buffering at the live
            // point
            return false;
          }

          if (videojs.browser.IE_VERSION && this.tech_.readyState() === 0) {
            // IE11 throws an InvalidStateError if you try to set currentTime while the
            // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
            this.tech_.one('loadedmetadata', function () {
              _this5.trigger('firstplay');
              _this5.tech_.setCurrentTime(seekable$$1.end(0));
              _this5.hasPlayed_ = true;
            });

            // setup is deferred to loadedmetadata on IE11, so report "not
            // handled" to the caller for now
            return false;
          }

          // trigger firstplay to inform the source handler to ignore the next seek event
          this.trigger('firstplay');
          // seek to the live point
          this.tech_.setCurrentTime(seekable$$1.end(0));
        }

        this.hasPlayed_ = true;
        // we can begin loading now that everything is ready
        this.load();
        return true;
      }
26506
26507 /**
26508 * handle the sourceopen event on the MediaSource
26509 *
26510 * @private
26511 */
26512
26513 }, {
26514 key: 'handleSourceOpen_',
26515 value: function handleSourceOpen_() {
26516 // Only attempt to create the source buffer if none already exist.
26517 // handleSourceOpen is also called when we are "re-opening" a source buffer
26518 // after `endOfStream` has been called (in response to a seek for instance)
26519 try {
26520 this.setupSourceBuffers_();
26521 } catch (e) {
26522 videojs.log.warn('Failed to create Source Buffers', e);
26523 return this.mediaSource.endOfStream('decode');
26524 }
26525
26526 // if autoplay is enabled, begin playback. This is duplicative of
26527 // code in video.js but is required because play() must be invoked
26528 // *after* the media source has opened.
26529 if (this.tech_.autoplay()) {
26530 var playPromise = this.tech_.play();
26531
26532 // Catch/silence error when a pause interrupts a play request
26533 // on browsers which return a promise
26534 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
26535 playPromise.then(null, function (e) {});
26536 }
26537 }
26538
26539 this.trigger('sourceopen');
26540 }
26541
26542 /**
26543 * Calls endOfStream on the media source when all active stream types have called
26544 * endOfStream
26545 *
26546 * @param {string} streamType
26547 * Stream type of the segment loader that called endOfStream
26548 * @private
26549 */
26550
26551 }, {
26552 key: 'onEndOfStream',
      value: function onEndOfStream() {
        // Call endOfStream on the media source once every active segment
        // loader has finished its final segment.
        var isEndOfStream = this.mainSegmentLoader_.ended_;

        if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
          // if the audio playlist loader exists, then alternate audio is active
          if (!this.mainSegmentLoader_.startingMedia_ || this.mainSegmentLoader_.startingMedia_.containsVideo) {
            // if we do not know if the main segment loader contains video yet or if we
            // definitively know the main segment loader contains video, then we need to wait
            // for both main and audio segment loaders to call endOfStream
            isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
          } else {
            // otherwise just rely on the audio loader
            isEndOfStream = this.audioSegmentLoader_.ended_;
          }
        }

        if (!isEndOfStream) {
          return;
        }

        this.logger_('calling mediaSource.endOfStream()');
        // on chrome calling endOfStream can sometimes cause an exception,
        // even when the media source is in a valid state.
        try {
          this.mediaSource.endOfStream();
        } catch (e) {
          videojs.log.warn('Failed to call media source endOfStream', e);
        }
      }
26582
26583 /**
26584 * Check if a playlist has stopped being updated
26585 * @param {Object} playlist the media playlist object
26586 * @return {boolean} whether the playlist has stopped being updated or not
26587 */
26588
26589 }, {
26590 key: 'stuckAtPlaylistEnd_',
26591 value: function stuckAtPlaylistEnd_(playlist) {
26592 var seekable$$1 = this.seekable();
26593
26594 if (!seekable$$1.length) {
26595 // playlist doesn't have enough information to determine whether we are stuck
26596 return false;
26597 }
26598
26599 var expired = this.syncController_.getExpiredTime(playlist, this.mediaSource.duration);
26600
26601 if (expired === null) {
26602 return false;
26603 }
26604
26605 // does not use the safe live end to calculate playlist end, since we
26606 // don't want to say we are stuck while there is still content
26607 var absolutePlaylistEnd = Hls.Playlist.playlistEnd(playlist, expired);
26608 var currentTime = this.tech_.currentTime();
26609 var buffered = this.tech_.buffered();
26610
26611 if (!buffered.length) {
26612 // return true if the playhead reached the absolute end of the playlist
26613 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
26614 }
26615 var bufferedEnd = buffered.end(buffered.length - 1);
26616
26617 // return true if there is too little buffer left and buffer has reached absolute
26618 // end of playlist
26619 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
26620 }
26621
26622 /**
26623 * Blacklists a playlist when an error occurs for a set amount of time
26624 * making it unavailable for selection by the rendition selection algorithm
26625 * and then forces a new playlist (rendition) selection.
26626 *
26627 * @param {Object=} error an optional error that may include the playlist
26628 * to blacklist
26629 * @param {Number=} blacklistDuration an optional number of seconds to blacklist the
26630 * playlist
26631 */
26632
26633 }, {
26634 key: 'blacklistCurrentPlaylist',
      value: function blacklistCurrentPlaylist() {
        // Optional arguments: `error` may carry the playlist to blacklist, a
        // message, and a blacklistDuration; `blacklistDuration` (seconds)
        // overrides the configured exclusion time.
        var error = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
        var blacklistDuration = arguments[1];

        var currentPlaylist = void 0;
        var nextPlaylist = void 0;

        // If the `error` was generated by the playlist loader, it will contain
        // the playlist we were trying to load (but failed) and that should be
        // blacklisted instead of the currently selected playlist which is likely
        // out-of-date in this scenario
        currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();

        // precedence: explicit argument > error-specified > configured default
        blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration;

        // If there is no current playlist, then an error occurred while we were
        // trying to load the master OR while we were disposing of the tech
        if (!currentPlaylist) {
          this.error = error;

          try {
            return this.mediaSource.endOfStream('network');
          } catch (e) {
            // endOfStream can throw depending on media source state; fall
            // back to surfacing a generic error event
            return this.trigger('error');
          }
        }

        var isFinalRendition = this.masterPlaylistLoader_.master.playlists.filter(isEnabled).length === 1;
        var playlists = this.masterPlaylistLoader_.master.playlists;

        if (playlists.length === 1) {
          // Never blacklisting this playlist because it's the only playlist
          videojs.log.warn('Problem encountered with the current ' + 'HLS playlist. Trying again since it is the only playlist.');

          this.tech_.trigger('retryplaylist');
          return this.masterPlaylistLoader_.load(isFinalRendition);
        }

        if (isFinalRendition) {
          // Since we're on the final non-blacklisted playlist, and we're about to blacklist
          // it, instead of erring the player or retrying this playlist, clear out the current
          // blacklist. This allows other playlists to be attempted in case any have been
          // fixed.
          videojs.log.warn('Removing all playlists from the blacklist because the last ' + 'rendition is about to be blacklisted.');
          playlists.forEach(function (playlist) {
            // exclusions set to Infinity are never cleared
            if (playlist.excludeUntil !== Infinity) {
              delete playlist.excludeUntil;
            }
          });
          // Technically we are retrying a playlist, in that we are simply retrying a previous
          // playlist. This is needed for users relying on the retryplaylist event to catch a
          // case where the player might be stuck and looping through "dead" playlists.
          this.tech_.trigger('retryplaylist');
        }

        // Blacklist this playlist
        currentPlaylist.excludeUntil = Date.now() + blacklistDuration * 1000;
        this.tech_.trigger('blacklistplaylist');
        this.tech_.trigger({ type: 'usage', name: 'hls-rendition-blacklisted' });

        // Select a new playlist
        nextPlaylist = this.selectPlaylist();
        videojs.log.warn('Problem encountered with the current HLS playlist.' + (error.message ? ' ' + error.message : '') + ' Switching to another playlist.');

        return this.masterPlaylistLoader_.media(nextPlaylist, isFinalRendition);
      }
26701
26702 /**
26703 * Pause all segment loaders
26704 */
26705
26706 }, {
26707 key: 'pauseLoading',
26708 value: function pauseLoading() {
26709 this.mainSegmentLoader_.pause();
26710 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
26711 this.audioSegmentLoader_.pause();
26712 }
26713 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
26714 this.subtitleSegmentLoader_.pause();
26715 }
26716 }
26717
26718 /**
26719 * set the current time on all segment loaders
26720 *
26721 * @param {TimeRange} currentTime the current time to set
26722 * @return {TimeRange} the current time
26723 */
26724
26725 }, {
26726 key: 'setCurrentTime',
      value: function setCurrentTime(currentTime) {
        // Handle a seek: when the target is unbuffered, reset and restart the
        // segment loaders so buffering begins at the new location.
        var buffered = findRange(this.tech_.buffered(), currentTime);

        if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
          // return immediately if the metadata is not ready yet
          return 0;
        }

        // it's clearly an edge-case but don't throw an error if asked to
        // seek within an empty playlist
        if (!this.masterPlaylistLoader_.media().segments) {
          return 0;
        }

        // In flash playback, the segment loaders should be reset on every seek, even
        // in buffer seeks. If the seek location is already buffered, continue buffering as
        // usual
        // TODO: redo this comment
        if (buffered && buffered.length) {
          return currentTime;
        }

        // cancel outstanding requests so we begin buffering at the new
        // location
        this.mainSegmentLoader_.resetEverything();
        this.mainSegmentLoader_.abort();
        if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
          this.audioSegmentLoader_.resetEverything();
          this.audioSegmentLoader_.abort();
        }
        if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
          this.subtitleSegmentLoader_.resetEverything();
          this.subtitleSegmentLoader_.abort();
        }

        // start segment loader loading in case they are paused
        this.load();
      }
26765
26766 /**
26767 * get the current duration
26768 *
26769 * @return {TimeRange} the duration
26770 */
26771
26772 }, {
26773 key: 'duration',
26774 value: function duration$$1() {
26775 if (!this.masterPlaylistLoader_) {
26776 return 0;
26777 }
26778
26779 if (this.mediaSource) {
26780 return this.mediaSource.duration;
26781 }
26782
26783 return Hls.Playlist.duration(this.masterPlaylistLoader_.media());
26784 }
26785
26786 /**
26787 * check the seekable range
26788 *
26789 * @return {TimeRange} the seekable range
26790 */
26791
26792 }, {
26793 key: 'seekable',
      value: function seekable$$1() {
        // Return the cached seekable range maintained by onSyncInfoUpdate_().
        return this.seekable_;
      }
26797 }, {
26798 key: 'onSyncInfoUpdate_',
      value: function onSyncInfoUpdate_() {
        // Recompute the seekable window from the main (and, when active,
        // alternate audio) playlists; fire `seekablechanged` only when the
        // range actually moved.
        var audioSeekable = void 0;

        if (!this.masterPlaylistLoader_) {
          return;
        }

        var media = this.masterPlaylistLoader_.media();

        if (!media) {
          return;
        }

        var expired = this.syncController_.getExpiredTime(media, this.mediaSource.duration);

        if (expired === null) {
          // not enough information to update seekable
          return;
        }

        var suggestedPresentationDelay = this.masterPlaylistLoader_.master.suggestedPresentationDelay;
        var mainSeekable = Hls.Playlist.seekable(media, expired, suggestedPresentationDelay);

        if (mainSeekable.length === 0) {
          return;
        }

        // when alternate audio is active, its playlist constrains the
        // seekable window as well
        if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
          media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
          expired = this.syncController_.getExpiredTime(media, this.mediaSource.duration);

          if (expired === null) {
            return;
          }

          audioSeekable = Hls.Playlist.seekable(media, expired, suggestedPresentationDelay);

          if (audioSeekable.length === 0) {
            return;
          }
        }

        var oldEnd = void 0;
        var oldStart = void 0;

        // remember the previous range so a no-op update can be skipped below
        if (this.seekable_ && this.seekable_.length) {
          oldEnd = this.seekable_.end(0);
          oldStart = this.seekable_.start(0);
        }

        if (!audioSeekable) {
          // seekable has been calculated based on buffering video data so it
          // can be returned directly
          this.seekable_ = mainSeekable;
        } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
          // seekables are pretty far off, rely on main
          this.seekable_ = mainSeekable;
        } else {
          // otherwise use the intersection of the audio and main ranges
          this.seekable_ = videojs.createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
        }

        // seekable is the same as last time
        if (this.seekable_ && this.seekable_.length) {
          if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
            return;
          }
        }

        this.logger_('seekable updated [' + printableRange(this.seekable_) + ']');

        this.tech_.trigger('seekablechanged');
      }
26871
26872 /**
26873 * Update the player duration
26874 */
26875
  }, {
    key: 'updateDuration',
    value: function updateDuration() {
      // Pushes the playlist-derived duration onto the media source, deferring
      // until 'sourceopen' when the media source is not yet open.
      var _this6 = this;

      var oldDuration = this.mediaSource.duration;
      var newDuration = Hls.Playlist.duration(this.masterPlaylistLoader_.media());
      var buffered = this.tech_.buffered();
      var setDuration = function setDuration() {
        // on firefox setting the duration may sometimes cause an exception
        // even if the media source is open and source buffers are not
        // updating, something about the media source being in an invalid state.
        _this6.logger_('Setting duration from ' + _this6.mediaSource.duration + ' => ' + newDuration);
        try {
          _this6.mediaSource.duration = newDuration;
        } catch (e) {
          videojs.log.warn('Failed to set media source duration', e);
        }
        _this6.tech_.trigger('durationchange');

        // self-remove so a deferred call only runs once
        _this6.mediaSource.removeEventListener('sourceopen', setDuration);
      };

      // never shrink the duration below the end of what is already buffered
      if (buffered.length > 0) {
        newDuration = Math.max(newDuration, buffered.end(buffered.length - 1));
      }

      // if the duration has changed, invalidate the cached value
      if (oldDuration !== newDuration) {
        // update the duration
        if (this.mediaSource.readyState !== 'open') {
          this.mediaSource.addEventListener('sourceopen', setDuration);
        } else {
          setDuration();
        }
      }
    }
26913
26914 /**
26915 * dispose of the MasterPlaylistController and everything
26916 * that it controls
26917 */
26918
26919 }, {
26920 key: 'dispose',
26921 value: function dispose() {
26922 var _this7 = this;
26923
26924 this.trigger('dispose');
26925 if (this.decrypter_) {
26926 this.decrypter_.terminate();
26927 }
26928 this.masterPlaylistLoader_.dispose();
26929 this.mainSegmentLoader_.dispose();
26930
26931 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
26932 var groups = _this7.mediaTypes_[type].groups;
26933
26934 for (var id in groups) {
26935 groups[id].forEach(function (group) {
26936 if (group.playlistLoader) {
26937 group.playlistLoader.dispose();
26938 }
26939 });
26940 }
26941 });
26942
26943 this.audioSegmentLoader_.dispose();
26944 this.subtitleSegmentLoader_.dispose();
26945 this.off();
26946
26947 if (this.mediaSource.dispose) {
26948 this.mediaSource.dispose();
26949 }
26950 }
26951
26952 /**
26953 * return the master playlist object if we have one
26954 *
26955 * @return {Object} the master playlist object that we parsed
26956 */
26957
  }, {
    key: 'master',
    value: function master() {
      // The parsed master playlist object maintained by the master playlist
      // loader.
      return this.masterPlaylistLoader_.master;
    }
26963
26964 /**
26965 * return the currently selected playlist
26966 *
26967 * @return {Object} the currently selected playlist object that we parsed
26968 */
26969
26970 }, {
26971 key: 'media',
26972 value: function media() {
26973 // playlist loader will not return media if it has not been fully loaded
26974 return this.masterPlaylistLoader_.media() || this.initialMedia_;
26975 }
26976
26977 /**
26978 * setup our internal source buffers on our segment Loaders
26979 *
26980 * @private
26981 */
26982
26983 }, {
26984 key: 'setupSourceBuffers_',
26985 value: function setupSourceBuffers_() {
26986 var media = this.masterPlaylistLoader_.media();
26987 var mimeTypes = void 0;
26988
26989 // wait until a media playlist is available and the Media Source is
26990 // attached
26991 if (!media || this.mediaSource.readyState !== 'open') {
26992 return;
26993 }
26994
26995 mimeTypes = mimeTypesForPlaylist(this.masterPlaylistLoader_.master, media);
26996 if (mimeTypes.length < 1) {
26997 this.error = 'No compatible SourceBuffer configuration for the variant stream:' + media.resolvedUri;
26998 return this.mediaSource.endOfStream('decode');
26999 }
27000
27001 this.configureLoaderMimeTypes_(mimeTypes);
27002 // exclude any incompatible variant streams from future playlist
27003 // selection
27004 this.excludeIncompatibleVariants_(media);
27005 }
27006 }, {
27007 key: 'configureLoaderMimeTypes_',
27008 value: function configureLoaderMimeTypes_(mimeTypes) {
27009 // If the content is demuxed, we can't start appending segments to a source buffer
27010 // until both source buffers are set up, or else the browser may not let us add the
27011 // second source buffer (it will assume we are playing either audio only or video
27012 // only).
27013 var sourceBufferEmitter =
27014 // If there is more than one mime type
27015 mimeTypes.length > 1 &&
27016 // and the first mime type does not have muxed video and audio
27017 mimeTypes[0].indexOf(',') === -1 &&
27018 // and the two mime types are different (they can be the same in the case of audio
27019 // only with alternate audio)
27020 mimeTypes[0] !== mimeTypes[1] ?
27021 // then we want to wait on the second source buffer
27022 new videojs.EventTarget() :
27023 // otherwise there is no need to wait as the content is either audio only,
27024 // video only, or muxed content.
27025 null;
27026
27027 this.mainSegmentLoader_.mimeType(mimeTypes[0], sourceBufferEmitter);
27028 if (mimeTypes[1]) {
27029 this.audioSegmentLoader_.mimeType(mimeTypes[1], sourceBufferEmitter);
27030 }
27031 }
27032
27033 /**
27034 * Blacklists playlists with codecs that are unsupported by the browser.
27035 */
27036
27037 }, {
27038 key: 'excludeUnsupportedVariants_',
27039 value: function excludeUnsupportedVariants_() {
27040 this.master().playlists.forEach(function (variant) {
27041 if (variant.attributes.CODECS && window_1.MediaSource && window_1.MediaSource.isTypeSupported && !window_1.MediaSource.isTypeSupported('video/mp4; codecs="' + mapLegacyAvcCodecs(variant.attributes.CODECS) + '"')) {
27042 variant.excludeUntil = Infinity;
27043 }
27044 });
27045 }
27046
27047 /**
27048 * Blacklist playlists that are known to be codec or
27049 * stream-incompatible with the SourceBuffer configuration. For
27050 * instance, Media Source Extensions would cause the video element to
27051 * stall waiting for video data if you switched from a variant with
27052 * video and audio to an audio-only one.
27053 *
27054 * @param {Object} media a media playlist compatible with the current
27055 * set of SourceBuffers. Variants in the current master playlist that
27056 * do not appear to have compatible codec or stream configurations
27057 * will be excluded from the default playlist selection algorithm
27058 * indefinitely.
27059 * @private
27060 */
27061
27062 }, {
27063 key: 'excludeIncompatibleVariants_',
27064 value: function excludeIncompatibleVariants_(media) {
27065 var codecCount = 2;
27066 var videoCodec = null;
27067 var codecs = void 0;
27068
27069 if (media.attributes.CODECS) {
27070 codecs = parseCodecs(media.attributes.CODECS);
27071 videoCodec = codecs.videoCodec;
27072 codecCount = codecs.codecCount;
27073 }
27074
27075 this.master().playlists.forEach(function (variant) {
27076 var variantCodecs = {
27077 codecCount: 2,
27078 videoCodec: null
27079 };
27080
27081 if (variant.attributes.CODECS) {
27082 variantCodecs = parseCodecs(variant.attributes.CODECS);
27083 }
27084
27085 // if the streams differ in the presence or absence of audio or
27086 // video, they are incompatible
27087 if (variantCodecs.codecCount !== codecCount) {
27088 variant.excludeUntil = Infinity;
27089 }
27090
27091 // if h.264 is specified on the current playlist, some flavor of
27092 // it must be specified on all compatible variants
27093 if (variantCodecs.videoCodec !== videoCodec) {
27094 variant.excludeUntil = Infinity;
27095 }
27096 });
27097 }
27098 }, {
27099 key: 'updateAdCues_',
27100 value: function updateAdCues_(media) {
27101 var offset = 0;
27102 var seekable$$1 = this.seekable();
27103
27104 if (seekable$$1.length) {
27105 offset = seekable$$1.start(0);
27106 }
27107
27108 updateAdCues(media, this.cueTagsTrack_, offset);
27109 }
27110
27111 /**
27112 * Calculates the desired forward buffer length based on current time
27113 *
27114 * @return {Number} Desired forward buffer length in seconds
27115 */
27116
27117 }, {
27118 key: 'goalBufferLength',
27119 value: function goalBufferLength() {
27120 var currentTime = this.tech_.currentTime();
27121 var initial = Config.GOAL_BUFFER_LENGTH;
27122 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
27123 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
27124
27125 return Math.min(initial + currentTime * rate, max);
27126 }
27127
27128 /**
27129 * Calculates the desired buffer low water line based on current time
27130 *
27131 * @return {Number} Desired buffer low water line in seconds
27132 */
27133
27134 }, {
27135 key: 'bufferLowWaterLine',
27136 value: function bufferLowWaterLine() {
27137 var currentTime = this.tech_.currentTime();
27138 var initial = Config.BUFFER_LOW_WATER_LINE;
27139 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
27140 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
27141
27142 return Math.min(initial + currentTime * rate, max);
27143 }
27144 }]);
27145 return MasterPlaylistController;
27146 }(videojs.EventTarget);
27147
27148 /**
27149 * Returns a function that acts as the Enable/disable playlist function.
27150 *
27151 * @param {PlaylistLoader} loader - The master playlist loader
   *
27153 * @param {string} playlistID - id of the playlist
27154 * @param {Function} changePlaylistFn - A function to be called after a
27155 * playlist's enabled-state has been changed. Will NOT be called if a
27156 * playlist's enabled-state is unchanged
27157 * @param {Boolean=} enable - Value to set the playlist enabled-state to
27158 * or if undefined returns the current enabled-state for the playlist
27159 * @return {Function} Function for setting/getting enabled
27160 */
27161 var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
27162 return function (enable) {
27163 var playlist = loader.master.playlists[playlistID];
27164 var incompatible = isIncompatible(playlist);
27165 var currentlyEnabled = isEnabled(playlist);
27166
27167 if (typeof enable === 'undefined') {
27168 return currentlyEnabled;
27169 }
27170
27171 if (enable) {
27172 delete playlist.disabled;
27173 } else {
27174 playlist.disabled = true;
27175 }
27176
27177 if (enable !== currentlyEnabled && !incompatible) {
27178 // Ensure the outside world knows about our changes
27179 changePlaylistFn();
27180 if (enable) {
27181 loader.trigger('renditionenabled');
27182 } else {
27183 loader.trigger('renditiondisabled');
27184 }
27185 }
27186 return enable;
27187 };
27188 };
27189
27190 /**
27191 * The representation object encapsulates the publicly visible information
27192 * in a media playlist along with a setter/getter-type function (enabled)
27193 * for changing the enabled-state of a particular playlist entry
27194 *
27195 * @class Representation
27196 */
27197
27198 var Representation = function Representation(hlsHandler, playlist, id) {
27199 classCallCheck(this, Representation);
27200 var mpc = hlsHandler.masterPlaylistController_,
27201 smoothQualityChange = hlsHandler.options_.smoothQualityChange;
27202 // Get a reference to a bound version of the quality change function
27203
27204 var changeType = smoothQualityChange ? 'smooth' : 'fast';
27205 var qualityChangeFunction = mpc[changeType + 'QualityChange_'].bind(mpc);
27206
27207 // some playlist attributes are optional
27208 if (playlist.attributes.RESOLUTION) {
27209 var resolution = playlist.attributes.RESOLUTION;
27210
27211 this.width = resolution.width;
27212 this.height = resolution.height;
27213 }
27214
27215 this.bandwidth = playlist.attributes.BANDWIDTH;
27216
27217 // The id is simply the ordinality of the media playlist
27218 // within the master playlist
27219 this.id = id;
27220
27221 // Partially-apply the enableFunction to create a playlist-
27222 // specific variant
27223 this.enabled = enableFunction(hlsHandler.playlists, playlist.id, qualityChangeFunction);
27224 };
27225
27226 /**
27227 * A mixin function that adds the `representations` api to an instance
27228 * of the HlsHandler class
27229 * @param {HlsHandler} hlsHandler - An instance of HlsHandler to add the
27230 * representation API into
27231 */
27232
27233
27234 var renditionSelectionMixin = function renditionSelectionMixin(hlsHandler) {
27235 var playlists = hlsHandler.playlists;
27236
27237 // Add a single API-specific function to the HlsHandler instance
27238 hlsHandler.representations = function () {
27239 if (!playlists || !playlists.master || !playlists.master.playlists) {
27240 return [];
27241 }
27242 return playlists.master.playlists.filter(function (media) {
27243 return !isIncompatible(media);
27244 }).map(function (e, i) {
27245 return new Representation(hlsHandler, e, e.id);
27246 });
27247 };
27248 };
27249
27250 /**
27251 * @file playback-watcher.js
27252 *
27253 * Playback starts, and now my watch begins. It shall not end until my death. I shall
27254 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
27255 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
27256 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
27257 * my life and honor to the Playback Watch, for this Player and all the Players to come.
27258 */
27259
  // Events that indicate playback state changed externally; any of these
  // resets the playback-watcher time check logic and clears the gap timeout
  var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
27262
27263 /**
27264 * @class PlaybackWatcher
27265 */
27266
  var PlaybackWatcher = function () {
    /**
     * Represents a PlaybackWatcher object.
     * @constructor
     * @param {object} options an object that includes the tech and settings
     */
    function PlaybackWatcher(options) {
      var _this = this;

      classCallCheck(this, PlaybackWatcher);

      this.tech_ = options.tech;
      this.seekable = options.seekable;
      this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
      this.media = options.media;

      // number of consecutive checks where currentTime did not advance
      this.consecutiveUpdates = 0;
      this.lastRecordedTime = null;
      // timer used to wait out a known buffered gap before skipping it
      this.timer_ = null;
      // handle for the periodic monitorCurrentTime_ loop
      this.checkCurrentTimeTimeout_ = null;
      this.logger_ = logger('PlaybackWatcher');

      this.logger_('initialize');

      var canPlayHandler = function canPlayHandler() {
        return _this.monitorCurrentTime_();
      };
      var waitingHandler = function waitingHandler() {
        return _this.techWaiting_();
      };
      var cancelTimerHandler = function cancelTimerHandler() {
        return _this.cancelTimer_();
      };
      var fixesBadSeeksHandler = function fixesBadSeeksHandler() {
        return _this.fixesBadSeeks_();
      };

      this.tech_.on('seekablechanged', fixesBadSeeksHandler);
      this.tech_.on('waiting', waitingHandler);
      this.tech_.on(timerCancelEvents, cancelTimerHandler);
      this.tech_.on('canplay', canPlayHandler);

      // Define the dispose function to clean up our events
      this.dispose = function () {
        _this.logger_('dispose');
        _this.tech_.off('seekablechanged', fixesBadSeeksHandler);
        _this.tech_.off('waiting', waitingHandler);
        _this.tech_.off(timerCancelEvents, cancelTimerHandler);
        _this.tech_.off('canplay', canPlayHandler);
        if (_this.checkCurrentTimeTimeout_) {
          window_1.clearTimeout(_this.checkCurrentTimeTimeout_);
        }
        _this.cancelTimer_();
      };
    }

    /**
     * Periodically check current time to see if playback stopped
     *
     * @private
     */

    createClass(PlaybackWatcher, [{
      key: 'monitorCurrentTime_',
      value: function monitorCurrentTime_() {
        this.checkCurrentTime_();

        // only one monitoring loop should be active at a time
        if (this.checkCurrentTimeTimeout_) {
          window_1.clearTimeout(this.checkCurrentTimeTimeout_);
        }

        // 42 = 24 fps // 250 is what Webkit uses // FF uses 15
        this.checkCurrentTimeTimeout_ = window_1.setTimeout(this.monitorCurrentTime_.bind(this), 250);
      }

      /**
       * The purpose of this function is to emulate the "waiting" event on
       * browsers that do not emit it when they are waiting for more
       * data to continue playback
       *
       * @private
       */

    }, {
      key: 'checkCurrentTime_',
      value: function checkCurrentTime_() {
        // a stuck seek that was just corrected resets the stall counters
        if (this.tech_.seeking() && this.fixesBadSeeks_()) {
          this.consecutiveUpdates = 0;
          this.lastRecordedTime = this.tech_.currentTime();
          return;
        }

        if (this.tech_.paused() || this.tech_.seeking()) {
          return;
        }

        var currentTime = this.tech_.currentTime();
        var buffered = this.tech_.buffered();

        if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
          // If current time is at the end of the final buffered region, then any playback
          // stall is most likely caused by buffering in a low bandwidth environment. The tech
          // should fire a `waiting` event in this scenario, but due to browser and tech
          // inconsistencies. Calling `techWaiting_` here allows us to simulate
          // responding to a native `waiting` event when the tech fails to emit one.
          return this.techWaiting_();
        }

        if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
          // stalled for at least five consecutive checks: escalate
          this.consecutiveUpdates++;
          this.waiting_();
        } else if (currentTime === this.lastRecordedTime) {
          this.consecutiveUpdates++;
        } else {
          // playback advanced; reset the stall counters
          this.consecutiveUpdates = 0;
          this.lastRecordedTime = currentTime;
        }
      }

      /**
       * Cancels any pending timers and resets the 'timeupdate' mechanism
       * designed to detect that we are stalled
       *
       * @private
       */

    }, {
      key: 'cancelTimer_',
      value: function cancelTimer_() {
        this.consecutiveUpdates = 0;

        if (this.timer_) {
          this.logger_('cancelTimer_');
          clearTimeout(this.timer_);
        }

        this.timer_ = null;
      }

      /**
       * Fixes situations where there's a bad seek
       *
       * @return {Boolean} whether an action was taken to fix the seek
       * @private
       */

    }, {
      key: 'fixesBadSeeks_',
      value: function fixesBadSeeks_() {
        var seeking = this.tech_.seeking();

        if (!seeking) {
          return false;
        }

        var seekable = this.seekable();
        var currentTime = this.tech_.currentTime();
        var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
        var seekTo = void 0;

        if (isAfterSeekableRange) {
          var seekableEnd = seekable.end(seekable.length - 1);

          // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
          seekTo = seekableEnd;
        }

        if (this.beforeSeekableWindow_(seekable, currentTime)) {
          var seekableStart = seekable.start(0);

          // sync to the beginning of the live window
          // provide a buffer of .1 seconds to handle rounding/imprecise numbers
          seekTo = seekableStart + SAFE_TIME_DELTA;
        }

        if (typeof seekTo !== 'undefined') {
          this.logger_('Trying to seek outside of seekable at time ' + currentTime + ' with ' + ('seekable range ' + printableRange(seekable) + '. Seeking to ') + (seekTo + '.'));

          this.tech_.setCurrentTime(seekTo);
          return true;
        }

        return false;
      }

      /**
       * Handler for situations when we determine the player is waiting.
       *
       * @private
       */

    }, {
      key: 'waiting_',
      value: function waiting_() {
        if (this.techWaiting_()) {
          return;
        }

        // All tech waiting checks failed. Use last resort correction
        var currentTime = this.tech_.currentTime();
        var buffered = this.tech_.buffered();
        var currentRange = findRange(buffered, currentTime);

        // Sometimes the player can stall for unknown reasons within a contiguous buffered
        // region with no indication that anything is amiss (seen in Firefox). Seeking to
        // currentTime is usually enough to kickstart the player. This checks that the player
        // is currently within a buffered region before attempting a corrective seek.
        // Chrome does not appear to continue `timeupdate` events after a `waiting` event
        // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
        // make sure there is ~3 seconds of forward buffer before taking any corrective action
        // to avoid triggering an `unknownwaiting` event when the network is slow.
        if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
          this.cancelTimer_();
          this.tech_.setCurrentTime(currentTime);

          this.logger_('Stopped at ' + currentTime + ' while inside a buffered region ' + ('[' + currentRange.start(0) + ' -> ' + currentRange.end(0) + ']. Attempting to resume ') + 'playback by seeking to the current time.');

          // unknown waiting corrections may be useful for monitoring QoS
          this.tech_.trigger({ type: 'usage', name: 'hls-unknown-waiting' });
          return;
        }
      }

      /**
       * Handler for situations when the tech fires a `waiting` event
       *
       * @return {Boolean}
       *         True if an action (or none) was needed to correct the waiting. False if no
       *         checks passed
       * @private
       */

    }, {
      key: 'techWaiting_',
      value: function techWaiting_() {
        var seekable = this.seekable();
        var currentTime = this.tech_.currentTime();

        if (this.tech_.seeking() && this.fixesBadSeeks_()) {
          // Tech is seeking or bad seek fixed, no action needed
          return true;
        }

        if (this.tech_.seeking() || this.timer_ !== null) {
          // Tech is seeking or already waiting on another action, no action needed
          return true;
        }

        if (this.beforeSeekableWindow_(seekable, currentTime)) {
          var livePoint = seekable.end(seekable.length - 1);

          this.logger_('Fell out of live window at time ' + currentTime + '. Seeking to ' + ('live point (seekable end) ' + livePoint));
          this.cancelTimer_();
          this.tech_.setCurrentTime(livePoint);

          // live window resyncs may be useful for monitoring QoS
          this.tech_.trigger({ type: 'usage', name: 'hls-live-resync' });
          return true;
        }

        var buffered = this.tech_.buffered();
        var nextRange = findNextRange(buffered, currentTime);

        if (this.videoUnderflow_(nextRange, buffered, currentTime)) {
          // Even though the video underflowed and was stuck in a gap, the audio overplayed
          // the gap, leading currentTime into a buffered range. Seeking to currentTime
          // allows the video to catch up to the audio position without losing any audio
          // (only suffering ~3 seconds of frozen video and a pause in audio playback).
          this.cancelTimer_();
          this.tech_.setCurrentTime(currentTime);

          // video underflow may be useful for monitoring QoS
          this.tech_.trigger({ type: 'usage', name: 'hls-video-underflow' });
          return true;
        }

        // check for gap
        if (nextRange.length > 0) {
          var difference = nextRange.start(0) - currentTime;

          this.logger_('Stopped at ' + currentTime + ', setting timer for ' + difference + ', seeking ' + ('to ' + nextRange.start(0)));

          // wait out the gap in real time; if playback is still stuck when
          // the timer fires, skipTheGap_ seeks over it
          this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
          return true;
        }

        // All checks failed. Returning false to indicate failure to correct waiting
        return false;
      }

      /**
       * Determines whether currentTime is past the end of the seekable window.
       *
       * @param {TimeRanges} seekable the current seekable ranges
       * @param {Number} currentTime the current playback position
       * @param {Object} playlist the active media playlist
       * @param {Boolean} [allowSeeksWithinUnsafeLiveWindow=false] for live
       *        streams, widen the allowed end by three target durations
       * @return {Boolean} true when currentTime is beyond the allowed end
       * @private
       */

    }, {
      key: 'afterSeekableWindow_',
      value: function afterSeekableWindow_(seekable, currentTime, playlist) {
        var allowSeeksWithinUnsafeLiveWindow = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;

        if (!seekable.length) {
          // we can't make a solid case if there's no seekable, default to false
          return false;
        }

        var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
        var isLive = !playlist.endList;

        if (isLive && allowSeeksWithinUnsafeLiveWindow) {
          allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
        }

        if (currentTime > allowedEnd) {
          return true;
        }

        return false;
      }

      /**
       * Determines whether currentTime has fallen before the start of the
       * seekable window.
       *
       * @param {TimeRanges} seekable the current seekable ranges
       * @param {Number} currentTime the current playback position
       * @return {Boolean} true when currentTime is before the safe start
       * @private
       */

    }, {
      key: 'beforeSeekableWindow_',
      value: function beforeSeekableWindow_(seekable, currentTime) {
        if (seekable.length &&
        // can't fall before 0 and 0 seekable start identifies VOD stream
        seekable.start(0) > 0 && currentTime < seekable.start(0) - SAFE_TIME_DELTA) {
          return true;
        }

        return false;
      }

      /**
       * Determines whether playback appears stuck in a gap caused by video
       * buffer underflow (see gapFromVideoUnderflow_ for the scenario).
       *
       * @param {TimeRanges} nextRange the buffered range after currentTime
       * @param {TimeRanges} buffered the tech's buffered ranges
       * @param {Number} currentTime the current playback position
       * @return {Boolean} true when a video-underflow gap was detected
       * @private
       */

    }, {
      key: 'videoUnderflow_',
      value: function videoUnderflow_(nextRange, buffered, currentTime) {
        if (nextRange.length === 0) {
          // Even if there is no available next range, there is still a possibility we are
          // stuck in a gap due to video underflow.
          var gap = this.gapFromVideoUnderflow_(buffered, currentTime);

          if (gap) {
            this.logger_('Encountered a gap in video from ' + gap.start + ' to ' + gap.end + '. ' + ('Seeking to current time ' + currentTime));

            return true;
          }
        }

        return false;
      }

      /**
       * Timer callback. If playback still has not proceeded, then we seek
       * to the start of the next buffered region.
       *
       * @private
       */

    }, {
      key: 'skipTheGap_',
      value: function skipTheGap_(scheduledCurrentTime) {
        var buffered = this.tech_.buffered();
        var currentTime = this.tech_.currentTime();
        var nextRange = findNextRange(buffered, currentTime);

        this.cancelTimer_();

        // if playback advanced on its own, or there is no range to skip to,
        // no corrective seek is needed
        if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
          return;
        }

        this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0));

        // only seek if we still have not played
        this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);

        this.tech_.trigger({ type: 'usage', name: 'hls-gap-skip' });
      }

      /**
       * Returns the buffered gap most likely responsible for a video
       * underflow stall, or null when none is found.
       *
       * @param {TimeRanges} buffered the tech's buffered ranges
       * @param {Number} currentTime the current playback position
       * @return {Object|null} object with `start` and `end` times, or null
       * @private
       */

    }, {
      key: 'gapFromVideoUnderflow_',
      value: function gapFromVideoUnderflow_(buffered, currentTime) {
        // At least in Chrome, if there is a gap in the video buffer, the audio will continue
        // playing for ~3 seconds after the video gap starts. This is done to account for
        // video buffer underflow/underrun (note that this is not done when there is audio
        // buffer underflow/underrun -- in that case the video will stop as soon as it
        // encounters the gap, as audio stalls are more noticeable/jarring to a user than
        // video stalls). The player's time will reflect the playthrough of audio, so the
        // time will appear as if we are in a buffered region, even if we are stuck in a
        // "gap."
        //
        // Example:
        // video buffer:   0 => 10.1, 10.2 => 20
        // audio buffer:   0 => 20
        // overall buffer: 0 => 10.1, 10.2 => 20
        // current time: 13
        //
        // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
        // however, the audio continued playing until it reached ~3 seconds past the gap
        // (13 seconds), at which point it stops as well. Since current time is past the
        // gap, findNextRange will return no ranges.
        //
        // To check for this issue, we see if there is a gap that starts somewhere within
        // a 3 second range (3 seconds +/- 1 second) back from our current time.
        var gaps = findGaps(buffered);

        for (var i = 0; i < gaps.length; i++) {
          var start = gaps.start(i);
          var end = gaps.end(i);

          // gap starts no more than 4 seconds back
          if (currentTime - start < 4 && currentTime - start > 2) {
            return {
              start: start,
              end: end
            };
          }
        }

        return null;
      }
    }]);
    return PlaybackWatcher;
  }();
27681
27682 var defaultOptions = {
27683 errorInterval: 30,
27684 getSource: function getSource(next) {
27685 var tech = this.tech({ IWillNotUseThisInPlugins: true });
27686 var sourceObj = tech.currentSource_;
27687
27688 return next(sourceObj);
27689 }
27690 };
27691
27692 /**
27693 * Main entry point for the plugin
27694 *
27695 * @param {Player} player a reference to a videojs Player instance
27696 * @param {Object} [options] an object with plugin options
27697 * @private
27698 */
  var initPlugin = function initPlugin(player, options) {
    // timestamp of the last reload attempt, used to rate-limit reloads
    var lastCalled = 0;
    // playback position to restore after the source has been reloaded
    var seekTo = 0;
    var localOptions = videojs.mergeOptions(defaultOptions, options);

    player.ready(function () {
      player.trigger({ type: 'usage', name: 'hls-error-reload-initialized' });
    });

    /**
     * Player modifications to perform that must wait until `loadedmetadata`
     * has been triggered
     *
     * @private
     */
    var loadedMetadataHandler = function loadedMetadataHandler() {
      if (seekTo) {
        player.currentTime(seekTo);
      }
    };

    /**
     * Set the source on the player element, play, and seek if necessary
     *
     * @param {Object} sourceObj An object specifying the source url and mime-type to play
     * @private
     */
    var setSource = function setSource(sourceObj) {
      if (sourceObj === null || sourceObj === undefined) {
        return;
      }
      // only remember a resume position for finite-duration content (live
      // streams report an Infinity duration)
      seekTo = player.duration() !== Infinity && player.currentTime() || 0;

      player.one('loadedmetadata', loadedMetadataHandler);

      player.src(sourceObj);
      player.trigger({ type: 'usage', name: 'hls-error-reload' });
      player.play();
    };

    /**
     * Attempt to get a source from either the built-in getSource function
     * or a custom function provided via the options
     *
     * @private
     */
    var errorHandler = function errorHandler() {
      // Do not attempt to reload the source if a source-reload occurred before
      // 'errorInterval' time has elapsed since the last source-reload
      if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
        player.trigger({ type: 'usage', name: 'hls-error-reload-canceled' });
        return;
      }

      if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
        videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
        return;
      }
      lastCalled = Date.now();

      return localOptions.getSource.call(player, setSource);
    };

    /**
     * Unbind any event handlers that were bound by the plugin
     *
     * @private
     */
    var cleanupEvents = function cleanupEvents() {
      player.off('loadedmetadata', loadedMetadataHandler);
      player.off('error', errorHandler);
      player.off('dispose', cleanupEvents);
    };

    /**
     * Cleanup before re-initializing the plugin
     *
     * @param {Object} [newOptions] an object with plugin options
     * @private
     */
    var reinitPlugin = function reinitPlugin(newOptions) {
      cleanupEvents();
      initPlugin(player, newOptions);
    };

    player.on('error', errorHandler);
    player.on('dispose', cleanupEvents);

    // Overwrite the plugin function so that we can correctly cleanup before
    // initializing the plugin
    player.reloadSourceOnError = reinitPlugin;
  };
27791
/**
 * Reload the source when an error is detected as long as there
 * wasn't an error previously within the last 30 seconds
 *
 * @param {Object} [options] an object with plugin options
 */
var reloadSourceOnError = function reloadSourceOnError(options) {
  // `this` is the player instance when invoked as a video.js plugin
  initPlugin(this, options);
};
27801
27802 var version$2 = "1.13.4";
27803
/**
 * @file videojs-http-streaming.js
 *
 * The main file for the HLS project.
 * License: https://github.com/videojs/videojs-http-streaming/blob/master/LICENSE
 */

// Public namespace for the library: re-exports the project's building blocks
// and the playlist-selection hooks that integrators may override.
var Hls$1 = {
  PlaylistLoader: PlaylistLoader,
  Playlist: Playlist,
  Decrypter: Decrypter,
  AsyncStream: AsyncStream,
  decrypt: decrypt,
  utils: utils,

  // rendition-selection strategies used by MasterPlaylistController
  // (STANDARD for steady-state ABR, INITIAL for the very first choice)
  STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
  INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
  comparePlaylistBandwidth: comparePlaylistBandwidth,
  comparePlaylistResolution: comparePlaylistResolution,

  // shared XHR wrapper; a fresh instance is created here at module load
  xhr: xhrFactory()
};
27826
// Define getter/setters for config properties. Reading or writing any of these
// tunables through Hls is allowed but discouraged, so both accessors warn.
var unsafeHlsConfigProps = ['GOAL_BUFFER_LENGTH', 'MAX_GOAL_BUFFER_LENGTH', 'GOAL_BUFFER_LENGTH_RATE', 'BUFFER_LOW_WATER_LINE', 'MAX_BUFFER_LOW_WATER_LINE', 'BUFFER_LOW_WATER_LINE_RATE', 'BANDWIDTH_VARIANCE'];

unsafeHlsConfigProps.forEach(function (prop) {
  var warnUnsafe = function warnUnsafe() {
    videojs.log.warn('using Hls.' + prop + ' is UNSAFE be sure you know what you are doing');
  };

  Object.defineProperty(Hls$1, prop, {
    get: function get$$1() {
      warnUnsafe();
      return Config[prop];
    },
    set: function set$$1(value) {
      warnUnsafe();

      // only non-negative numbers are accepted; anything else is ignored
      if (typeof value !== 'number' || value < 0) {
        videojs.log.warn('value of Hls.' + prop + ' must be greater than or equal to 0');
        return;
      }

      Config[prop] = value;
    }
  });
});
27846
27847 var LOCAL_STORAGE_KEY = 'videojs-vhs';
27848
/**
 * Map a source MIME type onto the simple streaming protocol it implies.
 *
 * @param {string} type the source's MIME type
 * @return {string|null} 'hls' for any m3u8 variant, 'dash' for
 *         application/dash+xml, null for anything else
 */
var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
  // all the common m3u8 MIME spellings: application/x-mpegURL,
  // audio/mpegurl, application/vnd.apple.mpegurl, ...
  if (/^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i.test(type)) {
    return 'hls';
  }

  if (/^application\/dash\+xml/i.test(type)) {
    return 'dash';
  }

  return null;
};
27864
/**
 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in hls.
 *
 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
 * @function handleHlsMediaChange
 */
var handleHlsMediaChange = function handleHlsMediaChange(qualityLevels, playlistLoader) {
  var activePlaylist = playlistLoader.media();
  var matchIndex = -1;

  // locate the quality level whose id matches the now-active playlist;
  // -1 (no match) is a valid "nothing selected" state
  for (var levelIndex = 0; levelIndex < qualityLevels.length; levelIndex++) {
    if (qualityLevels[levelIndex].id === activePlaylist.id) {
      matchIndex = levelIndex;
      break;
    }
  }

  qualityLevels.selectedIndex_ = matchIndex;
  qualityLevels.trigger({
    selectedIndex: matchIndex,
    type: 'change'
  });
};
27889
/**
 * Adds quality levels to list once playlist metadata is available
 *
 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
 * @param {Object} hls Hls object to listen to for media events.
 * @function handleHlsLoadedMetadata
 */
var handleHlsLoadedMetadata = function handleHlsLoadedMetadata(qualityLevels, hls) {
  // expose each available rendition as a quality level...
  hls.representations().forEach(function (rep) {
    qualityLevels.addQualityLevel(rep);
  });
  // ...then sync selectedIndex to the playlist that is currently active
  handleHlsMediaChange(qualityLevels, hls.playlists);
};
27903
// HLS is a source handler, not a tech. Make sure attempts to use it
// as one do not cause exceptions.
Hls$1.canPlaySource = function () {
  // returns whatever videojs.log.warn returns; callers should treat it as "no"
  return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};
27909
/**
 * Augment the user-supplied keySystems options with the content types (and,
 * when present, pssh data) derived from the currently selected playlist, in
 * the shape expected by videojs-contrib-eme.
 *
 * @param {Object} keySystemOptions user-provided keySystems configuration
 * @param {SegmentLoader} mainSegmentLoader loader for the main (video) content
 * @param {SegmentLoader} audioSegmentLoader loader for demuxed audio, if any
 * @return {Object|undefined|null} merged keySystems configuration, or the
 *         original falsy value when no options were given
 */
var emeKeySystems = function emeKeySystems(keySystemOptions, mainSegmentLoader, audioSegmentLoader) {
  if (!keySystemOptions) {
    return keySystemOptions;
  }

  var videoMimeType;
  var audioMimeType;

  if (audioSegmentLoader.mimeType_) {
    // demuxed content: each loader already carries its final mime/codec string
    videoMimeType = mainSegmentLoader.mimeType_;
    audioMimeType = audioSegmentLoader.mimeType_;
  } else {
    // muxed content: split the main loader's codec list into its audio and
    // video parts (ex. 'video/mp4; codecs="mp4, avc1"' -->
    // 'video/mp4; codecs="avc1"' and 'audio/mp4; codecs="mp4"')
    var parsed = parseContentType(mainSegmentLoader.mimeType_);
    var audioCodec;
    var videoCodec;

    parsed.parameters.codecs.split(',').forEach(function (codec) {
      var trimmed = codec.trim();

      if (isAudioCodec(trimmed)) {
        audioCodec = trimmed;
      } else if (isVideoCodec(trimmed)) {
        videoCodec = trimmed;
      }
    });

    videoMimeType = parsed.type + '; codecs="' + videoCodec + '"';
    audioMimeType = parsed.type.replace('video', 'audio') + '; codecs="' + audioCodec + '"';
  }

  // upsert the content types based on the selected playlist
  var keySystemContentTypes = {};
  var videoPlaylist = mainSegmentLoader.playlist_;

  for (var keySystem in keySystemOptions) {
    var entry = {
      audioContentType: audioMimeType,
      videoContentType: videoMimeType
    };
    var protection = videoPlaylist.contentProtection;

    if (protection && protection[keySystem] && protection[keySystem].pssh) {
      entry.pssh = protection[keySystem].pssh;
    }

    // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
    // so we need to prevent overwriting the URL entirely
    if (typeof keySystemOptions[keySystem] === 'string') {
      entry.url = keySystemOptions[keySystem];
    }

    keySystemContentTypes[keySystem] = entry;
  }

  return videojs.mergeOptions(keySystemOptions, keySystemContentTypes);
};
27973
/**
 * If the player has the videojs-contrib-eme plugin, attach the derived
 * keySystems configuration to the current source and eagerly initialize
 * media keys where that is safe.
 *
 * @param {HlsHandler} hlsHandler the handler whose source/loaders are used
 */
var setupEmeOptions = function setupEmeOptions(hlsHandler) {
  var mpc = hlsHandler.masterPlaylistController_;
  var player = videojs.players[hlsHandler.tech_.options_.playerId];

  if (!player.eme) {
    return;
  }

  var sourceOptions = emeKeySystems(hlsHandler.source_.keySystems, mpc.mainSegmentLoader_, mpc.audioSegmentLoader_);

  if (!sourceOptions) {
    return;
  }

  player.currentSource().keySystems = sourceOptions;

  // Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449
  // in non-IE11 browsers. In IE11 this is too early to initialize media keys
  if (videojs.browser.IE_VERSION !== 11 && player.eme.initializeMediaKeys) {
    player.eme.initializeMediaKeys();
  }
};
27994
/**
 * Read the persisted VHS settings object from localStorage.
 *
 * @return {Object|null} the parsed settings, or null when localStorage is
 *         unavailable, the key is absent, or the stored value is invalid JSON
 */
var getVhsLocalStorage = function getVhsLocalStorage() {
  if (!window.localStorage) {
    return null;
  }

  var raw = window.localStorage.getItem(LOCAL_STORAGE_KEY);

  if (!raw) {
    return null;
  }

  try {
    return JSON.parse(raw);
  } catch (e) {
    // someone may have tampered with the value
    return null;
  }
};
28013
/**
 * Merge the given options into the persisted VHS settings and write them back
 * to localStorage.
 *
 * @param {Object} options key/value pairs to persist (e.g. bandwidth)
 * @return {Object|false} the stored object on success, false when storage is
 *         unavailable or the write throws (e.g. quota exceeded)
 */
var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
  if (!window.localStorage) {
    return false;
  }

  var existing = getVhsLocalStorage();
  var objectToStore = existing ? videojs.mergeOptions(existing, options) : options;

  try {
    window.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
  } catch (e) {
    // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
    // storage is set to 0).
    // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
    // No need to perform any operation.
    return false;
  }

  return objectToStore;
};
28035
/**
 * Whether the browser has built-in HLS support.
 * Evaluated once, at module load time (IIFE).
 */
Hls$1.supportsNativeHls = function () {
  var video = document_1.createElement('video');

  // native HLS is definitely not supported if HTML5 video isn't
  if (!videojs.getTech('Html5').isSupported()) {
    return false;
  }

  // HLS manifests can go by many mime-types
  var canPlay = [
  // Apple sanctioned
  'application/vnd.apple.mpegurl',
  // Apple sanctioned for backwards compatibility
  'audio/mpegurl',
  // Very common
  'audio/x-mpegurl',
  // Very common
  'application/x-mpegurl',
  // Included for completeness
  'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];

  // native support is assumed if any spelling gets 'maybe' or 'probably'
  return canPlay.some(function (canItPlay) {
    return (/maybe|probably/i.test(video.canPlayType(canItPlay))
    );
  });
}();
28065
// Whether the browser has built-in DASH support.
// Evaluated once, at module load time (IIFE).
Hls$1.supportsNativeDash = function () {
  if (!videojs.getTech('Html5').isSupported()) {
    return false;
  }

  return (/maybe|probably/i.test(document_1.createElement('video').canPlayType('application/dash+xml'))
  );
}();
28074
/**
 * Whether the browser natively supports the given simple streaming type.
 *
 * @param {string} type 'hls' or 'dash' (as returned by simpleTypeFromSourceType)
 * @return {boolean} true when the browser can play it without MSE
 */
Hls$1.supportsTypeNatively = function (type) {
  switch (type) {
    case 'hls':
      return Hls$1.supportsNativeHls;
    case 'dash':
      return Hls$1.supportsNativeDash;
    default:
      return false;
  }
};
28086
/**
 * HLS is a source handler, not a tech. Make sure attempts to use it
 * as one do not cause exceptions.
 */
Hls$1.isSupported = function () {
  // returns whatever videojs.log.warn returns; callers should treat it as "no"
  return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};
28094
28095 var Component = videojs.getComponent('Component');
28096
/**
 * The Hls Handler object, where we orchestrate all of the parts
 * of HLS to interact with video.js
 *
 * @class HlsHandler
 * @extends videojs.Component
 * @param {Object} source the soruce object
 * @param {Tech} tech the parent tech object
 * @param {Object} options optional and required options
 */

var HlsHandler = function (_Component) {
  inherits(HlsHandler, _Component);

  function HlsHandler(source, tech, options) {
    classCallCheck(this, HlsHandler);

    // tech.player() is deprecated but setup a reference to HLS for
    // backwards-compatibility
    var _this = possibleConstructorReturn(this, (HlsHandler.__proto__ || Object.getPrototypeOf(HlsHandler)).call(this, tech, options.hls));

    if (tech.options_ && tech.options_.playerId) {
      var _player = videojs(tech.options_.playerId);

      // expose a deprecated `player.hls` accessor that warns on use
      if (!_player.hasOwnProperty('hls')) {
        Object.defineProperty(_player, 'hls', {
          get: function get$$1() {
            videojs.log.warn('player.hls is deprecated. Use player.tech().hls instead.');
            tech.trigger({ type: 'usage', name: 'hls-player-access' });
            return _this;
          },
          configurable: true
        });
      }

      // Set up a reference to the HlsHandler from player.vhs. This allows users to start
      // migrating from player.tech_.hls... to player.vhs... for API access. Although this
      // isn't the most appropriate form of reference for video.js (since all APIs should
      // be provided through core video.js), it is a common pattern for plugins, and vhs
      // will act accordingly.
      _player.vhs = _this;
      // deprecated, for backwards compatibility
      _player.dash = _this;

      _this.player_ = _player;
    }

    _this.tech_ = tech;
    _this.source_ = source;
    // stats accessors are defined later, in src()
    _this.stats = {};
    _this.ignoreNextSeekingEvent_ = false;
    _this.setOptions_();

    if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
      tech.overrideNativeAudioTracks(true);
      tech.overrideNativeVideoTracks(true);
    } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
      // overriding native HLS only works if audio tracks have been emulated
      // error early if we're misconfigured
      throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
    }

    // listen for fullscreenchange events for this player so that we
    // can adjust our quality selection quickly
    _this.on(document_1, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
      var fullscreenElement = document_1.fullscreenElement || document_1.webkitFullscreenElement || document_1.mozFullScreenElement || document_1.msFullscreenElement;

      if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
        _this.masterPlaylistController_.smoothQualityChange_();
      }
    });

    // NOTE: `this` inside the following tech event handlers is the HlsHandler
    // (video.js binds listener context when registered via Component#on)
    _this.on(_this.tech_, 'seeking', function () {
      // skip the single seek triggered by the seek-to-live behavior on firstplay
      if (this.ignoreNextSeekingEvent_) {
        this.ignoreNextSeekingEvent_ = false;
        return;
      }

      this.setCurrentTime(this.tech_.currentTime());
    });

    _this.on(_this.tech_, 'error', function () {
      if (this.masterPlaylistController_) {
        this.masterPlaylistController_.pauseLoading();
      }
    });

    _this.on(_this.tech_, 'play', _this.play);
    return _this;
  }

  createClass(HlsHandler, [{
    key: 'setOptions_',
    value: function setOptions_() {
      // Resolve the effective options: hard defaults, then persisted values
      // (localStorage), then per-source overrides from player.src().
      var _this2 = this;

      // defaults
      this.options_.withCredentials = this.options_.withCredentials || false;
      this.options_.handleManifestRedirects = this.options_.handleManifestRedirects || false;
      this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
      this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
      this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
      this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
      this.options_.customTagParsers = this.options_.customTagParsers || [];
      this.options_.customTagMappers = this.options_.customTagMappers || [];
      this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;

      if (typeof this.options_.blacklistDuration !== 'number') {
        // default: blacklist a failing rendition for 5 minutes
        this.options_.blacklistDuration = 5 * 60;
      }

      if (typeof this.options_.bandwidth !== 'number') {
        if (this.options_.useBandwidthFromLocalStorage) {
          var storedObject = getVhsLocalStorage();

          if (storedObject && storedObject.bandwidth) {
            this.options_.bandwidth = storedObject.bandwidth;
            this.tech_.trigger({ type: 'usage', name: 'hls-bandwidth-from-local-storage' });
          }
          if (storedObject && storedObject.throughput) {
            this.options_.throughput = storedObject.throughput;
            this.tech_.trigger({ type: 'usage', name: 'hls-throughput-from-local-storage' });
          }
        }
      }
      // if bandwidth was not set by options or pulled from local storage, start playlist
      // selection at a reasonable bandwidth
      if (typeof this.options_.bandwidth !== 'number') {
        this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
      }

      // If the bandwidth number is unchanged from the initial setting
      // then this takes precedence over the enableLowInitialPlaylist option
      this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH;

      // grab options passed to player.src
      ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys'].forEach(function (option) {
        if (typeof _this2.source_[option] !== 'undefined') {
          _this2.options_[option] = _this2.source_[option];
        }
      });

      this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
      this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
    }
    /**
     * called when player.src gets called, handle a new source
     *
     * @param {Object} src the source object to handle
     */

  }, {
    key: 'src',
    value: function src(_src, type) {
      var _this3 = this;

      // do nothing if the src is falsey
      if (!_src) {
        return;
      }
      this.setOptions_();
      // add master playlist controller options
      this.options_.url = this.source_.src;
      this.options_.tech = this.tech_;
      this.options_.externHls = Hls$1;
      this.options_.sourceType = simpleTypeFromSourceType(type);
      // Whenever we seek internally, we should update the tech
      this.options_.seekTo = function (time) {
        _this3.tech_.setCurrentTime(time);
      };

      this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
      this.playbackWatcher_ = new PlaybackWatcher(videojs.mergeOptions(this.options_, {
        seekable: function seekable$$1() {
          return _this3.seekable();
        },
        media: function media() {
          return _this3.masterPlaylistController_.media();
        }
      }));

      // surface controller errors on the player
      this.masterPlaylistController_.on('error', function () {
        var player = videojs.players[_this3.tech_.options_.playerId];

        player.error(_this3.masterPlaylistController_.error);
      });

      // `this` in selectPlaylist should be the HlsHandler for backwards
      // compatibility with < v2
      this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : Hls$1.STANDARD_PLAYLIST_SELECTOR.bind(this);

      this.masterPlaylistController_.selectInitialPlaylist = Hls$1.INITIAL_PLAYLIST_SELECTOR.bind(this);

      // re-expose some internal objects for backwards compatibility with < v2
      this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
      this.mediaSource = this.masterPlaylistController_.mediaSource;

      // Proxy assignment of some properties to the master playlist
      // controller. Using a custom property for backwards compatibility
      // with < v2
      Object.defineProperties(this, {
        selectPlaylist: {
          get: function get$$1() {
            return this.masterPlaylistController_.selectPlaylist;
          },
          set: function set$$1(selectPlaylist) {
            this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
          }
        },
        throughput: {
          get: function get$$1() {
            return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
          },
          set: function set$$1(throughput) {
            this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput;
            // By setting `count` to 1 the throughput value becomes the starting value
            // for the cumulative average
            this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
          }
        },
        bandwidth: {
          get: function get$$1() {
            return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
          },
          set: function set$$1(bandwidth) {
            this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth;
            // setting the bandwidth manually resets the throughput counter
            // `count` is set to zero that current value of `rate` isn't included
            // in the cumulative average
            this.masterPlaylistController_.mainSegmentLoader_.throughput = {
              rate: 0,
              count: 0
            };
          }
        },
        /**
         * `systemBandwidth` is a combination of two serial processes bit-rates. The first
         * is the network bitrate provided by `bandwidth` and the second is the bitrate of
         * the entire process after that - decryption, transmuxing, and appending - provided
         * by `throughput`.
         *
         * Since the two process are serial, the overall system bandwidth is given by:
         * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
         */
        systemBandwidth: {
          get: function get$$1() {
            var invBandwidth = 1 / (this.bandwidth || 1);
            var invThroughput = void 0;

            if (this.throughput > 0) {
              invThroughput = 1 / this.throughput;
            } else {
              invThroughput = 0;
            }

            var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));

            return systemBitrate;
          },
          set: function set$$1() {
            videojs.log.error('The "systemBandwidth" property is read-only');
          }
        }
      });

      if (this.options_.bandwidth) {
        this.bandwidth = this.options_.bandwidth;
      }
      if (this.options_.throughput) {
        this.throughput = this.options_.throughput;
      }

      // read-only statistics snapshot accessors exposed as `this.stats`
      Object.defineProperties(this.stats, {
        bandwidth: {
          get: function get$$1() {
            return _this3.bandwidth || 0;
          },
          enumerable: true
        },
        mediaRequests: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaRequests_() || 0;
          },
          enumerable: true
        },
        mediaRequestsAborted: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
          },
          enumerable: true
        },
        mediaRequestsTimedout: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
          },
          enumerable: true
        },
        mediaRequestsErrored: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
          },
          enumerable: true
        },
        mediaTransferDuration: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
          },
          enumerable: true
        },
        mediaBytesTransferred: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
          },
          enumerable: true
        },
        mediaSecondsLoaded: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
          },
          enumerable: true
        },
        buffered: {
          get: function get$$1() {
            return timeRangesToArray(_this3.tech_.buffered());
          },
          enumerable: true
        },
        currentTime: {
          get: function get$$1() {
            return _this3.tech_.currentTime();
          },
          enumerable: true
        },
        currentSource: {
          get: function get$$1() {
            return _this3.tech_.currentSource_;
          },
          enumerable: true
        },
        currentTech: {
          get: function get$$1() {
            return _this3.tech_.name_;
          },
          enumerable: true
        },
        duration: {
          get: function get$$1() {
            return _this3.tech_.duration();
          },
          enumerable: true
        },
        master: {
          get: function get$$1() {
            return _this3.playlists.master;
          },
          enumerable: true
        },
        playerDimensions: {
          get: function get$$1() {
            return _this3.tech_.currentDimensions();
          },
          enumerable: true
        },
        seekable: {
          get: function get$$1() {
            return timeRangesToArray(_this3.tech_.seekable());
          },
          enumerable: true
        },
        timestamp: {
          get: function get$$1() {
            return Date.now();
          },
          enumerable: true
        },
        videoPlaybackQuality: {
          get: function get$$1() {
            return _this3.tech_.getVideoPlaybackQuality();
          },
          enumerable: true
        }
      });

      this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));

      // persist refreshed bandwidth/throughput estimates when enabled
      this.tech_.on('bandwidthupdate', function () {
        if (_this3.options_.useBandwidthFromLocalStorage) {
          updateVhsLocalStorage({
            bandwidth: _this3.bandwidth,
            throughput: Math.round(_this3.throughput)
          });
        }
      });

      this.masterPlaylistController_.on('selectedinitialmedia', function () {
        // Add the manual rendition mix-in to HlsHandler
        renditionSelectionMixin(_this3);
        setupEmeOptions(_this3);
      });

      // the bandwidth of the primary segment loader is our best
      // estimate of overall bandwidth
      this.on(this.masterPlaylistController_, 'progress', function () {
        this.tech_.trigger('progress');
      });

      // In the live case, we need to ignore the very first `seeking` event since
      // that will be the result of the seek-to-live behavior
      this.on(this.masterPlaylistController_, 'firstplay', function () {
        this.ignoreNextSeekingEvent_ = true;
      });

      this.setupQualityLevels_();

      // do nothing if the tech has been disposed already
      // this can occur if someone sets the src in player.ready(), for instance
      if (!this.tech_.el()) {
        return;
      }

      this.tech_.src(videojs.URL.createObjectURL(this.masterPlaylistController_.mediaSource));
    }

    /**
     * Initializes the quality levels and sets listeners to update them.
     *
     * @method setupQualityLevels_
     * @private
     */

  }, {
    key: 'setupQualityLevels_',
    value: function setupQualityLevels_() {
      var _this4 = this;

      var player = videojs.players[this.tech_.options_.playerId];

      // if there isn't a player or there isn't a qualityLevels plugin
      // or qualityLevels_ listeners have already been setup, do nothing.
      if (!player || !player.qualityLevels || this.qualityLevels_) {
        return;
      }

      this.qualityLevels_ = player.qualityLevels();

      this.masterPlaylistController_.on('selectedinitialmedia', function () {
        handleHlsLoadedMetadata(_this4.qualityLevels_, _this4);
      });

      this.playlists.on('mediachange', function () {
        handleHlsMediaChange(_this4.qualityLevels_, _this4.playlists);
      });
    }

    /**
     * Begin playing the video.
     */

  }, {
    key: 'play',
    value: function play() {
      this.masterPlaylistController_.play();
    }

    /**
     * a wrapper around the function in MasterPlaylistController
     */

  }, {
    key: 'setCurrentTime',
    value: function setCurrentTime(currentTime) {
      this.masterPlaylistController_.setCurrentTime(currentTime);
    }

    /**
     * a wrapper around the function in MasterPlaylistController
     */

  }, {
    key: 'duration',
    value: function duration$$1() {
      return this.masterPlaylistController_.duration();
    }

    /**
     * a wrapper around the function in MasterPlaylistController
     */

  }, {
    key: 'seekable',
    value: function seekable$$1() {
      return this.masterPlaylistController_.seekable();
    }

    /**
     * Abort all outstanding work and cleanup.
     */

  }, {
    key: 'dispose',
    value: function dispose() {
      if (this.playbackWatcher_) {
        this.playbackWatcher_.dispose();
      }
      if (this.masterPlaylistController_) {
        this.masterPlaylistController_.dispose();
      }
      if (this.qualityLevels_) {
        this.qualityLevels_.dispose();
      }

      // remove the backwards-compatibility references installed in the constructor
      if (this.player_) {
        delete this.player_.vhs;
        delete this.player_.dash;
        delete this.player_.hls;
      }

      if (this.tech_ && this.tech_.hls) {
        delete this.tech_.hls;
      }

      get(HlsHandler.prototype.__proto__ || Object.getPrototypeOf(HlsHandler.prototype), 'dispose', this).call(this);
    }

    /**
     * Convert a playback time to program time via getProgramTime.
     * NOTE(review): presumably maps player time to EXT-X-PROGRAM-DATE-TIME —
     * confirm against getProgramTime's contract.
     *
     * @param {number} time the playback time to convert
     * @param {Function} callback invoked with the result
     */
  }, {
    key: 'convertToProgramTime',
    value: function convertToProgramTime(time, callback) {
      return getProgramTime({
        playlist: this.masterPlaylistController_.media(),
        time: time,
        callback: callback
      });
    }

    // the player must be playing before calling this

  }, {
    key: 'seekToProgramTime',
    value: function seekToProgramTime$$1(programTime, callback) {
      // transpiled default parameters: pauseAfterSeek = true, retryCount = 2
      var pauseAfterSeek = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true;
      var retryCount = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 2;

      return seekToProgramTime({
        programTime: programTime,
        playlist: this.masterPlaylistController_.media(),
        retryCount: retryCount,
        pauseAfterSeek: pauseAfterSeek,
        seekTo: this.options_.seekTo,
        tech: this.options_.tech,
        callback: callback
      });
    }
  }]);
  return HlsHandler;
}(Component);
28651
/**
 * The Source Handler object, which informs video.js what additional
 * MIME types are supported and sets up playback. It is registered
 * automatically to the appropriate tech based on the capabilities of
 * the browser it is running in. It is not necessary to use or modify
 * this object in normal usage.
 */


var HlsSourceHandler = {
  name: 'videojs-http-streaming',
  VERSION: version$2,

  /**
   * Whether this handler can play the given source object.
   * (the second, optional argument is a transpiled default parameter)
   */
  canHandleSource: function canHandleSource(srcObj) {
    var passedOptions = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
    var mergedOptions = videojs.mergeOptions(videojs.options, passedOptions);

    return HlsSourceHandler.canPlayType(srcObj.type, mergedOptions);
  },

  /**
   * Create an HlsHandler for the source, attach it to the tech as
   * `tech.hls`, and start loading.
   */
  handleSource: function handleSource(source, tech) {
    var passedOptions = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
    var mergedOptions = videojs.mergeOptions(videojs.options, passedOptions);

    tech.hls = new HlsHandler(source, tech, mergedOptions);
    tech.hls.xhr = xhrFactory();
    tech.hls.src(source.src, source.type);

    return tech.hls;
  },

  /**
   * 'maybe' when the type is a supported streaming format that should be
   * played through MSE (no native support, or native support overridden),
   * '' otherwise.
   */
  canPlayType: function canPlayType(type) {
    var passedOptions = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
    var mergedOptions = videojs.mergeOptions(videojs.options, passedOptions);
    var overrideNative = mergedOptions.hls.overrideNative;
    var supportedType = simpleTypeFromSourceType(type);
    var canUseMsePlayback = supportedType && (!Hls$1.supportsTypeNatively(supportedType) || overrideNative);

    return canUseMsePlayback ? 'maybe' : '';
  }
};
28694
// Fall back to the VHS-provided MediaSource/URL shims when the running
// video.js build does not already supply them.
if (typeof videojs.MediaSource === 'undefined' || typeof videojs.URL === 'undefined') {
  videojs.MediaSource = MediaSource;
  videojs.URL = URL$1;
}

// register source handlers with the appropriate techs
if (MediaSource.supportsNativeMediaSources()) {
  videojs.getTech('Html5').registerSourceHandler(HlsSourceHandler, 0);
}

videojs.HlsHandler = HlsHandler;
videojs.HlsSourceHandler = HlsSourceHandler;
videojs.Hls = Hls$1;
// NOTE(review): `videojs.use` (middleware) is absent on older video.js builds;
// registering Hls as a component appears to be the legacy fallback — confirm.
if (!videojs.use) {
  videojs.registerComponent('Hls', Hls$1);
}
videojs.options.hls = videojs.options.hls || {};

// video.js 6+ exposes registerPlugin; older versions use plugin
if (videojs.registerPlugin) {
  videojs.registerPlugin('reloadSourceOnError', reloadSourceOnError);
} else {
  videojs.plugin('reloadSourceOnError', reloadSourceOnError);
}

exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
exports.Hls = Hls$1;
exports.HlsHandler = HlsHandler;
exports.HlsSourceHandler = HlsSourceHandler;
exports.emeKeySystems = emeKeySystems;
exports.simpleTypeFromSourceType = simpleTypeFromSourceType;

Object.defineProperty(exports, '__esModule', { value: true });
28727
28728})));