/*! @name @videojs/http-streaming @version 2.7.0 @license Apache-2.0 */
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var _assertThisInitialized = require('@babel/runtime/helpers/assertThisInitialized');
var _inheritsLoose = require('@babel/runtime/helpers/inheritsLoose');
var document = require('global/document');
var window = require('global/window');
var _resolveUrl = require('@videojs/vhs-utils/cjs/resolve-url.js');
var videojs = require('video.js');
var m3u8Parser = require('m3u8-parser');
var codecs_js = require('@videojs/vhs-utils/cjs/codecs.js');
var mediaTypes_js = require('@videojs/vhs-utils/cjs/media-types.js');
var mpdParser = require('mpd-parser');
var parseSidx = require('mux.js/lib/tools/parse-sidx');
var id3Helpers = require('@videojs/vhs-utils/cjs/id3-helpers');
var containers = require('@videojs/vhs-utils/cjs/containers');
var byteHelpers = require('@videojs/vhs-utils/cjs/byte-helpers');
var tsInspector = require('mux.js/lib/tools/ts-inspector.js');
var clock = require('mux.js/lib/utils/clock');
var mp4probe = require('mux.js/lib/mp4/probe');

function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

var _assertThisInitialized__default = /*#__PURE__*/_interopDefaultLegacy(_assertThisInitialized);
var _inheritsLoose__default = /*#__PURE__*/_interopDefaultLegacy(_inheritsLoose);
var document__default = /*#__PURE__*/_interopDefaultLegacy(document);
var window__default = /*#__PURE__*/_interopDefaultLegacy(window);
var _resolveUrl__default = /*#__PURE__*/_interopDefaultLegacy(_resolveUrl);
var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);
var parseSidx__default = /*#__PURE__*/_interopDefaultLegacy(parseSidx);
var tsInspector__default = /*#__PURE__*/_interopDefaultLegacy(tsInspector);
var mp4probe__default = /*#__PURE__*/_interopDefaultLegacy(mp4probe);

/**
 * @file resolve-url.js - Handling how URLs are resolved and manipulated
 */
var resolveUrl = _resolveUrl__default['default'];
/**
 * Checks whether the xhr request was redirected and returns the correct url,
 * depending on the `handleManifestRedirects` option
 *
 * @api private
 *
 * @param {boolean} handleManifestRedirect - whether to follow a redirect
 * @param {string} url - the URL being requested
 * @param {XMLHttpRequest} req - the xhr request result
 *
 * @return {string}
 */

var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  // To understand how the responseURL below is set and generated:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  if (handleManifestRedirect && req && req.responseURL && url !== req.responseURL) {
    return req.responseURL;
  }

  return url;
};
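
// Illustrative sketch (not executed; the URLs are hypothetical):
//
//   var req = { responseURL: 'https://cdn.example.com/master.m3u8' };
//   resolveManifestRedirect(true, 'https://origin.example.com/master.m3u8', req);
//   // -> 'https://cdn.example.com/master.m3u8' (redirect followed)
//   resolveManifestRedirect(false, 'https://origin.example.com/master.m3u8', req);
//   // -> 'https://origin.example.com/master.m3u8' (redirect ignored)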

var logger = function logger(source) {
  if (videojs__default['default'].log.debug) {
    return videojs__default['default'].log.debug.bind(videojs__default['default'], 'VHS:', source + " >");
  }

  return function () {};
};

var log = videojs__default['default'].log;
var createPlaylistID = function createPlaylistID(index, uri) {
  return index + "-" + uri;
};
/**
 * Parses a given m3u8 playlist
 *
 * @param {Function} [onwarn]
 * a function to call when the parser triggers a warning event.
 * @param {Function} [oninfo]
 * a function to call when the parser triggers an info event.
 * @param {string} manifestString
 * The downloaded manifest string
 * @param {Object[]} [customTagParsers]
 * An array of custom tag parsers for the m3u8-parser instance
 * @param {Object[]} [customTagMappers]
 * An array of custom tag mappers for the m3u8-parser instance
 * @param {boolean} [experimentalLLHLS=false]
 * Whether to keep ll-hls features in the manifest after parsing.
 * @return {Object}
 * The manifest object
 */

var parseManifest = function parseManifest(_ref) {
  var onwarn = _ref.onwarn,
      oninfo = _ref.oninfo,
      manifestString = _ref.manifestString,
      _ref$customTagParsers = _ref.customTagParsers,
      customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
      _ref$customTagMappers = _ref.customTagMappers,
      customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
      experimentalLLHLS = _ref.experimentalLLHLS;
  var parser = new m3u8Parser.Parser();

  if (onwarn) {
    parser.on('warn', onwarn);
  }

  if (oninfo) {
    parser.on('info', oninfo);
  }

  customTagParsers.forEach(function (customParser) {
    return parser.addParser(customParser);
  });
  customTagMappers.forEach(function (mapper) {
    return parser.addTagMapper(mapper);
  });
  parser.push(manifestString);
  parser.end();
  var manifest = parser.manifest; // remove llhls features from the parsed manifest
  // if we don't want llhls support.

  if (!experimentalLLHLS) {
    ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
      if (manifest.hasOwnProperty(k)) {
        delete manifest[k];
      }
    });

    if (manifest.segments) {
      manifest.segments.forEach(function (segment) {
        ['parts', 'preloadHints'].forEach(function (k) {
          if (segment.hasOwnProperty(k)) {
            delete segment[k];
          }
        });
      });
    }
  }

  if (!manifest.targetDuration) {
    var targetDuration = 10;

    if (manifest.segments && manifest.segments.length) {
      targetDuration = manifest.segments.reduce(function (acc, s) {
        return Math.max(acc, s.duration);
      }, 0);
    }

    if (onwarn) {
      onwarn("manifest has no targetDuration defaulting to " + targetDuration);
    }

    manifest.targetDuration = targetDuration;
  }

  return manifest;
};
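
// Illustrative sketch (not executed; the manifest string is hypothetical):
// parsing a one-segment playlist. Because the playlist declares a
// targetDuration, no warning is emitted.
//
//   var manifest = parseManifest({
//     manifestString: [
//       '#EXTM3U',
//       '#EXT-X-TARGETDURATION:10',
//       '#EXTINF:10,',
//       'segment-0.ts',
//       '#EXT-X-ENDLIST'
//     ].join('\n'),
//     onwarn: function (warn) { console.warn(warn.message); }
//   });
//   // manifest.segments.length === 1, manifest.targetDuration === 10
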
/**
 * Loops through all supported media groups in master and calls the provided
 * callback for each group
 *
 * @param {Object} master
 * The parsed master manifest object
 * @param {Function} callback
 * Callback to call for each media group
 */

var forEachMediaGroup = function forEachMediaGroup(master, callback) {
  ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
    for (var groupKey in master.mediaGroups[mediaType]) {
      for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
        var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
        callback(mediaProperties, mediaType, groupKey, labelKey);
      }
    }
  });
};
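
// Illustrative sketch (not executed; assumes `master` is a parsed master
// manifest): log every AUDIO and SUBTITLES rendition.
//
//   forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
//     console.log(mediaType + '/' + groupKey + '/' + labelKey, properties.uri);
//   });
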
/**
 * Adds properties and attributes to the playlist to keep consistent functionality for
 * playlists throughout VHS.
 *
 * @param {Object} config
 * Arguments object
 * @param {Object} config.playlist
 * The media playlist
 * @param {string} [config.uri]
 * The uri to the media playlist (if media playlist is not from within a master
 * playlist)
 * @param {string} config.id
 * ID to use for the playlist
 */

var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
  var playlist = _ref2.playlist,
      uri = _ref2.uri,
      id = _ref2.id;
  playlist.id = id;

  if (uri) {
    // For media playlists, m3u8-parser does not have access to a URI, as HLS media
    // playlists do not contain their own source URI, but one is needed for consistency in
    // VHS.
    playlist.uri = uri;
  } // For HLS master playlists, even though certain attributes MUST be defined, the
  // stream may still be played without them.
  // For HLS media playlists, m3u8-parser does not attach an attributes object to the
  // manifest.
  //
  // To avoid undefined reference errors through the project, and make the code easier
  // to write/read, add an empty attributes object for these cases.


  playlist.attributes = playlist.attributes || {};
};
/**
 * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
 * necessary. In addition, creates a playlist ID for each playlist and keys each playlist
 * into the playlists array by both ID and URI.
 *
 * @param {Object} master
 * The master playlist
 */

var setupMediaPlaylists = function setupMediaPlaylists(master) {
  var i = master.playlists.length;

  while (i--) {
    var playlist = master.playlists[i];
    setupMediaPlaylist({
      playlist: playlist,
      id: createPlaylistID(i, playlist.uri)
    });
    playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
    master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility

    master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
    // the stream can be played without it. Although an attributes property may have been
    // added to the playlist to prevent undefined references, issue a warning to fix the
    // manifest.

    if (!playlist.attributes.BANDWIDTH) {
      log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
    }
  }
};
/**
 * Adds resolvedUri properties to each media group.
 *
 * @param {Object} master
 * The master playlist
 */

var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
  forEachMediaGroup(master, function (properties) {
    if (properties.uri) {
      properties.resolvedUri = resolveUrl(master.uri, properties.uri);
    }
  });
};
/**
 * Creates a master playlist wrapper to insert a sole media playlist into.
 *
 * @param {Object} media
 * Media playlist
 * @param {string} uri
 * The media URI
 *
 * @return {Object}
 * Master playlist
 */

var masterForMedia = function masterForMedia(media, uri) {
  var id = createPlaylistID(0, uri);
  var master = {
    mediaGroups: {
      'AUDIO': {},
      'VIDEO': {},
      'CLOSED-CAPTIONS': {},
      'SUBTITLES': {}
    },
    uri: window__default['default'].location.href,
    resolvedUri: window__default['default'].location.href,
    playlists: [{
      uri: uri,
      id: id,
      resolvedUri: uri,
      // m3u8-parser does not attach an attributes property to media playlists so make
      // sure that the property is attached to avoid undefined reference errors
      attributes: {}
    }]
  }; // set up ID reference

  master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility

  master.playlists[uri] = master.playlists[0];
  return master;
};
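
// Illustrative sketch (not executed; `parsedMedia` and the URI are
// hypothetical): after wrapping, the sole playlist is reachable by index,
// by ID, and by URI.
//
//   var master = masterForMedia(parsedMedia, 'https://example.com/media.m3u8');
//   // master.playlists[0] === master.playlists['0-https://example.com/media.m3u8']
//   // master.playlists[0] === master.playlists['https://example.com/media.m3u8']
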
/**
 * Does an in-place update of the master manifest to add updated playlist URI references
 * as well as other properties needed by VHS that aren't included by the parser.
 *
 * @param {Object} master
 * Master manifest object
 * @param {string} uri
 * The source URI
 */

var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
  master.uri = uri;

  for (var i = 0; i < master.playlists.length; i++) {
    if (!master.playlists[i].uri) {
      // Set up phony URIs for the playlists since playlists are referenced by their URIs
      // throughout VHS, but some formats (e.g., DASH) don't have external URIs
      // TODO: consider adding dummy URIs in mpd-parser
      var phonyUri = "placeholder-uri-" + i;
      master.playlists[i].uri = phonyUri;
    }
  }

  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    if (!properties.playlists || !properties.playlists.length || properties.playlists[0].uri) {
      return;
    } // Set up phony URIs for the media group playlists since playlists are referenced by
    // their URIs throughout VHS, but some formats (e.g., DASH) don't have external URIs


    var phonyUri = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey;
    var id = createPlaylistID(0, phonyUri);
    properties.playlists[0].uri = phonyUri;
    properties.playlists[0].id = id; // setup ID and URI references (URI for backwards compatibility)

    master.playlists[id] = properties.playlists[0];
    master.playlists[phonyUri] = properties.playlists[0];
  });
  setupMediaPlaylists(master);
  resolveMediaGroupUris(master);
};

var mergeOptions$2 = videojs__default['default'].mergeOptions,
    EventTarget$1 = videojs__default['default'].EventTarget;
/**
 * Returns a new segment object with properties and
 * the parts array merged.
 *
 * @param {Object} a the old segment
 * @param {Object} b the new segment
 *
 * @return {Object} the merged segment
 */

var updateSegment = function updateSegment(a, b) {
  if (!a) {
    return b;
  }

  var result = mergeOptions$2(a, b); // if only the old segment has parts
  // then the parts are no longer valid

  if (a.parts && !b.parts) {
    delete result.parts; // if both segments have parts
    // copy part properties from the old segment
    // to the new one.
  } else if (a.parts && b.parts) {
    for (var i = 0; i < b.parts.length; i++) {
      if (a.parts && a.parts[i]) {
        result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
      }
    }
  }

  return result;
};
/**
 * Returns a new array of segments that is the result of merging
 * properties from an older list of segments onto an updated
 * list. No properties on the updated playlist will be overwritten.
 *
 * @param {Array} original the outdated list of segments
 * @param {Array} update the updated list of segments
 * @param {number=} offset the index of the first update
 * segment in the original segment list. For non-live playlists,
 * this should always be zero and does not need to be
 * specified. For live playlists, it should be the difference
 * between the media sequence numbers in the original and updated
 * playlists.
 * @return {Array} a list of merged segment objects
 */

var updateSegments = function updateSegments(original, update, offset) {
  var oldSegments = original.slice();
  var result = update.slice();
  offset = offset || 0;
  var length = Math.min(original.length, update.length + offset);

  for (var i = offset; i < length; i++) {
    var newIndex = i - offset;
    result[newIndex] = updateSegment(oldSegments[i], result[newIndex]);
  }

  return result;
};
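
// Illustrative sketch (not executed): merging a live refresh whose media
// sequence advanced by one, so old segment 1 lines up with new segment 0.
//
//   var merged = updateSegments(
//     [{ uri: 's1.ts', start: 0 }, { uri: 's2.ts', start: 10 }], // original
//     [{ uri: 's2.ts' }, { uri: 's3.ts' }],                      // update
//     1                                                          // offset
//   );
//   // merged[0] keeps start: 10 from the old list; merged[1] is left as-is.
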
var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
  // preloadSegment will not have a uri at all
  // as the segment isn't actually in the manifest yet, only parts
  if (!segment.resolvedUri && segment.uri) {
    segment.resolvedUri = resolveUrl(baseUri, segment.uri);
  }

  if (segment.key && !segment.key.resolvedUri) {
    segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
  }

  if (segment.map && !segment.map.resolvedUri) {
    segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
  }

  if (segment.parts && segment.parts.length) {
    segment.parts.forEach(function (p) {
      if (p.resolvedUri) {
        return;
      }

      p.resolvedUri = resolveUrl(baseUri, p.uri);
    });
  }

  if (segment.preloadHints && segment.preloadHints.length) {
    segment.preloadHints.forEach(function (p) {
      if (p.resolvedUri) {
        return;
      }

      p.resolvedUri = resolveUrl(baseUri, p.uri);
    });
  }
};

var getAllSegments = function getAllSegments(media) {
  var segments = media.segments || []; // a preloadSegment with only preloadHints is not currently
  // a usable segment, only include a preloadSegment that has
  // parts.

  if (media.preloadSegment && media.preloadSegment.parts) {
    segments.push(media.preloadSegment);
  }

  return segments;
}; // consider the playlist unchanged if the playlist object is the same or
// the number of segments is equal, the media sequence number is unchanged,
// and this playlist hasn't become the end of the playlist


var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
  return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence;
};
/**
 * Returns a new master playlist that is the result of merging an
 * updated media playlist into the original version. If the
 * updated media playlist does not match any of the playlist
 * entries in the original master playlist, null is returned.
 *
 * @param {Object} master a parsed master M3U8 object
 * @param {Object} media a parsed media M3U8 object
 * @return {Object} a new object that represents the original
 * master playlist with the updated media playlist merged in, or
 * null if the merge produced no change.
 */

var updateMaster$1 = function updateMaster(master, media, unchangedCheck) {
  if (unchangedCheck === void 0) {
    unchangedCheck = isPlaylistUnchanged;
  }

  var result = mergeOptions$2(master, {});
  var playlist = result.playlists[media.id];

  if (!playlist) {
    return null;
  }

  if (unchangedCheck(playlist, media)) {
    return null;
  }

  var mergedPlaylist = mergeOptions$2(playlist, media);
  media.segments = getAllSegments(media); // if the update could overlap existing segment information, merge the two segment lists

  if (playlist.segments) {
    mergedPlaylist.segments = updateSegments(playlist.segments, media.segments, media.mediaSequence - playlist.mediaSequence);
  } // resolve any segment URIs to prevent us from having to do it later


  mergedPlaylist.segments.forEach(function (segment) {
    resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
  }); // TODO Right now in the playlists array there are two references to each playlist, one
  // that is referenced by index, and one by URI. The index reference may no longer be
  // necessary.

  for (var i = 0; i < result.playlists.length; i++) {
    if (result.playlists[i].id === media.id) {
      result.playlists[i] = mergedPlaylist;
    }
  }

  result.playlists[media.id] = mergedPlaylist; // URI reference added for backwards compatibility

  result.playlists[media.uri] = mergedPlaylist;
  return result;
};
/**
 * Calculates the time to wait before refreshing a live playlist
 *
 * @param {Object} media
 * The current media
 * @param {boolean} update
 * True if there were any updates from the last refresh, false otherwise
 * @return {number}
 * The time in ms to wait before refreshing the live playlist
 */

var refreshDelay = function refreshDelay(media, update) {
  var lastSegment = media.segments[media.segments.length - 1];
  var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
  var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;

  if (update && lastDuration) {
    return lastDuration * 1000;
  } // if the playlist is unchanged since the last reload or last segment duration
  // cannot be determined, try again after half the target duration


  return (media.partTargetDuration || media.targetDuration || 10) * 500;
};
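
// Illustrative sketch (not executed): an updated playlist whose last segment
// lasts 6 seconds is refreshed after 6000ms; an unchanged playlist is retried
// after half its target duration.
//
//   refreshDelay({ segments: [{ duration: 6 }], targetDuration: 10 }, true);  // 6000
//   refreshDelay({ segments: [{ duration: 6 }], targetDuration: 10 }, false); // 5000
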
/**
 * Load a playlist from a remote location
 *
 * @class PlaylistLoader
 * @extends Stream
 * @param {string|Object} src url or object of the manifest to load
 * @param {Object} vhs the VHS instance, used to make xhr requests and read options
 * @param {Object} [options] options, including withCredentials and handleManifestRedirects
 */

var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
  _inheritsLoose__default['default'](PlaylistLoader, _EventTarget);

  function PlaylistLoader(src, vhs, options) {
    var _this;

    if (options === void 0) {
      options = {};
    }

    _this = _EventTarget.call(this) || this;

    if (!src) {
      throw new Error('A non-empty playlist URL or object is required');
    }

    _this.logger_ = logger('PlaylistLoader');
    var _options = options,
        _options$withCredenti = _options.withCredentials,
        withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
        _options$handleManife = _options.handleManifestRedirects,
        handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
    _this.src = src;
    _this.vhs_ = vhs;
    _this.withCredentials = withCredentials;
    _this.handleManifestRedirects = handleManifestRedirects;
    var vhsOptions = vhs.options_;
    _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
    _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
    _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // initialize the loader state

    _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout

    _this.on('mediaupdatetimeout', function () {
      if (_this.state !== 'HAVE_METADATA') {
        // only refresh the media playlist if no other activity is going on
        return;
      }

      _this.state = 'HAVE_CURRENT_METADATA';
      _this.request = _this.vhs_.xhr({
        uri: resolveUrl(_this.master.uri, _this.media().uri),
        withCredentials: _this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this.request) {
          return;
        }

        if (error) {
          return _this.playlistRequestError(_this.request, _this.media(), 'HAVE_METADATA');
        }

        _this.haveMetadata({
          playlistString: _this.request.responseText,
          url: _this.media().uri,
          id: _this.media().id
        });
      });
    });

    return _this;
  }

  var _proto = PlaylistLoader.prototype;

  _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
    var uri = playlist.uri,
        id = playlist.id; // any in-flight request is now finished

    this.request = null;

    if (startingState) {
      this.state = startingState;
    }

    this.error = {
      playlist: this.master.playlists[id],
      status: xhr.status,
      message: "HLS playlist request error at URL: " + uri + ".",
      responseText: xhr.responseText,
      code: xhr.status >= 500 ? 4 : 2
    };
    this.trigger('error');
  }
  /**
   * Update the playlist loader's state in response to a new or updated playlist.
   *
   * @param {string} [playlistString]
   * Playlist string (if playlistObject is not provided)
   * @param {Object} [playlistObject]
   * Playlist object (if playlistString is not provided)
   * @param {string} url
   * URL of playlist
   * @param {string} id
   * ID to use for playlist
   */
  ;

  _proto.haveMetadata = function haveMetadata(_ref) {
    var _this2 = this;

    var playlistString = _ref.playlistString,
        playlistObject = _ref.playlistObject,
        url = _ref.url,
        id = _ref.id;
    // any in-flight request is now finished
    this.request = null;
    this.state = 'HAVE_METADATA';
    var playlist = playlistObject || parseManifest({
      onwarn: function onwarn(_ref2) {
        var message = _ref2.message;
        return _this2.logger_("m3u8-parser warn for " + id + ": " + message);
      },
      oninfo: function oninfo(_ref3) {
        var message = _ref3.message;
        return _this2.logger_("m3u8-parser info for " + id + ": " + message);
      },
      manifestString: playlistString,
      customTagParsers: this.customTagParsers,
      customTagMappers: this.customTagMappers,
      experimentalLLHLS: this.experimentalLLHLS
    });
    playlist.lastRequest = Date.now();
    setupMediaPlaylist({
      playlist: playlist,
      uri: url,
      id: id
    }); // merge this playlist into the master

    var update = updateMaster$1(this.master, playlist);
    this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;

    if (update) {
      this.master = update;
      this.media_ = this.master.playlists[id];
    } else {
      this.trigger('playlistunchanged');
    } // refresh live playlists after a target duration passes


    if (!this.media().endList) {
      window__default['default'].clearTimeout(this.mediaUpdateTimeout);
      this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
        _this2.trigger('mediaupdatetimeout');
      }, refreshDelay(this.media(), !!update));
    }

    this.trigger('loadedplaylist');
  }
  /**
   * Abort any outstanding work and clean up.
   */
  ;

  _proto.dispose = function dispose() {
    this.trigger('dispose');
    this.stopRequest();
    window__default['default'].clearTimeout(this.mediaUpdateTimeout);
    window__default['default'].clearTimeout(this.finalRenditionTimeout);
    this.off();
  };

  _proto.stopRequest = function stopRequest() {
    if (this.request) {
      var oldRequest = this.request;
      this.request = null;
      oldRequest.onreadystatechange = null;
      oldRequest.abort();
    }
  }
  /**
   * When called without any arguments, returns the currently
   * active media playlist. When called with a single argument,
   * triggers the playlist loader to asynchronously switch to the
   * specified media playlist. Calling this method while the
   * loader is in the HAVE_NOTHING state causes an error to be emitted
   * but otherwise has no effect.
   *
   * @param {Object=} playlist the parsed media playlist
   * object to switch to
   * @param {boolean=} shouldDelay whether we should delay the request by half target duration
   *
   * @return {Playlist} the current loaded media
   */
  ;

  _proto.media = function media(playlist, shouldDelay) {
    var _this3 = this;

    // getter
    if (!playlist) {
      return this.media_;
    } // setter


    if (this.state === 'HAVE_NOTHING') {
      throw new Error('Cannot switch media playlist from ' + this.state);
    } // find the playlist object if the target playlist has been
    // specified by URI


    if (typeof playlist === 'string') {
      if (!this.master.playlists[playlist]) {
        throw new Error('Unknown playlist URI: ' + playlist);
      }

      playlist = this.master.playlists[playlist];
    }

    window__default['default'].clearTimeout(this.finalRenditionTimeout);

    if (shouldDelay) {
      var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
      this.finalRenditionTimeout = window__default['default'].setTimeout(this.media.bind(this, playlist, false), delay);
      return;
    }

    var startingState = this.state;
    var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to fully loaded playlists immediately

    if (this.master.playlists[playlist.id].endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
    // media playlist or, for the case of demuxed audio, a resolved audio media group)
    playlist.endList && playlist.segments.length) {
      // abort outstanding playlist requests
      if (this.request) {
        this.request.onreadystatechange = null;
        this.request.abort();
        this.request = null;
      }

      this.state = 'HAVE_METADATA';
      this.media_ = playlist; // trigger media change if the active media has been updated

      if (mediaChange) {
        this.trigger('mediachanging');

        if (startingState === 'HAVE_MASTER') {
          // The initial playlist was a master manifest, and the first media selected was
          // also provided (in the form of a resolved playlist object) as part of the
          // source object (rather than just a URL). Therefore, since the media playlist
          // doesn't need to be requested, loadedmetadata won't trigger as part of the
          // normal flow, and needs an explicit trigger here.
          this.trigger('loadedmetadata');
        } else {
          this.trigger('mediachange');
        }
      }

      return;
    } // switching to the active playlist is a no-op


    if (!mediaChange) {
      return;
    }

    this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request

    if (this.request) {
      if (playlist.resolvedUri === this.request.url) {
        // requesting to switch to the same playlist multiple times
        // has no effect after the first
        return;
      }

      this.request.onreadystatechange = null;
      this.request.abort();
      this.request = null;
    } // request the new playlist


    if (this.media_) {
      this.trigger('mediachanging');
    }

    this.request = this.vhs_.xhr({
      uri: playlist.resolvedUri,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this3.request) {
        return;
      }

      playlist.lastRequest = Date.now();
      playlist.resolvedUri = resolveManifestRedirect(_this3.handleManifestRedirects, playlist.resolvedUri, req);

      if (error) {
        return _this3.playlistRequestError(_this3.request, playlist, startingState);
      }

      _this3.haveMetadata({
        playlistString: req.responseText,
        url: playlist.uri,
        id: playlist.id
      }); // fire loadedmetadata the first time a media playlist is loaded


      if (startingState === 'HAVE_MASTER') {
        _this3.trigger('loadedmetadata');
      } else {
        _this3.trigger('mediachange');
      }
    });
  }
  /**
   * pause loading of the playlist
   */
  ;

  _proto.pause = function pause() {
    this.stopRequest();
    window__default['default'].clearTimeout(this.mediaUpdateTimeout);

    if (this.state === 'HAVE_NOTHING') {
      // If we pause the loader before any data has been retrieved, it's as if we never
      // started, so reset to an unstarted state.
      this.started = false;
    } // Need to restore state now that no activity is happening


    if (this.state === 'SWITCHING_MEDIA') {
      // if the loader was in the process of switching media, it should either return to
      // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
      // playlist yet. This is determined by the existence of loader.media_
      if (this.media_) {
        this.state = 'HAVE_METADATA';
      } else {
        this.state = 'HAVE_MASTER';
      }
    } else if (this.state === 'HAVE_CURRENT_METADATA') {
      this.state = 'HAVE_METADATA';
    }
  }
  /**
   * start loading of the playlist
   */
  ;

  _proto.load = function load(shouldDelay) {
    var _this4 = this;

    window__default['default'].clearTimeout(this.mediaUpdateTimeout);
    var media = this.media();

    if (shouldDelay) {
      var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
      this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
        return _this4.load();
      }, delay);
      return;
    }

    if (!this.started) {
      this.start();
      return;
    }

    if (media && !media.endList) {
      this.trigger('mediaupdatetimeout');
    } else {
      this.trigger('loadedplaylist');
    }
  }
  /**
   * start loading of the playlist
   */
  ;

  _proto.start = function start() {
    var _this5 = this;

    this.started = true;

    if (typeof this.src === 'object') {
      // in the case of an entirely constructed manifest object (meaning there's no actual
      // manifest on a server), default the uri to the page's href
      if (!this.src.uri) {
        this.src.uri = window__default['default'].location.href;
      } // resolvedUri is added on internally after the initial request. Since there's no
      // request for pre-resolved manifests, add on resolvedUri here.


      this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
      // request can be skipped (since the top level of the manifest, at a minimum, is
      // already available as a parsed manifest object). However, if the manifest object
      // represents a master playlist, some media playlists may need to be resolved before
      // the starting segment list is available. Therefore, go directly to setup of the
      // initial playlist, and let the normal flow continue from there.
      //
      // Note that the call to setup is asynchronous, as other sections of VHS may assume
      // that the first request is asynchronous.

      setTimeout(function () {
        _this5.setupInitialPlaylist(_this5.src);
      }, 0);
      return;
    } // request the specified URL


    this.request = this.vhs_.xhr({
      uri: this.src,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this5.request) {
        return;
      } // clear the loader's request reference


      _this5.request = null;

      if (error) {
        _this5.error = {
          status: req.status,
          message: "HLS playlist request error at URL: " + _this5.src + ".",
          responseText: req.responseText,
          // MEDIA_ERR_NETWORK
          code: 2
        };

        if (_this5.state === 'HAVE_NOTHING') {
          _this5.started = false;
        }

        return _this5.trigger('error');
      }

      _this5.src = resolveManifestRedirect(_this5.handleManifestRedirects, _this5.src, req);
      var manifest = parseManifest({
        manifestString: req.responseText,
        customTagParsers: _this5.customTagParsers,
        customTagMappers: _this5.customTagMappers,
        // the constructor stores this option as experimentalLLHLS, which is also
        // the key parseManifest reads, so pass it through under that name
        experimentalLLHLS: _this5.experimentalLLHLS
      });

      _this5.setupInitialPlaylist(manifest);
    });
  };

  _proto.srcUri = function srcUri() {
    return typeof this.src === 'string' ? this.src : this.src.uri;
  }
  /**
   * Given a manifest object that's either a master or media playlist, trigger the proper
   * events and set the state of the playlist loader.
   *
   * If the manifest object represents a master playlist, `loadedplaylist` will be
   * triggered to allow listeners to select a playlist. If none is selected, the loader
   * will default to the first one in the playlists array.
   *
   * If the manifest object represents a media playlist, `loadedplaylist` will be
   * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
   *
   * In the case of a media playlist, a master playlist object wrapper with one playlist
   * will be created so that all logic can handle playlists in the same fashion (as an
   * assumed manifest object schema).
   *
   * @param {Object} manifest
   * The parsed manifest object
   */
  ;

  _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
    this.state = 'HAVE_MASTER';

    if (manifest.playlists) {
      this.master = manifest;
      addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
      // then resolve URIs in advance, as they are usually done after a playlist request,
      // which may not happen if the playlist is resolved.

      manifest.playlists.forEach(function (playlist) {
        playlist.segments = getAllSegments(playlist);
        playlist.segments.forEach(function (segment) {
          resolveSegmentUris(segment, playlist.resolvedUri);
        });
      });
      this.trigger('loadedplaylist');

      if (!this.request) {
        // no media playlist was specifically selected so start
        // from the first listed one
        this.media(this.master.playlists[0]);
      }

      return;
    } // In order to support media playlists passed in as vhs-json, the case where the uri
    // is not provided as part of the manifest should be considered, and an appropriate
    // default used.


    var uri = this.srcUri() || window__default['default'].location.href;
    this.master = masterForMedia(manifest, uri);
    this.haveMetadata({
      playlistObject: manifest,
      url: uri,
      id: this.master.playlists[0].id
    });
    this.trigger('loadedmetadata');
  };

  return PlaylistLoader;
}(EventTarget$1);

/**
 * ranges
 *
 * Utilities for working with TimeRanges.
 *
 */

var TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
// can be misleading because of precision differences or when the current media has poorly
// aligned audio and video, which can cause values to be slightly off from what you would
// expect. This value is what we consider to be safe to use in such comparisons to account
// for these scenarios.

var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;

var filterRanges = function filterRanges(timeRanges, predicate) {
  var results = [];
  var i;

  if (timeRanges && timeRanges.length) {
    // Search for ranges that match the predicate
    for (i = 0; i < timeRanges.length; i++) {
      if (predicate(timeRanges.start(i), timeRanges.end(i))) {
        results.push([timeRanges.start(i), timeRanges.end(i)]);
      }
    }
  }

  return videojs__default['default'].createTimeRanges(results);
};
/**
 * Attempts to find the buffered TimeRange that contains the specified
 * time.
 *
 * @param {TimeRanges} buffered - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object
 */


var findRange = function findRange(buffered, time) {
  return filterRanges(buffered, function (start, end) {
    return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
  });
};
/**
 * Returns the TimeRanges that begin later than the specified time.
 *
 * @param {TimeRanges} timeRanges - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object.
 */

var findNextRange = function findNextRange(timeRanges, time) {
  return filterRanges(timeRanges, function (start) {
    return start - TIME_FUDGE_FACTOR >= time;
  });
};
/**
 * Returns gaps within a list of TimeRanges
 *
 * @param {TimeRanges} buffered - the TimeRanges object
 * @return {TimeRanges} a TimeRanges object of gaps
 */

var findGaps = function findGaps(buffered) {
  if (buffered.length < 2) {
    return videojs__default['default'].createTimeRanges();
  }

  var ranges = [];

  for (var i = 1; i < buffered.length; i++) {
    var start = buffered.end(i - 1);
    var end = buffered.start(i);
    ranges.push([start, end]);
  }

  return videojs__default['default'].createTimeRanges(ranges);
};
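
// Illustrative sketch (not executed): buffered ranges [0, 10] and [15, 20]
// yield a single gap of [10, 15]. createTimeRanges is the same video.js
// helper used throughout this file.
//
//   var gaps = findGaps(videojs.createTimeRanges([[0, 10], [15, 20]]));
//   // gaps.start(0) === 10, gaps.end(0) === 15
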
/**
 * Calculate the intersection of two TimeRanges
 *
 * @param {TimeRanges} bufferA
 * @param {TimeRanges} bufferB
 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
 */

var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
  var start = null;
  var end = null;
  var arity = 0;
  var extents = [];
  var ranges = [];

  if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
    return videojs__default['default'].createTimeRange();
  } // Handle the case where we have both buffers and create an
  // intersection of the two


  var count = bufferA.length; // A) Gather up all start and end times

  while (count--) {
    extents.push({
      time: bufferA.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferA.end(count),
      type: 'end'
    });
  }

  count = bufferB.length;

  while (count--) {
    extents.push({
      time: bufferB.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferB.end(count),
      type: 'end'
    });
  } // B) Sort them by time


  extents.sort(function (a, b) {
    return a.time - b.time;
  }); // C) Go along one by one incrementing arity for start and decrementing
  // arity for ends

  for (count = 0; count < extents.length; count++) {
    if (extents[count].type === 'start') {
      arity++; // D) If arity is ever incremented to 2 we are entering an
      // overlapping range

      if (arity === 2) {
        start = extents[count].time;
      }
    } else if (extents[count].type === 'end') {
      arity--; // E) If arity is ever decremented to 1 we are leaving an
      // overlapping range

      if (arity === 1) {
        end = extents[count].time;
      }
    } // F) Record overlapping ranges


    if (start !== null && end !== null) {
      ranges.push([start, end]);
      start = null;
      end = null;
    }
  }

  return videojs__default['default'].createTimeRanges(ranges);
};
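
// Illustrative sketch (not executed): the overlap of [0, 10] with [5, 15]
// is [5, 10].
//
//   var overlap = bufferIntersection(
//     videojs.createTimeRanges([[0, 10]]),
//     videojs.createTimeRanges([[5, 15]])
//   );
//   // overlap.start(0) === 5, overlap.end(0) === 10
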
/**
 * Gets a human readable string for a TimeRange
 *
 * @param {TimeRange} range
 * @return {string} a human readable string
 */

var printableRange = function printableRange(range) {
  var strArr = [];

  if (!range || !range.length) {
    return '';
  }

  for (var i = 0; i < range.length; i++) {
    strArr.push(range.start(i) + ' => ' + range.end(i));
  }

  return strArr.join(', ');
};
/**
 * Calculates the amount of time left in seconds until the player hits the end of the
 * buffer and causes a rebuffer
 *
 * @param {TimeRange} buffered
 * The state of the buffer
 * @param {number} currentTime
 * The current time of the player
 * @param {number} playbackRate
 * The current playback rate of the player. Defaults to 1.
 * @return {number}
 * Time until the player has to start rebuffering in seconds.
 * @function timeUntilRebuffer
 */

var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
  if (playbackRate === void 0) {
    playbackRate = 1;
  }

  var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
  return (bufferedEnd - currentTime) / playbackRate;
};
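
// Illustrative sketch (not executed): buffered through 30s with the playhead
// at 22s and a 2x playback rate leaves (30 - 22) / 2 = 4 seconds of playback
// before a rebuffer.
//
//   timeUntilRebuffer(videojs.createTimeRanges([[0, 30]]), 22, 2); // 4
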
/**
 * Converts a TimeRanges object into an array representation
 *
 * @param {TimeRanges} timeRanges
 * @return {Array}
 */

var timeRangesToArray = function timeRangesToArray(timeRanges) {
  var timeRangesList = [];

  for (var i = 0; i < timeRanges.length; i++) {
    timeRangesList.push({
      start: timeRanges.start(i),
      end: timeRanges.end(i)
    });
  }

  return timeRangesList;
};
/**
 * Determines if two time range objects are different.
 *
 * @param {TimeRange} a
 * the first time range object to check
 *
 * @param {TimeRange} b
 * the second time range object to check
 *
 * @return {Boolean}
 * Whether the time range objects differ
 */

var isRangeDifferent = function isRangeDifferent(a, b) {
  // same object
  if (a === b) {
    return false;
  } // one or the other is undefined


  if (!a && b || !b && a) {
    return true;
  } // length is different


  if (a.length !== b.length) {
    return true;
  } // see if any start/end pair is different


  for (var i = 0; i < a.length; i++) {
    if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
      return true;
    }
  } // if the length and every pair is the same
  // this is the same time range


  return false;
};

/**
 * @file playlist.js
 *
 * Playlist related utilities.
 */
var createTimeRange = videojs__default['default'].createTimeRange;
/**
 * A function to get a combined list of parts and segments with durations
 * and indexes.
 *
 * @param {Playlist} playlist the playlist to get the list for.
 *
 * @return {Array} The part/segment list.
 */

var getPartsAndSegments = function getPartsAndSegments(playlist) {
  return (playlist.segments || []).reduce(function (acc, segment, si) {
    if (segment.parts) {
      segment.parts.forEach(function (part, pi) {
        acc.push({
          duration: part.duration,
          segmentIndex: si,
          partIndex: pi
        });
      });
    } else {
      acc.push({
        duration: segment.duration,
        segmentIndex: si,
        partIndex: null
      });
    }

    return acc;
  }, []);
};
/**
 * Get the number of seconds to delay from the end of a
 * live playlist.
 *
 * @param {Playlist} master the master playlist
 * @param {Playlist} media the media playlist
 * @return {number} the hold back in seconds.
 */


var liveEdgeDelay = function liveEdgeDelay(master, media) {
  if (media.endList) {
    return 0;
  } // dash suggestedPresentationDelay trumps everything


  if (master && master.suggestedPresentationDelay) {
    return master.suggestedPresentationDelay;
  }

  var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
  var hasParts = lastSegment && lastSegment.parts && lastSegment.parts.length; // look for "part" delays from ll-hls first

  if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
    return media.serverControl.partHoldBack;
  } else if (hasParts && media.partTargetDuration) {
    return media.partTargetDuration * 3; // finally look for full segment delays
  } else if (media.serverControl && media.serverControl.holdBack) {
    return media.serverControl.holdBack;
  } else if (media.targetDuration) {
    return media.targetDuration * 3;
  }

  return 0;
};
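
// Illustrative sketch (not executed): a live playlist with no server control
// and no parts falls through to three target durations of hold back.
//
//   liveEdgeDelay(null, { endList: false, targetDuration: 6, segments: [] }); // 18
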
/**
 * walk backward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {Number} endSequence the mediaSequence to stop walking on
 */

var backwardDuration = function backwardDuration(playlist, endSequence) {
  var result = 0;
  var i = endSequence - playlist.mediaSequence; // if a start time is available for the segment immediately following
  // the interval, use it

  var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
  // information that is earlier than endSequence

  if (segment) {
    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start,
        precise: true
      };
    }

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - segment.duration,
        precise: true
      };
    }
  }

  while (i--) {
    segment = playlist.segments[i];

    if (typeof segment.end !== 'undefined') {
      return {
        result: result + segment.end,
        precise: true
      };
    }

    result += segment.duration;

    if (typeof segment.start !== 'undefined') {
      return {
        result: result + segment.start,
        precise: true
      };
    }
  }

  return {
    result: result,
    precise: false
  };
};
/**
 * walk forward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
 */


var forwardDuration = function forwardDuration(playlist, endSequence) {
  var result = 0;
  var segment;
  var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
  // information

  for (; i < playlist.segments.length; i++) {
    segment = playlist.segments[i];

    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start - result,
        precise: true
      };
    }

    result += segment.duration;

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - result,
        precise: true
      };
    }
  } // indicate we didn't find a useful duration estimate


  return {
    result: -1,
    precise: false
  };
};
/**
 * Calculate the media duration from the segments associated with a
 * playlist. The duration of a subinterval of the available segments
 * may be calculated by specifying an end index.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper boundary
 * for the playlist. Defaults to playlist length.
 * @param {number} expired the amount of time that has dropped
 * off the front of the playlist in a live scenario
 * @return {number} the duration between the first available segment
 * and end index.
 */


var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
  if (typeof endSequence === 'undefined') {
    endSequence = playlist.mediaSequence + playlist.segments.length;
  }

  if (endSequence < playlist.mediaSequence) {
    return 0;
  } // do a backward walk to estimate the duration


  var backward = backwardDuration(playlist, endSequence);

  if (backward.precise) {
    // if we were able to base our duration estimate on timing
    // information provided directly from the Media Source, return
    // it
    return backward.result;
  } // walk forward to see if a precise duration estimate can be made
  // that way


  var forward = forwardDuration(playlist, endSequence);

  if (forward.precise) {
    // we found a segment that has been buffered and so its
    // position is known precisely
    return forward.result;
  } // return the less-precise, playlist-based duration estimate


  return backward.result + expired;
};
/**
 * Calculates the duration of a playlist. If a start and end index
 * are specified, the duration will be for the subset of the media
 * timeline between those two indices. The total duration for live
 * playlists is always Infinity.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper
 * boundary for the playlist. Defaults to the playlist media
 * sequence number plus its length.
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @return {number} the duration between the start index and end
 * index.
 */


var duration = function duration(playlist, endSequence, expired) {
  if (!playlist) {
    return 0;
  }

  if (typeof expired !== 'number') {
    expired = 0;
  } // if a slice of the total duration is not requested, use
  // playlist-level duration indicators when they're present


  if (typeof endSequence === 'undefined') {
    // if present, use the duration specified in the playlist
    if (playlist.totalDuration) {
      return playlist.totalDuration;
    } // duration should be Infinity for live playlists


    if (!playlist.endList) {
      return window__default['default'].Infinity;
    }
  } // calculate the total duration based on the segment durations


  return intervalDuration(playlist, endSequence, expired);
};
/**
 * Calculate the time between two indexes in the current playlist.
 * Neither the start index nor the end index needs to be within the
 * current playlist, in which case the targetDuration of the playlist
 * is used to approximate the durations of the segments.
 *
 * @param {Object} playlist a media playlist object
 * @param {number} startIndex
 * @param {number} endIndex
 * @return {number} the number of seconds between startIndex and endIndex
 */

var sumDurations = function sumDurations(playlist, startIndex, endIndex) {
  var durations = 0;

  if (startIndex > endIndex) {
    var _ref = [endIndex, startIndex];
    startIndex = _ref[0];
    endIndex = _ref[1];
  }

  if (startIndex < 0) {
    for (var i = startIndex; i < Math.min(0, endIndex); i++) {
      durations += playlist.targetDuration;
    }

    startIndex = 0;
  }

  for (var _i = startIndex; _i < endIndex; _i++) {
    durations += playlist.segments[_i].duration;
  }

  return durations;
};
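
// Illustrative sketch (not executed): a negative startIndex counts segments
// that have already expired from the playlist at targetDuration apiece.
//
//   sumDurations({
//     targetDuration: 10,
//     segments: [{ duration: 9 }, { duration: 11 }]
//   }, -1, 2);
//   // 10 (expired) + 9 + 11 === 30
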
/**
 * Calculates the playlist end time
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {boolean=} useSafeLiveEnd a boolean value indicating whether or not the
 * playlist end calculation should consider the safe live end
 * (truncate the playlist end by three segments). This is normally
 * used for calculating the end of the playlist's seekable range.
 * This takes into account the value of liveEdgePadding.
 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
 * If this is provided, it is used in the safe live end calculation.
 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {number} the end time of playlist
 * @function playlistEnd
 */

var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
  if (!playlist || !playlist.segments) {
    return null;
  }

  if (playlist.endList) {
    return duration(playlist);
  }

  if (expired === null) {
    return null;
  }

  expired = expired || 0;
  var lastSegmentTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);

  if (useSafeLiveEnd) {
    liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
    lastSegmentTime -= liveEdgePadding;
  } // don't return a time less than zero


  return Math.max(0, lastSegmentTime);
};
/**
 * Calculates the interval of time that is currently seekable in a
 * playlist. The returned time ranges are relative to the earliest
 * moment in the specified playlist that is still available. A full
 * seekable implementation for live streams would need to offset
 * these values by the duration of content that has expired from the
 * stream.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {TimeRanges} the periods of time that are valid targets
 * for seeking
 */

var seekable = function seekable(playlist, expired, liveEdgePadding) {
  var useSafeLiveEnd = true;
  var seekableStart = expired || 0;
  var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);

  if (seekableEnd === null) {
    return createTimeRange();
  }

  return createTimeRange(seekableStart, seekableEnd);
};
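
// Illustrative sketch (not executed): for a live playlist of four 10-second
// segments with nothing expired, the end is pulled back from the 40s live
// edge by liveEdgeDelay (3 * targetDuration here), so seekable is [0, 10].
//
//   seekable({
//     endList: false,
//     mediaSequence: 0,
//     targetDuration: 10,
//     segments: [{ duration: 10 }, { duration: 10 }, { duration: 10 }, { duration: 10 }]
//   }, 0);
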
1683/**
1684 * Determine the index and estimated starting time of the segment that
1685 * contains a specified playback position in a media playlist.
1686 *
1687 * @param {Object} playlist the media playlist to query
1688 * @param {number} currentTime The number of seconds since the earliest
1689 * possible position to determine the containing segment for
1690 * @param {number} startIndex
1691 * @param {number} startTime
1692 * @return {Object}
1693 */
1694
1695var getMediaInfoForTime = function getMediaInfoForTime(playlist, currentTime, startIndex, startTime) {
1696 var partsAndSegments = getPartsAndSegments(playlist);
1697 var time = currentTime - startTime;
1698
1699 if (time < 0) {
1700 // Walk backward from startIndex in the playlist, adding durations
1701 // until we find a segment that contains `time` and return it
1702 if (startIndex > 0) {
1703 for (var i = startIndex - 1; i >= 0; i--) {
1704 var segment = partsAndSegments[i];
1705 time += segment.duration + TIME_FUDGE_FACTOR;
1706
1707 if (time > 0) {
1708 return {
1709 mediaIndex: segment.segmentIndex,
1710 startTime: startTime - sumDurations(playlist, startIndex, segment.segmentIndex),
1711 partIndex: segment.partIndex
1712 };
1713 }
1714 }
1715 } // We were unable to find a good segment within the playlist
1716 // so select the first segment
1717
1718
1719 return {
1720 mediaIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
1721 partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
1722 startTime: currentTime
1723 };
1724 } // When startIndex is negative, we first walk forward to first segment
1725 // adding target durations. If we "run out of time" before getting to
1726 // the first segment, return the first segment
1727
1728
1729 if (startIndex < 0) {
1730 for (var _i2 = startIndex; _i2 < 0; _i2++) {
1731 time -= playlist.targetDuration;
1732
1733 if (time < 0) {
1734 return {
1735 mediaIndex: partsAndSegments[0].segmentIndex,
1736 startTime: currentTime
1737 };
1738 }
1739 }
1740
1741 startIndex = 0;
1742 } // Walk forward from startIndex in the playlist, subtracting durations
1743 // until we find a segment that contains `time` and return it
1744
1745
1746 for (var _i3 = startIndex; _i3 < partsAndSegments.length; _i3++) {
1747 var partSegment = partsAndSegments[_i3];
1748 time -= partSegment.duration + TIME_FUDGE_FACTOR;
1749
1750 if (time < 0) {
1751 return {
1752 mediaIndex: partSegment.segmentIndex,
1753 startTime: startTime + sumDurations(playlist, startIndex, partSegment.segmentIndex),
1754 partIndex: partSegment.partIndex
1755 };
1756 }
1757 } // We are out of possible candidates so load the last one...
1758
1759
1760 return {
1761 mediaIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
1762 partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
1763 startTime: currentTime
1764 };
1765};
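// Illustrative sketch (not part of the library): with two 6 second
// segments and no low-latency parts, asking for t = 7s from startIndex 0 /
// startTime 0 walks forward past the first segment into the second one.
var exampleGetMediaInfoUsage = function exampleGetMediaInfoUsage() {
  var examplePlaylist = {
    mediaSequence: 0,
    targetDuration: 6,
    segments: [{ duration: 6 }, { duration: 6 }]
  };
  // expected: mediaIndex 1 and startTime 6 (the sum of prior durations)
  return getMediaInfoForTime(examplePlaylist, 7, 0, 0);
};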
1766/**
1767 * Check whether the playlist is blacklisted or not.
1768 *
1769 * @param {Object} playlist the media playlist object
1770 * @return {boolean} whether the playlist is blacklisted or not
1771 * @function isBlacklisted
1772 */
1773
1774var isBlacklisted = function isBlacklisted(playlist) {
1775 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
1776};
1777/**
1778 * Check whether the playlist is compatible with current playback configuration or has
1779 * been blacklisted permanently for being incompatible.
1780 *
1781 * @param {Object} playlist the media playlist object
1782 * @return {boolean} whether the playlist is incompatible or not
1783 * @function isIncompatible
1784 */
1785
1786var isIncompatible = function isIncompatible(playlist) {
1787 return playlist.excludeUntil && playlist.excludeUntil === Infinity;
1788};
1789/**
1790 * Check whether the playlist is enabled or not.
1791 *
1792 * @param {Object} playlist the media playlist object
1793 * @return {boolean} whether the playlist is enabled or not
1794 * @function isEnabled
1795 */
1796
1797var isEnabled = function isEnabled(playlist) {
1798 var blacklisted = isBlacklisted(playlist);
1799 return !playlist.disabled && !blacklisted;
1800};
1801/**
1802 * Check whether the playlist has been manually disabled through the representations api.
1803 *
1804 * @param {Object} playlist the media playlist object
1805 * @return {boolean} whether the playlist is disabled manually or not
1806 * @function isDisabled
1807 */
1808
1809var isDisabled = function isDisabled(playlist) {
1810 return playlist.disabled;
1811};
1812/**
1813 * Returns whether the current playlist is an AES encrypted HLS stream
1814 *
1815 * @return {boolean} true if it's an AES encrypted HLS stream
1816 */
1817
1818var isAes = function isAes(media) {
1819 for (var i = 0; i < media.segments.length; i++) {
1820 if (media.segments[i].key) {
1821 return true;
1822 }
1823 }
1824
1825 return false;
1826};
1827/**
1828 * Checks if the playlist has a value for the specified attribute
1829 *
1830 * @param {string} attr
1831 * Attribute to check for
1832 * @param {Object} playlist
1833 * The media playlist object
1834 * @return {boolean}
1835 * Whether the playlist contains a value for the attribute or not
1836 * @function hasAttribute
1837 */
1838
1839var hasAttribute = function hasAttribute(attr, playlist) {
1840 return playlist.attributes && playlist.attributes[attr];
1841};
1842/**
1843 * Estimates the time required to complete a segment download from the specified playlist
1844 *
1845 * @param {number} segmentDuration
1846 * Duration of requested segment
1847 * @param {number} bandwidth
1848 * Current measured bandwidth of the player
1849 * @param {Object} playlist
1850 * The media playlist object
1851 * @param {number=} bytesReceived
1852 * Number of bytes already received for the request. Defaults to 0
1853 * @return {number|NaN}
1854 * The estimated time to request the segment. NaN if bandwidth information for
1855 * the given playlist is unavailable
1856 * @function estimateSegmentRequestTime
1857 */
1858
1859var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
1860 if (bytesReceived === void 0) {
1861 bytesReceived = 0;
1862 }
1863
1864 if (!hasAttribute('BANDWIDTH', playlist)) {
1865 return NaN;
1866 }
1867
1868 var size = segmentDuration * playlist.attributes.BANDWIDTH;
1869 return (size - bytesReceived * 8) / bandwidth;
1870};
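// Illustrative sketch (not part of the library): a 6s segment from a
// 4,000,000 bit/s rendition is roughly 24,000,000 bits, so over a measured
// 8,000,000 bit/s connection the estimate is 24e6 / 8e6 = 3 seconds.
var exampleEstimateUsage = function exampleEstimateUsage() {
  var examplePlaylist = { attributes: { BANDWIDTH: 4000000 } };
  return estimateSegmentRequestTime(6, 8000000, examplePlaylist); // 3
};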
1871/**
1872 * Returns whether the current playlist is the lowest enabled rendition
1873 *
1874 * @return {Boolean} true if on the lowest enabled rendition
1875 */
1876
1877var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
1878 if (master.playlists.length === 1) {
1879 return true;
1880 }
1881
1882 var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
1883 return master.playlists.filter(function (playlist) {
1884 if (!isEnabled(playlist)) {
1885 return false;
1886 }
1887
1888 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
1889 }).length === 0;
1890};
1891var playlistMatch = function playlistMatch(a, b) {
1892 // both playlists are null
1893 // or only one playlist is non-null
1894 // no match
1895 if (!a && !b || !a && b || a && !b) {
1896 return false;
1897 } // playlist objects are the same, match
1898
1899
1900 if (a === b) {
1901 return true;
1902 } // first try to use id as it should be the most
1903 // accurate
1904
1905
1906 if (a.id && b.id && a.id === b.id) {
1907 return true;
1908 } // next try to use resolvedUri as it should be the
1909 // second most accurate.
1910
1911
1912 if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
1913 return true;
1914 } // finally try to use uri as it should be accurate
1915 // but might miss a few cases for relative uris
1916
1917
1918 if (a.uri && b.uri && a.uri === b.uri) {
1919 return true;
1920 }
1921
1922 return false;
1923};
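// Illustrative sketch (not part of the library): id is checked first, so
// two playlist objects that share an id match even when their uris differ.
var examplePlaylistMatchUsage = function examplePlaylistMatchUsage() {
  var a = { id: '0-playlist.m3u8', uri: 'playlist.m3u8' };
  var b = { id: '0-playlist.m3u8', uri: 'low/playlist.m3u8' };
  return playlistMatch(a, b); // true, matched on id
};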
1924
1925var someAudioVariant = function someAudioVariant(master, callback) {
1926 var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
1927 var found = false;
1928
1929 for (var groupName in AUDIO) {
1930 for (var label in AUDIO[groupName]) {
1931 found = callback(AUDIO[groupName][label]);
1932
1933 if (found) {
1934 break;
1935 }
1936 }
1937
1938 if (found) {
1939 break;
1940 }
1941 }
1942
1943 return !!found;
1944};
1945
1946var isAudioOnly = function isAudioOnly(master) {
1947 // we are audio only if we have no main playlists but do
1948 // have media group playlists.
1949 if (!master || !master.playlists || !master.playlists.length) {
1950 // with no main playlists, this is an audio only master exactly
1951 // when at least one audio variant with playlists or a uri exists.
1952 var found = someAudioVariant(master, function (variant) {
1953 return variant.playlists && variant.playlists.length || variant.uri;
1954 });
1955 return found;
1956 } // if every playlist has only an audio codec it is audio only
1957
1958
1959 var _loop = function _loop(i) {
1960 var playlist = master.playlists[i];
1961 var CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.
1962
1963 if (CODECS && CODECS.split(',').every(function (c) {
1964 return codecs_js.isAudioCodec(c);
1965 })) {
1966 return "continue";
1967 } // the playlist is in an audio group, so it is audio only
1968
1969
1970 var found = someAudioVariant(master, function (variant) {
1971 return playlistMatch(playlist, variant);
1972 });
1973
1974 if (found) {
1975 return "continue";
1976 } // if we make it here this playlist isn't audio and we
1977 // are not audio only
1978
1979
1980 return {
1981 v: false
1982 };
1983 };
1984
1985 for (var i = 0; i < master.playlists.length; i++) {
1986 var _ret = _loop(i);
1987
1988 if (_ret === "continue") continue;
1989 if (typeof _ret === "object") return _ret.v;
1990 } // if we make it past every playlist without returning, then
1991 // this is an audio only master.
1992
1993
1994 return true;
1995}; // exports
1996
1997var Playlist = {
1998 liveEdgeDelay: liveEdgeDelay,
1999 duration: duration,
2000 seekable: seekable,
2001 getMediaInfoForTime: getMediaInfoForTime,
2002 isEnabled: isEnabled,
2003 isDisabled: isDisabled,
2004 isBlacklisted: isBlacklisted,
2005 isIncompatible: isIncompatible,
2006 playlistEnd: playlistEnd,
2007 isAes: isAes,
2008 hasAttribute: hasAttribute,
2009 estimateSegmentRequestTime: estimateSegmentRequestTime,
2010 isLowestEnabledRendition: isLowestEnabledRendition,
2011 isAudioOnly: isAudioOnly,
2012 playlistMatch: playlistMatch
2013};
2014
2015/**
2016 * @file xhr.js
2017 */
2018var videojsXHR = videojs__default['default'].xhr,
2019 mergeOptions$1 = videojs__default['default'].mergeOptions;
2020
2021var callbackWrapper = function callbackWrapper(request, error, response, callback) {
2022 var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
2023
2024 if (!error && reqResponse) {
2025 request.responseTime = Date.now();
2026 request.roundTripTime = request.responseTime - request.requestTime;
2027 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
2028
2029 if (!request.bandwidth) {
2030 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
2031 }
2032 }
2033
2034 if (response.headers) {
2035 request.responseHeaders = response.headers;
2036 } // videojs.xhr now uses a specific code on the error
2037 // object to signal that a request has timed out instead
2038 // of setting a boolean on the request object
2039
2040
2041 if (error && error.code === 'ETIMEDOUT') {
2042 request.timedout = true;
2043 } // videojs.xhr no longer considers status codes outside of 200 and 0
2044 // (for file uris) to be errors, but the old XHR did, so emulate that
2045 // behavior. Status 206 may be used in response to byterange requests.
2046
2047
2048 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
2049 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
2050 }
2051
2052 callback(error, request);
2053};
2054
2055var xhrFactory = function xhrFactory() {
2056 var xhr = function XhrFunction(options, callback) {
2057 // Add a default timeout
2058 options = mergeOptions$1({
2059 timeout: 45e3
2060 }, options); // Allow an optional user-specified function to modify the option
2061 // object before we construct the xhr request
2062
2063 var beforeRequest = XhrFunction.beforeRequest || videojs__default['default'].Vhs.xhr.beforeRequest;
2064
2065 if (beforeRequest && typeof beforeRequest === 'function') {
2066 var newOptions = beforeRequest(options);
2067
2068 if (newOptions) {
2069 options = newOptions;
2070 }
2071 } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
2072 // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
2073
2074
2075 var xhrMethod = videojs__default['default'].Vhs.xhr.original === true ? videojsXHR : videojs__default['default'].Vhs.xhr;
2076 var request = xhrMethod(options, function (error, response) {
2077 return callbackWrapper(request, error, response, callback);
2078 });
2079 var originalAbort = request.abort;
2080
2081 request.abort = function () {
2082 request.aborted = true;
2083 return originalAbort.apply(request, arguments);
2084 };
2085
2086 request.uri = options.uri;
2087 request.requestTime = Date.now();
2088 return request;
2089 };
2090
2091 xhr.original = true;
2092 return xhr;
2093};
2094/**
2095 * Turns segment byterange into a string suitable for use in
2096 * HTTP Range requests
2097 *
2098 * @param {Object} byterange - an object with two values defining the start and end
2099 * of a byte-range
2100 */
2101
2102
2103var byterangeStr = function byterangeStr(byterange) {
2104 // `byterangeEnd` is one less than `offset + length` because the HTTP range
2105 // header uses inclusive ranges
2106 var byterangeEnd = byterange.offset + byterange.length - 1;
2107 var byterangeStart = byterange.offset;
2108 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
2109};
2110/**
2111 * Defines headers for use in the xhr request for a particular segment.
2112 *
2113 * @param {Object} segment - a simplified copy of the segmentInfo object
2114 * from SegmentLoader
2115 */
2116
2117
2118var segmentXhrHeaders = function segmentXhrHeaders(segment) {
2119 var headers = {};
2120
2121 if (segment.byterange) {
2122 headers.Range = byterangeStr(segment.byterange);
2123 }
2124
2125 return headers;
2126};
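// Illustrative sketch (not part of the library): a 100 byte segment at
// offset 0 produces an inclusive Range header, bytes=0-99.
var exampleSegmentHeadersUsage = function exampleSegmentHeadersUsage() {
  return segmentXhrHeaders({
    byterange: { offset: 0, length: 100 }
  }); // { Range: 'bytes=0-99' }
};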
2127
2128/**
2129 * @file bin-utils.js
2130 */
2131
2132/**
2133 * convert a TimeRange to text
2134 *
2135 * @param {TimeRange} range the timerange to use for conversion
2136 * @param {number} i the iterator on the range to convert
2137 * @return {string} the range in string format
2138 */
2139var textRange = function textRange(range, i) {
2140 return range.start(i) + '-' + range.end(i);
2141};
2142/**
2143 * format a number as hex string
2144 *
2145 * @param {number} e The number
2146 * @param {number} i the iterator
2147 * @return {string} the hex formatted number as a string
2148 */
2149
2150
2151var formatHexString = function formatHexString(e, i) {
2152 var value = e.toString(16);
2153 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
2154};
2155
2156var formatAsciiString = function formatAsciiString(e) {
2157 if (e >= 0x20 && e < 0x7e) {
2158 return String.fromCharCode(e);
2159 }
2160
2161 return '.';
2162};
2163/**
2164 * Creates an object for sending to a web worker modifying properties that are TypedArrays
2165 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
2166 *
2167 * @param {Object} message
2168 * Object of properties and values to send to the web worker
2169 * @return {Object}
2170 * Modified message with TypedArray values expanded
2171 * @function createTransferableMessage
2172 */
2173
2174
2175var createTransferableMessage = function createTransferableMessage(message) {
2176 var transferable = {};
2177 Object.keys(message).forEach(function (key) {
2178 var value = message[key];
2179
2180 if (ArrayBuffer.isView(value)) {
2181 transferable[key] = {
2182 bytes: value.buffer,
2183 byteOffset: value.byteOffset,
2184 byteLength: value.byteLength
2185 };
2186 } else {
2187 transferable[key] = value;
2188 }
2189 });
2190 return transferable;
2191};
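// Illustrative sketch (not part of the library): TypedArray values are
// expanded into { bytes, byteOffset, byteLength } so the underlying
// ArrayBuffer can be listed as a transferable when posting to a worker.
var exampleTransferableUsage = function exampleTransferableUsage() {
  var data = new Uint8Array([0x47, 0x40, 0x11]);
  var message = createTransferableMessage({ action: 'push', data: data });
  // message.action === 'push'
  // message.data is { bytes: ArrayBuffer, byteOffset: 0, byteLength: 3 }
  return message;
};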
2192/**
2193 * Returns a unique string identifier for a media initialization
2194 * segment.
2195 *
2196 * @param {Object} initSegment
2197 * the init segment object.
2198 *
2199 * @return {string} the generated init segment id
2200 */
2201
2202var initSegmentId = function initSegmentId(initSegment) {
2203 var byterange = initSegment.byterange || {
2204 length: Infinity,
2205 offset: 0
2206 };
2207 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
2208};
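// Illustrative sketch (not part of the library): the id is the byterange
// length, byterange offset, and resolved URI joined with commas; a missing
// byterange falls back to length Infinity and offset 0.
var exampleInitSegmentIdUsage = function exampleInitSegmentIdUsage() {
  return initSegmentId({
    byterange: { length: 720, offset: 0 },
    resolvedUri: 'https://example.com/init.mp4'
  }); // '720,0,https://example.com/init.mp4'
};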
2209/**
2210 * Returns a unique string identifier for a media segment key.
2211 *
2212 * @param {Object} key the encryption key
2213 * @return {string} the unique id for the media segment key.
2214 */
2215
2216var segmentKeyId = function segmentKeyId(key) {
2217 return key.resolvedUri;
2218};
2219/**
2220 * utils to help dump binary data to the console
2221 *
2222 * @param {Array|TypedArray} data
2223 * data to dump to a string
2224 *
2225 * @return {string} the data as a hex string.
2226 */
2227
2228var hexDump = function hexDump(data) {
2229 var bytes = Array.prototype.slice.call(data);
2230 var step = 16;
2231 var result = '';
2232 var hex;
2233 var ascii;
2234
2235 for (var j = 0; j < bytes.length / step; j++) {
2236 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
2237 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
2238 result += hex + ' ' + ascii + '\n';
2239 }
2240
2241 return result;
2242};
2243var tagDump = function tagDump(_ref) {
2244 var bytes = _ref.bytes;
2245 return hexDump(bytes);
2246};
2247var textRanges = function textRanges(ranges) {
2248 var result = '';
2249 var i;
2250
2251 for (i = 0; i < ranges.length; i++) {
2252 result += textRange(ranges, i) + ' ';
2253 }
2254
2255 return result;
2256};
2257
2258var utils = /*#__PURE__*/Object.freeze({
2259 __proto__: null,
2260 createTransferableMessage: createTransferableMessage,
2261 initSegmentId: initSegmentId,
2262 segmentKeyId: segmentKeyId,
2263 hexDump: hexDump,
2264 tagDump: tagDump,
2265 textRanges: textRanges
2266});
2267
2268// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
2269// 25% was arbitrarily chosen, and may need to be refined over time.
2270
2271var SEGMENT_END_FUDGE_PERCENT = 0.25;
2272/**
2273 * Converts a player time (any time that can be gotten/set from player.currentTime(),
2274 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
2275 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
2276 *
2277 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
2278 * point" (a point where we have a mapping from program time to player time, with player
2279 * time being the post transmux start of the segment).
2280 *
2281 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
2282 *
2283 * @param {number} playerTime the player time
2284 * @param {Object} segment the segment which contains the player time
2285 * @return {Date} program time
2286 */
2287
2288var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
2289 if (!segment.dateTimeObject) {
2290 // Can't convert without an "anchor point" for the program time (i.e., a time that can
2291 // be used to map the start of a segment with a real world time).
2292 return null;
2293 }
2294
2295 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
2296 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
2297
2298 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
2299 var offsetFromSegmentStart = playerTime - startOfSegment;
2300 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
2301};
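// Illustrative sketch (not part of the library): with a segment anchored
// at 2021-01-01T00:00:00Z whose post-transmux start is 10s, 2s of which
// was prepended content, a player time of 15s sits 3s past the original
// segment start, i.e. 2021-01-01T00:00:03Z. All values are assumptions.
var examplePlayerToProgramUsage = function examplePlayerToProgramUsage() {
  var exampleSegment = {
    dateTimeObject: new Date('2021-01-01T00:00:00Z'),
    videoTimingInfo: {
      transmuxerPrependedSeconds: 2,
      transmuxedPresentationStart: 10
    }
  };
  return playerTimeToProgramTime(15, exampleSegment); // 00:00:03.000Z
};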
2302var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
2303 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
2304};
2305/**
2306 * Finds a segment that contains the time requested given as an ISO-8601 string. The
2307 * returned segment might be an estimate or an accurate match.
2308 *
2309 * @param {string} programTime The ISO-8601 programTime to find a match for
2310 * @param {Object} playlist A playlist object to search within
2311 */
2312
2313var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
2314 // Assumptions:
2315 // - verifyProgramDateTimeTags has already been run
2316 // - live streams have been started
2317 var dateTimeObject;
2318
2319 try {
2320 dateTimeObject = new Date(programTime);
2321 } catch (e) {
2322 return null;
2323 }
2324
2325 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
2326 return null;
2327 }
2328
2329 var segment = playlist.segments[0];
2330
2331 if (dateTimeObject < segment.dateTimeObject) {
2332 // Requested time is before stream start.
2333 return null;
2334 }
2335
2336 for (var i = 0; i < playlist.segments.length - 1; i++) {
2337 segment = playlist.segments[i];
2338 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
2339
2340 if (dateTimeObject < nextSegmentStart) {
2341 break;
2342 }
2343 }
2344
2345 var lastSegment = playlist.segments[playlist.segments.length - 1];
2346 var lastSegmentStart = lastSegment.dateTimeObject;
2347 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
2348 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
2349
2350 if (dateTimeObject > lastSegmentEnd) {
2351 // Beyond the end of the stream, or our best guess of the end of the stream.
2352 return null;
2353 }
2354
2355 if (dateTimeObject > lastSegmentStart) {
2356 segment = lastSegment;
2357 }
2358
2359 return {
2360 segment: segment,
2361 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
2362 // Although the selected segment should be accurate, since all segments
2363 // have accurate date time objects, the segment's "player time" (the start
2364 // time in the player) can't be considered accurate unless the video has
2365 // been transmuxed at some point (indicated by the videoTimingInfo object).
2366 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
2367 };
2368};
2369/**
2370 * Finds a segment that contains the given player time (in seconds).
2371 *
2372 * @param {number} time The player time to find a match for
2373 * @param {Object} playlist A playlist object to search within
2374 */
2375
2376var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
2377 // Assumptions:
2378 // - there will always be a segment.duration
2379 // - we can start from zero
2380 // - segments are in time order
2381 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
2382 return null;
2383 }
2384
2385 var segmentEnd = 0;
2386 var segment;
2387
2388 for (var i = 0; i < playlist.segments.length; i++) {
2389 segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
2390 // should contain the most accurate values we have for the segment's player times.
2391 //
2392 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
2393 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
2394 // calculate an end value.
2395
2396 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
2397
2398 if (time <= segmentEnd) {
2399 break;
2400 }
2401 }
2402
2403 var lastSegment = playlist.segments[playlist.segments.length - 1];
2404
2405 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
2406 // The time requested is beyond the stream end.
2407 return null;
2408 }
2409
2410 if (time > segmentEnd) {
2411 // The time is within or beyond the last segment.
2412 //
2413 // Check to see if the time is beyond a reasonable guess of the end of the stream.
2414 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
2415 // Technically, because the duration value is only an estimate, the time may still
2416 // exist in the last segment, however, there isn't enough information to make even
2417 // a reasonable estimate.
2418 return null;
2419 }
2420
2421 segment = lastSegment;
2422 }
2423
2424 return {
2425 segment: segment,
2426 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
2427 // Because videoTimingInfo is only set after transmux, it is the only way to get
2428 // accurate timing values.
2429 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
2430 };
2431};
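// Illustrative sketch (not part of the library): with two 6 second
// segments that haven't been transmuxed yet (no videoTimingInfo), a player
// time of 7s falls in the second segment and is only an estimate.
var exampleFindSegmentUsage = function exampleFindSegmentUsage() {
  var examplePlaylist = { segments: [{ duration: 6 }, { duration: 6 }] };
  // expected: the second segment, estimatedStart 6, type 'estimate'
  return findSegmentForPlayerTime(7, examplePlaylist);
};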
2432/**
2433 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
2434 * If the offset returned is positive, the programTime occurs after the
2435 * comparisonTimestamp.
2436 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
2437 *
2438 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
2439 * @param {string} programTime The programTime as an ISO-8601 string
2440 * @return {number} offset
2441 */
2442
2443var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
2444 var segmentDateTime;
2445 var programDateTime;
2446
2447 try {
2448 segmentDateTime = new Date(comparisonTimeStamp);
2449 programDateTime = new Date(programTime);
2450 } catch (e) { // TODO handle error
2451 }
2452
2453 var segmentTimeEpoch = segmentDateTime.getTime();
2454 var programTimeEpoch = programDateTime.getTime();
2455 return (programTimeEpoch - segmentTimeEpoch) / 1000;
2456};
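// Illustrative sketch (not part of the library): a programTime 10 seconds
// after the comparison timestamp yields a positive offset of 10.
var exampleOffsetUsage = function exampleOffsetUsage() {
  return getOffsetFromTimestamp('2021-01-01T00:00:00Z', // comparison
  '2021-01-01T00:00:10Z' // program time
  ); // 10
};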
2457/**
2458 * Checks that all segments in this playlist have programDateTime tags.
2459 *
2460 * @param {Object} playlist A playlist object
2461 */
2462
2463var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
2464 if (!playlist.segments || playlist.segments.length === 0) {
2465 return false;
2466 }
2467
2468 for (var i = 0; i < playlist.segments.length; i++) {
2469 var segment = playlist.segments[i];
2470
2471 if (!segment.dateTimeObject) {
2472 return false;
2473 }
2474 }
2475
2476 return true;
2477};
2478/**
2479 * Returns the programTime of the media given a playlist and a playerTime.
2480 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
2481 * If the segments containing the time requested have not been buffered yet, an estimate
2482 * may be returned to the callback.
2483 *
2484 * @param {Object} args
2485 * @param {Object} args.playlist A playlist object to search within
2486 * @param {number} args.time A playerTime in seconds
2487 * @param {Function} args.callback The callback(err, programTime) to return to
2488 * @return {string} err.message A detailed error message
2489 * @return {Object} programTime
2490 * @return {number} programTime.mediaSeconds The streamTime in seconds
2491 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
2492 */
2493
2494var getProgramTime = function getProgramTime(_ref) {
2495 var playlist = _ref.playlist,
2496 _ref$time = _ref.time,
2497 time = _ref$time === void 0 ? undefined : _ref$time,
2498 callback = _ref.callback;
2499
2500 if (!callback) {
2501 throw new Error('getProgramTime: callback must be provided');
2502 }
2503
2504 if (!playlist || time === undefined) {
2505 return callback({
2506 message: 'getProgramTime: playlist and time must be provided'
2507 });
2508 }
2509
2510 var matchedSegment = findSegmentForPlayerTime(time, playlist);
2511
2512 if (!matchedSegment) {
2513 return callback({
2514 message: 'valid programTime was not found'
2515 });
2516 }
2517
2518 if (matchedSegment.type === 'estimate') {
2519 return callback({
2520 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
2521 seekTime: matchedSegment.estimatedStart
2522 });
2523 }
2524
2525 var programTimeObject = {
2526 mediaSeconds: time
2527 };
2528 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
2529
2530 if (programTime) {
2531 programTimeObject.programDateTime = programTime.toISOString();
2532 }
2533
2534 return callback(null, programTimeObject);
2535};
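// Illustrative sketch (not part of the library) of the callback contract;
// `examplePlaylist` is a hypothetical playlist with buffered, transmuxed
// segments. An 'estimate' match calls back with an error carrying a
// seekTime hint instead of a programTime.
var exampleGetProgramTimeUsage = function exampleGetProgramTimeUsage(examplePlaylist) {
  getProgramTime({
    playlist: examplePlaylist,
    time: 5,
    callback: function (err, programTime) {
      if (err) {
        // err.message explains the failure; err.seekTime is present when
        // only an estimate was possible
        return;
      }
      // programTime.mediaSeconds is 5; programTime.programDateTime is an
      // ISO-8601 string when programDateTime tags were available
    }
  });
};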
2536/**
2537 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
2538 *
2539 * @param {Object} args
2540 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
2541 * @param {Object} args.playlist A playlist to look within
2542 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
2543 * @param {Function} args.seekTo A method to perform a seek
2544 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
2545 * @param {Object} args.tech The tech to seek on
2546 * @param {Function} args.callback(err, newTime) A callback to return the new time to
2547 * @return {string} err.message A detailed error message
2548 * @return {number} newTime The exact time that was seeked to in seconds
2549 */
2550
2551var seekToProgramTime = function seekToProgramTime(_ref2) {
2552 var programTime = _ref2.programTime,
2553 playlist = _ref2.playlist,
2554 _ref2$retryCount = _ref2.retryCount,
2555 retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
2556 seekTo = _ref2.seekTo,
2557 _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
2558 pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
2559 tech = _ref2.tech,
2560 callback = _ref2.callback;
2561
2562 if (!callback) {
2563 throw new Error('seekToProgramTime: callback must be provided');
2564 }
2565
2566 if (typeof programTime === 'undefined' || !playlist || !seekTo) {
2567 return callback({
2568 message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
2569 });
2570 }
2571
2572 if (!playlist.endList && !tech.hasStarted_) {
2573 return callback({
2574 message: 'player must be playing a live stream to start buffering'
2575 });
2576 }
2577
2578 if (!verifyProgramDateTimeTags(playlist)) {
2579 return callback({
2580 message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
2581 });
2582 }
2583
2584 var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
2585
2586 if (!matchedSegment) {
2587 return callback({
2588 message: programTime + " was not found in the stream"
2589 });
2590 }
2591
2592 var segment = matchedSegment.segment;
2593 var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
2594
2595 if (matchedSegment.type === 'estimate') {
2596 // we've run out of retries
2597 if (retryCount === 0) {
2598 return callback({
2599 message: programTime + " is not buffered yet. Try again"
2600 });
2601 }
2602
2603 seekTo(matchedSegment.estimatedStart + mediaOffset);
2604 tech.one('seeked', function () {
2605 seekToProgramTime({
2606 programTime: programTime,
2607 playlist: playlist,
2608 retryCount: retryCount - 1,
2609 seekTo: seekTo,
2610 pauseAfterSeek: pauseAfterSeek,
2611 tech: tech,
2612 callback: callback
2613 });
2614 });
2615 return;
2616 } // Since the segment.start value is determined from the buffered end or ending time
2617 // of the prior segment, the seekToTime doesn't need to account for any transmuxer
2618 // modifications.
2619
2620
2621 var seekToTime = segment.start + mediaOffset;
2622
2623 var seekedCallback = function seekedCallback() {
2624 return callback(null, tech.currentTime());
2625 }; // listen for seeked event
2626
2627
2628 tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
2629
2630 if (pauseAfterSeek) {
2631 tech.pause();
2632 }
2633
2634 seekTo(seekToTime);
2635};
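// Illustrative sketch (not part of the library) of wiring seekToProgramTime
// to a player. `player` is a hypothetical video.js player whose current
// playlist carries EXT-X-PROGRAM-DATE-TIME tags; the tech access pattern is
// an assumption for the example.
var exampleSeekToProgramTimeUsage = function exampleSeekToProgramTimeUsage(player, playlist) {
  seekToProgramTime({
    programTime: '2021-01-01T00:00:30Z',
    playlist: playlist,
    seekTo: player.currentTime.bind(player),
    pauseAfterSeek: true,
    tech: player.tech(true),
    callback: function (err, newTime) {
      if (err) {
        // e.g. the programTime was not found, or retries were exhausted
        return;
      }
      // newTime is the exact time that was seeked to, in seconds
    }
  });
};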
2636
2637// Calls back only when request.readyState is 4 (DONE), which will only happen if the request is complete.
2638
2639var callbackOnCompleted = function callbackOnCompleted(request, cb) {
2640 if (request.readyState === 4) {
2641 return cb();
2642 }
2643
2644 return;
2645};
2646
2647var containerRequest = function containerRequest(uri, xhr, cb) {
2648 var bytes = [];
2649 var id3Offset;
2650 var finished = false;
2651
2652 var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
2653 req.abort();
2654 finished = true;
2655 return cb(err, req, type, _bytes);
2656 };
2657
2658 var progressListener = function progressListener(error, request) {
2659 if (finished) {
2660 return;
2661 }
2662
2663 if (error) {
2664 return endRequestAndCallback(error, request, '', bytes);
2665 } // grab the new part of content that was just downloaded
2666
2667
2668 var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
2669
2670 bytes = byteHelpers.concatTypedArrays(bytes, byteHelpers.stringToBytes(newPart, true));
2671 id3Offset = id3Offset || id3Helpers.getId3Offset(bytes); // we need at least 10 bytes to determine a type
2672 // or we need at least two bytes after an id3Offset
2673
2674 if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
2675 return callbackOnCompleted(request, function () {
2676 return endRequestAndCallback(error, request, '', bytes);
2677 });
2678 }
2679
2680 var type = containers.detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
2681 // to see the second sync byte, wait until we have enough data
2682 // before declaring it ts
2683
2684 if (type === 'ts' && bytes.length < 188) {
2685 return callbackOnCompleted(request, function () {
2686 return endRequestAndCallback(error, request, '', bytes);
2687 });
2688 } // this may be an unsynced ts segment
2689 // wait for 376 bytes before detecting no container
2690
2691
2692 if (!type && bytes.length < 376) {
2693 return callbackOnCompleted(request, function () {
2694 return endRequestAndCallback(error, request, '', bytes);
2695 });
2696 }
2697
2698 return endRequestAndCallback(null, request, type, bytes);
2699 };
2700
2701 var options = {
2702 uri: uri,
2703 beforeSend: function beforeSend(request) {
2704 // this forces the browser to pass the bytes to us unprocessed
2705 request.overrideMimeType('text/plain; charset=x-user-defined');
2706 request.addEventListener('progress', function (_ref) {
2707 // the progress event's total/loaded values are unused here
2709 return callbackWrapper(request, null, {
2710 statusCode: request.status
2711 }, progressListener);
2712 });
2713 }
2714 };
2715 var request = xhr(options, function (error, response) {
2716 return callbackWrapper(request, error, response, progressListener);
2717 });
2718 return request;
2719};
2720
2721var EventTarget = videojs__default['default'].EventTarget,
2722 mergeOptions = videojs__default['default'].mergeOptions;
2723
2724var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
2725 if (!isPlaylistUnchanged(a, b)) {
2726 return false;
2727 } // for dash the above check will often return true in scenarios where
2728 // the playlist actually has changed because mediaSequence isn't a
2729 // dash thing, and we often set it to 1. So if the playlists have the same amount
2730 // of segments we return true.
2731 // So for dash we need to make sure that the underlying segments are different.
2732 // if sidx changed then the playlists are different.
2733
2734
2735 if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
2736 return false;
2737 } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
2738 return false;
2739 } // one or the other does not have segments
2740 // there was a change.
2741
2742
2743 if (a.segments && !b.segments || !a.segments && b.segments) {
2744 return false;
2745 } // neither has segments nothing changed
2746
2747
2748 if (!a.segments && !b.segments) {
2749 return true;
2750 } // check segments themselves
2751
2752
2753 for (var i = 0; i < a.segments.length; i++) {
2754 var aSegment = a.segments[i];
2755 var bSegment = b.segments[i]; // if uris are different between segments there was a change
2756
2757 if (aSegment.uri !== bSegment.uri) {
2758 return false;
2759 } // neither segment has a byterange, there will be no byterange change.
2760
2761
2762 if (!aSegment.byterange && !bSegment.byterange) {
2763 continue;
2764 }
2765
2766 var aByterange = aSegment.byterange;
2767 var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
2768
2769 if (aByterange && !bByterange || !aByterange && bByterange) {
2770 return false;
2771 } // if both segments have byterange with different offsets, there was a change.
2772
2773
2774 if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
2775 return false;
2776 }
2777 } // if everything was the same with segments, this is the same playlist.
2778
2779
2780 return true;
2781};
2782/**
2783 * Parses the master XML string and updates playlist URI references.
2784 *
2785 * @param {Object} config
2786 * Object of arguments
2787 * @param {string} config.masterXml
2788 * The mpd XML
2789 * @param {string} config.srcUrl
2790 * The mpd URL
2791 * @param {number} config.clientOffset
2792 * A time difference between server and client
2793 * @param {Object} config.sidxMapping
2794 * SIDX mappings for moof/mdat URIs and byte ranges
2795 * @return {Object}
2796 * The parsed mpd manifest object
2797 */
2798
2799
2800var parseMasterXml = function parseMasterXml(_ref) {
2801 var masterXml = _ref.masterXml,
2802 srcUrl = _ref.srcUrl,
2803 clientOffset = _ref.clientOffset,
2804 sidxMapping = _ref.sidxMapping;
2805 var master = mpdParser.parse(masterXml, {
2806 manifestUri: srcUrl,
2807 clientOffset: clientOffset,
2808 sidxMapping: sidxMapping
2809 });
2810 addPropertiesToMaster(master, srcUrl);
2811 return master;
2812};
2813/**
2814 * Returns a new master manifest that is the result of merging an updated master manifest
2815 * into the original version.
2816 *
2817 * @param {Object} oldMaster
2818 * The old parsed mpd object
2819 * @param {Object} newMaster
2820 * The updated parsed mpd object
2821 * @return {Object}
2822 * A new object representing the original master manifest with the updated media
2823 * playlists merged in
2824 */
2825
2826var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
2827 var noChanges = true;
2828 var update = mergeOptions(oldMaster, {
2829 // These are top level properties that can be updated
2830 duration: newMaster.duration,
2831 minimumUpdatePeriod: newMaster.minimumUpdatePeriod
2832 }); // First update the playlists in playlist list
2833
2834 for (var i = 0; i < newMaster.playlists.length; i++) {
2835 var playlist = newMaster.playlists[i];
2836
2837 if (playlist.sidx) {
2838 var sidxKey = mpdParser.generateSidxKey(playlist.sidx);
2839
2840 if (sidxMapping && sidxMapping[sidxKey]) {
2841 mpdParser.addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
2842 }
2843 }
2844
2845 var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);
2846
2847 if (playlistUpdate) {
2848 update = playlistUpdate;
2849 noChanges = false;
2850 }
2851 } // Then update media group playlists
2852
2853
2854 forEachMediaGroup(newMaster, function (properties, type, group, label) {
2855 if (properties.playlists && properties.playlists.length) {
2856 var id = properties.playlists[0].id;
2857
2858 var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);
2859
2860 if (_playlistUpdate) {
2861 update = _playlistUpdate; // update the playlist reference within media groups
2862
2863 update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
2864 noChanges = false;
2865 }
2866 }
2867 });
2868
2869 if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
2870 noChanges = false;
2871 }
2872
2873 if (noChanges) {
2874 return null;
2875 }
2876
2877 return update;
2878}; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
2879// If the SIDXs have maps, the two maps should match,
2880// both `a` and `b` missing SIDXs is considered matching.
2881// If `a` or `b` but not both have a map, they aren't matching.
2882
2883var equivalentSidx = function equivalentSidx(a, b) {
2884 var neitherMap = Boolean(!a.map && !b.map);
2885 var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
2886 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
2887}; // exported for testing
2888
2889
2890var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
2891 var newSidxMapping = {};
2892
2893 for (var id in playlists) {
2894 var playlist = playlists[id];
2895 var currentSidxInfo = playlist.sidx;
2896
2897 if (currentSidxInfo) {
2898 var key = mpdParser.generateSidxKey(currentSidxInfo);
2899
2900 if (!oldSidxMapping[key]) {
2901 break;
2902 }
2903
2904 var savedSidxInfo = oldSidxMapping[key].sidxInfo;
2905
2906 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
2907 newSidxMapping[key] = oldSidxMapping[key];
2908 }
2909 }
2910 }
2911
2912 return newSidxMapping;
2913};
2914/**
2915 * A function that filters out changed items as they need to be requested separately.
2916 *
2917 * The method is exported for testing
2918 *
2919 * @param {Object} master the parsed mpd XML returned via mpd-parser
2920 * @param {Object} oldSidxMapping the SIDX to compare against
2921 */
2922
2923var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
2924 var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
2925 var mediaGroupSidx = videoSidx;
2926 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
2927 if (properties.playlists && properties.playlists.length) {
2928 var playlists = properties.playlists;
2929 mediaGroupSidx = mergeOptions(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
2930 }
2931 });
2932 return mediaGroupSidx;
2933};
2934
2935var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
2936 _inheritsLoose__default['default'](DashPlaylistLoader, _EventTarget);
2937
2938 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
2939 // playlist loader setups from media groups will expect to be able to pass a playlist
2940 // (since there aren't external URLs to media playlists with DASH)
2941 function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
2942 var _this;
2943
2944 if (options === void 0) {
2945 options = {};
2946 }
2947
2948 _this = _EventTarget.call(this) || this;
2949 _this.masterPlaylistLoader_ = masterPlaylistLoader || _assertThisInitialized__default['default'](_this);
2950
2951 if (!masterPlaylistLoader) {
2952 _this.isMaster_ = true;
2953 }
2954
2955 var _options = options,
2956 _options$withCredenti = _options.withCredentials,
2957 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
2958 _options$handleManife = _options.handleManifestRedirects,
2959 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
2960 _this.vhs_ = vhs;
2961 _this.withCredentials = withCredentials;
2962 _this.handleManifestRedirects = handleManifestRedirects;
2963
2964 if (!srcUrlOrPlaylist) {
2965 throw new Error('A non-empty playlist URL or object is required');
2966 } // event naming?
2967
2968
2969 _this.on('minimumUpdatePeriod', function () {
2970 _this.refreshXml_();
2971 }); // live playlist staleness timeout
2972
2973
2974 _this.on('mediaupdatetimeout', function () {
2975 _this.refreshMedia_(_this.media().id);
2976 });
2977
2978 _this.state = 'HAVE_NOTHING';
2979 _this.loadedPlaylists_ = {};
2980 _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
2981 // The masterPlaylistLoader will be created with a string
2982
2983 if (_this.isMaster_) {
2984 _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
2985 // once multi-period is refactored
2986
2987 _this.masterPlaylistLoader_.sidxMapping_ = {};
2988 } else {
2989 _this.childPlaylist_ = srcUrlOrPlaylist;
2990 }
2991
2992 return _this;
2993 }
2994
2995 var _proto = DashPlaylistLoader.prototype;
2996
2997 _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
2998 // disposed
2999 if (!this.request) {
3000 return true;
3001 } // pending request is cleared
3002
3003
3004 this.request = null;
3005
3006 if (err) {
3007 // use the provided error object or create one
3008 // based on the request/response
3009 this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
3010 status: request.status,
3011 message: 'DASH request error at URL: ' + request.uri,
3012 response: request.response,
3013 // MEDIA_ERR_NETWORK
3014 code: 2
3015 };
3016
3017 if (startingState) {
3018 this.state = startingState;
3019 }
3020
3021 this.trigger('error');
3022 return true;
3023 }
3024 }
3025 /**
3026 * Verify that the container of the sidx segment can be parsed
3027 * and if it can, get and parse that segment.
3028 */
3029 ;
3030
3031 _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
3032 var _this2 = this;
3033
3034 var sidxKey = playlist.sidx && mpdParser.generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
3035
3036 if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
3037 // keep this function async
3038 this.mediaRequest_ = window__default['default'].setTimeout(function () {
3039 return cb(false);
3040 }, 0);
3041 return;
3042 } // resolve the segment URL relative to the playlist
3043
3044
3045 var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
3046
3047 var fin = function fin(err, request) {
3048 if (_this2.requestErrored_(err, request, startingState)) {
3049 return;
3050 }
3051
3052 var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
3053 var sidx;
3054
3055 try {
3056 sidx = parseSidx__default['default'](byteHelpers.toUint8(request.response).subarray(8));
3057 } catch (e) {
3058 // sidx parsing failed.
3059 _this2.requestErrored_(e, request, startingState);
3060
3061 return;
3062 }
3063
3064 sidxMapping[sidxKey] = {
3065 sidxInfo: playlist.sidx,
3066 sidx: sidx
3067 };
3068 mpdParser.addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
3069 return cb(true);
3070 };
3071
3072 this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
3073 if (err) {
3074 return fin(err, request);
3075 }
3076
3077 if (!container || container !== 'mp4') {
3078 return fin({
3079 status: request.status,
3080 message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
3081 // response is just bytes in this case
3082 // but we really don't want to return that.
3083 response: '',
3084 playlist: playlist,
3085 internal: true,
3086 blacklistDuration: Infinity,
3087 // MEDIA_ERR_NETWORK
3088 code: 2
3089 }, request);
3090 } // if we already downloaded the sidx bytes in the container request, use them
3091
3092
3093 var _playlist$sidx$bytera = playlist.sidx.byterange,
3094 offset = _playlist$sidx$bytera.offset,
3095 length = _playlist$sidx$bytera.length;
3096
3097 if (bytes.length >= length + offset) {
3098 return fin(err, {
3099 response: bytes.subarray(offset, offset + length),
3100 status: request.status,
3101 uri: request.uri
3102 });
3103 } // otherwise request sidx bytes
3104
3105
3106 _this2.request = _this2.vhs_.xhr({
3107 uri: uri,
3108 responseType: 'arraybuffer',
3109 headers: segmentXhrHeaders({
3110 byterange: playlist.sidx.byterange
3111 })
3112 }, fin);
3113 });
3114 };
3115
3116 _proto.dispose = function dispose() {
3117 this.trigger('dispose');
3118 this.stopRequest();
3119 this.loadedPlaylists_ = {};
3120 window__default['default'].clearTimeout(this.minimumUpdatePeriodTimeout_);
3121 window__default['default'].clearTimeout(this.mediaRequest_);
3122 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
3123 this.mediaUpdateTimeout = null;
3124 this.mediaRequest_ = null;
3125 this.minimumUpdatePeriodTimeout_ = null;
3126
3127 if (this.masterPlaylistLoader_.createMupOnMedia_) {
3128 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
3129 this.masterPlaylistLoader_.createMupOnMedia_ = null;
3130 }
3131
3132 this.off();
3133 };
3134
3135 _proto.hasPendingRequest = function hasPendingRequest() {
3136 return this.request || this.mediaRequest_;
3137 };
3138
3139 _proto.stopRequest = function stopRequest() {
3140 if (this.request) {
3141 var oldRequest = this.request;
3142 this.request = null;
3143 oldRequest.onreadystatechange = null;
3144 oldRequest.abort();
3145 }
3146 };
3147
3148 _proto.media = function media(playlist) {
3149 var _this3 = this;
3150
3151 // getter
3152 if (!playlist) {
3153 return this.media_;
3154 } // setter
3155
3156
3157 if (this.state === 'HAVE_NOTHING') {
3158 throw new Error('Cannot switch media playlist from ' + this.state);
3159 }
3160
3161 var startingState = this.state; // find the playlist object if the target playlist has been specified by URI
3162
3163 if (typeof playlist === 'string') {
3164 if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
3165 throw new Error('Unknown playlist URI: ' + playlist);
3166 }
3167
3168 playlist = this.masterPlaylistLoader_.master.playlists[playlist];
3169 }
3170
3171 var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
3172
3173 if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
3174 this.state = 'HAVE_METADATA';
3175 this.media_ = playlist; // trigger media change if the active media has been updated
3176
3177 if (mediaChange) {
3178 this.trigger('mediachanging');
3179 this.trigger('mediachange');
3180 }
3181
3182 return;
3183 } // switching to the active playlist is a no-op
3184
3185
3186 if (!mediaChange) {
3187 return;
3188 } // switching from an already loaded playlist
3189
3190
3191 if (this.media_) {
3192 this.trigger('mediachanging');
3193 }
3194
3195 this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
3196 // everything is ready just continue to haveMetadata
3197 _this3.haveMetadata({
3198 startingState: startingState,
3199 playlist: playlist
3200 });
3201 });
3202 };
3203
3204 _proto.haveMetadata = function haveMetadata(_ref2) {
3205 var startingState = _ref2.startingState,
3206 playlist = _ref2.playlist;
3207 this.state = 'HAVE_METADATA';
3208 this.loadedPlaylists_[playlist.id] = playlist;
3209 this.mediaRequest_ = null; // This will trigger loadedplaylist
3210
3211 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
3212 // to resolve setup of media groups
3213
3214 if (startingState === 'HAVE_MASTER') {
3215 this.trigger('loadedmetadata');
3216 } else {
3217 // trigger media change if the active media has been updated
3218 this.trigger('mediachange');
3219 }
3220 };
3221
3222 _proto.pause = function pause() {
3223 if (this.masterPlaylistLoader_.createMupOnMedia_) {
3224 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
3225 this.masterPlaylistLoader_.createMupOnMedia_ = null;
3226 }
3227
3228 this.stopRequest();
3229 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
3230 window__default['default'].clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
3231 this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
3232 this.mediaUpdateTimeout = null;
3233
3234 if (this.state === 'HAVE_NOTHING') {
3235 // If we pause the loader before any data has been retrieved, it's as if we never
3236 // started, so reset to an unstarted state.
3237 this.started = false;
3238 }
3239 };
3240
3241 _proto.load = function load(isFinalRendition) {
3242 var _this4 = this;
3243
3244 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
3245 this.mediaUpdateTimeout = null;
3246 var media = this.media();
3247
3248 if (isFinalRendition) {
3249 var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
3250 this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
3251 return _this4.load();
3252 }, delay);
3253 return;
3254 } // because the playlists are internal to the manifest, load should either load the
3255 // main manifest, or do nothing but trigger an event
3256
3257
3258 if (!this.started) {
3259 this.start();
3260 return;
3261 }
3262
3263 if (media && !media.endList) {
3264 this.trigger('mediaupdatetimeout');
3265 } else {
3266 this.trigger('loadedplaylist');
3267 }
3268 };
3269
3270 _proto.start = function start() {
3271 var _this5 = this;
3272
3273 this.started = true; // We don't need to request the master manifest again
3274 // Call this asynchronously to match the xhr request behavior below
3275
3276 if (!this.isMaster_) {
3277 this.mediaRequest_ = window__default['default'].setTimeout(function () {
3278 return _this5.haveMaster_();
3279 }, 0);
3280 return;
3281 }
3282
3283 this.requestMaster_(function (req, masterChanged) {
3284 _this5.haveMaster_();
3285
3286 if (!_this5.hasPendingRequest() && !_this5.media_) {
3287 _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
3288 }
3289 });
3290 };
3291
3292 _proto.requestMaster_ = function requestMaster_(cb) {
3293 var _this6 = this;
3294
3295 this.request = this.vhs_.xhr({
3296 uri: this.masterPlaylistLoader_.srcUrl,
3297 withCredentials: this.withCredentials
3298 }, function (error, req) {
3299 if (_this6.requestErrored_(error, req)) {
3300 if (_this6.state === 'HAVE_NOTHING') {
3301 _this6.started = false;
3302 }
3303
3304 return;
3305 }
3306
3307 var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
3308 _this6.masterPlaylistLoader_.masterXml_ = req.responseText;
3309
3310 if (req.responseHeaders && req.responseHeaders.date) {
3311 _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
3312 } else {
3313 _this6.masterLoaded_ = Date.now();
3314 }
3315
3316 _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);
3317
3318 if (masterChanged) {
3319 _this6.handleMaster_();
3320
3321 _this6.syncClientServerClock_(function () {
3322 return cb(req, masterChanged);
3323 });
3324
3325 return;
3326 }
3327
3328 return cb(req, masterChanged);
3329 });
3330 }
3331 /**
3332 * Parses the master xml for UTCTiming node to sync the client clock to the server
3333 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
3334 *
3335 * @param {Function} done
3336 * Function to call when clock sync has completed
3337 */
3338 ;
3339
3340 _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
3341 var _this7 = this;
3342
3343 var utcTiming = mpdParser.parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
3344 // server clock
3345
3346 if (utcTiming === null) {
3347 this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
3348 return done();
3349 }
3350
3351 if (utcTiming.method === 'DIRECT') {
3352 this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
3353 return done();
3354 }
3355
3356 this.request = this.vhs_.xhr({
3357 uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
3358 method: utcTiming.method,
3359 withCredentials: this.withCredentials
3360 }, function (error, req) {
3361 // disposed
3362 if (!_this7.request) {
3363 return;
3364 }
3365
3366 if (error) {
3367 // sync request failed, fall back to using date header from mpd
3368 // TODO: log warning
3369 _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
3370 return done();
3371 }
3372
3373 var serverTime;
3374
3375 if (utcTiming.method === 'HEAD') {
3376 if (!req.responseHeaders || !req.responseHeaders.date) {
3377 // expected date header not present, fall back to using date header from mpd
3378 // TODO: log warning
3379 serverTime = _this7.masterLoaded_;
3380 } else {
3381 serverTime = Date.parse(req.responseHeaders.date);
3382 }
3383 } else {
3384 serverTime = Date.parse(req.responseText);
3385 }
3386
3387 _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
3388 done();
3389 });
3390 };
3391
3392 _proto.haveMaster_ = function haveMaster_() {
3393 this.state = 'HAVE_MASTER';
3394
3395 if (this.isMaster_) {
3396 // We have the master playlist at this point, so
3397 // trigger this to allow MasterPlaylistController
3398 // to make an initial playlist selection
3399 this.trigger('loadedplaylist');
3400 } else if (!this.media_) {
3401 // no media playlist was specifically selected so select
3402 // the one the child playlist loader was created with
3403 this.media(this.childPlaylist_);
3404 }
3405 };
3406
3407 _proto.handleMaster_ = function handleMaster_() {
3408 // clear media request
3409 this.mediaRequest_ = null;
3410 var newMaster = parseMasterXml({
3411 masterXml: this.masterPlaylistLoader_.masterXml_,
3412 srcUrl: this.masterPlaylistLoader_.srcUrl,
3413 clientOffset: this.masterPlaylistLoader_.clientOffset_,
3414 sidxMapping: this.masterPlaylistLoader_.sidxMapping_
3415 });
3416 var oldMaster = this.masterPlaylistLoader_.master; // if we have an old master to compare the new master against
3417
3418 if (oldMaster) {
3419 newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
3420 } // only update master if we have a new master
3421
3422
3423 this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
3424 var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
3425
3426 if (location && location !== this.masterPlaylistLoader_.srcUrl) {
3427 this.masterPlaylistLoader_.srcUrl = location;
3428 }
3429
3430 if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
3431 this.updateMinimumUpdatePeriodTimeout_();
3432 }
3433
3434 return Boolean(newMaster);
3435 };
3436
3437 _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
3438 var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
3439 // a new one will be added if needed.
3440
3441 if (mpl.createMupOnMedia_) {
3442 mpl.off('loadedmetadata', mpl.createMupOnMedia_);
3443 mpl.createMupOnMedia_ = null;
3444 } // clear any pending timeouts
3445
3446
3447 if (mpl.minimumUpdatePeriodTimeout_) {
3448 window__default['default'].clearTimeout(mpl.minimumUpdatePeriodTimeout_);
3449 mpl.minimumUpdatePeriodTimeout_ = null;
3450 }
3451
3452 var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
3453 // MPD has no future validity, so a new one will need to be acquired when new
3454 // media segments are to be made available. Thus, we use the target duration
3455 // in this case
3456
3457 if (mup === 0) {
3458 if (mpl.media()) {
3459 mup = mpl.media().targetDuration * 1000;
3460 } else {
3461 mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
3462 mpl.one('loadedmetadata', mpl.createMupOnMedia_);
3463 }
3464 } // if minimumUpdatePeriod is invalid or <= zero (which can
3465 // happen when a live video becomes VOD), skip timeout
3466 // creation
3467
3468
3469 if (typeof mup !== 'number' || mup <= 0) {
3470 if (mup < 0) {
3471 this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
3472 }
3473
3474 return;
3475 }
3476
3477 this.createMUPTimeout_(mup);
3478 };
3479
3480 _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
3481 var mpl = this.masterPlaylistLoader_;
3482 mpl.minimumUpdatePeriodTimeout_ = window__default['default'].setTimeout(function () {
3483 mpl.minimumUpdatePeriodTimeout_ = null;
3484 mpl.trigger('minimumUpdatePeriod');
3485 mpl.createMUPTimeout_(mup);
3486 }, mup);
3487 }
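
// Note: the timeout above re-arms itself after each 'minimumUpdatePeriod'
// event, so master refreshes recur every `mup` milliseconds until
// updateMinimumUpdatePeriodTimeout_ clears the pending timeout.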
3488 /**
3489 * Sends request to refresh the master xml and updates the parsed master manifest
3490 */
3491 ;
3492
3493 _proto.refreshXml_ = function refreshXml_() {
3494 var _this8 = this;
3495
3496 this.requestMaster_(function (req, masterChanged) {
3497 if (!masterChanged) {
3498 return;
3499 }
3500
3501 if (_this8.media_) {
3502 _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
3503 } // This will filter out updated sidx info from the mapping
3504
3505
3506 _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);
3507
3508 _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
3509 // TODO: do we need to reload the current playlist?
3510 _this8.refreshMedia_(_this8.media().id);
3511 });
3512 });
3513 }
3514 /**
3515 * Refreshes the media playlist by re-parsing the master xml and updating playlist
3516 * references. If this is an alternate loader, the updated parsed manifest is retrieved
3517 * from the master loader.
3518 */
3519 ;
3520
3521 _proto.refreshMedia_ = function refreshMedia_(mediaID) {
3522 var _this9 = this;
3523
3524 if (!mediaID) {
3525 throw new Error('refreshMedia_ must take a media id');
3526 } // for master we have to reparse the master xml
3527 // to re-create segments based on current timing values,
3528 // which may change media. We only skip updating master
3529 // if this is the first time this.media_ is being set,
3530 // as master was just parsed in that case.
3531
3532
3533 if (this.media_ && this.isMaster_) {
3534 this.handleMaster_();
3535 }
3536
3537 var playlists = this.masterPlaylistLoader_.master.playlists;
3538 var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
3539
3540 if (mediaChanged) {
3541 this.media_ = playlists[mediaID];
3542 } else {
3543 this.trigger('playlistunchanged');
3544 }
3545
3546 if (!this.mediaUpdateTimeout) {
3547 var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
3548 if (_this9.media().endList) {
3549 return;
3550 }
3551
3552 _this9.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
3553 _this9.trigger('mediaupdatetimeout');
3554
3555 createMediaUpdateTimeout();
3556 }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
3557 };
3558
3559 createMediaUpdateTimeout();
3560 }
3561
3562 this.trigger('loadedplaylist');
3563 };
3564
3565 return DashPlaylistLoader;
3566}(EventTarget);
3567
3568var Config = {
3569 GOAL_BUFFER_LENGTH: 30,
3570 MAX_GOAL_BUFFER_LENGTH: 60,
3571 BACK_BUFFER_LENGTH: 30,
3572 GOAL_BUFFER_LENGTH_RATE: 1,
3573 // 0.5 MB/s expressed in bits per second (4194304 = 0.5 * 8 * 1024 * 1024)
3574 INITIAL_BANDWIDTH: 4194304,
3575 // A fudge factor to apply to advertised playlist bitrates to account for
3576 // temporary fluctuations in client bandwidth
3577 BANDWIDTH_VARIANCE: 1.2,
3578 // How much of the buffer must be filled before we consider upswitching
3579 BUFFER_LOW_WATER_LINE: 0,
3580 MAX_BUFFER_LOW_WATER_LINE: 30,
3581 // TODO: Remove this when experimentalBufferBasedABR is removed
3582 EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
3583 BUFFER_LOW_WATER_LINE_RATE: 1,
3584 // If the buffer is greater than the high water line, we won't switch down
3585 BUFFER_HIGH_WATER_LINE: 30
3586};
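
// Hedged usage sketch: builds of VHS expose these knobs for runtime tuning
// (the README documents names like videojs.Vhs.GOAL_BUFFER_LENGTH). The
// `vhs` handle and the exact property names below are assumptions here;
// verify them against the documentation for the version in use.
var exampleTuneBuffering = function exampleTuneBuffering(vhs) {
  vhs.GOAL_BUFFER_LENGTH = 60; // buffer further ahead, in seconds
  vhs.BANDWIDTH_VARIANCE = 1.5; // discount advertised bitrates more heavily
};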
3587
3588var stringToArrayBuffer = function stringToArrayBuffer(string) {
3589 var view = new Uint8Array(new ArrayBuffer(string.length));
3590
3591 for (var i = 0; i < string.length; i++) {
3592 view[i] = string.charCodeAt(i);
3593 }
3594
3595 return view.buffer;
3596};
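
// Usage note: stringToArrayBuffer('abc') returns a 3-byte ArrayBuffer holding
// [0x61, 0x62, 0x63]. Each charCodeAt value is written into a Uint8Array, so
// code points above 0xFF are truncated; inputs are assumed to be binary
// strings.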
3597
3598/* global Blob, BlobBuilder, Worker */
3599// unify worker interface
3600var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
3601 // node only supports on/off
3602 workerObj.on = workerObj.addEventListener;
3603 workerObj.off = workerObj.removeEventListener;
3604 return workerObj;
3605};
3606
3607var createObjectURL = function createObjectURL(str) {
3608 try {
3609 return URL.createObjectURL(new Blob([str], {
3610 type: 'application/javascript'
3611 }));
3612 } catch (e) {
3613 var blob = new BlobBuilder();
3614 blob.append(str);
3615 return URL.createObjectURL(blob.getBlob());
3616 }
3617};
3618
3619var factory = function factory(code) {
3620 return function () {
3621 var objectUrl = createObjectURL(code);
3622 var worker = browserWorkerPolyFill(new Worker(objectUrl));
3623 worker.objURL = objectUrl;
3624 var terminate = worker.terminate;
3625 worker.on = worker.addEventListener;
3626 worker.off = worker.removeEventListener;
3627
3628 worker.terminate = function () {
3629 URL.revokeObjectURL(objectUrl);
3630 return terminate.call(this);
3631 };
3632
3633 return worker;
3634 };
3635};
3636var transform = function transform(code) {
3637 return "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n' + code;
3638};
3639
3640var getWorkerString = function getWorkerString(fn) {
3641 return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
3642};
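
// Usage sketch (hypothetical echo worker, not part of the library): the three
// helpers above turn an inline function body into a web worker whose blob URL
// is revoked when terminate() is called.
var exampleCreateEchoWorker = function exampleCreateEchoWorker() {
  var create = factory(transform(getWorkerString(function () {
    self.onmessage = function (event) {
      self.postMessage(event.data); // echo each message straight back
    };
  })));
  return create(); // callers should terminate() the worker when finished
};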
3643
3644/* rollup-plugin-worker-factory start for worker!/Users/gkatsevman/p/http-streaming-release/src/transmuxer-worker.js */
3645var workerCode$1 = transform(getWorkerString(function () {
3646 /**
3647 * mux.js
3648 *
3649 * Copyright (c) Brightcove
3650 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
3651 *
3652 * A lightweight readable stream implementation that handles event dispatching.
3653 * Objects that inherit from streams should call init in their constructors.
3654 */
3655
3656 var Stream = function Stream() {
3657 this.init = function () {
3658 var listeners = {};
3659 /**
3660 * Add a listener for a specified event type.
3661 * @param type {string} the event name
3662 * @param listener {function} the callback to be invoked when an event of
3663 * the specified type occurs
3664 */
3665
3666 this.on = function (type, listener) {
3667 if (!listeners[type]) {
3668 listeners[type] = [];
3669 }
3670
3671 listeners[type] = listeners[type].concat(listener);
3672 };
3673 /**
3674 * Remove a listener for a specified event type.
3675 * @param type {string} the event name
3676 * @param listener {function} a function previously registered for this
3677 * type of event through `on`
3678 */
3679
3680
3681 this.off = function (type, listener) {
3682 var index;
3683
3684 if (!listeners[type]) {
3685 return false;
3686 }
3687
3688 index = listeners[type].indexOf(listener);
3689 listeners[type] = listeners[type].slice();
3690 if (index > -1) { listeners[type].splice(index, 1); } // guard: splice(-1, 1) would otherwise drop the last listener
3691 return index > -1;
3692 };
3693 /**
3694 * Trigger an event of the specified type on this stream. Any additional
3695 * arguments to this function are passed as parameters to event listeners.
3696 * @param type {string} the event name
3697 */
3698
3699
3700 this.trigger = function (type) {
3701 var callbacks, i, length, args;
3702 callbacks = listeners[type];
3703
3704 if (!callbacks) {
3705 return;
3706 } // Slicing the arguments on every invocation of this method
3707 // can add a significant amount of overhead. Avoid the
3708 // intermediate object creation for the common case of a
3709 // single callback argument
3710
3711
3712 if (arguments.length === 2) {
3713 length = callbacks.length;
3714
3715 for (i = 0; i < length; ++i) {
3716 callbacks[i].call(this, arguments[1]);
3717 }
3718 } else {
3719 args = [];
3721
3722 for (i = 1; i < arguments.length; ++i) {
3723 args.push(arguments[i]);
3724 }
3725
3726 length = callbacks.length;
3727
3728 for (i = 0; i < length; ++i) {
3729 callbacks[i].apply(this, args);
3730 }
3731 }
3732 };
3733 /**
3734 * Destroys the stream and cleans up.
3735 */
3736
3737
3738 this.dispose = function () {
3739 listeners = {};
3740 };
3741 };
3742 };
3743 /**
3744 * Forwards all `data` events on this stream to the destination stream. The
3745 * destination stream should provide a method `push` to receive the data
3746 * events as they arrive.
3747 * @param destination {stream} the stream that will receive all `data` events
3750 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
3751 */
3752
3753
3754 Stream.prototype.pipe = function (destination) {
3755 this.on('data', function (data) {
3756 destination.push(data);
3757 });
3758 this.on('done', function (flushSource) {
3759 destination.flush(flushSource);
3760 });
3761 this.on('partialdone', function (flushSource) {
3762 destination.partialFlush(flushSource);
3763 });
3764 this.on('endedtimeline', function (flushSource) {
3765 destination.endTimeline(flushSource);
3766 });
3767 this.on('reset', function (flushSource) {
3768 destination.reset(flushSource);
3769 });
3770 return destination;
3771 }; // Default stream functions that are expected to be overridden to perform
3772 // actual work. These are provided by the prototype as a sort of no-op
3773 // implementation so that we don't have to check for their existence in the
3774 // `pipe` function above.
3775
3776
3777 Stream.prototype.push = function (data) {
3778 this.trigger('data', data);
3779 };
3780
3781 Stream.prototype.flush = function (flushSource) {
3782 this.trigger('done', flushSource);
3783 };
3784
3785 Stream.prototype.partialFlush = function (flushSource) {
3786 this.trigger('partialdone', flushSource);
3787 };
3788
3789 Stream.prototype.endTimeline = function (flushSource) {
3790 this.trigger('endedtimeline', flushSource);
3791 };
3792
3793 Stream.prototype.reset = function (flushSource) {
3794 this.trigger('reset', flushSource);
3795 };
3796
3797 var stream = Stream;
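
  // Minimal usage sketch of the Stream base class above (hypothetical names):
  // a source piped into a sink; push() on the source reaches the sink's
  // push() through the 'data' listener that pipe() registers.
  var exampleStreamPipe = function exampleStreamPipe() {
    var source = new Stream();
    var sink = new Stream();
    source.init();
    sink.init();

    sink.push = function (data) {
      // a real transform stream would process `data` here and re-emit it
      // with this.trigger('data', transformed)
    };

    source.pipe(sink);
    source.push('sample'); // forwarded synchronously to sink.push
  };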
3798 /**
3799 * mux.js
3800 *
3801 * Copyright (c) Brightcove
3802 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
3803 *
3804 * Functions that generate fragmented MP4s suitable for use with Media
3805 * Source Extensions.
3806 */
3807
3808 var UINT32_MAX = Math.pow(2, 32) - 1;
3809 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
3810
3811 (function () {
3812 var i;
3813 types = {
3814 avc1: [],
3815 // codingname
3816 avcC: [],
3817 btrt: [],
3818 dinf: [],
3819 dref: [],
3820 esds: [],
3821 ftyp: [],
3822 hdlr: [],
3823 mdat: [],
3824 mdhd: [],
3825 mdia: [],
3826 mfhd: [],
3827 minf: [],
3828 moof: [],
3829 moov: [],
3830 mp4a: [],
3831 // codingname
3832 mvex: [],
3833 mvhd: [],
3834 pasp: [],
3835 sdtp: [],
3836 smhd: [],
3837 stbl: [],
3838 stco: [],
3839 stsc: [],
3840 stsd: [],
3841 stsz: [],
3842 stts: [],
3843 styp: [],
3844 tfdt: [],
3845 tfhd: [],
3846 traf: [],
3847 trak: [],
3848 trun: [],
3849 trex: [],
3850 tkhd: [],
3851 vmhd: []
3852 }; // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
3853 // don't throw an error
3854
3855 if (typeof Uint8Array === 'undefined') {
3856 return;
3857 }
3858
3859 for (i in types) {
3860 if (types.hasOwnProperty(i)) {
3861 types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
3862 }
3863 }
3864
3865 MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
3866 AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
3867 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
3868 VIDEO_HDLR = new Uint8Array([0x00, // version 0
3869 0x00, 0x00, 0x00, // flags
3870 0x00, 0x00, 0x00, 0x00, // pre_defined
3871 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
3872 0x00, 0x00, 0x00, 0x00, // reserved
3873 0x00, 0x00, 0x00, 0x00, // reserved
3874 0x00, 0x00, 0x00, 0x00, // reserved
3875 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
3876 ]);
3877 AUDIO_HDLR = new Uint8Array([0x00, // version 0
3878 0x00, 0x00, 0x00, // flags
3879 0x00, 0x00, 0x00, 0x00, // pre_defined
3880 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
3881 0x00, 0x00, 0x00, 0x00, // reserved
3882 0x00, 0x00, 0x00, 0x00, // reserved
3883 0x00, 0x00, 0x00, 0x00, // reserved
3884 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
3885 ]);
3886 HDLR_TYPES = {
3887 video: VIDEO_HDLR,
3888 audio: AUDIO_HDLR
3889 };
3890 DREF = new Uint8Array([0x00, // version 0
3891 0x00, 0x00, 0x00, // flags
3892 0x00, 0x00, 0x00, 0x01, // entry_count
3893 0x00, 0x00, 0x00, 0x0c, // entry_size
3894 0x75, 0x72, 0x6c, 0x20, // 'url' type
3895 0x00, // version 0
3896 0x00, 0x00, 0x01 // entry_flags
3897 ]);
3898 SMHD = new Uint8Array([0x00, // version
3899 0x00, 0x00, 0x00, // flags
3900 0x00, 0x00, // balance, 0 means centered
3901 0x00, 0x00 // reserved
3902 ]);
3903 STCO = new Uint8Array([0x00, // version
3904 0x00, 0x00, 0x00, // flags
3905 0x00, 0x00, 0x00, 0x00 // entry_count
3906 ]);
3907 STSC = STCO;
3908 STSZ = new Uint8Array([0x00, // version
3909 0x00, 0x00, 0x00, // flags
3910 0x00, 0x00, 0x00, 0x00, // sample_size
3911 0x00, 0x00, 0x00, 0x00 // sample_count
3912 ]);
3913 STTS = STCO;
3914 VMHD = new Uint8Array([0x00, // version
3915 0x00, 0x00, 0x01, // flags
3916 0x00, 0x00, // graphicsmode
3917 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
3918 ]);
3919 })();
3920
3921 box = function box(type) {
3922 var payload = [],
3923 size = 0,
3924 i,
3925 result,
3926 view;
3927
3928 for (i = 1; i < arguments.length; i++) {
3929 payload.push(arguments[i]);
3930 }
3931
3932 i = payload.length; // calculate the total size we need to allocate
3933
3934 while (i--) {
3935 size += payload[i].byteLength;
3936 }
3937
3938 result = new Uint8Array(size + 8);
3939 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
3940 view.setUint32(0, result.byteLength);
3941 result.set(type, 4); // copy the payload into the result
3942
3943 for (i = 0, size = 8; i < payload.length; i++) {
3944 result.set(payload[i], size);
3945 size += payload[i].byteLength;
3946 }
3947
3948 return result;
3949 };
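
  // Worked sketch: each box is [32-bit big-endian size][4-byte type][payload],
  // so an mdat box with a 4-byte payload is 12 bytes in total.
  var exampleBoxLayout = function exampleBoxLayout() {
    var result = box(types.mdat, new Uint8Array([1, 2, 3, 4]));
    // result[0..3] === [0, 0, 0, 12] (size), result[4..7] === 'mdat' in
    // ASCII, result[8..11] === the payload
    return result;
  };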
3950
3951 dinf = function dinf() {
3952 return box(types.dinf, box(types.dref, DREF));
3953 };
3954
3955 esds = function esds(track) {
3956 return box(types.esds, new Uint8Array([0x00, // version
3957 0x00, 0x00, 0x00, // flags
3958 // ES_Descriptor
3959 0x03, // tag, ES_DescrTag
3960 0x19, // length
3961 0x00, 0x00, // ES_ID
3962 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
3963 // DecoderConfigDescriptor
3964 0x04, // tag, DecoderConfigDescrTag
3965 0x11, // length
3966 0x40, // object type
3967 0x15, // streamType
3968 0x00, 0x06, 0x00, // bufferSizeDB
3969 0x00, 0x00, 0xda, 0xc0, // maxBitrate
3970 0x00, 0x00, 0xda, 0xc0, // avgBitrate
3971 // DecoderSpecificInfo
3972 0x05, // tag, DecoderSpecificInfoTag
3973 0x02, // length
3974 // ISO/IEC 14496-3, AudioSpecificConfig
3975 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
3976 track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
3977 ]));
3978 };
3979
3980 ftyp = function ftyp() {
3981 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
3982 };
3983
3984 hdlr = function hdlr(type) {
3985 return box(types.hdlr, HDLR_TYPES[type]);
3986 };
3987
3988 mdat = function mdat(data) {
3989 return box(types.mdat, data);
3990 };
3991
3992 mdhd = function mdhd(track) {
3993 var result = new Uint8Array([0x00, // version 0
3994 0x00, 0x00, 0x00, // flags
3995 0x00, 0x00, 0x00, 0x02, // creation_time
3996 0x00, 0x00, 0x00, 0x03, // modification_time
3997 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
3998 track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
3999 0x55, 0xc4, // 'und' language (undetermined)
4000 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
4001 // defined. The sample rate can be parsed out of an ADTS header, for
4002 // instance.
4003
4004 if (track.samplerate) {
4005 result[12] = track.samplerate >>> 24 & 0xFF;
4006 result[13] = track.samplerate >>> 16 & 0xFF;
4007 result[14] = track.samplerate >>> 8 & 0xFF;
4008 result[15] = track.samplerate & 0xFF;
4009 }
4010
4011 return box(types.mdhd, result);
4012 };
4013
4014 mdia = function mdia(track) {
4015 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
4016 };
4017
4018 mfhd = function mfhd(sequenceNumber) {
4019 return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
4020 (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
4021 ]));
4022 };
4023
4024 minf = function minf(track) {
4025 return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
4026 };
4027
4028 moof = function moof(sequenceNumber, tracks) {
4029 var trackFragments = [],
4030 i = tracks.length; // build traf boxes for each track fragment
4031
4032 while (i--) {
4033 trackFragments[i] = traf(tracks[i]);
4034 }
4035
4036 return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
4037 };
4038 /**
4039 * Returns a movie box.
4040 * @param tracks {array} the tracks associated with this movie
4041 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
4042 */
4043
4044
4045 moov = function moov(tracks) {
4046 var i = tracks.length,
4047 boxes = [];
4048
4049 while (i--) {
4050 boxes[i] = trak(tracks[i]);
4051 }
4052
4053 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
4054 };
4055
4056 mvex = function mvex(tracks) {
4057 var i = tracks.length,
4058 boxes = [];
4059
4060 while (i--) {
4061 boxes[i] = trex(tracks[i]);
4062 }
4063
4064 return box.apply(null, [types.mvex].concat(boxes));
4065 };
4066
4067 mvhd = function mvhd(duration) {
4068 var bytes = new Uint8Array([0x00, // version 0
4069 0x00, 0x00, 0x00, // flags
4070 0x00, 0x00, 0x00, 0x01, // creation_time
4071 0x00, 0x00, 0x00, 0x02, // modification_time
4072 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
4073 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
4074 0x00, 0x01, 0x00, 0x00, // 1.0 rate
4075 0x01, 0x00, // 1.0 volume
4076 0x00, 0x00, // reserved
4077 0x00, 0x00, 0x00, 0x00, // reserved
4078 0x00, 0x00, 0x00, 0x00, // reserved
4079 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
4080 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
4081 0xff, 0xff, 0xff, 0xff // next_track_ID
4082 ]);
4083 return box(types.mvhd, bytes);
4084 };
4085
4086 sdtp = function sdtp(track) {
4087 var samples = track.samples || [],
4088 bytes = new Uint8Array(4 + samples.length),
4089 flags,
4090 i; // leave the full box header (4 bytes) all zero
4091 // write the sample table
4092
4093 for (i = 0; i < samples.length; i++) {
4094 flags = samples[i].flags;
4095 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
4096 }
4097
4098 return box(types.sdtp, bytes);
4099 };
4100
4101 stbl = function stbl(track) {
4102 return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
4103 };
4104
4105 (function () {
4106 var videoSample, audioSample;
4107
4108 stsd = function stsd(track) {
4109 return box(types.stsd, new Uint8Array([0x00, // version 0
4110 0x00, 0x00, 0x00, // flags
4111 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
4112 };
4113
4114 videoSample = function videoSample(track) {
4115 var sps = track.sps || [],
4116 pps = track.pps || [],
4117 sequenceParameterSets = [],
4118 pictureParameterSets = [],
4119 i,
4120 avc1Box; // assemble the SPSs
4121
4122 for (i = 0; i < sps.length; i++) {
4123 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
4124 sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
4125
4126 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
4127 } // assemble the PPSs
4128
4129
4130 for (i = 0; i < pps.length; i++) {
4131 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
4132 pictureParameterSets.push(pps[i].byteLength & 0xFF);
4133 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
4134 }
4135
4136 avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4137 0x00, 0x01, // data_reference_index
4138 0x00, 0x00, // pre_defined
4139 0x00, 0x00, // reserved
4140 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
4141 (track.width & 0xff00) >> 8, track.width & 0xff, // width
4142 (track.height & 0xff00) >> 8, track.height & 0xff, // height
4143 0x00, 0x48, 0x00, 0x00, // horizresolution
4144 0x00, 0x48, 0x00, 0x00, // vertresolution
4145 0x00, 0x00, 0x00, 0x00, // reserved
4146 0x00, 0x01, // frame_count
4147 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
4148 0x00, 0x18, // depth = 24
4149 0x11, 0x11 // pre_defined
4150 ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
4151 track.profileIdc, // AVCProfileIndication
4152 track.profileCompatibility, // profile_compatibility
4153 track.levelIdc, // AVCLevelIndication
4154 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
4155 ].concat([sps.length], // numOfSequenceParameterSets
4156 sequenceParameterSets, // "SPS"
4157 [pps.length], // numOfPictureParameterSets
4158 pictureParameterSets // "PPS"
4159 ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
4160 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
4161 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
4162 ]))];
4163
4164 if (track.sarRatio) {
4165 var hSpacing = track.sarRatio[0],
4166 vSpacing = track.sarRatio[1];
4167 avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
4168 }
4169
4170 return box.apply(null, avc1Box);
4171 };
4172
4173 audioSample = function audioSample(track) {
4174 return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
4175 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4176 0x00, 0x01, // data_reference_index
4177 // AudioSampleEntry, ISO/IEC 14496-12
4178 0x00, 0x00, 0x00, 0x00, // reserved
4179 0x00, 0x00, 0x00, 0x00, // reserved
4180 (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
4181 (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
4182 0x00, 0x00, // pre_defined
4183 0x00, 0x00, // reserved
4184 (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
4185 // MP4AudioSampleEntry, ISO/IEC 14496-14
4186 ]), esds(track));
4187 };
4188 })();
4189
4190 tkhd = function tkhd(track) {
4191 var result = new Uint8Array([0x00, // version 0
4192 0x00, 0x00, 0x07, // flags
4193 0x00, 0x00, 0x00, 0x00, // creation_time
4194 0x00, 0x00, 0x00, 0x00, // modification_time
4195 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4196 0x00, 0x00, 0x00, 0x00, // reserved
4197 (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
4198 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4199 0x00, 0x00, // layer
4200 0x00, 0x00, // alternate_group
4201 0x01, 0x00, // non-audio track volume
4202 0x00, 0x00, // reserved
4203 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
4204 (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
4205 (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
4206 ]);
4207 return box(types.tkhd, result);
4208 };
4209 /**
4210 * Generate a track fragment (traf) box. A traf box collects metadata
4211 * about tracks in a movie fragment (moof) box.
4212 */
4213
4214
4215 traf = function traf(track) {
4216 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
4217 trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
4218 0x00, 0x00, 0x3a, // flags
4219 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4220 0x00, 0x00, 0x00, 0x01, // sample_description_index
4221 0x00, 0x00, 0x00, 0x00, // default_sample_duration
4222 0x00, 0x00, 0x00, 0x00, // default_sample_size
4223 0x00, 0x00, 0x00, 0x00 // default_sample_flags
4224 ]));
4225 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
4226 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));
4227 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
4228 0x00, 0x00, 0x00, // flags
4229 // baseMediaDecodeTime
4230 upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
4231 // the containing moof to the first payload byte of the associated
4232 // mdat
4233
4234 dataOffset = 32 + // tfhd
4235 20 + // tfdt
4236 8 + // traf header
4237 16 + // mfhd
4238 8 + // moof header
4239 8; // mdat header
4240 // audio tracks require less metadata
4241
4242 if (track.type === 'audio') {
4243 trackFragmentRun = trun$1(track, dataOffset);
4244 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
4245 } // video tracks should contain an independent and disposable samples
4246 // box (sdtp)
4247 // generate one and adjust offsets to match
4248
4249
4250 sampleDependencyTable = sdtp(track);
4251 trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
4252 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
4253 };
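
  // Note on the dataOffset arithmetic above: 32 (tfhd) + 20 (tfdt) + 8 (traf
  // header) + 16 (mfhd) + 8 (moof header) + 8 (mdat header) = 92 bytes from
  // the start of the moof to the first mdat payload byte, before the trun
  // (and, for video, sdtp) sizes are added by trun$1/sdtp.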
4254 /**
4255 * Generate a track box.
4256 * @param track {object} a track definition
4257 * @return {Uint8Array} the track box
4258 */
4259
4260
4261 trak = function trak(track) {
4262 track.duration = track.duration || 0xffffffff;
4263 return box(types.trak, tkhd(track), mdia(track));
4264 };
4265
4266 trex = function trex(track) {
4267 var result = new Uint8Array([0x00, // version 0
4268 0x00, 0x00, 0x00, // flags
4269 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4270 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
4271 0x00, 0x00, 0x00, 0x00, // default_sample_duration
4272 0x00, 0x00, 0x00, 0x00, // default_sample_size
4273 0x00, 0x01, 0x00, 0x01 // default_sample_flags
4274 ]); // the last two bytes of default_sample_flags are the sample
4275 // degradation priority, a hint about the importance of this sample
4276 // relative to others. Lower the degradation priority for all sample
4277 // types other than video.
4278
4279 if (track.type !== 'video') {
4280 result[result.length - 1] = 0x00;
4281 }
4282
4283 return box(types.trex, result);
4284 };
4285
4286 (function () {
4287 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
4288 // duration is present for the first sample, it will be present for
4289 // all subsequent samples.
4290 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
4291
4292 trunHeader = function trunHeader(samples, offset) {
4293 var durationPresent = 0,
4294 sizePresent = 0,
4295 flagsPresent = 0,
4296 compositionTimeOffset = 0; // trun flag constants
4297
4298 if (samples.length) {
4299 if (samples[0].duration !== undefined) {
4300 durationPresent = 0x1;
4301 }
4302
4303 if (samples[0].size !== undefined) {
4304 sizePresent = 0x2;
4305 }
4306
4307 if (samples[0].flags !== undefined) {
4308 flagsPresent = 0x4;
4309 }
4310
4311 if (samples[0].compositionTimeOffset !== undefined) {
4312 compositionTimeOffset = 0x8;
4313 }
4314 }
4315
4316 return [0x00, // version 0
4317 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
4318 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
4319 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
4320 ];
4321 };
4322
4323 videoTrun = function videoTrun(track, offset) {
4324 var bytesOffset, bytes, header, samples, sample, i;
4325 samples = track.samples || [];
4326 offset += 8 + 12 + 16 * samples.length;
4327 header = trunHeader(samples, offset);
4328 bytes = new Uint8Array(header.length + samples.length * 16);
4329 bytes.set(header);
4330 bytesOffset = header.length;
4331
4332 for (i = 0; i < samples.length; i++) {
4333 sample = samples[i];
4334 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
4335 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
4336 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
4337 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
4338
4339 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
4340 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
4341 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
4342 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
4343
4344 bytes[bytesOffset++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
4345 bytes[bytesOffset++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
4346 bytes[bytesOffset++] = (sample.flags.degradationPriority & 0xFF00) >>> 8; // degradation_priority is a 16-bit big-endian field
4347 bytes[bytesOffset++] = sample.flags.degradationPriority & 0xFF; // sample_flags
4348
4349 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
4350 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
4351 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
4352 bytes[bytesOffset++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
4353 }
4354
4355 return box(types.trun, bytes);
4356 };
4357
4358 audioTrun = function audioTrun(track, offset) {
4359 var bytes, bytesOffset, header, samples, sample, i;
4360 samples = track.samples || [];
4361 offset += 8 + 12 + 8 * samples.length;
4362 header = trunHeader(samples, offset);
4363 bytes = new Uint8Array(header.length + samples.length * 8);
4364 bytes.set(header);
4365 bytesOffset = header.length;
4366
4367 for (i = 0; i < samples.length; i++) {
4368 sample = samples[i];
4369 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
4370 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
4371 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
4372 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
4373
4374 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
4375 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
4376 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
4377 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
4378 }
4379
4380 return box(types.trun, bytes);
4381 };
4382
4383 trun$1 = function trun(track, offset) {
4384 if (track.type === 'audio') {
4385 return audioTrun(track, offset);
4386 }
4387
4388 return videoTrun(track, offset);
4389 };
4390 })();
4391
4392 var mp4Generator = {
4393 ftyp: ftyp,
4394 mdat: mdat,
4395 moof: moof,
4396 moov: moov,
4397 initSegment: function initSegment(tracks) {
4398 var fileType = ftyp(),
4399 movie = moov(tracks),
4400 result;
4401 result = new Uint8Array(fileType.byteLength + movie.byteLength);
4402 result.set(fileType);
4403 result.set(movie, fileType.byteLength);
4404 return result;
4405 }
4406 };
4407 /**
4408 * mux.js
4409 *
4410 * Copyright (c) Brightcove
4411 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4412 */
4413 // Convert an array of nal units into an array of frames with each frame being
4414 // composed of the nal units that make up that frame
4415 // Also keep track of cumulative data about the frame from the nal units such
4416 // as the frame duration, starting pts, etc.
4417
4418 var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
4419 var i,
4420 currentNal,
4421 currentFrame = [],
4422 frames = []; // TODO added for LHLS, make sure this is OK
4423
4424 frames.byteLength = 0;
4425 frames.nalCount = 0;
4426 frames.duration = 0;
4427 currentFrame.byteLength = 0;
4428
4429 for (i = 0; i < nalUnits.length; i++) {
4430 currentNal = nalUnits[i]; // Split on 'aud'-type nal units
4431
4432 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
4433 // Since the very first nal unit is expected to be an AUD
4434 // only push to the frames array when currentFrame is not empty
4435 if (currentFrame.length) {
4436 currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
4437
4438 frames.byteLength += currentFrame.byteLength;
4439 frames.nalCount += currentFrame.length;
4440 frames.duration += currentFrame.duration;
4441 frames.push(currentFrame);
4442 }
4443
4444 currentFrame = [currentNal];
4445 currentFrame.byteLength = currentNal.data.byteLength;
4446 currentFrame.pts = currentNal.pts;
4447 currentFrame.dts = currentNal.dts;
4448 } else {
4449 // Specifically flag key frames for ease of use later
4450 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
4451 currentFrame.keyFrame = true;
4452 }
4453
4454 currentFrame.duration = currentNal.dts - currentFrame.dts;
4455 currentFrame.byteLength += currentNal.data.byteLength;
4456 currentFrame.push(currentNal);
4457 }
4458 } // For the last frame, use the duration of the previous frame if we
4459 // have nothing better to go on
4460
4461
4462 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
4463 currentFrame.duration = frames[frames.length - 1].duration;
4464 } // Push the final frame
4465 // TODO added for LHLS, make sure this is OK
4466
4467
4468 frames.byteLength += currentFrame.byteLength;
4469 frames.nalCount += currentFrame.length;
4470 frames.duration += currentFrame.duration;
4471 frames.push(currentFrame);
4472 return frames;
4473 }; // Convert an array of frames into an array of Gop with each Gop being composed
4474 // of the frames that make up that Gop
4475 // Also keep track of cumulative data about the Gop from the frames such as the
4476 // Gop duration, starting pts, etc.
4477
4478
4479 var groupFramesIntoGops = function groupFramesIntoGops(frames) {
4480 var i,
4481 currentFrame,
4482 currentGop = [],
4483 gops = []; // We must pre-set some of the values on the Gop since we
4484 // keep running totals of these values
4485
4486 currentGop.byteLength = 0;
4487 currentGop.nalCount = 0;
4488 currentGop.duration = 0;
4489 currentGop.pts = frames[0].pts;
4490 currentGop.dts = frames[0].dts; // store some metadata about all the Gops
4491
4492 gops.byteLength = 0;
4493 gops.nalCount = 0;
4494 gops.duration = 0;
4495 gops.pts = frames[0].pts;
4496 gops.dts = frames[0].dts;
4497
4498 for (i = 0; i < frames.length; i++) {
4499 currentFrame = frames[i];
4500
4501 if (currentFrame.keyFrame) {
4502 // Since the very first frame is expected to be a keyframe
4503 // only push to the gops array when currentGop is not empty
4504 if (currentGop.length) {
4505 gops.push(currentGop);
4506 gops.byteLength += currentGop.byteLength;
4507 gops.nalCount += currentGop.nalCount;
4508 gops.duration += currentGop.duration;
4509 }
4510
4511 currentGop = [currentFrame];
4512 currentGop.nalCount = currentFrame.length;
4513 currentGop.byteLength = currentFrame.byteLength;
4514 currentGop.pts = currentFrame.pts;
4515 currentGop.dts = currentFrame.dts;
4516 currentGop.duration = currentFrame.duration;
4517 } else {
4518 currentGop.duration += currentFrame.duration;
4519 currentGop.nalCount += currentFrame.length;
4520 currentGop.byteLength += currentFrame.byteLength;
4521 currentGop.push(currentFrame);
4522 }
4523 }
4524
4525 if (gops.length && currentGop.duration <= 0) {
4526 currentGop.duration = gops[gops.length - 1].duration;
4527 }
4528
4529 gops.byteLength += currentGop.byteLength;
4530 gops.nalCount += currentGop.nalCount;
4531 gops.duration += currentGop.duration; // push the final Gop
4532
4533 gops.push(currentGop);
4534 return gops;
4535 };
4536 /*
4537 * Search for the first keyframe in the GOPs and throw away all frames
4538 * until that keyframe. Then extend the duration of the pulled keyframe
4539 * and pull the PTS and DTS of the keyframe so that it covers the time
4540 * range of the frames that were disposed.
4541 *
4542 * @param {Array} gops video GOPs
4543 * @returns {Array} modified video GOPs
4544 */
4545
4546
4547 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
4548 var currentGop;
4549
4550 if (!gops[0][0].keyFrame && gops.length > 1) {
4551 // Remove the first GOP
4552 currentGop = gops.shift();
4553 gops.byteLength -= currentGop.byteLength;
4554 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
4555 // first gop to cover the time period of the
4556 // frames we just removed
4557
4558 gops[0][0].dts = currentGop.dts;
4559 gops[0][0].pts = currentGop.pts;
4560 gops[0][0].duration += currentGop.duration;
4561 }
4562
4563 return gops;
4564 };
4565 /**
4566 * Default sample object
4567 * see ISO/IEC 14496-12:2012, section 8.6.4.3
4568 */
4569
4570
4571 var createDefaultSample = function createDefaultSample() {
4572 return {
4573 size: 0,
4574 flags: {
4575 isLeading: 0,
4576 dependsOn: 1,
4577 isDependedOn: 0,
4578 hasRedundancy: 0,
4579 degradationPriority: 0,
4580 isNonSyncSample: 1
4581 }
4582 };
4583 };
4584 /*
4585 * Collates information from a video frame into an object for eventual
4586 * entry into an MP4 sample table.
4587 *
4588 * @param {Object} frame the video frame
4589 * @param {Number} dataOffset the byte offset to position the sample
4590 * @return {Object} object containing sample table info for a frame
4591 */
4592
4593
4594 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
4595 var sample = createDefaultSample();
4596 sample.dataOffset = dataOffset;
4597 sample.compositionTimeOffset = frame.pts - frame.dts;
4598 sample.duration = frame.duration;
4599 sample.size = 4 * frame.length; // Space for nal unit size
4600
4601 sample.size += frame.byteLength;
4602
4603 if (frame.keyFrame) {
4604 sample.flags.dependsOn = 2;
4605 sample.flags.isNonSyncSample = 0;
4606 }
4607
4608 return sample;
4609 }; // generate the track's sample table from an array of gops
4610
4611
4612 var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
4613 var h,
4614 i,
4615 sample,
4616 currentGop,
4617 currentFrame,
4618 dataOffset = baseDataOffset || 0,
4619 samples = [];
4620
4621 for (h = 0; h < gops.length; h++) {
4622 currentGop = gops[h];
4623
4624 for (i = 0; i < currentGop.length; i++) {
4625 currentFrame = currentGop[i];
4626 sample = sampleForFrame(currentFrame, dataOffset);
4627 dataOffset += sample.size;
4628 samples.push(sample);
4629 }
4630 }
4631
4632 return samples;
4633 }; // generate the track's raw mdat data from an array of gops
4634
4635
4636 var concatenateNalData = function concatenateNalData(gops) {
4637 var h,
4638 i,
4639 j,
4640 currentGop,
4641 currentFrame,
4642 currentNal,
4643 dataOffset = 0,
4644 nalsByteLength = gops.byteLength,
4645 numberOfNals = gops.nalCount,
4646 totalByteLength = nalsByteLength + 4 * numberOfNals,
4647 data = new Uint8Array(totalByteLength),
4648 view = new DataView(data.buffer); // For each Gop..
4649
4650 for (h = 0; h < gops.length; h++) {
4651 currentGop = gops[h]; // For each Frame..
4652
4653 for (i = 0; i < currentGop.length; i++) {
4654 currentFrame = currentGop[i]; // For each NAL..
4655
4656 for (j = 0; j < currentFrame.length; j++) {
4657 currentNal = currentFrame[j];
4658 view.setUint32(dataOffset, currentNal.data.byteLength);
4659 dataOffset += 4;
4660 data.set(currentNal.data, dataOffset);
4661 dataOffset += currentNal.data.byteLength;
4662 }
4663 }
4664 }
4665
4666 return data;
4667 }; // generate the track's sample table from a frame
4668
4669
4670 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
4671 var sample,
4672 dataOffset = baseDataOffset || 0,
4673 samples = [];
4674 sample = sampleForFrame(frame, dataOffset);
4675 samples.push(sample);
4676 return samples;
4677 }; // generate the track's raw mdat data from a frame
4678
4679
4680 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
4681 var i,
4682 currentNal,
4683 dataOffset = 0,
4684 nalsByteLength = frame.byteLength,
4685 numberOfNals = frame.length,
4686 totalByteLength = nalsByteLength + 4 * numberOfNals,
4687 data = new Uint8Array(totalByteLength),
4688 view = new DataView(data.buffer); // For each NAL..
4689
4690 for (i = 0; i < frame.length; i++) {
4691 currentNal = frame[i];
4692 view.setUint32(dataOffset, currentNal.data.byteLength);
4693 dataOffset += 4;
4694 data.set(currentNal.data, dataOffset);
4695 dataOffset += currentNal.data.byteLength;
4696 }
4697
4698 return data;
4699 };
4700
4701 var frameUtils = {
4702 groupNalsIntoFrames: groupNalsIntoFrames,
4703 groupFramesIntoGops: groupFramesIntoGops,
4704 extendFirstKeyFrame: extendFirstKeyFrame,
4705 generateSampleTable: generateSampleTable$1,
4706 concatenateNalData: concatenateNalData,
4707 generateSampleTableForFrame: generateSampleTableForFrame,
4708 concatenateNalDataForFrame: concatenateNalDataForFrame
4709 };
4710 /**
4711 * mux.js
4712 *
4713 * Copyright (c) Brightcove
4714 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4715 */
4716
4717 var highPrefix = [33, 16, 5, 32, 164, 27];
4718 var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
4719
4720 var zeroFill = function zeroFill(count) {
4721 var a = [];
4722
4723 while (count--) {
4724 a.push(0);
4725 }
4726
4727 return a;
4728 };
4729
4730 var makeTable = function makeTable(metaTable) {
4731 return Object.keys(metaTable).reduce(function (obj, key) {
4732 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
4733 return arr.concat(part);
4734 }, []));
4735 return obj;
4736 }, {});
4737 };
4738
4739 var silence;
4740
4741 var silence_1 = function silence_1() {
4742 if (!silence) {
4743 // Frames-of-silence to use for filling in missing AAC frames
4744 var coneOfSilence = {
4745 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
4746 88200: [highPrefix, [231], zeroFill(170), [56]],
4747 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
4748 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
4749 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
4750 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
4751 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
4752 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
4753 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
4754 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
4755 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
4756 };
4757 silence = makeTable(coneOfSilence);
4758 }
4759
4760 return silence;
4761 };
4762 /**
4763 * mux.js
4764 *
4765 * Copyright (c) Brightcove
4766 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4767 */
4768
4769
4770 var ONE_SECOND_IN_TS$4 = 90000,
4771 // 90kHz clock
4772 secondsToVideoTs,
4773 secondsToAudioTs,
4774 videoTsToSeconds,
4775 audioTsToSeconds,
4776 audioTsToVideoTs,
4777 videoTsToAudioTs,
4778 metadataTsToSeconds;
4779
4780 secondsToVideoTs = function secondsToVideoTs(seconds) {
4781 return seconds * ONE_SECOND_IN_TS$4;
4782 };
4783
4784 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
4785 return seconds * sampleRate;
4786 };
4787
4788 videoTsToSeconds = function videoTsToSeconds(timestamp) {
4789 return timestamp / ONE_SECOND_IN_TS$4;
4790 };
4791
4792 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
4793 return timestamp / sampleRate;
4794 };
4795
4796 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
4797 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
4798 };
4799
4800 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
4801 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
4802 };
4803 /**
4804 * Adjust ID3 tag or caption timing information by the timeline pts values
4805 * (if keepOriginalTimestamps is false) and convert to seconds
4806 */
4807
4808
4809 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
4810 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
4811 };
4812
4813 var clock = {
4814 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
4815 secondsToVideoTs: secondsToVideoTs,
4816 secondsToAudioTs: secondsToAudioTs,
4817 videoTsToSeconds: videoTsToSeconds,
4818 audioTsToSeconds: audioTsToSeconds,
4819 audioTsToVideoTs: audioTsToVideoTs,
4820 videoTsToAudioTs: videoTsToAudioTs,
4821 metadataTsToSeconds: metadataTsToSeconds
4822 };
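
  // Worked examples for the 90kHz helpers above: secondsToVideoTs(2) ===
  // 180000 ticks, and for 44100 Hz audio audioTsToVideoTs(44100, 44100) ===
  // 90000, i.e. one second expressed in video ticks.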
4823 /**
4824 * mux.js
4825 *
4826 * Copyright (c) Brightcove
4827 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4828 */
4829
4830 /**
4831 * Sum the `byteLength` properties of the data in each AAC frame
4832 */
4833
4834 var sumFrameByteLengths = function sumFrameByteLengths(array) {
4835 var i,
4836 currentObj,
4837 sum = 0; // sum the byteLength of the data in each AAC frame
4838
4839 for (i = 0; i < array.length; i++) {
4840 currentObj = array[i];
4841 sum += currentObj.data.byteLength;
4842 }
4843
4844 return sum;
4845 }; // Possibly pad (prefix) the audio track with silence if appending this track
4846 // would lead to the introduction of a gap in the audio buffer
4847
4848
4849 var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
4850 var baseMediaDecodeTimeTs,
4851 frameDuration = 0,
4852 audioGapDuration = 0,
4853 audioFillFrameCount = 0,
4854 audioFillDuration = 0,
4855 silentFrame,
4856 i,
4857 firstFrame;
4858
4859 if (!frames.length) {
4860 return;
4861 }
4862
4863 baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
4864
4865 frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));
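 // e.g. for a 44100 Hz track: Math.ceil(90000 / (44100 / 1024)) = 2090 ticks
 // (~23.2 ms) per frame, so the half-second guard below rejects fills longer
 // than 21 silent frames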
4866
4867 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
4868 // insert the shortest possible amount (audio gap or audio to video gap)
4869 audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
4870
4871 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
4872 audioFillDuration = audioFillFrameCount * frameDuration;
4873 } // don't attempt to fill gaps smaller than a single frame or larger
4874 // than a half second
4875
4876
4877 if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
4878 return;
4879 }
4880
4881 silentFrame = silence_1()[track.samplerate];
4882
4883 if (!silentFrame) {
4884 // we don't have a silent frame pregenerated for the sample rate, so use a frame
4885 // from the content instead
4886 silentFrame = frames[0].data;
4887 }
4888
4889 for (i = 0; i < audioFillFrameCount; i++) {
4890 firstFrame = frames[0];
4891 frames.splice(0, 0, {
4892 data: silentFrame,
4893 dts: firstFrame.dts - frameDuration,
4894 pts: firstFrame.pts - frameDuration
4895 });
4896 }
4897
4898 track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
4899 return audioFillDuration;
4900 }; // If the audio segment extends before the earliest allowed dts
4901 // value, remove AAC frames until the segment starts at or after the
4902 // earliest allowed DTS so that we don't end up with a negative
4903 // baseMediaDecodeTime for the audio track
4904
4905
4906 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
4907 if (track.minSegmentDts >= earliestAllowedDts) {
4908 return adtsFrames;
4909 } // We will need to recalculate the earliest segment Dts
4910
4911
4912 track.minSegmentDts = Infinity;
4913 return adtsFrames.filter(function (currentFrame) {
4914 // If this is an allowed frame, keep it and record its DTS
4915 if (currentFrame.dts >= earliestAllowedDts) {
4916 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
4917 track.minSegmentPts = track.minSegmentDts;
4918 return true;
4919 } // Otherwise, discard it
4920
4921
4922 return false;
4923 });
4924 }; // generate the track's sample table from an array of frames
4925
4926
4927 var generateSampleTable = function generateSampleTable(frames) {
4928 var i,
4929 currentFrame,
4930 samples = [];
4931
4932 for (i = 0; i < frames.length; i++) {
4933 currentFrame = frames[i];
4934 samples.push({
4935 size: currentFrame.data.byteLength,
4936 duration: 1024 // For AAC audio, each frame (one MP4 sample) contains 1024 audio samples
4937
4938 });
4939 }
4940
4941 return samples;
4942 }; // generate the track's raw mdat data from an array of frames
4943
4944
4945 var concatenateFrameData = function concatenateFrameData(frames) {
4946 var i,
4947 currentFrame,
4948 dataOffset = 0,
4949 data = new Uint8Array(sumFrameByteLengths(frames));
4950
4951 for (i = 0; i < frames.length; i++) {
4952 currentFrame = frames[i];
4953 data.set(currentFrame.data, dataOffset);
4954 dataOffset += currentFrame.data.byteLength;
4955 }
4956
4957 return data;
4958 };
4959
4960 var audioFrameUtils = {
4961 prefixWithSilence: prefixWithSilence,
4962 trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
4963 generateSampleTable: generateSampleTable,
4964 concatenateFrameData: concatenateFrameData
4965 };
4966 /**
4967 * mux.js
4968 *
4969 * Copyright (c) Brightcove
4970 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4971 */
4972
4973 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
4974 /**
4975 * Store information about the start and end of the track and the
4976 * duration for each frame/sample we process in order to calculate
4977 * the baseMediaDecodeTime
4978 */
4979
4980 var collectDtsInfo = function collectDtsInfo(track, data) {
4981 if (typeof data.pts === 'number') {
4982 if (track.timelineStartInfo.pts === undefined) {
4983 track.timelineStartInfo.pts = data.pts;
4984 }
4985
4986 if (track.minSegmentPts === undefined) {
4987 track.minSegmentPts = data.pts;
4988 } else {
4989 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
4990 }
4991
4992 if (track.maxSegmentPts === undefined) {
4993 track.maxSegmentPts = data.pts;
4994 } else {
4995 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
4996 }
4997 }
4998
4999 if (typeof data.dts === 'number') {
5000 if (track.timelineStartInfo.dts === undefined) {
5001 track.timelineStartInfo.dts = data.dts;
5002 }
5003
5004 if (track.minSegmentDts === undefined) {
5005 track.minSegmentDts = data.dts;
5006 } else {
5007 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
5008 }
5009
5010 if (track.maxSegmentDts === undefined) {
5011 track.maxSegmentDts = data.dts;
5012 } else {
5013 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
5014 }
5015 }
5016 };
5017 /**
5018 * Clear values used to calculate the baseMediaDecodeTime between
5019 * tracks
5020 */
5021
5022
5023 var clearDtsInfo = function clearDtsInfo(track) {
5024 delete track.minSegmentDts;
5025 delete track.maxSegmentDts;
5026 delete track.minSegmentPts;
5027 delete track.maxSegmentPts;
5028 };
5029 /**
5030 * Calculate the track's baseMediaDecodeTime based on the earliest
5031 * DTS the transmuxer has ever seen and the minimum DTS for the
5032 * current track
5033 * @param track {object} track metadata configuration
5034 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
5035 * in the source; false to adjust the first segment to start at 0.
5036 */
5037
5038
5039 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
5040 var baseMediaDecodeTime,
5041 scale,
5042 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
5043
5044 if (!keepOriginalTimestamps) {
5045 minSegmentDts -= track.timelineStartInfo.dts;
5046 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
5047 // we want the start of the first segment to be placed
5048
5049
5050 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
5051
5052 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
5053
5054 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
5055
5056 if (track.type === 'audio') {
5057 // Audio has a different clock equal to the sampling_rate so we need to
5058 // scale the PTS values into the clock rate of the track
5059 scale = track.samplerate / ONE_SECOND_IN_TS$3;
5060 baseMediaDecodeTime *= scale;
5061 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
5062 }
5063
5064 return baseMediaDecodeTime;
5065 };
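// Illustrative sketch (not part of the original bundle): the audio branch
// above rescales a 90kHz clock value into the track's sample-rate clock.
// The track values here are hypothetical.
var exampleAudioBaseMediaDecodeTime = function () {
  var track = {
    type: 'audio',
    samplerate: 44100,
    minSegmentDts: 180000, // one second after the timeline start
    timelineStartInfo: { dts: 90000, baseMediaDecodeTime: 0 }
  };
  // (180000 - 90000) * (44100 / 90000) === 44100, i.e. exactly one second
  // expressed in the audio track's clock
  return calculateTrackBaseMediaDecodeTime(track, false); // 44100
};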
5066
5067 var trackDecodeInfo = {
5068 clearDtsInfo: clearDtsInfo,
5069 calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
5070 collectDtsInfo: collectDtsInfo
5071 };
5072 /**
5073 * mux.js
5074 *
5075 * Copyright (c) Brightcove
5076 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5077 *
5078 * Reads in-band caption information from a video elementary
5079 * stream. Captions must follow the CEA-708 standard for injection
5080 * into an MPEG-2 transport streams.
5081 * @see https://en.wikipedia.org/wiki/CEA-708
5082 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
5083 */
5084 // Supplemental enhancement information (SEI) NAL units carry a payload
5085 // type field to indicate how they are to be interpreted. CEA-708 caption
5086 // content is always transmitted with payload type 0x04.
5087
5088 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
5089 RBSP_TRAILING_BITS = 128;
5090 /**
5091 * Parse a supplemental enhancement information (SEI) NAL unit.
5092 * Stops parsing once a message of type ITU T T35 has been found.
5093 *
5094 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
5095 * @return {object} the parsed SEI payload
5096 * @see Rec. ITU-T H.264, 7.3.2.3.1
5097 */
5098
5099 var parseSei = function parseSei(bytes) {
5100 var i = 0,
5101 result = {
5102 payloadType: -1,
5103 payloadSize: 0
5104 },
5105 payloadType = 0,
5106 payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
5107
5108 while (i < bytes.byteLength) {
5109 // stop once we have hit the end of the sei_rbsp
5110 if (bytes[i] === RBSP_TRAILING_BITS) {
5111 break;
5112 } // Parse payload type
5113
5114
5115 while (bytes[i] === 0xFF) {
5116 payloadType += 255;
5117 i++;
5118 }
5119
5120 payloadType += bytes[i++]; // Parse payload size
5121
5122 while (bytes[i] === 0xFF) {
5123 payloadSize += 255;
5124 i++;
5125 }
5126
5127 payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
5128 // there can only ever be one caption message in a frame's sei
5129
5130 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
5131 var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
5132
5133 if (userIdentifier === 'GA94') {
5134 result.payloadType = payloadType;
5135 result.payloadSize = payloadSize;
5136 result.payload = bytes.subarray(i, i + payloadSize);
5137 break;
5138 } else {
5139 result.payload = void 0;
5140 }
5141 } // skip the payload and parse the next message
5142
5143
5144 i += payloadSize;
5145 payloadType = 0;
5146 payloadSize = 0;
5147 }
5148
5149 return result;
5150 }; // see ANSI/SCTE 128-1 (2013), section 8.1
5151
5152
5153 var parseUserData = function parseUserData(sei) {
5154 // itu_t_t35_country_code must be 181 (United States) for
5155 // captions
5156 if (sei.payload[0] !== 181) {
5157 return null;
5158 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
5159
5160
5161 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
5162 return null;
5163 } // the user_identifier should be "GA94" to indicate ATSC1 data
5164
5165
5166 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
5167 return null;
5168 } // finally, user_data_type_code should be 0x03 for caption data
5169
5170
5171 if (sei.payload[7] !== 0x03) {
5172 return null;
5173 } // return the user_data_type_structure and strip the trailing
5174 // marker bits
5175
5176
5177 return sei.payload.subarray(8, sei.payload.length - 1);
5178 }; // see CEA-708-D, section 4.4
5179
5180
5181 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
5182 var results = [],
5183 i,
5184 count,
5185 offset,
5186 data; // if this is just filler, return immediately
5187
5188 if (!(userData[0] & 0x40)) {
5189 return results;
5190 } // parse out the cc_data_1 and cc_data_2 fields
5191
5192
5193 count = userData[0] & 0x1f;
5194
5195 for (i = 0; i < count; i++) {
5196 offset = i * 3;
5197 data = {
5198 type: userData[offset + 2] & 0x03,
5199 pts: pts
5200 }; // capture cc data when cc_valid is 1
5201
5202 if (userData[offset + 2] & 0x04) {
5203 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
5204 results.push(data);
5205 }
5206 }
5207
5208 return results;
5209 };
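// Illustrative sketch (not part of the original bundle): the three parsers
// above chained over a hand-built SEI RBSP. All byte values are
// hypothetical but follow the layout the parsers expect.
var exampleSeiPipeline = function () {
  var seiBytes = new Uint8Array([
    0x04, // payload_type: user_data_registered_itu_t_t35
    0x0e, // payload_size: 14
    0xb5, // itu_t_t35_country_code: 181 (United States)
    0x00, 0x31, // itu_t_t35_provider_code: 49 (ATSC)
    0x47, 0x41, 0x39, 0x34, // user_identifier: 'GA94'
    0x03, // user_data_type_code: caption data
    0x41, // process_cc_data_flag set, cc_count: 1
    0xff, // em_data (reserved)
    0xfc, 0x14, 0x2c, // one cc triplet: cc_valid, type 0, byte pair 0x14 0x2c
    0xff, // marker_bits
    0x80 // rbsp_trailing_bits
  ]);
  var sei = parseSei(seiBytes);
  var userData = parseUserData(sei);
  // -> [{ type: 0, pts: 90000, ccData: 0x142c }]
  // (0x14 0x2c is the CC1 "erase displayed memory" control pair)
  return parseCaptionPackets(90000, userData);
};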
5210
5211 var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
5212 var length = data.byteLength,
5213 emulationPreventionBytesPositions = [],
5214 i = 1,
5215 newLength,
5216 newData; // Find all `Emulation Prevention Bytes`
5217
5218 while (i < length - 2) {
5219 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
5220 emulationPreventionBytesPositions.push(i + 2);
5221 i += 2;
5222 } else {
5223 i++;
5224 }
5225 } // If no Emulation Prevention Bytes were found just return the original
5226 // array
5227
5228
5229 if (emulationPreventionBytesPositions.length === 0) {
5230 return data;
5231 } // Create a new array to hold the NAL unit data
5232
5233
5234 newLength = length - emulationPreventionBytesPositions.length;
5235 newData = new Uint8Array(newLength);
5236 var sourceIndex = 0;
5237
5238 for (i = 0; i < newLength; sourceIndex++, i++) {
5239 if (sourceIndex === emulationPreventionBytesPositions[0]) {
5240 // Skip this byte
5241 sourceIndex++; // Remove this position index
5242
5243 emulationPreventionBytesPositions.shift();
5244 }
5245
5246 newData[i] = data[sourceIndex];
5247 }
5248
5249 return newData;
5250 };
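// Illustrative sketch (not part of the original bundle): the helper above
// unwinds the Annex B 0x00 0x00 0x03 escape back to 0x00 0x00. The bytes
// are hypothetical.
var exampleUnescaped = discardEmulationPreventionBytes$1(new Uint8Array([0x01, 0x00, 0x00, 0x03, 0x01, 0x00, 0x00, 0x03, 0x02]));
// exampleUnescaped -> [0x01, 0x00, 0x00, 0x01, 0x00, 0x00, 0x02]
// exports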
5251
5252
5253 var captionPacketParser = {
5254 parseSei: parseSei,
5255 parseUserData: parseUserData,
5256 parseCaptionPackets: parseCaptionPackets,
5257 discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
5258 USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
5259 }; // Link To Transport
5260 // -----------------
5261
5262 var CaptionStream$1 = function CaptionStream(options) {
5263 options = options || {};
5264 CaptionStream.prototype.init.call(this); // parse708captions flag, default to true
5265
5266 this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
5267 this.captionPackets_ = [];
5268 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
5269 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
5270 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
5271 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
5272 ];
5273
5274 if (this.parse708captions_) {
5275 this.cc708Stream_ = new Cea708Stream(); // eslint-disable-line no-use-before-define
5276 }
5277
5278 this.reset(); // forward data and done events from CCs to this CaptionStream
5279
5280 this.ccStreams_.forEach(function (cc) {
5281 cc.on('data', this.trigger.bind(this, 'data'));
5282 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
5283 cc.on('done', this.trigger.bind(this, 'done'));
5284 }, this);
5285
5286 if (this.parse708captions_) {
5287 this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
5288 this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
5289 this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
5290 }
5291 };
5292
5293 CaptionStream$1.prototype = new stream();
5294
5295 CaptionStream$1.prototype.push = function (event) {
5296 var sei, userData, newCaptionPackets; // only examine SEI NALs
5297
5298 if (event.nalUnitType !== 'sei_rbsp') {
5299 return;
5300 } // parse the sei
5301
5302
5303 sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip
5304
5305 if (!sei.payload) {
5306 return;
5307 } // ignore everything but user_data_registered_itu_t_t35
5308
5309
5310 if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
5311 return;
5312 } // parse out the user data payload
5313
5314
5315 userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData
5316
5317 if (!userData) {
5318 return;
5319 } // Sometimes, the same segment # will be downloaded twice. To stop the
5320 // caption data from being processed twice, we track the latest dts we've
5321 // received and ignore everything with a dts before that. However, since
5322 // data for a specific dts can be split across packets on either side of
5323 // a segment boundary, we need to make sure we *don't* ignore the packets
5324 // from the *next* segment that have dts === this.latestDts_. By constantly
5325 // tracking the number of packets received with dts === this.latestDts_, we
5326 // know how many should be ignored once we start receiving duplicates.
5327
5328
5329 if (event.dts < this.latestDts_) {
5330 // We've started getting older data, so set the flag.
5331 this.ignoreNextEqualDts_ = true;
5332 return;
5333 } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
5334 this.numSameDts_--;
5335
5336 if (!this.numSameDts_) {
5337 // We've received the last duplicate packet, time to start processing again
5338 this.ignoreNextEqualDts_ = false;
5339 }
5340
5341 return;
5342 } // parse out CC data packets and save them for later
5343
5344
5345 newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
5346 this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
5347
5348 if (this.latestDts_ !== event.dts) {
5349 this.numSameDts_ = 0;
5350 }
5351
5352 this.numSameDts_++;
5353 this.latestDts_ = event.dts;
5354 };
5355
5356 CaptionStream$1.prototype.flushCCStreams = function (flushType) {
5357 this.ccStreams_.forEach(function (cc) {
5358 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
5359 }, this);
5360 };
5361
5362 CaptionStream$1.prototype.flushStream = function (flushType) {
5363 // make sure we actually parsed captions before proceeding
5364 if (!this.captionPackets_.length) {
5365 this.flushCCStreams(flushType);
5366 return;
5367 } // In Chrome, the Array#sort function is not stable so add a
5368 // presortIndex that we can use to ensure we get a stable-sort
5369
5370
5371 this.captionPackets_.forEach(function (elem, idx) {
5372 elem.presortIndex = idx;
5373 }); // sort caption byte-pairs based on their PTS values
5374
5375 this.captionPackets_.sort(function (a, b) {
5376 if (a.pts === b.pts) {
5377 return a.presortIndex - b.presortIndex;
5378 }
5379
5380 return a.pts - b.pts;
5381 });
5382 this.captionPackets_.forEach(function (packet) {
5383 if (packet.type < 2) {
5384 // Dispatch packet to the right Cea608Stream
5385 this.dispatchCea608Packet(packet);
5386 } else {
5387 // Dispatch packet to the Cea708Stream
5388 this.dispatchCea708Packet(packet);
5389 }
5390 }, this);
5391 this.captionPackets_.length = 0;
5392 this.flushCCStreams(flushType);
5393 };
5394
5395 CaptionStream$1.prototype.flush = function () {
5396 return this.flushStream('flush');
5397 }; // Only called if handling partial data
5398
5399
5400 CaptionStream$1.prototype.partialFlush = function () {
5401 return this.flushStream('partialFlush');
5402 };
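// Illustrative sketch (not part of the original bundle): collecting cues
// from a CaptionStream. The event shape ({ nalUnitType, escapedRBSP, pts,
// dts }) mirrors what the H264 pipeline later in this file emits; the
// inputs are hypothetical.
var exampleCollectCaptions = function (seiNalEvents) {
  var cues = [];
  var captions = new CaptionStream$1();
  captions.on('data', function (cue) {
    // cue: { startPts, endPts, text, stream }
    cues.push(cue);
  });
  seiNalEvents.forEach(function (nalEvent) {
    captions.push(nalEvent);
  });
  captions.flush();
  return cues;
};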
5403
5404 CaptionStream$1.prototype.reset = function () {
5405 this.latestDts_ = null;
5406 this.ignoreNextEqualDts_ = false;
5407 this.numSameDts_ = 0;
5408 this.activeCea608Channel_ = [null, null];
5409 this.ccStreams_.forEach(function (ccStream) {
5410 ccStream.reset();
5411 });
5412 }; // From the CEA-608 spec:
5413
5414 /*
5415 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
5416 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
5417 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
5418 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
5419 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
5420 * to switch to captioning or Text.
5421 */
5422 // With that in mind, we ignore any data between an XDS control code and a
5423 // subsequent closed-captioning control code.
5424
5425
5426 CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
5427 // NOTE: packet.type is the CEA608 field
5428 if (this.setsTextOrXDSActive(packet)) {
5429 this.activeCea608Channel_[packet.type] = null;
5430 } else if (this.setsChannel1Active(packet)) {
5431 this.activeCea608Channel_[packet.type] = 0;
5432 } else if (this.setsChannel2Active(packet)) {
5433 this.activeCea608Channel_[packet.type] = 1;
5434 }
5435
5436 if (this.activeCea608Channel_[packet.type] === null) {
5437 // If we haven't received anything to set the active channel, or the
5438 // packets are Text/XDS data, discard the data; we don't want jumbled
5439 // captions
5440 return;
5441 }
5442
5443 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
5444 };
5445
5446 CaptionStream$1.prototype.setsChannel1Active = function (packet) {
5447 return (packet.ccData & 0x7800) === 0x1000;
5448 };
5449
5450 CaptionStream$1.prototype.setsChannel2Active = function (packet) {
5451 return (packet.ccData & 0x7800) === 0x1800;
5452 };
5453
5454 CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
5455 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
5456 };
5457
5458 CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
5459 if (this.parse708captions_) {
5460 this.cc708Stream_.push(packet);
5461 }
5462 }; // ----------------------
5463 // Session to Application
5464 // ----------------------
5465 // This hash maps special and extended character codes to their
5466 // proper Unicode equivalent. The first one-byte key is just a
5467 // non-standard character code. The two-byte keys that follow are
5468 // the extended CEA708 character codes, along with the preceding
5469 // 0x10 extended character byte to distinguish these codes from
5470 // non-extended character codes. Every CEA708 character code that
5471 // is not in this object maps directly to a standard unicode
5472 // character code.
5473 // The transparent space and non-breaking transparent space are
5474 // technically not fully supported since there is no code to
5475 // make them transparent, so they have normal non-transparent
5476 // stand-ins.
5477 // The special closed caption (CC) character isn't a standard
5478 // unicode character, so a fairly similar unicode character was
5479 // chosen in its place.
5480
5481
5482 var CHARACTER_TRANSLATION_708 = {
5483 0x7f: 0x266a,
5484 // ♪
5485 0x1020: 0x20,
5486 // Transparent Space
5487 0x1021: 0xa0,
5488 // Non-breaking Transparent Space
5489 0x1025: 0x2026,
5490 // …
5491 0x102a: 0x0160,
5492 // Š
5493 0x102c: 0x0152,
5494 // Œ
5495 0x1030: 0x2588,
5496 // █
5497 0x1031: 0x2018,
5498 // ‘
5499 0x1032: 0x2019,
5500 // ’
5501 0x1033: 0x201c,
5502 // “
5503 0x1034: 0x201d,
5504 // ”
5505 0x1035: 0x2022,
5506 // •
5507 0x1039: 0x2122,
5508 // ™
5509 0x103a: 0x0161,
5510 // š
5511 0x103c: 0x0153,
5512 // œ
5513 0x103d: 0x2120,
5514 // ℠
5515 0x103f: 0x0178,
5516 // Ÿ
5517 0x1076: 0x215b,
5518 // ⅛
5519 0x1077: 0x215c,
5520 // ⅜
5521 0x1078: 0x215d,
5522 // ⅝
5523 0x1079: 0x215e,
5524 // ⅞
5525 0x107a: 0x23d0,
5526 // ⏐
5527 0x107b: 0x23a4,
5528 // ⎤
5529 0x107c: 0x23a3,
5530 // ⎣
5531 0x107d: 0x23af,
5532 // ⎯
5533 0x107e: 0x23a6,
5534 // ⎦
5535 0x107f: 0x23a1,
5536 // ⎡
5537 0x10a0: 0x3138 // ㄸ (CC char)
5538
5539 };
5540
5541 var get708CharFromCode = function get708CharFromCode(code) {
5542 var newCode = CHARACTER_TRANSLATION_708[code] || code;
5543
5544 if (code & 0x1000 && code === newCode) {
5545 // Invalid extended code
5546 return '';
5547 }
5548
5549 return String.fromCharCode(newCode);
5550 };
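// Illustrative sketch (not part of the original bundle): how the 0x10
// escape byte folds into the lookup above. The codes are hypothetical.
// get708CharFromCode(0x7f) -> '♪' (remapped music note)
// get708CharFromCode(0x1025) -> '…' (extended code 0x10 0x25)
// get708CharFromCode(0x1022) -> '' (unmapped extended code is dropped)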
5551
5552 var within708TextBlock = function within708TextBlock(b) {
5553 return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
5554 };
5555
5556 var Cea708Window = function Cea708Window(windowNum) {
5557 this.windowNum = windowNum;
5558 this.reset();
5559 };
5560
5561 Cea708Window.prototype.reset = function () {
5562 this.clearText();
5563 this.pendingNewLine = false;
5564 this.winAttr = {};
5565 this.penAttr = {};
5566 this.penLoc = {};
5567 this.penColor = {}; // These default values are arbitrary,
5568 // defineWindow will usually override them
5569
5570 this.visible = 0;
5571 this.rowLock = 0;
5572 this.columnLock = 0;
5573 this.priority = 0;
5574 this.relativePositioning = 0;
5575 this.anchorVertical = 0;
5576 this.anchorHorizontal = 0;
5577 this.anchorPoint = 0;
5578 this.rowCount = 1;
5579 this.virtualRowCount = this.rowCount + 1;
5580 this.columnCount = 41;
5581 this.windowStyle = 0;
5582 this.penStyle = 0;
5583 };
5584
5585 Cea708Window.prototype.getText = function () {
5586 return this.rows.join('\n');
5587 };
5588
5589 Cea708Window.prototype.clearText = function () {
5590 this.rows = [''];
5591 this.rowIdx = 0;
5592 };
5593
5594 Cea708Window.prototype.newLine = function (pts) {
5595 if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
5596 this.beforeRowOverflow(pts);
5597 }
5598
5599 if (this.rows.length > 0) {
5600 this.rows.push('');
5601 this.rowIdx++;
5602 } // Show all virtual rows since there's no visible scrolling
5603
5604
5605 while (this.rows.length > this.virtualRowCount) {
5606 this.rows.shift();
5607 this.rowIdx--;
5608 }
5609 };
5610
5611 Cea708Window.prototype.isEmpty = function () {
5612 if (this.rows.length === 0) {
5613 return true;
5614 } else if (this.rows.length === 1) {
5615 return this.rows[0] === '';
5616 }
5617
5618 return false;
5619 };
5620
5621 Cea708Window.prototype.addText = function (text) {
5622 this.rows[this.rowIdx] += text;
5623 };
5624
5625 Cea708Window.prototype.backspace = function () {
5626 if (!this.isEmpty()) {
5627 var row = this.rows[this.rowIdx];
5628 this.rows[this.rowIdx] = row.substr(0, row.length - 1);
5629 }
5630 };
5631
5632 var Cea708Service = function Cea708Service(serviceNum) {
5633 this.serviceNum = serviceNum;
5634 this.text = '';
5635 this.currentWindow = new Cea708Window(-1);
5636 this.windows = [];
5637 };
5638 /**
5639 * Initialize service windows
5640 * Must be run before service use
5641 *
5642 * @param {Integer} pts PTS value
5643 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
5644 */
5645
5646
5647 Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
5648 this.startPts = pts;
5649
5650 for (var win = 0; win < 8; win++) {
5651 this.windows[win] = new Cea708Window(win);
5652
5653 if (typeof beforeRowOverflow === 'function') {
5654 this.windows[win].beforeRowOverflow = beforeRowOverflow;
5655 }
5656 }
5657 };
5658 /**
5659 * Set current window of service to be affected by commands
5660 *
5661 * @param {Integer} windowNum Window number
5662 */
5663
5664
5665 Cea708Service.prototype.setCurrentWindow = function (windowNum) {
5666 this.currentWindow = this.windows[windowNum];
5667 };
5668
5669 var Cea708Stream = function Cea708Stream() {
5670 Cea708Stream.prototype.init.call(this);
5671 var self = this;
5672 this.current708Packet = null;
5673 this.services = {};
5674
5675 this.push = function (packet) {
5676 if (packet.type === 3) {
5677 // 708 packet start
5678 self.new708Packet();
5679 self.add708Bytes(packet);
5680 } else {
5681 if (self.current708Packet === null) {
5682 // This should only happen at the start of a file if there's no packet start.
5683 self.new708Packet();
5684 }
5685
5686 self.add708Bytes(packet);
5687 }
5688 };
5689 };
5690
5691 Cea708Stream.prototype = new stream();
5692 /**
5693 * Push current 708 packet, create new 708 packet.
5694 */
5695
5696 Cea708Stream.prototype.new708Packet = function () {
5697 if (this.current708Packet !== null) {
5698 this.push708Packet();
5699 }
5700
5701 this.current708Packet = {
5702 data: [],
5703 ptsVals: []
5704 };
5705 };
5706 /**
5707 * Add pts and both bytes from packet into current 708 packet.
5708 */
5709
5710
5711 Cea708Stream.prototype.add708Bytes = function (packet) {
5712 var data = packet.ccData;
5713 var byte0 = data >>> 8;
5714 var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
5715 // that service blocks will always line up with byte pairs.
5716
5717 this.current708Packet.ptsVals.push(packet.pts);
5718 this.current708Packet.data.push(byte0);
5719 this.current708Packet.data.push(byte1);
5720 };
5721 /**
5722 * Parse completed 708 packet into service blocks and push each service block.
5723 */
5724
5725
5726 Cea708Stream.prototype.push708Packet = function () {
5727 var packet708 = this.current708Packet;
5728 var packetData = packet708.data;
5729 var serviceNum = null;
5730 var blockSize = null;
5731 var i = 0;
5732 var b = packetData[i++];
5733 packet708.seq = b >> 6;
5734 packet708.sizeCode = b & 0x3f; // 0b00111111;
5735
5736 for (; i < packetData.length; i++) {
5737 b = packetData[i++];
5738 serviceNum = b >> 5;
5739 blockSize = b & 0x1f; // 0b00011111
5740
5741 if (serviceNum === 7 && blockSize > 0) {
5742 // Extended service num
5743 b = packetData[i++];
5744 serviceNum = b;
5745 }
5746
5747 this.pushServiceBlock(serviceNum, i, blockSize);
5748
5749 if (blockSize > 0) {
5750 i += blockSize - 1;
5751 }
5752 }
5753 };
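// Illustrative sketch (not part of the original bundle): the two header
// bytes decoded above. The byte values are hypothetical.
var example708PacketHeader = function (b) {
  // first packet byte: sequence_number (2 bits) | packet_size_code (6 bits)
  return { seq: b >> 6, sizeCode: b & 0x3f };
};
var example708ServiceBlockHeader = function (b) {
  // service block header: service_number (3 bits) | block_size (5 bits)
  return { serviceNum: b >> 5, blockSize: b & 0x1f };
};
// example708PacketHeader(0x42) -> { seq: 1, sizeCode: 2 }
// example708ServiceBlockHeader(0x23) -> { serviceNum: 1, blockSize: 3 }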
5754 /**
5755 * Parse service block, execute commands, read text.
5756 *
5757 * Note: While many of these commands serve important purposes,
5758 * many others just parse out the parameters or attributes, but
5759 * nothing is done with them because this is not a full and complete
5760 * implementation of the entire 708 spec.
5761 *
5762 * @param {Integer} serviceNum Service number
5763 * @param {Integer} start Start index of the 708 packet data
5764 * @param {Integer} size Block size
5765 */
5766
5767
5768 Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
5769 var b;
5770 var i = start;
5771 var packetData = this.current708Packet.data;
5772 var service = this.services[serviceNum];
5773
5774 if (!service) {
5775 service = this.initService(serviceNum, i);
5776 }
5777
5778 for (; i < start + size && i < packetData.length; i++) {
5779 b = packetData[i];
5780
5781 if (within708TextBlock(b)) {
5782 i = this.handleText(i, service);
5783 } else if (b === 0x10) {
5784 i = this.extendedCommands(i, service);
5785 } else if (0x80 <= b && b <= 0x87) {
5786 i = this.setCurrentWindow(i, service);
5787 } else if (0x98 <= b && b <= 0x9f) {
5788 i = this.defineWindow(i, service);
5789 } else if (b === 0x88) {
5790 i = this.clearWindows(i, service);
5791 } else if (b === 0x8c) {
5792 i = this.deleteWindows(i, service);
5793 } else if (b === 0x89) {
5794 i = this.displayWindows(i, service);
5795 } else if (b === 0x8a) {
5796 i = this.hideWindows(i, service);
5797 } else if (b === 0x8b) {
5798 i = this.toggleWindows(i, service);
5799 } else if (b === 0x97) {
5800 i = this.setWindowAttributes(i, service);
5801 } else if (b === 0x90) {
5802 i = this.setPenAttributes(i, service);
5803 } else if (b === 0x91) {
5804 i = this.setPenColor(i, service);
5805 } else if (b === 0x92) {
5806 i = this.setPenLocation(i, service);
5807 } else if (b === 0x8f) {
5808 service = this.reset(i, service);
5809 } else if (b === 0x08) {
5810 // BS: Backspace
5811 service.currentWindow.backspace();
5812 } else if (b === 0x0c) {
5813 // FF: Form feed
5814 service.currentWindow.clearText();
5815 } else if (b === 0x0d) {
5816 // CR: Carriage return
5817 service.currentWindow.pendingNewLine = true;
5818 } else if (b === 0x0e) {
5819 // HCR: Horizontal carriage return
5820 service.currentWindow.clearText();
5821 } else if (b === 0x8d) {
5822 // DLY: Delay, nothing to do
5823 i++;
5824 } else ; // unhandled command byte: no-op
5825 }
5826 };
5827 /**
5828 * Execute an extended command
5829 *
5830 * @param {Integer} i Current index in the 708 packet
5831 * @param {Service} service The service object to be affected
5832 * @return {Integer} New index after parsing
5833 */
5834
5835
5836 Cea708Stream.prototype.extendedCommands = function (i, service) {
5837 var packetData = this.current708Packet.data;
5838 var b = packetData[++i];
5839
5840 if (within708TextBlock(b)) {
5841 i = this.handleText(i, service, true);
5842 }
5843
5844 return i;
5845 };
5846 /**
5847 * Get PTS value of a given byte index
5848 *
5849 * @param {Integer} byteIndex Index of the byte
5850 * @return {Integer} PTS
5851 */
5852
5853
5854 Cea708Stream.prototype.getPts = function (byteIndex) {
5855 // There's 1 pts value per 2 bytes
5856 return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
5857 };
5858 /**
5859 * Initializes a service
5860 *
5861 * @param {Integer} serviceNum Service number
5862 * @return {Service} Initialized service object
5863 */
5864
5865
5866 Cea708Stream.prototype.initService = function (serviceNum, i) {
5867 var self = this;
5868 this.services[serviceNum] = new Cea708Service(serviceNum);
5869 this.services[serviceNum].init(this.getPts(i), function (pts) {
5870 self.flushDisplayed(pts, self.services[serviceNum]);
5871 });
5872 return this.services[serviceNum];
5873 };
5874 /**
5875 * Execute text writing to current window
5876 *
5877 * @param {Integer} i Current index in the 708 packet
5878 * @param {Service} service The service object to be affected
5879 * @return {Integer} New index after parsing
5880 */
5881
5882
5883 Cea708Stream.prototype.handleText = function (i, service, isExtended) {
5884 var packetData = this.current708Packet.data;
5885 var b = packetData[i];
5886 var extended = isExtended ? 0x1000 : 0x0000;
5887 var char = get708CharFromCode(extended | b);
5888 var win = service.currentWindow;
5889
5890 if (win.pendingNewLine && !win.isEmpty()) {
5891 win.newLine(this.getPts(i));
5892 }
5893
5894 win.pendingNewLine = false;
5895 win.addText(char);
5896 return i;
5897 };
5898 /**
5899 * Parse and execute the CW# command.
5900 *
5901 * Set the current window.
5902 *
5903 * @param {Integer} i Current index in the 708 packet
5904 * @param {Service} service The service object to be affected
5905 * @return {Integer} New index after parsing
5906 */
5907
5908
5909 Cea708Stream.prototype.setCurrentWindow = function (i, service) {
5910 var packetData = this.current708Packet.data;
5911 var b = packetData[i];
5912 var windowNum = b & 0x07;
5913 service.setCurrentWindow(windowNum);
5914 return i;
5915 };
5916 /**
5917 * Parse and execute the DF# command.
5918 *
5919 * Define a window and set it as the current window.
5920 *
5921 * @param {Integer} i Current index in the 708 packet
5922 * @param {Service} service The service object to be affected
5923 * @return {Integer} New index after parsing
5924 */
5925
5926
5927 Cea708Stream.prototype.defineWindow = function (i, service) {
5928 var packetData = this.current708Packet.data;
5929 var b = packetData[i];
5930 var windowNum = b & 0x07;
5931 service.setCurrentWindow(windowNum);
5932 var win = service.currentWindow;
5933 b = packetData[++i];
5934 win.visible = (b & 0x20) >> 5; // v
5935
5936 win.rowLock = (b & 0x10) >> 4; // rl
5937
5938 win.columnLock = (b & 0x08) >> 3; // cl
5939
5940 win.priority = b & 0x07; // p
5941
5942 b = packetData[++i];
5943 win.relativePositioning = (b & 0x80) >> 7; // rp
5944
5945 win.anchorVertical = b & 0x7f; // av
5946
5947 b = packetData[++i];
5948 win.anchorHorizontal = b; // ah
5949
5950 b = packetData[++i];
5951 win.anchorPoint = (b & 0xf0) >> 4; // ap
5952
5953 win.rowCount = b & 0x0f; // rc
5954
5955 b = packetData[++i];
5956 win.columnCount = b & 0x3f; // cc
5957
5958 b = packetData[++i];
5959 win.windowStyle = (b & 0x38) >> 3; // ws
5960
5961 win.penStyle = b & 0x07; // ps
5962 // The spec says there are (rowCount+1) "virtual rows"
5963
5964 win.virtualRowCount = win.rowCount + 1;
5965 return i;
5966 };
5967 /**
5968 * Parse and execute the SWA command.
5969 *
5970 * Set attributes of the current window.
5971 *
5972 * @param {Integer} i Current index in the 708 packet
5973 * @param {Service} service The service object to be affected
5974 * @return {Integer} New index after parsing
5975 */
5976
5977
5978 Cea708Stream.prototype.setWindowAttributes = function (i, service) {
5979 var packetData = this.current708Packet.data;
5980 var b = packetData[i];
5981 var winAttr = service.currentWindow.winAttr;
5982 b = packetData[++i];
5983 winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
5984
5985 winAttr.fillRed = (b & 0x30) >> 4; // fr
5986
5987 winAttr.fillGreen = (b & 0x0c) >> 2; // fg
5988
5989 winAttr.fillBlue = b & 0x03; // fb
5990
5991 b = packetData[++i];
5992 winAttr.borderType = (b & 0xc0) >> 6; // bt
5993
5994 winAttr.borderRed = (b & 0x30) >> 4; // br
5995
5996 winAttr.borderGreen = (b & 0x0c) >> 2; // bg
5997
5998 winAttr.borderBlue = b & 0x03; // bb
5999
6000 b = packetData[++i];
6001 winAttr.borderType += (b & 0x80) >> 5; // bt
6002
6003 winAttr.wordWrap = (b & 0x40) >> 6; // ww
6004
6005 winAttr.printDirection = (b & 0x30) >> 4; // pd
6006
6007 winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
6008
6009 winAttr.justify = b & 0x03; // j
6010
6011 b = packetData[++i];
6012 winAttr.effectSpeed = (b & 0xf0) >> 4; // es
6013
6014 winAttr.effectDirection = (b & 0x0c) >> 2; // ed
6015
6016 winAttr.displayEffect = b & 0x03; // de
6017
6018 return i;
6019 };
6020 /**
6021 * Gather text from all displayed windows and push a caption to output.
6022 *
6023 * @param {Integer} pts Current PTS value
6024 * @param {Service} service The service object to be affected
6025 */
6026
6027
6028 Cea708Stream.prototype.flushDisplayed = function (pts, service) {
6029 var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
6030 // display text in the correct order, but sample files so far have not shown any issue.
6031
6032 for (var winId = 0; winId < 8; winId++) {
6033 if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
6034 displayedText.push(service.windows[winId].getText());
6035 }
6036 }
6037
6038 service.endPts = pts;
6039 service.text = displayedText.join('\n\n');
6040 this.pushCaption(service);
6041 service.startPts = pts;
6042 };
6043 /**
6044 * Push a caption to output if the caption contains text.
6045 *
6046 * @param {Service} service The service object to be affected
6047 */
6048
6049
6050 Cea708Stream.prototype.pushCaption = function (service) {
6051 if (service.text !== '') {
6052 this.trigger('data', {
6053 startPts: service.startPts,
6054 endPts: service.endPts,
6055 text: service.text,
6056 stream: 'cc708_' + service.serviceNum
6057 });
6058 service.text = '';
6059 service.startPts = service.endPts;
6060 }
6061 };
6062 /**
6063 * Parse and execute the DSW command.
6064 *
6065 * Set visible property of windows based on the parsed bitmask.
6066 *
6067 * @param {Integer} i Current index in the 708 packet
6068 * @param {Service} service The service object to be affected
6069 * @return {Integer} New index after parsing
6070 */
6071
6072
6073 Cea708Stream.prototype.displayWindows = function (i, service) {
6074 var packetData = this.current708Packet.data;
6075 var b = packetData[++i];
6076 var pts = this.getPts(i);
6077 this.flushDisplayed(pts, service);
6078
6079 for (var winId = 0; winId < 8; winId++) {
6080 if (b & 0x01 << winId) {
6081 service.windows[winId].visible = 1;
6082 }
6083 }
6084
6085 return i;
6086 };
6087 /**
6088 * Parse and execute the HDW command.
6089 *
6090 * Set visible property of windows based on the parsed bitmask.
6091 *
6092 * @param {Integer} i Current index in the 708 packet
6093 * @param {Service} service The service object to be affected
6094 * @return {Integer} New index after parsing
6095 */
6096
6097
6098 Cea708Stream.prototype.hideWindows = function (i, service) {
6099 var packetData = this.current708Packet.data;
6100 var b = packetData[++i];
6101 var pts = this.getPts(i);
6102 this.flushDisplayed(pts, service);
6103
6104 for (var winId = 0; winId < 8; winId++) {
6105 if (b & 0x01 << winId) {
6106 service.windows[winId].visible = 0;
6107 }
6108 }
6109
6110 return i;
6111 };
6112 /**
6113 * Parse and execute the TGW command.
6114 *
6115 * Set visible property of windows based on the parsed bitmask.
6116 *
6117 * @param {Integer} i Current index in the 708 packet
6118 * @param {Service} service The service object to be affected
6119 * @return {Integer} New index after parsing
6120 */
6121
6122
6123 Cea708Stream.prototype.toggleWindows = function (i, service) {
6124 var packetData = this.current708Packet.data;
6125 var b = packetData[++i];
6126 var pts = this.getPts(i);
6127 this.flushDisplayed(pts, service);
6128
6129 for (var winId = 0; winId < 8; winId++) {
6130 if (b & 0x01 << winId) {
6131 service.windows[winId].visible ^= 1;
6132 }
6133 }
6134
6135 return i;
6136 };
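// Illustrative sketch (not part of the original bundle): DSW, HDW and TGW
// above (and CLW/DLW below) all take a one-byte mask in which bit n
// addresses window n. The mask value is hypothetical.
var exampleWindowsInMask = function (mask) {
  var ids = [];
  for (var winId = 0; winId < 8; winId++) {
    if (mask & 0x01 << winId) {
      ids.push(winId);
    }
  }
  return ids;
};
// exampleWindowsInMask(0x05) -> [0, 2]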
6137 /**
6138 * Parse and execute the CLW command.
6139 *
6140 * Clear text of windows based on the parsed bitmask.
6141 *
6142 * @param {Integer} i Current index in the 708 packet
6143 * @param {Service} service The service object to be affected
6144 * @return {Integer} New index after parsing
6145 */
6146
6147
6148 Cea708Stream.prototype.clearWindows = function (i, service) {
6149 var packetData = this.current708Packet.data;
6150 var b = packetData[++i];
6151 var pts = this.getPts(i);
6152 this.flushDisplayed(pts, service);
6153
6154 for (var winId = 0; winId < 8; winId++) {
6155 if (b & 0x01 << winId) {
6156 service.windows[winId].clearText();
6157 }
6158 }
6159
6160 return i;
6161 };
6162 /**
6163 * Parse and execute the DLW command.
6164 *
6165 * Re-initialize windows based on the parsed bitmask.
6166 *
6167 * @param {Integer} i Current index in the 708 packet
6168 * @param {Service} service The service object to be affected
6169 * @return {Integer} New index after parsing
6170 */
6171
6172
6173 Cea708Stream.prototype.deleteWindows = function (i, service) {
6174 var packetData = this.current708Packet.data;
6175 var b = packetData[++i];
6176 var pts = this.getPts(i);
6177 this.flushDisplayed(pts, service);
6178
6179 for (var winId = 0; winId < 8; winId++) {
6180 if (b & 0x01 << winId) {
6181 service.windows[winId].reset();
6182 }
6183 }
6184
6185 return i;
6186 };
6187 /**
6188 * Parse and execute the SPA command.
6189 *
6190 * Set pen attributes of the current window.
6191 *
6192 * @param {Integer} i Current index in the 708 packet
6193 * @param {Service} service The service object to be affected
6194 * @return {Integer} New index after parsing
6195 */
6196
6197
6198 Cea708Stream.prototype.setPenAttributes = function (i, service) {
6199 var packetData = this.current708Packet.data;
6200 var b = packetData[i];
6201 var penAttr = service.currentWindow.penAttr;
6202 b = packetData[++i];
6203 penAttr.textTag = (b & 0xf0) >> 4; // tt
6204
6205 penAttr.offset = (b & 0x0c) >> 2; // o
6206
6207 penAttr.penSize = b & 0x03; // s
6208
6209 b = packetData[++i];
6210 penAttr.italics = (b & 0x80) >> 7; // i
6211
6212 penAttr.underline = (b & 0x40) >> 6; // u
6213
6214 penAttr.edgeType = (b & 0x38) >> 3; // et
6215
6216 penAttr.fontStyle = b & 0x07; // fs
6217
6218 return i;
6219 };
6220 /**
6221 * Parse and execute the SPC command.
6222 *
6223 * Set pen color of the current window.
6224 *
6225 * @param {Integer} i Current index in the 708 packet
6226 * @param {Service} service The service object to be affected
6227 * @return {Integer} New index after parsing
6228 */
6229
6230
6231 Cea708Stream.prototype.setPenColor = function (i, service) {
6232 var packetData = this.current708Packet.data;
6233 var b = packetData[i];
6234 var penColor = service.currentWindow.penColor;
6235 b = packetData[++i];
6236 penColor.fgOpacity = (b & 0xc0) >> 6; // fo
6237
6238 penColor.fgRed = (b & 0x30) >> 4; // fr
6239
6240 penColor.fgGreen = (b & 0x0c) >> 2; // fg
6241
6242 penColor.fgBlue = b & 0x03; // fb
6243
6244 b = packetData[++i];
6245 penColor.bgOpacity = (b & 0xc0) >> 6; // bo
6246
6247 penColor.bgRed = (b & 0x30) >> 4; // br
6248
6249 penColor.bgGreen = (b & 0x0c) >> 2; // bg
6250
6251 penColor.bgBlue = b & 0x03; // bb
6252
6253 b = packetData[++i];
6254 penColor.edgeRed = (b & 0x30) >> 4; // er
6255
6256 penColor.edgeGreen = (b & 0x0c) >> 2; // eg
6257
6258 penColor.edgeBlue = b & 0x03; // eb
6259
6260 return i;
6261 };
6262 /**
6263 * Parse and execute the SPL command.
6264 *
6265 * Set pen location of the current window.
6266 *
6267 * @param {Integer} i Current index in the 708 packet
6268 * @param {Service} service The service object to be affected
6269 * @return {Integer} New index after parsing
6270 */
6271
6272
6273 Cea708Stream.prototype.setPenLocation = function (i, service) {
6274 var packetData = this.current708Packet.data;
6275 var b = packetData[i];
6276 var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
6277
6278 service.currentWindow.pendingNewLine = true;
6279 b = packetData[++i];
6280 penLoc.row = b & 0x0f; // r
6281
6282 b = packetData[++i];
6283 penLoc.column = b & 0x3f; // c
6284
6285 return i;
6286 };
6287 /**
6288 * Execute the RST command.
6289 *
6290 * Reset service to a clean slate. Re-initialize.
6291 *
6292 * @param {Integer} i Current index in the 708 packet
6293 * @param {Service} service The service object to be affected
6294 * @return {Service} Re-initialized service
6295 */
6296
6297
6298 Cea708Stream.prototype.reset = function (i, service) {
6299 var pts = this.getPts(i);
6300 this.flushDisplayed(pts, service);
6301 return this.initService(service.serviceNum, i);
6302 }; // This hash maps non-ASCII, special, and extended character codes to their
6303 // proper Unicode equivalent. The first keys that are only a single byte
6304 // are the non-standard ASCII characters, which simply map the CEA608 byte
6305 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
6306 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
6307 // can be performed regardless of the field and data channel on which the
6308 // character code was received.
6309
6310
6311 var CHARACTER_TRANSLATION = {
6312 0x2a: 0xe1,
6313 // á
6314 0x5c: 0xe9,
6315 // é
6316 0x5e: 0xed,
6317 // í
6318 0x5f: 0xf3,
6319 // ó
6320 0x60: 0xfa,
6321 // ú
6322 0x7b: 0xe7,
6323 // ç
6324 0x7c: 0xf7,
6325 // ÷
6326 0x7d: 0xd1,
6327 // Ñ
6328 0x7e: 0xf1,
6329 // ñ
6330 0x7f: 0x2588,
6331 // █
6332 0x0130: 0xae,
6333 // ®
6334 0x0131: 0xb0,
6335 // °
6336 0x0132: 0xbd,
6337 // ½
6338 0x0133: 0xbf,
6339 // ¿
6340 0x0134: 0x2122,
6341 // ™
6342 0x0135: 0xa2,
6343 // ¢
6344 0x0136: 0xa3,
6345 // £
6346 0x0137: 0x266a,
6347 // ♪
6348 0x0138: 0xe0,
6349 // à
6350 0x0139: 0xa0,
6351 // (non-breaking space)
6352 0x013a: 0xe8,
6353 // è
6354 0x013b: 0xe2,
6355 // â
6356 0x013c: 0xea,
6357 // ê
6358 0x013d: 0xee,
6359 // î
6360 0x013e: 0xf4,
6361 // ô
6362 0x013f: 0xfb,
6363 // û
6364 0x0220: 0xc1,
6365 // Á
6366 0x0221: 0xc9,
6367 // É
6368 0x0222: 0xd3,
6369 // Ó
6370 0x0223: 0xda,
6371 // Ú
6372 0x0224: 0xdc,
6373 // Ü
6374 0x0225: 0xfc,
6375 // ü
6376 0x0226: 0x2018,
6377 // ‘
6378 0x0227: 0xa1,
6379 // ¡
6380 0x0228: 0x2a,
6381 // *
6382 0x0229: 0x27,
6383 // '
6384 0x022a: 0x2014,
6385 // —
6386 0x022b: 0xa9,
6387 // ©
6388 0x022c: 0x2120,
6389 // ℠
6390 0x022d: 0x2022,
6391 // •
6392 0x022e: 0x201c,
6393 // “
6394 0x022f: 0x201d,
6395 // ”
6396 0x0230: 0xc0,
6397 // À
6398 0x0231: 0xc2,
6399 // Â
6400 0x0232: 0xc7,
6401 // Ç
6402 0x0233: 0xc8,
6403 // È
6404 0x0234: 0xca,
6405 // Ê
6406 0x0235: 0xcb,
6407 // Ë
6408 0x0236: 0xeb,
6409 // ë
6410 0x0237: 0xce,
6411 // Î
6412 0x0238: 0xcf,
6413 // Ï
6414 0x0239: 0xef,
6415 // ï
6416 0x023a: 0xd4,
6417 // Ô
6418 0x023b: 0xd9,
6419 // Ù
6420 0x023c: 0xf9,
6421 // ù
6422 0x023d: 0xdb,
6423 // Û
6424 0x023e: 0xab,
6425 // «
6426 0x023f: 0xbb,
6427 // »
6428 0x0320: 0xc3,
6429 // Ã
6430 0x0321: 0xe3,
6431 // ã
6432 0x0322: 0xcd,
6433 // Í
6434 0x0323: 0xcc,
6435 // Ì
6436 0x0324: 0xec,
6437 // ì
6438 0x0325: 0xd2,
6439 // Ò
6440 0x0326: 0xf2,
6441 // ò
6442 0x0327: 0xd5,
6443 // Õ
6444 0x0328: 0xf5,
6445 // õ
6446 0x0329: 0x7b,
6447 // {
6448 0x032a: 0x7d,
6449 // }
6450 0x032b: 0x5c,
6451 // \
6452 0x032c: 0x5e,
6453 // ^
6454 0x032d: 0x5f,
6455 // _
6456 0x032e: 0x7c,
6457 // |
6458 0x032f: 0x7e,
6459 // ~
6460 0x0330: 0xc4,
6461 // Ä
6462 0x0331: 0xe4,
6463 // ä
6464 0x0332: 0xd6,
6465 // Ö
6466 0x0333: 0xf6,
6467 // ö
6468 0x0334: 0xdf,
6469 // ß
6470 0x0335: 0xa5,
6471 // ¥
6472 0x0336: 0xa4,
6473 // ¤
6474 0x0337: 0x2502,
6475 // │
6476 0x0338: 0xc5,
6477 // Å
6478 0x0339: 0xe5,
6479 // å
6480 0x033a: 0xd8,
6481 // Ø
6482 0x033b: 0xf8,
6483 // ø
6484 0x033c: 0x250c,
6485 // ┌
6486 0x033d: 0x2510,
6487 // ┐
6488 0x033e: 0x2514,
6489 // └
6490 0x033f: 0x2518 // ┘
6491
6492 };
6493
6494 var getCharFromCode = function getCharFromCode(code) {
6495 if (code === null) {
6496 return '';
6497 }
6498
6499 code = CHARACTER_TRANSLATION[code] || code;
6500 return String.fromCharCode(code);
6501 }; // the index of the last row in a CEA-608 display buffer
6502
6503
6504 var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
6505 // getting it through bit logic.
6506
6507 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 32x15 matrix of character
6508 // cells. The "bottom" row is the last element in the outer array.
6509
6510 var createDisplayBuffer = function createDisplayBuffer() {
6511 var result = [],
6512 i = BOTTOM_ROW + 1;
6513
6514 while (i--) {
6515 result.push('');
6516 }
6517
6518 return result;
6519 };
6520
6521 var Cea608Stream = function Cea608Stream(field, dataChannel) {
6522 Cea608Stream.prototype.init.call(this);
6523 this.field_ = field || 0;
6524 this.dataChannel_ = dataChannel || 0;
6525 this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
6526 this.setConstants();
6527 this.reset();
6528
6529 this.push = function (packet) {
6530 var data, swap, char0, char1, text; // remove the parity bits
6531
6532 data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
6533
6534 if (data === this.lastControlCode_) {
6535 this.lastControlCode_ = null;
6536 return;
6537 } // Store control codes
6538
6539
6540 if ((data & 0xf000) === 0x1000) {
6541 this.lastControlCode_ = data;
6542 } else if (data !== this.PADDING_) {
6543 this.lastControlCode_ = null;
6544 }
6545
6546 char0 = data >>> 8;
6547 char1 = data & 0xff;
6548
6549 if (data === this.PADDING_) {
6550 return;
6551 } else if (data === this.RESUME_CAPTION_LOADING_) {
6552 this.mode_ = 'popOn';
6553 } else if (data === this.END_OF_CAPTION_) {
6554 // If an EOC is received while in paint-on mode, the displayed caption
6555 // text should be swapped to non-displayed memory as if it was a pop-on
6556 // caption. Because of that, we should explicitly switch back to pop-on
6557 // mode
6558 this.mode_ = 'popOn';
6559 this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
6560
6561 this.flushDisplayed(packet.pts); // flip memory
6562
6563 swap = this.displayed_;
6564 this.displayed_ = this.nonDisplayed_;
6565 this.nonDisplayed_ = swap; // start measuring the time to display the caption
6566
6567 this.startPts_ = packet.pts;
6568 } else if (data === this.ROLL_UP_2_ROWS_) {
6569 this.rollUpRows_ = 2;
6570 this.setRollUp(packet.pts);
6571 } else if (data === this.ROLL_UP_3_ROWS_) {
6572 this.rollUpRows_ = 3;
6573 this.setRollUp(packet.pts);
6574 } else if (data === this.ROLL_UP_4_ROWS_) {
6575 this.rollUpRows_ = 4;
6576 this.setRollUp(packet.pts);
6577 } else if (data === this.CARRIAGE_RETURN_) {
6578 this.clearFormatting(packet.pts);
6579 this.flushDisplayed(packet.pts);
6580 this.shiftRowsUp_();
6581 this.startPts_ = packet.pts;
6582 } else if (data === this.BACKSPACE_) {
6583 if (this.mode_ === 'popOn') {
6584 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
6585 } else {
6586 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
6587 }
6588 } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
6589 this.flushDisplayed(packet.pts);
6590 this.displayed_ = createDisplayBuffer();
6591 } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
6592 this.nonDisplayed_ = createDisplayBuffer();
6593 } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
6594 if (this.mode_ !== 'paintOn') {
6595 // NOTE: This should be removed when proper caption positioning is
6596 // implemented
6597 this.flushDisplayed(packet.pts);
6598 this.displayed_ = createDisplayBuffer();
6599 }
6600
6601 this.mode_ = 'paintOn';
6602 this.startPts_ = packet.pts; // Append special characters to caption text
6603 } else if (this.isSpecialCharacter(char0, char1)) {
6604 // Bitmask char0 so that we can apply character transformations
6605 // regardless of field and data channel.
6606 // Then byte-shift to the left and OR with char1 so we can pass the
6607 // entire character code to `getCharFromCode`.
6608 char0 = (char0 & 0x03) << 8;
6609 text = getCharFromCode(char0 | char1);
6610 this[this.mode_](packet.pts, text);
6611 this.column_++; // Append extended characters to caption text
6612 } else if (this.isExtCharacter(char0, char1)) {
6613 // Extended characters always follow their "non-extended" equivalents.
6614 // I.e. if a "è" is desired, you'll always receive "eè"; non-compliant
6615 // decoders are supposed to drop the "è", while compliant decoders
6616 // backspace the "e" and insert "è".
6617 // Delete the previous character
6618 if (this.mode_ === 'popOn') {
6619 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
6620 } else {
6621 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
6622 } // Bitmask char0 so that we can apply character transformations
6623 // regardless of field and data channel.
6624 // Then byte-shift to the left and OR with char1 so we can pass the
6625 // entire character code to `getCharFromCode`.
6626
6627
6628 char0 = (char0 & 0x03) << 8;
6629 text = getCharFromCode(char0 | char1);
6630 this[this.mode_](packet.pts, text);
6631 this.column_++; // Process mid-row codes
6632 } else if (this.isMidRowCode(char0, char1)) {
6633 // Attributes are not additive, so clear all formatting
6634 this.clearFormatting(packet.pts); // According to the standard, mid-row codes
6635 // should be replaced with spaces, so add one now
6636
6637 this[this.mode_](packet.pts, ' ');
6638 this.column_++;
6639
6640 if ((char1 & 0xe) === 0xe) {
6641 this.addFormatting(packet.pts, ['i']);
6642 }
6643
6644 if ((char1 & 0x1) === 0x1) {
6645 this.addFormatting(packet.pts, ['u']);
6646 } // Detect offset control codes and adjust cursor
6647
6648 } else if (this.isOffsetControlCode(char0, char1)) {
6649 // Cursor position is set by indent PAC (see below) in 4-column
6650 // increments, with an additional offset code of 1-3 to reach any
6651 // of the 32 columns specified by CEA-608. So all we need to do
6652 // here is increment the column cursor by the given offset.
6653 this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
6654 } else if (this.isPAC(char0, char1)) {
6655 // There's no logic for PAC -> row mapping, so we have to just
6656 // find the row code in an array and use its index :(
6657 var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
6658
6659 if (this.mode_ === 'rollUp') {
6660 // This implies that the base row is incorrectly set.
6661 // As per the recommendation in CEA-608 (Base Row Implementation), defer to the number
6662 // of roll-up rows set.
6663 if (row - this.rollUpRows_ + 1 < 0) {
6664 row = this.rollUpRows_ - 1;
6665 }
6666
6667 this.setRollUp(packet.pts, row);
6668 }
6669
6670 if (row !== this.row_) {
6671 // formatting is only persistent for current row
6672 this.clearFormatting(packet.pts);
6673 this.row_ = row;
6674 } // All PACs can apply underline, so detect and apply
6675 // (All odd-numbered second bytes set underline)
6676
6677
6678 if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
6679 this.addFormatting(packet.pts, ['u']);
6680 }
6681
6682 if ((data & 0x10) === 0x10) {
6683 // We've got an indent level code. Each successive even number
6684 // increments the column cursor by 4, so we can get the desired
6685 // column position by bit-shifting to the right (to get n/2)
6686 // and multiplying by 4.
6687 this.column_ = ((data & 0xe) >> 1) * 4;
6688 }
6689
6690 if (this.isColorPAC(char1)) {
6691 // it's a color code, though we only support white, which
6692 // can be either normal or italicized. white italics can be
6693 // either 0x4e or 0x6e depending on the row, so we just
6694 // bitwise-and with 0xe to see if italics should be turned on
6695 if ((char1 & 0xe) === 0xe) {
6696 this.addFormatting(packet.pts, ['i']);
6697 }
6698 } // We have a normal character in char0, and possibly one in char1
6699
6700 } else if (this.isNormalChar(char0)) {
6701 if (char1 === 0x00) {
6702 char1 = null;
6703 }
6704
6705 text = getCharFromCode(char0);
6706 text += getCharFromCode(char1);
6707 this[this.mode_](packet.pts, text);
6708 this.column_ += text.length;
6709 } // finish data processing
6710
6711 };
6712 };
6713
6714 Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
6715 // display buffer
6716
6717 Cea608Stream.prototype.flushDisplayed = function (pts) {
6718 var content = this.displayed_ // remove spaces from the start and end of the string
6719 .map(function (row) {
6720 try {
6721 return row.trim();
6722 } catch (e) {
6723 // Ordinarily, this shouldn't happen. However, caption
6724 // parsing errors should not throw exceptions and
6725 // break playback.
6726 // eslint-disable-next-line no-console
6727 console.error('Skipping malformed caption.');
6728 return '';
6729 }
6730 }) // combine all text rows to display in one cue
6731 .join('\n') // and remove blank rows from the start and end, but not the middle
6732 .replace(/^\n+|\n+$/g, '');
6733
6734 if (content.length) {
6735 this.trigger('data', {
6736 startPts: this.startPts_,
6737 endPts: pts,
6738 text: content,
6739 stream: this.name_
6740 });
6741 }
6742 };
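// Illustrative sketch (not part of the original bundle): how a display
// buffer collapses into cue text above. The rows are hypothetical.
var exampleCueText = ['', '  HELLO  ', 'WORLD', '']
  .map(function (row) { return row.trim(); })
  .join('\n')
  .replace(/^\n+|\n+$/g, '');
// exampleCueText === 'HELLO\nWORLD'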
6743 /**
6744 * Zero out the data, used for startup and on seek
6745 */
6746
6747
6748 Cea608Stream.prototype.reset = function () {
6749 this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
6750 // actually display captions. If a caption is shifted to a row
6751 // with a lower index than this, it is cleared from the display
6752 // buffer
6753
6754 this.topRow_ = 0;
6755 this.startPts_ = 0;
6756 this.displayed_ = createDisplayBuffer();
6757 this.nonDisplayed_ = createDisplayBuffer();
6758 this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
6759
6760 this.column_ = 0;
6761 this.row_ = BOTTOM_ROW;
6762 this.rollUpRows_ = 2; // This variable holds currently-applied formatting
6763
6764 this.formatting_ = [];
6765 };
6766 /**
6767 * Sets up control code and related constants for this instance
6768 */
6769
6770
6771 Cea608Stream.prototype.setConstants = function () {
6772 // The following attributes have these uses:
6773 // ext_ : char0 for mid-row codes, and the base for extended
6774 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
6775 // extended codes)
6776 // control_: char0 for control codes, except byte-shifted to the
6777 // left so that we can do this.control_ | CONTROL_CODE
6778 // offset_: char0 for tab offset codes
6779 //
6780 // It's also worth noting that control codes, and _only_ control codes,
6781 // differ between field 1 and field 2. Field 2 control codes are always
6782 // their field 1 value plus 1. That's why there's the "| field" on the
6783 // control value.
6784 if (this.dataChannel_ === 0) {
6785 this.BASE_ = 0x10;
6786 this.EXT_ = 0x11;
6787 this.CONTROL_ = (0x14 | this.field_) << 8;
6788 this.OFFSET_ = 0x17;
6789 } else if (this.dataChannel_ === 1) {
6790 this.BASE_ = 0x18;
6791 this.EXT_ = 0x19;
6792 this.CONTROL_ = (0x1c | this.field_) << 8;
6793 this.OFFSET_ = 0x1f;
6794 } // Constants for the LSByte command codes recognized by Cea608Stream. This
6795 // list is not exhaustive. For a more comprehensive listing and semantics see
6796 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
6797 // Padding
6798
6799
6800 this.PADDING_ = 0x0000; // Pop-on Mode
6801
6802 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
6803 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
6804
6805 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
6806 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
6807 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
6808 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
6809
6810 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
6811
6812 this.BACKSPACE_ = this.CONTROL_ | 0x21;
6813 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
6814 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
6815 };
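// Illustrative sketch (not part of the original bundle): with field 0 and
// data channel 0 (CC1), CONTROL_ is 0x1400, so for example:
//   RESUME_CAPTION_LOADING_ === 0x1400 | 0x20 === 0x1420 (RCL)
//   ERASE_DISPLAYED_MEMORY_ === 0x1400 | 0x2c === 0x142c (EDM)
// Field 2 raises the first byte by one (the `| this.field_`), so the same
// commands arrive as 0x15xx / 0x1dxx pairs.
var exampleCc1Rcl = (0x14 | 0) << 8 | 0x20; // 0x1420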
6816 /**
6817 * Detects if the 2-byte packet data is a special character
6818 *
6819 * Special characters have a second byte in the range 0x30 to 0x3f,
6820 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
6821 * data channel 2).
6822 *
6823 * @param {Integer} char0 The first byte
6824 * @param {Integer} char1 The second byte
6825 * @return {Boolean} Whether the 2 bytes are a special character
6826 */
6827
6828
6829 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
6830 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
6831 };
6832 /**
6833 * Detects if the 2-byte packet data is an extended character
6834 *
6835 * Extended characters have a second byte in the range 0x20 to 0x3f,
6836 * with the first byte being 0x12 or 0x13 (for data channel 1) or
6837 * 0x1a or 0x1b (for data channel 2).
6838 *
6839 * @param {Integer} char0 The first byte
6840 * @param {Integer} char1 The second byte
6841 * @return {Boolean} Whether the 2 bytes are an extended character
6842 */
6843
6844
6845 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
6846 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
6847 };
6848 /**
6849 * Detects if the 2-byte packet is a mid-row code
6850 *
6851 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
6852 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
6853 * channel 2).
6854 *
6855 * @param {Integer} char0 The first byte
6856 * @param {Integer} char1 The second byte
6857 * @return {Boolean} Whether the 2 bytes are a mid-row code
6858 */
6859
6860
6861 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
6862 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
6863 };
6864 /**
6865 * Detects if the 2-byte packet is an offset control code
6866 *
6867 * Offset control codes have a second byte in the range 0x21 to 0x23,
6868 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
6869 * data channel 2).
6870 *
6871 * @param {Integer} char0 The first byte
6872 * @param {Integer} char1 The second byte
6873 * @return {Boolean} Whether the 2 bytes are an offset control code
6874 */
6875
6876
6877 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
6878 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
6879 };
6880 /**
6881 * Detects if the 2-byte packet is a Preamble Address Code
6882 *
6883 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
6884 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
6885 * range 0x40 to 0x7f.
6886 *
6887 * @param {Integer} char0 The first byte
6888 * @param {Integer} char1 The second byte
6889 * @return {Boolean} Whether the 2 bytes are a PAC
6890 */
6891
6892
6893 Cea608Stream.prototype.isPAC = function (char0, char1) {
6894 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
6895 };
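// Illustrative sketch (not part of the original bundle): decoding a PAC
// the way the push handler above does. 0x155a (parity bits already
// stripped) is a hypothetical field-1 indent PAC.
var examplePacDecode = function (data) {
  return {
    row: ROWS.indexOf(data & 0x1f20),
    column: (data & 0x10) === 0x10 ? ((data & 0xe) >> 1) * 4 : null, // null: cursor not moved
    underline: (data & 0x1) === 0x1
  };
};
// examplePacDecode(0x155a) -> { row: 4, column: 20, underline: false }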
6896 /**
6897 * Detects if a packet's second byte is in the range of a PAC color code
6898 *
6899 * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
6900 * 0x60 to 0x7f.
6901 *
6902 * @param {Integer} char1 The second byte
6903 * @return {Boolean} Whether the byte is a color PAC
6904 */
6905
6906
6907 Cea608Stream.prototype.isColorPAC = function (char1) {
6908 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
6909 };
6910 /**
6911 * Detects if a single byte is in the range of a normal character
6912 *
6913 * Normal text bytes are in the range 0x20 to 0x7f.
6914 *
6915 * @param {Integer} char The byte
6916 * @return {Boolean} Whether the byte is a normal character
6917 */
6918
6919
6920 Cea608Stream.prototype.isNormalChar = function (char) {
6921 return char >= 0x20 && char <= 0x7f;
6922 };
6923 /**
6924 * Configures roll-up
6925 *
6926 * @param {Integer} pts Current PTS
6927 * @param {Integer} newBaseRow Used by PACs to slide the current window to
6928 * a new position
6929 */
6930
6931
6932 Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
6933 // Reset the base row to the bottom row when switching modes
6934 if (this.mode_ !== 'rollUp') {
6935 this.row_ = BOTTOM_ROW;
6936 this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
6937
6938 this.flushDisplayed(pts);
6939 this.nonDisplayed_ = createDisplayBuffer();
6940 this.displayed_ = createDisplayBuffer();
6941 }
6942
6943 if (newBaseRow !== undefined && newBaseRow !== this.row_) {
6944 // move currently displayed captions (up or down) to the new base row
6945 for (var i = 0; i < this.rollUpRows_; i++) {
6946 this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
6947 this.displayed_[this.row_ - i] = '';
6948 }
6949 }
6950
6951 if (newBaseRow === undefined) {
6952 newBaseRow = this.row_;
6953 }
6954
6955 this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
6956 }; // Adds the opening HTML tags for the passed formatting to the caption text,
6957 // and keeps track of them for later closing
6958
6959
6960 Cea608Stream.prototype.addFormatting = function (pts, format) {
6961 this.formatting_ = this.formatting_.concat(format);
6962 var text = format.reduce(function (text, format) {
6963 return text + '<' + format + '>';
6964 }, '');
6965 this[this.mode_](pts, text);
6966 }; // Adds HTML closing tags for current formatting to caption text and
6967 // clears remembered formatting
6968
6969
6970 Cea608Stream.prototype.clearFormatting = function (pts) {
6971 if (!this.formatting_.length) {
6972 return;
6973 }
6974
6975 var text = this.formatting_.reverse().reduce(function (text, format) {
6976 return text + '</' + format + '>';
6977 }, '');
6978 this.formatting_ = [];
6979 this[this.mode_](pts, text);
6980 }; // Mode Implementations
6981
6982
6983 Cea608Stream.prototype.popOn = function (pts, text) {
6984 var baseRow = this.nonDisplayed_[this.row_]; // buffer characters
6985
6986 baseRow += text;
6987 this.nonDisplayed_[this.row_] = baseRow;
6988 };
6989
6990 Cea608Stream.prototype.rollUp = function (pts, text) {
6991 var baseRow = this.displayed_[this.row_];
6992 baseRow += text;
6993 this.displayed_[this.row_] = baseRow;
6994 };
6995
6996 Cea608Stream.prototype.shiftRowsUp_ = function () {
6997 var i; // clear out inactive rows
6998
6999 for (i = 0; i < this.topRow_; i++) {
7000 this.displayed_[i] = '';
7001 }
7002
7003 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
7004 this.displayed_[i] = '';
7005 } // shift displayed rows up
7006
7007
7008 for (i = this.topRow_; i < this.row_; i++) {
7009 this.displayed_[i] = this.displayed_[i + 1];
7010 } // clear out the bottom row
7011
7012
7013 this.displayed_[this.row_] = '';
7014 };
7015
7016 Cea608Stream.prototype.paintOn = function (pts, text) {
7017 var baseRow = this.displayed_[this.row_];
7018 baseRow += text;
7019 this.displayed_[this.row_] = baseRow;
7020 }; // exports
7021
7022
7023 var captionStream = {
7024 CaptionStream: CaptionStream$1,
7025 Cea608Stream: Cea608Stream,
7026 Cea708Stream: Cea708Stream
7027 };
7028 /**
7029 * mux.js
7030 *
7031 * Copyright (c) Brightcove
7032 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
7033 */
7034
7035 var streamTypes = {
7036 H264_STREAM_TYPE: 0x1B,
7037 ADTS_STREAM_TYPE: 0x0F,
7038 METADATA_STREAM_TYPE: 0x15
7039 };
7040 var MAX_TS = 8589934592;
7041 var RO_THRESH = 4294967296;
7042 var TYPE_SHARED = 'shared';
7043
7044 var handleRollover = function handleRollover(value, reference) {
7045 var direction = 1;
7046
7047 if (value > reference) {
7048 // If the current timestamp value is greater than our reference timestamp and we detect a
7049 // timestamp rollover, this means the roll over is happening in the opposite direction.
7050 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
7051 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
7052 // rollover point. In loading this segment, the timestamp values will be very large,
7053 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
7054 // the time stamp to be `value - 2^33`.
7055 direction = -1;
7056 } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
7057 // cause an incorrect adjustment.
7058
7059
7060 while (Math.abs(reference - value) > RO_THRESH) {
7061 value += direction * MAX_TS;
7062 }
7063
7064 return value;
7065 };
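// Worked example (illustrative values; not executed). MAX_TS is 2^33, the
// PTS/DTS rollover period, and RO_THRESH is 2^32 (~13 hours at 90kHz).
// Suppose the reference was captured just after a rollover and a seek then
// loads data from just before it:
//
//   handleRollover(8589844592, 90000); // => -90000
//
// value (2^33 - 90000) is numerically ahead of the reference, so direction
// is -1; the gap exceeds RO_THRESH, so one iteration subtracts 2^33 and the
// sample lands exactly one second (90000 ticks) before the reference.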
7066
7067 var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
7068 var lastDTS, referenceDTS;
7069 TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
7070 // video and audio. We could use `undefined` here, but having a string
7071 // makes debugging a little clearer.
7072
7073 this.type_ = type || TYPE_SHARED;
7074
7075 this.push = function (data) {
7076 // Any "shared" rollover streams will accept _all_ data. Otherwise,
7077 // streams will only accept data that matches their type.
7078 if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
7079 return;
7080 }
7081
7082 if (referenceDTS === undefined) {
7083 referenceDTS = data.dts;
7084 }
7085
7086 data.dts = handleRollover(data.dts, referenceDTS);
7087 data.pts = handleRollover(data.pts, referenceDTS);
7088 lastDTS = data.dts;
7089 this.trigger('data', data);
7090 };
7091
7092 this.flush = function () {
7093 referenceDTS = lastDTS;
7094 this.trigger('done');
7095 };
7096
7097 this.endTimeline = function () {
7098 this.flush();
7099 this.trigger('endedtimeline');
7100 };
7101
7102 this.discontinuity = function () {
7103 referenceDTS = void 0;
7104 lastDTS = void 0;
7105 };
7106
7107 this.reset = function () {
7108 this.discontinuity();
7109 this.trigger('reset');
7110 };
7111 };
7112
7113 TimestampRolloverStream$1.prototype = new stream();
7114 var timestampRolloverStream = {
7115 TimestampRolloverStream: TimestampRolloverStream$1,
7116 handleRollover: handleRollover
7117 };
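// Usage sketch (hedged; variable names are illustrative). A rollover stream
// is keyed by elementary stream type and rewrites dts/pts in place:
//
//   var rollover = new timestampRolloverStream.TimestampRolloverStream('video');
//   rollover.on('data', function (data) {
//     // data.dts and data.pts are now monotonic across 2^33 wrap-arounds
//   });
//   rollover.push({ type: 'video', dts: 8589934590, pts: 8589934590 });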
7118
7119 var percentEncode$1 = function percentEncode(bytes, start, end) {
7120 var i,
7121 result = '';
7122
7123 for (i = start; i < end; i++) {
7124 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
7125 }
7126
7127 return result;
7128 },
7129 // return the string representation of the specified byte range,
7130 // interpreted as UTF-8.
7131 parseUtf8 = function parseUtf8(bytes, start, end) {
7132 return decodeURIComponent(percentEncode$1(bytes, start, end));
7133 },
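// Worked example (illustrative values):
//
//   percentEncode([0x48, 0x69], 0, 2); // => '%48%69'
//   parseUtf8([0x48, 0x69], 0, 2);     // => 'Hi', via decodeURIComponent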
7134 // return the string representation of the specified byte range,
7135 // interpreted as ISO-8859-1.
7136 parseIso88591$1 = function parseIso88591(bytes, start, end) {
7137 return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
7138 },
7139 parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
7140 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
7141 },
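// Worked example (illustrative values): syncsafe integers keep the most
// significant bit of every byte clear so tag sizes can never contain a
// false MPEG sync pattern. Each byte therefore contributes 7 bits:
//
//   parseSyncSafeInteger$1([0x00, 0x00, 0x02, 0x01]); // => (2 << 7) | 1 === 257
//
// whereas a plain 32-bit big-endian read of the same bytes would yield 513.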
7142 tagParsers = {
7143 TXXX: function TXXX(tag) {
7144 var i;
7145
7146 if (tag.data[0] !== 3) {
7147 // ignore frames with unrecognized character encodings
7148 return;
7149 }
7150
7151 for (i = 1; i < tag.data.length; i++) {
7152 if (tag.data[i] === 0) {
7153 // parse the text fields
7154 tag.description = parseUtf8(tag.data, 1, i); // do not include the null terminator in the tag value
7155
7156 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
7157 break;
7158 }
7159 }
7160
7161 tag.data = tag.value;
7162 },
7163 WXXX: function WXXX(tag) {
7164 var i;
7165
7166 if (tag.data[0] !== 3) {
7167 // ignore frames with unrecognized character encodings
7168 return;
7169 }
7170
7171 for (i = 1; i < tag.data.length; i++) {
7172 if (tag.data[i] === 0) {
7173 // parse the description and URL fields
7174 tag.description = parseUtf8(tag.data, 1, i);
7175 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
7176 break;
7177 }
7178 }
7179 },
7180 PRIV: function PRIV(tag) {
7181 var i;
7182
7183 for (i = 0; i < tag.data.length; i++) {
7184 if (tag.data[i] === 0) {
7185 // parse the owner identifier, up to the null terminator
7186 tag.owner = parseIso88591$1(tag.data, 0, i);
7187 break;
7188 }
7189 }
7190
7191 tag.privateData = tag.data.subarray(i + 1);
7192 tag.data = tag.privateData;
7193 }
7194 },
7195 _MetadataStream;
7196
7197 _MetadataStream = function MetadataStream(options) {
7198 var settings = {
7199 debug: !!(options && options.debug),
7200 // the bytes of the program-level descriptor field in MP2T
7201 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
7202 // program element descriptors"
7203 descriptor: options && options.descriptor
7204 },
7205 // the total size in bytes of the ID3 tag being parsed
7206 tagSize = 0,
7207 // tag data that is not complete enough to be parsed
7208 buffer = [],
7209 // the total number of bytes currently in the buffer
7210 bufferSize = 0,
7211 i;
7212
7213 _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
7214 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
7215
7216
7217 this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
7218
7219 if (settings.descriptor) {
7220 for (i = 0; i < settings.descriptor.length; i++) {
7221 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
7222 }
7223 }
7224
7225 this.push = function (chunk) {
7226 var tag, frameStart, frameSize, frame, i, frameHeader;
7227
7228 if (chunk.type !== 'timed-metadata') {
7229 return;
7230 } // if data_alignment_indicator is set in the PES header,
7231 // we must have the start of a new ID3 tag. Assume anything
7232 // remaining in the buffer was malformed and throw it out
7233
7234
7235 if (chunk.dataAlignmentIndicator) {
7236 bufferSize = 0;
7237 buffer.length = 0;
7238 } // ignore events that don't look like ID3 data
7239
7240
7241 if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
7242 if (settings.debug) {
7243 // eslint-disable-next-line no-console
7244 console.log('Skipping unrecognized metadata packet');
7245 }
7246
7247 return;
7248 } // add this chunk to the data we've collected so far
7249
7250
7251 buffer.push(chunk);
7252 bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header
7253
7254 if (buffer.length === 1) {
7255 // the frame size is transmitted as a 28-bit integer in the
7256 // last four bytes of the ID3 header.
7257 // The most significant bit of each byte is dropped and the
7258 // results concatenated to recover the actual value.
7259 tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
7260 // convenient for our comparisons to include it
7261
7262 tagSize += 10;
7263 } // if the entire frame has not arrived, wait for more data
7264
7265
7266 if (bufferSize < tagSize) {
7267 return;
7268 } // collect the entire frame so it can be parsed
7269
7270
7271 tag = {
7272 data: new Uint8Array(tagSize),
7273 frames: [],
7274 pts: buffer[0].pts,
7275 dts: buffer[0].dts
7276 };
7277
7278 for (i = 0; i < tagSize;) {
7279 tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
7280 i += buffer[0].data.byteLength;
7281 bufferSize -= buffer[0].data.byteLength;
7282 buffer.shift();
7283 } // find the start of the first frame and the end of the tag
7284
7285
7286 frameStart = 10;
7287
7288 if (tag.data[5] & 0x40) {
7289 // advance the frame start past the extended header
7290 frameStart += 4; // header size field
7291
7292 frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end
7293
7294 tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
7295 } // parse one or more ID3 frames
7296 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
7297
7298
7299 do {
7300 // determine the number of bytes in this frame
7301 frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));
7302
7303 if (frameSize < 1) {
7304 // eslint-disable-next-line no-console
7305 return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
7306 }
7307
7308 frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
7309 frame = {
7310 id: frameHeader,
7311 data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
7312 };
7313 frame.key = frame.id;
7314
7315 if (tagParsers[frame.id]) {
7316 tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
7317 // time for raw AAC data
7318
7319 if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
7320 var d = frame.data,
7321 size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
7322 size *= 4;
7323 size += d[7] & 0x03;
7324 frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
7325 // on the value of this frame
7326 // we couldn't have known the appropriate pts and dts before
7327 // parsing this ID3 tag so set those values now
7328
7329 if (tag.pts === undefined && tag.dts === undefined) {
7330 tag.pts = frame.timeStamp;
7331 tag.dts = frame.timeStamp;
7332 }
7333
7334 this.trigger('timestamp', frame);
7335 }
7336 }
7337
7338 tag.frames.push(frame);
7339 frameStart += 10; // advance past the frame header
7340
7341 frameStart += frameSize; // advance past the frame body
7342 } while (frameStart < tagSize);
7343
7344 this.trigger('data', tag);
7345 };
7346 };
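// Worked example (illustrative descriptor bytes): with METADATA_STREAM_TYPE
// 0x15 and a descriptor of [0x49, 0x44, 0x33] ('ID3'), the dispatch type
// computed above is '15' + '49' + '44' + '33' === '15494433', the value
// exposed on in-band metadata text tracks.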
7347
7348 _MetadataStream.prototype = new stream();
7349 var metadataStream = _MetadataStream;
7350 var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types
7351
7352 var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants
7353
7354
7355 var MP2T_PACKET_LENGTH = 188,
7356 // bytes
7357 SYNC_BYTE = 0x47;
7358 /**
7359 * Splits an incoming stream of binary data into MPEG-2 Transport
7360 * Stream packets.
7361 */
7362
7363 _TransportPacketStream = function TransportPacketStream() {
7364 var buffer = new Uint8Array(MP2T_PACKET_LENGTH),
7365 bytesInBuffer = 0;
7366
7367 _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
7368
7369 /**
7370 * Split a stream of data into M2TS packets
7371 **/
7372
7373
7374 this.push = function (bytes) {
7375 var startIndex = 0,
7376 endIndex = MP2T_PACKET_LENGTH,
7377 everything; // If there are bytes remaining from the last segment, prepend them to the
7378 // bytes that were pushed in
7379
7380 if (bytesInBuffer) {
7381 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
7382 everything.set(buffer.subarray(0, bytesInBuffer));
7383 everything.set(bytes, bytesInBuffer);
7384 bytesInBuffer = 0;
7385 } else {
7386 everything = bytes;
7387 } // While we have enough data for a packet
7388
7389
7390 while (endIndex < everything.byteLength) {
7391 // Look for a pair of start and end sync bytes in the data.
7392 if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
7393 // We found a packet so emit it and jump one whole packet forward in
7394 // the stream
7395 this.trigger('data', everything.subarray(startIndex, endIndex));
7396 startIndex += MP2T_PACKET_LENGTH;
7397 endIndex += MP2T_PACKET_LENGTH;
7398 continue;
7399 } // If we get here, we have somehow become de-synchronized and we need to step
7400 // forward one byte at a time until we find a pair of sync bytes that denote
7401 // a packet
7402
7403
7404 startIndex++;
7405 endIndex++;
7406 } // If there was some data left over at the end of the segment that couldn't
7407 // possibly be a whole packet, keep it because it might be the start of a packet
7408 // that continues in the next segment
7409
7410
7411 if (startIndex < everything.byteLength) {
7412 buffer.set(everything.subarray(startIndex), 0);
7413 bytesInBuffer = everything.byteLength - startIndex;
7414 }
7415 };
7416 /**
7417 * Flushes a complete, buffered M2TS packet, if present, and signals the end of input
7418 **/
7419
7420
7421 this.flush = function () {
7422 // If the buffer contains a whole packet when we are being flushed, emit it
7423 // and empty the buffer. Otherwise hold onto the data because it may be
7424 // important for decoding the next segment
7425 if (bytesInBuffer === MP2T_PACKET_LENGTH && buffer[0] === SYNC_BYTE) {
7426 this.trigger('data', buffer);
7427 bytesInBuffer = 0;
7428 }
7429
7430 this.trigger('done');
7431 };
7432
7433 this.endTimeline = function () {
7434 this.flush();
7435 this.trigger('endedtimeline');
7436 };
7437
7438 this.reset = function () {
7439 bytesInBuffer = 0;
7440 this.trigger('reset');
7441 };
7442 };
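// Worked example (illustrative values): pushing 250 bytes whose bytes 0 and
// 188 are both SYNC_BYTE (0x47) emits one complete 188-byte packet and
// buffers the trailing 62 bytes; a later push prepends them, so packets that
// straddle segment boundaries are still recovered. flush() only emits the
// buffer when it holds exactly one aligned packet.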
7443
7444 _TransportPacketStream.prototype = new stream();
7445 /**
7446 * Accepts an MP2T TransportPacketStream and emits data events with parsed
7447 * forms of the individual transport stream packets.
7448 */
7449
7450 _TransportParseStream = function TransportParseStream() {
7451 var parsePsi, parsePat, parsePmt, self;
7452
7453 _TransportParseStream.prototype.init.call(this);
7454
7455 self = this;
7456 this.packetsWaitingForPmt = [];
7457 this.programMapTable = undefined;
7458
7459 parsePsi = function parsePsi(payload, psi) {
7460 var offset = 0; // PSI packets may be split into multiple sections and those
7461 // sections may be split into multiple packets. If a PSI
7462 // section starts in this packet, the payload_unit_start_indicator
7463 // will be true and the first byte of the payload will indicate
7464 // the offset from the current position to the start of the
7465 // section.
7466
7467 if (psi.payloadUnitStartIndicator) {
7468 offset += payload[offset] + 1;
7469 }
7470
7471 if (psi.type === 'pat') {
7472 parsePat(payload.subarray(offset), psi);
7473 } else {
7474 parsePmt(payload.subarray(offset), psi);
7475 }
7476 };
7477
7478 parsePat = function parsePat(payload, pat) {
7479 pat.section_number = payload[7]; // eslint-disable-line camelcase
7480
7481 pat.last_section_number = payload[8]; // eslint-disable-line camelcase
7482 // skip the PSI header and parse the first PMT entry
7483
7484 self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
7485 pat.pmtPid = self.pmtPid;
7486 };
7487 /**
7488 * Parse out the relevant fields of a Program Map Table (PMT).
7489 * @param payload {Uint8Array} the PMT-specific portion of an MP2T
7490 * packet. The first byte in this array should be the table_id
7491 * field.
7492 * @param pmt {object} the object that should be decorated with
7493 * fields parsed from the PMT.
7494 */
7495
7496
7497 parsePmt = function parsePmt(payload, pmt) {
7498 var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
7499 // take effect. We don't believe this should ever be the case
7500 // for HLS but we'll ignore "forward" PMT declarations if we see
7501 // them. Future PMT declarations have the current_next_indicator
7502 // set to zero.
7503
7504 if (!(payload[5] & 0x01)) {
7505 return;
7506 } // overwrite any existing program map table
7507
7508
7509 self.programMapTable = {
7510 video: null,
7511 audio: null,
7512 'timed-metadata': {}
7513 }; // the mapping table ends at the end of the current section
7514
7515 sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
7516 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
7517 // long the program info descriptors are
7518
7519 programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
7520
7521 offset = 12 + programInfoLength;
7522
7523 while (offset < tableEnd) {
7524 var streamType = payload[offset];
7525 var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
7526 // TODO: should this be done for metadata too? for now maintain behavior of
7527 // multiple metadata streams
7528
7529 if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
7530 self.programMapTable.video = pid;
7531 } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
7532 self.programMapTable.audio = pid;
7533 } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
7534 // map pid to stream type for metadata streams
7535 self.programMapTable['timed-metadata'][pid] = streamType;
7536 } // move to the next table entry
7537 // skip past the elementary stream descriptors, if present
7538
7539
7540 offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
7541 } // record the map on the packet as well
7542
7543
7544 pmt.programMapTable = self.programMapTable;
7545 };
7546 /**
7547 * Deliver a new MP2T packet to the next stream in the pipeline.
7548 */
7549
7550
7551 this.push = function (packet) {
7552 var result = {},
7553 offset = 4;
7554 result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
7555
7556 result.pid = packet[1] & 0x1f;
7557 result.pid <<= 8;
7558 result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
7559 // fifth byte of the TS packet header. The adaptation field is
7560 // used to add stuffing to PES packets that don't fill a complete
7561 // TS packet, and to specify some forms of timing and control data
7562 // that we do not currently use.
7563
7564 if ((packet[3] & 0x30) >>> 4 > 0x01) {
7565 offset += packet[offset] + 1;
7566 } // parse the rest of the packet based on the type
7567
7568
7569 if (result.pid === 0) {
7570 result.type = 'pat';
7571 parsePsi(packet.subarray(offset), result);
7572 this.trigger('data', result);
7573 } else if (result.pid === this.pmtPid) {
7574 result.type = 'pmt';
7575 parsePsi(packet.subarray(offset), result);
7576 this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
7577
7578 while (this.packetsWaitingForPmt.length) {
7579 this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
7580 }
7581 } else if (this.programMapTable === undefined) {
7582 // When we have not seen a PMT yet, defer further processing of
7583 // PES packets until one has been parsed
7584 this.packetsWaitingForPmt.push([packet, offset, result]);
7585 } else {
7586 this.processPes_(packet, offset, result);
7587 }
7588 };
7589
7590 this.processPes_ = function (packet, offset, result) {
7591 // set the appropriate stream type
7592 if (result.pid === this.programMapTable.video) {
7593 result.streamType = streamTypes.H264_STREAM_TYPE;
7594 } else if (result.pid === this.programMapTable.audio) {
7595 result.streamType = streamTypes.ADTS_STREAM_TYPE;
7596 } else {
7597 // if not video or audio, it is timed-metadata or unknown
7598 // if unknown, streamType will be undefined
7599 result.streamType = this.programMapTable['timed-metadata'][result.pid];
7600 }
7601
7602 result.type = 'pes';
7603 result.data = packet.subarray(offset);
7604 this.trigger('data', result);
7605 };
7606 };
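// Worked example (illustrative values): for a packet beginning
// 0x47 0x40 0x11 0x10, the header above parses as
//
//   payloadUnitStartIndicator = !!(0x40 & 0x40)    // true
//   pid = ((0x40 & 0x1f) << 8) | 0x11              // 17
//   adaptation_field_control = (0x10 & 0x30) >>> 4 // 1, payload only
//
// so offset stays at 4 and the payload is handled as a PAT, PMT or PES
// packet depending on the pid.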
7607
7608 _TransportParseStream.prototype = new stream();
7609 _TransportParseStream.STREAM_TYPES = {
7610 h264: 0x1b,
7611 adts: 0x0f
7612 };
7613 /**
7614 * Reconstitutes program elementary stream (PES) packets from parsed
7615 * transport stream packets. That is, if you pipe an
7616 * mp2t.TransportParseStream into an mp2t.ElementaryStream, the output
7617 * events will be events which capture the bytes for individual PES
7618 * packets plus relevant metadata that has been extracted from the
7619 * container.
7620 */
7621
7622 _ElementaryStream = function ElementaryStream() {
7623 var self = this,
7624 // PES packet fragments
7625 video = {
7626 data: [],
7627 size: 0
7628 },
7629 audio = {
7630 data: [],
7631 size: 0
7632 },
7633 timedMetadata = {
7634 data: [],
7635 size: 0
7636 },
7637 programMapTable,
7638 parsePes = function parsePes(payload, pes) {
7639 var ptsDtsFlags;
7640 var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array
7641
7642 pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has ts packets
7643 // that are frame data that is continuing from the previous fragment. This
7644 // is to check that the pes data is the start of a new pes payload
7645
7646 if (startPrefix !== 1) {
7647 return;
7648 } // get the packet length; this will be 0 for video
7649
7650
7651 pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
7652
7653 pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
7654 // and a DTS value. Determine what combination of values is
7655 // available to work with.
7656
7657 ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
7658 // performs all bitwise operations on 32-bit integers but javascript
7659 // supports a much greater range (52-bits) of integer using standard
7660 // mathematical operations.
7661 // We construct a 31-bit value using bitwise operators over the 31
7662 // most significant bits and then multiply by 4 (equal to a left-shift
7663 // of 2) before we add the final 2 least significant bits of the
7664 // timestamp (equal to an OR.)
7665
7666 if (ptsDtsFlags & 0xC0) {
7667 // the PTS and DTS are not written out directly. For information
7668 // on how they are encoded, see
7669 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
7670 pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
7671 pes.pts *= 4; // Left shift by 2
7672
7673 pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
7674
7675 pes.dts = pes.pts;
7676
7677 if (ptsDtsFlags & 0x40) {
7678 pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
7679 pes.dts *= 4; // Left shift by 2
7680
7681 pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
7682 }
7683 } // the data section starts immediately after the PES header.
7684 // pes_header_data_length specifies the number of header bytes
7685 // that follow the last byte of the field.
7686
7687
7688 pes.data = payload.subarray(9 + payload[8]);
7689 },
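// Worked example (illustrative values): for PES header bytes
// payload[9..13] = 0x21 0x00 0x01 0x00 0x05, the arithmetic above yields a
// 31-bit prefix of 0, multiplies it by 4, then ORs in (0x05 & 0x06) >>> 1,
// decoding a 33-bit PTS of 2. Splitting the work this way keeps every
// bitwise step inside JavaScript's 32-bit operator range.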
7690
7691 /**
7692 * Pass completely parsed PES packets to the next stream in the pipeline
7693 **/
7694 flushStream = function flushStream(stream, type, forceFlush) {
7695 var packetData = new Uint8Array(stream.size),
7696 event = {
7697 type: type
7698 },
7699 i = 0,
7700 offset = 0,
7701 packetFlushable = false,
7702 fragment; // do nothing if there is not enough buffered data for a complete
7703 // PES header
7704
7705 if (!stream.data.length || stream.size < 9) {
7706 return;
7707 }
7708
7709 event.trackId = stream.data[0].pid; // reassemble the packet
7710
7711 for (i = 0; i < stream.data.length; i++) {
7712 fragment = stream.data[i];
7713 packetData.set(fragment.data, offset);
7714 offset += fragment.data.byteLength;
7715 } // parse assembled packet's PES header
7716
7717
7718 parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
7719 // check that there is enough stream data to fill the packet
7720
7721 packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
7722
7723 if (forceFlush || packetFlushable) {
7724 stream.size = 0;
7725 stream.data.length = 0;
7726 } // only emit packets that are complete. this is to avoid assembling
7727 // incomplete PES packets due to poor segmentation
7728
7729
7730 if (packetFlushable) {
7731 self.trigger('data', event);
7732 }
7733 };
7734
7735 _ElementaryStream.prototype.init.call(this);
7736 /**
7737 * Identifies M2TS packet types and parses PES packets using metadata
7738 * parsed from the PMT
7739 **/
7740
7741
7742 this.push = function (data) {
7743 ({
7744 pat: function pat() { // we have to wait for the PMT to arrive as well before we
7745 // have any meaningful metadata
7746 },
7747 pes: function pes() {
7748 var stream, streamType;
7749
7750 switch (data.streamType) {
7751 case streamTypes.H264_STREAM_TYPE:
7752 stream = video;
7753 streamType = 'video';
7754 break;
7755
7756 case streamTypes.ADTS_STREAM_TYPE:
7757 stream = audio;
7758 streamType = 'audio';
7759 break;
7760
7761 case streamTypes.METADATA_STREAM_TYPE:
7762 stream = timedMetadata;
7763 streamType = 'timed-metadata';
7764 break;
7765
7766 default:
7767 // ignore unknown stream types
7768 return;
7769 } // if a new packet is starting, we can flush the completed
7770 // packet
7771
7772
7773 if (data.payloadUnitStartIndicator) {
7774 flushStream(stream, streamType, true);
7775 } // buffer this fragment until we are sure we've received the
7776 // complete payload
7777
7778
7779 stream.data.push(data);
7780 stream.size += data.data.byteLength;
7781 },
7782 pmt: function pmt() {
7783 var event = {
7784 type: 'metadata',
7785 tracks: []
7786 };
7787 programMapTable = data.programMapTable; // translate audio and video streams to tracks
7788
7789 if (programMapTable.video !== null) {
7790 event.tracks.push({
7791 timelineStartInfo: {
7792 baseMediaDecodeTime: 0
7793 },
7794 id: +programMapTable.video,
7795 codec: 'avc',
7796 type: 'video'
7797 });
7798 }
7799
7800 if (programMapTable.audio !== null) {
7801 event.tracks.push({
7802 timelineStartInfo: {
7803 baseMediaDecodeTime: 0
7804 },
7805 id: +programMapTable.audio,
7806 codec: 'adts',
7807 type: 'audio'
7808 });
7809 }
7810
7811 self.trigger('data', event);
7812 }
7813 })[data.type]();
7814 };
7815
7816 this.reset = function () {
7817 video.size = 0;
7818 video.data.length = 0;
7819 audio.size = 0;
7820 audio.data.length = 0;
7821 this.trigger('reset');
7822 };
7823 /**
7824 * Flush any remaining input. Video PES packets may be of variable
7825 * length. Normally, the start of a new video packet can trigger the
7826 * finalization of the previous packet. That is not possible if no
7827 * more video is forthcoming, however. In that case, some other
7828 * mechanism (like the end of the file) has to be employed. When it is
7829 * clear that no additional data is forthcoming, calling this method
7830 * will flush the buffered packets.
7831 */
7832
7833
7834 this.flushStreams_ = function () {
7835 // !!THIS ORDER IS IMPORTANT!!
7836 // video first then audio
7837 flushStream(video, 'video');
7838 flushStream(audio, 'audio');
7839 flushStream(timedMetadata, 'timed-metadata');
7840 };
7841
7842 this.flush = function () {
7843 this.flushStreams_();
7844 this.trigger('done');
7845 };
7846 };
7847
7848 _ElementaryStream.prototype = new stream();
7849 var m2ts = {
7850 PAT_PID: 0x0000,
7851 MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
7852 TransportPacketStream: _TransportPacketStream,
7853 TransportParseStream: _TransportParseStream,
7854 ElementaryStream: _ElementaryStream,
7855 TimestampRolloverStream: TimestampRolloverStream,
7856 CaptionStream: captionStream.CaptionStream,
7857 Cea608Stream: captionStream.Cea608Stream,
7858 Cea708Stream: captionStream.Cea708Stream,
7859 MetadataStream: metadataStream
7860 };
7861
7862 for (var type in streamTypes) {
7863 if (streamTypes.hasOwnProperty(type)) {
7864 m2ts[type] = streamTypes[type];
7865 }
7866 }
7867
7868 var m2ts_1 = m2ts;
7869 var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;
7870
7871 var _AdtsStream;
7872
7873 var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
7874 /*
7875 * Accepts an ElementaryStream and emits data events with parsed
7876 * AAC Audio Frames of the individual packets. Input audio in ADTS
7877 * format is unpacked and re-emitted as AAC frames.
7878 *
7879 * @see http://wiki.multimedia.cx/index.php?title=ADTS
7880 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
7881 */
7882
7883 _AdtsStream = function AdtsStream(handlePartialSegments) {
7884 var buffer,
7885 frameNum = 0;
7886
7887 _AdtsStream.prototype.init.call(this);
7888
7889 this.push = function (packet) {
7890 var i = 0,
7891 frameLength,
7892 protectionSkipBytes,
7893 frameEnd,
7894 oldBuffer,
7895 sampleCount,
7896 adtsFrameDuration;
7897
7898 if (!handlePartialSegments) {
7899 frameNum = 0;
7900 }
7901
7902 if (packet.type !== 'audio') {
7903 // ignore non-audio data
7904 return;
7905 } // Prepend any data in the buffer to the input data so that we can parse
7906 // AAC frames that cross a PES packet boundary
7907
7908
7909 if (buffer) {
7910 oldBuffer = buffer;
7911 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
7912 buffer.set(oldBuffer);
7913 buffer.set(packet.data, oldBuffer.byteLength);
7914 } else {
7915 buffer = packet.data;
7916 } // unpack any ADTS frames which have been fully received
7917 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
7918
7919
7920 while (i + 5 < buffer.length) {
7921 // Look for the start of an ADTS header.
7922 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
7923 // If a valid header was not found, jump one forward and attempt to
7924 // find a valid ADTS header starting at the next byte
7925 i++;
7926 continue;
7927 } // The protection skip bit tells us if we have 2 bytes of CRC data at the
7928 // end of the ADTS header
7929
7930
7931 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
7932 // end of the sync sequence
7933
7934 frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
7935 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
7936 adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2];
7937 frameEnd = i + frameLength; // If we don't have enough data to actually finish this ADTS frame, return
7938 // and wait for more data
7939
7940 if (buffer.byteLength < frameEnd) {
7941 return;
7942 } // Otherwise, deliver the complete AAC frame
7943
7944
7945 this.trigger('data', {
7946 pts: packet.pts + frameNum * adtsFrameDuration,
7947 dts: packet.dts + frameNum * adtsFrameDuration,
7948 sampleCount: sampleCount,
7949 audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
7950 channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
7951 samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
7952 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
7953 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
7954 samplesize: 16,
7955 data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
7956 });
7957 frameNum++; // If the frame ended exactly at the end of the buffer, clear it and return
7958
7959 if (buffer.byteLength === frameEnd) {
7960 buffer = undefined;
7961 return;
7962 } // Remove the finished frame from the buffer and start the process again
7963
7964
7965 buffer = buffer.subarray(frameEnd);
7966 }
7967 };
7968
7969 this.flush = function () {
7970 frameNum = 0;
7971 this.trigger('done');
7972 };
7973
7974 this.reset = function () {
7975 buffer = void 0;
7976 this.trigger('reset');
7977 };
7978
7979 this.endTimeline = function () {
7980 buffer = void 0;
7981 this.trigger('endedtimeline');
7982 };
7983 };
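// Worked example (illustrative values): for a 44.1kHz stream the sampling
// frequency index is 4, so with ((buffer[i + 6] & 0x03) + 1) === 1:
//
//   sampleCount = 1024
//   adtsFrameDuration = 1024 * 90000 / 44100 // ~2089.8 clock ticks, ~23.2ms
//
// which is the per-frame step applied to pts/dts above as frameNum advances.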
7984
7985 _AdtsStream.prototype = new stream();
7986 var adts = _AdtsStream;
7987 /**
7988 * mux.js
7989 *
7990 * Copyright (c) Brightcove
7991 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
7992 */
7993
7994 var ExpGolomb;
7995 /**
7996 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
7997 * scheme used by h264.
7998 */
7999
8000 ExpGolomb = function ExpGolomb(workingData) {
8001 var // the number of bytes left to examine in workingData
8002 workingBytesAvailable = workingData.byteLength,
8003 // the current word being examined
8004 workingWord = 0,
8005 // :uint
8006 // the number of bits left to examine in the current word
8007 workingBitsAvailable = 0; // :uint;
8008 // ():uint
8009
8010 this.length = function () {
8011 return 8 * workingBytesAvailable;
8012 }; // ():uint
8013
8014
8015 this.bitsAvailable = function () {
8016 return 8 * workingBytesAvailable + workingBitsAvailable;
8017 }; // ():void
8018
8019
8020 this.loadWord = function () {
8021 var position = workingData.byteLength - workingBytesAvailable,
8022 workingBytes = new Uint8Array(4),
8023 availableBytes = Math.min(4, workingBytesAvailable);
8024
8025 if (availableBytes === 0) {
8026 throw new Error('no bytes available');
8027 }
8028
8029 workingBytes.set(workingData.subarray(position, position + availableBytes));
8030 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
8031
8032 workingBitsAvailable = availableBytes * 8;
8033 workingBytesAvailable -= availableBytes;
8034 }; // (count:int):void
8035
8036
8037 this.skipBits = function (count) {
8038 var skipBytes; // :int
8039
8040 if (workingBitsAvailable > count) {
8041 workingWord <<= count;
8042 workingBitsAvailable -= count;
8043 } else {
8044 count -= workingBitsAvailable;
8045 skipBytes = Math.floor(count / 8);
8046 count -= skipBytes * 8;
8047 workingBytesAvailable -= skipBytes;
8048 this.loadWord();
8049 workingWord <<= count;
8050 workingBitsAvailable -= count;
8051 }
8052 }; // (size:int):uint
8053
8054
8055 this.readBits = function (size) {
8056 var bits = Math.min(workingBitsAvailable, size),
8057 // :uint
8058 valu = workingWord >>> 32 - bits; // :uint
8059 // if size > 31, handle error
8060
8061 workingBitsAvailable -= bits;
8062
8063 if (workingBitsAvailable > 0) {
8064 workingWord <<= bits;
8065 } else if (workingBytesAvailable > 0) {
8066 this.loadWord();
8067 }
8068
8069 bits = size - bits;
8070
8071 if (bits > 0) {
8072 return valu << bits | this.readBits(bits);
8073 }
8074
8075 return valu;
8076 }; // ():uint
8077
8078
8079 this.skipLeadingZeros = function () {
8080 var leadingZeroCount; // :uint
8081
8082 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
8083 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
8084 // the first bit of working word is 1
8085 workingWord <<= leadingZeroCount;
8086 workingBitsAvailable -= leadingZeroCount;
8087 return leadingZeroCount;
8088 }
8089 } // we exhausted workingWord and still have not found a 1
8090
8091
8092 this.loadWord();
8093 return leadingZeroCount + this.skipLeadingZeros();
8094 }; // ():void
8095
8096
8097 this.skipUnsignedExpGolomb = function () {
8098 this.skipBits(1 + this.skipLeadingZeros());
8099 }; // ():void
8100
8101
8102 this.skipExpGolomb = function () {
8103 this.skipBits(1 + this.skipLeadingZeros());
8104 }; // ():uint
8105
8106
8107 this.readUnsignedExpGolomb = function () {
8108 var clz = this.skipLeadingZeros(); // :uint
8109
8110 return this.readBits(clz + 1) - 1;
8111 }; // ():int
8112
8113
8114 this.readExpGolomb = function () {
8115 var valu = this.readUnsignedExpGolomb(); // :int
8116
8117 if (0x01 & valu) {
8118 // the number is odd if the low order bit is set
8119 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
8120 }
8121
8122 return -1 * (valu >>> 1); // divide by two then make it negative
8123 }; // Some convenience functions
8124 // :Boolean
8125
8126
8127 this.readBoolean = function () {
8128 return this.readBits(1) === 1;
8129 }; // ():int
8130
8131
8132 this.readUnsignedByte = function () {
8133 return this.readBits(8);
8134 };
8135
8136 this.loadWord();
8137 };
8138
8139 var expGolomb = ExpGolomb;
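// Worked example (illustrative bitstrings): an Exp-Golomb codeword is
// <leading zeros><1><suffix>, decoded by readUnsignedExpGolomb as
// readBits(zeros + 1) - 1:
//
//   '1'     => 0 zeros, readBits(1) === 1, ue(v) === 0
//   '00111' => 2 zeros, readBits(3) === 7, ue(v) === 6
//
// readExpGolomb then maps ue(v) to signed values: 5 => 3, 6 => -3.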
8140
8141 var _H264Stream, _NalByteStream;
8142
8143 var PROFILES_WITH_OPTIONAL_SPS_DATA;
8144 /**
8145 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
8146 */
8147
8148 _NalByteStream = function NalByteStream() {
8149 var syncPoint = 0,
8150 i,
8151 buffer;
8152
8153 _NalByteStream.prototype.init.call(this);
8154 /*
8155 * Scans a byte stream and triggers a data event with the NAL units found.
8156 * @param {Object} data Event received from H264Stream
8157 * @param {Uint8Array} data.data The h264 byte stream to be scanned
8158 *
8159 * @see H264Stream.push
8160 */
8161
8162
8163 this.push = function (data) {
8164 var swapBuffer;
8165
8166 if (!buffer) {
8167 buffer = data.data;
8168 } else {
8169 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
8170 swapBuffer.set(buffer);
8171 swapBuffer.set(data.data, buffer.byteLength);
8172 buffer = swapBuffer;
8173 }
8174
8175 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
8176 // scan for NAL unit boundaries
8177 // a match looks like this:
8178 // 0 0 1 .. NAL .. 0 0 1
8179 // ^ sync point ^ i
8180 // or this:
8181 // 0 0 1 .. NAL .. 0 0 0
8182 // ^ sync point ^ i
8183 // advance the sync point to a NAL start, if necessary
8184
8185 for (; syncPoint < len - 3; syncPoint++) {
8186 if (buffer[syncPoint + 2] === 1) {
8187 // the sync point is properly aligned
8188 i = syncPoint + 5;
8189 break;
8190 }
8191 }
8192
8193 while (i < len) {
8194 // look at the current byte to determine if we've hit the end of
8195 // a NAL unit boundary
8196 switch (buffer[i]) {
8197 case 0:
8198 // skip past non-sync sequences
8199 if (buffer[i - 1] !== 0) {
8200 i += 2;
8201 break;
8202 } else if (buffer[i - 2] !== 0) {
8203 i++;
8204 break;
8205 } // deliver the NAL unit if it isn't empty
8206
8207
8208 if (syncPoint + 3 !== i - 2) {
8209 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8210 } // drop trailing zeroes
8211
8212
8213 do {
8214 i++;
8215 } while (buffer[i] !== 1 && i < len);
8216
8217 syncPoint = i - 2;
8218 i += 3;
8219 break;
8220
8221 case 1:
8222 // skip past non-sync sequences
8223 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
8224 i += 3;
8225 break;
8226 } // deliver the NAL unit
8227
8228
8229 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8230 syncPoint = i - 2;
8231 i += 3;
8232 break;
8233
8234 default:
8235 // the current byte isn't a one or zero, so it cannot be part
8236 // of a sync sequence
8237 i += 3;
8238 break;
8239 }
8240 } // filter out the NAL units that were delivered
8241
8242
8243 buffer = buffer.subarray(syncPoint);
8244 i -= syncPoint;
8245 syncPoint = 0;
8246 };
8247
8248 this.reset = function () {
8249 buffer = null;
8250 syncPoint = 0;
8251 this.trigger('reset');
8252 };
8253
8254 this.flush = function () {
8255 // deliver the last buffered NAL unit
8256 if (buffer && buffer.byteLength > 3) {
8257 this.trigger('data', buffer.subarray(syncPoint + 3));
8258 } // reset the stream state
8259
8260
8261 buffer = null;
8262 syncPoint = 0;
8263 this.trigger('done');
8264 };
8265
8266 this.endTimeline = function () {
8267 this.flush();
8268 this.trigger('endedtimeline');
8269 };
8270 };
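// Worked example (illustrative values): given the Annex B byte stream
// 00 00 01 67 ... 00 00 01 68 ..., the scan above treats each 00 00 01
// prefix as a boundary and emits roughly the bytes between prefixes (one
// NAL unit per 'data' event, with trailing zeros trimmed), holding any
// partial trailing unit until flush(). Here 0x67 & 0x1f === 7 marks a
// sequence parameter set and 0x68 & 0x1f === 8 a picture parameter set.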
8271
8272 _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
8273 // see Recommendation ITU-T H.264 (4/2013),
8274 // 7.3.2.1.1 Sequence parameter set data syntax
8275
8276 PROFILES_WITH_OPTIONAL_SPS_DATA = {
8277 100: true,
8278 110: true,
8279 122: true,
8280 244: true,
8281 44: true,
8282 83: true,
8283 86: true,
8284 118: true,
8285 128: true,
8286 138: true,
8287 139: true,
8288 134: true
8289 };
8290 /**
8291 * Accepts input from an ElementaryStream and produces H.264 NAL unit data
8292 * events.
8293 */
8294
8295 _H264Stream = function H264Stream() {
8296 var nalByteStream = new _NalByteStream(),
8297 self,
8298 trackId,
8299 currentPts,
8300 currentDts,
8301 discardEmulationPreventionBytes,
8302 readSequenceParameterSet,
8303 skipScalingList;
8304
8305 _H264Stream.prototype.init.call(this);
8306
8307 self = this;
8308 /*
8309 * Pushes a packet from a stream onto the NalByteStream
8310 *
8311 * @param {Object} packet - A packet received from a stream
8312 * @param {Uint8Array} packet.data - The raw bytes of the packet
8313 * @param {Number} packet.dts - Decode timestamp of the packet
8314 * @param {Number} packet.pts - Presentation timestamp of the packet
8315 * @param {Number} packet.trackId - The id of the h264 track this packet came from
8316 * @param {('video'|'audio')} packet.type - The type of packet
8317 *
8318 */
8319
8320 this.push = function (packet) {
8321 if (packet.type !== 'video') {
8322 return;
8323 }
8324
8325 trackId = packet.trackId;
8326 currentPts = packet.pts;
8327 currentDts = packet.dts;
8328 nalByteStream.push(packet);
8329 };
8330 /*
8331 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
8332 * for the NALUs to the next stream component.
8333 * Also, preprocess caption and sequence parameter NALUs.
8334 *
8335 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
8336 * @see NalByteStream.push
8337 */
8338
8339
8340 nalByteStream.on('data', function (data) {
8341 var event = {
8342 trackId: trackId,
8343 pts: currentPts,
8344 dts: currentDts,
8345 data: data
8346 };
8347
8348 switch (data[0] & 0x1f) {
8349 case 0x05:
8350 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
8351 break;
8352
8353 case 0x06:
8354 event.nalUnitType = 'sei_rbsp';
8355 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
8356 break;
8357
8358 case 0x07:
8359 event.nalUnitType = 'seq_parameter_set_rbsp';
8360 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
8361 event.config = readSequenceParameterSet(event.escapedRBSP);
8362 break;
8363
8364 case 0x08:
8365 event.nalUnitType = 'pic_parameter_set_rbsp';
8366 break;
8367
8368 case 0x09:
8369 event.nalUnitType = 'access_unit_delimiter_rbsp';
8370 break;
8371 } // This triggers data on the H264Stream
8372
8373
8374 self.trigger('data', event);
8375 });
8376 nalByteStream.on('done', function () {
8377 self.trigger('done');
8378 });
8379 nalByteStream.on('partialdone', function () {
8380 self.trigger('partialdone');
8381 });
8382 nalByteStream.on('reset', function () {
8383 self.trigger('reset');
8384 });
8385 nalByteStream.on('endedtimeline', function () {
8386 self.trigger('endedtimeline');
8387 });
8388
8389 this.flush = function () {
8390 nalByteStream.flush();
8391 };
8392
8393 this.partialFlush = function () {
8394 nalByteStream.partialFlush();
8395 };
8396
8397 this.reset = function () {
8398 nalByteStream.reset();
8399 };
8400
8401 this.endTimeline = function () {
8402 nalByteStream.endTimeline();
8403 };
8404 /**
8405 * Advance the ExpGolomb decoder past a scaling list. The scaling
8406 * list is optionally transmitted as part of a sequence parameter
8407 * set and is not relevant to transmuxing.
8408 * @param count {number} the number of entries in this scaling list
8409 * @param expGolombDecoder {object} an ExpGolomb pointed to the
8410 * start of a scaling list
8411 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
8412 */
8413
8414
8415 skipScalingList = function skipScalingList(count, expGolombDecoder) {
8416 var lastScale = 8,
8417 nextScale = 8,
8418 j,
8419 deltaScale;
8420
8421 for (j = 0; j < count; j++) {
8422 if (nextScale !== 0) {
8423 deltaScale = expGolombDecoder.readExpGolomb();
8424 nextScale = (lastScale + deltaScale + 256) % 256;
8425 }
8426
8427 lastScale = nextScale === 0 ? lastScale : nextScale;
8428 }
8429 };
8430 /**
8431 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
8432 * Sequence Payload"
8433 * @param data {Uint8Array} the bytes of a RBSP from a NAL
8434 * unit
8435 * @return {Uint8Array} the RBSP without any Emulation
8436 * Prevention Bytes
8437 */
8438
8439
8440 discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
8441 var length = data.byteLength,
8442 emulationPreventionBytesPositions = [],
8443 i = 1,
8444 newLength,
8445 newData; // Find all `Emulation Prevention Bytes`
8446
8447 while (i < length - 2) {
8448 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
8449 emulationPreventionBytesPositions.push(i + 2);
8450 i += 2;
8451 } else {
8452 i++;
8453 }
8454 } // If no Emulation Prevention Bytes were found just return the original
8455 // array
8456
8457
8458 if (emulationPreventionBytesPositions.length === 0) {
8459 return data;
8460 } // Create a new array to hold the NAL unit data
8461
8462
8463 newLength = length - emulationPreventionBytesPositions.length;
8464 newData = new Uint8Array(newLength);
8465 var sourceIndex = 0;
8466
8467 for (i = 0; i < newLength; sourceIndex++, i++) {
8468 if (sourceIndex === emulationPreventionBytesPositions[0]) {
8469 // Skip this byte
8470 sourceIndex++; // Remove this position index
8471
8472 emulationPreventionBytesPositions.shift();
8473 }
8474
8475 newData[i] = data[sourceIndex];
8476 }
8477
8478 return newData;
8479 };
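// Worked example (illustrative values):
//
//   discardEmulationPreventionBytes([0x11, 0x00, 0x00, 0x03, 0x01])
//   // => [0x11, 0x00, 0x00, 0x01]
//
// the 0x03 at index 3 follows two zero bytes, so it is an emulation
// prevention byte and is dropped while every other byte is copied through.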
8480 /**
8481 * Read a sequence parameter set and return some interesting video
8482 * properties. A sequence parameter set is the H264 metadata that
8483 * describes the properties of upcoming video frames.
8484 * @param data {Uint8Array} the bytes of a sequence parameter set
8485 * @return {object} an object with configuration parsed from the
8486 * sequence parameter set, including the dimensions of the
8487 * associated video frames.
8488 */
8489
8490
8491 readSequenceParameterSet = function readSequenceParameterSet(data) {
8492 var frameCropLeftOffset = 0,
8493 frameCropRightOffset = 0,
8494 frameCropTopOffset = 0,
8495 frameCropBottomOffset = 0,
8496 sarScale = 1,
8497 expGolombDecoder,
8498 profileIdc,
8499 levelIdc,
8500 profileCompatibility,
8501 chromaFormatIdc,
8502 picOrderCntType,
8503 numRefFramesInPicOrderCntCycle,
8504 picWidthInMbsMinus1,
8505 picHeightInMapUnitsMinus1,
8506 frameMbsOnlyFlag,
8507 scalingListCount,
8508 sarRatio,
8509 aspectRatioIdc,
8510 i;
8511 expGolombDecoder = new expGolomb(data);
8512 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
8513
8514 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
8515
8516 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
8517
8518 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
8519 // some profiles have more optional data we don't need
8520
8521 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
8522 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
8523
8524 if (chromaFormatIdc === 3) {
8525 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
8526 }
8527
8528 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
8529
8530 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
8531
8532 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
8533
8534 if (expGolombDecoder.readBoolean()) {
8535 // seq_scaling_matrix_present_flag
8536 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
8537
8538 for (i = 0; i < scalingListCount; i++) {
8539 if (expGolombDecoder.readBoolean()) {
8540 // seq_scaling_list_present_flag[ i ]
8541 if (i < 6) {
8542 skipScalingList(16, expGolombDecoder);
8543 } else {
8544 skipScalingList(64, expGolombDecoder);
8545 }
8546 }
8547 }
8548 }
8549 }
8550
8551 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
8552
8553 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
8554
8555 if (picOrderCntType === 0) {
8556 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
8557 } else if (picOrderCntType === 1) {
8558 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
8559
8560 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
8561
8562 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
8563
8564 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
8565
8566 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
8567 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
8568 }
8569 }
8570
8571 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
8572
8573 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
8574
8575 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
8576 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
8577 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
8578
8579 if (frameMbsOnlyFlag === 0) {
8580 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
8581 }
8582
8583 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
8584
8585 if (expGolombDecoder.readBoolean()) {
8586 // frame_cropping_flag
8587 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
8588 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
8589 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
8590 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
8591 }
8592
8593 if (expGolombDecoder.readBoolean()) {
8594 // vui_parameters_present_flag
8595 if (expGolombDecoder.readBoolean()) {
8596 // aspect_ratio_info_present_flag
8597 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
8598
8599 switch (aspectRatioIdc) {
8600 case 1:
8601 sarRatio = [1, 1];
8602 break;
8603
8604 case 2:
8605 sarRatio = [12, 11];
8606 break;
8607
8608 case 3:
8609 sarRatio = [10, 11];
8610 break;
8611
8612 case 4:
8613 sarRatio = [16, 11];
8614 break;
8615
8616 case 5:
8617 sarRatio = [40, 33];
8618 break;
8619
8620 case 6:
8621 sarRatio = [24, 11];
8622 break;
8623
8624 case 7:
8625 sarRatio = [20, 11];
8626 break;
8627
8628 case 8:
8629 sarRatio = [32, 11];
8630 break;
8631
8632 case 9:
8633 sarRatio = [80, 33];
8634 break;
8635
8636 case 10:
8637 sarRatio = [18, 11];
8638 break;
8639
8640 case 11:
8641 sarRatio = [15, 11];
8642 break;
8643
8644 case 12:
8645 sarRatio = [64, 33];
8646 break;
8647
8648 case 13:
8649 sarRatio = [160, 99];
8650 break;
8651
8652 case 14:
8653 sarRatio = [4, 3];
8654 break;
8655
8656 case 15:
8657 sarRatio = [3, 2];
8658 break;
8659
8660 case 16:
8661 sarRatio = [2, 1];
8662 break;
8663
8664 case 255:
8665 {
8666 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
8667 break;
8668 }
8669 }
8670
8671 if (sarRatio) {
8672 sarScale = sarRatio[0] / sarRatio[1];
8673 }
8674 }
8675 }
8676
8677 return {
8678 profileIdc: profileIdc,
8679 levelIdc: levelIdc,
8680 profileCompatibility: profileCompatibility,
8681 width: Math.ceil(((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
8682 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
8683 sarRatio: sarRatio
8684 };
8685 };
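// Worked example (illustrative values): for a 1080p SPS with
// picWidthInMbsMinus1 = 119, picHeightInMapUnitsMinus1 = 67,
// frameMbsOnlyFlag = 1, frameCropBottomOffset = 4 and no other cropping,
// the formulas above give width = 120 * 16 = 1920 and
// height = 1 * 68 * 16 - 4 * 2 = 1080.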
8686 };
8687
8688 _H264Stream.prototype = new stream();
8689 var h264 = {
8690 H264Stream: _H264Stream,
8691 NalByteStream: _NalByteStream
8692 };
8693 /**
8694 * mux.js
8695 *
8696 * Copyright (c) Brightcove
8697 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
8698 *
8699 * Utilities to detect basic properties and metadata about Aac data.
8700 */
8701
8702 var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
8703
8704 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
8705 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
8706 flags = header[byteIndex + 5],
8707 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
8708
8709 returnSize = returnSize >= 0 ? returnSize : 0;
8710
8711 if (footerPresent) {
8712 return returnSize + 20;
8713 }
8714
8715 return returnSize + 10;
8716 };
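// Worked example (illustrative values): for header bytes 6..9 of
// 0x00 0x00 0x07 0x76 the syncsafe size is (0x07 << 7) | 0x76 === 1014;
// with the footer flag (0x10) set in byte 5, parseId3TagSize returns
// 1014 + 20 === 1034 (10 bytes of header plus 10 of footer).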
8717
8718 var getId3Offset = function getId3Offset(data, offset) {
8719 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
8720 return offset;
8721 }
8722
8723 offset += parseId3TagSize(data, offset);
8724 return getId3Offset(data, offset);
8725 }; // TODO: use vhs-utils
8726
8727
8728 var isLikelyAacData$2 = function isLikelyAacData(data) {
8729 var offset = getId3Offset(data, 0);
8730 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, aka this
8731 // is not mp3 data but aac data.
8732 (data[offset + 1] & 0x16) === 0x10;
8733 };
8734
8735 var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
8736 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
8737 }; // return a percent-encoded representation of the specified byte range
8738 // @see http://en.wikipedia.org/wiki/Percent-encoding
8739
8740
8741 var percentEncode = function percentEncode(bytes, start, end) {
8742 var i,
8743 result = '';
8744
8745 for (i = start; i < end; i++) {
8746 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
8747 }
8748
8749 return result;
8750 }; // return the string representation of the specified byte range,
8751 // interpreted as ISO-8859-1.
8752
8753
8754 var parseIso88591 = function parseIso88591(bytes, start, end) {
8755 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
8756 };
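// Illustrative sketch (not part of the original source): percent-encoding
// each byte and round-tripping through unescape() decodes ISO-8859-1 without
// needing TextDecoder, e.g. [0x63, 0x6F, 0x6D] -> '%63%6f%6d' -> 'com'.
var exampleIso88591 = function () {
  return parseIso88591(new Uint8Array([0x63, 0x6F, 0x6D]), 0, 3); // 'com'
};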
8757
8758 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
8759 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
8760 middle = header[byteIndex + 4] << 3,
8761 highTwo = (header[byteIndex + 3] & 0x3) << 11; // parenthesized so the mask is applied before the shift; '<<' binds tighter than '&'
8762 return highTwo | middle | lowThree;
8763 };
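// Illustrative sketch (not part of the original source): the ADTS frame
// length is a 13-bit field spread across header bytes 3-5 (low 2 bits of
// byte 3, all 8 bits of byte 4, top 3 bits of byte 5). For a hand-built
// header describing a 383-byte frame: 383 = 0b0000101111111, so byte 4 is
// 383 >> 3 = 0x2F and the top three bits of byte 5 are 383 & 0x7 = 0b111.
var exampleAdtsSize = function () {
  var header = new Uint8Array([0xFF, 0xF1, 0x50, 0x80, 0x2F, 0xE0, 0xFC]);

  return parseAdtsSize(header, 0); // 383
};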
8764
8765 var parseType$1 = function parseType(header, byteIndex) {
8766 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
8767 return 'timed-metadata';
8768 } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
8769 return 'audio';
8770 }
8771
8772 return null;
8773 };
8774
8775 var parseSampleRate = function parseSampleRate(packet) {
8776 var i = 0;
8777
8778 while (i + 5 < packet.length) {
8779 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
8780 // If a valid header was not found, jump one forward and attempt to
8781 // find a valid ADTS header starting at the next byte
8782 i++;
8783 continue;
8784 }
8785
8786 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
8787 }
8788
8789 return null;
8790 };
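// Illustrative sketch (not part of the original source): the sampling
// frequency index occupies bits 2-5 of the third ADTS header byte;
// (0x50 & 0x3c) >>> 2 === 4, which maps to 44100 in
// ADTS_SAMPLING_FREQUENCIES.
var exampleSampleRate = function () {
  var packet = new Uint8Array([0xFF, 0xF1, 0x50, 0x80, 0x2F, 0xE0, 0xFC]);

  return parseSampleRate(packet); // 44100
};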
8791
8792 var parseAacTimestamp = function parseAacTimestamp(packet) {
8793 var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
8794
8795 frameStart = 10;
8796
8797 if (packet[5] & 0x40) {
8798 // advance the frame start past the extended header
8799 frameStart += 4; // header size field
8800
8801 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
8802 } // parse one or more ID3 frames
8803 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
8804
8805
8806 do {
8807 // determine the number of bytes in this frame
8808 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
8809
8810 if (frameSize < 1) {
8811 return null;
8812 }
8813
8814 frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
8815
8816 if (frameHeader === 'PRIV') {
8817 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
8818
8819 for (var i = 0; i < frame.byteLength; i++) {
8820 if (frame[i] === 0) {
8821 var owner = parseIso88591(frame, 0, i);
8822
8823 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
8824 var d = frame.subarray(i + 1);
8825 var pts = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2; // top 31 of the 33 timestamp bits
8826 pts *= 4; // shift left by 2 arithmetically; bitwise ops would truncate to 32 bits
8827 pts += d[7] & 0x03; // append the bottom 2 bits
8828 return pts;
8829 }
8830
8831 break;
8832 }
8833 }
8834 }
8835
8836 frameStart += 10; // advance past the frame header
8837
8838 frameStart += frameSize; // advance past the frame body
8839 } while (frameStart < packet.byteLength);
8840
8841 return null;
8842 };
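// Illustrative sketch (not part of the original source): the PRIV payload for
// 'com.apple.streaming.transportStreamTimestamp' carries a 33-bit MPEG-2
// timestamp, one bit wider than JavaScript's 32-bit bitwise operators allow.
// The parser above therefore assembles the top 31 bits bitwise, multiplies by
// 4, and adds the bottom 2 bits arithmetically. For a timestamp of 2^32:
var example33BitTimestamp = function () {
  var d = new Uint8Array([0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00]);
  var pts = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;

  return pts * 4 + (d[7] & 0x03); // 4294967296
};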
8843
8844 var utils = {
8845 isLikelyAacData: isLikelyAacData$2,
8846 parseId3TagSize: parseId3TagSize,
8847 parseAdtsSize: parseAdtsSize,
8848 parseType: parseType$1,
8849 parseSampleRate: parseSampleRate,
8850 parseAacTimestamp: parseAacTimestamp
8851 };
8852
8853 var _AacStream;
8854 /**
8855 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
8856 */
8857
8858
8859 _AacStream = function AacStream() {
8860 var everything = new Uint8Array(),
8861 timeStamp = 0;
8862
8863 _AacStream.prototype.init.call(this);
8864
8865 this.setTimestamp = function (timestamp) {
8866 timeStamp = timestamp;
8867 };
8868
8869 this.push = function (bytes) {
8870 var frameSize = 0,
8871 byteIndex = 0,
8872 bytesLeft,
8873 chunk,
8874 packet,
8875 tempLength; // If there are bytes remaining from the last segment, prepend them to the
8876 // bytes that were pushed in
8877
8878 if (everything.length) {
8879 tempLength = everything.length;
8880 var previousBuffer = everything; everything = new Uint8Array(bytes.byteLength + tempLength);
8881 everything.set(previousBuffer.subarray(0, tempLength)); // copy the old bytes, not the freshly zero-filled buffer
8882 everything.set(bytes, tempLength);
8883 } else {
8884 everything = bytes;
8885 }
8886
8887 while (everything.length - byteIndex >= 3) {
8888 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
8889 // Exit early because we don't have enough to parse
8890 // the ID3 tag header
8891 if (everything.length - byteIndex < 10) {
8892 break;
8893 } // check framesize
8894
8895
8896 frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
8897 // to emit a full packet
8898 // Add to byteIndex to support multiple ID3 tags in sequence
8899
8900 if (byteIndex + frameSize > everything.length) {
8901 break;
8902 }
8903
8904 chunk = {
8905 type: 'timed-metadata',
8906 data: everything.subarray(byteIndex, byteIndex + frameSize)
8907 };
8908 this.trigger('data', chunk);
8909 byteIndex += frameSize;
8910 continue;
8911 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
8912 // Exit early because we don't have enough to parse
8913 // the ADTS frame header
8914 if (everything.length - byteIndex < 7) {
8915 break;
8916 }
8917
8918 frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
8919 // to emit a full packet
8920
8921 if (byteIndex + frameSize > everything.length) {
8922 break;
8923 }
8924
8925 packet = {
8926 type: 'audio',
8927 data: everything.subarray(byteIndex, byteIndex + frameSize),
8928 pts: timeStamp,
8929 dts: timeStamp
8930 };
8931 this.trigger('data', packet);
8932 byteIndex += frameSize;
8933 continue;
8934 }
8935
8936 byteIndex++;
8937 }
8938
8939 bytesLeft = everything.length - byteIndex;
8940
8941 if (bytesLeft > 0) {
8942 everything = everything.subarray(byteIndex);
8943 } else {
8944 everything = new Uint8Array();
8945 }
8946 };
8947
8948 this.reset = function () {
8949 everything = new Uint8Array();
8950 this.trigger('reset');
8951 };
8952
8953 this.endTimeline = function () {
8954 everything = new Uint8Array();
8955 this.trigger('endedtimeline');
8956 };
8957 };
8958
8959 _AacStream.prototype = new stream();
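// Illustrative usage sketch (not part of the original source): AacStream
// emits 'timed-metadata' chunks for ID3 tags and 'audio' chunks for complete
// ADTS frames, buffering any partial frame until the next push(). flush() is
// inherited from the Stream base class and signals 'done' downstream. This
// function is defined for illustration only and is never called.
var exampleAacStreamUsage = function (rawAacBytes) {
  var aacStream = new _AacStream();

  aacStream.on('data', function (chunk) {
    // chunk.type is 'audio' or 'timed-metadata';
    // chunk.data is a Uint8Array view into the pushed bytes
  });
  aacStream.push(rawAacBytes);
  aacStream.flush();
};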
8960 var aac = _AacStream; // constants
8961
8962 var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
8963 var audioProperties = AUDIO_PROPERTIES;
8964 var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
8965 var videoProperties = VIDEO_PROPERTIES;
8966 var H264Stream = h264.H264Stream;
8967 var isLikelyAacData$1 = utils.isLikelyAacData;
8968 var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types
8969
8970 var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
8971 /**
8972 * Compare two arrays (even typed) for same-ness
8973 */
8974
8975
8976 var arrayEquals = function arrayEquals(a, b) {
8977 var i;
8978
8979 if (a.length !== b.length) {
8980 return false;
8981 } // compare the value of each element in the array
8982
8983
8984 for (i = 0; i < a.length; i++) {
8985 if (a[i] !== b[i]) {
8986 return false;
8987 }
8988 }
8989
8990 return true;
8991 };
8992
8993 var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
8994 var ptsOffsetFromDts = startPts - startDts,
8995 decodeDuration = endDts - startDts,
8996 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
8997 // however, the player time values will reflect a start from the baseMediaDecodeTime.
8998 // In order to provide relevant values for the player times, base timing info on the
8999 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
9000
9001 return {
9002 start: {
9003 dts: baseMediaDecodeTime,
9004 pts: baseMediaDecodeTime + ptsOffsetFromDts
9005 },
9006 end: {
9007 dts: baseMediaDecodeTime + decodeDuration,
9008 pts: baseMediaDecodeTime + presentationDuration
9009 },
9010 prependedContentDuration: prependedContentDuration,
9011 baseMediaDecodeTime: baseMediaDecodeTime
9012 };
9013 };
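// Illustrative sketch (not part of the original source): with a
// baseMediaDecodeTime of 90000 (one second on the 90kHz clock), a segment
// whose first frame is dts 10000/pts 13000 and whose last frame ends at
// dts 190000/pts 193000 yields start {dts: 90000, pts: 93000} and
// end {dts: 270000, pts: 273000} -- the 3000-tick pts/dts offset and the
// 180000-tick durations are preserved, just re-based.
var exampleSegmentTimingInfo = function () {
  return generateSegmentTimingInfo(90000, 10000, 13000, 190000, 193000, 0);
};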
9014 /**
9015 * Constructs a single-track, ISO BMFF media segment from AAC data
9016 * events. The output of this stream can be fed to a SourceBuffer
9017 * configured with a suitable initialization segment.
9018 * @param track {object} track metadata configuration
9019 * @param options {object} transmuxer options object
9020 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9021 * in the source; false to adjust the first segment to start at 0.
9022 */
9023
9024
9025 _AudioSegmentStream = function AudioSegmentStream(track, options) {
9026 var adtsFrames = [],
9027 sequenceNumber = 0,
9028 earliestAllowedDts = 0,
9029 audioAppendStartTs = 0,
9030 videoBaseMediaDecodeTime = Infinity;
9031 options = options || {};
9032
9033 _AudioSegmentStream.prototype.init.call(this);
9034
9035 this.push = function (data) {
9036 trackDecodeInfo.collectDtsInfo(track, data);
9037
9038 if (track) {
9039 audioProperties.forEach(function (prop) {
9040 track[prop] = data[prop];
9041 });
9042 } // buffer audio data until end() is called
9043
9044
9045 adtsFrames.push(data);
9046 };
9047
9048 this.setEarliestDts = function (earliestDts) {
9049 earliestAllowedDts = earliestDts;
9050 };
9051
9052 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
9053 videoBaseMediaDecodeTime = baseMediaDecodeTime;
9054 };
9055
9056 this.setAudioAppendStart = function (timestamp) {
9057 audioAppendStartTs = timestamp;
9058 };
9059
9060 this.flush = function () {
9061 var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
9062
9063 if (adtsFrames.length === 0) {
9064 this.trigger('done', 'AudioSegmentStream');
9065 return;
9066 }
9067
9068 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
9069 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // the amount of silence prefixed below is measured in video clock cycles rather than audio clock cycles
9070
9071 videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
9072 // samples (that is, adts frames) in the audio data
9073
9074 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
9075
9076 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
9077 adtsFrames = [];
9078 moof = mp4Generator.moof(sequenceNumber, [track]);
9079 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
9080
9081 sequenceNumber++;
9082 boxes.set(moof);
9083 boxes.set(mdat, moof.byteLength);
9084 trackDecodeInfo.clearDtsInfo(track);
9085 frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
9086 // tests) on adding the timingInfo event. However, it seems unlikely that there's a
9087 // valid use-case where an init segment/data should be triggered without associated
9088 // frames. Leaving for now, but should be looked into.
9089
9090 if (frames.length) {
9091 segmentDuration = frames.length * frameDuration;
9092 this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
9093 // frame info is in video clock cycles. Convert to match expectation of
9094 // listeners (that all timestamps will be based on video clock cycles).
9095 clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
9096 frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
9097 this.trigger('timingInfo', {
9098 start: frames[0].pts,
9099 end: frames[0].pts + segmentDuration
9100 });
9101 }
9102
9103 this.trigger('data', {
9104 track: track,
9105 boxes: boxes
9106 });
9107 this.trigger('done', 'AudioSegmentStream');
9108 };
9109
9110 this.reset = function () {
9111 trackDecodeInfo.clearDtsInfo(track);
9112 adtsFrames = [];
9113 this.trigger('reset');
9114 };
9115 };
9116
9117 _AudioSegmentStream.prototype = new stream();
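// Illustrative sketch (not part of the original source): every AAC frame
// decodes to 1024 PCM samples, so on the 90kHz MPEG-2 clock a 44100 Hz frame
// lasts ceil(90000 * 1024 / 44100) = 2090 ticks. This is the frameDuration
// that flush() above multiplies by the frame count to derive segmentDuration.
var exampleAacFrameDuration = function (samplerate) {
  return Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / samplerate); // 2090 for 44100
};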
9118 /**
9119 * Constructs a single-track, ISO BMFF media segment from H264 data
9120 * events. The output of this stream can be fed to a SourceBuffer
9121 * configured with a suitable initialization segment.
9122 * @param track {object} track metadata configuration
9123 * @param options {object} transmuxer options object
9124 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
9125 * gopsToAlignWith list when attempting to align gop pts
9126 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9127 * in the source; false to adjust the first segment to start at 0.
9128 */
9129
9130 _VideoSegmentStream = function VideoSegmentStream(track, options) {
9131 var sequenceNumber = 0,
9132 nalUnits = [],
9133 gopsToAlignWith = [],
9134 config,
9135 pps;
9136 options = options || {};
9137
9138 _VideoSegmentStream.prototype.init.call(this);
9139
9140 delete track.minPTS;
9141 this.gopCache_ = [];
9142 /**
9143 * Constructs an ISO BMFF segment given H264 nalUnits
9144 * @param {Object} nalUnit A data event representing a nalUnit
9145 * @param {String} nalUnit.nalUnitType
9146 * @param {Object} nalUnit.config Properties for a mp4 track
9147 * @param {Uint8Array} nalUnit.data The nalUnit bytes
9148 * @see lib/codecs/h264.js
9149 **/
9150
9151 this.push = function (nalUnit) {
9152 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
9153
9154 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
9155 config = nalUnit.config;
9156 track.sps = [nalUnit.data];
9157 videoProperties.forEach(function (prop) {
9158 track[prop] = config[prop];
9159 }, this);
9160 }
9161
9162 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
9163 pps = nalUnit.data;
9164 track.pps = [nalUnit.data];
9165 } // buffer video until flush() is called
9166
9167
9168 nalUnits.push(nalUnit);
9169 };
9170 /**
9171 * Pass constructed ISO BMFF track and boxes on to the
9172 * next stream in the pipeline
9173 **/
9174
9175
9176 this.flush = function () {
9177 var frames,
9178 gopForFusion,
9179 gops,
9180 moof,
9181 mdat,
9182 boxes,
9183 prependedContentDuration = 0,
9184 firstGop,
9185 lastGop; // Throw away nalUnits at the start of the byte stream until
9186 // we find the first AUD
9187
9188 while (nalUnits.length) {
9189 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
9190 break;
9191 }
9192
9193 nalUnits.shift();
9194 } // Return early if no video data has been observed
9195
9196
9197 if (nalUnits.length === 0) {
9198 this.resetStream_();
9199 this.trigger('done', 'VideoSegmentStream');
9200 return;
9201 } // Organize the raw nal-units into arrays that represent
9202 // higher-level constructs such as frames and gops
9203 // (group-of-pictures)
9204
9205
9206 frames = frameUtils.groupNalsIntoFrames(nalUnits);
9207 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
9208 // a problem since MSE (on Chrome) requires a leading keyframe.
9209 //
9210 // We have two approaches to repairing this situation:
9211 // 1) GOP-FUSION:
9212 // This is where we keep track of the GOPS (group-of-pictures)
9213 // from previous fragments and attempt to find one that we can
9214 // prepend to the current fragment in order to create a valid
9215 // fragment.
9216 // 2) KEYFRAME-PULLING:
9217 // Here we search for the first keyframe in the fragment and
9218 // throw away all the frames between the start of the fragment
9219 // and that keyframe. We then extend the duration and pull the
9220 // PTS of the keyframe forward so that it covers the time range
9221 // of the frames that were disposed of.
9222 //
9223 // #1 is far preferable to #2 (which can cause "stuttering") but
9224 // requires more things to be just right.
9225
9226 if (!gops[0][0].keyFrame) {
9227 // Search for a gop for fusion from our gopCache
9228 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
9229
9230 if (gopForFusion) {
9231 // in order to provide more accurate timing information about the segment, save
9232 // the number of seconds prepended to the original segment due to GOP fusion
9233 prependedContentDuration = gopForFusion.duration;
9234 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
9235 // new gop at the beginning
9236
9237 gops.byteLength += gopForFusion.byteLength;
9238 gops.nalCount += gopForFusion.nalCount;
9239 gops.pts = gopForFusion.pts;
9240 gops.dts = gopForFusion.dts;
9241 gops.duration += gopForFusion.duration;
9242 } else {
9243 // If we didn't find a candidate gop fall back to keyframe-pulling
9244 gops = frameUtils.extendFirstKeyFrame(gops);
9245 }
9246 } // Trim gops to align with gopsToAlignWith
9247
9248
9249 if (gopsToAlignWith.length) {
9250 var alignedGops;
9251
9252 if (options.alignGopsAtEnd) {
9253 alignedGops = this.alignGopsAtEnd_(gops);
9254 } else {
9255 alignedGops = this.alignGopsAtStart_(gops);
9256 }
9257
9258 if (!alignedGops) {
9259 // save all the nals in the last GOP into the gop cache
9260 this.gopCache_.unshift({
9261 gop: gops.pop(),
9262 pps: track.pps,
9263 sps: track.sps
9264 }); // Keep a maximum of 6 GOPs in the cache
9265
9266 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9267
9268 nalUnits = []; // return early; no gops can be aligned with the desired gopsToAlignWith
9269
9270 this.resetStream_();
9271 this.trigger('done', 'VideoSegmentStream');
9272 return;
9273 } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
9274 // when recalculated before sending off to CoalesceStream
9275
9276
9277 trackDecodeInfo.clearDtsInfo(track);
9278 gops = alignedGops;
9279 }
9280
9281 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
9282 // samples (that is, frames) in the video data
9283
9284 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
9285
9286 mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
9287 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
9288 this.trigger('processedGopsInfo', gops.map(function (gop) {
9289 return {
9290 pts: gop.pts,
9291 dts: gop.dts,
9292 byteLength: gop.byteLength
9293 };
9294 }));
9295 firstGop = gops[0];
9296 lastGop = gops[gops.length - 1];
9297 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
9298 this.trigger('timingInfo', {
9299 start: gops[0].pts,
9300 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
9301 }); // save all the nals in the last GOP into the gop cache
9302
9303 this.gopCache_.unshift({
9304 gop: gops.pop(),
9305 pps: track.pps,
9306 sps: track.sps
9307 }); // Keep a maximum of 6 GOPs in the cache
9308
9309 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9310
9311 nalUnits = [];
9312 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
9313 this.trigger('timelineStartInfo', track.timelineStartInfo);
9314 moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
9315 // throwing away hundreds of media segment fragments
9316
9317 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
9318
9319 sequenceNumber++;
9320 boxes.set(moof);
9321 boxes.set(mdat, moof.byteLength);
9322 this.trigger('data', {
9323 track: track,
9324 boxes: boxes
9325 });
9326 this.resetStream_(); // Continue with the flush process now
9327
9328 this.trigger('done', 'VideoSegmentStream');
9329 };
9330
9331 this.reset = function () {
9332 this.resetStream_();
9333 nalUnits = [];
9334 this.gopCache_.length = 0;
9335 gopsToAlignWith.length = 0;
9336 this.trigger('reset');
9337 };
9338
9339 this.resetStream_ = function () {
9340 trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
9341 // for instance, when we are rendition switching
9342
9343 config = undefined;
9344 pps = undefined;
9345 }; // Search for a candidate Gop for gop-fusion from the gop cache and
9346 // return it or return null if no good candidate was found
9347
9348
9349 this.getGopForFusion_ = function (nalUnit) {
9350 var halfSecond = 45000,
9351 // half a second on the 90kHz clock
9352 allowableOverlap = 10000,
9353 // About 3 frames @ 30fps
9354 nearestDistance = Infinity,
9355 dtsDistance,
9356 nearestGopObj,
9357 currentGop,
9358 currentGopObj,
9359 i; // Search for the GOP nearest to the beginning of this nal unit
9360
9361 for (i = 0; i < this.gopCache_.length; i++) {
9362 currentGopObj = this.gopCache_[i];
9363 currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
9364
9365 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
9366 continue;
9367 } // Reject Gops that would require a negative baseMediaDecodeTime
9368
9369
9370 if (currentGop.dts < track.timelineStartInfo.dts) {
9371 continue;
9372 } // The distance between the end of the gop and the start of the nalUnit
9373
9374
9375 dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
9376 // a half-second of the nal unit
9377
9378 if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
9379 // Always use the closest GOP we found if there is more than
9380 // one candidate
9381 if (!nearestGopObj || nearestDistance > dtsDistance) {
9382 nearestGopObj = currentGopObj;
9383 nearestDistance = dtsDistance;
9384 }
9385 }
9386 }
9387
9388 if (nearestGopObj) {
9389 return nearestGopObj.gop;
9390 }
9391
9392 return null;
9393 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
9394 // of gopsToAlignWith starting from the START of the list
9395
9396
9397 this.alignGopsAtStart_ = function (gops) {
9398 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
9399 byteLength = gops.byteLength;
9400 nalCount = gops.nalCount;
9401 duration = gops.duration;
9402 alignIndex = gopIndex = 0;
9403
9404 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
9405 align = gopsToAlignWith[alignIndex];
9406 gop = gops[gopIndex];
9407
9408 if (align.pts === gop.pts) {
9409 break;
9410 }
9411
9412 if (gop.pts > align.pts) {
9413 // this current gop starts after the current gop we want to align on, so increment
9414 // align index
9415 alignIndex++;
9416 continue;
9417 } // current gop starts before the current gop we want to align on. so increment gop
9418 // index
9419
9420
9421 gopIndex++;
9422 byteLength -= gop.byteLength;
9423 nalCount -= gop.nalCount;
9424 duration -= gop.duration;
9425 }
9426
9427 if (gopIndex === 0) {
9428 // no gops to trim
9429 return gops;
9430 }
9431
9432 if (gopIndex === gops.length) {
9433 // all gops trimmed, skip appending all gops
9434 return null;
9435 }
9436
9437 alignedGops = gops.slice(gopIndex);
9438 alignedGops.byteLength = byteLength;
9439 alignedGops.duration = duration;
9440 alignedGops.nalCount = nalCount;
9441 alignedGops.pts = alignedGops[0].pts;
9442 alignedGops.dts = alignedGops[0].dts;
9443 return alignedGops;
9444 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
9445 // of gopsToAlignWith starting from the END of the list
9446
9447
9448 this.alignGopsAtEnd_ = function (gops) {
9449 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
9450 alignIndex = gopsToAlignWith.length - 1;
9451 gopIndex = gops.length - 1;
9452 alignEndIndex = null;
9453 matchFound = false;
9454
9455 while (alignIndex >= 0 && gopIndex >= 0) {
9456 align = gopsToAlignWith[alignIndex];
9457 gop = gops[gopIndex];
9458
9459 if (align.pts === gop.pts) {
9460 matchFound = true;
9461 break;
9462 }
9463
9464 if (align.pts > gop.pts) {
9465 alignIndex--;
9466 continue;
9467 }
9468
9469 if (alignIndex === gopsToAlignWith.length - 1) {
9470 // gop.pts is greater than the last alignment candidate. If no match is found
9471 // by the end of this loop, we still want to append gops that come after this
9472 // point
9473 alignEndIndex = gopIndex;
9474 }
9475
9476 gopIndex--;
9477 }
9478
9479 if (!matchFound && alignEndIndex === null) {
9480 return null;
9481 }
9482
9483 var trimIndex;
9484
9485 if (matchFound) {
9486 trimIndex = gopIndex;
9487 } else {
9488 trimIndex = alignEndIndex;
9489 }
9490
9491 if (trimIndex === 0) {
9492 return gops;
9493 }
9494
9495 var alignedGops = gops.slice(trimIndex);
9496 var metadata = alignedGops.reduce(function (total, gop) {
9497 total.byteLength += gop.byteLength;
9498 total.duration += gop.duration;
9499 total.nalCount += gop.nalCount;
9500 return total;
9501 }, {
9502 byteLength: 0,
9503 duration: 0,
9504 nalCount: 0
9505 });
9506 alignedGops.byteLength = metadata.byteLength;
9507 alignedGops.duration = metadata.duration;
9508 alignedGops.nalCount = metadata.nalCount;
9509 alignedGops.pts = alignedGops[0].pts;
9510 alignedGops.dts = alignedGops[0].dts;
9511 return alignedGops;
9512 };
9513
9514 this.alignGopsWith = function (newGopsToAlignWith) {
9515 gopsToAlignWith = newGopsToAlignWith;
9516 };
9517 };
9518
9519 _VideoSegmentStream.prototype = new stream();
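// Illustrative usage sketch (not part of the original source): alignGopsWith()
// supplies {pts, dts} entries from previously appended gops; on the next
// flush() the incoming gops are trimmed (from the start by default, from the
// end with options.alignGopsAtEnd) so that the first retained gop shares a
// pts with one of those entries, keeping appends seam-free across renditions.
var exampleAlignGops = function (videoSegmentStream, previousGopInfo) {
  videoSegmentStream.alignGopsWith(previousGopInfo || []);
};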
9520 /**
9521 * A Stream that can combine multiple streams (i.e. audio & video)
9522 * into a single output segment for MSE. Also supports audio-only
9523 * and video-only streams.
9524 * @param options {object} transmuxer options object
9525 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9526 * in the source; false to adjust the first segment to start at media timeline start.
9527 */
9528
9529 _CoalesceStream = function CoalesceStream(options, metadataStream) {
9530 // Number of Tracks per output segment
9531 // If greater than 1, we combine multiple
9532 // tracks into a single segment
9533 this.numberOfTracks = 0;
9534 this.metadataStream = metadataStream;
9535 options = options || {};
9536
9537 if (typeof options.remux !== 'undefined') {
9538 this.remuxTracks = !!options.remux;
9539 } else {
9540 this.remuxTracks = true;
9541 }
9542
9543 if (typeof options.keepOriginalTimestamps === 'boolean') {
9544 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
9545 } else {
9546 this.keepOriginalTimestamps = false;
9547 }
9548
9549 this.pendingTracks = [];
9550 this.videoTrack = null;
9551 this.pendingBoxes = [];
9552 this.pendingCaptions = [];
9553 this.pendingMetadata = [];
9554 this.pendingBytes = 0;
9555 this.emittedTracks = 0;
9556
9557 _CoalesceStream.prototype.init.call(this); // Take output from multiple
9558
9559
9560 this.push = function (output) {
9561 // buffer incoming captions until the associated video segment
9562 // finishes
9563 if (output.text) {
9564 return this.pendingCaptions.push(output);
9565 } // buffer incoming id3 tags until the final flush
9566
9567
9568 if (output.frames) {
9569 return this.pendingMetadata.push(output);
9570 } // Add this track to the list of pending tracks and store
9571 // important information required for the construction of
9572 // the final segment
9573
9574
9575 this.pendingTracks.push(output.track);
9576 this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
9577 // We unshift audio and push video because
9578 // as of Chrome 75 when switching from
9579 // one init segment to another if the video
9580 // mdat does not appear after the audio mdat
9581 // only audio will play for the duration of our transmux.
9582
9583 if (output.track.type === 'video') {
9584 this.videoTrack = output.track;
9585 this.pendingBoxes.push(output.boxes);
9586 }
9587
9588 if (output.track.type === 'audio') {
9589 this.audioTrack = output.track;
9590 this.pendingBoxes.unshift(output.boxes);
9591 }
9592 };
9593 };
9594
9595 _CoalesceStream.prototype = new stream();
9596
9597 _CoalesceStream.prototype.flush = function (flushSource) {
9598 var offset = 0,
9599 event = {
9600 captions: [],
9601 captionStreams: {},
9602 metadata: [],
9603 info: {}
9604 },
9605 caption,
9606 id3,
9607 initSegment,
9608 timelineStartPts = 0,
9609 i;
9610
9611 if (this.pendingTracks.length < this.numberOfTracks) {
9612 if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
9613 // Return because we haven't received a flush from a data-generating
9614 // portion of the segment (meaning that we have only received metadata
9615 // or captions.)
9616 return;
9617 } else if (this.remuxTracks) {
9618 // Return until we have enough tracks from the pipeline to remux (if we
9619 // are remuxing audio and video into a single MP4)
9620 return;
9621 } else if (this.pendingTracks.length === 0) {
9622 // In the case where we receive a flush without any data having been
9623 // received we consider it an emitted track for the purposes of coalescing
9624 // `done` events.
9625 // We do this for the case where there is an audio and video track in the
9626 // segment but no audio data. (seen in several playlists with alternate
9627 // audio tracks and no audio present in the main TS segments.)
9628 this.emittedTracks++;
9629
9630 if (this.emittedTracks >= this.numberOfTracks) {
9631 this.trigger('done');
9632 this.emittedTracks = 0;
9633 }
9634
9635 return;
9636 }
9637 }
9638
9639 if (this.videoTrack) {
9640 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
9641 videoProperties.forEach(function (prop) {
9642 event.info[prop] = this.videoTrack[prop];
9643 }, this);
9644 } else if (this.audioTrack) {
9645 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
9646 audioProperties.forEach(function (prop) {
9647 event.info[prop] = this.audioTrack[prop];
9648 }, this);
9649 }
9650
9651 if (this.videoTrack || this.audioTrack) {
9652 if (this.pendingTracks.length === 1) {
9653 event.type = this.pendingTracks[0].type;
9654 } else {
9655 event.type = 'combined';
9656 }
9657
9658 this.emittedTracks += this.pendingTracks.length;
9659 initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
9660
9661 event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
9662 // and track definitions
9663
9664 event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
9665
9666 event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
9667
9668 for (i = 0; i < this.pendingBoxes.length; i++) {
9669 event.data.set(this.pendingBoxes[i], offset);
9670 offset += this.pendingBoxes[i].byteLength;
9671 } // Translate caption PTS times into second offsets to match the
9672 // video timeline for the segment, and add track info
9673
9674
9675 for (i = 0; i < this.pendingCaptions.length; i++) {
9676 caption = this.pendingCaptions[i];
9677 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
9678 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
9679 event.captionStreams[caption.stream] = true;
9680 event.captions.push(caption);
9681 } // Translate ID3 frame PTS times into second offsets to match the
9682 // video timeline for the segment
9683
9684
9685 for (i = 0; i < this.pendingMetadata.length; i++) {
9686 id3 = this.pendingMetadata[i];
9687 id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
9688 event.metadata.push(id3);
9689 } // We add this to every single emitted segment even though we only need
9690 // it for the first
9691
9692
9693 event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
9694
9695 this.pendingTracks.length = 0;
9696 this.videoTrack = null;
9697 this.pendingBoxes.length = 0;
9698 this.pendingCaptions.length = 0;
9699 this.pendingBytes = 0;
9700 this.pendingMetadata.length = 0; // Emit the built segment
9701 // We include captions and ID3 tags for backwards compatibility,
9702 // ideally we should send only video and audio in the data event
9703
9704 this.trigger('data', event); // Emit each caption to the outside world
9705 // Ideally, this would happen immediately on parsing captions,
9706 // but we need to ensure that video data is sent back first
9707 // so that caption timing can be adjusted to match video timing
9708
9709 for (i = 0; i < event.captions.length; i++) {
9710 caption = event.captions[i];
9711 this.trigger('caption', caption);
9712 } // Emit each id3 tag to the outside world
9713 // Ideally, this would happen immediately on parsing the tag,
9714 // but we need to ensure that video data is sent back first
9715 // so that ID3 frame timing can be adjusted to match video timing
9716
9717
9718 for (i = 0; i < event.metadata.length; i++) {
9719 id3 = event.metadata[i];
9720 this.trigger('id3Frame', id3);
9721 }
9722 } // Only emit `done` if all tracks have been flushed and emitted
9723
9724
9725 if (this.emittedTracks >= this.numberOfTracks) {
9726 this.trigger('done');
9727 this.emittedTracks = 0;
9728 }
9729 };
9730
9731 _CoalesceStream.prototype.setRemux = function (val) {
9732 this.remuxTracks = val;
9733 };
9734 /**
9735 * A Stream that expects MP2T binary data as input and produces
9736 * corresponding media segments, suitable for use with Media Source
9737 * Extension (MSE) implementations that support the ISO BMFF byte
9738 * stream format, like Chrome.
9739 */
9740
9741
9742 _Transmuxer = function Transmuxer(options) {
9743 var self = this,
9744 hasFlushed = true,
9745 videoTrack,
9746 audioTrack;
9747
9748 _Transmuxer.prototype.init.call(this);
9749
9750 options = options || {};
9751 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
9752 this.transmuxPipeline_ = {};
9753
9754 this.setupAacPipeline = function () {
9755 var pipeline = {};
9756 this.transmuxPipeline_ = pipeline;
9757 pipeline.type = 'aac';
9758 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
9759
9760 pipeline.aacStream = new aac();
9761 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
9762 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
9763 pipeline.adtsStream = new adts();
9764 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
9765 pipeline.headOfPipeline = pipeline.aacStream;
9766 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
9767 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
9768 pipeline.metadataStream.on('timestamp', function (frame) {
9769 pipeline.aacStream.setTimestamp(frame.timeStamp);
9770 });
9771 pipeline.aacStream.on('data', function (data) {
9772 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
9773 return;
9774 }
9775
9776 audioTrack = audioTrack || {
9777 timelineStartInfo: {
9778 baseMediaDecodeTime: self.baseMediaDecodeTime
9779 },
9780 codec: 'adts',
9781 type: 'audio'
9782 }; // hook up the audio segment stream to the first track with aac data
9783
9784 pipeline.coalesceStream.numberOfTracks++;
9785 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
9786 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
9787
9788 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
9789
9790 self.trigger('trackinfo', {
9791 hasAudio: !!audioTrack,
9792 hasVideo: !!videoTrack
9793 });
9794 }); // Re-emit any data coming from the coalesce stream to the outside world
9795
9796 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
9797
9798 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
9799 };
9800
9801 this.setupTsPipeline = function () {
9802 var pipeline = {};
9803 this.transmuxPipeline_ = pipeline;
9804 pipeline.type = 'ts';
9805 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
9806
9807 pipeline.packetStream = new m2ts_1.TransportPacketStream();
9808 pipeline.parseStream = new m2ts_1.TransportParseStream();
9809 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
9810 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
9811 pipeline.adtsStream = new adts();
9812 pipeline.h264Stream = new H264Stream();
9813 pipeline.captionStream = new m2ts_1.CaptionStream(options);
9814 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
9815 pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
9816
9817 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
9818 // demux the streams
9819
9820 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
9821 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
9822 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
9823
9824 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
9825 pipeline.elementaryStream.on('data', function (data) {
9826 var i;
9827
9828 if (data.type === 'metadata') {
9829 i = data.tracks.length; // scan the tracks listed in the metadata
9830
9831 while (i--) {
9832 if (!videoTrack && data.tracks[i].type === 'video') {
9833 videoTrack = data.tracks[i];
9834 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
9835 } else if (!audioTrack && data.tracks[i].type === 'audio') {
9836 audioTrack = data.tracks[i];
9837 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
9838 }
9839 } // hook up the video segment stream to the first track with h264 data
9840
9841
9842 if (videoTrack && !pipeline.videoSegmentStream) {
9843 pipeline.coalesceStream.numberOfTracks++;
9844 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
9845 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
9846 // When video emits timelineStartInfo data after a flush, we forward that
9847 // info to the AudioSegmentStream, if it exists, because video timeline
9848 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
9849 // because this is a particularly subtle form of timestamp alteration.
9850 if (audioTrack && !options.keepOriginalTimestamps) {
9851 audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
9852 // very earliest DTS we have seen in video because Chrome will
9853 // interpret any video track with a baseMediaDecodeTime that is
9854 // non-zero as a gap.
9855
9856 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
9857 }
9858 });
9859 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
9860 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
9861 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
9862 if (audioTrack) {
9863 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
9864 }
9865 });
9866 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
9867
9868 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
9869 }
9870
9871 if (audioTrack && !pipeline.audioSegmentStream) {
9872 // hook up the audio segment stream to the first track with aac data
9873 pipeline.coalesceStream.numberOfTracks++;
9874 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
9875 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
9876 pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
9877
9878 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
9879 } // emit pmt info
9880
9881
9882 self.trigger('trackinfo', {
9883 hasAudio: !!audioTrack,
9884 hasVideo: !!videoTrack
9885 });
9886 }
9887 }); // Re-emit any data coming from the coalesce stream to the outside world
9888
9889 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
9890 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
9891 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
9892 self.trigger('id3Frame', id3Frame);
9893 });
9894 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
9895
9896 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
9897 }; // hook up the segment streams once track metadata is delivered
9898
9899
9900 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
9901 var pipeline = this.transmuxPipeline_;
9902
9903 if (!options.keepOriginalTimestamps) {
9904 this.baseMediaDecodeTime = baseMediaDecodeTime;
9905 }
9906
9907 if (audioTrack) {
9908 audioTrack.timelineStartInfo.dts = undefined;
9909 audioTrack.timelineStartInfo.pts = undefined;
9910 trackDecodeInfo.clearDtsInfo(audioTrack);
9911
9912 if (pipeline.audioTimestampRolloverStream) {
9913 pipeline.audioTimestampRolloverStream.discontinuity();
9914 }
9915 }
9916
9917 if (videoTrack) {
9918 if (pipeline.videoSegmentStream) {
9919 pipeline.videoSegmentStream.gopCache_ = [];
9920 }
9921
9922 videoTrack.timelineStartInfo.dts = undefined;
9923 videoTrack.timelineStartInfo.pts = undefined;
9924 trackDecodeInfo.clearDtsInfo(videoTrack);
9925 pipeline.captionStream.reset();
9926 }
9927
9928 if (pipeline.timestampRolloverStream) {
9929 pipeline.timestampRolloverStream.discontinuity();
9930 }
9931 };
9932
9933 this.setAudioAppendStart = function (timestamp) {
9934 if (audioTrack) {
9935 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
9936 }
9937 };
9938
9939 this.setRemux = function (val) {
9940 var pipeline = this.transmuxPipeline_;
9941 options.remux = val;
9942
9943 if (pipeline && pipeline.coalesceStream) {
9944 pipeline.coalesceStream.setRemux(val);
9945 }
9946 };
9947
9948 this.alignGopsWith = function (gopsToAlignWith) {
9949 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
9950 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
9951 }
9952 }; // feed incoming data to the front of the parsing pipeline
9953
9954
9955 this.push = function (data) {
9956 if (hasFlushed) {
9957 var isAac = isLikelyAacData$1(data);
9958
9959 if (isAac && this.transmuxPipeline_.type !== 'aac') {
9960 this.setupAacPipeline();
9961 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
9962 this.setupTsPipeline();
9963 }
9964
9965 hasFlushed = false;
9966 }
9967
9968 this.transmuxPipeline_.headOfPipeline.push(data);
9969 }; // flush any buffered data
9970
9971
9972 this.flush = function () {
9973 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
9974
9975 this.transmuxPipeline_.headOfPipeline.flush();
9976 };
9977
9978 this.endTimeline = function () {
9979 this.transmuxPipeline_.headOfPipeline.endTimeline();
9980 };
9981
9982 this.reset = function () {
9983 if (this.transmuxPipeline_.headOfPipeline) {
9984 this.transmuxPipeline_.headOfPipeline.reset();
9985 }
9986 }; // Caption data has to be reset when seeking outside buffered range
9987
9988
9989 this.resetCaptions = function () {
9990 if (this.transmuxPipeline_.captionStream) {
9991 this.transmuxPipeline_.captionStream.reset();
9992 }
9993 };
9994 };
9995
9996 _Transmuxer.prototype = new stream();
9997 var transmuxer$1 = {
9998 Transmuxer: _Transmuxer,
9999 VideoSegmentStream: _VideoSegmentStream,
10000 AudioSegmentStream: _AudioSegmentStream,
10001 AUDIO_PROPERTIES: audioProperties,
10002 VIDEO_PROPERTIES: videoProperties,
10003 // exported for testing
10004 generateSegmentTimingInfo: generateSegmentTimingInfo
10005 };
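// Illustrative usage sketch (not part of the original source): the Transmuxer
// accepts raw MPEG2-TS or ADTS/AAC bytes and emits 'data' events whose
// payload contains an fmp4 init segment plus the moof/mdat boxes to append to
// a SourceBuffer. This function is defined for illustration only and is never
// called by the bundle.
var exampleTransmux = function (segmentBytes) {
  var muxer = new _Transmuxer({ keepOriginalTimestamps: false });

  muxer.on('data', function (event) {
    // event.initSegment and event.data are Uint8Arrays;
    // event.captions and event.metadata carry CEA-608 cues and ID3 frames
  });
  muxer.on('done', function () {
    // all tracks have been flushed and emitted
  });
  muxer.push(segmentBytes);
  muxer.flush();
};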
10006 /**
10007 * mux.js
10008 *
10009 * Copyright (c) Brightcove
10010 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10011 */
10012
10013 var codecs = {
10014 Adts: adts,
10015 h264: h264
10016 };
10017 var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
10018 /**
10019 * Constructs a single-track, ISO BMFF media segment from AAC data
10020 * events. The output of this stream can be fed to a SourceBuffer
10021 * configured with a suitable initialization segment.
10022 */
10023
10024 var AudioSegmentStream = function AudioSegmentStream(track, options) {
10025 var adtsFrames = [],
10026 sequenceNumber = 0,
10027 earliestAllowedDts = 0,
10028 audioAppendStartTs = 0,
10029 videoBaseMediaDecodeTime = Infinity,
10030 segmentStartPts = null,
10031 segmentEndPts = null;
10032 options = options || {};
10033 AudioSegmentStream.prototype.init.call(this);
10034
10035 this.push = function (data) {
10036 trackDecodeInfo.collectDtsInfo(track, data);
10037
10038 if (track) {
10039 audioProperties.forEach(function (prop) {
10040 track[prop] = data[prop];
10041 });
10042 } // buffer audio data until end() is called
10043
10044
10045 adtsFrames.push(data);
10046 };
10047
10048 this.setEarliestDts = function (earliestDts) {
10049 earliestAllowedDts = earliestDts;
10050 };
10051
10052 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
10053 videoBaseMediaDecodeTime = baseMediaDecodeTime;
10054 };
10055
10056 this.setAudioAppendStart = function (timestamp) {
10057 audioAppendStartTs = timestamp;
10058 };
10059
10060 this.processFrames_ = function () {
10061 var frames, moof, mdat, boxes, timingInfo; // return early if no audio data has been observed
10062
10063 if (adtsFrames.length === 0) {
10064 return;
10065 }
10066
10067 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
10068
10069 if (frames.length === 0) {
10070 // return early if the frames are all after the earliest allowed DTS
10071 // TODO should we clear the adtsFrames?
10072 return;
10073 }
10074
10075 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
10076 audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
10077 // samples (that is, adts frames) in the audio data
10078
10079 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
10080
10081 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
10082 adtsFrames = [];
10083 moof = mp4Generator.moof(sequenceNumber, [track]); // bump the sequence number for next time
10084
10085 sequenceNumber++;
10086 track.initSegment = mp4Generator.initSegment([track]); // it would be great to allocate this array up front instead of
10087 // throwing away hundreds of media segment fragments
10088
10089 boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
10090 boxes.set(moof);
10091 boxes.set(mdat, moof.byteLength);
10092 trackDecodeInfo.clearDtsInfo(track);
10093
10094 if (segmentStartPts === null) {
10095 segmentEndPts = segmentStartPts = frames[0].pts;
10096 }
10097
10098 segmentEndPts += frames.length * (ONE_SECOND_IN_TS * 1024 / track.samplerate);
10099 timingInfo = {
10100 start: segmentStartPts
10101 };
10102 this.trigger('timingInfo', timingInfo);
10103 this.trigger('data', {
10104 track: track,
10105 boxes: boxes
10106 });
10107 };
10108
10109 this.flush = function () {
10110 this.processFrames_(); // trigger final timing info
10111
10112 this.trigger('timingInfo', {
10113 start: segmentStartPts,
10114 end: segmentEndPts
10115 });
10116 this.resetTiming_();
10117 this.trigger('done', 'AudioSegmentStream');
10118 };
10119
10120 this.partialFlush = function () {
10121 this.processFrames_();
10122 this.trigger('partialdone', 'AudioSegmentStream');
10123 };
10124
10125 this.endTimeline = function () {
10126 this.flush();
10127 this.trigger('endedtimeline', 'AudioSegmentStream');
10128 };
10129
10130 this.resetTiming_ = function () {
10131 trackDecodeInfo.clearDtsInfo(track);
10132 segmentStartPts = null;
10133 segmentEndPts = null;
10134 };
10135
10136 this.reset = function () {
10137 this.resetTiming_();
10138 adtsFrames = [];
10139 this.trigger('reset');
10140 };
10141 };
10142
10143 AudioSegmentStream.prototype = new stream();
10144 var audioSegmentStream = AudioSegmentStream;
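// Illustrative sketch (not part of the original source): unlike the full
// transmuxer's _AudioSegmentStream above, this variant supports incremental
// output. partialFlush() emits whatever complete frames are buffered while
// keeping the segment open; flush() also emits final timing info and 'done'.
var examplePartialAudioFlush = function (segmentStream) {
  segmentStream.partialFlush(); // emit buffered frames, segment stays open
  segmentStream.flush(); // finish the segment
};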
10145
10146 var VideoSegmentStream = function VideoSegmentStream(track, options) {
10147 var sequenceNumber = 0,
10148 nalUnits = [],
10149 frameCache = [],
10150 // gopsToAlignWith = [],
10151 config,
10152 pps,
10153 segmentStartPts = null,
10154 segmentEndPts = null,
10155 gops,
10156 ensureNextFrameIsKeyFrame = true;
10157 options = options || {};
10158 VideoSegmentStream.prototype.init.call(this);
10159
10160 this.push = function (nalUnit) {
10161 trackDecodeInfo.collectDtsInfo(track, nalUnit);
10162
10163 if (typeof track.timelineStartInfo.dts === 'undefined') {
10164 track.timelineStartInfo.dts = nalUnit.dts;
10165 } // record the track config
10166
10167
10168 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
10169 config = nalUnit.config;
10170 track.sps = [nalUnit.data];
10171 videoProperties.forEach(function (prop) {
10172 track[prop] = config[prop];
10173 }, this);
10174 }
10175
10176 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
10177 pps = nalUnit.data;
10178 track.pps = [nalUnit.data];
10179 } // buffer video until flush() is called
10180
10181
10182 nalUnits.push(nalUnit);
10183 };
10184
10185 this.processNals_ = function (cacheLastFrame) {
10186 var i;
10187 nalUnits = frameCache.concat(nalUnits); // Throw away nalUnits at the start of the byte stream until
10188 // we find the first AUD
10189
10190 while (nalUnits.length) {
10191 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
10192 break;
10193 }
10194
10195 nalUnits.shift();
10196 } // Return early if no video data has been observed
10197
10198
10199 if (nalUnits.length === 0) {
10200 return;
10201 }
10202
10203 var frames = frameUtils.groupNalsIntoFrames(nalUnits);
10204
10205 if (!frames.length) {
10206 return;
10207 } // note that the frame cache may also protect us from cases where we haven't
10208 // pushed data for the entire first or last frame yet
10209
10210
10211 frameCache = frames[frames.length - 1];
10212
10213 if (cacheLastFrame) {
10214 frames.pop();
10215 frames.duration -= frameCache.duration;
10216 frames.nalCount -= frameCache.length;
10217 frames.byteLength -= frameCache.byteLength;
10218 }
10219
10220 if (!frames.length) {
10221 nalUnits = [];
10222 return;
10223 }
10224
10225 this.trigger('timelineStartInfo', track.timelineStartInfo);
10226
10227 if (ensureNextFrameIsKeyFrame) {
10228 gops = frameUtils.groupFramesIntoGops(frames);
10229
10230 if (!gops[0][0].keyFrame) {
10231 gops = frameUtils.extendFirstKeyFrame(gops);
10232
10233 if (!gops[0][0].keyFrame) {
10234 // we haven't yet gotten a key frame, so reset nal units to wait for more nal
10235 // units
10236 nalUnits = [].concat.apply([], frames).concat(frameCache);
10237 frameCache = [];
10238 return;
10239 }
10240
10241 frames = [].concat.apply([], gops);
10242 frames.duration = gops.duration;
10243 }
10244
10245 ensureNextFrameIsKeyFrame = false;
10246 }
10247
10248 if (segmentStartPts === null) {
10249 segmentStartPts = frames[0].pts;
10250 segmentEndPts = segmentStartPts;
10251 }
10252
10253 segmentEndPts += frames.duration;
10254 this.trigger('timingInfo', {
10255 start: segmentStartPts,
10256 end: segmentEndPts
10257 });
10258
10259 for (i = 0; i < frames.length; i++) {
10260 var frame = frames[i];
10261 track.samples = frameUtils.generateSampleTableForFrame(frame);
10262 var mdat = mp4Generator.mdat(frameUtils.concatenateNalDataForFrame(frame));
10263 trackDecodeInfo.clearDtsInfo(track);
10264 trackDecodeInfo.collectDtsInfo(track, frame);
10265 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
10266 var moof = mp4Generator.moof(sequenceNumber, [track]);
10267 sequenceNumber++;
10268 track.initSegment = mp4Generator.initSegment([track]);
10269 var boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
10270 boxes.set(moof);
10271 boxes.set(mdat, moof.byteLength);
10272 this.trigger('data', {
10273 track: track,
10274 boxes: boxes,
10275 sequence: sequenceNumber,
10276 videoFrameDts: frame.dts,
10277 videoFramePts: frame.pts
10278 });
10279 }
10280
10281 nalUnits = [];
10282 };
10283
10284 this.resetTimingAndConfig_ = function () {
10285 config = undefined;
10286 pps = undefined;
10287 segmentStartPts = null;
10288 segmentEndPts = null;
10289 };
10290
10291 this.partialFlush = function () {
10292 this.processNals_(true);
10293 this.trigger('partialdone', 'VideoSegmentStream');
10294 };
10295
10296 this.flush = function () {
10297 this.processNals_(false); // reset config and pps because they may differ across segments
10298 // for instance, when we are rendition switching
10299
10300 this.resetTimingAndConfig_();
10301 this.trigger('done', 'VideoSegmentStream');
10302 };
10303
10304 this.endTimeline = function () {
10305 this.flush();
10306 this.trigger('endedtimeline', 'VideoSegmentStream');
10307 };
10308
10309 this.reset = function () {
10310 this.resetTimingAndConfig_();
10311 frameCache = [];
10312 nalUnits = [];
10313 ensureNextFrameIsKeyFrame = true;
10314 this.trigger('reset');
10315 };
10316 };
10317
10318 VideoSegmentStream.prototype = new stream();
10319 var videoSegmentStream = VideoSegmentStream;
10320 var isLikelyAacData = utils.isLikelyAacData;
10321
10322 var createPipeline = function createPipeline(object) {
10323 object.prototype = new stream();
10324 object.prototype.init.call(object);
10325 return object;
10326 };
10327
10328 var tsPipeline = function tsPipeline(options) {
10329 var pipeline = {
10330 type: 'ts',
10331 tracks: {
10332 audio: null,
10333 video: null
10334 },
10335 packet: new m2ts_1.TransportPacketStream(),
10336 parse: new m2ts_1.TransportParseStream(),
10337 elementary: new m2ts_1.ElementaryStream(),
10338 timestampRollover: new m2ts_1.TimestampRolloverStream(),
10339 adts: new codecs.Adts(),
10340 h264: new codecs.h264.H264Stream(),
10341 captionStream: new m2ts_1.CaptionStream(options),
10342 metadataStream: new m2ts_1.MetadataStream()
10343 };
10344 pipeline.headOfPipeline = pipeline.packet; // Transport Stream
10345
10346 pipeline.packet.pipe(pipeline.parse).pipe(pipeline.elementary).pipe(pipeline.timestampRollover); // H264
10347
10348 pipeline.timestampRollover.pipe(pipeline.h264); // Hook up CEA-608/708 caption stream
10349
10350 pipeline.h264.pipe(pipeline.captionStream);
10351 pipeline.timestampRollover.pipe(pipeline.metadataStream); // ADTS
10352
10353 pipeline.timestampRollover.pipe(pipeline.adts);
10354 pipeline.elementary.on('data', function (data) {
10355 if (data.type !== 'metadata') {
10356 return;
10357 }
10358
10359 for (var i = 0; i < data.tracks.length; i++) {
10360 if (!pipeline.tracks[data.tracks[i].type]) {
10361 pipeline.tracks[data.tracks[i].type] = data.tracks[i];
10362 pipeline.tracks[data.tracks[i].type].timelineStartInfo.baseMediaDecodeTime = options.baseMediaDecodeTime;
10363 }
10364 }
10365
10366 if (pipeline.tracks.video && !pipeline.videoSegmentStream) {
10367 pipeline.videoSegmentStream = new videoSegmentStream(pipeline.tracks.video, options);
10368 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
10369 if (pipeline.tracks.audio && !options.keepOriginalTimestamps) {
10370 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - options.baseMediaDecodeTime);
10371 }
10372 });
10373 pipeline.videoSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'videoTimingInfo'));
10374 pipeline.videoSegmentStream.on('data', function (data) {
10375 pipeline.trigger('data', {
10376 type: 'video',
10377 data: data
10378 });
10379 });
10380 pipeline.videoSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
10381 pipeline.videoSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
10382 pipeline.videoSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));
10383 pipeline.h264.pipe(pipeline.videoSegmentStream);
10384 }
10385
10386 if (pipeline.tracks.audio && !pipeline.audioSegmentStream) {
10387 pipeline.audioSegmentStream = new audioSegmentStream(pipeline.tracks.audio, options);
10388 pipeline.audioSegmentStream.on('data', function (data) {
10389 pipeline.trigger('data', {
10390 type: 'audio',
10391 data: data
10392 });
10393 });
10394 pipeline.audioSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
10395 pipeline.audioSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
10396 pipeline.audioSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));
10397 pipeline.audioSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'audioTimingInfo'));
10398 pipeline.adts.pipe(pipeline.audioSegmentStream);
10399 } // emit pmt info
10400
10401
10402 pipeline.trigger('trackinfo', {
10403 hasAudio: !!pipeline.tracks.audio,
10404 hasVideo: !!pipeline.tracks.video
10405 });
10406 });
10407 pipeline.captionStream.on('data', function (caption) {
10408 var timelineStartPts;
10409
10410 if (pipeline.tracks.video) {
10411 timelineStartPts = pipeline.tracks.video.timelineStartInfo.pts || 0;
10412 } else {
10413 // This will only happen if we encounter caption packets before
10414 // video data in a segment. This is an unusual/unlikely scenario,
10415 // so we assume the timeline starts at zero for now.
10416 timelineStartPts = 0;
10417 } // Translate caption PTS times into second offsets into the
10418 // video timeline for the segment
10419
10420
10421 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, options.keepOriginalTimestamps);
10422 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, options.keepOriginalTimestamps);
10423 pipeline.trigger('caption', caption);
10424 });
10425 pipeline = createPipeline(pipeline);
10426 pipeline.metadataStream.on('data', pipeline.trigger.bind(pipeline, 'id3Frame'));
10427 return pipeline;
10428 };
10429
10430 var aacPipeline = function aacPipeline(options) {
10431 var pipeline = {
10432 type: 'aac',
10433 tracks: {
10434 audio: null
10435 },
10436 metadataStream: new m2ts_1.MetadataStream(),
10437 aacStream: new aac(),
10438 audioRollover: new m2ts_1.TimestampRolloverStream('audio'),
10439 timedMetadataRollover: new m2ts_1.TimestampRolloverStream('timed-metadata'),
10440 adtsStream: new adts(true)
10441 }; // set up the parsing pipeline
10442
10443 pipeline.headOfPipeline = pipeline.aacStream;
10444 pipeline.aacStream.pipe(pipeline.audioRollover).pipe(pipeline.adtsStream);
10445 pipeline.aacStream.pipe(pipeline.timedMetadataRollover).pipe(pipeline.metadataStream);
10446 pipeline.metadataStream.on('timestamp', function (frame) {
10447 pipeline.aacStream.setTimestamp(frame.timeStamp);
10448 });
10449 pipeline.aacStream.on('data', function (data) {
10450 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
10451 return;
10452 }
10453
10454 pipeline.tracks.audio = pipeline.tracks.audio || {
10455 timelineStartInfo: {
10456 baseMediaDecodeTime: options.baseMediaDecodeTime
10457 },
10458 codec: 'adts',
10459 type: 'audio'
10460 }; // hook up the audio segment stream to the first track with aac data
10461
10462 pipeline.audioSegmentStream = new audioSegmentStream(pipeline.tracks.audio, options);
10463 pipeline.audioSegmentStream.on('data', function (data) {
10464 pipeline.trigger('data', {
10465 type: 'audio',
10466 data: data
10467 });
10468 });
10469 pipeline.audioSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
10470 pipeline.audioSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
10471 pipeline.audioSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));
10472 pipeline.audioSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'audioTimingInfo')); // Set up the final part of the audio pipeline
10473
10474 pipeline.adtsStream.pipe(pipeline.audioSegmentStream);
10475 pipeline.trigger('trackinfo', {
10476 hasAudio: !!pipeline.tracks.audio,
10477 hasVideo: !!pipeline.tracks.video
10478 });
10479 }); // set the pipeline up as a stream before binding to get access to the trigger function
10480
10481 pipeline = createPipeline(pipeline);
10482 pipeline.metadataStream.on('data', pipeline.trigger.bind(pipeline, 'id3Frame'));
10483 return pipeline;
10484 };
10485
10486 var setupPipelineListeners = function setupPipelineListeners(pipeline, transmuxer) {
10487 pipeline.on('data', transmuxer.trigger.bind(transmuxer, 'data'));
10488 pipeline.on('done', transmuxer.trigger.bind(transmuxer, 'done'));
10489 pipeline.on('partialdone', transmuxer.trigger.bind(transmuxer, 'partialdone'));
10490 pipeline.on('endedtimeline', transmuxer.trigger.bind(transmuxer, 'endedtimeline'));
10491 pipeline.on('audioTimingInfo', transmuxer.trigger.bind(transmuxer, 'audioTimingInfo'));
10492 pipeline.on('videoTimingInfo', transmuxer.trigger.bind(transmuxer, 'videoTimingInfo'));
10493 pipeline.on('trackinfo', transmuxer.trigger.bind(transmuxer, 'trackinfo'));
10494 pipeline.on('id3Frame', function (event) {
10495 // add this to every single emitted segment even though it's only needed for the first
10496 event.dispatchType = pipeline.metadataStream.dispatchType; // keep original time, can be adjusted if needed at a higher level
10497
10498 event.cueTime = clock.videoTsToSeconds(event.pts);
10499 transmuxer.trigger('id3Frame', event);
10500 });
10501 pipeline.on('caption', function (event) {
10502 transmuxer.trigger('caption', event);
10503 });
10504 };
10505
10506 var Transmuxer = function Transmuxer(options) {
10507 var pipeline = null,
10508 hasFlushed = true;
10509 options = options || {};
10510 Transmuxer.prototype.init.call(this);
10511 options.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
10512
10513 this.push = function (bytes) {
10514 if (hasFlushed) {
10515 var isAac = isLikelyAacData(bytes);
10516
10517 if (isAac && (!pipeline || pipeline.type !== 'aac')) {
10518 pipeline = aacPipeline(options);
10519 setupPipelineListeners(pipeline, this);
10520 } else if (!isAac && (!pipeline || pipeline.type !== 'ts')) {
10521 pipeline = tsPipeline(options);
10522 setupPipelineListeners(pipeline, this);
10523 }
10524
10525 hasFlushed = false;
10526 }
10527
10528 pipeline.headOfPipeline.push(bytes);
10529 };
10530
10531 this.flush = function () {
10532 if (!pipeline) {
10533 return;
10534 }
10535
10536 hasFlushed = true;
10537 pipeline.headOfPipeline.flush();
10538 };
10539
10540 this.partialFlush = function () {
10541 if (!pipeline) {
10542 return;
10543 }
10544
10545 pipeline.headOfPipeline.partialFlush();
10546 };
10547
10548 this.endTimeline = function () {
10549 if (!pipeline) {
10550 return;
10551 }
10552
10553 pipeline.headOfPipeline.endTimeline();
10554 };
10555
10556 this.reset = function () {
10557 if (!pipeline) {
10558 return;
10559 }
10560
10561 pipeline.headOfPipeline.reset();
10562 };
10563
10564 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
10565 if (!options.keepOriginalTimestamps) {
10566 options.baseMediaDecodeTime = baseMediaDecodeTime;
10567 }
10568
10569 if (!pipeline) {
10570 return;
10571 }
10572
10573 if (pipeline.tracks.audio) {
10574 pipeline.tracks.audio.timelineStartInfo.dts = undefined;
10575 pipeline.tracks.audio.timelineStartInfo.pts = undefined;
10576 trackDecodeInfo.clearDtsInfo(pipeline.tracks.audio);
10577
10578 if (pipeline.audioRollover) {
10579 pipeline.audioRollover.discontinuity();
10580 }
10581 }
10582
10583 if (pipeline.tracks.video) {
10584 if (pipeline.videoSegmentStream) {
10585 pipeline.videoSegmentStream.gopCache_ = [];
10586 }
10587
10588 pipeline.tracks.video.timelineStartInfo.dts = undefined;
10589 pipeline.tracks.video.timelineStartInfo.pts = undefined;
10590 trackDecodeInfo.clearDtsInfo(pipeline.tracks.video); // pipeline.captionStream.reset();
10591 }
10592
10593 if (pipeline.timestampRollover) {
10594 pipeline.timestampRollover.discontinuity();
10595 }
10596 };
10597
10598 this.setRemux = function (val) {
10599 options.remux = val;
10600
10601 if (pipeline && pipeline.coalesceStream) {
10602 pipeline.coalesceStream.setRemux(val);
10603 }
10604 };
10605
10606 this.setAudioAppendStart = function (audioAppendStart) {
10607 if (!pipeline || !pipeline.tracks.audio || !pipeline.audioSegmentStream) {
10608 return;
10609 }
10610
10611 pipeline.audioSegmentStream.setAudioAppendStart(audioAppendStart);
10612 }; // TODO GOP alignment support
10613 // Support may be a bit trickier than with full segment appends, as GOPs may be split
10614 // and processed in a more granular fashion
10615
10616
10617 this.alignGopsWith = function (gopsToAlignWith) {
10618 return;
10619 };
10620 };
10621
10622 Transmuxer.prototype = new stream();
10623 var transmuxer = Transmuxer;
10624 /**
10625 * mux.js
10626 *
10627 * Copyright (c) Brightcove
10628 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10629 */
10630
10631 var toUnsigned$2 = function toUnsigned(value) {
10632 return value >>> 0;
10633 };
10634
10635 var toHexString = function toHexString(value) {
10636 return ('00' + value.toString(16)).slice(-2);
10637 };
10638
10639 var bin = {
10640 toUnsigned: toUnsigned$2,
10641 toHexString: toHexString
10642 };
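// Illustrative sketch, not part of the original bundle and never invoked:
// the unsigned right shift coerces negative 32-bit results of bitwise math
// back to their unsigned interpretation, e.g. box sizes with the high bit set.
var exampleToUnsigned = function () {
  return toUnsigned$2(0x80000000 | 0); // 2147483648 rather than -2147483648
};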
10643
10644 var parseType = function parseType(buffer) {
10645 var result = '';
10646 result += String.fromCharCode(buffer[0]);
10647 result += String.fromCharCode(buffer[1]);
10648 result += String.fromCharCode(buffer[2]);
10649 result += String.fromCharCode(buffer[3]);
10650 return result;
10651 };
10652
10653 var parseType_1 = parseType;
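// Illustrative sketch, not part of the original bundle and never invoked:
// a box type is just four ASCII bytes, so [0x6d, 0x6f, 0x6f, 0x66] parses to
// the string 'moof'.
var exampleParseType = function () {
  return parseType_1(new Uint8Array([0x6d, 0x6f, 0x6f, 0x66])); // 'moof'
};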
10654 var toUnsigned$1 = bin.toUnsigned;
10655
10656 var findBox = function findBox(data, path) {
10657 var results = [],
10658 i,
10659 size,
10660 type,
10661 end,
10662 subresults;
10663
10664 if (!path.length) {
10665 // short-circuit the search for empty paths
10666 return null;
10667 }
10668
10669 for (i = 0; i < data.byteLength;) {
10670 size = toUnsigned$1(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
10671 type = parseType_1(data.subarray(i + 4, i + 8));
10672 end = size > 1 ? i + size : data.byteLength;
10673
10674 if (type === path[0]) {
10675 if (path.length === 1) {
10676 // this is the end of the path and we've found the box we were
10677 // looking for
10678 results.push(data.subarray(i + 8, end));
10679 } else {
10680 // recursively search for the next box along the path
10681 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
10682
10683 if (subresults.length) {
10684 results = results.concat(subresults);
10685 }
10686 }
10687 }
10688
10689 i = end;
10690 } // we've finished searching all of data
10691
10692
10693 return results;
10694 };
10695
10696 var findBox_1 = findBox;
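// Illustrative sketch, not part of the original bundle and never invoked:
// findBox walks a path of box types and returns the payload (the bytes after
// the 8-byte size/type header) of every match. Given a complete fmp4 segment,
// this collects the payload of each 'traf' nested inside each 'moof'.
// `segmentBytes` is a hypothetical Uint8Array of segment data.
var exampleFindBoxUsage = function (segmentBytes) {
  return findBox_1(segmentBytes, ['moof', 'traf']); // Array of Uint8Arrays
};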
10697 var toUnsigned = bin.toUnsigned;
10698
10699 var tfdt = function tfdt(data) {
10700 var result = {
10701 version: data[0],
10702 flags: new Uint8Array(data.subarray(1, 4)),
10703 baseMediaDecodeTime: toUnsigned(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7])
10704 };
10705
10706 if (result.version === 1) {
10707 result.baseMediaDecodeTime *= Math.pow(2, 32);
10708 result.baseMediaDecodeTime += toUnsigned(data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11]);
10709 }
10710
10711 return result;
10712 };
10713
10714 var parseTfdt = tfdt;
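// Illustrative sketch, not part of the original bundle and never invoked:
// a version-1 tfdt carries a 64-bit baseMediaDecodeTime, read as two 32-bit
// words because JavaScript bitwise math is limited to 32 bits. The byte
// values below are made up.
var exampleParseTfdt = function () {
  var payload = new Uint8Array([
    0x01, // version 1
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x01, // upper 32 bits
    0x00, 0x00, 0x00, 0x08 // lower 32 bits
  ]);
  return parseTfdt(payload).baseMediaDecodeTime; // 1 * 2^32 + 8 === 4294967304
};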
10715
10716 var parseSampleFlags = function parseSampleFlags(flags) {
10717 return {
10718 isLeading: (flags[0] & 0x0c) >>> 2,
10719 dependsOn: flags[0] & 0x03,
10720 isDependedOn: (flags[1] & 0xc0) >>> 6,
10721 hasRedundancy: (flags[1] & 0x30) >>> 4,
10722 paddingValue: (flags[1] & 0x0e) >>> 1,
10723 isNonSyncSample: flags[1] & 0x01,
10724 degradationPriority: flags[2] << 8 | flags[3]
10725 };
10726 };
10727
10728 var parseSampleFlags_1 = parseSampleFlags;
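// Illustrative sketch, not part of the original bundle and never invoked:
// the four flag bytes pack the ISO-BMFF sample flags fields. The made-up
// bytes below decode to dependsOn === 1 and isNonSyncSample === 1, i.e. a
// typical non-keyframe.
var exampleSampleFlags = function () {
  return parseSampleFlags_1(new Uint8Array([0x01, 0x01, 0x00, 0x00]));
};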
10729
10730 var trun = function trun(data) {
10731 var result = {
10732 version: data[0],
10733 flags: new Uint8Array(data.subarray(1, 4)),
10734 samples: []
10735 },
10736 view = new DataView(data.buffer, data.byteOffset, data.byteLength),
10737 // Flag interpretation
10738 dataOffsetPresent = result.flags[2] & 0x01,
10739 // compare with 2nd byte of 0x1
10740 firstSampleFlagsPresent = result.flags[2] & 0x04,
10741 // compare with 2nd byte of 0x4
10742 sampleDurationPresent = result.flags[1] & 0x01,
10743 // compare with 2nd byte of 0x100
10744 sampleSizePresent = result.flags[1] & 0x02,
10745 // compare with 2nd byte of 0x200
10746 sampleFlagsPresent = result.flags[1] & 0x04,
10747 // compare with 2nd byte of 0x400
10748 sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
10749 // compare with 2nd byte of 0x800
10750 sampleCount = view.getUint32(4),
10751 offset = 8,
10752 sample;
10753
10754 if (dataOffsetPresent) {
10755 // 32 bit signed integer
10756 result.dataOffset = view.getInt32(offset);
10757 offset += 4;
10758 } // Overrides the flags for the first sample only. The order of
10759 // optional values will be: duration, size, compositionTimeOffset
10760
10761
10762 if (firstSampleFlagsPresent && sampleCount) {
10763 sample = {
10764 flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
10765 };
10766 offset += 4;
10767
10768 if (sampleDurationPresent) {
10769 sample.duration = view.getUint32(offset);
10770 offset += 4;
10771 }
10772
10773 if (sampleSizePresent) {
10774 sample.size = view.getUint32(offset);
10775 offset += 4;
10776 }
10777
10778 if (sampleCompositionTimeOffsetPresent) {
10779 if (result.version === 1) {
10780 sample.compositionTimeOffset = view.getInt32(offset);
10781 } else {
10782 sample.compositionTimeOffset = view.getUint32(offset);
10783 }
10784
10785 offset += 4;
10786 }
10787
10788 result.samples.push(sample);
10789 sampleCount--;
10790 }
10791
10792 while (sampleCount--) {
10793 sample = {};
10794
10795 if (sampleDurationPresent) {
10796 sample.duration = view.getUint32(offset);
10797 offset += 4;
10798 }
10799
10800 if (sampleSizePresent) {
10801 sample.size = view.getUint32(offset);
10802 offset += 4;
10803 }
10804
10805 if (sampleFlagsPresent) {
10806 sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
10807 offset += 4;
10808 }
10809
10810 if (sampleCompositionTimeOffsetPresent) {
10811 if (result.version === 1) {
10812 sample.compositionTimeOffset = view.getInt32(offset);
10813 } else {
10814 sample.compositionTimeOffset = view.getUint32(offset);
10815 }
10816
10817 offset += 4;
10818 }
10819
10820 result.samples.push(sample);
10821 }
10822
10823 return result;
10824 };
10825
10826 var parseTrun = trun;
10827
10828 var tfhd = function tfhd(data) {
10829 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
10830 result = {
10831 version: data[0],
10832 flags: new Uint8Array(data.subarray(1, 4)),
10833 trackId: view.getUint32(4)
10834 },
10835 baseDataOffsetPresent = result.flags[2] & 0x01,
10836 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
10837 defaultSampleDurationPresent = result.flags[2] & 0x08,
10838 defaultSampleSizePresent = result.flags[2] & 0x10,
10839 defaultSampleFlagsPresent = result.flags[2] & 0x20,
10840 durationIsEmpty = result.flags[0] & 0x010000,
10841 defaultBaseIsMoof = result.flags[0] & 0x020000,
10842 i;
10843 i = 8;
10844
10845 if (baseDataOffsetPresent) {
10846 i += 4; // truncate top 4 bytes
10847 // FIXME: should we read the full 64 bits?
10848
10849 result.baseDataOffset = view.getUint32(12);
10850 i += 4;
10851 }
10852
10853 if (sampleDescriptionIndexPresent) {
10854 result.sampleDescriptionIndex = view.getUint32(i);
10855 i += 4;
10856 }
10857
10858 if (defaultSampleDurationPresent) {
10859 result.defaultSampleDuration = view.getUint32(i);
10860 i += 4;
10861 }
10862
10863 if (defaultSampleSizePresent) {
10864 result.defaultSampleSize = view.getUint32(i);
10865 i += 4;
10866 }
10867
10868 if (defaultSampleFlagsPresent) {
10869 result.defaultSampleFlags = view.getUint32(i);
10870 }
10871
10872 if (durationIsEmpty) {
10873 result.durationIsEmpty = true;
10874 }
10875
10876 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
10877 result.baseDataOffsetIsMoof = true;
10878 }
10879
10880 return result;
10881 };
10882
10883 var parseTfhd = tfhd;
10884 var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
10885 var CaptionStream = captionStream.CaptionStream;
10886 /**
10887 * Maps an offset in the mdat to a sample based on the size of the samples.
10888 * Assumes that `parseSamples` has been called first.
10889 *
10890 * @param {Number} offset - The offset into the mdat
10891 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
10892 * @return {?Object} The matching sample, or null if no match was found.
10893 *
10894 * @see ISO-BMFF-12/2015, Section 8.8.8
10895 **/
10896
10897 var mapToSample = function mapToSample(offset, samples) {
10898 var approximateOffset = offset;
10899
10900 for (var i = 0; i < samples.length; i++) {
10901 var sample = samples[i];
10902
10903 if (approximateOffset < sample.size) {
10904 return sample;
10905 }
10906
10907 approximateOffset -= sample.size;
10908 }
10909
10910 return null;
10911 };
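// Illustrative sketch, not part of the original bundle and never invoked:
// with samples of sizes [100, 50, 25], an offset of 120 into the mdat falls
// 20 bytes into the second sample, so that sample is returned; any offset at
// or past the total size (175) yields null.
var exampleMapToSample = function () {
  return mapToSample(120, [{ size: 100 }, { size: 50 }, { size: 25 }]);
};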
10912 /**
10913 * Finds SEI nal units contained in a Media Data Box.
10914 * Assumes that `parseSamples` has been called first.
10915 *
10916 * @param {Uint8Array} avcStream - The bytes of the mdat
10917 * @param {Object[]} samples - The samples parsed out by `parseSamples`
10918 * @param {Number} trackId - The trackId of this video track
10919 * @return {Object[]} seiNals - the parsed SEI NALUs found.
10920 * The contents of the seiNal should match what is expected by
10921 * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
10922 *
10923 * @see ISO-BMFF-12/2015, Section 8.1.1
10924 * @see Rec. ITU-T H.264, 7.3.2.3.1
10925 **/
10926
10927
10928 var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
10929 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
10930 result = [],
10931 seiNal,
10932 i,
10933 length,
10934 lastMatchedSample;
10935
10936 for (i = 0; i + 4 < avcStream.length; i += length) {
10937 length = avcView.getUint32(i);
10938 i += 4; // Bail if this doesn't appear to be an H264 stream
10939
10940 if (length <= 0) {
10941 continue;
10942 }
10943
10944 switch (avcStream[i] & 0x1F) {
10945 case 0x06:
10946 var data = avcStream.subarray(i + 1, i + 1 + length);
10947 var matchingSample = mapToSample(i, samples);
10948 seiNal = {
10949 nalUnitType: 'sei_rbsp',
10950 size: length,
10951 data: data,
10952 escapedRBSP: discardEmulationPreventionBytes(data),
10953 trackId: trackId
10954 };
10955
10956 if (matchingSample) {
10957 seiNal.pts = matchingSample.pts;
10958 seiNal.dts = matchingSample.dts;
10959 lastMatchedSample = matchingSample;
10960 } else if (lastMatchedSample) {
10961 // If a matching sample cannot be found, use the last
10962 // sample's values as they should be as close as possible
10963 seiNal.pts = lastMatchedSample.pts;
10964 seiNal.dts = lastMatchedSample.dts;
10965 } else {
10966 // eslint-disable-next-line no-console
10967 console.log("We've encountered a nal unit without data. See mux.js#233.");
10968 break;
10969 }
10970
10971 result.push(seiNal);
10972 break;
10973 }
10974 }
10975
10976 return result;
10977 };
10978 /**
10979 * Parses sample information out of Track Run Boxes and calculates
10980 * the absolute presentation and decode timestamps of each sample.
10981 *
10982 * @param {Array<Uint8Array>} truns - The Track Run ('trun') boxes to be parsed
10983 * @param {Number} baseMediaDecodeTime - base media decode time from the tfdt
10984 * @see ISO-BMFF-12/2015, Section 8.8.12
10985 * @param {Object} tfhd - The parsed Track Fragment Header
10986 * @see inspect.parseTfhd
10987 * @return {Object[]} the parsed samples
10988 *
10989 * @see ISO-BMFF-12/2015, Section 8.8.8
10990 **/
10991
10992
10993 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
10994 var currentDts = baseMediaDecodeTime;
10995 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
10996 var defaultSampleSize = tfhd.defaultSampleSize || 0;
10997 var trackId = tfhd.trackId;
10998 var allSamples = [];
10999 truns.forEach(function (trun) {
11000 // Note: We currently do not parse the sample table as well
11001 // as the trun. It's possible some sources will require this.
11002 // moov > trak > mdia > minf > stbl
11003 var trackRun = parseTrun(trun);
11004 var samples = trackRun.samples;
11005 samples.forEach(function (sample) {
11006 if (sample.duration === undefined) {
11007 sample.duration = defaultSampleDuration;
11008 }
11009
11010 if (sample.size === undefined) {
11011 sample.size = defaultSampleSize;
11012 }
11013
11014 sample.trackId = trackId;
11015 sample.dts = currentDts;
11016
11017 if (sample.compositionTimeOffset === undefined) {
11018 sample.compositionTimeOffset = 0;
11019 }
11020
11021 sample.pts = currentDts + sample.compositionTimeOffset;
11022 currentDts += sample.duration;
11023 });
11024 allSamples = allSamples.concat(samples);
11025 });
11026 return allSamples;
11027 };
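// Illustrative sketch, not part of the original bundle and never invoked:
// parseSamples accumulates decode times in order, so a baseMediaDecodeTime of
// 90000 and per-sample durations of 3000 produce dts values 90000, 93000,
// 96000, ..., with pts = dts + compositionTimeOffset. A reduced model of
// that arithmetic:
var exampleSampleTiming = function (baseMediaDecodeTime, durations) {
  var dts = baseMediaDecodeTime;
  return durations.map(function (duration) {
    var sample = { dts: dts, pts: dts }; // pts would add compositionTimeOffset
    dts += duration;
    return sample;
  });
};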
11028 /**
11029 * Parses out caption nals from an FMP4 segment's video tracks.
11030 *
11031 * @param {Uint8Array} segment - The bytes of a single segment
11032 * @param {Number} videoTrackId - The trackId of a video track in the segment
11033 * @return {Object.<Number, Object[]>} A mapping of video trackId to
11034 * a list of seiNals found in that track
11035 **/
11036
11037
11038 var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
11039 // To get the samples
11040 var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
11041
11042 var mdats = findBox_1(segment, ['mdat']);
11043 var captionNals = {};
11044 var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
11045
11046 mdats.forEach(function (mdat, index) {
11047 var matchingTraf = trafs[index];
11048 mdatTrafPairs.push({
11049 mdat: mdat,
11050 traf: matchingTraf
11051 });
11052 });
11053 mdatTrafPairs.forEach(function (pair) {
11054 var mdat = pair.mdat;
11055 var traf = pair.traf;
11056 var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
11057
11058 var headerInfo = parseTfhd(tfhd[0]);
11059 var trackId = headerInfo.trackId;
11060 var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
11061
11062 var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
11063 var truns = findBox_1(traf, ['trun']);
11064 var samples;
11065 var seiNals; // Only parse video data for the chosen video track
11066
11067 if (videoTrackId === trackId && truns.length > 0) {
11068 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
11069 seiNals = findSeiNals(mdat, samples, trackId);
11070
11071 if (!captionNals[trackId]) {
11072 captionNals[trackId] = [];
11073 }
11074
11075 captionNals[trackId] = captionNals[trackId].concat(seiNals);
11076 }
11077 });
11078 return captionNals;
11079 };
11080 /**
11081 * Parses out inband captions from an MP4 container and returns
11082 * caption objects that can be used by WebVTT and the TextTrack API.
11083 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
11084 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
11085 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
11086 *
11087 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
11088 * @param {Number} trackId - The id of the video track to parse
11089 * @param {Number} timescale - The timescale for the video track from the init segment
11090 *
11091 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
11092 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
11093 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
11094 * @return {String} parsedCaptions[].text - The visible content of the caption
11095 **/
11096
11097
11098 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
11099 var seiNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
11100
11101 if (trackId === null) {
11102 return null;
11103 }
11104
11105 seiNals = parseCaptionNals(segment, trackId);
11106 return {
11107 seiNals: seiNals[trackId],
11108 timescale: timescale
11109 };
11110 };
11111 /**
11112 * Converts SEI NALUs into captions that can be used by video.js
11113 **/
11114
11115
11116 var CaptionParser = function CaptionParser() {
11117 var isInitialized = false;
11118 var captionStream; // Stores segments seen before trackId and timescale are set
11119
11120 var segmentCache; // Stores video track ID of the track being parsed
11121
11122 var trackId; // Stores the timescale of the track being parsed
11123
11124 var timescale; // Stores captions parsed so far
11125
11126 var parsedCaptions; // Stores whether we are receiving partial data or not
11127
11128 var parsingPartial;
11129 /**
11130 * A method to indicate whether a CaptionParser has been initialized
11131 * @returns {Boolean}
11132 **/
11133
11134 this.isInitialized = function () {
11135 return isInitialized;
11136 };
11137 /**
11138 * Initializes the underlying CaptionStream, SEI NAL parsing
11139 * and management, and caption collection
11140 **/
11141
11142
11143 this.init = function (options) {
11144 captionStream = new CaptionStream();
11145 isInitialized = true;
11146 parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
11147
11148 captionStream.on('data', function (event) {
11149 // Convert to seconds in the source's timescale
11150 event.startTime = event.startPts / timescale;
11151 event.endTime = event.endPts / timescale;
11152 parsedCaptions.captions.push(event);
11153 parsedCaptions.captionStreams[event.stream] = true;
11154 });
11155 };
11156 /**
11157 * Determines if a new video track will be selected
11158 * or if the timescale changed
11159 * @return {Boolean}
11160 **/
11161
11162
11163 this.isNewInit = function (videoTrackIds, timescales) {
11164 if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
11165 return false;
11166 }
11167
11168 return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
11169 };
11170 /**
11171 * Parses out SEI captions and interacts with underlying
11172 * CaptionStream to return dispatched captions
11173 *
11174 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
11175 * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
11176 * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
11177 * @see parseEmbeddedCaptions
11178 * @see m2ts/caption-stream.js
11179 **/
11180
11181
11182 this.parse = function (segment, videoTrackIds, timescales) {
11183 var parsedData;
11184
11185 if (!this.isInitialized()) {
11186 return null; // This is not likely to be a video segment
11187 } else if (!videoTrackIds || !timescales) {
11188 return null;
11189 } else if (this.isNewInit(videoTrackIds, timescales)) {
11190 // Use the first video track only as there is no
11191 // mechanism to switch to other video tracks
11192 trackId = videoTrackIds[0];
11193 timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
11194 // data until we have one.
11195 // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
11196 } else if (trackId === null || !timescale) {
11197 segmentCache.push(segment);
11198 return null;
11199 } // Now that a timescale and trackId are set, parse cached segments
11200
11201
11202 while (segmentCache.length > 0) {
11203 var cachedSegment = segmentCache.shift();
11204 this.parse(cachedSegment, videoTrackIds, timescales);
11205 }
11206
11207 parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
11208
11209 if (parsedData === null || !parsedData.seiNals) {
11210 return null;
11211 }
11212
11213 this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
11214
11215 this.flushStream();
11216 return parsedCaptions;
11217 };
11218 /**
11219 * Pushes SEI NALUs onto CaptionStream
11220 * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
11221 * Assumes that `parseCaptionNals` has been called first
11222 * @see m2ts/caption-stream.js
11223 **/
11224
11225
11226 this.pushNals = function (nals) {
11227 if (!this.isInitialized() || !nals || nals.length === 0) {
11228 return null;
11229 }
11230
11231 nals.forEach(function (nal) {
11232 captionStream.push(nal);
11233 });
11234 };
11235 /**
11236 * Flushes underlying CaptionStream to dispatch processed, displayable captions
11237 * @see m2ts/caption-stream.js
11238 **/
11239
11240
11241 this.flushStream = function () {
11242 if (!this.isInitialized()) {
11243 return null;
11244 }
11245
11246 if (!parsingPartial) {
11247 captionStream.flush();
11248 } else {
11249 captionStream.partialFlush();
11250 }
11251 };
11252 /**
11253 * Reset caption buckets for new data
11254 **/
11255
11256
11257 this.clearParsedCaptions = function () {
11258 parsedCaptions.captions = [];
11259 parsedCaptions.captionStreams = {};
11260 };
11261 /**
11262 * Resets underlying CaptionStream
11263 * @see m2ts/caption-stream.js
11264 **/
11265
11266
11267 this.resetCaptionStream = function () {
11268 if (!this.isInitialized()) {
11269 return null;
11270 }
11271
11272 captionStream.reset();
11273 };
11274 /**
11275 * Convenience method to clear all captions flushed from the
11276 * CaptionStream and still being parsed
11277 * @see m2ts/caption-stream.js
11278 **/
11279
11280
11281 this.clearAllCaptions = function () {
11282 this.clearParsedCaptions();
11283 this.resetCaptionStream();
11284 };
11285 /**
11286 * Reset caption parser
11287 **/
11288
11289
11290 this.reset = function () {
11291 segmentCache = [];
11292 trackId = null;
11293 timescale = null;
11294
11295 if (!parsedCaptions) {
11296 parsedCaptions = {
11297 captions: [],
11298 // CC1, CC2, CC3, CC4
11299 captionStreams: {}
11300 };
11301 } else {
11302 this.clearParsedCaptions();
11303 }
11304
11305 this.resetCaptionStream();
11306 };
11307
11308 this.reset();
11309 };
11310
11311 var captionParser = CaptionParser;
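// Illustrative sketch, not part of the original bundle and never invoked:
// typical CaptionParser usage against an fmp4 segment. `videoTrackIds` and
// `timescales` are assumed to come from probing the init segment (see the
// probe helpers referenced in the docs above).
var exampleCaptionParse = function (segmentBytes, videoTrackIds, timescales) {
  var parser = new captionParser();
  parser.init();
  // { captions: [...], captionStreams: {...} } once enough data is available
  return parser.parse(segmentBytes, videoTrackIds, timescales);
};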
11312 /* global self */
11313
11314 var typeFromStreamString = function typeFromStreamString(streamString) {
11315 if (streamString === 'AudioSegmentStream') {
11316 return 'audio';
11317 }
11318
11319 return streamString === 'VideoSegmentStream' ? 'video' : '';
11320 };
11321 /**
11322 * Re-emits transmuxer events by converting them into messages to the
11323 * world outside the worker.
11324 *
11325 * @param {Object} transmuxer the transmuxer to wire events on
11326 * @private
11327 */
11328
11329
11330 var wireFullTransmuxerEvents = function wireFullTransmuxerEvents(self, transmuxer) {
11331 transmuxer.on('data', function (segment) {
11332 // transfer ownership of the underlying ArrayBuffer
11333 // instead of doing a copy to save memory
11334 // ArrayBuffers are transferable but generic TypedArrays are not
11335 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
11336 var initArray = segment.initSegment;
11337 segment.initSegment = {
11338 data: initArray.buffer,
11339 byteOffset: initArray.byteOffset,
11340 byteLength: initArray.byteLength
11341 };
11342 var typedArray = segment.data;
11343 segment.data = typedArray.buffer;
11344 self.postMessage({
11345 action: 'data',
11346 segment: segment,
11347 byteOffset: typedArray.byteOffset,
11348 byteLength: typedArray.byteLength
11349 }, [segment.data]);
11350 });
11351 transmuxer.on('done', function (data) {
11352 self.postMessage({
11353 action: 'done'
11354 });
11355 });
11356 transmuxer.on('gopInfo', function (gopInfo) {
11357 self.postMessage({
11358 action: 'gopInfo',
11359 gopInfo: gopInfo
11360 });
11361 });
11362 transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
11363 var videoSegmentTimingInfo = {
11364 start: {
11365 decode: clock.videoTsToSeconds(timingInfo.start.dts),
11366 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
11367 },
11368 end: {
11369 decode: clock.videoTsToSeconds(timingInfo.end.dts),
11370 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
11371 },
11372 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
11373 };
11374
11375 if (timingInfo.prependedContentDuration) {
11376 videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
11377 }
11378
11379 self.postMessage({
11380 action: 'videoSegmentTimingInfo',
11381 videoSegmentTimingInfo: videoSegmentTimingInfo
11382 });
11383 });
11384 transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
11385 // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
11386 var audioSegmentTimingInfo = {
11387 start: {
11388 decode: clock.videoTsToSeconds(timingInfo.start.dts),
11389 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
11390 },
11391 end: {
11392 decode: clock.videoTsToSeconds(timingInfo.end.dts),
11393 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
11394 },
11395 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
11396 };
11397
11398 if (timingInfo.prependedContentDuration) {
11399 audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
11400 }
11401
11402 self.postMessage({
11403 action: 'audioSegmentTimingInfo',
11404 audioSegmentTimingInfo: audioSegmentTimingInfo
11405 });
11406 });
11407 transmuxer.on('id3Frame', function (id3Frame) {
11408 self.postMessage({
11409 action: 'id3Frame',
11410 id3Frame: id3Frame
11411 });
11412 });
11413 transmuxer.on('caption', function (caption) {
11414 self.postMessage({
11415 action: 'caption',
11416 caption: caption
11417 });
11418 });
11419 transmuxer.on('trackinfo', function (trackInfo) {
11420 self.postMessage({
11421 action: 'trackinfo',
11422 trackInfo: trackInfo
11423 });
11424 });
11425 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
11426 // convert to video TS since we prioritize video time over audio
11427 self.postMessage({
11428 action: 'audioTimingInfo',
11429 audioTimingInfo: {
11430 start: clock.videoTsToSeconds(audioTimingInfo.start),
11431 end: clock.videoTsToSeconds(audioTimingInfo.end)
11432 }
11433 });
11434 });
11435 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
11436 self.postMessage({
11437 action: 'videoTimingInfo',
11438 videoTimingInfo: {
11439 start: clock.videoTsToSeconds(videoTimingInfo.start),
11440 end: clock.videoTsToSeconds(videoTimingInfo.end)
11441 }
11442 });
11443 });
11444 };
11445
11446 var wirePartialTransmuxerEvents = function wirePartialTransmuxerEvents(self, transmuxer) {
11447 transmuxer.on('data', function (event) {
11448 // transfer ownership of the underlying ArrayBuffer
11449 // instead of doing a copy to save memory
11450 // ArrayBuffers are transferable but generic TypedArrays are not
11451 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
11452 var initSegment = {
11453 data: event.data.track.initSegment.buffer,
11454 byteOffset: event.data.track.initSegment.byteOffset,
11455 byteLength: event.data.track.initSegment.byteLength
11456 };
11457 var boxes = {
11458 data: event.data.boxes.buffer,
11459 byteOffset: event.data.boxes.byteOffset,
11460 byteLength: event.data.boxes.byteLength
11461 };
11462 var segment = {
11463 boxes: boxes,
11464 initSegment: initSegment,
11465 type: event.type,
11466 sequence: event.data.sequence
11467 };
11468
11469 if (typeof event.data.videoFrameDts !== 'undefined') {
11470 segment.videoFrameDtsTime = clock.videoTsToSeconds(event.data.videoFrameDts);
11471 }
11472
11473 if (typeof event.data.videoFramePts !== 'undefined') {
11474 segment.videoFramePtsTime = clock.videoTsToSeconds(event.data.videoFramePts);
11475 }
11476
11477 self.postMessage({
11478 action: 'data',
11479 segment: segment
11480 }, [segment.boxes.data, segment.initSegment.data]);
11481 });
11482 transmuxer.on('id3Frame', function (id3Frame) {
11483 self.postMessage({
11484 action: 'id3Frame',
11485 id3Frame: id3Frame
11486 });
11487 });
11488 transmuxer.on('caption', function (caption) {
11489 self.postMessage({
11490 action: 'caption',
11491 caption: caption
11492 });
11493 });
11494 transmuxer.on('done', function (data) {
11495 self.postMessage({
11496 action: 'done',
11497 type: typeFromStreamString(data)
11498 });
11499 });
11500 transmuxer.on('partialdone', function (data) {
11501 self.postMessage({
11502 action: 'partialdone',
11503 type: typeFromStreamString(data)
11504 });
11505 });
11506 transmuxer.on('endedsegment', function (data) {
11507 self.postMessage({
11508 action: 'endedSegment',
11509 type: typeFromStreamString(data)
11510 });
11511 });
11512 transmuxer.on('trackinfo', function (trackInfo) {
11513 self.postMessage({
11514 action: 'trackinfo',
11515 trackInfo: trackInfo
11516 });
11517 });
11518 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
11519 // This can happen if flush is called when no
11520 // audio has been processed. This should be an
11521 // unusual case, but if it does occur, the null timing info is
11522 // forwarded as-is rather than converted to seconds
11523 if (audioTimingInfo.start === null) {
11524 self.postMessage({
11525 action: 'audioTimingInfo',
11526 audioTimingInfo: audioTimingInfo
11527 });
11528 return;
11529 } // convert to video TS since we prioritize video time over audio
11530
11531
11532 var timingInfoInSeconds = {
11533 start: clock.videoTsToSeconds(audioTimingInfo.start)
11534 };
11535
11536 if (audioTimingInfo.end) {
11537 timingInfoInSeconds.end = clock.videoTsToSeconds(audioTimingInfo.end);
11538 }
11539
11540 self.postMessage({
11541 action: 'audioTimingInfo',
11542 audioTimingInfo: timingInfoInSeconds
11543 });
11544 });
11545 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
11546 var timingInfoInSeconds = {
11547 start: clock.videoTsToSeconds(videoTimingInfo.start)
11548 };
11549
11550 if (videoTimingInfo.end) {
11551 timingInfoInSeconds.end = clock.videoTsToSeconds(videoTimingInfo.end);
11552 }
11553
11554 self.postMessage({
11555 action: 'videoTimingInfo',
11556 videoTimingInfo: timingInfoInSeconds
11557 });
11558 });
11559 };
11560 /**
11561 * All incoming messages route through this hash. If no function exists
11562 * to handle an incoming message, then we ignore the message.
11563 *
11564 * @class MessageHandlers
11565 * @param {Object} options the options to initialize with
11566 */
11567
11568
11569 var MessageHandlers = /*#__PURE__*/function () {
11570 function MessageHandlers(self, options) {
11571 this.options = options || {};
11572 this.self = self;
11573 this.init();
11574 }
11575 /**
11576 * initialize our web worker and wire all the events.
11577 */
11578
11579
11580 var _proto = MessageHandlers.prototype;
11581
11582 _proto.init = function init() {
11583 if (this.transmuxer) {
11584 this.transmuxer.dispose();
11585 }
11586
11587 this.transmuxer = this.options.handlePartialData ? new transmuxer(this.options) : new transmuxer$1.Transmuxer(this.options);
11588
11589 if (this.options.handlePartialData) {
11590 wirePartialTransmuxerEvents(this.self, this.transmuxer);
11591 } else {
11592 wireFullTransmuxerEvents(this.self, this.transmuxer);
11593 }
11594 };
11595
11596 _proto.pushMp4Captions = function pushMp4Captions(data) {
11597 if (!this.captionParser) {
11598 this.captionParser = new captionParser();
11599 this.captionParser.init();
11600 }
11601
11602 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
11603 var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
11604 this.self.postMessage({
11605 action: 'mp4Captions',
11606 captions: parsed && parsed.captions || [],
11607 data: segment.buffer
11608 }, [segment.buffer]);
11609 };
11610
11611 _proto.clearAllMp4Captions = function clearAllMp4Captions() {
11612 if (this.captionParser) {
11613 this.captionParser.clearAllCaptions();
11614 }
11615 };
11616
11617 _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
11618 if (this.captionParser) {
11619 this.captionParser.clearParsedCaptions();
11620 }
11621 }
11622 /**
11623 * Adds data (a ts segment) to the start of the transmuxer pipeline for
11624 * processing.
11625 *
11626 * @param {ArrayBuffer} data data to push into the muxer
11627 */
11628 ;
11629
11630 _proto.push = function push(data) {
11631 // Cast array buffer to correct type for transmuxer
11632 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
11633 this.transmuxer.push(segment);
11634 }
11635 /**
11636 * Recreate the transmuxer so that the next segment added via `push`
11637 * starts with a fresh transmuxer.
11638 */
11639 ;
11640
11641 _proto.reset = function reset() {
11642 this.transmuxer.reset();
11643 }
11644 /**
11645 * Set the value that will be used as the `baseMediaDecodeTime` time for the
11646 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
11647 * set relative to the first based on the PTS values.
11648 *
11649 * @param {Object} data used to set the timestamp offset in the muxer
11650 */
11651 ;
11652
11653 _proto.setTimestampOffset = function setTimestampOffset(data) {
11654 var timestampOffset = data.timestampOffset || 0;
11655 this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
11656 };
11657
11658 _proto.setAudioAppendStart = function setAudioAppendStart(data) {
11659 this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
11660 };
11661
11662 _proto.setRemux = function setRemux(data) {
11663 this.transmuxer.setRemux(data.remux);
11664 }
11665 /**
11666 * Forces the pipeline to finish processing the last segment and emit its
11667 * results.
11668 *
11669 * @param {Object} data event data, not really used
11670 */
11671 ;
11672
11673 _proto.flush = function flush(data) {
11674 this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
11675
11676 self.postMessage({
11677 action: 'done',
11678 type: 'transmuxed'
11679 });
11680 };
11681
11682 _proto.partialFlush = function partialFlush(data) {
11683 this.transmuxer.partialFlush(); // transmuxed partialdone action is fired after both audio/video pipelines are flushed
11684
11685 self.postMessage({
11686 action: 'partialdone',
11687 type: 'transmuxed'
11688 });
11689 };
11690
11691 _proto.endTimeline = function endTimeline() {
11692 this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
11693 // timelines
11694
11695 self.postMessage({
11696 action: 'endedtimeline',
11697 type: 'transmuxed'
11698 });
11699 };
11700
11701 _proto.alignGopsWith = function alignGopsWith(data) {
11702 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
11703 };
11704
11705 return MessageHandlers;
11706 }();
11707 /**
11708 * Our web worker interface so that things can talk to mux.js
11709 * that will be running in a web worker. The scope is passed to this by
11710 * webworkify.
11711 *
11712 * @param {Object} self the scope for the web worker
11713 */
11714
11715
11716 self.onmessage = function (event) {
11717 if (event.data.action === 'init' && event.data.options) {
11718 this.messageHandlers = new MessageHandlers(self, event.data.options);
11719 return;
11720 }
11721
11722 if (!this.messageHandlers) {
11723 this.messageHandlers = new MessageHandlers(self);
11724 }
11725
11726 if (event.data && event.data.action && event.data.action !== 'init') {
11727 if (this.messageHandlers[event.data.action]) {
11728 this.messageHandlers[event.data.action](event.data);
11729 }
11730 }
11731 };
11732}));
11733var TransmuxWorker = factory(workerCode$1);
11734/* rollup-plugin-worker-factory end for worker!/Users/gkatsevman/p/http-streaming-release/src/transmuxer-worker.js */
11735
11736var handleData_ = function handleData_(event, transmuxedData, callback) {
11737 var _event$data$segment = event.data.segment,
11738 type = _event$data$segment.type,
11739 initSegment = _event$data$segment.initSegment,
11740 captions = _event$data$segment.captions,
11741 captionStreams = _event$data$segment.captionStreams,
11742 metadata = _event$data$segment.metadata,
11743 videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
11744 videoFramePtsTime = _event$data$segment.videoFramePtsTime;
11745 transmuxedData.buffer.push({
11746 captions: captions,
11747 captionStreams: captionStreams,
11748 metadata: metadata
11749 }); // right now, boxes come back from the partial transmuxer, data from the full one
11750
11751 var boxes = event.data.segment.boxes || {
11752 data: event.data.segment.data
11753 };
11754 var result = {
11755 type: type,
11756 // cast ArrayBuffer to TypedArray
11757 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
11758 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
11759 };
11760
11761 if (typeof videoFrameDtsTime !== 'undefined') {
11762 result.videoFrameDtsTime = videoFrameDtsTime;
11763 }
11764
11765 if (typeof videoFramePtsTime !== 'undefined') {
11766 result.videoFramePtsTime = videoFramePtsTime;
11767 }
11768
11769 callback(result);
11770};
11771var handleDone_ = function handleDone_(_ref) {
11772 var transmuxedData = _ref.transmuxedData,
11773 callback = _ref.callback;
11774 // Previously we only returned data on data events,
11775 // not on done events. Clear out the buffer to keep that consistent.
11776 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
11777 // have received
11778
11779 callback(transmuxedData);
11780};
11781var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
11782 transmuxedData.gopInfo = event.data.gopInfo;
11783};
11784var processTransmux = function processTransmux(options) {
11785 var transmuxer = options.transmuxer,
11786 bytes = options.bytes,
11787 audioAppendStart = options.audioAppendStart,
11788 gopsToAlignWith = options.gopsToAlignWith,
11789 isPartial = options.isPartial,
11790 remux = options.remux,
11791 onData = options.onData,
11792 onTrackInfo = options.onTrackInfo,
11793 onAudioTimingInfo = options.onAudioTimingInfo,
11794 onVideoTimingInfo = options.onVideoTimingInfo,
11795 onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
11796 onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
11797 onId3 = options.onId3,
11798 onCaptions = options.onCaptions,
11799 onDone = options.onDone,
11800 onEndedTimeline = options.onEndedTimeline,
11801 isEndOfTimeline = options.isEndOfTimeline;
11802 var transmuxedData = {
11803 isPartial: isPartial,
11804 buffer: []
11805 };
11806 var waitForEndedTimelineEvent = isEndOfTimeline;
11807
11808 var handleMessage = function handleMessage(event) {
11809 if (transmuxer.currentTransmux !== options) {
11810 // disposed
11811 return;
11812 }
11813
11814 if (event.data.action === 'data') {
11815 handleData_(event, transmuxedData, onData);
11816 }
11817
11818 if (event.data.action === 'trackinfo') {
11819 onTrackInfo(event.data.trackInfo);
11820 }
11821
11822 if (event.data.action === 'gopInfo') {
11823 handleGopInfo_(event, transmuxedData);
11824 }
11825
11826 if (event.data.action === 'audioTimingInfo') {
11827 onAudioTimingInfo(event.data.audioTimingInfo);
11828 }
11829
11830 if (event.data.action === 'videoTimingInfo') {
11831 onVideoTimingInfo(event.data.videoTimingInfo);
11832 }
11833
11834 if (event.data.action === 'videoSegmentTimingInfo') {
11835 onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
11836 }
11837
11838 if (event.data.action === 'audioSegmentTimingInfo') {
11839 onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
11840 }
11841
11842 if (event.data.action === 'id3Frame') {
11843 onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
11844 }
11845
11846 if (event.data.action === 'caption') {
11847 onCaptions(event.data.caption);
11848 }
11849
11850 if (event.data.action === 'endedtimeline') {
11851 waitForEndedTimelineEvent = false;
11852 onEndedTimeline();
11853 } // wait for the transmuxed event since we may have audio and video
11854
11855
11856 if (event.data.type !== 'transmuxed') {
11857 return;
11858 } // If the "endedtimeline" event has not yet fired, and this segment represents the end
11859 // of a timeline, that means there may still be data events before the segment
11860 // processing can be considered complete. In that case, the final event should be
11861 // an "endedtimeline" event with the type "transmuxed."
11862
11863
11864 if (waitForEndedTimelineEvent) {
11865 return;
11866 }
11867
11868 transmuxer.onmessage = null;
11869 handleDone_({
11870 transmuxedData: transmuxedData,
11871 callback: onDone
11872 });
11873 /* eslint-disable no-use-before-define */
11874
11875 dequeue(transmuxer);
11876 /* eslint-enable */
11877 };
11878
11879 transmuxer.onmessage = handleMessage;
11880
11881 if (audioAppendStart) {
11882 transmuxer.postMessage({
11883 action: 'setAudioAppendStart',
11884 appendStart: audioAppendStart
11885 });
11886 } // allow empty arrays to be passed to clear out GOPs
11887
11888
11889 if (Array.isArray(gopsToAlignWith)) {
11890 transmuxer.postMessage({
11891 action: 'alignGopsWith',
11892 gopsToAlignWith: gopsToAlignWith
11893 });
11894 }
11895
11896 if (typeof remux !== 'undefined') {
11897 transmuxer.postMessage({
11898 action: 'setRemux',
11899 remux: remux
11900 });
11901 }
11902
11903 if (bytes.byteLength) {
11904 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
11905 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
11906 transmuxer.postMessage({
11907 action: 'push',
11908 // Send the typed-array of data as an ArrayBuffer so that
11909 // it can be sent as a "Transferable" and avoid the costly
11910 // memory copy
11911 data: buffer,
11912 // To recreate the original typed-array, we need information
11913 // about what portion of the ArrayBuffer it was a view into
11914 byteOffset: byteOffset,
11915 byteLength: bytes.byteLength
11916 }, [buffer]);
11917 } // even if we didn't push any bytes, we have to make sure we flush in case we reached
11918 // the end of the segment
11919
11920
11921 transmuxer.postMessage({
11922 action: isPartial ? 'partialFlush' : 'flush'
11923 });
11924
11925 if (isEndOfTimeline) {
11926 transmuxer.postMessage({
11927 action: 'endTimeline'
11928 });
11929 }
11930};
11931var dequeue = function dequeue(transmuxer) {
11932 transmuxer.currentTransmux = null;
11933
11934 if (transmuxer.transmuxQueue.length) {
11935 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
11936
11937 if (typeof transmuxer.currentTransmux === 'function') {
11938 transmuxer.currentTransmux();
11939 } else {
11940 processTransmux(transmuxer.currentTransmux);
11941 }
11942 }
11943};
11944var processAction = function processAction(transmuxer, action) {
11945 transmuxer.postMessage({
11946 action: action
11947 });
11948 dequeue(transmuxer);
11949};
11950var enqueueAction = function enqueueAction(action, transmuxer) {
11951 if (!transmuxer.currentTransmux) {
11952 transmuxer.currentTransmux = action;
11953 processAction(transmuxer, action);
11954 return;
11955 }
11956
11957 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
11958};
11959var reset = function reset(transmuxer) {
11960 enqueueAction('reset', transmuxer);
11961};
11962var endTimeline = function endTimeline(transmuxer) {
11963 enqueueAction('endTimeline', transmuxer);
11964};
11965var transmux = function transmux(options) {
11966 if (!options.transmuxer.currentTransmux) {
11967 options.transmuxer.currentTransmux = options;
11968 processTransmux(options);
11969 return;
11970 }
11971
11972 options.transmuxer.transmuxQueue.push(options);
11973};
11974var createTransmuxer = function createTransmuxer(options) {
11975 var transmuxer = new TransmuxWorker();
11976 transmuxer.currentTransmux = null;
11977 transmuxer.transmuxQueue = [];
11978 var term = transmuxer.terminate;
11979
11980 transmuxer.terminate = function () {
11981 transmuxer.currentTransmux = null;
11982 transmuxer.transmuxQueue.length = 0;
11983 return term.call(transmuxer);
11984 };
11985
11986 transmuxer.postMessage({
11987 action: 'init',
11988 options: options
11989 });
11990 return transmuxer;
11991};
11992var segmentTransmuxer = {
11993 reset: reset,
11994 endTimeline: endTimeline,
11995 transmux: transmux,
11996 createTransmuxer: createTransmuxer
11997};
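// Illustrative sketch, not part of the original bundle and never invoked:
// a caller creates one worker per loader and funnels segment bytes through
// `transmux`, which queues the work if a previous transmux is still in
// flight. All callback names mirror the processTransmux options above;
// `segmentBytes` and the noop handlers are hypothetical.
var exampleTransmuxUsage = function (segmentBytes) {
  var noop = function () {};
  var worker = segmentTransmuxer.createTransmuxer({ remux: true });
  segmentTransmuxer.transmux({
    transmuxer: worker,
    bytes: segmentBytes,
    isPartial: false,
    isEndOfTimeline: false,
    onData: noop, // receives { type, data, initSegment, ... }
    onTrackInfo: noop,
    onAudioTimingInfo: noop,
    onVideoTimingInfo: noop,
    onVideoSegmentTimingInfo: noop,
    onAudioSegmentTimingInfo: noop,
    onId3: noop,
    onCaptions: noop,
    onEndedTimeline: noop,
    onDone: noop // fires once both audio and video pipelines have flushed
  });
};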
11998
11999/**
12000 * Probe an mpeg2-ts segment to determine the start time of the segment in its
12001 * internal "media time," as well as whether it contains video and/or audio.
12002 *
12003 * @private
12004 * @param {Uint8Array} bytes - segment bytes
12005 * @param {number} baseStartTime
12006 * Relative reference timestamp used when adjusting frame timestamps for rollover.
12007 * This value should be in seconds, as it's converted to a 90kHz clock within the
12008 * function body.
12009 * @return {Object} The start time of the current segment in "media time" as well as
12010 * whether it contains video and/or audio
12011 */
12012
12013var probeTsSegment = function probeTsSegment(bytes, baseStartTime) {
12014 var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
12015 var timeInfo = tsInspector__default['default'].inspect(bytes, tsStartTime);
12016
12017 if (!timeInfo) {
12018 return null;
12019 }
12020
12021 var result = {
12022 // each type's time info comes back as an array of 2 times, start and end
12023 hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
12024 hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
12025 };
12026
12027 if (result.hasVideo) {
12028 result.videoStart = timeInfo.video[0].ptsTime;
12029 }
12030
12031 if (result.hasAudio) {
12032 result.audioStart = timeInfo.audio[0].ptsTime;
12033 }
12034
12035 return result;
12036};
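// Illustrative sketch, not part of the original bundle and never invoked:
// probing a downloaded TS segment. `segmentBytes` is a hypothetical
// Uint8Array; a baseStartTime of 10 seconds anchors rollover handling.
var exampleProbeTs = function (segmentBytes) {
  var probe = probeTsSegment(segmentBytes, 10);
  // e.g. { hasVideo: true, hasAudio: true, videoStart: ..., audioStart: ... }
  return probe && probe.hasVideo ? probe.videoStart : null;
};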
12037/**
12038 * Combine all segments into a single Uint8Array
12039 *
12040 * @param {Object} segmentObj
12041 * @return {Uint8Array} concatenated bytes
12042 * @private
12043 */
12044
12045var concatSegments = function concatSegments(segmentObj) {
12046 var offset = 0;
12047 var tempBuffer;
12048
12049 if (segmentObj.bytes) {
12050 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
12051
12052 segmentObj.segments.forEach(function (segment) {
12053 tempBuffer.set(segment, offset);
12054 offset += segment.byteLength;
12055 });
12056 }
12057
12058 return tempBuffer;
12059};
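// Illustrative sketch, not part of the original bundle and never invoked:
// callers pass the total byte count along with the individual parts.
var exampleConcat = function (partA, partB) {
  return concatSegments({
    bytes: partA.byteLength + partB.byteLength,
    segments: [partA, partB]
  });
};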
12060
12061var REQUEST_ERRORS = {
12062 FAILURE: 2,
12063 TIMEOUT: -101,
12064 ABORTED: -102
12065};
12066/**
12067 * Abort all requests
12068 *
12069 * @param {Object} activeXhrs - an object that tracks all XHR requests
12070 */
12071
12072var abortAll = function abortAll(activeXhrs) {
12073 activeXhrs.forEach(function (xhr) {
12074 xhr.abort();
12075 });
12076};
12077/**
12078 * Gather important bandwidth stats once a request has completed
12079 *
12080 * @param {Object} request - the XHR request from which to gather stats
12081 */
12082
12083
12084var getRequestStats = function getRequestStats(request) {
12085 return {
12086 bandwidth: request.bandwidth,
12087 bytesReceived: request.bytesReceived || 0,
12088 roundTripTime: request.roundTripTime || 0
12089 };
12090};
12091/**
12092 * If possible gather bandwidth stats as a request is in
12093 * progress
12094 *
12095 * @param {Event} progressEvent - an event object from an XHR's progress event
12096 */
12097
12098
12099var getProgressStats = function getProgressStats(progressEvent) {
12100 var request = progressEvent.target;
12101 var roundTripTime = Date.now() - request.requestTime;
12102 var stats = {
12103 bandwidth: Infinity,
12104 bytesReceived: 0,
12105 roundTripTime: roundTripTime || 0
12106 };
12107 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0, but that is ok
12108 // because we should only use bandwidth stats on progress to determine when to
12109 // abort a request early due to insufficient bandwidth
12110
12111 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
12112 return stats;
12113};
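/**
 * Editorial worked example (hypothetical numbers): with 500,000 bytes received
 * 2,000ms after the request started, bandwidth is
 * Math.floor(500000 / 2000 * 8 * 1000) = 2,000,000 bits per second.
 */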
12114/**
12115 * Handle all error conditions in one place and return an object
12116 * with all the information
12117 *
12118 * @param {Error|null} error - if non-null, signals an error occurred with the XHR
12119 * @param {Object} request - the XHR request that possibly generated the error
12120 */
12121
12122
12123var handleErrors = function handleErrors(error, request) {
12124 if (request.timedout) {
12125 return {
12126 status: request.status,
12127 message: 'HLS request timed-out at URL: ' + request.uri,
12128 code: REQUEST_ERRORS.TIMEOUT,
12129 xhr: request
12130 };
12131 }
12132
12133 if (request.aborted) {
12134 return {
12135 status: request.status,
12136 message: 'HLS request aborted at URL: ' + request.uri,
12137 code: REQUEST_ERRORS.ABORTED,
12138 xhr: request
12139 };
12140 }
12141
12142 if (error) {
12143 return {
12144 status: request.status,
12145 message: 'HLS request errored at URL: ' + request.uri,
12146 code: REQUEST_ERRORS.FAILURE,
12147 xhr: request
12148 };
12149 }
12150
12151 return null;
12152};
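/**
 * Editorial note: error precedence is timeout, then abort, then generic
 * failure.
 *
 * @example
 * // hypothetical request object from the xhr wrapper
 * handleErrors(null, { timedout: true, status: 0, uri: 'https://example.com/0.ts' });
 * // => { status: 0, message: 'HLS request timed-out at URL: ...', code: -101, xhr: ... }
 */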
12153/**
12154 * Handle responses for key data and convert the key data to the correct format
12155 * for the decryption step later
12156 *
12157 * @param {Object} segment - a simplified copy of the segmentInfo object
12158 * from SegmentLoader
12159 * @param {Function} finishProcessingFn - a callback to execute to continue processing
12160 * this request
12161 */
12162
12163
12164var handleKeyResponse = function handleKeyResponse(segment, finishProcessingFn) {
12165 return function (error, request) {
12166 var response = request.response;
12167 var errorObj = handleErrors(error, request);
12168
12169 if (errorObj) {
12170 return finishProcessingFn(errorObj, segment);
12171 }
12172
12173 if (response.byteLength !== 16) {
12174 return finishProcessingFn({
12175 status: request.status,
12176 message: 'Invalid HLS key at URL: ' + request.uri,
12177 code: REQUEST_ERRORS.FAILURE,
12178 xhr: request
12179 }, segment);
12180 }
12181
12182 var view = new DataView(response);
12183 segment.key.bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
12184 return finishProcessingFn(null, segment);
12185 };
12186};
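/**
 * Editorial sketch (hypothetical key bytes): the 16-byte key response is read
 * as four big-endian 32-bit words, since DataView.getUint32 defaults to
 * big-endian.
 *
 * @example
 * // response bytes 00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f become:
 * // new Uint32Array([0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f])
 */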
12187/**
12188 * Handle init-segment responses
12189 *
12190 * @param {Object} segment - a simplified copy of the segmentInfo object
12191 * from SegmentLoader
12192 * @param {Function} finishProcessingFn - a callback to execute to continue processing
12193 * this request
12194 */
12195
12196
12197var handleInitSegmentResponse = function handleInitSegmentResponse(_ref) {
12198 var segment = _ref.segment,
12199 finishProcessingFn = _ref.finishProcessingFn;
12200 return function (error, request) {
12201 var response = request.response;
12202 var errorObj = handleErrors(error, request);
12203
12204 if (errorObj) {
12205 return finishProcessingFn(errorObj, segment);
12206 } // stop processing if received empty content
12207
12208
12209 if (response.byteLength === 0) {
12210 return finishProcessingFn({
12211 status: request.status,
12212 message: 'Empty HLS segment content at URL: ' + request.uri,
12213 code: REQUEST_ERRORS.FAILURE,
12214 xhr: request
12215 }, segment);
12216 }
12217
12218 segment.map.bytes = new Uint8Array(request.response);
12219 var type = containers.detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
12220 // only know how to parse mp4 init segments at the moment
12221
12222 if (type !== 'mp4') {
12223 return finishProcessingFn({
12224 status: request.status,
12225 message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + request.uri,
12226 code: REQUEST_ERRORS.FAILURE,
12227 internal: true,
12228 xhr: request
12229 }, segment);
12230 }
12231
12232 var tracks = mp4probe__default['default'].tracks(segment.map.bytes);
12233 tracks.forEach(function (track) {
12234 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
12235
12236 if (segment.map.tracks[track.type]) {
12237 return;
12238 }
12239
12240 segment.map.tracks[track.type] = track;
12241
12242 if (typeof track.id === 'number' && track.timescale) {
12243 segment.map.timescales = segment.map.timescales || {};
12244 segment.map.timescales[track.id] = track.timescale;
12245 }
12246 });
12247 return finishProcessingFn(null, segment);
12248 };
12249};
12250/**
12251 * Response handler for segment requests, being sure to set the correct
12252 * property depending on whether the segment is encrypted or not.
12253 * Also records and keeps track of stats that are used for ABR purposes
12254 *
12255 * @param {Object} segment - a simplified copy of the segmentInfo object
12256 * from SegmentLoader
12257 * @param {Function} finishProcessingFn - a callback to execute to continue processing
12258 * this request
12259 */
12260
12261
12262var handleSegmentResponse = function handleSegmentResponse(_ref2) {
12263 var segment = _ref2.segment,
12264 finishProcessingFn = _ref2.finishProcessingFn,
12265 responseType = _ref2.responseType;
12266 return function (error, request) {
12267 var response = request.response;
12268 var errorObj = handleErrors(error, request);
12269
12270 if (errorObj) {
12271 return finishProcessingFn(errorObj, segment);
12272 }
12273
12274 var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
12275 // thrown for two primary cases:
12276 // 1. the mime type override stops working, or is not implemented for a specific
12277 // browser
12278 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
12279 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0)); // stop processing if received empty content
12280
12281 if (response.byteLength === 0) {
12282 return finishProcessingFn({
12283 status: request.status,
12284 message: 'Empty HLS segment content at URL: ' + request.uri,
12285 code: REQUEST_ERRORS.FAILURE,
12286 xhr: request
12287 }, segment);
12288 }
12289
12290 segment.stats = getRequestStats(request);
12291
12292 if (segment.key) {
12293 segment.encryptedBytes = new Uint8Array(newBytes);
12294 } else {
12295 segment.bytes = new Uint8Array(newBytes);
12296 }
12297
12298 return finishProcessingFn(null, segment);
12299 };
12300};
12301
12302var transmuxAndNotify = function transmuxAndNotify(_ref3) {
12303 var segment = _ref3.segment,
12304 bytes = _ref3.bytes,
12305 isPartial = _ref3.isPartial,
12306 trackInfoFn = _ref3.trackInfoFn,
12307 timingInfoFn = _ref3.timingInfoFn,
12308 videoSegmentTimingInfoFn = _ref3.videoSegmentTimingInfoFn,
12309 audioSegmentTimingInfoFn = _ref3.audioSegmentTimingInfoFn,
12310 id3Fn = _ref3.id3Fn,
12311 captionsFn = _ref3.captionsFn,
12312 isEndOfTimeline = _ref3.isEndOfTimeline,
12313 endedTimelineFn = _ref3.endedTimelineFn,
12314 dataFn = _ref3.dataFn,
12315 doneFn = _ref3.doneFn;
12316 var fmp4Tracks = segment.map && segment.map.tracks || {};
12317 var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
12318 // One reason for this is that in the case of full segments, we want to trust start
12319 // times from the probe, rather than the transmuxer.
12320
12321 var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
12322 var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
12323 var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
12324 var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end'); // Check to see if we are appending a full segment.
12325
12326 if (!isPartial && !segment.lastReachedChar) {
12327 // In the full segment transmuxer, we don't yet have the ability to extract a "proper"
12328 // start time, meaning cached frame data may corrupt our notion of where this segment
12329 // really starts. To get around this, full segment appends should probe for the info
12330 // needed.
12331 var probeResult = probeTsSegment(bytes, segment.baseStartTime);
12332
12333 if (probeResult) {
12334 trackInfoFn(segment, {
12335 hasAudio: probeResult.hasAudio,
12336 hasVideo: probeResult.hasVideo,
12337 isMuxed: isMuxed
12338 });
12339 trackInfoFn = null;
12340
12341 if (probeResult.hasAudio && !isMuxed) {
12342 audioStartFn(probeResult.audioStart);
12343 }
12344
12345 if (probeResult.hasVideo) {
12346 videoStartFn(probeResult.videoStart);
12347 }
12348
12349 audioStartFn = null;
12350 videoStartFn = null;
12351 }
12352 }
12353
12354 transmux({
12355 bytes: bytes,
12356 transmuxer: segment.transmuxer,
12357 audioAppendStart: segment.audioAppendStart,
12358 gopsToAlignWith: segment.gopsToAlignWith,
12359 isPartial: isPartial,
12360 remux: isMuxed,
12361 onData: function onData(result) {
12362 result.type = result.type === 'combined' ? 'video' : result.type;
12363 dataFn(segment, result);
12364 },
12365 onTrackInfo: function onTrackInfo(trackInfo) {
12366 if (trackInfoFn) {
12367 if (isMuxed) {
12368 trackInfo.isMuxed = true;
12369 }
12370
12371 trackInfoFn(segment, trackInfo);
12372 }
12373 },
12374 onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
12375 // we only want the first start value we encounter
12376 if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
12377 audioStartFn(audioTimingInfo.start);
12378 audioStartFn = null;
12379 } // we want to continually update the end time
12380
12381
12382 if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
12383 audioEndFn(audioTimingInfo.end);
12384 }
12385 },
12386 onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
12387 // we only want the first start value we encounter
12388 if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
12389 videoStartFn(videoTimingInfo.start);
12390 videoStartFn = null;
12391 } // we want to continually update the end time
12392
12393
12394 if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
12395 videoEndFn(videoTimingInfo.end);
12396 }
12397 },
12398 onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
12399 videoSegmentTimingInfoFn(videoSegmentTimingInfo);
12400 },
12401 onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
12402 audioSegmentTimingInfoFn(audioSegmentTimingInfo);
12403 },
12404 onId3: function onId3(id3Frames, dispatchType) {
12405 id3Fn(segment, id3Frames, dispatchType);
12406 },
12407 onCaptions: function onCaptions(captions) {
12408 captionsFn(segment, [captions]);
12409 },
12410 // if this is a partial transmux, the end of the timeline has not yet been reached
12411 // until the last part of the segment is processed (at which point isPartial will
12412 // be false)
12413 isEndOfTimeline: isEndOfTimeline && !isPartial,
12414 onEndedTimeline: function onEndedTimeline() {
12415 endedTimelineFn();
12416 },
12417 onDone: function onDone(result) {
12418 // To handle partial appends, there won't be a done function passed in (since
12419 // there's still, potentially, more segment to process), so there's nothing to do.
12420 if (!doneFn || isPartial) {
12421 return;
12422 }
12423
12424 result.type = result.type === 'combined' ? 'video' : result.type;
12425 doneFn(null, segment, result);
12426 }
12427 });
12428};
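/**
 * Editorial flow sketch (assuming a full ts segment with demuxed audio and
 * video): trackInfoFn and the start-time callbacks fire once from the probe,
 * onData/onAudioTimingInfo/onVideoTimingInfo stream in as the transmuxer
 * emits results, and doneFn fires last with the final timing info.
 */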
12429
12430var handleSegmentBytes = function handleSegmentBytes(_ref4) {
12431 var segment = _ref4.segment,
12432 bytes = _ref4.bytes,
12433 isPartial = _ref4.isPartial,
12434 trackInfoFn = _ref4.trackInfoFn,
12435 timingInfoFn = _ref4.timingInfoFn,
12436 videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
12437 audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
12438 id3Fn = _ref4.id3Fn,
12439 captionsFn = _ref4.captionsFn,
12440 isEndOfTimeline = _ref4.isEndOfTimeline,
12441 endedTimelineFn = _ref4.endedTimelineFn,
12442 dataFn = _ref4.dataFn,
12443 doneFn = _ref4.doneFn;
12444 var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
12445 // We should have a handler that fetches the number of bytes required
12446 // to check if something is fmp4. This will allow us to save bandwidth
12447 // because we can only blacklist a playlist and abort requests
12448 // by codec after trackinfo triggers.
12449
12450 if (containers.isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
12451 segment.isFmp4 = true;
12452 var tracks = segment.map.tracks;
12453 var trackInfo = {
12454 isFmp4: true,
12455 hasVideo: !!tracks.video,
12456 hasAudio: !!tracks.audio
12457 }; // if we have an audio track, with a codec that is not set to
12458 // encrypted audio
12459
12460 if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
12461 trackInfo.audioCodec = tracks.audio.codec;
12462 } // if we have a video track, with a codec that is not set to
12463 // encrypted video
12464
12465
12466 if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
12467 trackInfo.videoCodec = tracks.video.codec;
12468 }
12469
12470 if (tracks.video && tracks.audio) {
12471 trackInfo.isMuxed = true;
12472 } // since we don't support appending fmp4 data on progress, we know we have the full
12473 // segment here
12474
12475
12476 trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
12477 // time. The end time can be roughly calculated by the receiver using the duration.
12478 //
12479 // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
12480 // that is the true start of the segment (where the playback engine should begin
12481 // decoding).
12482
12483 var timingInfo = mp4probe__default['default'].startTime(segment.map.timescales, bytesAsUint8Array);
12484
12485 if (trackInfo.hasAudio && !trackInfo.isMuxed) {
12486 timingInfoFn(segment, 'audio', 'start', timingInfo);
12487 }
12488
12489 if (trackInfo.hasVideo) {
12490 timingInfoFn(segment, 'video', 'start', timingInfo);
12491 }
12492
12493 var finishLoading = function finishLoading(captions) {
12494 // if the track still has audio at this point it is only possible
12495 // for it to be audio only. See `tracks.video && tracks.audio` if statement
12496 // above.
12497 // we make sure to use segment.bytes here, as it is reassigned to the restored data once the transmuxer transfers ownership of the underlying buffer back (see listenForCaptions below)
12498 dataFn(segment, {
12499 data: bytes,
12500 type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
12501 });
12502
12503 if (captions && captions.length) {
12504 captionsFn(segment, captions);
12505 }
12506
12507 doneFn(null, segment, {});
12508 }; // Run through the CaptionParser in case there are captions.
12509 // Initialize CaptionParser if it hasn't been yet
12510
12511
12512 if (!tracks.video || !bytes.byteLength || !segment.transmuxer) {
12513 finishLoading();
12514 return;
12515 }
12516
12517 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
12518 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
12519
12520 var listenForCaptions = function listenForCaptions(event) {
12521 if (event.data.action !== 'mp4Captions') {
12522 return;
12523 }
12524
12525 segment.transmuxer.removeEventListener('message', listenForCaptions);
12526 var data = event.data.data; // transfer ownership of bytes back to us.
12527
12528 segment.bytes = bytes = new Uint8Array(data, data.byteOffset || 0, data.byteLength);
12529 finishLoading(event.data.captions);
12530 };
12531
12532 segment.transmuxer.addEventListener('message', listenForCaptions); // transfer ownership of bytes to worker.
12533
12534 segment.transmuxer.postMessage({
12535 action: 'pushMp4Captions',
12536 timescales: segment.map.timescales,
12537 trackIds: [tracks.video.id],
12538 data: buffer,
12539 byteOffset: byteOffset,
12540 byteLength: bytes.byteLength
12541 }, [buffer]);
12542 return;
12543 } // VTT or other segments that don't need processing
12544
12545
12546 if (!segment.transmuxer) {
12547 doneFn(null, segment, {});
12548 return;
12549 }
12550
12551 if (typeof segment.container === 'undefined') {
12552 segment.container = containers.detectContainerForBytes(bytesAsUint8Array);
12553 }
12554
12555 if (segment.container !== 'ts' && segment.container !== 'aac') {
12556 trackInfoFn(segment, {
12557 hasAudio: false,
12558 hasVideo: false
12559 });
12560 doneFn(null, segment, {});
12561 return;
12562 } // ts or aac
12563
12564
12565 transmuxAndNotify({
12566 segment: segment,
12567 bytes: bytes,
12568 isPartial: isPartial,
12569 trackInfoFn: trackInfoFn,
12570 timingInfoFn: timingInfoFn,
12571 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12572 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12573 id3Fn: id3Fn,
12574 captionsFn: captionsFn,
12575 isEndOfTimeline: isEndOfTimeline,
12576 endedTimelineFn: endedTimelineFn,
12577 dataFn: dataFn,
12578 doneFn: doneFn
12579 });
12580};
12581/**
12582 * Decrypt the segment via the decryption web worker
12583 *
12584 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
12585 * routines
12586 * @param {Object} segment - a simplified copy of the segmentInfo object
12587 * from SegmentLoader
12588 * @param {Function} trackInfoFn - a callback that receives track info
12589 * @param {Function} timingInfoFn - a callback that receives timing info
12590 * @param {Function} videoSegmentTimingInfoFn
12591 * a callback that receives video timing info based on media times and
12592 * any adjustments made by the transmuxer
12593 * @param {Function} audioSegmentTimingInfoFn
12594 * a callback that receives audio timing info based on media times and
12595 * any adjustments made by the transmuxer
12596 * @param {boolean} isEndOfTimeline
12597 * true if this segment represents the last segment in a timeline
12598 * @param {Function} endedTimelineFn
12599 * a callback made when a timeline is ended; it will only be called if
12600 * isEndOfTimeline is true
12601 * @param {Function} dataFn - a callback that is executed when segment bytes are available
12602 * and ready to use
12603 * @param {Function} doneFn - a callback that is executed after decryption has completed
12604 */
12605
12606
12607var decryptSegment = function decryptSegment(_ref5) {
12608 var decryptionWorker = _ref5.decryptionWorker,
12609 segment = _ref5.segment,
12610 trackInfoFn = _ref5.trackInfoFn,
12611 timingInfoFn = _ref5.timingInfoFn,
12612 videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
12613 audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
12614 id3Fn = _ref5.id3Fn,
12615 captionsFn = _ref5.captionsFn,
12616 isEndOfTimeline = _ref5.isEndOfTimeline,
12617 endedTimelineFn = _ref5.endedTimelineFn,
12618 dataFn = _ref5.dataFn,
12619 doneFn = _ref5.doneFn;
12620
12621 var decryptionHandler = function decryptionHandler(event) {
12622 if (event.data.source === segment.requestId) {
12623 decryptionWorker.removeEventListener('message', decryptionHandler);
12624 var decrypted = event.data.decrypted;
12625 segment.bytes = new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength);
12626 handleSegmentBytes({
12627 segment: segment,
12628 bytes: segment.bytes,
12629 isPartial: false,
12630 trackInfoFn: trackInfoFn,
12631 timingInfoFn: timingInfoFn,
12632 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12633 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12634 id3Fn: id3Fn,
12635 captionsFn: captionsFn,
12636 isEndOfTimeline: isEndOfTimeline,
12637 endedTimelineFn: endedTimelineFn,
12638 dataFn: dataFn,
12639 doneFn: doneFn
12640 });
12641 }
12642 };
12643
12644 decryptionWorker.addEventListener('message', decryptionHandler);
12645 var keyBytes;
12646
12647 if (segment.key.bytes.slice) {
12648 keyBytes = segment.key.bytes.slice();
12649 } else {
12650 keyBytes = new Uint32Array(Array.prototype.slice.call(segment.key.bytes));
12651 } // this is an encrypted segment
12652 // incrementally decrypt the segment
12653
12654
12655 decryptionWorker.postMessage(createTransferableMessage({
12656 source: segment.requestId,
12657 encrypted: segment.encryptedBytes,
12658 key: keyBytes,
12659 iv: segment.key.iv
12660 }), [segment.encryptedBytes.buffer, keyBytes.buffer]);
12661};
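/**
 * Editorial note: the encrypted bytes and key are posted with their
 * underlying ArrayBuffers in the transfer list, so the main thread gives up
 * access to them; the decrypted bytes come back in the worker's 'message'
 * event handled by decryptionHandler above.
 */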
12662/**
12663 * This function waits for all XHRs to finish (with either success or failure)
12664 * before continuing processing via its callback. The function gathers errors
12665 * from each request into a single errors array so that the error status for
12666 * each request can be examined later.
12667 *
12668 * @param {Object[]} activeXhrs - an array that tracks all outstanding XHR requests
12669 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
12670 * routines
12671 * @param {Function} trackInfoFn - a callback that receives track info
12672 * @param {Function} timingInfoFn - a callback that receives timing info
12673 * @param {Function} videoSegmentTimingInfoFn
12674 * a callback that receives video timing info based on media times and
12675 * any adjustments made by the transmuxer
12676 * @param {Function} audioSegmentTimingInfoFn
12677 * a callback that receives audio timing info based on media times and
12678 * any adjustments made by the transmuxer
12679 * @param {Function} id3Fn - a callback that receives ID3 metadata
12680 * @param {Function} captionsFn - a callback that receives captions
12681 * @param {boolean} isEndOfTimeline
12682 * true if this segment represents the last segment in a timeline
12683 * @param {Function} endedTimelineFn
12684 * a callback made when a timeline is ended; it will only be called if
12685 * isEndOfTimeline is true
12686 * @param {Function} dataFn - a callback that is executed when segment bytes are available
12687 * and ready to use
12688 * @param {Function} doneFn - a callback that is executed after all resources have been
12689 * downloaded and any decryption completed
12690 */
12691
12692
12693var waitForCompletion = function waitForCompletion(_ref6) {
12694 var activeXhrs = _ref6.activeXhrs,
12695 decryptionWorker = _ref6.decryptionWorker,
12696 trackInfoFn = _ref6.trackInfoFn,
12697 timingInfoFn = _ref6.timingInfoFn,
12698 videoSegmentTimingInfoFn = _ref6.videoSegmentTimingInfoFn,
12699 audioSegmentTimingInfoFn = _ref6.audioSegmentTimingInfoFn,
12700 id3Fn = _ref6.id3Fn,
12701 captionsFn = _ref6.captionsFn,
12702 isEndOfTimeline = _ref6.isEndOfTimeline,
12703 endedTimelineFn = _ref6.endedTimelineFn,
12704 dataFn = _ref6.dataFn,
12705 doneFn = _ref6.doneFn;
12706 var count = 0;
12707 var didError = false;
12708 return function (error, segment) {
12709 if (didError) {
12710 return;
12711 }
12712
12713 if (error) {
12714 didError = true; // If there are errors, we have to abort any outstanding requests
12715
12716 abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
12717 // handle the aborted events from those requests, there are some cases where we may
12718 // never get an aborted event. For instance, if the network connection is lost and
12719 // there were two requests, the first may have triggered an error immediately, while
12720 // the second request remains unsent. In that case, the aborted algorithm will not
12721 // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
12722 //
12723 // We also can't rely on the ready state of the XHR, since the request that
12724 // triggered the connection error may also show as a ready state of 0 (unsent).
12725 // Therefore, we have to finish this group of requests immediately after the first
12726 // seen error.
12727
12728 return doneFn(error, segment);
12729 }
12730
12731 count += 1;
12732
12733 if (count === activeXhrs.length) {
12734 // Keep track of when *all* of the requests have completed
12735 segment.endOfAllRequests = Date.now();
12736
12737 if (segment.encryptedBytes) {
12738 return decryptSegment({
12739 decryptionWorker: decryptionWorker,
12740 segment: segment,
12741 trackInfoFn: trackInfoFn,
12742 timingInfoFn: timingInfoFn,
12743 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12744 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12745 id3Fn: id3Fn,
12746 captionsFn: captionsFn,
12747 isEndOfTimeline: isEndOfTimeline,
12748 endedTimelineFn: endedTimelineFn,
12749 dataFn: dataFn,
12750 doneFn: doneFn
12751 });
12752 } // Otherwise, everything is ready; just continue
12753
12754
12755 handleSegmentBytes({
12756 segment: segment,
12757 bytes: segment.bytes,
12758 isPartial: false,
12759 trackInfoFn: trackInfoFn,
12760 timingInfoFn: timingInfoFn,
12761 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12762 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12763 id3Fn: id3Fn,
12764 captionsFn: captionsFn,
12765 isEndOfTimeline: isEndOfTimeline,
12766 endedTimelineFn: endedTimelineFn,
12767 dataFn: dataFn,
12768 doneFn: doneFn
12769 });
12770 }
12771 };
12772};
12773/**
12774 * Calls the abort callback if any request within the batch was aborted. Will only call
12775 * the callback once per batch of requests, even if multiple were aborted.
12776 *
12777 * @param {Object} loadendState - state to check to see if the abort function was called
12778 * @param {Function} abortFn - callback to call for abort
12779 */
12780
12781
12782var handleLoadEnd = function handleLoadEnd(_ref7) {
12783 var loadendState = _ref7.loadendState,
12784 abortFn = _ref7.abortFn;
12785 return function (event) {
12786 var request = event.target;
12787
12788 if (request.aborted && abortFn && !loadendState.calledAbortFn) {
12789 abortFn();
12790 loadendState.calledAbortFn = true;
12791 }
12792 };
12793};
12794/**
12795 * Simple progress event callback handler that gathers some stats before
12796 * executing a provided callback with the `segment` object
12797 *
12798 * @param {Object} segment - a simplified copy of the segmentInfo object
12799 * from SegmentLoader
12800 * @param {Function} progressFn - a callback that is executed each time a progress event
12801 * is received
12802 * @param {Function} trackInfoFn - a callback that receives track info
12803 * @param {Function} timingInfoFn - a callback that receives timing info
12804 * @param {Function} videoSegmentTimingInfoFn
12805 * a callback that receives video timing info based on media times and
12806 * any adjustments made by the transmuxer
12807 * @param {Function} audioSegmentTimingInfoFn
12808 * a callback that receives audio timing info based on media times and
12809 * any adjustments made by the transmuxer
12810 * @param {boolean} isEndOfTimeline
12811 * true if this segment represents the last segment in a timeline
12812 * @param {Function} endedTimelineFn
12813 * a callback made when a timeline is ended; it will only be called if
12814 * isEndOfTimeline is true
12815 * @param {Function} dataFn - a callback that is executed when segment bytes are available
12816 * and ready to use
12817 * @param {Event} event - the progress event object from XMLHttpRequest
12818 */
12819
12820
12821var handleProgress = function handleProgress(_ref8) {
12822 var segment = _ref8.segment,
12823 progressFn = _ref8.progressFn,
12824 trackInfoFn = _ref8.trackInfoFn,
12825 timingInfoFn = _ref8.timingInfoFn,
12826 videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
12827 audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
12828 id3Fn = _ref8.id3Fn,
12829 captionsFn = _ref8.captionsFn,
12830 isEndOfTimeline = _ref8.isEndOfTimeline,
12831 endedTimelineFn = _ref8.endedTimelineFn,
12832 dataFn = _ref8.dataFn,
12833 handlePartialData = _ref8.handlePartialData;
12834 return function (event) {
12835 var request = event.target;
12836
12837 if (request.aborted) {
12838 return;
12839 } // don't support encrypted segments or fmp4 for now
12840
12841
12842 if (handlePartialData && !segment.key && // although responseText "should" exist, this guard serves to prevent an error being
12843 // thrown on the next check for two primary cases:
12844 // 1. the mime type override stops working, or is not implemented for a specific
12845 // browser
12846 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
12847 request.responseText && // in order to determine if it's an fmp4 we need at least 8 bytes
12848 request.responseText.length >= 8) {
12849 var newBytes = stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
12850
12851 if (segment.lastReachedChar || !containers.isLikelyFmp4MediaSegment(new Uint8Array(newBytes))) {
12852 segment.lastReachedChar = request.responseText.length;
12853 handleSegmentBytes({
12854 segment: segment,
12855 bytes: newBytes,
12856 isPartial: true,
12857 trackInfoFn: trackInfoFn,
12858 timingInfoFn: timingInfoFn,
12859 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12860 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12861 id3Fn: id3Fn,
12862 captionsFn: captionsFn,
12863 isEndOfTimeline: isEndOfTimeline,
12864 endedTimelineFn: endedTimelineFn,
12865 dataFn: dataFn
12866 });
12867 }
12868 }
12869
12870 segment.stats = videojs__default['default'].mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
12871
12872 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
12873 segment.stats.firstBytesReceivedAt = Date.now();
12874 }
12875
12876 return progressFn(event, segment);
12877 };
12878};
12879/**
12880 * Load all resources and do any processing necessary for a media segment
12881 *
12882 * Features:
12883 * decrypts the media-segment if it has a key uri and an iv
12884 * aborts *all* requests if *any* one request fails
12885 *
12886 * The segment object, at minimum, has the following format:
12887 * {
12888 * resolvedUri: String,
12889 * [transmuxer]: Object,
12890 * [byterange]: {
12891 * offset: Number,
12892 * length: Number
12893 * },
12894 * [key]: {
12895 * resolvedUri: String
12896 * [byterange]: {
12897 * offset: Number,
12898 * length: Number
12899 * },
12900 * iv: {
12901 * bytes: Uint32Array
12902 * }
12903 * },
12904 * [map]: {
12905 * resolvedUri: String,
12906 * [byterange]: {
12907 * offset: Number,
12908 * length: Number
12909 * },
12910 * [bytes]: Uint8Array
12911 * }
12912 * }
12913 * ...where [name] denotes optional properties
12914 *
12915 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
12916 * @param {Object} xhrOptions - the base options to provide to all xhr requests
12917 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
12918 * decryption routines
12919 * @param {Object} segment - a simplified copy of the segmentInfo object
12920 * from SegmentLoader
12921 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
12922 * aborted
12923 * @param {Function} progressFn - a callback that receives progress events from the main
12924 * segment's xhr request
12925 * @param {Function} trackInfoFn - a callback that receives track info
12926 * @param {Function} timingInfoFn - a callback that receives timing info
12927 * @param {Function} videoSegmentTimingInfoFn
12928 * a callback that receives video timing info based on media times and
12929 * any adjustments made by the transmuxer
12930 * @param {Function} audioSegmentTimingInfoFn
12931 * a callback that receives audio timing info based on media times and
12932 * any adjustments made by the transmuxer
12933 * @param {Function} id3Fn - a callback that receives ID3 metadata
12934 * @param {Function} captionsFn - a callback that receives captions
12935 * @param {boolean} isEndOfTimeline
12936 * true if this segment represents the last segment in a timeline
12937 * @param {Function} endedTimelineFn
12938 * a callback made when a timeline is ended; it will only be called if
12939 * isEndOfTimeline is true
12940 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
12941 * request, transmuxed if needed
12942 * @param {Function} doneFn - a callback that is executed only once all requests have
12943 * succeeded or failed
12944 * @return {Function} a function that, when invoked, immediately aborts all
12945 * outstanding requests
12946 */
12947
12948
12949var mediaSegmentRequest = function mediaSegmentRequest(_ref9) {
12950 var xhr = _ref9.xhr,
12951 xhrOptions = _ref9.xhrOptions,
12952 decryptionWorker = _ref9.decryptionWorker,
12953 segment = _ref9.segment,
12954 abortFn = _ref9.abortFn,
12955 progressFn = _ref9.progressFn,
12956 trackInfoFn = _ref9.trackInfoFn,
12957 timingInfoFn = _ref9.timingInfoFn,
12958 videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
12959 audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
12960 id3Fn = _ref9.id3Fn,
12961 captionsFn = _ref9.captionsFn,
12962 isEndOfTimeline = _ref9.isEndOfTimeline,
12963 endedTimelineFn = _ref9.endedTimelineFn,
12964 dataFn = _ref9.dataFn,
12965 doneFn = _ref9.doneFn,
12966 handlePartialData = _ref9.handlePartialData;
12967 var activeXhrs = [];
12968 var finishProcessingFn = waitForCompletion({
12969 activeXhrs: activeXhrs,
12970 decryptionWorker: decryptionWorker,
12971 trackInfoFn: trackInfoFn,
12972 timingInfoFn: timingInfoFn,
12973 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12974 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12975 id3Fn: id3Fn,
12976 captionsFn: captionsFn,
12977 isEndOfTimeline: isEndOfTimeline,
12978 endedTimelineFn: endedTimelineFn,
12979 dataFn: dataFn,
12980 doneFn: doneFn
12981 }); // optionally, request the decryption key
12982
12983 if (segment.key && !segment.key.bytes) {
12984 var keyRequestOptions = videojs__default['default'].mergeOptions(xhrOptions, {
12985 uri: segment.key.resolvedUri,
12986 responseType: 'arraybuffer'
12987 });
12988 var keyRequestCallback = handleKeyResponse(segment, finishProcessingFn);
12989 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
12990 activeXhrs.push(keyXhr);
12991 } // optionally, request the associated media init segment
12992
12993
12994 if (segment.map && !segment.map.bytes) {
12995 var initSegmentOptions = videojs__default['default'].mergeOptions(xhrOptions, {
12996 uri: segment.map.resolvedUri,
12997 responseType: 'arraybuffer',
12998 headers: segmentXhrHeaders(segment.map)
12999 });
13000 var initSegmentRequestCallback = handleInitSegmentResponse({
13001 segment: segment,
13002 finishProcessingFn: finishProcessingFn
13003 });
13004 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
13005 activeXhrs.push(initSegmentXhr);
13006 }
13007
13008 var segmentRequestOptions = videojs__default['default'].mergeOptions(xhrOptions, {
13009 uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
13010 responseType: 'arraybuffer',
13011 headers: segmentXhrHeaders(segment)
13012 });
13013
13014 if (handlePartialData) {
13015 // setting to text is required for partial responses
13016 // conversion to ArrayBuffer happens later
13017 segmentRequestOptions.responseType = 'text';
13018
13019 segmentRequestOptions.beforeSend = function (xhrObject) {
13020 // XHR binary charset opt by Marcus Granado 2006 [http://mgran.blogspot.com]
13021 // makes the browser pass through the "text" unparsed
13022 xhrObject.overrideMimeType('text/plain; charset=x-user-defined');
13023 };
13024 }
13025
13026 var segmentRequestCallback = handleSegmentResponse({
13027 segment: segment,
13028 finishProcessingFn: finishProcessingFn,
13029 responseType: segmentRequestOptions.responseType
13030 });
13031 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
13032 segmentXhr.addEventListener('progress', handleProgress({
13033 segment: segment,
13034 progressFn: progressFn,
13035 trackInfoFn: trackInfoFn,
13036 timingInfoFn: timingInfoFn,
13037 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
13038 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
13039 id3Fn: id3Fn,
13040 captionsFn: captionsFn,
13041 isEndOfTimeline: isEndOfTimeline,
13042 endedTimelineFn: endedTimelineFn,
13043 dataFn: dataFn,
13044 handlePartialData: handlePartialData
13045 }));
13046 activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but callbacks should not be
13047 // made multiple times, provide a shared state object
13048
13049 var loadendState = {};
13050 activeXhrs.forEach(function (activeXhr) {
13051 activeXhr.addEventListener('loadend', handleLoadEnd({
13052 loadendState: loadendState,
13053 abortFn: abortFn
13054 }));
13055 });
13056 return function () {
13057 return abortAll(activeXhrs);
13058 };
13059};
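/**
 * Editorial usage sketch (hypothetical segment, worker and callbacks; `xhr`
 * is the wrapper from xhr.js):
 *
 * @example
 * var abort = mediaSegmentRequest({
 *   xhr: xhr,
 *   xhrOptions: { timeout: 45000 },
 *   decryptionWorker: decryptionWorker,
 *   segment: { resolvedUri: 'https://example.com/0.ts', transmuxer: transmuxer },
 *   abortFn: function() {},
 *   progressFn: function(event, segment) {},
 *   trackInfoFn: function(segment, trackInfo) {},
 *   timingInfoFn: function(segment, mediaType, timeType, time) {},
 *   dataFn: function(segment, result) {},
 *   doneFn: function(error, segment, result) {}
 * });
 * // later, to cancel all in-flight requests for this segment:
 * abort();
 */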
13060
13061/**
13062 * @file - codecs.js - Handles tasks regarding codec strings, such as translating
13063 * them to standard codec strings, or translating codec strings into objects that can be examined.
13064 */
13065var logFn$1 = logger('CodecUtils');
13066/**
13067 * Returns a set of codec strings parsed from the playlist or the default
13068 * codec strings if no codecs were specified in the playlist
13069 *
13070 * @param {Playlist} media the current media playlist
13071 * @return {Object} an object with the video and audio codecs
13072 */
13073
13074var getCodecs = function getCodecs(media) {
13075 // if the codecs were explicitly specified, use them instead of the
13076 // defaults
13077 var mediaAttributes = media.attributes || {};
13078
13079 if (mediaAttributes.CODECS) {
13080 return codecs_js.parseCodecs(mediaAttributes.CODECS);
13081 }
13082};
13083
13084var isMaat = function isMaat(master, media) {
13085 var mediaAttributes = media.attributes || {};
13086 return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
13087};
13088var isMuxed = function isMuxed(master, media) {
13089 if (!isMaat(master, media)) {
13090 return true;
13091 }
13092
13093 var mediaAttributes = media.attributes || {};
13094 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
13095
13096 for (var groupId in audioGroup) {
13097 // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
13098 // or there are listed playlists (the case for DASH, as the manifest will have already
13099 // provided all of the details necessary to generate the audio playlist, as opposed to
13100 // HLS' externally requested playlists), then the content is demuxed.
13101 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
13102 return true;
13103 }
13104 }
13105
13106 return false;
13107};
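/**
 * Editorial sketch (hypothetical manifests): for HLS, an audio group entry
 * carries a uri to an external playlist; for DASH, it carries inline
 * playlists. In either case isMuxed returns false (demuxed audio). Only when
 * some group entry has neither does the content count as muxed.
 */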
13108var unwrapCodecList = function unwrapCodecList(codecList) {
13109 var codecs = {};
13110 codecList.forEach(function (_ref) {
13111 var mediaType = _ref.mediaType,
13112 type = _ref.type,
13113 details = _ref.details;
13114 codecs[mediaType] = codecs[mediaType] || [];
13115 codecs[mediaType].push(codecs_js.translateLegacyCodec("" + type + details));
13116 });
13117 Object.keys(codecs).forEach(function (mediaType) {
13118 if (codecs[mediaType].length > 1) {
13119 logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
13120 codecs[mediaType] = null;
13121 return;
13122 }
13123
13124 codecs[mediaType] = codecs[mediaType][0];
13125 });
13126 return codecs;
13127};
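/**
 * Editorial usage sketch (hand-built codec list; parseCodecs normally
 * produces entries of this shape):
 *
 * @example
 * unwrapCodecList([
 *   { mediaType: 'video', type: 'avc1', details: '.64001f' },
 *   { mediaType: 'audio', type: 'mp4a', details: '.40.2' }
 * ]);
 * // => { video: 'avc1.64001f', audio: 'mp4a.40.2' }
 */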
13128var codecCount = function codecCount(codecObj) {
13129 var count = 0;
13130
13131 if (codecObj.audio) {
13132 count++;
13133 }
13134
13135 if (codecObj.video) {
13136 count++;
13137 }
13138
13139 return count;
13140};
13141/**
13142 * Calculates the codec strings for a working configuration of
13143 * SourceBuffers to play variant streams in a master playlist. If
13144 * there is no possible working configuration, an empty object will be
13145 * returned.
13146 *
13147 * @param {Object} master the m3u8 object for the master playlist
13148 * @param {Object} media the m3u8 object for the variant playlist
13149 * @return {Object} the codec strings.
13150 *
13151 * @private
13152 */
13153
13154var codecsForPlaylist = function codecsForPlaylist(master, media) {
13155 var mediaAttributes = media.attributes || {};
13156 var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
13157 // Put another way, there is no way to have a video-only multiple-audio HLS!
13158
13159 if (isMaat(master, media) && !codecInfo.audio) {
13160 if (!isMuxed(master, media)) {
13161 // It is possible for codecs to be specified on the audio media group playlist but
13162 // not on the rendition playlist. This is mostly the case for DASH, where audio and
13163 // video are always separate (and separately specified).
13164 var defaultCodecs = unwrapCodecList(codecs_js.codecsFromDefault(master, mediaAttributes.AUDIO) || []);
13165
13166 if (defaultCodecs.audio) {
13167 codecInfo.audio = defaultCodecs.audio;
13168 }
13169 }
13170 }
13171
13172 return codecInfo;
13173};
13174
13175var logFn = logger('PlaylistSelector');
13176
13177var representationToString = function representationToString(representation) {
13178 if (!representation || !representation.playlist) {
13179 return;
13180 }
13181
13182 var playlist = representation.playlist;
13183 return JSON.stringify({
13184 id: playlist.id,
13185 bandwidth: representation.bandwidth,
13186 width: representation.width,
13187 height: representation.height,
13188 codecs: playlist.attributes && playlist.attributes.CODECS || ''
13189 });
13190}; // Utilities
13191
13192/**
13193 * Returns the CSS value for the specified property on an element
13194 * using `getComputedStyle`. Firefox has a long-standing issue where
13195 * getComputedStyle() may return null when running in an iframe with
13196 * `display: none`.
13197 *
13198 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
13199 * @param {HTMLElement} el the HTMLElement to work on
13200 * @param {string} property the property to get the style for
13201 */
13202
13203
13204var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
13205 if (!el) {
13206 return '';
13207 }
13208
13209 var result = window__default['default'].getComputedStyle(el);
13210
13211 if (!result) {
13212 return '';
13213 }
13214
13215 return result[property];
13216};
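/**
 * Editorial usage sketch (hypothetical element):
 *
 * @example
 * safeGetComputedStyle(player.el(), 'width'); // e.g. '640px', or '' when unavailable
 */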
13217/**
13218 * Reusable stable sort function
13219 *
13220 * @param {Playlists} array
13221 * @param {Function} sortFn Different comparators
13222 * @function stableSort
13223 */
13224
13225
13226var stableSort = function stableSort(array, sortFn) {
13227 var newArray = array.slice();
13228 array.sort(function (left, right) {
13229 var cmp = sortFn(left, right);
13230
13231 if (cmp === 0) {
13232 return newArray.indexOf(left) - newArray.indexOf(right);
13233 }
13234
13235 return cmp;
13236 });
13237};
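/**
 * Editorial sketch: ties are broken by original index via the pre-sort copy,
 * making the sort stable even on engines where Array.prototype.sort is not.
 *
 * @example
 * var reps = [{ bandwidth: 1e6, id: 'a' }, { bandwidth: 1e6, id: 'b' }];
 * stableSort(reps, function(left, right) { return left.bandwidth - right.bandwidth; });
 * // reps stays in a-then-b order because the comparator returns 0 for the pair
 */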
13238/**
13239 * A comparator function to sort two playlist object by bandwidth.
13240 *
13241 * @param {Object} left a media playlist object
13242 * @param {Object} right a media playlist object
13243 * @return {number} Greater than zero if the bandwidth attribute of
13244 * left is greater than the corresponding attribute of right. Less
13245 * than zero if the bandwidth of right is greater than left and
13246 * exactly zero if the two are equal.
13247 */
13248
13249
13250var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
13251 var leftBandwidth;
13252 var rightBandwidth;
13253
13254 if (left.attributes.BANDWIDTH) {
13255 leftBandwidth = left.attributes.BANDWIDTH;
13256 }
13257
13258 leftBandwidth = leftBandwidth || window__default['default'].Number.MAX_VALUE;
13259
13260 if (right.attributes.BANDWIDTH) {
13261 rightBandwidth = right.attributes.BANDWIDTH;
13262 }
13263
13264 rightBandwidth = rightBandwidth || window__default['default'].Number.MAX_VALUE;
13265 return leftBandwidth - rightBandwidth;
13266};
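/**
 * Editorial sketch (hypothetical playlists): a missing BANDWIDTH attribute is
 * treated as Number.MAX_VALUE, so such playlists sort last.
 *
 * @example
 * comparePlaylistBandwidth(
 *   { attributes: { BANDWIDTH: 300000 } },
 *   { attributes: {} }
 * ); // => large negative number; the 300kbps playlist sorts first
 */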
13267/**
13268 * A comparator function to sort two playlist object by resolution (width).
13269 *
13270 * @param {Object} left a media playlist object
13271 * @param {Object} right a media playlist object
13272 * @return {number} Greater than zero if the resolution.width attribute of
13273 * left is greater than the corresponding attribute of right. Less
13274 * than zero if the resolution.width of right is greater than left and
13275 * exactly zero if the two are equal.
13276 */
13277
13278var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
13279 var leftWidth;
13280 var rightWidth;
13281
13282 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
13283 leftWidth = left.attributes.RESOLUTION.width;
13284 }
13285
13286 leftWidth = leftWidth || window__default['default'].Number.MAX_VALUE;
13287
13288 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
13289 rightWidth = right.attributes.RESOLUTION.width;
13290 }
13291
13292 rightWidth = rightWidth || window__default['default'].Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
13293 // have the same media dimensions/resolution
13294
13295 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
13296 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
13297 }
13298
13299 return leftWidth - rightWidth;
13300};
13301/**
13302 * Chooses the appropriate media playlist based on bandwidth and player size
13303 *
13304 * @param {Object} master
13305 * Object representation of the master manifest
13306 * @param {number} playerBandwidth
13307 * Current calculated bandwidth of the player
13308 * @param {number} playerWidth
13309 * Current width of the player element (should account for the device pixel ratio)
13310 * @param {number} playerHeight
13311 * Current height of the player element (should account for the device pixel ratio)
13312 * @param {boolean} limitRenditionByPlayerDimensions
13313 * True if the player width and height should be used during the selection, false otherwise
13314 * @param {Object} masterPlaylistController
13315 * the current masterPlaylistController object
13316 * @return {Playlist} the highest bitrate playlist less than the
13317 * currently detected bandwidth, accounting for some amount of
13318 * bandwidth variance
13319 */
13320
13321var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
13322 // If we end up getting called before `master` is available, exit early
13323 if (!master) {
13324 return;
13325 }
13326
13327 var options = {
13328 bandwidth: playerBandwidth,
13329 width: playerWidth,
13330 height: playerHeight,
13331 limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
13332 };
13333 var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.
13334
13335 if (Playlist.isAudioOnly(master)) {
13336 playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
13337 // at the bottom of this function for debugging.
13338
13339 options.audioOnly = true;
13340 } // convert the playlists to an intermediary representation to make comparisons easier
13341
13342
13343 var sortedPlaylistReps = playlists.map(function (playlist) {
13344 var bandwidth;
13345 var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
13346 var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
13347 bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
13348 bandwidth = bandwidth || window__default['default'].Number.MAX_VALUE;
13349 return {
13350 bandwidth: bandwidth,
13351 width: width,
13352 height: height,
13353 playlist: playlist
13354 };
13355 });
13356 stableSort(sortedPlaylistReps, function (left, right) {
13357 return left.bandwidth - right.bandwidth;
13358 }); // filter out any playlists that have been excluded due to
13359 // incompatible configurations
13360
13361 sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
13362 return !Playlist.isIncompatible(rep.playlist);
13363 }); // filter out any playlists that have been disabled manually through the representations
13364 // api or blacklisted temporarily due to playback errors.
13365
13366 var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
13367 return Playlist.isEnabled(rep.playlist);
13368 });
13369
13370 if (!enabledPlaylistReps.length) {
13371 // if there are no enabled playlists, then they have all been blacklisted or disabled
13372 // by the user through the representations api. In this case, ignore blacklisting and
13373 // fallback to what the user wants by using playlists the user has not disabled.
13374 enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
13375 return !Playlist.isDisabled(rep.playlist);
13376 });
13377 } // filter out any variant that has greater effective bitrate
13378 // than the current estimated bandwidth
13379
13380
13381 var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
13382 return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
13383 });
13384 var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
13385 // and then take the very first element
13386
13387 var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
13388 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
13389 })[0]; // if we're not going to limit renditions by player size, make an early decision.
13390
13391 if (limitRenditionByPlayerDimensions === false) {
13392 var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
13393
13394 if (_chosenRep && _chosenRep.playlist) {
13395 var type = 'sortedPlaylistReps';
13396
13397 if (bandwidthBestRep) {
13398 type = 'bandwidthBestRep';
13399 }
13400
13401 if (enabledPlaylistReps[0]) {
13402 type = 'enabledPlaylistReps';
13403 }
13404
13405 logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
13406 return _chosenRep.playlist;
13407 }
13408
13409 logFn('could not choose a playlist with options', options);
13410 return null;
13411 } // filter out playlists without resolution information
13412
13413
13414 var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
13415 return rep.width && rep.height;
13416 }); // sort variants by resolution
13417
13418 stableSort(haveResolution, function (left, right) {
13419 return left.width - right.width;
13420 }); // if we have the exact resolution as the player, use it
13421
13422 var resolutionBestRepList = haveResolution.filter(function (rep) {
13423 return rep.width === playerWidth && rep.height === playerHeight;
13424 });
13425 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
13426
13427 var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
13428 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
13429 })[0];
13430 var resolutionPlusOneList;
13431 var resolutionPlusOneSmallest;
13432 var resolutionPlusOneRep; // find the smallest variant that is larger than the player
13433 // if there is no match of exact resolution
13434
13435 if (!resolutionBestRep) {
13436 resolutionPlusOneList = haveResolution.filter(function (rep) {
13437 return rep.width > playerWidth || rep.height > playerHeight;
13438 }); // find all the variants that have the same smallest resolution
13439
13440 resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
13441 return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
13442 }); // ensure that we also pick the highest bandwidth variant that
13443 // is just-larger-than the video player
13444
13445 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
13446 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
13447 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
13448 })[0];
13449 } // fallback chain of variants
13450
13451
13452 var chosenRep = resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
13453
13454 if (chosenRep && chosenRep.playlist) {
13455 var _type = 'sortedPlaylistReps';
13456
13457 if (resolutionPlusOneRep) {
13458 _type = 'resolutionPlusOneRep';
13459 } else if (resolutionBestRep) {
13460 _type = 'resolutionBestRep';
13461 } else if (bandwidthBestRep) {
13462 _type = 'bandwidthBestRep';
13463 } else if (enabledPlaylistReps[0]) {
13464 _type = 'enabledPlaylistReps';
13465 }
13466
13467 logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
13468 return chosenRep.playlist;
13469 }
13470
13471 logFn('could not choose a playlist with options', options);
13472 return null;
13473}; // Playlist Selectors
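/**
 * Editorial note (selection order sketch): when limiting by player
 * dimensions, the fallback chain above is resolutionPlusOneRep ->
 * resolutionBestRep -> bandwidthBestRep -> first enabled playlist -> first
 * sorted playlist.
 */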
13474
13475/**
13476 * Chooses the appropriate media playlist based on the most recent
13477 * bandwidth estimate and the player size.
13478 *
13479 * Expects to be called within the context of an instance of VhsHandler
13480 *
13481 * @return {Playlist} the highest bitrate playlist less than the
13482 * currently detected bandwidth, accounting for some amount of
13483 * bandwidth variance
13484 */
13485
13486var lastBandwidthSelector = function lastBandwidthSelector() {
13487 var pixelRatio = this.useDevicePixelRatio ? window__default['default'].devicePixelRatio || 1 : 1;
13488 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
13489};
13490/**
13491 * Chooses the appropriate media playlist based on an
13492 * exponential-weighted moving average of the bandwidth after
13493 * filtering for player size.
13494 *
13495 * Expects to be called within the context of an instance of VhsHandler
13496 *
13497 * @param {number} decay - a number between 0 and 1. Higher values of
13498 * this parameter will cause previous bandwidth estimates to lose
13499 * significance more quickly.
13500 * @return {Function} a playlist selector function which uses the
13501 * exponentially-weighted moving average of the bandwidth.
13502 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
13503 */
13504
13505var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
13506 var average = -1;
13507
13508 if (decay < 0 || decay > 1) {
13509 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
13510 }
13511
13512 return function () {
13513 var pixelRatio = this.useDevicePixelRatio ? window__default['default'].devicePixelRatio || 1 : 1;
13514
13515 if (average < 0) {
13516 average = this.systemBandwidth;
13517 }
13518
13519 average = decay * this.systemBandwidth + (1 - decay) * average;
13520 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
13521 };
13522};
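/**
 * Editorial worked example (hypothetical numbers, decay = 0.5): with a prior
 * average of 4e6 and a new systemBandwidth measurement of 2e6,
 * average = 0.5 * 2e6 + (1 - 0.5) * 4e6 = 3e6 bits per second.
 */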
13523/**
13524 * Chooses the appropriate media playlist based on the potential to rebuffer
13525 *
13526 * @param {Object} settings
13527 * Object of information required to use this selector
13528 * @param {Object} settings.master
13529 * Object representation of the master manifest
13530 * @param {number} settings.currentTime
13531 * The current time of the player
13532 * @param {number} settings.bandwidth
13533 * Current measured bandwidth
13534 * @param {number} settings.duration
13535 * Duration of the media
13536 * @param {number} settings.segmentDuration
13537 * Segment duration to be used in round trip time calculations
13538 * @param {number} settings.timeUntilRebuffer
13539 * Time left in seconds until the player has to rebuffer
13540 * @param {number} settings.currentTimeline
13541 * The current timeline segments are being loaded from
13542 * @param {SyncController} settings.syncController
13543 * SyncController for determining if we have a sync point for a given playlist
13544 * @return {Object|null}
13545 * {Object} return.playlist
13546 * The highest bandwidth playlist with the least amount of rebuffering
13547 * {Number} return.rebufferingImpact
13548 * The estimated amount of time, in seconds, that switching to this playlist will
13549 * rebuffer. A negative value means that switching will cause no rebuffering.
13550 */
13551
13552var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
13553 var master = settings.master,
13554 currentTime = settings.currentTime,
13555 bandwidth = settings.bandwidth,
13556 duration = settings.duration,
13557 segmentDuration = settings.segmentDuration,
13558 timeUntilRebuffer = settings.timeUntilRebuffer,
13559 currentTimeline = settings.currentTimeline,
13560 syncController = settings.syncController; // filter out any playlists that have been excluded due to
13561 // incompatible configurations
13562
13563 var compatiblePlaylists = master.playlists.filter(function (playlist) {
13564 return !Playlist.isIncompatible(playlist);
13565 }); // filter out any playlists that have been disabled manually through the representations
13566 // api or blacklisted temporarily due to playback errors.
13567
13568 var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
13569
13570 if (!enabledPlaylists.length) {
13571 // if there are no enabled playlists, then they have all been blacklisted or disabled
13572 // by the user through the representations api. In this case, ignore blacklisting and
13573 // fall back to what the user wants by using playlists the user has not disabled.
13574 enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
13575 return !Playlist.isDisabled(playlist);
13576 });
13577 }
13578
13579 var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
13580 var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
13581 var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
13582 // sync request first. This will double the request time
13583
13584 var numRequests = syncPoint ? 1 : 2;
13585 var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
13586 var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
13587 return {
13588 playlist: playlist,
13589 rebufferingImpact: rebufferingImpact
13590 };
13591 });
13592 var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
13593 return estimate.rebufferingImpact <= 0;
13594 }); // Sort by bandwidth DESC
13595
13596 stableSort(noRebufferingPlaylists, function (a, b) {
13597 return comparePlaylistBandwidth(b.playlist, a.playlist);
13598 });
13599
13600 if (noRebufferingPlaylists.length) {
13601 return noRebufferingPlaylists[0];
13602 }
13603
13604 stableSort(rebufferingEstimates, function (a, b) {
13605 return a.rebufferingImpact - b.rebufferingImpact;
13606 });
13607 return rebufferingEstimates[0] || null;
13608};
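// Illustrative sketch (not part of the library): the rebufferingImpact
// arithmetic above, extracted as a pure helper with hypothetical numbers.
// With a 4s request-time estimate and 6s left until rebuffer, a playlist
// without a sync point needs two requests (sync request + segment), so
// switching to it is expected to stall for 4 * 2 - 6 = 2 seconds, while a
// playlist with a sync point finishes with 2 seconds to spare (impact -2).

var exampleRebufferingImpact = function exampleRebufferingImpact(requestTimeEstimate, hasSyncPoint, timeUntilRebuffer) {
  // no sync point means a sync request must happen first, doubling the cost
  var numRequests = hasSyncPoint ? 1 : 2;
  return requestTimeEstimate * numRequests - timeUntilRebuffer;
}; // exampleRebufferingImpact(4, false, 6) === 2; exampleRebufferingImpact(4, true, 6) === -2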
13609/**
13610 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
13611 * one with video. If no renditions with video exist, return the lowest audio rendition.
13612 *
13613 * Expects to be called within the context of an instance of VhsHandler
13614 *
13615 * @return {Object|null}
13616 * {Object} return.playlist
13617 * The lowest bitrate playlist that contains a video codec. If no such rendition
13618 * exists pick the lowest audio rendition.
13619 */
13620
13621var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
13622 var _this = this;
13623
13624 // filter out any playlists that have been excluded due to
13625 // incompatible configurations or playback errors
13626 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
13627
13628 stableSort(playlists, function (a, b) {
13629 return comparePlaylistBandwidth(a, b);
13630 }); // Parse the codec information and assume that playlists with no video codec
13631 // have no video (not necessarily true, but generally true).
13632 //
13633 // If an entire manifest has no valid videos everything will get filtered
13634 // out.
13635
13636 var playlistsWithVideo = playlists.filter(function (playlist) {
13637 return !!codecsForPlaylist(_this.playlists.master, playlist).video;
13638 });
13639 return playlistsWithVideo[0] || null;
13640};
13641
13642/**
13643 * @file text-tracks.js
13644 */
13645/**
13646 * Create captions text tracks on video.js if they do not exist
13647 *
13648 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
13649 * @param {Object} tech the video.js tech
13650 * @param {Object} captionStream the caption stream to create
13651 * @private
13652 */
13653
13654var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
13655 if (!inbandTextTracks[captionStream]) {
13656 tech.trigger({
13657 type: 'usage',
13658 name: 'vhs-608'
13659 });
13660 tech.trigger({
13661 type: 'usage',
13662 name: 'hls-608'
13663 });
13664 var track = tech.textTracks().getTrackById(captionStream);
13665
13666 if (track) {
13667 // Reuse an existing track with a CC# id because this was
13668 // very likely created by videojs-contrib-hls from information
13669 // in the m3u8 for us to use
13670 inbandTextTracks[captionStream] = track;
13671 } else {
13672 // Otherwise, create a track with the default `CC#` label and
13673 // without a language
13674 inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
13675 kind: 'captions',
13676 id: captionStream,
13677 label: captionStream
13678 }, false).track;
13679 }
13680 }
13681};
13682/**
13683 * Add caption text track data to a source handler given an array of captions
13684 *
13685 * @param {Object} options the options object, destructured below
13686 * @param {Object} inbandTextTracks the inband text tracks
13687 * @param {number} timestampOffset the timestamp offset of the source buffer
13688 * @param {Array} captionArray an array of caption data
13689 * @private
13690 */
13691
13692var addCaptionData = function addCaptionData(_ref) {
13693 var inbandTextTracks = _ref.inbandTextTracks,
13694 captionArray = _ref.captionArray,
13695 timestampOffset = _ref.timestampOffset;
13696
13697 if (!captionArray) {
13698 return;
13699 }
13700
13701 var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
13702 captionArray.forEach(function (caption) {
13703 var track = caption.stream;
13704 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
13705 });
13706};
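// Illustrative sketch (not part of the library): the caption entry shape
// that addCaptionData consumes, inferred from the property accesses above.
// The values are hypothetical; `stream` must match a key that was registered
// on inbandTextTracks (e.g. by createCaptionsTrackIfNotExists).

var exampleCaption = {
  stream: 'CC1', // 608 channel id, used to look up the text track
  startTime: 1.5, // seconds, before timestampOffset is added
  endTime: 3.25, // seconds, before timestampOffset is added
  text: 'sample caption text'
};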
13707/**
13708 * Define properties on a cue for backwards compatibility,
13709 * but warn the user that the way that they are using it
13710 * is deprecated and will be removed at a later date.
13711 *
13712 * @param {Cue} cue the cue to add the properties on
13713 * @private
13714 */
13715
13716var deprecateOldCue = function deprecateOldCue(cue) {
13717 Object.defineProperties(cue.frame, {
13718 id: {
13719 get: function get() {
13720 videojs__default['default'].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
13721 return cue.value.key;
13722 }
13723 },
13724 value: {
13725 get: function get() {
13726 videojs__default['default'].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
13727 return cue.value.data;
13728 }
13729 },
13730 privateData: {
13731 get: function get() {
13732 videojs__default['default'].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
13733 return cue.value.data;
13734 }
13735 }
13736 });
13737};
13738/**
13739 * Add metadata text track data to a source handler given an array of metadata
13740 *
13741 * @param {Object} options the options object, destructured below
13742 * @param {Object} inbandTextTracks the inband text tracks
13743 * @param {Array} metadataArray an array of meta data
13744 * @param {number} timestampOffset the timestamp offset of the source buffer
13745 * @param {number} videoDuration the duration of the video
13746 * @private
13747 */
13748
13749
13750var addMetadata = function addMetadata(_ref2) {
13751 var inbandTextTracks = _ref2.inbandTextTracks,
13752 metadataArray = _ref2.metadataArray,
13753 timestampOffset = _ref2.timestampOffset,
13754 videoDuration = _ref2.videoDuration;
13755
13756 if (!metadataArray) {
13757 return;
13758 }
13759
13760 var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
13761 var metadataTrack = inbandTextTracks.metadataTrack_;
13762
13763 if (!metadataTrack) {
13764 return;
13765 }
13766
13767 metadataArray.forEach(function (metadata) {
13768 var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
13769 // ignore this bit of metadata.
13770 // This likely occurs when you have a non-timed ID3 tag like TIT2,
13771 // which is the "Title/Songname/Content description" frame
13772
13773 if (typeof time !== 'number' || window__default['default'].isNaN(time) || time < 0 || !(time < Infinity)) {
13774 return;
13775 }
13776
13777 metadata.frames.forEach(function (frame) {
13778 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
13779 cue.frame = frame;
13780 cue.value = frame;
13781 deprecateOldCue(cue);
13782 metadataTrack.addCue(cue);
13783 });
13784 });
13785
13786 if (!metadataTrack.cues || !metadataTrack.cues.length) {
13787 return;
13788 } // Update the metadata cues so that
13789 // the endTime of each cue is the startTime of the next cue
13790 // the endTime of last cue is the duration of the video
13791
13792
13793 var cues = metadataTrack.cues;
13794 var cuesArray = []; // Create a copy of the TextTrackCueList...
13795 // ...disregarding cues with a falsey value
13796
13797 for (var i = 0; i < cues.length; i++) {
13798 if (cues[i]) {
13799 cuesArray.push(cues[i]);
13800 }
13801 } // Group cues by their startTime value
13802
13803
13804 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
13805 var timeSlot = obj[cue.startTime] || [];
13806 timeSlot.push(cue);
13807 obj[cue.startTime] = timeSlot;
13808 return obj;
13809 }, {}); // Sort startTimes by ascending order
13810
13811 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
13812 return Number(a) - Number(b);
13813 }); // Map each cue group's endTime to the next group's startTime
13814
13815 sortedStartTimes.forEach(function (startTime, idx) {
13816 var cueGroup = cuesGroupedByStartTime[startTime];
13817 var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime to the next group's startTime
13818
13819 cueGroup.forEach(function (cue) {
13820 cue.endTime = nextTime;
13821 });
13822 });
13823};
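// Illustrative sketch (not part of the library): the endTime chaining
// performed above, on hypothetical data. Given metadata cues starting at
// 0, 0 and 10 in a 30s video, the groups are {0: [a, b], 10: [c]}, so both
// cues at 0 get endTime 10 (the next group's startTime) and the cue at 10
// gets endTime 30 (no next group, so videoDuration is used). The unused
// helper below reproduces that mapping:

var exampleChainEndTimes = function exampleChainEndTimes(sortedStartTimes, videoDuration) {
  return sortedStartTimes.map(function (startTime, idx) {
    return {
      startTime: startTime,
      // same fallback as above: the next group's startTime, else the duration
      endTime: Number(sortedStartTimes[idx + 1]) || videoDuration
    };
  });
}; // exampleChainEndTimes([0, 10], 30) => [{startTime: 0, endTime: 10}, {startTime: 10, endTime: 30}]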
13824/**
13825 * Create metadata text track on video.js if it does not exist
13826 *
13827 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
13828 * @param {string} dispatchType the inband metadata track dispatch type
13829 * @param {Object} tech the video.js tech
13830 * @private
13831 */
13832
13833var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
13834 if (inbandTextTracks.metadataTrack_) {
13835 return;
13836 }
13837
13838 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
13839 kind: 'metadata',
13840 label: 'Timed Metadata'
13841 }, false).track;
13842 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
13843};
13844/**
13845 * Remove cues from a track on video.js.
13846 *
13847 * @param {Double} start start of where we should remove the cue
13848 * @param {Double} end end of where we should remove the cue
13849 * @param {Object} track the text track to remove the cues from
13850 * @private
13851 */
13852
13853var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
13854 var i;
13855 var cue;
13856
13857 if (!track) {
13858 return;
13859 }
13860
13861 if (!track.cues) {
13862 return;
13863 }
13864
13865 i = track.cues.length;
13866
13867 while (i--) {
13868 cue = track.cues[i]; // Remove any cue within the provided start and end time
13869
13870 if (cue.startTime >= start && cue.endTime <= end) {
13871 track.removeCue(cue);
13872 }
13873 }
13874};
13875/**
13876 * Remove duplicate cues from a track on video.js (a cue is considered a
13877 * duplicate if it has the same time interval and text as another)
13878 *
13879 * @param {Object} track the text track to remove the duplicate cues from
13880 * @private
13881 */
13882
13883var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
13884 var cues = track.cues;
13885
13886 if (!cues) {
13887 return;
13888 }
13889
13890 for (var i = 0; i < cues.length; i++) {
13891 var duplicates = [];
13892 var occurrences = 0;
13893
13894 for (var j = 0; j < cues.length; j++) {
13895 if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
13896 occurrences++;
13897
13898 if (occurrences > 1) {
13899 duplicates.push(cues[j]);
13900 }
13901 }
13902 }
13903
13904 if (duplicates.length) {
13905 duplicates.forEach(function (dupe) {
13906 return track.removeCue(dupe);
13907 });
13908 }
13909 }
13910};
13911
13912/**
13913 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
13914 * front of current time.
13915 *
13916 * @param {Array} buffer
13917 * The current buffer of gop information
13918 * @param {number} currentTime
13919 * The current time
13920 * @param {Double} mapping
13921 * Offset to map display time to stream presentation time
13922 * @return {Array}
13923 * List of gops considered safe to append over
13924 */
13925
13926var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
13927 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
13928 return [];
13929 } // pts value for current time + 3 seconds to give a bit more wiggle room
13930
13931
13932 var currentTimePts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);
13933 var i;
13934
13935 for (i = 0; i < buffer.length; i++) {
13936 if (buffer[i].pts > currentTimePts) {
13937 break;
13938 }
13939 }
13940
13941 return buffer.slice(i);
13942};
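// Illustrative sketch (not part of the library): the pts cutoff above with
// concrete numbers, using mux.js's 90kHz clock (clock.ONE_SECOND_IN_TS is
// 90000). With currentTime = 10 and mapping = 0:
//   currentTimePts = Math.ceil((10 - 0 + 3) * 90000) = 1170000
// so only gops whose pts exceeds 1170000 (i.e. display time past 13s) are
// kept as safe alignment candidates.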
13943/**
13944 * Appends gop information (timing and byteLength) received by the transmuxer for the
13945 * gops appended in the last call to appendBuffer
13946 *
13947 * @param {Array} buffer
13948 * The current buffer of gop information
13949 * @param {Array} gops
13950 * List of new gop information
13951 * @param {boolean} replace
13952 * If true, replace the buffer with the new gop information. If false, insert the
13953 * new gop information into the buffer at the correct position in time.
13954 * @return {Array}
13955 * Updated list of gop information
13956 */
13957
13958var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
13959 if (!gops.length) {
13960 return buffer;
13961 }
13962
13963 if (replace) {
13964 // If we are in safe append mode, then completely overwrite the gop buffer
13965 // with the most recent appended data. This will make sure that when appending
13966 // future segments, we only try to align with gops that are both ahead of current
13967 // time and in the last segment appended.
13968 return gops.slice();
13969 }
13970
13971 var start = gops[0].pts;
13972 var i = 0;
13973
13974 for (i; i < buffer.length; i++) {
13975 if (buffer[i].pts >= start) {
13976 break;
13977 }
13978 }
13979
13980 return buffer.slice(0, i).concat(gops);
13981};
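// Illustrative sketch (not part of the library): the non-replace merge above
// on hypothetical data. With a buffer holding gops at pts 0, 3000 and 6000,
// appending new gops that start at pts 3000 keeps only the entry at pts 0
// and concatenates the new gops after it -- everything at or beyond the
// first new gop's pts is superseded by the fresher timing information.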
13982/**
13983 * Removes gop information in buffer that overlaps with provided start and end
13984 *
13985 * @param {Array} buffer
13986 * The current buffer of gop information
13987 * @param {Double} start
13988 * position to start the remove at
13989 * @param {Double} end
13990 * position to end the remove at
13991 * @param {Double} mapping
13992 * Offset to map display time to stream presentation time
13993 */
13994
13995var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
13996 var startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
13997 var endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
13998 var updatedBuffer = buffer.slice();
13999 var i = buffer.length;
14000
14001 while (i--) {
14002 if (buffer[i].pts <= endPts) {
14003 break;
14004 }
14005 }
14006
14007 if (i === -1) {
14008 // no removal because end of remove range is before start of buffer
14009 return updatedBuffer;
14010 }
14011
14012 var j = i + 1;
14013
14014 while (j--) {
14015 if (buffer[j].pts <= startPts) {
14016 break;
14017 }
14018 } // clamp remove range start to 0 index
14019
14020
14021 j = Math.max(j, 0);
14022 updatedBuffer.splice(j, i - j + 1);
14023 return updatedBuffer;
14024};
14025
14026var shallowEqual = function shallowEqual(a, b) {
14027 // if both are undefined
14028 // or one or the other is undefined
14029 // they are not equal
14030 if (!a && !b || !a && b || a && !b) {
14031 return false;
14032 } // they are the same object and thus, equal
14033
14034
14035 if (a === b) {
14036 return true;
14037 } // sort keys so we can make sure they have
14038 // all the same keys later.
14039
14040
14041 var akeys = Object.keys(a).sort();
14042 var bkeys = Object.keys(b).sort(); // different number of keys, not equal
14043
14044 if (akeys.length !== bkeys.length) {
14045 return false;
14046 }
14047
14048 for (var i = 0; i < akeys.length; i++) {
14049 var key = akeys[i]; // different sorted keys, not equal
14050
14051 if (key !== bkeys[i]) {
14052 return false;
14053 } // different values, not equal
14054
14055
14056 if (a[key] !== b[key]) {
14057 return false;
14058 }
14059 }
14060
14061 return true;
14062};
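// Illustrative sketch (not part of the library): shallowEqual at a glance.
// The comparison is flat and only one level deep:
//   shallowEqual(undefined, undefined)  -> false (both missing)
//   shallowEqual({a: 1}, {a: 1})        -> true
//   shallowEqual({a: 1}, {a: 1, b: 2})  -> false (different key counts)
//   shallowEqual({a: {}}, {a: {}})      -> false (nested objects are
//                                          compared by reference)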
14063
14064// https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
14065var QUOTA_EXCEEDED_ERR = 22;
14066
14067 // The minimum back buffer to keep, in seconds. This number was arbitrarily chosen
14068 // and may be updated in the future, but seemed reasonable as a start to prevent any
14069 // potential issues with removing content too close to the playhead.
14070 
14071 var MIN_BACK_BUFFER = 1;
14072 
14073 var CHECK_BUFFER_DELAY = 500; // in ms
14074
14075var finite = function finite(num) {
14076 return typeof num === 'number' && isFinite(num);
14077}; // With most content hovering around 30fps, if a segment has a duration less than a half
14078// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
14079// not accurately reflect the rest of the content.
14080
14081
14082var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
14083var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
14084 // Although these checks should most likely cover non 'main' types, for now it narrows
14085 // the scope of our checks.
14086 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
14087 return null;
14088 }
14089
14090 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
14091 return 'Neither audio nor video found in segment.';
14092 }
14093
14094 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
14095 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
14096 }
14097
14098 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
14099 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
14100 }
14101
14102 return null;
14103};
14104/**
14105 * Calculates a time value that is safe to remove from the back buffer without interrupting
14106 * playback.
14107 *
14108 * @param {TimeRange} seekable
14109 * The current seekable range
14110 * @param {number} currentTime
14111 * The current time of the player
14112 * @param {number} targetDuration
14113 * The target duration of the current playlist
14114 * @return {number}
14115 * Time that is safe to remove from the back buffer without interrupting playback
14116 */
14117
14118var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
14119 // 30 seconds before the playhead provides a safe default for trimming.
14120 //
14121 // Choosing a reasonable default is particularly important for high bitrate content and
14122 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
14123 // throw an APPEND_BUFFER_ERR.
14124 var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
14125
14126 if (seekable.length) {
14127 // Some live playlists may have a shorter window of content than the full allowed back
14128 // buffer. For these playlists, don't save content that's no longer within the window.
14129 trimTime = Math.max(trimTime, seekable.start(0));
14130 } // Don't remove within target duration of the current time to avoid the possibility of
14131 // removing the GOP currently being played, as removing it can cause playback stalls.
14132
14133
14134 var maxTrimTime = currentTime - targetDuration;
14135 return Math.min(maxTrimTime, trimTime);
14136};
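// Illustrative sketch (not part of the library): the trim calculation above
// with concrete numbers, assuming the default Config.BACK_BUFFER_LENGTH of
// 30 seconds (an assumption; see the Config object defined in this bundle).
// With currentTime = 100, seekable = [80, 140] and targetDuration = 10:
//   trimTime    = 100 - 30      = 70
//   clamped     = max(70, 80)   = 80 (the live window starts at 80)
//   maxTrimTime = 100 - 10      = 90
//   result      = min(90, 80)   = 80
// so content before 80s can be removed without interrupting playback.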
14137
14138var segmentInfoString = function segmentInfoString(segmentInfo) {
14139 var _segmentInfo$segment = segmentInfo.segment,
14140 start = _segmentInfo$segment.start,
14141 end = _segmentInfo$segment.end,
14142 parts = _segmentInfo$segment.parts,
14143 _segmentInfo$playlist = segmentInfo.playlist,
14144 seq = _segmentInfo$playlist.mediaSequence,
14145 id = _segmentInfo$playlist.id,
14146 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
14147 segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
14148 index = segmentInfo.mediaIndex,
14149 partIndex = segmentInfo.partIndex,
14150 timeline = segmentInfo.timeline;
14151 var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
14152 return [name + " [" + index + "/" + (segments.length - 1) + "]", partIndex ? "part [" + partIndex + "/" + (parts.length - 1) + "]" : '', "mediaSequenceNumber [" + seq + "/" + (seq + segments.length - 1) + "]", "playlist [" + id + "]", "start/end [" + start + " => " + end + "]", "timeline [" + timeline + "]"].join(' ');
14153};
14154
14155var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
14156 return mediaType + "TimingInfo";
14157};
14158/**
14159 * Returns the timestamp offset to use for the segment.
14160 *
14161 * @param {number} segmentTimeline
14162 * The timeline of the segment
14163 * @param {number} currentTimeline
14164 * The timeline currently being followed by the loader
14165 * @param {number} startOfSegment
14166 * The estimated segment start
14167 * @param {TimeRange[]} buffered
14168 * The loader's buffer
14169 * @param {boolean} overrideCheck
14170 * If true, no checks are made to see if the timestamp offset value should be set;
14171 * it is set directly.
14172 *
14173 * @return {number|null}
14174 * Either a number representing a new timestamp offset, or null if the segment is
14175 * part of the same timeline
14176 */
14177
14178
14179var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
14180 var segmentTimeline = _ref.segmentTimeline,
14181 currentTimeline = _ref.currentTimeline,
14182 startOfSegment = _ref.startOfSegment,
14183 buffered = _ref.buffered,
14184 overrideCheck = _ref.overrideCheck;
14185
14186 // Check to see if we are crossing a discontinuity to see if we need to set the
14187 // timestamp offset on the transmuxer and source buffer.
14188 //
14189 // Previously, we changed the timestampOffset if the start of this segment was less than
14190 // the currently set timestampOffset, but this isn't desirable as it can produce bad
14191 // behavior, especially around long running live streams.
14192 if (!overrideCheck && segmentTimeline === currentTimeline) {
14193 return null;
14194 } // When changing renditions, it's possible to request a segment on an older timeline. For
14195 // instance, given two renditions with the following:
14196 //
14197 // #EXTINF:10
14198 // segment1
14199 // #EXT-X-DISCONTINUITY
14200 // #EXTINF:10
14201 // segment2
14202 // #EXTINF:10
14203 // segment3
14204 //
14205 // And the current player state:
14206 //
14207 // current time: 8
14208 // buffer: 0 => 20
14209 //
14210 // The next segment on the current rendition would be segment3, filling the buffer from
14211 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
14212 // then the next segment to be requested will be segment1 from the new rendition in
14213 // order to fill time 8 and onwards. Using the buffered end would result in repeated
14214 // content (since it would position segment1 of the new rendition starting at 20s). This
14215 // case can be identified when the new segment's timeline is a prior value. Instead of
14216 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
14217 // more accurate to the actual start time of the segment.
14218
14219
14220 if (segmentTimeline < currentTimeline) {
14221 return startOfSegment;
14222 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
14223 // value uses the end of the last segment if it is available. While this value
14224 // should often be correct, it's better to rely on the buffered end, as the new
14225 // content post discontinuity should line up with the buffered end as if it were
14226 // time 0 for the new content.
14227
14228
14229 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
14230};
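// Illustrative sketch (not part of the library): the rendition-switch case
// described in the comment above, expressed as a call (videojs.createTimeRanges
// is the standard video.js helper):
//   timestampOffsetForSegment({
//     segmentTimeline: 0, // the new rendition's segment1, before the DISCO
//     currentTimeline: 1, // the loader already crossed the discontinuity
//     startOfSegment: 0,
//     buffered: videojs.createTimeRanges([[0, 20]]),
//     overrideCheck: false
//   })
// returns startOfSegment (0) rather than buffered.end (20), avoiding the
// repeated-content problem; a forward timeline change instead returns
// buffered.end so the new content lines up with the end of the buffer.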
14231/**
14232 * Returns whether or not the loader should wait for a timeline change from the timeline
14233 * change controller before processing the segment.
14234 *
14235 * Primary timing in VHS goes by video. This is different from most media players, as
14236 * audio is more often used as the primary timing source. For the foreseeable future, VHS
14237 * will continue to use video as the primary timing source, due to the current logic and
14238 * expectations built around it.
14239 *
14240 * Since the timing follows video, in order to maintain sync, the video loader is
14241 * responsible for setting both audio and video source buffer timestamp offsets.
14242 *
14243 * Setting different values for audio and video source buffers could lead to
14244 * desyncing. The following examples demonstrate some of the situations where this
14245 * distinction is important. Note that all of these cases involve demuxed content. When
14246 * content is muxed, the audio and video are packaged together, therefore syncing
14247 * separate media playlists is not an issue.
14248 *
14249 * CASE 1: Audio prepares to load a new timeline before video:
14250 *
14251 * Timeline: 0 1
14252 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
14253 * Audio Loader: ^
14254 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
14255 * Video Loader ^
14256 *
14257 * In the above example, the audio loader is preparing to load the 6th segment, the first
14258 * after a discontinuity, while the video loader is still loading the 5th segment, before
14259 * the discontinuity.
14260 *
14261 * If the audio loader goes ahead and loads and appends the 6th segment before the video
14262 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
14263 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
14264 * the audio loader must provide the audioAppendStart value to trim the content in the
14265 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
14266 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
14267 * segment until that value is provided.
14268 *
14269 * CASE 2: Video prepares to load a new timeline before audio:
14270 *
14271 * Timeline: 0 1
14272 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
14273 * Audio Loader: ^
14274 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
14275 * Video Loader ^
14276 *
14277 * In the above example, the video loader is preparing to load the 6th segment, the first
14278 * after a discontinuity, while the audio loader is still loading the 5th segment, before
14279 * the discontinuity.
14280 *
14281 * If the video loader goes ahead and loads and appends the 6th segment, then once the
14282 * segment is loaded and processed, both the video and audio timestamp offsets will be
14283 * set, since video is used as the primary timing source. This is to ensure content lines
14284 * up appropriately, as any modifications to the video timing are reflected by audio when
14285 * the video loader sets the audio and video timestamp offsets to the same value. However,
14286 * setting the timestamp offset for audio before audio has had a chance to change
14287 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
14288 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
14289 *
14290 * CASE 3: When seeking, audio prepares to load a new timeline before video
14291 *
14292 * Timeline: 0 1
14293 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
14294 * Audio Loader: ^
14295 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
14296 * Video Loader ^
14297 *
14298 * In the above example, both audio and video loaders are loading segments from timeline
14299 * 0, but imagine that the seek originated from timeline 1.
14300 *
14301 * When seeking to a new timeline, the timestamp offset will be set based on the expected
14302 * segment start of the loaded video segment. In order to maintain sync, the audio loader
14303 * must wait for the video loader to load its segment and update both the audio and video
14304 * timestamp offsets before it may load and append its own segment. This is the case
14305 * whether the seek results in a mismatched segment request (e.g., the audio loader
14306 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
14307 * loaders choose to load the same segment index from each playlist, as the segments may
14308 * not be aligned perfectly, even for matching segment indexes.
14309 *
14310 * @param {Object} timelineChangeController
14311 * @param {number} currentTimeline
14312 * The timeline currently being followed by the loader
14313 * @param {number} segmentTimeline
14314 * The timeline of the segment being loaded
14315 * @param {('main'|'audio')} loaderType
14316 * The loader type
14317 * @param {boolean} audioDisabled
14318 * Whether the audio is disabled for the loader. This should only be true when the
14319 * loader may have muxed audio in its segment, but should not append it, e.g., for
14320 * the main loader when an alternate audio playlist is active.
14321 *
14322 * @return {boolean}
14323 * Whether the loader should wait for a timeline change from the timeline change
14324 * controller before processing the segment
14325 */
14326
14327var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
14328 var timelineChangeController = _ref2.timelineChangeController,
14329 currentTimeline = _ref2.currentTimeline,
14330 segmentTimeline = _ref2.segmentTimeline,
14331 loaderType = _ref2.loaderType,
14332 audioDisabled = _ref2.audioDisabled;
14333
14334 if (currentTimeline === segmentTimeline) {
14335 return false;
14336 }
14337
14338 if (loaderType === 'audio') {
14339 var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
14340 type: 'main'
14341 }); // Audio loader should wait if:
14342 //
14343 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
14344 // * main hasn't yet changed to the timeline audio is looking to load
14345
14346 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
14347 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
14348 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
14349 // loader's segments (or the content is audio/video only and handled by the main
14350 // loader).
14351
14352
14353 if (loaderType === 'main' && audioDisabled) {
14354 var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
14355 type: 'audio'
14356 }); // Main loader should wait for the audio loader if audio is not pending a timeline
14357 // change to the current timeline.
14358 //
14359 // Since the main loader is responsible for setting the timestamp offset for both
14360 // audio and video, the main loader must wait for audio to be about to change to its
14361 // timeline before setting the offset, otherwise, if audio is behind in loading,
14362 // segments from the previous timeline would be adjusted by the new timestamp offset.
14363 //
14364 // This requirement means that video will not cross a timeline until the audio is
14365 // about to cross to it, so that way audio and video will always cross the timeline
14366 // together.
14367 //
14368 // In addition to normal timeline changes, these rules also apply to the start of a
14369 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
14370 // that these rules apply to the first timeline change because if they did not, it's
14371 // possible that the main loader will cross two timelines before the audio loader has
14372 // crossed one. Logic may be implemented to handle the startup as a special case, but
14373 // it's easier to simply treat all timeline changes the same.
14374
14375 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
14376 return false;
14377 }
14378
14379 return true;
14380 }
14381
14382 return false;
14383};
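// Illustrative sketch (not part of the library): CASE 1 from the comment
// above, expressed as a call with a hypothetical controller instance. The
// audio loader wants to cross into timeline 1 while main's last timeline
// change was to timeline 0, so audio must wait:
//   shouldWaitForTimelineChange({
//     timelineChangeController: controller, // assumed controller instance
//     currentTimeline: 0,
//     segmentTimeline: 1,
//     loaderType: 'audio'
//   }) === true // lastTimelineChange({type: 'main'}).to is 0, not 1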
14384var mediaDuration = function mediaDuration(audioTimingInfo, videoTimingInfo) {
14385 var audioDuration = audioTimingInfo && typeof audioTimingInfo.start === 'number' && typeof audioTimingInfo.end === 'number' ? audioTimingInfo.end - audioTimingInfo.start : 0;
14386 var videoDuration = videoTimingInfo && typeof videoTimingInfo.start === 'number' && typeof videoTimingInfo.end === 'number' ? videoTimingInfo.end - videoTimingInfo.start : 0;
14387 return Math.max(audioDuration, videoDuration);
14388};
14389var segmentTooLong = function segmentTooLong(_ref3) {
14390 var segmentDuration = _ref3.segmentDuration,
14391 maxDuration = _ref3.maxDuration;
14392
14393 // 0 duration segments are most likely due to metadata only segments or a lack of
14394 // information.
14395 if (!segmentDuration) {
14396 return false;
14397 } // For HLS:
14398 //
14399 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
14400 // The EXTINF duration of each Media Segment in the Playlist
14401 // file, when rounded to the nearest integer, MUST be less than or equal
14402 // to the target duration; longer segments can trigger playback stalls
14403 // or other errors.
14404 //
14405 // For DASH, the mpd-parser uses the largest reported segment duration as the target
14406 // duration. Although that reported duration is occasionally approximate (i.e., not
14407 // exact), a strict check may report that a segment is too long more often in DASH.
14408
14409
14410 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
14411};
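// Illustrative sketch (not part of the library): the rounding rule above
// with a target duration of 10, assuming TIME_FUDGE_FACTOR is the small
// (1/30s) allowance defined earlier in this bundle:
//   Math.round(10.4) = 10 <= 10 + fudge -> not too long
//   Math.round(10.6) = 11 >  10 + fudge -> too long
// matching the HLS rule that EXTINF, rounded to the nearest integer, must
// not exceed the target duration.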
14412var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
14413 // Right now we aren't following DASH's timing model exactly, so only perform
14414 // this check for HLS content.
14415 if (sourceType !== 'hls') {
14416 return null;
14417 }
14418
14419 var segmentDuration = mediaDuration(segmentInfo.audioTimingInfo, segmentInfo.videoTimingInfo); // Don't report if we lack information.
14420 //
14421 // If the segment has a duration of 0 it is either a lack of information or a
14422 // metadata only segment and shouldn't be reported here.
14423
14424 if (!segmentDuration) {
14425 return null;
14426 }
14427
14428 var targetDuration = segmentInfo.playlist.targetDuration;
14429 var isSegmentWayTooLong = segmentTooLong({
14430 segmentDuration: segmentDuration,
14431 maxDuration: targetDuration * 2
14432 });
14433 var isSegmentSlightlyTooLong = segmentTooLong({
14434 segmentDuration: segmentDuration,
14435 maxDuration: targetDuration
14436 });
14437 var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
14438
14439 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
14440 return {
14441 severity: isSegmentWayTooLong ? 'warn' : 'info',
14442 message: segmentTooLongMessage
14443 };
14444 }
14445
14446 return null;
14447};
14448/**
14449 * An object that manages segment loading and appending.
14450 *
14451 * @class SegmentLoader
14452 * @param {Object} options required and optional options
14453 * @extends videojs.EventTarget
14454 */
14455
14456var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
14457 _inheritsLoose__default['default'](SegmentLoader, _videojs$EventTarget);
14458
14459 function SegmentLoader(settings, options) {
14460 var _this;
14461
14462 _this = _videojs$EventTarget.call(this) || this; // check pre-conditions
14463
14464 if (!settings) {
14465 throw new TypeError('Initialization settings are required');
14466 }
14467
14468 if (typeof settings.currentTime !== 'function') {
14469 throw new TypeError('No currentTime getter specified');
14470 }
14471
14472 if (!settings.mediaSource) {
14473 throw new TypeError('No MediaSource specified');
14474 } // public properties
14475
14476
14477 _this.bandwidth = settings.bandwidth;
14478 _this.throughput = {
14479 rate: 0,
14480 count: 0
14481 };
14482 _this.roundTrip = NaN;
14483
14484 _this.resetStats_();
14485
14486 _this.mediaIndex = null;
14487 _this.partIndex = null; // private settings
14488
14489 _this.hasPlayed_ = settings.hasPlayed;
14490 _this.currentTime_ = settings.currentTime;
14491 _this.seekable_ = settings.seekable;
14492 _this.seeking_ = settings.seeking;
14493 _this.duration_ = settings.duration;
14494 _this.mediaSource_ = settings.mediaSource;
14495 _this.vhs_ = settings.vhs;
14496 _this.loaderType_ = settings.loaderType;
14497 _this.currentMediaInfo_ = void 0;
14498 _this.startingMediaInfo_ = void 0;
14499 _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
14500 _this.goalBufferLength_ = settings.goalBufferLength;
14501 _this.sourceType_ = settings.sourceType;
14502 _this.sourceUpdater_ = settings.sourceUpdater;
14503 _this.inbandTextTracks_ = settings.inbandTextTracks;
14504 _this.state_ = 'INIT';
14505 _this.handlePartialData_ = settings.handlePartialData;
14506 _this.timelineChangeController_ = settings.timelineChangeController;
14507 _this.shouldSaveSegmentTimingInfo_ = true;
14508 _this.parse708captions_ = settings.parse708captions; // private instance variables
14509
14510 _this.checkBufferTimeout_ = null;
14511 _this.error_ = void 0;
14512 _this.currentTimeline_ = -1;
14513 _this.pendingSegment_ = null;
14514 _this.xhrOptions_ = null;
14515 _this.pendingSegments_ = [];
14516 _this.audioDisabled_ = false;
14517 _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
14518
14519 _this.gopBuffer_ = [];
14520 _this.timeMapping_ = 0;
14521 _this.safeAppend_ = videojs__default['default'].browser.IE_VERSION >= 11;
14522 _this.appendInitSegment_ = {
14523 audio: true,
14524 video: true
14525 };
14526 _this.playlistOfLastInitSegment_ = {
14527 audio: null,
14528 video: null
14529 };
14530 _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
14531 // information yet to start the loading process (e.g., if the audio loader wants to
14532 // load a segment from the next timeline but the main loader hasn't yet crossed that
14533 // timeline), then the load call will be added to the queue until it is ready to be
14534 // processed.
14535
14536 _this.loadQueue_ = [];
14537 _this.metadataQueue_ = {
14538 id3: [],
14539 caption: []
14540 };
14541 _this.waitingOnRemove_ = false;
14542 _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
14543
14544 _this.activeInitSegmentId_ = null;
14545 _this.initSegments_ = {}; // HLSe playback
14546
14547 _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
14548 _this.keyCache_ = {};
14549 _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
14550 // between a time in the display time and a segment index within
14551 // a playlist
14552
14553 _this.syncController_ = settings.syncController;
14554 _this.syncPoint_ = {
14555 segmentIndex: 0,
14556 time: 0
14557 };
14558 _this.transmuxer_ = _this.createTransmuxer_();
14559
14560 _this.triggerSyncInfoUpdate_ = function () {
14561 return _this.trigger('syncinfoupdate');
14562 };
14563
14564 _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
14565
14566 _this.mediaSource_.addEventListener('sourceopen', function () {
14567 if (!_this.isEndOfStream_()) {
14568 _this.ended_ = false;
14569 }
14570 }); // ...for determining the fetch location
14571
14572
14573 _this.fetchAtBuffer_ = false;
14574 _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
14575 Object.defineProperty(_assertThisInitialized__default['default'](_this), 'state', {
14576 get: function get() {
14577 return this.state_;
14578 },
14579 set: function set(newState) {
14580 if (newState !== this.state_) {
14581 this.logger_(this.state_ + " -> " + newState);
14582 this.state_ = newState;
14583 this.trigger('statechange');
14584 }
14585 }
14586 });
14587
14588 _this.sourceUpdater_.on('ready', function () {
14589 if (_this.hasEnoughInfoToAppend_()) {
14590 _this.processCallQueue_();
14591 }
14592 }); // Only the main loader needs to listen for pending timeline changes, as the main
14593 // loader should wait for audio to be ready to change its timeline so that both main
14594 // and audio timelines change together. For more details, see the
14595 // shouldWaitForTimelineChange function.
14596
14597
14598 if (_this.loaderType_ === 'main') {
14599 _this.timelineChangeController_.on('pendingtimelinechange', function () {
14600 if (_this.hasEnoughInfoToAppend_()) {
14601 _this.processCallQueue_();
14602 }
14603 });
14604 } // The main loader only listens on pending timeline changes, but the audio loader,
14605 // since its loads follow main, needs to listen on timeline changes. For more details,
14606 // see the shouldWaitForTimelineChange function.
14607
14608
14609 if (_this.loaderType_ === 'audio') {
14610 _this.timelineChangeController_.on('timelinechange', function () {
14611 if (_this.hasEnoughInfoToLoad_()) {
14612 _this.processLoadQueue_();
14613 }
14614
14615 if (_this.hasEnoughInfoToAppend_()) {
14616 _this.processCallQueue_();
14617 }
14618 });
14619 }
14620
14621 return _this;
14622 }
14623
14624 var _proto = SegmentLoader.prototype;
14625
14626 _proto.createTransmuxer_ = function createTransmuxer_() {
14627 return segmentTransmuxer.createTransmuxer({
14628 remux: false,
14629 alignGopsAtEnd: this.safeAppend_,
14630 keepOriginalTimestamps: true,
14631 handlePartialData: this.handlePartialData_,
14632 parse708captions: this.parse708captions_
14633 });
14634 }
14635 /**
14636 * reset all of our media stats
14637 *
14638 * @private
14639 */
14640 ;
14641
14642 _proto.resetStats_ = function resetStats_() {
14643 this.mediaBytesTransferred = 0;
14644 this.mediaRequests = 0;
14645 this.mediaRequestsAborted = 0;
14646 this.mediaRequestsTimedout = 0;
14647 this.mediaRequestsErrored = 0;
14648 this.mediaTransferDuration = 0;
14649 this.mediaSecondsLoaded = 0;
14650 }
14651 /**
14652 * dispose of the SegmentLoader and reset to the default state
14653 */
14654 ;
14655
14656 _proto.dispose = function dispose() {
14657 this.trigger('dispose');
14658 this.state = 'DISPOSED';
14659 this.pause();
14660 this.abort_();
14661
14662 if (this.transmuxer_) {
14663 this.transmuxer_.terminate();
14664 }
14665
14666 this.resetStats_();
14667
14668 if (this.checkBufferTimeout_) {
14669 window__default['default'].clearTimeout(this.checkBufferTimeout_);
14670 }
14671
14672 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
14673 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
14674 }
14675
14676 this.off();
14677 };
14678
14679 _proto.setAudio = function setAudio(enable) {
14680 this.audioDisabled_ = !enable;
14681
14682 if (enable) {
14683 this.appendInitSegment_.audio = true;
14684 } else {
14685 // remove current track audio if it gets disabled
14686 this.sourceUpdater_.removeAudio(0, this.duration_());
14687 }
14688 }
14689 /**
14690 * abort anything that is currently going on with the SegmentLoader
14691 * and reset to a default state
14692 */
14693 ;
14694
14695 _proto.abort = function abort() {
14696 if (this.state !== 'WAITING') {
14697 if (this.pendingSegment_) {
14698 this.pendingSegment_ = null;
14699 }
14700
14701 return;
14702 }
14703
14704 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
14705 // since we are no longer "waiting" on any requests. XHR callback is not always run
14706 // when the request is aborted. This will prevent the loader from being stuck in the
14707 // WAITING state indefinitely.
14708
14709 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
14710 // next segment
14711
14712 if (!this.paused()) {
14713 this.monitorBuffer_();
14714 }
14715 }
14716 /**
14717 * abort all pending xhr requests and null any pending segments
14718 *
14719 * @private
14720 */
14721 ;
14722
14723 _proto.abort_ = function abort_() {
14724 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
14725 this.pendingSegment_.abortRequests();
14726 } // clear out the segment being processed
14727
14728
14729 this.pendingSegment_ = null;
14730 this.callQueue_ = [];
14731 this.loadQueue_ = [];
14732 this.metadataQueue_.id3 = [];
14733 this.metadataQueue_.caption = [];
14734 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
14735 this.waitingOnRemove_ = false;
14736 window__default['default'].clearTimeout(this.quotaExceededErrorRetryTimeout_);
14737 this.quotaExceededErrorRetryTimeout_ = null;
14738 };
14739
14740 _proto.checkForAbort_ = function checkForAbort_(requestId) {
14741 // If the state is APPENDING, then aborts will not modify the state, meaning the first
14742 // callback that happens should reset the state to READY so that loading can continue.
14743 if (this.state === 'APPENDING' && !this.pendingSegment_) {
14744 this.state = 'READY';
14745 return true;
14746 }
14747
14748 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
14749 return true;
14750 }
14751
14752 return false;
14753 }
14754 /**
14755 * set an error on the segment loader and null out any pending segments
14756 *
14757 * @param {Error} error the error to set on the SegmentLoader
14758 * @return {Error} the error that was set or that is currently set
14759 */
14760 ;
14761
14762 _proto.error = function error(_error) {
14763 if (typeof _error !== 'undefined') {
14764 this.logger_('error occurred:', _error);
14765 this.error_ = _error;
14766 }
14767
14768 this.pendingSegment_ = null;
14769 return this.error_;
14770 };
14771
14772 _proto.endOfStream = function endOfStream() {
14773 this.ended_ = true;
14774
14775 if (this.transmuxer_) {
14776 // need to clear out any cached data to prepare for the new segment
14777 segmentTransmuxer.reset(this.transmuxer_);
14778 }
14779
14780 this.gopBuffer_.length = 0;
14781 this.pause();
14782 this.trigger('ended');
14783 }
14784 /**
14785 * Indicates which time ranges are buffered
14786 *
14787 * @return {TimeRange}
14788 * TimeRange object representing the current buffered ranges
14789 */
14790 ;
14791
14792 _proto.buffered_ = function buffered_() {
14793 if (!this.sourceUpdater_ || !this.startingMediaInfo_) {
14794 return videojs__default['default'].createTimeRanges();
14795 }
14796
14797 if (this.loaderType_ === 'main') {
14798 var _this$startingMediaIn = this.startingMediaInfo_,
14799 hasAudio = _this$startingMediaIn.hasAudio,
14800 hasVideo = _this$startingMediaIn.hasVideo,
14801 isMuxed = _this$startingMediaIn.isMuxed;
14802
14803 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
14804 return this.sourceUpdater_.buffered();
14805 }
14806
14807 if (hasVideo) {
14808 return this.sourceUpdater_.videoBuffered();
14809 }
14810 } // One case that can be ignored for now is audio only with alt audio,
14811 // as we don't yet have proper support for that.
14812
14813
14814 return this.sourceUpdater_.audioBuffered();
14815 }
14816 /**
14817 * Gets and sets init segment for the provided map
14818 *
14819 * @param {Object} map
14820 * The map object representing the init segment to get or set
14821 * @param {boolean=} set
14822 * If true, the init segment for the provided map should be saved
14823 * @return {Object}
14824 * map object for desired init segment
14825 */
14826 ;
14827
14828 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
14829 if (set === void 0) {
14830 set = false;
14831 }
14832
14833 if (!map) {
14834 return null;
14835 }
14836
14837 var id = initSegmentId(map);
14838 var storedMap = this.initSegments_[id];
14839
14840 if (set && !storedMap && map.bytes) {
14841 this.initSegments_[id] = storedMap = {
14842 resolvedUri: map.resolvedUri,
14843 byterange: map.byterange,
14844 bytes: map.bytes,
14845 tracks: map.tracks,
14846 timescales: map.timescales
14847 };
14848 }
14849
14850 return storedMap || map;
14851 }
14852 /**
14853 * Gets and sets key for the provided key
14854 *
14855 * @param {Object} key
14856 * The key object representing the key to get or set
14857 * @param {boolean=} set
14858 * If true, the key for the provided key should be saved
14859 * @return {Object}
14860 * Key object for desired key
14861 */
14862 ;
14863
14864 _proto.segmentKey = function segmentKey(key, set) {
14865 if (set === void 0) {
14866 set = false;
14867 }
14868
14869 if (!key) {
14870 return null;
14871 }
14872
14873 var id = segmentKeyId(key);
14874 var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
14875 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
14876
14877 if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
14878 this.keyCache_[id] = storedKey = {
14879 resolvedUri: key.resolvedUri,
14880 bytes: key.bytes
14881 };
14882 }
14883
14884 var result = {
14885 resolvedUri: (storedKey || key).resolvedUri
14886 };
14887
14888 if (storedKey) {
14889 result.bytes = storedKey.bytes;
14890 }
14891
14892 return result;
14893 }
14894 /**
14895 * Returns true if all configuration required for loading is present, otherwise false.
14896 *
14897 * @return {boolean} True if all the configuration required for loading is present
14898 * @private
14899 */
14900 ;
14901
14902 _proto.couldBeginLoading_ = function couldBeginLoading_() {
14903 return this.playlist_ && !this.paused();
14904 }
14905 /**
14906 * load a playlist and start to fill the buffer
14907 */
14908 ;
14909
14910 _proto.load = function load() {
14911 // un-pause
14912 this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
14913 // specified
14914
14915 if (!this.playlist_) {
14916 return;
14917 } // if all the configuration is ready, initialize and begin loading
14918
14919
14920 if (this.state === 'INIT' && this.couldBeginLoading_()) {
14921 return this.init_();
14922 } // if we're in the middle of processing a segment already, don't
14923 // kick off an additional segment request
14924
14925
14926 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
14927 return;
14928 }
14929
14930 this.state = 'READY';
14931 }
14932 /**
14933 * Once all the starting parameters have been specified, begin
14934 * operation. This method should only be invoked from the INIT
14935 * state.
14936 *
14937 * @private
14938 */
14939 ;
14940
14941 _proto.init_ = function init_() {
14942 this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
14943 // audio data from the muxed content should be removed
14944
14945 this.resetEverything();
14946 return this.monitorBuffer_();
14947 }
14948 /**
14949 * set a playlist on the segment loader
14950 *
14951 * @param {Object} newPlaylist the playlist object to set on the segment loader
14952 */
14953 ;
14954
14955 _proto.playlist = function playlist(newPlaylist, options) {
14956 if (options === void 0) {
14957 options = {};
14958 }
14959
14960 if (!newPlaylist) {
14961 return;
14962 }
14963
14964 var oldPlaylist = this.playlist_;
14965 var segmentInfo = this.pendingSegment_;
14966 this.playlist_ = newPlaylist;
14967 this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
14968 // is always our zero-time so force a sync update each time the playlist
14969 // is refreshed from the server
14970 //
14971 // Use the INIT state to determine if playback has started, as the playlist sync info
14972 // should be fixed once requests begin (as sync points are generated based on sync
14973 // info), but not before then.
14974
14975 if (this.state === 'INIT') {
14976 newPlaylist.syncInfo = {
14977 mediaSequence: newPlaylist.mediaSequence,
14978 time: 0
14979 }; // Setting the date time mapping means mapping the program date time (if available)
14980 // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
14981 // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
14982 // be updated as the playlist is refreshed before the loader starts loading, the
14983 // program date time mapping needs to be updated as well.
14984 //
14985 // This mapping is only done for the main loader because a program date time should
14986 // map equivalently between playlists.
14987
14988 if (this.loaderType_ === 'main') {
14989 this.syncController_.setDateTimeMappingForStart(newPlaylist);
14990 }
14991 }
14992
14993 var oldId = null;
14994
14995 if (oldPlaylist) {
14996 if (oldPlaylist.id) {
14997 oldId = oldPlaylist.id;
14998 } else if (oldPlaylist.uri) {
14999 oldId = oldPlaylist.uri;
15000 }
15001 }
15002
15003 this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
15004 // in LIVE, we always want to update with new playlists (including refreshes)
15005
15006 this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
15007 // buffering now
15008
15009 if (this.state === 'INIT' && this.couldBeginLoading_()) {
15010 return this.init_();
15011 }
15012
15013 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
15014 if (this.mediaIndex !== null || this.handlePartialData_) {
15015 // we must "resync" the segment loader when we switch renditions and
15016 // the segment loader is already synced to the previous rendition
15017 //
15018 // or if we're handling partial data, we need to ensure the transmuxer is cleared
15019 // out before we start adding more data
15020 this.resyncLoader();
15021 }
15022
15023 this.currentMediaInfo_ = void 0;
15024 this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
15025
15026 return;
15027 } // we reloaded the same playlist so we are in a live scenario
15028 // and we will likely need to adjust the mediaIndex
15029
15030
15031 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
15032 this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
15033 // this is important because we can abort a request and this value must be
15034 // equal to the last appended mediaIndex
15035
15036 if (this.mediaIndex !== null) {
15037 this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
15038 // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
15039 // new playlist was incremented by 1.
15040
15041 if (this.mediaIndex < 0) {
15042 this.mediaIndex = null;
15043 this.partIndex = null;
15044 } else {
15045 var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
15046 // unless parts fell off of the playlist for this segment.
15047 // In that case we need to reset partIndex and resync
15048
15049 if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
15050 var mediaIndex = this.mediaIndex;
15051 this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
15052 this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
          // as the part was dropped from our current playlist's segment.
15054 // The mediaIndex will still be valid so keep that around.
15055
15056 this.mediaIndex = mediaIndex;
15057 }
15058 }
15059 } // update the mediaIndex on the SegmentInfo object
15060 // this is important because we will update this.mediaIndex with this value
15061 // in `handleAppendsDone_` after the segment has been successfully appended
15062
15063
15064 if (segmentInfo) {
15065 segmentInfo.mediaIndex -= mediaSequenceDiff;
15066
15067 if (segmentInfo.mediaIndex < 0) {
15068 segmentInfo.mediaIndex = null;
15069 segmentInfo.partIndex = null;
15070 } else {
15071 // we need to update the referenced segment so that timing information is
15072 // saved for the new playlist's segment, however, if the segment fell off the
15073 // playlist, we can leave the old reference and just lose the timing info
15074 if (segmentInfo.mediaIndex >= 0) {
15075 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
15076 }
15077
15078 if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
15079 segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
15080 }
15081 }
15082 }
15083
15084 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
15085 }
15086 /**
15087 * Prevent the loader from fetching additional segments. If there
15088 * is a segment request outstanding, it will finish processing
15089 * before the loader halts. A segment loader can be unpaused by
15090 * calling load().
15091 */
15092 ;
15093
15094 _proto.pause = function pause() {
15095 if (this.checkBufferTimeout_) {
15096 window__default['default'].clearTimeout(this.checkBufferTimeout_);
15097 this.checkBufferTimeout_ = null;
15098 }
15099 }
15100 /**
15101 * Returns whether the segment loader is fetching additional
15102 * segments when given the opportunity. This property can be
15103 * modified through calls to pause() and load().
15104 */
15105 ;
15106
15107 _proto.paused = function paused() {
15108 return this.checkBufferTimeout_ === null;
15109 }
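  // Editor's note: a minimal usage sketch (illustrative, not part of the
  // bundle) of the pause()/paused()/load() lifecycle, assuming `loader` is a
  // SegmentLoader instance:
  //
  //   loader.pause();  // clears checkBufferTimeout_, halting the buffer poll
  //   loader.paused(); // true, since checkBufferTimeout_ is now null
  //   loader.load();   // reschedules the buffer poll, so paused() is false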
15110 /**
15111 * Delete all the buffered data and reset the SegmentLoader
15112 *
15113 * @param {Function} [done] an optional callback to be executed when the remove
15114 * operation is complete
15115 */
15116 ;
15117
15118 _proto.resetEverything = function resetEverything(done) {
15119 this.ended_ = false;
15120 this.appendInitSegment_ = {
15121 audio: true,
15122 video: true
15123 };
15124 this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
15125 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
15126 // we then clamp the value to duration if necessary.
15127
15128 this.remove(0, Infinity, done); // clears fmp4 captions
15129
15130 if (this.transmuxer_) {
15131 this.transmuxer_.postMessage({
15132 action: 'clearAllMp4Captions'
15133 });
15134 }
15135 }
15136 /**
15137 * Force the SegmentLoader to resync and start loading around the currentTime instead
15138 * of starting at the end of the buffer
15139 *
15140 * Useful for fast quality changes
15141 */
15142 ;
15143
15144 _proto.resetLoader = function resetLoader() {
15145 this.fetchAtBuffer_ = false;
15146 this.resyncLoader();
15147 }
15148 /**
15149 * Force the SegmentLoader to restart synchronization and make a conservative guess
15150 * before returning to the simple walk-forward method
15151 */
15152 ;
15153
15154 _proto.resyncLoader = function resyncLoader() {
15155 if (this.transmuxer_) {
15156 // need to clear out any cached data to prepare for the new segment
15157 segmentTransmuxer.reset(this.transmuxer_);
15158 }
15159
15160 this.mediaIndex = null;
15161 this.partIndex = null;
15162 this.syncPoint_ = null;
15163 this.isPendingTimestampOffset_ = false;
15164 this.callQueue_ = [];
15165 this.loadQueue_ = [];
15166 this.metadataQueue_.id3 = [];
15167 this.metadataQueue_.caption = [];
15168 this.abort();
15169
15170 if (this.transmuxer_) {
15171 this.transmuxer_.postMessage({
15172 action: 'clearParsedMp4Captions'
15173 });
15174 }
15175 }
15176 /**
15177 * Remove any data in the source buffer between start and end times
15178 *
15179 * @param {number} start - the start time of the region to remove from the buffer
15180 * @param {number} end - the end time of the region to remove from the buffer
   * @param {Function} [done] - an optional callback to be executed when the remove
   *                            operation is complete
   * @param {boolean} force - force all remove operations to happen
15184 */
15185 ;
15186
15187 _proto.remove = function remove(start, end, done, force) {
15188 if (done === void 0) {
15189 done = function done() {};
15190 }
15191
15192 if (force === void 0) {
15193 force = false;
15194 }
15195
15196 // clamp end to duration if we need to remove everything.
15197 // This is due to a browser bug that causes issues if we remove to Infinity.
15198 // videojs/videojs-contrib-hls#1225
15199 if (end === Infinity) {
15200 end = this.duration_();
15201 }
15202
15203 if (!this.sourceUpdater_ || !this.startingMediaInfo_) {
15204 this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
15205
15206 return;
    } // start the counter at one; removeFinished is called once at the end of
    // this function to complete it
15208
15209
15210 var removesRemaining = 1;
15211
15212 var removeFinished = function removeFinished() {
15213 removesRemaining--;
15214
15215 if (removesRemaining === 0) {
15216 done();
15217 }
15218 };
15219
15220 if (force || !this.audioDisabled_) {
15221 removesRemaining++;
15222 this.sourceUpdater_.removeAudio(start, end, removeFinished);
15223 } // While it would be better to only remove video if the main loader has video, this
15224 // should be safe with audio only as removeVideo will call back even if there's no
15225 // video buffer.
15226 //
15227 // In theory we can check to see if there's video before calling the remove, but in
15228 // the event that we're switching between renditions and from video to audio only
15229 // (when we add support for that), we may need to clear the video contents despite
15230 // what the new media will contain.
15231
15232
15233 if (force || this.loaderType_ === 'main') {
15234 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
15235 removesRemaining++;
15236 this.sourceUpdater_.removeVideo(start, end, removeFinished);
15237 } // remove any captions and ID3 tags
15238
15239
15240 for (var track in this.inbandTextTracks_) {
15241 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
15242 }
15243
15244 removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
15245
15246 removeFinished();
15247 }
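  // Editor's note: an illustrative reduction (not part of the bundle) of the
  // counting pattern used by remove() above. Starting the counter at one and
  // decrementing after all async removes are queued guarantees `done` fires
  // exactly once, even when zero async removes are issued:
  //
  //   function removeAll(ops, done) {
  //     var remaining = 1; // the caller's own "slot"
  //     var finish = function () {
  //       remaining--;
  //       if (remaining === 0) {
  //         done();
  //       }
  //     };
  //     ops.forEach(function (op) {
  //       remaining++;
  //       op(finish); // each async op calls finish() when complete
  //     });
  //     finish(); // complete the caller's slot
  //   }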
15248 /**
15249 * (re-)schedule monitorBufferTick_ to run as soon as possible
15250 *
15251 * @private
15252 */
15253 ;
15254
15255 _proto.monitorBuffer_ = function monitorBuffer_() {
15256 if (this.checkBufferTimeout_) {
15257 window__default['default'].clearTimeout(this.checkBufferTimeout_);
15258 }
15259
15260 this.checkBufferTimeout_ = window__default['default'].setTimeout(this.monitorBufferTick_.bind(this), 1);
15261 }
15262 /**
15263 * As long as the SegmentLoader is in the READY state, periodically
15264 * invoke fillBuffer_().
15265 *
15266 * @private
15267 */
15268 ;
15269
15270 _proto.monitorBufferTick_ = function monitorBufferTick_() {
15271 if (this.state === 'READY') {
15272 this.fillBuffer_();
15273 }
15274
15275 if (this.checkBufferTimeout_) {
15276 window__default['default'].clearTimeout(this.checkBufferTimeout_);
15277 }
15278
15279 this.checkBufferTimeout_ = window__default['default'].setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
15280 }
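  // Editor's note: an illustrative reduction (not part of the bundle) of the
  // polling pattern above - a self-rescheduling setTimeout rather than
  // setInterval, so a slow fillBuffer_ tick can never stack callbacks:
  //
  //   function tick() {
  //     doWork();
  //     timeoutId = window.setTimeout(tick, CHECK_BUFFER_DELAY);
  //   }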
15281 /**
   * fill the buffer with segments unless the sourceBuffers are
15283 * currently updating
15284 *
15285 * Note: this function should only ever be called by monitorBuffer_
15286 * and never directly
15287 *
15288 * @private
15289 */
15290 ;
15291
15292 _proto.fillBuffer_ = function fillBuffer_() {
15293 // TODO since the source buffer maintains a queue, and we shouldn't call this function
15294 // except when we're ready for the next segment, this check can most likely be removed
15295 if (this.sourceUpdater_.updating()) {
15296 return;
15297 }
15298
15299 if (!this.syncPoint_) {
15300 this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
15301 }
15302
15303 var buffered = this.buffered_(); // see if we need to begin loading immediately
15304
15305 var segmentInfo = this.checkBuffer_(buffered, this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_, this.partIndex);
15306
15307 if (!segmentInfo) {
15308 return;
15309 }
15310
15311 segmentInfo.timestampOffset = timestampOffsetForSegment({
15312 segmentTimeline: segmentInfo.timeline,
15313 currentTimeline: this.currentTimeline_,
15314 startOfSegment: segmentInfo.startOfSegment,
15315 buffered: buffered,
15316 overrideCheck: this.isPendingTimestampOffset_
15317 });
15318 this.isPendingTimestampOffset_ = false;
15319
15320 if (typeof segmentInfo.timestampOffset === 'number') {
15321 this.timelineChangeController_.pendingTimelineChange({
15322 type: this.loaderType_,
15323 from: this.currentTimeline_,
15324 to: segmentInfo.timeline
15325 });
15326 }
15327
15328 this.loadSegment_(segmentInfo);
15329 }
15330 /**
15331 * Determines if we should call endOfStream on the media source based
   * on the state of the buffer or if the appended segment was the final
15333 * segment in the playlist.
15334 *
   * @param {number} [mediaIndex] the media index of the segment we last appended
   * @param {Object} [playlist] a media playlist object
   * @param {number} [partIndex] the part index of the part we last appended
   * @return {boolean} do we need to call endOfStream on the MediaSource
15338 */
15339 ;
15340
15341 _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
15342 if (mediaIndex === void 0) {
15343 mediaIndex = this.mediaIndex;
15344 }
15345
15346 if (playlist === void 0) {
15347 playlist = this.playlist_;
15348 }
15349
15350 if (partIndex === void 0) {
15351 partIndex = this.partIndex;
15352 }
15353
15354 if (!playlist || !this.mediaSource_) {
15355 return false;
15356 }
15357
15358 var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based
15359
15360 var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.
15361
    var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
    // so that the MediaSource can trigger the `ended` event when it runs out of
    // buffered data instead of waiting for more data to arrive
15365
15366 return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
15367 }
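  // Editor's note: a worked example (hypothetical values) of the checks above,
  // assuming mediaSource_.readyState === 'open'. For a VOD playlist
  // (endList === true) with 3 segments whose last segment has 2 parts:
  //
  //   loader.isEndOfStream_(2, playlist, 1); // true  - last segment, last part
  //   loader.isEndOfStream_(2, playlist, 0); // false - a part remains
  //   loader.isEndOfStream_(1, playlist, 0); // false - a segment remains
  //
  // A live playlist (endList === false) always yields false here.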
15368 /**
15369 * Determines what segment request should be made, given current playback
15370 * state.
15371 *
15372 * @param {TimeRanges} buffered - the state of the buffer
15373 * @param {Object} playlist - the playlist object to fetch segments from
   * @param {number} currentMediaIndex - the previous mediaIndex fetched, or null
   * @param {boolean} hasPlayed - a flag indicating whether we have played or not
   * @param {number} currentTime - the playback position in seconds
   * @param {Object} syncPoint - a segment info object that describes the
   *        loader's current sync point
   * @param {number} [currentPartIndex] - the previous partIndex fetched, if any
   * @return {Object} a segment request object that describes the segment to load
15379 */
15380 ;
15381
15382 _proto.checkBuffer_ = function checkBuffer_(buffered, playlist, currentMediaIndex, hasPlayed, currentTime, syncPoint, currentPartIndex) {
15383 var lastBufferedEnd = 0;
15384
15385 if (buffered.length) {
15386 lastBufferedEnd = buffered.end(buffered.length - 1);
15387 }
15388
15389 var bufferedTime = Math.max(0, lastBufferedEnd - currentTime);
15390
15391 if (!playlist.segments.length) {
15392 return null;
15393 } // if there is plenty of content buffered, and the video has
    // been played before, relax for a while
15395
15396
15397 if (bufferedTime >= this.goalBufferLength_()) {
15398 return null;
15399 } // if the video has not yet played once, and we already have
    // one segment downloaded, do nothing
15401
15402
15403 if (!hasPlayed && bufferedTime >= 1) {
15404 return null;
15405 }
15406
15407 var nextPartIndex = null;
15408 var nextMediaIndex = null;
15409 var startOfSegment;
15410 var isSyncRequest = false; // When the syncPoint is null, there is no way of determining a good
15411 // conservative segment index to fetch from
    // The best thing to do here is to request a segment and derive
    // sync-point data from its internal timestamps
15414
15415 if (syncPoint === null) {
15416 nextMediaIndex = this.getSyncSegmentCandidate_(playlist);
15417 isSyncRequest = true;
15418 } else if (currentMediaIndex !== null) {
15419 // Under normal playback conditions fetching is a simple walk forward
15420 var segment = playlist.segments[currentMediaIndex];
15421 var partIndex = typeof currentPartIndex === 'number' ? currentPartIndex : -1;
15422 startOfSegment = segment.end ? segment.end : lastBufferedEnd;
15423
15424 if (segment.parts && segment.parts[partIndex + 1]) {
15425 nextMediaIndex = currentMediaIndex;
15426 nextPartIndex = partIndex + 1;
15427 } else {
15428 nextMediaIndex = currentMediaIndex + 1;
15429 } // There is a sync-point but the lack of a mediaIndex indicates that
15430 // we need to make a good conservative guess about which segment to
15431 // fetch
15432
15433 } else {
15434 // Find the segment containing the end of the buffer or current time.
15435 var mediaSourceInfo = Playlist.getMediaInfoForTime(playlist, this.fetchAtBuffer_ ? lastBufferedEnd : currentTime, syncPoint.segmentIndex, syncPoint.time);
15436 nextMediaIndex = mediaSourceInfo.mediaIndex;
15437 startOfSegment = mediaSourceInfo.startTime;
15438 nextPartIndex = mediaSourceInfo.partIndex;
15439 }
15440
15441 if (typeof nextPartIndex !== 'number' && playlist.segments[nextMediaIndex] && playlist.segments[nextMediaIndex].parts) {
15442 nextPartIndex = 0;
15443 }
15444
15445 var segmentInfo = this.generateSegmentInfo_(playlist, nextMediaIndex, startOfSegment, isSyncRequest, nextPartIndex);
15446
15447 if (!segmentInfo) {
15448 return;
    } // if this is the last segment in the playlist,
    // we are not seeking, and end of stream has already been called,
    // do not re-request
15452
15453
15454 if (this.mediaSource_ && this.playlist_ && segmentInfo.mediaIndex === this.playlist_.segments.length - 1 && this.mediaSource_.readyState === 'ended' && !this.seeking_()) {
15455 return;
15456 }
15457
15458 this.logger_("checkBuffer_ returning " + segmentInfo.uri, {
15459 segmentInfo: segmentInfo,
15460 playlist: playlist,
15461 currentMediaIndex: currentMediaIndex,
15462 currentPartIndex: currentPartIndex,
15463 nextPartIndex: nextPartIndex,
15464 nextMediaIndex: nextMediaIndex,
15465 startOfSegment: startOfSegment,
15466 isSyncRequest: isSyncRequest
15467 });
15468 return segmentInfo;
15469 }
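  // Editor's note: a hedged numeric walk-through (assumed values) of the early
  // exits in checkBuffer_ above. With currentTime at 10, the buffer ending at
  // 40 and goalBufferLength_() returning 30:
  //
  //   var bufferedTime = Math.max(0, 40 - 10); // 30
  //   // 30 >= 30, so checkBuffer_ returns null and no request is made
  //
  // Before first play (hasPlayed === false), any bufferedTime >= 1 exits the
  // same way, so only one segment is fetched ahead of playback starting.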
15470 /**
15471 * The segment loader has no recourse except to fetch a segment in the
15472 * current playlist and use the internal timestamps in that segment to
15473 * generate a syncPoint. This function returns a good candidate index
15474 * for that process.
15475 *
   * @param {Object} playlist - the playlist object to look for a
   *        sync segment candidate in
15477 * @return {number} An index of a segment from the playlist to load
15478 */
15479 ;
15480
15481 _proto.getSyncSegmentCandidate_ = function getSyncSegmentCandidate_(playlist) {
15482 var _this2 = this;
15483
15484 if (this.currentTimeline_ === -1) {
15485 return 0;
15486 }
15487
15488 var segmentIndexArray = playlist.segments.map(function (s, i) {
15489 return {
15490 timeline: s.timeline,
15491 segmentIndex: i
15492 };
15493 }).filter(function (s) {
15494 return s.timeline === _this2.currentTimeline_;
15495 });
15496
15497 if (segmentIndexArray.length) {
15498 return segmentIndexArray[Math.min(segmentIndexArray.length - 1, 1)].segmentIndex;
15499 }
15500
15501 return Math.max(playlist.segments.length - 1, 0);
15502 };
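  // Editor's note: an illustrative trace (not part of the bundle) of the
  // selection above. For segment timelines [0, 0, 1, 1, 1] and
  // currentTimeline_ === 1, the filtered candidates have segmentIndex
  // [2, 3, 4]; Math.min(length - 1, 1) picks position 1 of that list, i.e.
  // segment 3 - the second segment of the current timeline. If no segment
  // matches the current timeline, the last segment index is returned.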
15503
15504 _proto.generateSegmentInfo_ = function generateSegmentInfo_(playlist, mediaIndex, startOfSegment, isSyncRequest, partIndex) {
15505 if (mediaIndex < 0 || mediaIndex >= playlist.segments.length) {
15506 return null;
15507 }
15508
15509 var segment = playlist.segments[mediaIndex];
15510
15511 if (segment.parts && segment.parts.length && partIndex >= segment.parts.length) {
15512 return null;
15513 }
15514
15515 var part = segment.parts && segment.parts.length && segment.parts[partIndex];
15516 var audioBuffered = this.sourceUpdater_.audioBuffered();
15517 var videoBuffered = this.sourceUpdater_.videoBuffered();
15518 var audioAppendStart;
15519 var gopsToAlignWith;
15520
15521 if (audioBuffered.length) {
15522 // since the transmuxer is using the actual timing values, but the buffer is
15523 // adjusted by the timestamp offset, we must adjust the value here
15524 audioAppendStart = audioBuffered.end(audioBuffered.length - 1) - this.sourceUpdater_.audioTimestampOffset();
15525 }
15526
15527 if (videoBuffered.length) {
15528 gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
      // adjusted by the timestamp offset, we must adjust the value here
15530 this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
15531 }
15532
15533 return {
15534 requestId: 'segment-loader-' + Math.random(),
15535 // resolve the segment URL relative to the playlist
15536 uri: part && part.resolvedUri || segment.resolvedUri,
15537 // the segment's mediaIndex at the time it was requested
15538 mediaIndex: mediaIndex,
15539 partIndex: part ? partIndex : null,
15540 // whether or not to update the SegmentLoader's state with this
15541 // segment's mediaIndex
15542 isSyncRequest: isSyncRequest,
15543 startOfSegment: startOfSegment,
15544 // the segment's playlist
15545 playlist: playlist,
15546 // unencrypted bytes of the segment
15547 bytes: null,
15548 // when a key is defined for this segment, the encrypted bytes
15549 encryptedBytes: null,
15550 // The target timestampOffset for this segment when we append it
15551 // to the source buffer
15552 timestampOffset: null,
15553 // The timeline that the segment is in
15554 timeline: segment.timeline,
15555 // The expected duration of the segment in seconds
15556 duration: segment.duration,
15557 // retain the segment in case the playlist updates while doing an async process
15558 segment: segment,
15559 part: part,
15560 byteLength: 0,
15561 transmuxer: this.transmuxer_,
15562 audioAppendStart: audioAppendStart,
15563 gopsToAlignWith: gopsToAlignWith
15564 };
15565 }
15566 /**
15567 * Determines if the network has enough bandwidth to complete the current segment
15568 * request in a timely manner. If not, the request will be aborted early and bandwidth
15569 * updated to trigger a playlist switch.
15570 *
15571 * @param {Object} stats
15572 * Object containing stats about the request timing and size
15573 * @private
15574 */
15575 ;
15576
15577 _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
15578 if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
15579 // TODO: Replace using timeout with a boolean indicating whether this playlist is
15580 // the lowestEnabledRendition.
15581 !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
15582 !this.playlist_.attributes.BANDWIDTH) {
15583 return;
15584 } // Wait at least 1 second since the first byte of data has been received before
15585 // using the calculated bandwidth from the progress event to allow the bitrate
15586 // to stabilize
15587
15588
15589 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
15590 return;
15591 }
15592
15593 var currentTime = this.currentTime_();
15594 var measuredBandwidth = stats.bandwidth;
15595 var segmentDuration = this.pendingSegment_.duration;
15596 var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
15597 // if we are only left with less than 1 second when the request completes.
15598 // A negative timeUntilRebuffering indicates we are already rebuffering
15599
15600 var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
15601 // is larger than the estimated time until the player runs out of forward buffer
15602
15603 if (requestTimeRemaining <= timeUntilRebuffer$1) {
15604 return;
15605 }
15606
15607 var switchCandidate = minRebufferMaxBandwidthSelector({
15608 master: this.vhs_.playlists.master,
15609 currentTime: currentTime,
15610 bandwidth: measuredBandwidth,
15611 duration: this.duration_(),
15612 segmentDuration: segmentDuration,
15613 timeUntilRebuffer: timeUntilRebuffer$1,
15614 currentTimeline: this.currentTimeline_,
15615 syncController: this.syncController_
15616 });
15617
15618 if (!switchCandidate) {
15619 return;
15620 }
15621
15622 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
15623 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
15624 var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
15625 // potential round trip time of the new request so that we are not too aggressive
15626 // with switching to a playlist that might save us a fraction of a second.
15627
15628 if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
15629 minimumTimeSaving = 1;
15630 }
15631
15632 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
15633 return;
15634 } // set the bandwidth to that of the desired playlist being sure to scale by
15635 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
    // don't trigger a bandwidthupdate as the bandwidth is artificial
15637
15638
15639 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
15640 this.trigger('earlyabort');
15641 };
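  // Editor's note: a hedged numeric example (assumed values) of the abort
  // heuristic above. Suppose the in-flight request needs an estimated 8 more
  // seconds while only 5 seconds remain until rebuffering (after the 1 second
  // safety margin):
  //
  //   var rebufferingImpact = 8 - 5;    // 3s spent rebuffering if we wait
  //   // a switch candidate estimated to rebuffer for only 1s:
  //   var timeSavedBySwitching = 3 - 1; // 2s saved
  //   // 2 >= minimumTimeSaving (0.5), so bandwidth is inflated to the
  //   // candidate's BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1 and
  //   // 'earlyabort' is triggered, prompting a rendition switch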
15642
15643 _proto.handleAbort_ = function handleAbort_(segmentInfo) {
15644 this.logger_("Aborting " + segmentInfoString(segmentInfo));
15645 this.mediaRequestsAborted += 1;
15646 }
15647 /**
15648 * XHR `progress` event handler
15649 *
   * @param {Event} event
15651 * The XHR `progress` event
15652 * @param {Object} simpleSegment
15653 * A simplified segment object copy
15654 * @private
15655 */
15656 ;
15657
15658 _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
15659 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15660
15661 if (this.checkForAbort_(simpleSegment.requestId)) {
15662 return;
15663 }
15664
15665 this.trigger('progress');
15666 };
15667
15668 _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
15669 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15670
15671 if (this.checkForAbort_(simpleSegment.requestId)) {
15672 return;
15673 }
15674
15675 if (this.checkForIllegalMediaSwitch(trackInfo)) {
15676 return;
15677 }
15678
15679 trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
15680 // Guard against cases where we're not getting track info at all until we are
15681 // certain that all streams will provide it.
15682
15683 if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
15684 this.appendInitSegment_ = {
15685 audio: true,
15686 video: true
15687 };
15688 this.startingMediaInfo_ = trackInfo;
15689 this.currentMediaInfo_ = trackInfo;
15690 this.logger_('trackinfo update', trackInfo);
15691 this.trigger('trackinfo');
15692 } // trackinfo may cause an abort if the trackinfo
15693 // causes a codec change to an unsupported codec.
15694
15695
15696 if (this.checkForAbort_(simpleSegment.requestId)) {
15697 return;
15698 } // set trackinfo on the pending segment so that
15699 // it can append.
15700
15701
15702 this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
15703
15704 if (this.hasEnoughInfoToAppend_()) {
15705 this.processCallQueue_();
15706 }
15707 };
15708
15709 _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
15710 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15711
15712 if (this.checkForAbort_(simpleSegment.requestId)) {
15713 return;
15714 }
15715
15716 var segmentInfo = this.pendingSegment_;
15717 var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
15718 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
15719 segmentInfo[timingInfoProperty][timeType] = time;
15720 this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
15721
15722 if (this.hasEnoughInfoToAppend_()) {
15723 this.processCallQueue_();
15724 }
15725 };
15726
15727 _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
15728 var _this3 = this;
15729
15730 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15731
15732 if (this.checkForAbort_(simpleSegment.requestId)) {
15733 return;
15734 } // This could only happen with fmp4 segments, but
15735 // should still not happen in general
15736
15737
15738 if (captionData.length === 0) {
15739 this.logger_('SegmentLoader received no captions from a caption event');
15740 return;
15741 }
15742
15743 var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
15744 // can be adjusted by the timestamp offset
15745
15746 if (!segmentInfo.hasAppendedData_) {
15747 this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
15748 return;
15749 }
15750
15751 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
15752 var captionTracks = {}; // get total start/end and captions for each track/stream
15753
15754 captionData.forEach(function (caption) {
15755 // caption.stream is actually a track name...
15756 // set to the existing values in tracks or default values
15757 captionTracks[caption.stream] = captionTracks[caption.stream] || {
15758 // Infinity, as any other value will be less than this
15759 startTime: Infinity,
15760 captions: [],
        // 0, as any other value will be more than this
15762 endTime: 0
15763 };
15764 var captionTrack = captionTracks[caption.stream];
15765 captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
15766 captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
15767 captionTrack.captions.push(caption);
15768 });
15769 Object.keys(captionTracks).forEach(function (trackName) {
15770 var _captionTracks$trackN = captionTracks[trackName],
15771 startTime = _captionTracks$trackN.startTime,
15772 endTime = _captionTracks$trackN.endTime,
15773 captions = _captionTracks$trackN.captions;
15774 var inbandTextTracks = _this3.inbandTextTracks_;
15775
15776 _this3.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);
15777
15778 createCaptionsTrackIfNotExists(inbandTextTracks, _this3.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
15779 // We do this because a rendition change that also changes the timescale for captions
15780 // will result in captions being re-parsed for certain segments. If we add them again
15781 // without clearing we will have two of the same captions visible.
15782
15783 removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
15784 addCaptionData({
15785 captionArray: captions,
15786 inbandTextTracks: inbandTextTracks,
15787 timestampOffset: timestampOffset
15788 });
15789 }); // Reset stored captions since we added parsed
15790 // captions to a text track at this point
15791
15792 if (this.transmuxer_) {
15793 this.transmuxer_.postMessage({
15794 action: 'clearParsedMp4Captions'
15795 });
15796 }
15797 };
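  // Editor's note: a small illustration (assumed data) of the grouping in
  // handleCaptions_ above. Two cues on stream 'CC1' spanning [10, 12] and
  // [15, 18] with a timestampOffset of 0 collapse to:
  //
  //   // captionTracks.CC1 => { startTime: 10, endTime: 18, captions: [...] }
  //
  // so removeCuesFromTrack(10, 18, track) clears exactly the window about to
  // be re-added, preventing duplicate cues after a rendition change.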
15798
15799 _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
15800 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15801
15802 if (this.checkForAbort_(simpleSegment.requestId)) {
15803 return;
15804 }
15805
15806 var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
15807
15808 if (!segmentInfo.hasAppendedData_) {
15809 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
15810 return;
15811 }
15812
15813 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
15814 // audio/video source with a metadata track, and an alt audio with a metadata track.
15815 // However, this probably won't happen, and if it does it can be handled then.
15816
15817 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
15818 addMetadata({
15819 inbandTextTracks: this.inbandTextTracks_,
15820 metadataArray: id3Frames,
15821 timestampOffset: timestampOffset,
15822 videoDuration: this.duration_()
15823 });
15824 };
15825
15826 _proto.processMetadataQueue_ = function processMetadataQueue_() {
15827 this.metadataQueue_.id3.forEach(function (fn) {
15828 return fn();
15829 });
15830 this.metadataQueue_.caption.forEach(function (fn) {
15831 return fn();
15832 });
15833 this.metadataQueue_.id3 = [];
15834 this.metadataQueue_.caption = [];
15835 };
15836
15837 _proto.processCallQueue_ = function processCallQueue_() {
15838 var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
15839 // functions may check the length of the load queue and default to pushing themselves
15840 // back onto the queue.
15841
15842 this.callQueue_ = [];
15843 callQueue.forEach(function (fun) {
15844 return fun();
15845 });
15846 };
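  // Editor's note: clearing callQueue_ before iterating matters because a
  // queued function may push itself back onto the queue. An illustrative
  // reduction (not part of the bundle):
  //
  //   var pending = queue; // swap out the live queue first
  //   queue = [];
  //   pending.forEach(function (fn) {
  //     fn(); // any re-queued functions land in the fresh queue, not this loop
  //   });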
15847
15848 _proto.processLoadQueue_ = function processLoadQueue_() {
15849 var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
15850 // functions may check the length of the load queue and default to pushing themselves
15851 // back onto the queue.
15852
15853 this.loadQueue_ = [];
15854 loadQueue.forEach(function (fun) {
15855 return fun();
15856 });
15857 }
15858 /**
15859 * Determines whether the loader has enough info to load the next segment.
15860 *
15861 * @return {boolean}
15862 * Whether or not the loader has enough info to load the next segment
15863 */
15864 ;
15865
15866 _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
15867 // Since primary timing goes by video, only the audio loader potentially needs to wait
15868 // to load.
15869 if (this.loaderType_ !== 'audio') {
15870 return true;
15871 }
15872
15873 var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
15874 // enough info to load.
15875
15876 if (!segmentInfo) {
15877 return false;
15878 } // The first segment can and should be loaded immediately so that source buffers are
15879 // created together (before appending). Source buffer creation uses the presence of
15880 // audio and video data to determine whether to create audio/video source buffers, and
15881 // uses processed (transmuxed or parsed) media to determine the types required.
15882
15883
15884 if (!this.currentMediaInfo_) {
15885 return true;
15886 }
15887
15888 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
15889 // can be requested and downloaded and only wait before it is transmuxed or parsed.
15890 // But in practice, there are a few reasons why it is better to wait until a loader
15891 // is ready to append that segment before requesting and downloading:
15892 //
15893 // 1. Because audio and main loaders cross discontinuities together, if this loader
15894 // is waiting for the other to catch up, then instead of requesting another
15895 // segment and using up more bandwidth, by not yet loading, more bandwidth is
15896 // allotted to the loader currently behind.
15897 // 2. media-segment-request doesn't have to have logic to consider whether a segment
15898 // is ready to be processed or not, isolating the queueing behavior to the loader.
15899 // 3. The audio loader bases some of its segment properties on timing information
15900 // provided by the main loader, meaning that, if the logic for waiting on
15901 // processing was in media-segment-request, then it would also need to know how
15902 // to re-generate the segment information after the main loader caught up.
15903 shouldWaitForTimelineChange({
15904 timelineChangeController: this.timelineChangeController_,
15905 currentTimeline: this.currentTimeline_,
15906 segmentTimeline: segmentInfo.timeline,
15907 loaderType: this.loaderType_,
15908 audioDisabled: this.audioDisabled_
15909 })) {
15910 return false;
15911 }
15912
15913 return true;
15914 };
15915
15916 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
15917 if (!this.sourceUpdater_.ready()) {
15918 return false;
15919 } // If content needs to be removed or the loader is waiting on an append reattempt,
15920 // then no additional content should be appended until the prior append is resolved.
15921
15922
15923 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
15924 return false;
15925 }
15926
15927 var segmentInfo = this.pendingSegment_; // no segment to append any data for or
15928 // we do not have information on this specific
15929 // segment yet
15930
15931 if (!segmentInfo || !segmentInfo.trackInfo) {
15932 return false;
15933 }
15934
15935 if (!this.handlePartialData_) {
15936 var _this$currentMediaInf = this.currentMediaInfo_,
15937 hasAudio = _this$currentMediaInf.hasAudio,
15938 hasVideo = _this$currentMediaInf.hasVideo,
15939 isMuxed = _this$currentMediaInf.isMuxed;
15940
15941 if (hasVideo && !segmentInfo.videoTimingInfo) {
15942 return false;
15943 } // muxed content only relies on video timing information for now.
15944
15945
15946 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
15947 return false;
15948 }
15949 }
15950
15951 if (shouldWaitForTimelineChange({
15952 timelineChangeController: this.timelineChangeController_,
15953 currentTimeline: this.currentTimeline_,
15954 segmentTimeline: segmentInfo.timeline,
15955 loaderType: this.loaderType_,
15956 audioDisabled: this.audioDisabled_
15957 })) {
15958 return false;
15959 }
15960
15961 return true;
15962 };
15963
15964 _proto.handleData_ = function handleData_(simpleSegment, result) {
15965 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15966
15967 if (this.checkForAbort_(simpleSegment.requestId)) {
15968 return;
15969 } // If there's anything in the call queue, then this data came later and should be
15970 // executed after the calls currently queued.
15971
15972
15973 if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
15974 this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
15975 return;
15976 }
15977
15978 var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
15979
15980 this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
15981
15982 this.updateMediaSecondsLoaded_(segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
15983 // logic may change behavior depending on the state, and changing state too early may
15984 // inflate our estimates of bandwidth. In the future this should be re-examined to
15985 // note more granular states.
15986 // don't process and append data if the mediaSource is closed
15987
15988 if (this.mediaSource_.readyState === 'closed') {
15989 return;
15990 } // if this request included an initialization segment, save that data
15991 // to the initSegment cache
15992
15993
15994 if (simpleSegment.map) {
15995 simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
15996
15997 segmentInfo.segment.map = simpleSegment.map;
15998 } // if this request included a segment key, save that data in the cache
15999
16000
16001 if (simpleSegment.key) {
16002 this.segmentKey(simpleSegment.key, true);
16003 }
16004
16005 segmentInfo.isFmp4 = simpleSegment.isFmp4;
16006 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
16007
16008 if (segmentInfo.isFmp4) {
16009 this.trigger('fmp4');
16010 segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
16011 } else {
16012 var useVideoTimingInfo = this.loaderType_ === 'main' && this.currentMediaInfo_.hasVideo;
16013 var firstVideoFrameTimeForData;
16014
16015 if (useVideoTimingInfo) {
16016 firstVideoFrameTimeForData = this.handlePartialData_ ? result.videoFramePtsTime : segmentInfo.videoTimingInfo.start;
16017 } // Segment loader knows more about segment timing than the transmuxer (in certain
16018 // aspects), so make any changes required for a more accurate start time.
16019 // Don't set the end time yet, as the segment may not be finished processing.
16020
16021
16022 segmentInfo.timingInfo.start = this.trueSegmentStart_({
16023 currentStart: segmentInfo.timingInfo.start,
16024 playlist: segmentInfo.playlist,
16025 mediaIndex: segmentInfo.mediaIndex,
16026 currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
16027 useVideoTimingInfo: useVideoTimingInfo,
16028 firstVideoFrameTimeForData: firstVideoFrameTimeForData,
16029 videoTimingInfo: segmentInfo.videoTimingInfo,
16030 audioTimingInfo: segmentInfo.audioTimingInfo
16031 });
16032 } // Init segments for audio and video only need to be appended in certain cases. Now
16033 // that data is about to be appended, we can check the final cases to determine
16034 // whether we should append an init segment.
16035
16036
16037 this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
16038 // as we use the start of the segment to offset the best guess (playlist provided)
16039 // timestamp offset.
16040
16041 this.updateSourceBufferTimestampOffset_(segmentInfo); // Save some state so that in the future anything waiting on first append (and/or
16042 // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
16043 // we need some notion of whether the timestamp offset or other relevant information
16044 // has had a chance to be set.
16045
16046 segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
16047
16048 this.processMetadataQueue_();
16049 this.appendData_(segmentInfo, result);
16050 };
16051
16052 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
16053 // alt audio doesn't manage timestamp offset
16054 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
16055 // segment for each chunk
16056 !segmentInfo.changedTimestampOffset) {
16057 // if the timestamp offset changed, the timeline may have changed, so we have to re-
16058 // append init segments
16059 this.appendInitSegment_ = {
16060 audio: true,
16061 video: true
16062 };
16063 }
16064
16065 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
16066 // make sure we append init segment on playlist changes, in case the media config
16067 // changed
16068 this.appendInitSegment_[type] = true;
16069 }
16070 };
16071
16072 _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
16073 var type = _ref4.type,
16074 initSegment = _ref4.initSegment,
16075 map = _ref4.map,
16076 playlist = _ref4.playlist;
16077
16078 // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
16079 // (Section 3) required to parse the applicable Media Segments. It applies to every
16080 // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
16081 // or until the end of the playlist."
16082 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
16083 if (map) {
16084 var id = initSegmentId(map);
16085
16086 if (this.activeInitSegmentId_ === id) {
16087 // don't need to re-append the init segment if the ID matches
16088 return null;
16089 } // a map-specified init segment takes priority over any transmuxed (or otherwise
16090 // obtained) init segment
16091 //
16092 // this also caches the init segment for later use
16093
16094
16095 initSegment = this.initSegmentForMap(map, true).bytes;
16096 this.activeInitSegmentId_ = id;
16097 } // We used to always prepend init segments for video, however, that shouldn't be
16098 // necessary. Instead, we should only append on changes, similar to what we've always
16099 // done for audio. This is more important (though may not be that important) for
16100 // frame-by-frame appending for LHLS, simply because of the increased quantity of
16101 // appends.
16102
16103
16104 if (initSegment && this.appendInitSegment_[type]) {
16105 // Make sure we track the playlist that we last used for the init segment, so that
16106 // we can re-append the init segment in the event that we get data from a new
16107 // playlist. Discontinuities and track changes are handled in other sections.
16108 this.playlistOfLastInitSegment_[type] = playlist; // we should only be appending the next init segment if we detect a change, or if
16109 // the segment has a map
16110
16111 this.appendInitSegment_[type] = map ? true : false; // we need to clear out the fmp4 active init segment id, since
16112 // we are appending the muxer init segment
16113
16114 this.activeInitSegmentId_ = null;
16115 return initSegment;
16116 }
16117
16118 return null;
16119 };
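  // Editor's note: an illustrative trace (assumed values) of the init segment
  // caching above, for two consecutive segments sharing one EXT-X-MAP:
  //
  //   // 1st segment: initSegmentId(map) differs from activeInitSegmentId_,
  //   //              so the cached init bytes are returned for appending
  //   // 2nd segment: the id now matches activeInitSegmentId_, so null is
  //   //              returned and the init segment is not re-appended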
16120
16121 _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
16122 var _this4 = this;
16123
16124 var segmentInfo = _ref5.segmentInfo,
16125 type = _ref5.type,
16126 bytes = _ref5.bytes;
16127 var audioBuffered = this.sourceUpdater_.audioBuffered();
16128 var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
16129 // should be cleared out during the buffer removals. However, log in case it helps
16130 // debug.
16131
16132 if (audioBuffered.length > 1) {
16133 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
16134 }
16135
16136 if (videoBuffered.length > 1) {
16137 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
16138 }
16139
16140 var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
16141 var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
16142 var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
16143 var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
16144
16145 if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
16146 // Can't remove enough buffer to make room for new segment (or the browser doesn't
16147 // allow for appends of segments this size). In the future, it may be possible to
16148 // split up the segment and append in pieces, but for now, error out this playlist
16149 // in an attempt to switch to a more manageable rendition.
16150 this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
16151 this.error({
16152 message: 'Quota exceeded error with append of a single segment of content',
16153 // To prevent any possible repeated downloads for content we can't actually
16154 // append, blacklist forever.
16155 blacklistDuration: Infinity
16156 });
16157 this.trigger('error');
16158 return;
16159 } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
16160 // that the segment-loader should block on future events until this one is handled, so
16161 // that it doesn't keep moving onto further segments. Adding the call to the call
16162 // queue will prevent further appends until waitingOnRemove_ and
16163 // quotaExceededErrorRetryTimeout_ are cleared.
16164 //
16165 // Note that this will only block the current loader. In the case of demuxed content,
16166 // the other load may keep filling as fast as possible. In practice, this should be
16167 // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
16168 // source buffer, or video fills without enough room for audio to append (and without
16169 // the availability of clearing out seconds of back buffer to make room for audio).
16170 // But it might still be good to handle this case in the future as a TODO.
16171
16172
16173 this.waitingOnRemove_ = true;
16174 this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
16175 segmentInfo: segmentInfo,
16176 type: type,
16177 bytes: bytes
16178 }));
16179 var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
16180 // before retrying.
16181
16182 var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
16183 this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
16184 this.remove(0, timeToRemoveUntil, function () {
16185 _this4.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");
16186
      _this4.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
16188 // attempts (since we can't clear less than the minimum)
16189
16190 _this4.quotaExceededErrorRetryTimeout_ = window__default['default'].setTimeout(function () {
16191 _this4.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
16192
16193 _this4.quotaExceededErrorRetryTimeout_ = null;
16194
16195 _this4.processCallQueue_();
16196 }, MIN_BACK_BUFFER * 1000);
16197 }, true);
16198 };
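  // Editor's note: the quota-exceeded recovery above, condensed into a sketch
  // of its ordering (illustrative):
  //
  //   // 1. re-queue the failed append on callQueue_ and set waitingOnRemove_
  //   // 2. remove [0, currentTime - MIN_BACK_BUFFER) from audio and video
  //   // 3. wait MIN_BACK_BUFFER seconds, clear the retry timeout, then re-run
  //   //    the call queue, which re-attempts the append with the same bytes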
16199
16200 _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
16201 var segmentInfo = _ref6.segmentInfo,
16202 type = _ref6.type,
16203 bytes = _ref6.bytes;
16204
16205 // if there's no error, nothing to do
16206 if (!error) {
16207 return;
16208 }
16209
16210 if (error.code === QUOTA_EXCEEDED_ERR) {
16211 this.handleQuotaExceededError_({
16212 segmentInfo: segmentInfo,
16213 type: type,
16214 bytes: bytes
16215 }); // A quota exceeded error should be recoverable with a future re-append, so no need
16216 // to trigger an append error.
16217
16218 return;
16219 }
16220
16221 this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
16222 this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
16223 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
16224 //
16225 // Trigger a special error so that it can be handled separately from normal,
16226 // recoverable errors.
16227
16228 this.trigger('appenderror');
16229 };
16230
16231 _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
16232 var segmentInfo = _ref7.segmentInfo,
16233 type = _ref7.type,
16234 initSegment = _ref7.initSegment,
16235 data = _ref7.data,
16236 bytes = _ref7.bytes;
16237
16238 // If this is a re-append, bytes were already created and don't need to be recreated
16239 if (!bytes) {
16240 var segments = [data];
16241 var byteLength = data.byteLength;
16242
16243 if (initSegment) {
16244 // if the media initialization segment is changing, append it before the content
16245 // segment
16246 segments.unshift(initSegment);
16247 byteLength += initSegment.byteLength;
16248 } // Technically we should be OK appending the init segment separately, however, we
16249 // haven't yet tested that, and prepending is how we have always done things.
16250
16251
16252 bytes = concatSegments({
16253 bytes: byteLength,
16254 segments: segments
16255 });
16256 }
16257
16258 this.sourceUpdater_.appendBuffer({
16259 segmentInfo: segmentInfo,
16260 type: type,
16261 bytes: bytes
16262 }, this.handleAppendError_.bind(this, {
16263 segmentInfo: segmentInfo,
16264 type: type,
16265 bytes: bytes
16266 }));
16267 };
16268
16269 _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
16270 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
16271 return;
16272 }
16273
16274 var segment = this.pendingSegment_.segment;
16275 var timingInfoProperty = type + "TimingInfo";
16276
16277 if (!segment[timingInfoProperty]) {
16278 segment[timingInfoProperty] = {};
16279 }
16280
16281 segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
16282 segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
16283 segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
16284 segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
16285 segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
16286
16287 segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
16288 };
16289
16290 _proto.appendData_ = function appendData_(segmentInfo, result) {
16291 var type = result.type,
16292 data = result.data;
16293
16294 if (!data || !data.byteLength) {
16295 return;
16296 }
16297
16298 if (type === 'audio' && this.audioDisabled_) {
16299 return;
16300 }
16301
16302 var initSegment = this.getInitSegmentAndUpdateState_({
16303 type: type,
16304 initSegment: result.initSegment,
16305 playlist: segmentInfo.playlist,
16306 map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
16307 });
16308 this.appendToSourceBuffer_({
16309 segmentInfo: segmentInfo,
16310 type: type,
16311 initSegment: initSegment,
16312 data: data
16313 });
16314 }
16315 /**
16316 * load a specific segment from a request into the buffer
16317 *
16318 * @private
16319 */
16320 ;
16321
16322 _proto.loadSegment_ = function loadSegment_(segmentInfo) {
16323 var _this5 = this;
16324
16325 this.state = 'WAITING';
16326 this.pendingSegment_ = segmentInfo;
16327 this.trimBackBuffer_(segmentInfo);
16328
16329 if (typeof segmentInfo.timestampOffset === 'number') {
16330 if (this.transmuxer_) {
16331 this.transmuxer_.postMessage({
16332 action: 'clearAllMp4Captions'
16333 });
16334 }
16335 }
16336
16337 if (!this.hasEnoughInfoToLoad_()) {
16338 this.loadQueue_.push(function () {
16339 var buffered = _this5.buffered_();
16340
16341 if (typeof segmentInfo.timestampOffset === 'number') {
16342 // The timestamp offset needs to be regenerated, as the buffer most likely
16343 // changed since the function was added to the queue. This is expected, as the
16344 // load is usually pending the main loader appending new segments.
16345 //
16346 // Note also that the overrideCheck property is set to true. This is because
16347 // isPendingTimestampOffset is set back to false after the first set of the
16348 // timestamp offset (before it was added to the queue). But the presence of
16349 // timestamp offset as a property of segmentInfo serves as enough evidence that
16350 // it should be regenerated.
16351 segmentInfo.timestampOffset = timestampOffsetForSegment({
16352 segmentTimeline: segmentInfo.timeline,
16353 currentTimeline: _this5.currentTimeline_,
16354 startOfSegment: segmentInfo.startOfSegment,
16355 buffered: buffered,
16356 overrideCheck: true
16357 });
16358 }
16359
16360 delete segmentInfo.audioAppendStart;
16361
16362 var audioBuffered = _this5.sourceUpdater_.audioBuffered();
16363
16364 if (audioBuffered.length) {
16365 // Because the audio timestamp offset may have been changed by the main loader,
16366 // the audioAppendStart should be regenerated.
16367 //
16368 // Since the transmuxer is using the actual timing values, but the buffer is
16369 // adjusted by the timestamp offset, the value must be adjusted.
16370 segmentInfo.audioAppendStart = audioBuffered.end(audioBuffered.length - 1) - _this5.sourceUpdater_.audioTimestampOffset();
16371 }
16372
16373 _this5.updateTransmuxerAndRequestSegment_(segmentInfo);
16374 });
16375 return;
16376 }
16377
16378 this.updateTransmuxerAndRequestSegment_(segmentInfo);
16379 };
16380
16381 _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
16382 var _this6 = this;
16383
16384 // We'll update the source buffer's timestamp offset once we have transmuxed data, but
16385 // the transmuxer still needs to be updated before then.
16386 //
16387 // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
16388 // offset must be passed to the transmuxer for stream correcting adjustments.
16389 if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
16390 this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
16391
16392 segmentInfo.gopsToAlignWith = [];
16393 this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
16394
16395 this.transmuxer_.postMessage({
16396 action: 'reset'
16397 });
16398 this.transmuxer_.postMessage({
16399 action: 'setTimestampOffset',
16400 timestampOffset: segmentInfo.timestampOffset
16401 });
16402 }
16403
16404 var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
16405 var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
16406 var isWalkingForward = this.mediaIndex !== null;
    var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't signal the end of a timeline when
    // switching to 0, the first timeline
16409 segmentInfo.timeline > 0;
16410 var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
16411 this.logger_("Requesting " + segmentInfoString(segmentInfo));
16412 segmentInfo.abortRequests = mediaSegmentRequest({
16413 xhr: this.vhs_.xhr,
16414 xhrOptions: this.xhrOptions_,
16415 decryptionWorker: this.decrypter_,
16416 segment: simpleSegment,
16417 handlePartialData: this.handlePartialData_,
16418 abortFn: this.handleAbort_.bind(this, segmentInfo),
16419 progressFn: this.handleProgress_.bind(this),
16420 trackInfoFn: this.handleTrackInfo_.bind(this),
16421 timingInfoFn: this.handleTimingInfo_.bind(this),
16422 videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
16423 audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
16424 captionsFn: this.handleCaptions_.bind(this),
16425 isEndOfTimeline: isEndOfTimeline,
16426 endedTimelineFn: function endedTimelineFn() {
16427 _this6.logger_('received endedtimeline callback');
16428 },
16429 id3Fn: this.handleId3_.bind(this),
16430 dataFn: this.handleData_.bind(this),
16431 doneFn: this.segmentRequestFinished_.bind(this)
16432 });
16433 }
16434 /**
16435 * trim the back buffer so that we don't have too much data
16436 * in the source buffer
16437 *
16438 * @private
16439 *
16440 * @param {Object} segmentInfo - the current segment
16441 */
16442 ;
16443
16444 _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
16445 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
16446 // buffer and a very conservative "garbage collector"
16447 // We manually clear out the old buffer to ensure
16448 // we don't trigger the QuotaExceeded error
16449 // on the source buffer during subsequent appends
16450
16451 if (removeToTime > 0) {
16452 this.remove(0, removeToTime);
16453 }
16454 }
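// A rough sketch (hypothetical helper, assuming a 30 second back-buffer goal)
// of the trim-time calculation performed by safeBackBufferTrimTime above:
// keep a window of buffer behind the playhead, never trim ahead of the
// seekable start, and always leave at least one target duration behind
// currentTime.
var exampleBackBufferTrimTime = function (seekable, currentTime, targetDuration) {
  var trimTime = currentTime - 30;

  if (seekable.length) {
    // never remove content from before the seekable window starts
    trimTime = Math.max(trimTime, seekable.start(0));
  }

  // leave at least one target duration of buffer behind the playhead
  return Math.min(currentTime - targetDuration, trimTime);
};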
16455 /**
 16456 * create a simplified copy of the segment object with just the
16457 * information necessary to perform the XHR and decryption
16458 *
16459 * @private
16460 *
16461 * @param {Object} segmentInfo - the current segment
16462 * @return {Object} a simplified segment object copy
16463 */
16464 ;
16465
16466 _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
16467 var segment = segmentInfo.segment;
16468 var part = segmentInfo.part;
16469 var simpleSegment = {
16470 resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
16471 byterange: part ? part.byterange : segment.byterange,
16472 requestId: segmentInfo.requestId,
16473 transmuxer: segmentInfo.transmuxer,
16474 audioAppendStart: segmentInfo.audioAppendStart,
16475 gopsToAlignWith: segmentInfo.gopsToAlignWith,
16476 part: segmentInfo.part
16477 };
16478 var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
16479
16480 if (previousSegment && previousSegment.timeline === segment.timeline) {
16481 // The baseStartTime of a segment is used to handle rollover when probing the TS
16482 // segment to retrieve timing information. Since the probe only looks at the media's
16483 // times (e.g., PTS and DTS values of the segment), and doesn't consider the
16484 // player's time (e.g., player.currentTime()), baseStartTime should reflect the
16485 // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
16486 // seconds of media time, so should be used here. The previous segment is used since
16487 // the end of the previous segment should represent the beginning of the current
16488 // segment, so long as they are on the same timeline.
16489 if (previousSegment.videoTimingInfo) {
16490 simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
16491 } else if (previousSegment.audioTimingInfo) {
16492 simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
16493 }
16494 }
16495
16496 if (segment.key) {
16497 // if the media sequence is greater than 2^32, the IV will be incorrect
16498 // assuming 10s segments, that would be about 1300 years
16499 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
16500 simpleSegment.key = this.segmentKey(segment.key);
16501 simpleSegment.key.iv = iv;
16502 }
16503
16504 if (segment.map) {
16505 simpleSegment.map = this.initSegmentForMap(segment.map);
16506 }
16507
16508 return simpleSegment;
16509 };
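// A small sketch of the default IV rule used above: per the HLS spec, when an
// AES-128 key carries no explicit IV, the segment's media sequence number is
// used as a 128-bit big-endian integer. With sequence numbers below 2^32 that
// is three zero words followed by the sequence number.
var exampleDefaultIv = function (mediaIndex, mediaSequence) {
  // e.g. mediaIndex 2 in a playlist with mediaSequence 100 -> [0, 0, 0, 102]
  return new Uint32Array([0, 0, 0, mediaIndex + mediaSequence]);
};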
16510
16511 _proto.saveTransferStats_ = function saveTransferStats_(stats) {
16512 // every request counts as a media request even if it has been aborted
16513 // or canceled due to a timeout
16514 this.mediaRequests += 1;
16515
16516 if (stats) {
16517 this.mediaBytesTransferred += stats.bytesReceived;
16518 this.mediaTransferDuration += stats.roundTripTime;
16519 }
16520 };
16521
16522 _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
 16523 // byteLength will be used for throughput, and should be based on bytes received,
16524 // which we only know at the end of the request and should reflect total bytes
16525 // downloaded rather than just bytes processed from components of the segment
16526 this.pendingSegment_.byteLength = stats.bytesReceived;
16527
16528 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
16529 this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
16530 return;
16531 }
16532
16533 this.bandwidth = stats.bandwidth;
16534 this.roundTrip = stats.roundTripTime;
16535 };
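// A worked sketch (hypothetical numbers) of how the bandwidth figure stored
// above is typically derived by the request layer: bytes received over the
// request's round trip time, converted to bits per second.
var exampleBandwidthFromStats = function (bytesReceived, roundTripTimeMs) {
  // e.g. 500000 bytes in 2000ms -> (500000 * 8) / 2 = 2,000,000 bits/sec
  return bytesReceived * 8 * 1000 / roundTripTimeMs;
};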
16536
16537 _proto.handleTimeout_ = function handleTimeout_() {
16538 // although the VTT segment loader bandwidth isn't really used, it's good to
 16539 // maintain functionality between segment loaders
16540 this.mediaRequestsTimedout += 1;
16541 this.bandwidth = 1;
16542 this.roundTrip = NaN;
16543 this.trigger('bandwidthupdate');
16544 }
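// Setting bandwidth to 1 bit/sec above is an intentional sentinel: on the
// next 'bandwidthupdate' no rendition can fit within the measured bandwidth,
// which forces an emergency downswitch to the lowest rendition after a
// timeout. A simplified sketch of that selection idea (hypothetical helper,
// not the library's actual selector):
var exampleLowestFit = function (renditionBandwidths, measuredBandwidth) {
  var fitting = renditionBandwidths.filter(function (bw) {
    return bw <= measuredBandwidth;
  });

  // with measuredBandwidth === 1 nothing fits, so take the lowest rendition
  return fitting.length ? Math.max.apply(null, fitting) : Math.min.apply(null, renditionBandwidths);
};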
16545 /**
16546 * Handle the callback from the segmentRequest function and set the
16547 * associated SegmentLoader state and errors if necessary
16548 *
16549 * @private
16550 */
16551 ;
16552
16553 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
16554 // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
16555 // check the call queue directly since this function doesn't need to deal with any
16556 // data, and can continue even if the source buffers are not set up and we didn't get
16557 // any data from the segment
16558 if (this.callQueue_.length) {
16559 this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
16560 return;
16561 }
16562
16563 this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
16564
16565 if (!this.pendingSegment_) {
16566 return;
16567 } // the request was aborted and the SegmentLoader has already started
16568 // another request. this can happen when the timeout for an aborted
16569 // request triggers due to a limitation in the XHR library
16570 // do not count this as any sort of request or we risk double-counting
16571
16572
16573 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
16574 return;
16575 } // an error occurred from the active pendingSegment_ so reset everything
16576
16577
16578 if (error) {
16579 this.pendingSegment_ = null;
16580 this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
16581
16582 if (error.code === REQUEST_ERRORS.ABORTED) {
16583 return;
16584 }
16585
16586 this.pause(); // the error is really just that at least one of the requests timed-out
16587 // set the bandwidth to a very low value and trigger an ABR switch to
16588 // take emergency action
16589
16590 if (error.code === REQUEST_ERRORS.TIMEOUT) {
16591 this.handleTimeout_();
16592 return;
16593 } // if control-flow has arrived here, then the error is real
16594 // emit an error event to blacklist the current playlist
16595
16596
16597 this.mediaRequestsErrored += 1;
16598 this.error(error);
16599 this.trigger('error');
16600 return;
16601 }
16602
16603 var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
16604 // generated for ABR purposes
16605
16606 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
16607 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
16608
16609 if (result.gopInfo) {
16610 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
16611 } // Although we may have already started appending on progress, we shouldn't switch the
16612 // state away from loading until we are officially done loading the segment data.
16613
16614
16615 this.state = 'APPENDING'; // used for testing
16616
16617 this.trigger('appending');
16618 this.waitForAppendsToComplete_(segmentInfo);
16619 };
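// The requestId comparison above is a stale-callback guard: each request is
// tagged with an id, and a late callback is ignored unless its id matches the
// currently pending request. A minimal sketch of the pattern (hypothetical
// helper):
var exampleIsStaleCallback = function (callbackRequestId, pendingSegment) {
  // no pending segment (aborted and reset) or an id from an older request
  return !pendingSegment || callbackRequestId !== pendingSegment.requestId;
};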
16620
16621 _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
16622 var timelineMapping = this.syncController_.mappingForTimeline(timeline);
16623
16624 if (timelineMapping !== null) {
16625 this.timeMapping_ = timelineMapping;
16626 }
16627 };
16628
16629 _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
16630 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
16631 this.mediaSecondsLoaded += segment.end - segment.start;
16632 } else {
16633 this.mediaSecondsLoaded += segment.duration;
16634 }
16635 };
16636
16637 _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
16638 if (timestampOffset === null) {
16639 return false;
16640 } // note that we're potentially using the same timestamp offset for both video and
16641 // audio
16642
16643
16644 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
16645 return true;
16646 }
16647
16648 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
16649 return true;
16650 }
16651
16652 return false;
16653 };
16654
16655 _proto.trueSegmentStart_ = function trueSegmentStart_(_ref8) {
16656 var currentStart = _ref8.currentStart,
16657 playlist = _ref8.playlist,
16658 mediaIndex = _ref8.mediaIndex,
16659 firstVideoFrameTimeForData = _ref8.firstVideoFrameTimeForData,
16660 currentVideoTimestampOffset = _ref8.currentVideoTimestampOffset,
16661 useVideoTimingInfo = _ref8.useVideoTimingInfo,
16662 videoTimingInfo = _ref8.videoTimingInfo,
16663 audioTimingInfo = _ref8.audioTimingInfo;
16664
16665 if (typeof currentStart !== 'undefined') {
16666 // if start was set once, keep using it
16667 return currentStart;
16668 }
16669
16670 if (!useVideoTimingInfo) {
16671 return audioTimingInfo.start;
16672 }
16673
16674 var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
16675 // within that segment. Since the transmuxer maintains a cache of incomplete data
 16676 // and/or the last frame seen, the start time may reflect a frame that starts
16677 // in the previous segment. Check for that case and ensure the start time is
16678 // accurate for the segment.
16679
16680 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
16681 return firstVideoFrameTimeForData;
16682 }
16683
16684 return videoTimingInfo.start;
16685 };
16686
16687 _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
16688 if (!this.currentMediaInfo_) {
16689 this.error({
16690 message: 'No starting media returned, likely due to an unsupported media format.',
16691 blacklistDuration: Infinity
16692 });
16693 this.trigger('error');
16694 return;
16695 } // Although transmuxing is done, appends may not yet be finished. Throw a marker
16696 // on each queue this loader is responsible for to ensure that the appends are
16697 // complete.
16698
16699
16700 var _this$currentMediaInf2 = this.currentMediaInfo_,
16701 hasAudio = _this$currentMediaInf2.hasAudio,
16702 hasVideo = _this$currentMediaInf2.hasVideo,
16703 isMuxed = _this$currentMediaInf2.isMuxed;
16704 var waitForVideo = this.loaderType_ === 'main' && hasVideo; // TODO: does this break partial support for muxed content?
16705
16706 var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
16707 segmentInfo.waitingOnAppends = 0; // segments with no data
16708
16709 if (!segmentInfo.hasAppendedData_) {
16710 if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
16711 // When there's no audio or video data in the segment, there's no audio or video
16712 // timing information.
16713 //
16714 // If there's no audio or video timing information, then the timestamp offset
16715 // can't be adjusted to the appropriate value for the transmuxer and source
16716 // buffers.
16717 //
16718 // Therefore, the next segment should be used to set the timestamp offset.
16719 this.isPendingTimestampOffset_ = true;
16720 } // override settings for metadata only segments
16721
16722
16723 segmentInfo.timingInfo = {
16724 start: 0
16725 };
16726 segmentInfo.waitingOnAppends++;
16727
16728 if (!this.isPendingTimestampOffset_) {
16729 // update the timestampoffset
16730 this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
16731 // no video/audio data.
16732
16733 this.processMetadataQueue_();
16734 } // append is "done" instantly with no data.
16735
16736
16737 this.checkAppendsDone_(segmentInfo);
16738 return;
 16739 } // Since the source updater could call back synchronously, do the increments first.
16740
16741
16742 if (waitForVideo) {
16743 segmentInfo.waitingOnAppends++;
16744 }
16745
16746 if (waitForAudio) {
16747 segmentInfo.waitingOnAppends++;
16748 }
16749
16750 if (waitForVideo) {
16751 this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
16752 }
16753
16754 if (waitForAudio) {
16755 this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
16756 }
16757 };
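// waitForAppendsToComplete_ uses a countdown pattern: waitingOnAppends is
// incremented once per expected append (video and/or audio), and each queue
// callback decrements it via checkAppendsDone_; the call that reaches zero
// finishes the segment. A minimal sketch of the same idea (hypothetical
// helper):
var exampleCountdownLatch = function (count, onDone) {
  return function () {
    count--;

    if (count === 0) {
      onDone();
    }
  };
};
// e.g. var done = exampleCountdownLatch(2, finish); calling done() twice runs
// finish() exactly once, after both appends have reported in.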
16758
16759 _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
16760 if (this.checkForAbort_(segmentInfo.requestId)) {
16761 return;
16762 }
16763
16764 segmentInfo.waitingOnAppends--;
16765
16766 if (segmentInfo.waitingOnAppends === 0) {
16767 this.handleAppendsDone_();
16768 }
16769 };
16770
16771 _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
16772 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.currentMediaInfo_, trackInfo);
16773
16774 if (illegalMediaSwitchError) {
16775 this.error({
16776 message: illegalMediaSwitchError,
16777 blacklistDuration: Infinity
16778 });
16779 this.trigger('error');
16780 return true;
16781 }
16782
16783 return false;
16784 };
16785
16786 _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
16787 if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
16788 // priority, timing-wise, so we must wait
16789 typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
16790 segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
16791 this.loaderType_ !== 'main') {
16792 return;
16793 }
16794
16795 var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
16796 // the timing info here comes from video. In the event that the audio is longer than
16797 // the video, this will trim the start of the audio.
16798 // This also trims any offset from 0 at the beginning of the media
16799
16800 segmentInfo.timestampOffset -= segmentInfo.timingInfo.start; // In the event that there are partial segment downloads, each will try to update the
16801 // timestamp offset. Retaining this bit of state prevents us from updating in the
16802 // future (within the same segment), however, there may be a better way to handle it.
16803
16804 segmentInfo.changedTimestampOffset = true;
16805
16806 if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
16807 this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
16808 didChange = true;
16809 }
16810
16811 if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
16812 this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
16813 didChange = true;
16814 }
16815
16816 if (didChange) {
16817 this.trigger('timestampoffset');
16818 }
16819 };
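// A worked example (hypothetical numbers) of the adjustment above: if the
// desired player time for the segment is 0 but its media time starts at 10,
// then offset -= timingInfo.start yields -10, and the source buffer presents
// appended media time [10, 20) at player time [0, 10).
var exampleAdjustTimestampOffset = function (timestampOffset, timingInfoStart) {
  // e.g. offset 0 with a segment starting at media time 10 -> -10
  return timestampOffset - timingInfoStart;
};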
16820
16821 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
16822 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
16823 var useVideoTimingInfo = this.loaderType_ === 'main' && this.currentMediaInfo_.hasVideo;
16824 var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
16825
16826 if (!prioritizedTimingInfo) {
16827 return;
16828 }
16829
16830 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
16831 // current example is the case of fmp4), so use the rough duration to calculate an
16832 // end time.
16833 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
16834 }
16835 /**
16836 * callback to run when appendBuffer is finished. detects if we are
16837 * in a good state to do things with the data we got, or if we need
16838 * to wait for more
16839 *
16840 * @private
16841 */
16842 ;
16843
16844 _proto.handleAppendsDone_ = function handleAppendsDone_() {
16845 // appendsdone can cause an abort
16846 if (this.pendingSegment_) {
16847 this.trigger('appendsdone');
16848 }
16849
16850 if (!this.pendingSegment_) {
16851 this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
16852 // all appending cases?
16853
16854 if (!this.paused()) {
16855 this.monitorBuffer_();
16856 }
16857
16858 return;
16859 }
16860
16861 var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
16862 // best to wait until all appends are done so we're sure that the primary media is
16863 // finished (and we have its end time).
16864
16865 this.updateTimingInfoEnd_(segmentInfo);
16866
16867 if (this.shouldSaveSegmentTimingInfo_) {
16868 // Timeline mappings should only be saved for the main loader. This is for multiple
16869 // reasons:
16870 //
16871 // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
16872 // and the main loader try to save the timeline mapping, whichever comes later
16873 // will overwrite the first. In theory this is OK, as the mappings should be the
16874 // same, however, it breaks for (2)
16875 // 2) In the event of a live stream, the initial live point will make for a somewhat
16876 // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
16877 // the mapping will be off for one of the streams, dependent on which one was
16878 // first saved (see (1)).
16879 // 3) Primary timing goes by video in VHS, so the mapping should be video.
16880 //
16881 // Since the audio loader will wait for the main loader to load the first segment,
16882 // the main loader will save the first timeline mapping, and ensure that there won't
16883 // be a case where audio loads two segments without saving a mapping (thus leading
16884 // to missing segment timing info).
16885 this.syncController_.saveSegmentTimingInfo({
16886 segmentInfo: segmentInfo,
16887 shouldSaveTimelineMapping: this.loaderType_ === 'main'
16888 });
16889 }
16890
16891 this.logger_("Appended " + segmentInfoString(segmentInfo));
16892 var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
16893
16894 if (segmentDurationMessage) {
16895 if (segmentDurationMessage.severity === 'warn') {
16896 videojs__default['default'].log.warn(segmentDurationMessage.message);
16897 } else {
16898 this.logger_(segmentDurationMessage.message);
16899 }
16900 }
16901
16902 this.recordThroughput_(segmentInfo);
16903 this.pendingSegment_ = null;
16904 this.state = 'READY'; // TODO minor, but for partial segment downloads, this can be done earlier to save
16905 // on bandwidth and download time
16906
16907 if (segmentInfo.isSyncRequest) {
16908 this.trigger('syncinfoupdate');
16909 return;
16910 }
16911
16912 this.addSegmentMetadataCue_(segmentInfo);
16913 this.fetchAtBuffer_ = true;
16914
16915 if (this.currentTimeline_ !== segmentInfo.timeline) {
16916 this.timelineChangeController_.lastTimelineChange({
16917 type: this.loaderType_,
16918 from: this.currentTimeline_,
16919 to: segmentInfo.timeline
16920 }); // If audio is not disabled, the main segment loader is responsible for updating
16921 // the audio timeline as well. If the content is video only, this won't have any
16922 // impact.
16923
16924 if (this.loaderType_ === 'main' && !this.audioDisabled_) {
16925 this.timelineChangeController_.lastTimelineChange({
16926 type: 'audio',
16927 from: this.currentTimeline_,
16928 to: segmentInfo.timeline
16929 });
16930 }
16931 }
16932
16933 this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
16934 // the following conditional otherwise it may consider this a bad "guess"
16935 // and attempt to resync when the post-update seekable window and live
16936 // point would mean that this was the perfect segment to fetch
16937
16938 this.trigger('syncinfoupdate');
16939 var segment = segmentInfo.segment; // If we previously appended a segment that ends more than 3 targetDurations before
16940 // the currentTime_ that means that our conservative guess was too conservative.
16941 // In that case, reset the loader state so that we try to use any information gained
16942 // from the previous request to create a new, more accurate, sync-point.
16943
16944 if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
16945 this.resetEverything();
16946 return;
16947 }
16948
16949 var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
16950 // and conservatively guess
16951
16952 if (isWalkingForward) {
16953 this.trigger('bandwidthupdate');
16954 }
16955
16956 this.trigger('progress');
16957 this.mediaIndex = segmentInfo.mediaIndex;
16958 this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
16959 // buffer, end the stream. this ensures the "ended" event will
16960 // fire if playback reaches that point.
16961
16962 if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
16963 this.endOfStream();
16964 } // used for testing
16965
16966
16967 this.trigger('appended');
16968
16969 if (!this.paused()) {
16970 this.monitorBuffer_();
16971 }
16972 }
16973 /**
16974 * Records the current throughput of the decrypt, transmux, and append
 16975 * portion of the segment pipeline. `throughput.rate` is the cumulative
16976 * moving average of the throughput. `throughput.count` is the number of
16977 * data points in the average.
16978 *
16979 * @private
16980 * @param {Object} segmentInfo the object returned by loadSegment
16981 */
16982 ;
16983
16984 _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
16985 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
16986 this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
16987 return;
16988 }
16989
16990 var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
16991 // by zero in the case where the throughput is ridiculously high
16992
16993 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
16994
16995 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
16996 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
16997
16998 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
16999 }
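// A worked example of the cumulative moving average used above, with
// hypothetical samples: starting from rate 0 and count 0, samples of 800,
// 1000 and 1200 give 0 + (800 - 0)/1 = 800, then 800 + (1000 - 800)/2 = 900,
// then 900 + (1200 - 900)/3 = 1000, the true mean of the three samples.
var exampleCumulativeMovingAverage = function (samples) {
  return samples.reduce(function (acc, sample) {
    // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
    acc.rate += (sample - acc.rate) / ++acc.count;
    return acc;
  }, { rate: 0, count: 0 }).rate;
};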
17000 /**
17001 * Adds a cue to the segment-metadata track with some metadata information about the
17002 * segment
17003 *
17004 * @private
17005 * @param {Object} segmentInfo
17006 * the object returned by loadSegment
17007 * @method addSegmentMetadataCue_
17008 */
17009 ;
17010
17011 _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
17012 if (!this.segmentMetadataTrack_) {
17013 return;
17014 }
17015
17016 var segment = segmentInfo.segment;
17017 var start = segment.start;
17018 var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
17019
17020 if (!finite(start) || !finite(end)) {
17021 return;
17022 }
17023
17024 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
17025 var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
17026 var value = {
17027 custom: segment.custom,
17028 dateTimeObject: segment.dateTimeObject,
17029 dateTimeString: segment.dateTimeString,
17030 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
17031 resolution: segmentInfo.playlist.attributes.RESOLUTION,
17032 codecs: segmentInfo.playlist.attributes.CODECS,
17033 byteLength: segmentInfo.byteLength,
17034 uri: segmentInfo.uri,
17035 timeline: segmentInfo.timeline,
17036 playlist: segmentInfo.playlist.id,
17037 start: start,
17038 end: end
17039 };
17040 var data = JSON.stringify(value);
17041 var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between
 17042 // WebKitDataCue in Safari and VTTCue in other browsers
17043
17044 cue.value = value;
17045 this.segmentMetadataTrack_.addCue(cue);
17046 };
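// A usage sketch for the cues created above, from player-level code (outside
// this file): the metadata can be read back from the segment-metadata text
// track to learn which rendition the currently playing segment came from.
// Assumes a video.js player using this source handler and a track labeled
// 'segment-metadata'.
//
//   var tracks = player.textTracks();
//   for (var i = 0; i < tracks.length; i++) {
//     if (tracks[i].label === 'segment-metadata') {
//       var cue = tracks[i].activeCues && tracks[i].activeCues[0];
//
//       if (cue) {
//         videojs.log(cue.value.playlist, cue.value.bandwidth);
//       }
//     }
//   }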
17047
17048 return SegmentLoader;
17049}(videojs__default['default'].EventTarget);
17050
17051function noop() {}
17052
17053var toTitleCase = function toTitleCase(string) {
17054 if (typeof string !== 'string') {
17055 return string;
17056 }
17057
17058 return string.replace(/./, function (w) {
17059 return w.toUpperCase();
17060 });
17061};
17062
17063var bufferTypes = ['video', 'audio'];
17064
17065var _updating = function updating(type, sourceUpdater) {
17066 var sourceBuffer = sourceUpdater[type + "Buffer"];
17067 return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
17068};
17069
17070var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
17071 for (var i = 0; i < queue.length; i++) {
17072 var queueEntry = queue[i];
17073
17074 if (queueEntry.type === 'mediaSource') {
17075 // If the next entry is a media source entry (uses multiple source buffers), block
17076 // processing to allow it to go through first.
17077 return null;
17078 }
17079
17080 if (queueEntry.type === type) {
17081 return i;
17082 }
17083 }
17084
17085 return null;
17086};
17087
17088var shiftQueue = function shiftQueue(type, sourceUpdater) {
17089 if (sourceUpdater.queue.length === 0) {
17090 return;
17091 }
17092
17093 var queueIndex = 0;
17094 var queueEntry = sourceUpdater.queue[queueIndex];
17095
17096 if (queueEntry.type === 'mediaSource') {
17097 if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
17098 sourceUpdater.queue.shift();
17099 queueEntry.action(sourceUpdater);
17100
17101 if (queueEntry.doneFn) {
17102 queueEntry.doneFn();
17103 } // Only specific source buffer actions must wait for async updateend events. Media
17104 // Source actions process synchronously. Therefore, both audio and video source
17105 // buffers are now clear to process the next queue entries.
17106
17107
17108 shiftQueue('audio', sourceUpdater);
17109 shiftQueue('video', sourceUpdater);
17110 } // Media Source actions require both source buffers, so if the media source action
17111 // couldn't process yet (because one or both source buffers are busy), block other
17112 // queue actions until both are available and the media source action can process.
17113
17114
17115 return;
17116 }
17117
17118 if (type === 'mediaSource') {
17119 // If the queue was shifted by a media source action (this happens when pushing a
17120 // media source action onto the queue), then it wasn't from an updateend event from an
17121 // audio or video source buffer, so there's no change from previous state, and no
17122 // processing should be done.
17123 return;
17124 } // Media source queue entries don't need to consider whether the source updater is
17125 // started (i.e., source buffers are created) as they don't need the source buffers, but
17126 // source buffer queue entries do.
17127
17128
17129 if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
17130 return;
17131 }
17132
17133 if (queueEntry.type !== type) {
17134 queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
17135
17136 if (queueIndex === null) {
17137 // Either there's no queue entry that uses this source buffer type in the queue, or
17138 // there's a media source queue entry before the next entry of this type, in which
17139 // case wait for that action to process first.
17140 return;
17141 }
17142
17143 queueEntry = sourceUpdater.queue[queueIndex];
17144 }
17145
17146 sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
17147 //
17148 // The queue pending operation must be set before the action is performed in the event
17149 // that the action results in a synchronous event that is acted upon. For instance, if
17150 // an exception is thrown that can be handled, it's possible that new actions will be
17151 // appended to an empty queue and immediately executed, but would not have the correct
17152 // pending information if this property was set after the action was performed.
17153
17154 sourceUpdater.queuePending[type] = queueEntry;
17155 queueEntry.action(type, sourceUpdater);
17156
17157 if (!queueEntry.doneFn) {
17158 // synchronous operation, process next entry
17159 sourceUpdater.queuePending[type] = null;
17160 shiftQueue(type, sourceUpdater);
17161 return;
17162 }
17163};
17164
17165var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
17166 var buffer = sourceUpdater[type + "Buffer"];
17167 var titleType = toTitleCase(type);
17168
17169 if (!buffer) {
17170 return;
17171 }
17172
17173 buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
17174 buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
17175 sourceUpdater.codecs[type] = null;
17176 sourceUpdater[type + "Buffer"] = null;
17177};
17178
17179var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
17180 return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
17181};
17182
17183var actions = {
17184 appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
17185 return function (type, sourceUpdater) {
17186 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
17187 // or the media source does not contain this source buffer.
17188
17189 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
17190 return;
17191 }
17192
17193 sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
17194
17195 try {
17196 sourceBuffer.appendBuffer(bytes);
17197 } catch (e) {
17198 sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
17199 sourceUpdater.queuePending[type] = null;
17200 onError(e);
17201 }
17202 };
17203 },
17204 remove: function remove(start, end) {
17205 return function (type, sourceUpdater) {
17206 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
17207 // or the media source does not contain this source buffer.
17208
17209 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
17210 return;
17211 }
17212
17213 sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
17214
17215 try {
17216 sourceBuffer.remove(start, end);
17217 } catch (e) {
17218 sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
17219 }
17220 };
17221 },
17222 timestampOffset: function timestampOffset(offset) {
17223 return function (type, sourceUpdater) {
17224 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
17225 // or the media source does not contain this source buffer.
17226
17227 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
17228 return;
17229 }
17230
17231 sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
17232 sourceBuffer.timestampOffset = offset;
17233 };
17234 },
17235 callback: function callback(_callback) {
17236 return function (type, sourceUpdater) {
17237 _callback();
17238 };
17239 },
17240 endOfStream: function endOfStream(error) {
17241 return function (sourceUpdater) {
17242 if (sourceUpdater.mediaSource.readyState !== 'open') {
17243 return;
17244 }
17245
17246 sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
17247
17248 try {
17249 sourceUpdater.mediaSource.endOfStream(error);
17250 } catch (e) {
17251 videojs__default['default'].log.warn('Failed to call media source endOfStream', e);
17252 }
17253 };
17254 },
17255 duration: function duration(_duration) {
17256 return function (sourceUpdater) {
17257 sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
17258
17259 try {
17260 sourceUpdater.mediaSource.duration = _duration;
17261 } catch (e) {
17262 videojs__default['default'].log.warn('Failed to set media source duration', e);
17263 }
17264 };
17265 },
17266 abort: function abort() {
17267 return function (type, sourceUpdater) {
17268 if (sourceUpdater.mediaSource.readyState !== 'open') {
17269 return;
17270 }
17271
17272 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
17273 // or the media source does not contain this source buffer.
17274
17275 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
17276 return;
17277 }
17278
17279 sourceUpdater.logger_("calling abort on " + type + "Buffer");
17280
17281 try {
17282 sourceBuffer.abort();
17283 } catch (e) {
17284 videojs__default['default'].log.warn("Failed to abort on " + type + "Buffer", e);
17285 }
17286 };
17287 },
17288 addSourceBuffer: function addSourceBuffer(type, codec) {
17289 return function (sourceUpdater) {
17290 var titleType = toTitleCase(type);
17291 var mime = codecs_js.getMimeForCodec(codec);
17292 sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
17293 var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
17294 sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
17295 sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
17296 sourceUpdater.codecs[type] = codec;
17297 sourceUpdater[type + "Buffer"] = sourceBuffer;
17298 };
17299 },
17300 removeSourceBuffer: function removeSourceBuffer(type) {
17301 return function (sourceUpdater) {
17302 var sourceBuffer = sourceUpdater[type + "Buffer"];
17303 cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
17304 // or the media source does not contain this source buffer.
17305
17306 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
17307 return;
17308 }
17309
17310 sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
17311
17312 try {
17313 sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
17314 } catch (e) {
17315 videojs__default['default'].log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
17316 }
17317 };
17318 },
17319 changeType: function changeType(codec) {
17320 return function (type, sourceUpdater) {
17321 var sourceBuffer = sourceUpdater[type + "Buffer"];
17322 var mime = codecs_js.getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
17323 // or the media source does not contain this source buffer.
17324
17325 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
17326 return;
17327 } // do not update codec if we don't need to.
17328
17329
17330 if (sourceUpdater.codecs[type] === codec) {
17331 return;
17332 }
17333
17334 sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
17335 sourceBuffer.changeType(mime);
17336 sourceUpdater.codecs[type] = codec;
17337 };
17338 }
17339};
17340
17341var pushQueue = function pushQueue(_ref) {
17342 var type = _ref.type,
17343 sourceUpdater = _ref.sourceUpdater,
17344 action = _ref.action,
17345 doneFn = _ref.doneFn,
17346 name = _ref.name;
17347 sourceUpdater.queue.push({
17348 type: type,
17349 action: action,
17350 doneFn: doneFn,
17351 name: name
17352 });
17353 shiftQueue(type, sourceUpdater);
17354};
17355
17356var onUpdateend = function onUpdateend(type, sourceUpdater) {
17357 return function (e) {
 17358 // Although there should, in theory, be a pending action for any updateend received,
17359 // there are some actions that may trigger updateend events without set definitions in
17360 // the w3c spec. For instance, setting the duration on the media source may trigger
17361 // updateend events on source buffers. This does not appear to be in the spec. As such,
17362 // if we encounter an updateend without a corresponding pending action from our queue
17363 // for that source buffer type, process the next action.
17364 if (sourceUpdater.queuePending[type]) {
17365 var doneFn = sourceUpdater.queuePending[type].doneFn;
17366 sourceUpdater.queuePending[type] = null;
17367
17368 if (doneFn) {
17369 // if there's an error, report it
17370 doneFn(sourceUpdater[type + "Error_"]);
17371 }
17372 }
17373
17374 shiftQueue(type, sourceUpdater);
17375 };
17376};
17377/**
17378 * A queue of callbacks to be serialized and applied when a
17379 * MediaSource and its associated SourceBuffers are not in the
17380 * updating state. It is used by the segment loader to update the
17381 * underlying SourceBuffers when new data is loaded, for instance.
17382 *
17383 * @class SourceUpdater
17384 * @param {MediaSource} mediaSource the MediaSource to create the SourceBuffer from
17386 */
17387
17388
17389var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
17390 _inheritsLoose__default['default'](SourceUpdater, _videojs$EventTarget);
17391
17392 function SourceUpdater(mediaSource) {
17393 var _this;
17394
17395 _this = _videojs$EventTarget.call(this) || this;
17396 _this.mediaSource = mediaSource;
17397
17398 _this.sourceopenListener_ = function () {
17399 return shiftQueue('mediaSource', _assertThisInitialized__default['default'](_this));
17400 };
17401
17402 _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);
17403
17404 _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
17405
17406 _this.audioTimestampOffset_ = 0;
17407 _this.videoTimestampOffset_ = 0;
17408 _this.queue = [];
17409 _this.queuePending = {
17410 audio: null,
17411 video: null
17412 };
17413 _this.delayedAudioAppendQueue_ = [];
17414 _this.videoAppendQueued_ = false;
17415 _this.codecs = {};
17416 _this.onVideoUpdateEnd_ = onUpdateend('video', _assertThisInitialized__default['default'](_this));
17417 _this.onAudioUpdateEnd_ = onUpdateend('audio', _assertThisInitialized__default['default'](_this));
17418
17419 _this.onVideoError_ = function (e) {
17420 // used for debugging
17421 _this.videoError_ = e;
17422 };
17423
17424 _this.onAudioError_ = function (e) {
17425 // used for debugging
17426 _this.audioError_ = e;
17427 };
17428
17429 _this.createdSourceBuffers_ = false;
17430 _this.initializedEme_ = false;
17431 _this.triggeredReady_ = false;
17432 return _this;
17433 }
17434
17435 var _proto = SourceUpdater.prototype;
17436
17437 _proto.initializedEme = function initializedEme() {
17438 this.initializedEme_ = true;
17439 this.triggerReady();
17440 };
17441
17442 _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
17443 // if false, likely waiting on one of the segment loaders to get enough data to create
17444 // source buffers
17445 return this.createdSourceBuffers_;
17446 };
17447
17448 _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
17449 return this.initializedEme_;
17450 };
17451
17452 _proto.ready = function ready() {
17453 return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
17454 };
17455
17456 _proto.createSourceBuffers = function createSourceBuffers(codecs) {
17457 if (this.hasCreatedSourceBuffers()) {
17458 // already created them before
17459 return;
 17460 } // the initial addOrChangeSourceBuffers call will always
 17461 // add two source buffers.
17462
17463
17464 this.addOrChangeSourceBuffers(codecs);
17465 this.createdSourceBuffers_ = true;
17466 this.trigger('createdsourcebuffers');
17467 this.triggerReady();
17468 };
17469
17470 _proto.triggerReady = function triggerReady() {
17471 // only allow ready to be triggered once, this prevents the case
17472 // where:
17473 // 1. we trigger createdsourcebuffers
 17474 // 2. IE 11 synchronously initializes EME
17475 // 3. the synchronous initialization causes us to trigger ready
17476 // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
17477 if (this.ready() && !this.triggeredReady_) {
17478 this.triggeredReady_ = true;
17479 this.trigger('ready');
17480 }
17481 }
17482 /**
17483 * Add a type of source buffer to the media source.
17484 *
17485 * @param {string} type
17486 * The type of source buffer to add.
17487 *
17488 * @param {string} codec
17489 * The codec to add the source buffer with.
17490 */
17491 ;
17492
17493 _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
17494 pushQueue({
17495 type: 'mediaSource',
17496 sourceUpdater: this,
17497 action: actions.addSourceBuffer(type, codec),
17498 name: 'addSourceBuffer'
17499 });
17500 }
17501 /**
17502 * call abort on a source buffer.
17503 *
17504 * @param {string} type
17505 * The type of source buffer to call abort on.
17506 */
17507 ;
17508
17509 _proto.abort = function abort(type) {
17510 pushQueue({
17511 type: type,
17512 sourceUpdater: this,
17513 action: actions.abort(type),
17514 name: 'abort'
17515 });
17516 }
17517 /**
17518 * Call removeSourceBuffer and remove a specific type
17519 * of source buffer on the mediaSource.
17520 *
17521 * @param {string} type
17522 * The type of source buffer to remove.
17523 */
17524 ;
17525
17526 _proto.removeSourceBuffer = function removeSourceBuffer(type) {
17527 if (!this.canRemoveSourceBuffer()) {
17528 videojs__default['default'].log.error('removeSourceBuffer is not supported!');
17529 return;
17530 }
17531
17532 pushQueue({
17533 type: 'mediaSource',
17534 sourceUpdater: this,
17535 action: actions.removeSourceBuffer(type),
17536 name: 'removeSourceBuffer'
17537 });
17538 }
17539 /**
17540 * Whether or not the removeSourceBuffer function is supported
17541 * on the mediaSource.
17542 *
17543 * @return {boolean}
17544 * if removeSourceBuffer can be called.
17545 */
17546 ;
17547
17548 _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
17549 // IE reports that it supports removeSourceBuffer, but often throws
17550 // errors when attempting to use the function. So we report that it
17551 // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
17552 // throws errors, so we report that it does not support this as well.
17553 return !videojs__default['default'].browser.IE_VERSION && !videojs__default['default'].browser.IS_FIREFOX && window__default['default'].MediaSource && window__default['default'].MediaSource.prototype && typeof window__default['default'].MediaSource.prototype.removeSourceBuffer === 'function';
17554 }
17555 /**
17556 * Whether or not the changeType function is supported
17557 * on our SourceBuffers.
17558 *
17559 * @return {boolean}
17560 * if changeType can be called.
17561 */
17562 ;
17563
17564 SourceUpdater.canChangeType = function canChangeType() {
17565 return window__default['default'].SourceBuffer && window__default['default'].SourceBuffer.prototype && typeof window__default['default'].SourceBuffer.prototype.changeType === 'function';
17566 }
17567 /**
17568 * Whether or not the changeType function is supported
17569 * on our SourceBuffers.
17570 *
17571 * @return {boolean}
17572 * if changeType can be called.
17573 */
17574 ;
17575
17576 _proto.canChangeType = function canChangeType() {
17577 return this.constructor.canChangeType();
17578 }
17579 /**
17580 * Call the changeType function on a source buffer, given the code and type.
17581 *
17582 * @param {string} type
17583 * The type of source buffer to call changeType on.
17584 *
17585 * @param {string} codec
17586 * The codec string to change type with on the source buffer.
17587 */
17588 ;
17589
17590 _proto.changeType = function changeType(type, codec) {
17591 if (!this.canChangeType()) {
17592 videojs__default['default'].log.error('changeType is not supported!');
17593 return;
17594 }
17595
17596 pushQueue({
17597 type: type,
17598 sourceUpdater: this,
17599 action: actions.changeType(codec),
17600 name: 'changeType'
17601 });
17602 }
17603 /**
17604 * Add source buffers with a codec or, if they are already created,
17605 * call changeType on source buffers using changeType.
17606 *
17607 * @param {Object} codecs
17608 * Codecs to switch to
17609 */
17610 ;
17611
17612 _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
17613 var _this2 = this;
17614
17615 if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
17616 throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
17617 }
17618
17619 Object.keys(codecs).forEach(function (type) {
17620 var codec = codecs[type];
17621
17622 if (!_this2.hasCreatedSourceBuffers()) {
17623 return _this2.addSourceBuffer(type, codec);
17624 }
17625
17626 if (_this2.canChangeType()) {
17627 _this2.changeType(type, codec);
17628 }
17629 });
17630 }
17631 /**
17632 * Queue an update to append an ArrayBuffer.
17633 *
 17634 * @param {Object} options an object containing the segmentInfo, type, and bytes to append
 17635 * @param {Function} doneFn the function to call when done
17636 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
17637 */
17638 ;
17639
17640 _proto.appendBuffer = function appendBuffer(options, doneFn) {
17641 var _this3 = this;
17642
17643 var segmentInfo = options.segmentInfo,
17644 type = options.type,
17645 bytes = options.bytes;
17646 this.processedAppend_ = true;
17647
17648 if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
17649 this.delayedAudioAppendQueue_.push([options, doneFn]);
17650 this.logger_("delayed audio append of " + bytes.length + " until video append");
17651 return;
17652 } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
17653 // not be fired. This means that the queue will be blocked until the next action
17654 // taken by the segment-loader. Provide a mechanism for segment-loader to handle
17655 // these errors by calling the doneFn with the specific error.
17656
17657
17658 var onError = doneFn;
17659 pushQueue({
17660 type: type,
17661 sourceUpdater: this,
17662 action: actions.appendBuffer(bytes, segmentInfo || {
17663 mediaIndex: -1
17664 }, onError),
17665 doneFn: doneFn,
17666 name: 'appendBuffer'
17667 });
17668
17669 if (type === 'video') {
17670 this.videoAppendQueued_ = true;
17671
17672 if (!this.delayedAudioAppendQueue_.length) {
17673 return;
17674 }
17675
17676 var queue = this.delayedAudioAppendQueue_.slice();
17677 this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
17678 this.delayedAudioAppendQueue_.length = 0;
17679 queue.forEach(function (que) {
17680 _this3.appendBuffer.apply(_this3, que);
17681 });
17682 }
17683 }
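// An ordering sketch for the delay above: when both buffers exist, the first
// video append must reach the queue before any audio append so that video,
// which drives primary timing, establishes the timeline first. Hypothetical
// sequence:
//
//   appendBuffer({type: 'audio', ...})  // held in delayedAudioAppendQueue_
//   appendBuffer({type: 'video', ...})  // queued, then flushes delayed audio
//
// Once a video append has been queued, videoAppendQueued_ stays true, so
// later audio appends go straight through.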
17684 /**
17685 * Get the audio buffer's buffered timerange.
17686 *
17687 * @return {TimeRange}
17688 * The audio buffer's buffered time range
17689 */
17690 ;
17691
17692 _proto.audioBuffered = function audioBuffered() {
 17693 // no media source/source buffer or it isn't in the media source's
17694 // source buffer list
17695 if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
17696 return videojs__default['default'].createTimeRange();
17697 }
17698
17699 return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs__default['default'].createTimeRange();
17700 }
17701 /**
17702 * Get the video buffer's buffered timerange.
17703 *
17704 * @return {TimeRange}
17705 * The video buffer's buffered time range
17706 */
17707 ;
17708
17709 _proto.videoBuffered = function videoBuffered() {
 17710 // no media source/source buffer or it isn't in the media source's
17711 // source buffer list
17712 if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
17713 return videojs__default['default'].createTimeRange();
17714 }
17715
17716 return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs__default['default'].createTimeRange();
17717 }
17718 /**
17719 * Get a combined video/audio buffer's buffered timerange.
17720 *
17721 * @return {TimeRange}
17722 * the combined time range
17723 */
17724 ;
17725
17726 _proto.buffered = function buffered() {
17727 var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
17728 var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
17729
17730 if (audio && !video) {
17731 return this.audioBuffered();
17732 }
17733
17734 if (video && !audio) {
17735 return this.videoBuffered();
17736 }
17737
17738 return bufferIntersection(this.audioBuffered(), this.videoBuffered());
17739 }
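// A minimal sketch (hypothetical helper) of the intersection performed by
// bufferIntersection above, for single contiguous ranges: content only counts
// as buffered when both the audio and video buffers cover it.
var exampleRangeIntersection = function (startA, endA, startB, endB) {
  var start = Math.max(startA, startB);
  var end = Math.min(endA, endB);

  // e.g. audio [0, 30) and video [5, 25) intersect as [5, 25)
  return start < end ? [start, end] : null;
};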
17740 /**
17741 * Add a callback to the queue that will set duration on the mediaSource.
17742 *
17743 * @param {number} duration
17744 * The duration to set
17745 *
17746 * @param {Function} [doneFn]
17747 * function to run after duration has been set.
17748 */
17749 ;
17750
17751 _proto.setDuration = function setDuration(duration, doneFn) {
17752 if (doneFn === void 0) {
17753 doneFn = noop;
17754 }
17755
17756 // In order to set the duration on the media source, it's necessary to wait for all
17757 // source buffers to no longer be updating. "If the updating attribute equals true on
17758 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
17759 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
17760 pushQueue({
17761 type: 'mediaSource',
17762 sourceUpdater: this,
17763 action: actions.duration(duration),
17764 name: 'duration',
17765 doneFn: doneFn
17766 });
17767 }
17768 /**
17769 * Add a mediaSource endOfStream call to the queue
17770 *
17771 * @param {Error} [error]
17772 * Call endOfStream with an error
17773 *
17774 * @param {Function} [doneFn]
17775 * A function that should be called when the
17776 * endOfStream call has finished.
17777 */
17778 ;
17779
17780 _proto.endOfStream = function endOfStream(error, doneFn) {
17781 if (error === void 0) {
17782 error = null;
17783 }
17784
17785 if (doneFn === void 0) {
17786 doneFn = noop;
17787 }
17788
17789 if (typeof error !== 'string') {
17790 error = undefined;
17791 } // In order to set the duration on the media source, it's necessary to wait for all
17792 // source buffers to no longer be updating. "If the updating attribute equals true on
17793 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
17794 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
17795
17796
17797 pushQueue({
17798 type: 'mediaSource',
17799 sourceUpdater: this,
17800 action: actions.endOfStream(error),
17801 name: 'endOfStream',
17802 doneFn: doneFn
17803 });
17804 }
17805 /**
17806 * Queue an update to remove a time range from the buffer.
17807 *
17808 * @param {number} start where to start the removal
17809 * @param {number} end where to end the removal
17810 * @param {Function} [done=noop] optional callback to be executed when the remove
17811 * operation is complete
17812 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
17813 */
17814 ;
17815
17816 _proto.removeAudio = function removeAudio(start, end, done) {
17817 if (done === void 0) {
17818 done = noop;
17819 }
17820
17821 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
17822 done();
17823 return;
17824 }
17825
17826 pushQueue({
17827 type: 'audio',
17828 sourceUpdater: this,
17829 action: actions.remove(start, end),
17830 doneFn: done,
17831 name: 'remove'
17832 });
17833 }
17834 /**
17835 * Queue an update to remove a time range from the buffer.
17836 *
17837 * @param {number} start where to start the removal
17838 * @param {number} end where to end the removal
17839 * @param {Function} [done=noop] optional callback to be executed when the remove
17840 * operation is complete
17841 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
17842 */
17843 ;
17844
17845 _proto.removeVideo = function removeVideo(start, end, done) {
17846 if (done === void 0) {
17847 done = noop;
17848 }
17849
17850 if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
17851 done();
17852 return;
17853 }
17854
17855 pushQueue({
17856 type: 'video',
17857 sourceUpdater: this,
17858 action: actions.remove(start, end),
17859 doneFn: done,
17860 name: 'remove'
17861 });
17862 }
17863 /**
17864 * Whether the underlying sourceBuffer is updating or not
17865 *
17866 * @return {boolean} the updating status of the SourceBuffer
17867 */
17868 ;
17869
17870 _proto.updating = function updating() {
17871 // the audio/video source buffer is updating
17872 if (_updating('audio', this) || _updating('video', this)) {
17873 return true;
17874 }
17875
17876 return false;
17877 }
17878 /**
17879 * Set/get the timestampoffset on the audio SourceBuffer
17880 *
17881 * @return {number} the timestamp offset
17882 */
17883 ;
17884
17885 _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
17886 if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
17887 this.audioTimestampOffset_ !== offset) {
17888 pushQueue({
17889 type: 'audio',
17890 sourceUpdater: this,
17891 action: actions.timestampOffset(offset),
17892 name: 'timestampOffset'
17893 });
17894 this.audioTimestampOffset_ = offset;
17895 }
17896
17897 return this.audioTimestampOffset_;
17898 }
17899 /**
17900 * Set/get the timestampoffset on the video SourceBuffer
17901 *
17902 * @return {number} the timestamp offset
17903 */
17904 ;
17905
17906 _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
17907 if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
 17908 this.videoTimestampOffset_ !== offset) {
17909 pushQueue({
17910 type: 'video',
17911 sourceUpdater: this,
17912 action: actions.timestampOffset(offset),
17913 name: 'timestampOffset'
17914 });
17915 this.videoTimestampOffset_ = offset;
17916 }
17917
17918 return this.videoTimestampOffset_;
17919 }
17920 /**
17921 * Add a function to the queue that will be called
17922 * when it is its turn to run in the audio queue.
17923 *
17924 * @param {Function} callback
17925 * The callback to queue.
17926 */
17927 ;
17928
17929 _proto.audioQueueCallback = function audioQueueCallback(callback) {
17930 if (!this.audioBuffer) {
17931 return;
17932 }
17933
17934 pushQueue({
17935 type: 'audio',
17936 sourceUpdater: this,
17937 action: actions.callback(callback),
17938 name: 'callback'
17939 });
17940 }
17941 /**
17942 * Add a function to the queue that will be called
17943 * when it is its turn to run in the video queue.
17944 *
17945 * @param {Function} callback
17946 * The callback to queue.
17947 */
17948 ;
17949
17950 _proto.videoQueueCallback = function videoQueueCallback(callback) {
17951 if (!this.videoBuffer) {
17952 return;
17953 }
17954
17955 pushQueue({
17956 type: 'video',
17957 sourceUpdater: this,
17958 action: actions.callback(callback),
17959 name: 'callback'
17960 });
17961 }
17962 /**
17963 * dispose of the source updater and the underlying sourceBuffer
17964 */
17965 ;
17966
17967 _proto.dispose = function dispose() {
17968 var _this4 = this;
17969
17970 this.trigger('dispose');
17971 bufferTypes.forEach(function (type) {
17972 _this4.abort(type);
17973
17974 if (_this4.canRemoveSourceBuffer()) {
17975 _this4.removeSourceBuffer(type);
17976 } else {
17977 _this4[type + "QueueCallback"](function () {
17978 return cleanupBuffer(type, _this4);
17979 });
17980 }
17981 });
17982 this.videoAppendQueued_ = false;
17983 this.delayedAudioAppendQueue_.length = 0;
17984
17985 if (this.sourceopenListener_) {
17986 this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
17987 }
17988
17989 this.off();
17990 };
17991
17992 return SourceUpdater;
17993}(videojs__default['default'].EventTarget);
17994
17995var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
17996 return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
17997};
17998
17999var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
18000 return char.charCodeAt(0);
18001}));
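// Illustrative sketch (comment only): `uint8ToUtf8` decodes UTF-8 bytes via the
// escape/decodeURIComponent round-trip, and VTT_LINE_TERMINATORS is simply two
// newline code points:
//
//   uint8ToUtf8(new Uint8Array([0x57, 0x45, 0x42, 0x56, 0x54, 0x54])); // 'WEBVTT'
//   Array.from(VTT_LINE_TERMINATORS); // [10, 10]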
18002/**
18003 * An object that manages segment loading and appending.
18004 *
18005 * @class VTTSegmentLoader
18006 * @param {Object} options required and optional options
18007 * @extends videojs.EventTarget
18008 */
18009
18010var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
18011 _inheritsLoose__default['default'](VTTSegmentLoader, _SegmentLoader);
18012
18013 function VTTSegmentLoader(settings, options) {
18014 var _this;
18015
18016 if (options === void 0) {
18017 options = {};
18018 }
18019
18020 _this = _SegmentLoader.call(this, settings, options) || this; // VTT can't handle partial data
18021
18022 _this.handlePartialData_ = false; // SegmentLoader requires a MediaSource be specified or it will throw an error;
18023 // however, VTTSegmentLoader has no need of a media source, so delete the reference
18024
18025 _this.mediaSource_ = null;
18026 _this.subtitlesTrack_ = null;
18027 _this.loaderType_ = 'subtitle';
18028 _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
18029 // the sync controller leads to improper behavior.
18030
18031 _this.shouldSaveSegmentTimingInfo_ = false;
18032 return _this;
18033 }
18034
18035 var _proto = VTTSegmentLoader.prototype;
18036
18037 _proto.createTransmuxer_ = function createTransmuxer_() {
18038 // don't need to transmux any subtitles
18039 return null;
18040 }
18041 /**
18042 * Indicates which time ranges are buffered
18043 *
18044 * @return {TimeRange}
18045 * TimeRange object representing the current buffered ranges
18046 */
18047 ;
18048
18049 _proto.buffered_ = function buffered_() {
18050 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
18051 return videojs__default['default'].createTimeRanges();
18052 }
18053
18054 var cues = this.subtitlesTrack_.cues;
18055 var start = cues[0].startTime;
18056 var end = cues[cues.length - 1].startTime;
18057 return videojs__default['default'].createTimeRanges([[start, end]]);
18058 }
18059 /**
18060 * Gets and sets init segment for the provided map
18061 *
18062 * @param {Object} map
18063 * The map object representing the init segment to get or set
18064 * @param {boolean=} set
18065 * If true, the init segment for the provided map should be saved
18066 * @return {Object}
18067 * map object for desired init segment
18068 */
18069 ;
18070
18071 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
18072 if (set === void 0) {
18073 set = false;
18074 }
18075
18076 if (!map) {
18077 return null;
18078 }
18079
18080 var id = initSegmentId(map);
18081 var storedMap = this.initSegments_[id];
18082
18083 if (set && !storedMap && map.bytes) {
18084 // append WebVTT line terminators to the media initialization segment if it exists
18085 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
18086 // requires two or more WebVTT line terminators between the WebVTT header and the
18087 // rest of the file
18088 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
18089 var combinedSegment = new Uint8Array(combinedByteLength);
18090 combinedSegment.set(map.bytes);
18091 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
18092 this.initSegments_[id] = storedMap = {
18093 resolvedUri: map.resolvedUri,
18094 byterange: map.byterange,
18095 bytes: combinedSegment
18096 };
18097 }
18098
18099 return storedMap || map;
18100 }
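 // Illustrative sketch (comment only): the concatenation above is plain
 // Uint8Array splicing; for a hypothetical 6-byte 'WEBVTT' header map:
 //
 //   map.bytes: [87, 69, 66, 86, 84, 84]
 //   combined:  [87, 69, 66, 86, 84, 84, 10, 10] // header + two '\n' terminators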
18101 /**
18102 * Returns true if all configuration required for loading is present, otherwise false.
18103 *
18104 * @return {boolean} True if all the configuration is ready for loading
18105 * @private
18106 */
18107 ;
18108
18109 _proto.couldBeginLoading_ = function couldBeginLoading_() {
18110 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
18111 }
18112 /**
18113 * Once all the starting parameters have been specified, begin
18114 * operation. This method should only be invoked from the INIT
18115 * state.
18116 *
18117 * @private
18118 */
18119 ;
18120
18121 _proto.init_ = function init_() {
18122 this.state = 'READY';
18123 this.resetEverything();
18124 return this.monitorBuffer_();
18125 }
18126 /**
18127 * Set a subtitle track on the segment loader to add subtitles to
18128 *
18129 * @param {TextTrack=} track
18130 * The text track to add loaded subtitles to
18131 * @return {TextTrack}
18132 * Returns the subtitles track
18133 */
18134 ;
18135
18136 _proto.track = function track(_track) {
18137 if (typeof _track === 'undefined') {
18138 return this.subtitlesTrack_;
18139 }
18140
18141 this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
18142 // buffering now
18143
18144 if (this.state === 'INIT' && this.couldBeginLoading_()) {
18145 this.init_();
18146 }
18147
18148 return this.subtitlesTrack_;
18149 }
18150 /**
18151 * Remove any data in the source buffer between start and end times
18152 *
18153 * @param {number} start - the start time of the region to remove from the buffer
18154 * @param {number} end - the end time of the region to remove from the buffer
18155 */
18156 ;
18157
18158 _proto.remove = function remove(start, end) {
18159 removeCuesFromTrack(start, end, this.subtitlesTrack_);
18160 }
18161 /**
18162 * fill the buffer with segments unless the sourceBuffers are
18163 * currently updating
18164 *
18165 * Note: this function should only ever be called by monitorBuffer_
18166 * and never directly
18167 *
18168 * @private
18169 */
18170 ;
18171
18172 _proto.fillBuffer_ = function fillBuffer_() {
18173 var _this2 = this;
18174
18175 if (!this.syncPoint_) {
18176 this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
18177 } // see if we need to begin loading immediately
18178
18179
18180 var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);
18181 segmentInfo = this.skipEmptySegments_(segmentInfo);
18182
18183 if (!segmentInfo) {
18184 return;
18185 }
18186
18187 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
18188 // We don't have the timestamp offset that we need to sync subtitles.
18189 // Rerun on a timestamp offset or user interaction.
18190 var checkTimestampOffset = function checkTimestampOffset() {
18191 _this2.state = 'READY';
18192
18193 if (!_this2.paused()) {
18194 // if not paused, queue a buffer check as soon as possible
18195 _this2.monitorBuffer_();
18196 }
18197 };
18198
18199 this.syncController_.one('timestampoffset', checkTimestampOffset);
18200 this.state = 'WAITING_ON_TIMELINE';
18201 return;
18202 }
18203
18204 this.loadSegment_(segmentInfo);
18205 }
18206 /**
18207 * Prevents the segment loader from requesting segments we know contain no subtitles
18208 * by walking forward until we find the next segment that is not already
18209 * known to be empty.
18210 *
18211 * @param {Object} segmentInfo
18212 * a segment info object that describes the current segment
18213 * @return {Object}
18214 * a segment info object that describes the current segment
18215 */
18216 ;
18217
18218 _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
18219 while (segmentInfo && segmentInfo.segment.empty) {
18220 segmentInfo = this.generateSegmentInfo_(segmentInfo.playlist, segmentInfo.mediaIndex + 1, segmentInfo.startOfSegment + segmentInfo.duration, segmentInfo.isSyncRequest);
18221 }
18222
18223 return segmentInfo;
18224 };
18225
18226 _proto.stopForError = function stopForError(error) {
18227 this.error(error);
18228 this.state = 'READY';
18229 this.pause();
18230 this.trigger('error');
18231 }
18232 /**
18233 * append a decrypted segment to the SourceBuffer through a SourceUpdater
18234 *
18235 * @private
18236 */
18237 ;
18238
18239 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
18240 var _this3 = this;
18241
18242 if (!this.subtitlesTrack_) {
18243 this.state = 'READY';
18244 return;
18245 }
18246
18247 this.saveTransferStats_(simpleSegment.stats); // the request was aborted
18248
18249 if (!this.pendingSegment_) {
18250 this.state = 'READY';
18251 this.mediaRequestsAborted += 1;
18252 return;
18253 }
18254
18255 if (error) {
18256 if (error.code === REQUEST_ERRORS.TIMEOUT) {
18257 this.handleTimeout_();
18258 }
18259
18260 if (error.code === REQUEST_ERRORS.ABORTED) {
18261 this.mediaRequestsAborted += 1;
18262 } else {
18263 this.mediaRequestsErrored += 1;
18264 }
18265
18266 this.stopForError(error);
18267 return;
18268 }
18269
18270 var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
18271 // maintain functionality between segment loaders
18272
18273 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
18274 this.state = 'APPENDING'; // used for tests
18275
18276 this.trigger('appending');
18277 var segment = segmentInfo.segment;
18278
18279 if (segment.map) {
18280 segment.map.bytes = simpleSegment.map.bytes;
18281 }
18282
18283 segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded; otherwise, wait till it finishes loading
18284
18285 if (typeof window__default['default'].WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
18286 var loadHandler;
18287
18288 var errorHandler = function errorHandler() {
18289 _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
18290
18291 _this3.stopForError({
18292 message: 'Error loading vtt.js'
18293 });
18294
18295 return;
18296 };
18297
18298 loadHandler = function loadHandler() {
18299 _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
18300
18301 _this3.segmentRequestFinished_(error, simpleSegment, result);
18302 };
18303
18304 this.state = 'WAITING_ON_VTTJS';
18305 this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
18306 this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);
18307 return;
18308 }
18309
18310 segment.requested = true;
18311
18312 try {
18313 this.parseVTTCues_(segmentInfo);
18314 } catch (e) {
18315 this.stopForError({
18316 message: e.message
18317 });
18318 return;
18319 }
18320
18321 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
18322
18323 if (segmentInfo.cues.length) {
18324 segmentInfo.timingInfo = {
18325 start: segmentInfo.cues[0].startTime,
18326 end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
18327 };
18328 } else {
18329 segmentInfo.timingInfo = {
18330 start: segmentInfo.startOfSegment,
18331 end: segmentInfo.startOfSegment + segmentInfo.duration
18332 };
18333 }
18334
18335 if (segmentInfo.isSyncRequest) {
18336 this.trigger('syncinfoupdate');
18337 this.pendingSegment_ = null;
18338 this.state = 'READY';
18339 return;
18340 }
18341
18342 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
18343 this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
18344 // the subtitle track
18345
18346 segmentInfo.cues.forEach(function (cue) {
18347 _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window__default['default'].VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
18348 }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
18349 // cues to have identical time-intervals, but if the text is also identical
18350 // we can safely assume it is a duplicate that can be removed (ex. when a cue
18351 // "overlaps" VTT segments)
18352
18353 removeDuplicateCuesFromTrack(this.subtitlesTrack_);
18354 this.handleAppendsDone_();
18355 };
18356
18357 _proto.handleData_ = function handleData_() {// noop, as we shouldn't be getting video/audio data or captions
18358 // that we do not support here.
18359 };
18360
18361 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
18362 }
18363 /**
18364 * Uses the WebVTT parser to parse the segment response
18365 *
18366 * @param {Object} segmentInfo
18367 * a segment info object that describes the current segment
18368 * @private
18369 */
18370 ;
18371
18372 _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
18373 var decoder;
18374 var decodeBytesToString = false;
18375
18376 if (typeof window__default['default'].TextDecoder === 'function') {
18377 decoder = new window__default['default'].TextDecoder('utf8');
18378 } else {
18379 decoder = window__default['default'].WebVTT.StringDecoder();
18380 decodeBytesToString = true;
18381 }
18382
18383 var parser = new window__default['default'].WebVTT.Parser(window__default['default'], window__default['default'].vttjs, decoder);
18384 segmentInfo.cues = [];
18385 segmentInfo.timestampmap = {
18386 MPEGTS: 0,
18387 LOCAL: 0
18388 };
18389 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
18390
18391 parser.ontimestampmap = function (map) {
18392 segmentInfo.timestampmap = map;
18393 };
18394
18395 parser.onparsingerror = function (error) {
18396 videojs__default['default'].log.warn('Error encountered when parsing cues: ' + error.message);
18397 };
18398
18399 if (segmentInfo.segment.map) {
18400 var mapData = segmentInfo.segment.map.bytes;
18401
18402 if (decodeBytesToString) {
18403 mapData = uint8ToUtf8(mapData);
18404 }
18405
18406 parser.parse(mapData);
18407 }
18408
18409 var segmentData = segmentInfo.bytes;
18410
18411 if (decodeBytesToString) {
18412 segmentData = uint8ToUtf8(segmentData);
18413 }
18414
18415 parser.parse(segmentData);
18416 parser.flush();
18417 }
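 // Illustrative sketch (comment only): outside this loader the same parser can
 // be driven directly (assumes vtt.js is already loaded on window):
 //
 //   var parser = new window.WebVTT.Parser(window, window.vttjs, window.WebVTT.StringDecoder());
 //   var cues = [];
 //   parser.oncue = function (cue) { cues.push(cue); };
 //   parser.parse('WEBVTT\n\n00:00.000 --> 00:01.000\nhello');
 //   parser.flush(); // cues[0].text === 'hello'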
18418 /**
18419 * Updates the start and end times of any cues parsed by the WebVTT parser using
18420 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
18421 * from the SyncController
18422 *
18423 * @param {Object} segmentInfo
18424 * a segment info object that describes the current segment
18425 * @param {Object} mappingObj
18426 * object containing a mapping from TS to media time
18427 * @param {Object} playlist
18428 * the playlist object containing the segment
18429 * @private
18430 */
18431 ;
18432
18433 _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
18434 var segment = segmentInfo.segment;
18435
18436 if (!mappingObj) {
18437 // If the sync controller does not have a mapping of TS to Media Time for the
18438 // timeline, then we don't have enough information to update the cue
18439 // start/end times
18440 return;
18441 }
18442
18443 if (!segmentInfo.cues.length) {
18444 // If there are no cues, we also do not have enough information to figure out
18445 // segment timing. Mark that the segment contains no cues so we don't re-request
18446 // an empty segment.
18447 segment.empty = true;
18448 return;
18449 }
18450
18451 var timestampmap = segmentInfo.timestampmap;
18452 var diff = timestampmap.MPEGTS / clock.ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
18453 segmentInfo.cues.forEach(function (cue) {
18454 // First convert cue time to TS time using the timestamp-map provided within the vtt
18455 cue.startTime += diff;
18456 cue.endTime += diff;
18457 });
18458
18459 if (!playlist.syncInfo) {
18460 var firstStart = segmentInfo.cues[0].startTime;
18461 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
18462 playlist.syncInfo = {
18463 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
18464 time: Math.min(firstStart, lastStart - segment.duration)
18465 };
18466 }
18467 };
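 // Illustrative sketch (comment only): with hypothetical values
 // timestampmap = { MPEGTS: 900000, LOCAL: 0 } and mappingObj.mapping = -10,
 // diff = 900000 / 90000 - 0 + (-10) = 0, so the cue times already line up
 // with display time; a nonzero diff shifts every cue by that many seconds.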
18468
18469 return VTTSegmentLoader;
18470}(SegmentLoader);
18471
18472/**
18473 * @file ad-cue-tags.js
18474 */
18475/**
18476 * Searches for an ad cue that overlaps with the given mediaTime
18477 *
18478 * @param {Object} track
18479 * the track to find the cue for
18480 *
18481 * @param {number} mediaTime
18482 * the time to find the cue at
18483 *
18484 * @return {Object|null}
18485 * the found cue or null
18486 */
18487
18488var findAdCue = function findAdCue(track, mediaTime) {
18489 var cues = track.cues;
18490
18491 for (var i = 0; i < cues.length; i++) {
18492 var cue = cues[i];
18493
18494 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
18495 return cue;
18496 }
18497 }
18498
18499 return null;
18500};
18501var updateAdCues = function updateAdCues(media, track, offset) {
18502 if (offset === void 0) {
18503 offset = 0;
18504 }
18505
18506 if (!media.segments) {
18507 return;
18508 }
18509
18510 var mediaTime = offset;
18511 var cue;
18512
18513 for (var i = 0; i < media.segments.length; i++) {
18514 var segment = media.segments[i];
18515
18516 if (!cue) {
18517 // Since the cues will span for at least the segment duration, adding a fudge
18518 // factor of half segment duration will prevent duplicate cues from being
18519 // created when timing info is not exact (e.g. cue start time initialized
18520 // at 10.006677, but next call mediaTime is 10.003332)
18521 cue = findAdCue(track, mediaTime + segment.duration / 2);
18522 }
18523
18524 if (cue) {
18525 if ('cueIn' in segment) {
18526 // Found a CUE-IN so end the cue
18527 cue.endTime = mediaTime;
18528 cue.adEndTime = mediaTime;
18529 mediaTime += segment.duration;
18530 cue = null;
18531 continue;
18532 }
18533
18534 if (mediaTime < cue.endTime) {
18535 // Already processed this mediaTime for this cue
18536 mediaTime += segment.duration;
18537 continue;
18538 } // otherwise extend cue until a CUE-IN is found
18539
18540
18541 cue.endTime += segment.duration;
18542 } else {
18543 if ('cueOut' in segment) {
18544 cue = new window__default['default'].VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
18545 cue.adStartTime = mediaTime; // Assumes tag format to be
18546 // #EXT-X-CUE-OUT:30
18547
18548 cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
18549 track.addCue(cue);
18550 }
18551
18552 if ('cueOutCont' in segment) {
18553 // Entered into the middle of an ad cue
18554 // Assumes tag format to be
18555 // #EXT-X-CUE-OUT-CONT:10/30
18556 var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
18557 adOffset = _segment$cueOutCont$s[0],
18558 adTotal = _segment$cueOutCont$s[1];
18559
18560 cue = new window__default['default'].VTTCue(mediaTime, mediaTime + segment.duration, '');
18561 cue.adStartTime = mediaTime - adOffset;
18562 cue.adEndTime = cue.adStartTime + adTotal;
18563 track.addCue(cue);
18564 }
18565 }
18566
18567 mediaTime += segment.duration;
18568 }
18569};
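// Illustrative sketch (comment only): for a hypothetical playlist carrying
// #EXT-X-CUE-OUT:30 on one segment and #EXT-X-CUE-IN a few segments later,
// updateAdCues creates one VTTCue at the CUE-OUT segment's start time,
// extends cue.endTime across the following segments, and closes both
// cue.endTime and cue.adEndTime when the CUE-IN segment is reached.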
18570
18571var getSegmentIndex = function getSegmentIndex(i, playlist, currentTime) {
18572 if (currentTime === void 0) {
18573 currentTime = 0;
18574 }
18575
18576 var segments = playlist.segments;
18577 return playlist.endList || currentTime === 0 ? i : segments.length - (i + 1);
18578};
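// Illustrative sketch (comment only): for a hypothetical live playlist
// (no endList) with 5 segments and currentTime > 0, indices are walked
// newest-first: i = 0 maps to segment 4, i = 1 to segment 3, and so on;
// for VOD (endList set) or currentTime === 0 the index is returned unchanged.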
18579
18580var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
18581// the equivalence display-time 0 === segment-index 0
18582{
18583 name: 'VOD',
18584 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
18585 if (duration !== Infinity) {
18586 var syncPoint = {
18587 time: 0,
18588 segmentIndex: 0
18589 };
18590 return syncPoint;
18591 }
18592
18593 return null;
18594 }
18595}, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
18596{
18597 name: 'ProgramDateTime',
18598 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
18599 if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
18600 return null;
18601 }
18602
18603 var segments = playlist.segments || [];
18604 var syncPoint = null;
18605 var lastDistance = null;
18606 currentTime = currentTime || 0;
18607
18608 for (var i = 0; i < segments.length; i++) {
18609 var segmentIndex = getSegmentIndex(i, playlist, currentTime);
18610 var segment = segments[segmentIndex];
18611 var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
18612
18613 if (!datetimeMapping) {
18614 continue;
18615 }
18616
18617 if (segment.dateTimeObject) {
18618 var segmentTime = segment.dateTimeObject.getTime() / 1000;
18619 var segmentStart = segmentTime + datetimeMapping;
18620 var distance = Math.abs(currentTime - segmentStart); // Once the distance begins to increase, or if distance is 0, we have passed
18621 // currentTime and can stop looking for better candidates
18622
18623 if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
18624 break;
18625 }
18626
18627 lastDistance = distance;
18628 syncPoint = {
18629 time: segmentStart,
18630 segmentIndex: segmentIndex
18631 };
18632 }
18633 }
18634
18635 return syncPoint;
18636 }
18637}, // Strategy "Segment": We have a known time mapping for a timeline and a
18638// segment in the current timeline with timing data
18639{
18640 name: 'Segment',
18641 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
18642 var segments = playlist.segments || [];
18643 var syncPoint = null;
18644 var lastDistance = null;
18645 currentTime = currentTime || 0;
18646
18647 for (var i = 0; i < segments.length; i++) {
18648 var segmentIndex = getSegmentIndex(i, playlist, currentTime);
18649 var segment = segments[segmentIndex];
18650
18651 if (segment.timeline === currentTimeline && typeof segment.start !== 'undefined') {
18652 var distance = Math.abs(currentTime - segment.start); // Once the distance begins to increase, we have passed
18653 // currentTime and can stop looking for better candidates
18654
18655 if (lastDistance !== null && lastDistance < distance) {
18656 break;
18657 }
18658
18659 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
18660 lastDistance = distance;
18661 syncPoint = {
18662 time: segment.start,
18663 segmentIndex: segmentIndex
18664 };
18665 }
18666 }
18667 }
18668
18669 return syncPoint;
18670 }
18671}, // Strategy "Discontinuity": We have a discontinuity with a known
18672// display-time
18673{
18674 name: 'Discontinuity',
18675 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
18676 var syncPoint = null;
18677 currentTime = currentTime || 0;
18678
18679 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
18680 var lastDistance = null;
18681
18682 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
18683 var segmentIndex = playlist.discontinuityStarts[i];
18684 var discontinuity = playlist.discontinuitySequence + i + 1;
18685 var discontinuitySync = syncController.discontinuities[discontinuity];
18686
18687 if (discontinuitySync) {
18688 var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
18689 // currentTime and can stop looking for better candidates
18690
18691 if (lastDistance !== null && lastDistance < distance) {
18692 break;
18693 }
18694
18695 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
18696 lastDistance = distance;
18697 syncPoint = {
18698 time: discontinuitySync.time,
18699 segmentIndex: segmentIndex
18700 };
18701 }
18702 }
18703 }
18704 }
18705
18706 return syncPoint;
18707 }
18708}, // Strategy "Playlist": We have a playlist with a known mapping of
18709// segment index to display time
18710{
18711 name: 'Playlist',
18712 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
18713 if (playlist.syncInfo) {
18714 var syncPoint = {
18715 time: playlist.syncInfo.time,
18716 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence
18717 };
18718 return syncPoint;
18719 }
18720
18721 return null;
18722 }
18723}];
18724
18725var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
18726 _inheritsLoose__default['default'](SyncController, _videojs$EventTarget);
18727
18728 function SyncController(options) {
18729 var _this;
18730
18731 _this = _videojs$EventTarget.call(this) || this; // ...for syncing across variants
18732
18733 _this.timelines = [];
18734 _this.discontinuities = [];
18735 _this.timelineToDatetimeMappings = {};
18736 _this.logger_ = logger('SyncController');
18737 return _this;
18738 }
18739 /**
18740 * Find a sync-point for the playlist specified
18741 *
18742 * A sync-point is defined as a known mapping from display-time to
18743 * a segment-index in the current playlist.
18744 *
18745 * @param {Playlist} playlist
18746 * The playlist that needs a sync-point
18747 * @param {number} duration
18748 * Duration of the MediaSource (Infinite if playing a live source)
18749 * @param {number} currentTimeline
18750 * The last timeline from which a segment was loaded
 * @param {number} currentTime
 * The player's current time, used to pick the nearest sync-point
18751 * @return {Object}
18752 * A sync-point object
18753 */
18754
18755
18756 var _proto = SyncController.prototype;
18757
18758 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
18759 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
18760
18761 if (!syncPoints.length) {
18762 // Signal that we need to attempt to get a sync-point manually
18763 // by fetching a segment in the playlist and constructing
18764 // a sync-point from that information
18765 return null;
18766 } // Now find the sync-point that is closest to the currentTime because
18767 // that should result in the most accurate guess about which segment
18768 // to fetch
18769
18770
18771 return this.selectSyncPoint_(syncPoints, {
18772 key: 'time',
18773 value: currentTime
18774 });
18775 }
18776 /**
18777 * Calculate the amount of time that has expired off the playlist during playback
18778 *
18779 * @param {Playlist} playlist
18780 * Playlist object to calculate expired from
18781 * @param {number} duration
18782 * Duration of the MediaSource (Infinity if playing a live source)
18783 * @return {number|null}
18784 * The amount of time that has expired off the playlist during playback. Null
18785 * if no sync-points for the playlist can be found.
18786 */
18787 ;
18788
18789 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
18790 if (!playlist || !playlist.segments) {
18791 return null;
18792 }
18793
18794 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
18795
18796 if (!syncPoints.length) {
18797 return null;
18798 }
18799
18800 var syncPoint = this.selectSyncPoint_(syncPoints, {
18801 key: 'segmentIndex',
18802 value: 0
18803 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
18804 // duration from index 0 to syncPoint.segmentIndex instead of adding.
18805
18806 if (syncPoint.segmentIndex > 0) {
18807 syncPoint.time *= -1;
18808 }
18809
18810 return Math.abs(syncPoint.time + sumDurations(playlist, syncPoint.segmentIndex, 0));
18811 }
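 // Illustrative sketch (comment only): with a hypothetical sync-point
 // { time: 30, segmentIndex: 2 } and two 10-second segments before index 2,
 // time is negated to -30 and sumDurations(playlist, 2, 0) === 20, so
 // Math.abs(-30 + 20) === 10 seconds have expired off the playlist.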
18812 /**
18813 * Runs each sync-point strategy and returns a list of sync-points returned by the
18814 * strategies
18815 *
18816 * @private
18817 * @param {Playlist} playlist
18818 * The playlist that needs a sync-point
18819 * @param {number} duration
18820 * Duration of the MediaSource (Infinity if playing a live source)
18821 * @param {number} currentTimeline
18822 * The last timeline from which a segment was loaded
18823 * @return {Array}
18824 * A list of sync-point objects
18825 */
18826 ;
18827
18828 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
18829 var syncPoints = []; // Try to find a sync-point by utilizing various strategies...
18830
18831 for (var i = 0; i < syncPointStrategies.length; i++) {
18832 var strategy = syncPointStrategies[i];
18833 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
18834
18835 if (syncPoint) {
18836 syncPoint.strategy = strategy.name;
18837 syncPoints.push({
18838 strategy: strategy.name,
18839 syncPoint: syncPoint
18840 });
18841 }
18842 }
18843
18844 return syncPoints;
18845 }
18846 /**
18847 * Selects the sync-point nearest the specified target
18848 *
18849 * @private
18850 * @param {Array} syncPoints
18851 * List of sync-points to select from
18852 * @param {Object} target
18853 * Object specifying the property and value we are targeting
18854 * @param {string} target.key
18855 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
18856 * @param {number} target.value
18857 * The value to target for the specified key.
18858 * @return {Object}
18859 * The sync-point nearest the target
18860 */
18861 ;
18862
18863 _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
18864 var bestSyncPoint = syncPoints[0].syncPoint;
18865 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
18866 var bestStrategy = syncPoints[0].strategy;
18867
18868 for (var i = 1; i < syncPoints.length; i++) {
18869 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
18870
18871 if (newDistance < bestDistance) {
18872 bestDistance = newDistance;
18873 bestSyncPoint = syncPoints[i].syncPoint;
18874 bestStrategy = syncPoints[i].strategy;
18875 }
18876 }
18877
18878 this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex + "]"));
18879 return bestSyncPoint;
18880 }
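 // Illustrative sketch (comment only): given hypothetical candidates
 //   [{ strategy: 'VOD', syncPoint: { time: 0, segmentIndex: 0 } },
 //    { strategy: 'Segment', syncPoint: { time: 40, segmentIndex: 4 } }]
 // and target { key: 'time', value: 42 }, the distances are 42 and 2, so the
 // 'Segment' sync-point is returned.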
18881 /**
18882 * When segments leave the live window, save any meta-data present on
18883 * them to the playlist to allow for synchronization at the
18884 * playlist level later.
18885 *
18886 * @param {Playlist} oldPlaylist - The previous active playlist
18887 * @param {Playlist} newPlaylist - The updated and most current playlist
18888 */
18889 ;
18890
18891 _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
18892 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // When a segment expires from the playlist and it has a start time
18893 // save that information as a possible sync-point reference for the future
18894
18895 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
18896 var lastRemovedSegment = oldPlaylist.segments[i];
18897
18898 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
18899 newPlaylist.syncInfo = {
18900 mediaSequence: oldPlaylist.mediaSequence + i,
18901 time: lastRemovedSegment.start
18902 };
18903 this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
18904 this.trigger('syncinfoupdate');
18905 break;
18906 }
18907 }
18908 }
18909 /**
18910 * Save the mapping from playlist's ProgramDateTime to display time. This should only happen
18911 * before segments start to load.
18912 *
18913 * @param {Playlist} playlist - The currently active playlist
18914 */
18915 ;
18916
18917 _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
18918 // It's possible for the playlist to be updated before playback starts, meaning time
18919 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
18920 // crossed, then the old time zero mapping (for the prior timeline) would be retained
18921 // unless the mappings are cleared.
18922 this.timelineToDatetimeMappings = {};
18923
18924 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
18925 var firstSegment = playlist.segments[0];
18926 var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
18927 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
18928 }
18929 }
18930 /**
18931 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
18932 * based on the latest timing information.
18933 *
18934 * @param {Object} options
18935 * Options object
18936 * @param {SegmentInfo} options.segmentInfo
18937 * The current active request information
18938 * @param {boolean} options.shouldSaveTimelineMapping
18939 * If there's a timeline change, determines if the timeline mapping should be
18940 * saved for timeline and program date time mappings.
18941 */
18942 ;
18943
18944 _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
18945 var segmentInfo = _ref.segmentInfo,
18946 shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
18947 var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
18948 var segment = segmentInfo.segment;
18949
18950 if (didCalculateSegmentTimeMapping) {
18951 this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
18952 // now with segment timing information
18953
18954 if (!segmentInfo.playlist.syncInfo) {
18955 segmentInfo.playlist.syncInfo = {
18956 mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
18957 time: segment.start
18958 };
18959 }
18960 }
18961
18962 var dateTime = segment.dateTimeObject;
18963
18964 if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
18965 this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
18966 }
18967 };
18968
18969 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
18970 if (typeof this.timelines[timeline] === 'undefined') {
18971 return null;
18972 }
18973
18974 return this.timelines[timeline].time;
18975 };
18976
18977 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
18978 if (typeof this.timelines[timeline] === 'undefined') {
18979 return null;
18980 }
18981
18982 return this.timelines[timeline].mapping;
18983 }
18984 /**
18985 * Use the "media time" for a segment to generate a mapping to "display time" and
18986 * save that display time to the segment.
18987 *
18988 * @private
18989 * @param {SegmentInfo} segmentInfo
18990 * The current active request information
18991 * @param {Object} timingInfo
18992 * The start and end time of the current segment in "media time"
18993 * @param {boolean} shouldSaveTimelineMapping
18994 * If there's a timeline change, determines if the timeline mapping should be
18995 * saved in timelines.
18996 * @return {boolean}
18997 * Returns false if segment time mapping could not be calculated
18998 */
18999 ;
19000
19001 _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
19002 var segment = segmentInfo.segment;
19003 var mappingObj = this.timelines[segmentInfo.timeline];
19004
19005 if (typeof segmentInfo.timestampOffset === 'number') {
19006 mappingObj = {
19007 time: segmentInfo.startOfSegment,
19008 mapping: segmentInfo.startOfSegment - timingInfo.start
19009 };
19010
19011 if (shouldSaveTimelineMapping) {
19012 this.timelines[segmentInfo.timeline] = mappingObj;
19013 this.trigger('timestampoffset');
19014 this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
19015 }
19016
19017 segment.start = segmentInfo.startOfSegment;
19018 segment.end = timingInfo.end + mappingObj.mapping;
19019 } else if (mappingObj) {
19020 segment.start = timingInfo.start + mappingObj.mapping;
19021 segment.end = timingInfo.end + mappingObj.mapping;
19022 } else {
19023 return false;
19024 }
19025
19026 return true;
19027 }
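 // Illustrative sketch (comment only): with hypothetical values
 // startOfSegment = 30 and timingInfo = { start: 100, end: 104 }, the mapping
 // is 30 - 100 = -70, so display time = media time + mapping:
 // segment.start = 30 and segment.end = 104 + (-70) = 34.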
19028 /**
19029 * Each time we have a discontinuity in the playlist, attempt to calculate the location
19030 * in display time of the start of the discontinuity and save that. We also save an accuracy
19031 * value so that we save values with the most accuracy (closest to 0.)
19032 *
19033 * @private
19034 * @param {SegmentInfo} segmentInfo - The current active request information
19035 */
19036 ;
19037
19038 _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
19039 var playlist = segmentInfo.playlist;
19040 var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
19041 // the range starts, and its accuracy is 0 (greater accuracy values
19042 // mean more approximation)
19043
19044 if (segment.discontinuity) {
19045 this.discontinuities[segment.timeline] = {
19046 time: segment.start,
19047 accuracy: 0
19048 };
19049 } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
19050 // Search for future discontinuities that we can provide better timing
19051 // information for and save that information for sync purposes
19052 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
19053 var segmentIndex = playlist.discontinuityStarts[i];
19054 var discontinuity = playlist.discontinuitySequence + i + 1;
19055 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
19056 var accuracy = Math.abs(mediaIndexDiff);
19057
19058 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
19059 var time = void 0;
19060
19061 if (mediaIndexDiff < 0) {
19062 time = segment.start - sumDurations(playlist, segmentInfo.mediaIndex, segmentIndex);
19063 } else {
19064 time = segment.end + sumDurations(playlist, segmentInfo.mediaIndex + 1, segmentIndex);
19065 }
19066
19067 this.discontinuities[discontinuity] = {
19068 time: time,
19069 accuracy: accuracy
19070 };
19071 }
19072 }
19073 }
19074 };
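 // Illustrative sketch (comment only): if the current segment has
 // mediaIndex 4 and end 50, and a discontinuity starts at segmentIndex 6
 // with 10-second segments, then mediaIndexDiff = 2, accuracy = 2, and the
 // discontinuity time is estimated as 50 + sumDurations(playlist, 5, 6) = 60.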
19075
19076 _proto.dispose = function dispose() {
19077 this.trigger('dispose');
19078 this.off();
19079 };
19080
19081 return SyncController;
19082}(videojs__default['default'].EventTarget);
19083
19084/**
19085 * The TimelineChangeController acts as a source for segment loaders to listen for and
19086 * keep track of latest and pending timeline changes. This is useful to ensure proper
19087 * sync, as each loader may need to make a consideration for what timeline the other
19088 * loader is on before making changes which could impact the other loader's media.
19089 *
19090 * @class TimelineChangeController
19091 * @extends videojs.EventTarget
19092 */
19093
19094var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
19095 _inheritsLoose__default['default'](TimelineChangeController, _videojs$EventTarget);
19096
19097 function TimelineChangeController() {
19098 var _this;
19099
19100 _this = _videojs$EventTarget.call(this) || this;
19101 _this.pendingTimelineChanges_ = {};
19102 _this.lastTimelineChanges_ = {};
19103 return _this;
19104 }
19105
19106 var _proto = TimelineChangeController.prototype;
19107
19108 _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
19109 this.pendingTimelineChanges_[type] = null;
19110 this.trigger('pendingtimelinechange');
19111 };
19112
19113 _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
19114 var type = _ref.type,
19115 from = _ref.from,
19116 to = _ref.to;
19117
19118 if (typeof from === 'number' && typeof to === 'number') {
19119 this.pendingTimelineChanges_[type] = {
19120 type: type,
19121 from: from,
19122 to: to
19123 };
19124 this.trigger('pendingtimelinechange');
19125 }
19126
19127 return this.pendingTimelineChanges_[type];
19128 };
19129
19130 _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
19131 var type = _ref2.type,
19132 from = _ref2.from,
19133 to = _ref2.to;
19134
19135 if (typeof from === 'number' && typeof to === 'number') {
19136 this.lastTimelineChanges_[type] = {
19137 type: type,
19138 from: from,
19139 to: to
19140 };
19141 delete this.pendingTimelineChanges_[type];
19142 this.trigger('timelinechange');
19143 }
19144
19145 return this.lastTimelineChanges_[type];
19146 };
19147
19148 _proto.dispose = function dispose() {
19149 this.trigger('dispose');
19150 this.pendingTimelineChanges_ = {};
19151 this.lastTimelineChanges_ = {};
19152 this.off();
19153 };
19154
19155 return TimelineChangeController;
19156}(videojs__default['default'].EventTarget);
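// Illustrative sketch (comment only): a minimal use of the controller above:
//
//   var tcc = new TimelineChangeController();
//   tcc.pendingTimelineChange({ type: 'main', from: 0, to: 1 }); // fires 'pendingtimelinechange'
//   tcc.lastTimelineChange({ type: 'main', from: 0, to: 1 });    // fires 'timelinechange'
//   tcc.lastTimelineChange({ type: 'main' });                    // getter form -> { type: 'main', from: 0, to: 1 }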
19157
19158/* rollup-plugin-worker-factory start for worker!/Users/gkatsevman/p/http-streaming-release/src/decrypter-worker.js */
19159var workerCode = transform(getWorkerString(function () {
19160
19161 function createCommonjsModule(fn, basedir, module) {
19162 return module = {
19163 path: basedir,
19164 exports: {},
19165 require: function require(path, base) {
19166 return commonjsRequire(path, base === undefined || base === null ? module.path : base);
19167 }
19168 }, fn(module, module.exports), module.exports;
19169 }
19170
19171 function commonjsRequire() {
19172 throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
19173 }
19174
19175 var createClass = createCommonjsModule(function (module) {
19176 function _defineProperties(target, props) {
19177 for (var i = 0; i < props.length; i++) {
19178 var descriptor = props[i];
19179 descriptor.enumerable = descriptor.enumerable || false;
19180 descriptor.configurable = true;
19181 if ("value" in descriptor) descriptor.writable = true;
19182 Object.defineProperty(target, descriptor.key, descriptor);
19183 }
19184 }
19185
19186 function _createClass(Constructor, protoProps, staticProps) {
19187 if (protoProps) _defineProperties(Constructor.prototype, protoProps);
19188 if (staticProps) _defineProperties(Constructor, staticProps);
19189 return Constructor;
19190 }
19191
19192 module.exports = _createClass;
19193 module.exports["default"] = module.exports, module.exports.__esModule = true;
19194 });
19195 var setPrototypeOf = createCommonjsModule(function (module) {
19196 function _setPrototypeOf(o, p) {
19197 module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
19198 o.__proto__ = p;
19199 return o;
19200 };
19201
19202 module.exports["default"] = module.exports, module.exports.__esModule = true;
19203 return _setPrototypeOf(o, p);
19204 }
19205
19206 module.exports = _setPrototypeOf;
19207 module.exports["default"] = module.exports, module.exports.__esModule = true;
19208 });
19209 var inheritsLoose = createCommonjsModule(function (module) {
19210 function _inheritsLoose(subClass, superClass) {
19211 subClass.prototype = Object.create(superClass.prototype);
19212 subClass.prototype.constructor = subClass;
19213 setPrototypeOf(subClass, superClass);
19214 }
19215
19216 module.exports = _inheritsLoose;
19217 module.exports["default"] = module.exports, module.exports.__esModule = true;
19218 });
19219 /**
19220 * @file stream.js
19221 */
19222
19223 /**
19224 * A lightweight readable stream implementation that handles event dispatching.
19225 *
19226 * @class Stream
19227 */
19228
19229 var Stream = /*#__PURE__*/function () {
19230 function Stream() {
19231 this.listeners = {};
19232 }
19233 /**
19234 * Add a listener for a specified event type.
19235 *
19236 * @param {string} type the event name
19237 * @param {Function} listener the callback to be invoked when an event of
19238 * the specified type occurs
19239 */
19240
19241
19242 var _proto = Stream.prototype;
19243
19244 _proto.on = function on(type, listener) {
19245 if (!this.listeners[type]) {
19246 this.listeners[type] = [];
19247 }
19248
19249 this.listeners[type].push(listener);
19250 }
19251 /**
19252 * Remove a listener for a specified event type.
19253 *
19254 * @param {string} type the event name
19255 * @param {Function} listener a function previously registered for this
19256 * type of event through `on`
19257 * @return {boolean} if we could turn it off or not
19258 */
19259 ;
19260
19261 _proto.off = function off(type, listener) {
19262 if (!this.listeners[type]) {
19263 return false;
19264 }
19265
19266 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
19267 // In Video.js we slice listener functions
19268 // on trigger so that it does not mess up the order
19269 // while we loop through.
19270 //
19271 // Here we slice on off so that the loop in trigger
19272 // can continue using its old reference to loop without
19273 // messing up the order.
19274
19275 this.listeners[type] = this.listeners[type].slice(0);
19276 this.listeners[type].splice(index, 1);
19277 return index > -1;
19278 }
19279 /**
19280 * Trigger an event of the specified type on this stream. Any additional
19281 * arguments to this function are passed as parameters to event listeners.
19282 *
19283 * @param {string} type the event name
19284 */
19285 ;
19286
19287 _proto.trigger = function trigger(type) {
19288 var callbacks = this.listeners[type];
19289
19290 if (!callbacks) {
19291 return;
19292 } // Slicing the arguments on every invocation of this method
19293 // can add a significant amount of overhead. Avoid the
19294 // intermediate object creation for the common case of a
19295 // single callback argument
19296
19297
19298 if (arguments.length === 2) {
19299 var length = callbacks.length;
19300
19301 for (var i = 0; i < length; ++i) {
19302 callbacks[i].call(this, arguments[1]);
19303 }
19304 } else {
19305 var args = Array.prototype.slice.call(arguments, 1);
19306 var _length = callbacks.length;
19307
19308 for (var _i = 0; _i < _length; ++_i) {
19309 callbacks[_i].apply(this, args);
19310 }
19311 }
19312 }
19313 /**
19314 * Destroys the stream and cleans up.
19315 */
19316 ;
19317
19318 _proto.dispose = function dispose() {
19319 this.listeners = {};
19320 }
19321 /**
19322 * Forwards all `data` events on this stream to the destination stream. The
19323 * destination stream should provide a method `push` to receive the data
19324 * events as they arrive.
19325 *
19326 * @param {Stream} destination the stream that will receive all `data` events
19327 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
19328 */
19329 ;
19330
19331 _proto.pipe = function pipe(destination) {
19332 this.on('data', function (data) {
19333 destination.push(data);
19334 });
19335 };
19336
19337 return Stream;
19338 }();
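  // Illustrative sketch (comment only): a minimal use of the Stream class
  // defined above, with a hypothetical destination object:
  //
  //   var src = new Stream();
  //   var sink = { push: function (data) { console.log('got', data); } };
  //   src.pipe(sink);
  //   src.trigger('data', 1); // logs: got 1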
19339 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
19340
19341 /**
19342 * Returns the subarray of a Uint8Array without PKCS#7 padding.
19343 *
19344 * @param padded {Uint8Array} unencrypted bytes that have been padded
19345 * @return {Uint8Array} the unpadded bytes
19346 * @see http://tools.ietf.org/html/rfc5652
19347 */
19348
19349
19350 function unpad(padded) {
19351 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
19352 }
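  // Illustrative sketch (comment only): PKCS#7 stores the pad length in the
  // final byte, so for a hypothetical 8-byte block:
  //
  //   unpad(new Uint8Array([1, 2, 3, 4, 5, 3, 3, 3])); // -> Uint8Array [1, 2, 3, 4, 5]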
19353 /*! @name aes-decrypter @version 3.1.2 @license Apache-2.0 */
19354
19355 /**
19356 * @file aes.js
19357 *
19358 * This file contains an adaptation of the AES decryption algorithm
19359 * from the Stanford Javascript Cryptography Library. That work is
19360 * covered by the following copyright and permissions notice:
19361 *
19362 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
19363 * All rights reserved.
19364 *
19365 * Redistribution and use in source and binary forms, with or without
19366 * modification, are permitted provided that the following conditions are
19367 * met:
19368 *
19369 * 1. Redistributions of source code must retain the above copyright
19370 * notice, this list of conditions and the following disclaimer.
19371 *
19372 * 2. Redistributions in binary form must reproduce the above
19373 * copyright notice, this list of conditions and the following
19374 * disclaimer in the documentation and/or other materials provided
19375 * with the distribution.
19376 *
19377 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
19378 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19379 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19380 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
19381 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
19382 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
19383 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
19384 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
19385 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
19386 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
19387 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
19388 *
19389 * The views and conclusions contained in the software and documentation
19390 * are those of the authors and should not be interpreted as representing
19391 * official policies, either expressed or implied, of the authors.
19392 */
19393
19394 /**
19395 * Expand the S-box tables.
19396 *
19397 * @private
19398 */
19399
19400
19401 var precompute = function precompute() {
19402 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
19403 var encTable = tables[0];
19404 var decTable = tables[1];
19405 var sbox = encTable[4];
19406 var sboxInv = decTable[4];
19407 var i;
19408 var x;
19409 var xInv;
19410 var d = [];
19411 var th = [];
19412 var x2;
19413 var x4;
19414 var x8;
19415 var s;
19416 var tEnc;
19417 var tDec; // Compute double and third tables
19418
19419 for (i = 0; i < 256; i++) {
19420 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
19421 }
19422
19423 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
19424 // Compute sbox
19425 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
19426 s = s >> 8 ^ s & 255 ^ 99;
19427 sbox[x] = s;
19428 sboxInv[s] = x; // Compute MixColumns
19429
19430 x8 = d[x4 = d[x2 = d[x]]];
19431 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
19432 tEnc = d[s] * 0x101 ^ s * 0x1010100;
19433
19434 for (i = 0; i < 4; i++) {
19435 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
19436 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
19437 }
19438 } // Compactify. Considerable speedup on Firefox.
19439
19440
19441 for (i = 0; i < 5; i++) {
19442 encTable[i] = encTable[i].slice(0);
19443 decTable[i] = decTable[i].slice(0);
19444 }
19445
19446 return tables;
19447 };
19448
19449 var aesTables = null;
19450 /**
19451 * Schedule out an AES key for both encryption and decryption. This
19452 * is a low-level class. Use a cipher mode to do bulk encryption.
19453 *
19454 * @class AES
19455 * @param key {Array} The key as an array of 4, 6 or 8 words.
19456 */
19457
19458 var AES = /*#__PURE__*/function () {
19459 function AES(key) {
19460 /**
19461 * The expanded S-box and inverse S-box tables. These will be computed
19462 * on the client so that we don't have to send them down the wire.
19463 *
19464 * There are two tables, _tables[0] is for encryption and
19465 * _tables[1] is for decryption.
19466 *
19467 * The first 4 sub-tables are the expanded S-box with MixColumns. The
19468 * last (_tables[01][4]) is the S-box itself.
19469 *
19470 * @private
19471 */
19472 // if we have yet to precompute the S-box tables
19473 // do so now
19474 if (!aesTables) {
19475 aesTables = precompute();
19476 } // then make a copy of that object for use
19477
19478
19479 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
19480 var i;
19481 var j;
19482 var tmp;
19483 var sbox = this._tables[0][4];
19484 var decTable = this._tables[1];
19485 var keyLen = key.length;
19486 var rcon = 1;
19487
19488 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
19489 throw new Error('Invalid aes key size');
19490 }
19491
19492 var encKey = key.slice(0);
19493 var decKey = [];
19494 this._key = [encKey, decKey]; // schedule encryption keys
19495
19496 for (i = keyLen; i < 4 * keyLen + 28; i++) {
19497 tmp = encKey[i - 1]; // apply sbox
19498
19499 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
19500 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
19501
19502 if (i % keyLen === 0) {
19503 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
19504 rcon = rcon << 1 ^ (rcon >> 7) * 283;
19505 }
19506 }
19507
19508 encKey[i] = encKey[i - keyLen] ^ tmp;
19509 } // schedule decryption keys
19510
19511
19512 for (j = 0; i; j++, i--) {
19513 tmp = encKey[j & 3 ? i : i - 4];
19514
19515 if (i <= 4 || j < 4) {
19516 decKey[j] = tmp;
19517 } else {
19518 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
19519 }
19520 }
19521 }
19522 /**
19523 * Decrypt 16 bytes, specified as four 32-bit words.
19524 *
19525 * @param {number} encrypted0 the first word to decrypt
19526 * @param {number} encrypted1 the second word to decrypt
19527 * @param {number} encrypted2 the third word to decrypt
19528 * @param {number} encrypted3 the fourth word to decrypt
19529 * @param {Int32Array} out the array to write the decrypted words
19530 * into
19531 * @param {number} offset the offset into the output array to start
19532 * writing results
19533 * @return {Array} The plaintext.
19534 */
19535
19536
19537 var _proto = AES.prototype;
19538
19539 _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
19540 var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
19541
19542 var a = encrypted0 ^ key[0];
19543 var b = encrypted3 ^ key[1];
19544 var c = encrypted2 ^ key[2];
19545 var d = encrypted1 ^ key[3];
19546 var a2;
19547 var b2;
19548 var c2; // key.length === 2 ?
19549
19550 var nInnerRounds = key.length / 4 - 2;
19551 var i;
19552 var kIndex = 4;
19553 var table = this._tables[1]; // load up the tables
19554
19555 var table0 = table[0];
19556 var table1 = table[1];
19557 var table2 = table[2];
19558 var table3 = table[3];
19559 var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
19560
19561 for (i = 0; i < nInnerRounds; i++) {
19562 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
19563 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
19564 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
19565 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
19566 kIndex += 4;
19567 a = a2;
19568 b = b2;
19569 c = c2;
19570 } // Last round.
19571
19572
19573 for (i = 0; i < 4; i++) {
19574 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
19575 a2 = a;
19576 a = b;
19577 b = c;
19578 c = d;
19579 d = a2;
19580 }
19581 };
19582
19583 return AES;
19584 }();
19585 /**
19586 * A wrapper around the Stream class to use setTimeout
19587 * and run stream "jobs" asynchronously
19588 *
19589 * @class AsyncStream
19590 * @extends Stream
19591 */
19592
19593
19594 var AsyncStream = /*#__PURE__*/function (_Stream) {
19595 inheritsLoose(AsyncStream, _Stream);
19596
19597 function AsyncStream() {
19598 var _this;
19599
19600 _this = _Stream.call(this, Stream) || this;
19601 _this.jobs = [];
19602 _this.delay = 1;
19603 _this.timeout_ = null;
19604 return _this;
19605 }
19606 /**
19607 * process an async job
19608 *
19609 * @private
19610 */
19611
19612
19613 var _proto = AsyncStream.prototype;
19614
19615 _proto.processJob_ = function processJob_() {
19616 this.jobs.shift()();
19617
19618 if (this.jobs.length) {
19619 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
19620 } else {
19621 this.timeout_ = null;
19622 }
19623 }
19624 /**
19625 * push a job into the stream
19626 *
19627 * @param {Function} job the job to push into the stream
19628 */
19629 ;
19630
19631 _proto.push = function push(job) {
19632 this.jobs.push(job);
19633
19634 if (!this.timeout_) {
19635 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
19636 }
19637 };
19638
19639 return AsyncStream;
19640 }(Stream);
19641 /**
19642 * Convert network-order (big-endian) bytes into their little-endian
19643 * representation.
19644 */
19645
19646
19647 var ntoh = function ntoh(word) {
19648 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
19649 };
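  // Illustrative sketch (comment only): ntoh reverses the byte order of a
  // 32-bit word, e.g. 0x12345678 -> 0x78563412:
  //
  //   (ntoh(0x12345678) >>> 0).toString(16); // '78563412'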
19650 /**
19651 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
19652 *
19653 * @param {Uint8Array} encrypted the encrypted bytes
19654 * @param {Uint32Array} key the bytes of the decryption key
19655 * @param {Uint32Array} initVector the initialization vector (IV) to
19656 * use for the first round of CBC.
19657 * @return {Uint8Array} the decrypted bytes
19658 *
19659 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
19660 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
19661 * @see https://tools.ietf.org/html/rfc2315
19662 */
19663
19664
19665 var decrypt = function decrypt(encrypted, key, initVector) {
19666 // word-level access to the encrypted bytes
19667 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
19668 var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
19669
19670 var decrypted = new Uint8Array(encrypted.byteLength);
19671 var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
19672 // decrypted data
19673
19674 var init0;
19675 var init1;
19676 var init2;
19677 var init3;
19678 var encrypted0;
19679 var encrypted1;
19680 var encrypted2;
19681 var encrypted3; // iteration variable
19682
19683 var wordIx; // pull out the words of the IV to ensure we don't modify the
19684 // passed-in reference and to allow easier access
19685
19686 init0 = initVector[0];
19687 init1 = initVector[1];
19688 init2 = initVector[2];
19689 init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
19690 // to each decrypted block
19691
19692 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
19693 // convert big-endian (network order) words into little-endian
19694 // (javascript order)
19695 encrypted0 = ntoh(encrypted32[wordIx]);
19696 encrypted1 = ntoh(encrypted32[wordIx + 1]);
19697 encrypted2 = ntoh(encrypted32[wordIx + 2]);
19698 encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
19699
19700 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
19701 // plaintext
19702
19703 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
19704 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
19705 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
19706 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
19707
19708 init0 = encrypted0;
19709 init1 = encrypted1;
19710 init2 = encrypted2;
19711 init3 = encrypted3;
19712 }
19713
19714 return decrypted;
19715 };
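// A minimal sketch (illustrative only) of the synchronous decrypt() helper
// above; all buffer contents are hypothetical placeholders. The ciphertext
// length must be a multiple of 16 bytes, as AES-128 CBC produces.
var exampleDecryptUsage_ = function exampleDecryptUsage_() {
  var encrypted = new Uint8Array(32); // two ciphertext blocks (placeholder)
  var key = new Uint32Array([1, 2, 3, 4]); // placeholder 128-bit key
  var iv = new Uint32Array([5, 6, 7, 8]); // placeholder IV
  // returns the raw plaintext; PKCS#7 padding is not yet removed here
  return decrypt(encrypted, key, iv);
};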
19716 /**
19717 * The `Decrypter` class that manages decryption of AES
19718 * data through `AsyncStream` objects and the `decrypt`
19719 * function
19720 *
19721 * @param {Uint8Array} encrypted the encrypted bytes
19722 * @param {Uint32Array} key the bytes of the decryption key
19723 * @param {Uint32Array} initVector the initialization vector (IV) to use for the first round of CBC
19724 * @param {Function} done the function to run when done
19725 * @class Decrypter
19726 */
19727
19728
19729 var Decrypter = /*#__PURE__*/function () {
19730 function Decrypter(encrypted, key, initVector, done) {
19731 var step = Decrypter.STEP;
19732 var encrypted32 = new Int32Array(encrypted.buffer);
19733 var decrypted = new Uint8Array(encrypted.byteLength);
19734 var i = 0;
19735 this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously
19736
19737 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
19738
19739 for (i = step; i < encrypted32.length; i += step) {
19740 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
19741 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
19742 } // invoke the done() callback when everything is finished
19743
19744
19745 this.asyncStream_.push(function () {
19746 // remove pkcs#7 padding from the decrypted bytes
19747 done(null, unpad(decrypted));
19748 });
19749 }
19750 /**
19751 * A getter for STEP, the maximum number of bytes to process at one time
19752 *
19753 * @return {number} the value of STEP (32000)
19754 */
19755
19756
19757 var _proto = Decrypter.prototype;
19758 /**
19759 * @private
19760 */
19761
19762 _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
19763 return function () {
19764 var bytes = decrypt(encrypted, key, initVector);
19765 decrypted.set(bytes, encrypted.byteOffset);
19766 };
19767 };
19768
19769 createClass(Decrypter, null, [{
19770 key: "STEP",
19771 get: function get() {
19772 // 4 * 8000;
19773 return 32000;
19774 }
19775 }]);
19776 return Decrypter;
19777 }();
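// A minimal sketch (illustrative only) of the asynchronous Decrypter above:
// it chops the ciphertext into Decrypter.STEP-sized chunks, queues them on an
// AsyncStream, and strips the PKCS#7 padding before invoking the callback.
// All buffer contents are hypothetical placeholders.
var exampleDecrypterUsage_ = function exampleDecrypterUsage_(done) {
  var encrypted = new Uint8Array(32); // placeholder ciphertext (16-byte blocks)
  var key = new Uint32Array([1, 2, 3, 4]); // placeholder 128-bit key
  var iv = new Uint32Array([5, 6, 7, 8]); // placeholder IV
  // done(null, unpaddedBytes) runs as the final queued job
  return new Decrypter(encrypted, key, iv, done);
};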
19778 /**
19779 * @file bin-utils.js
19780 */
19781
19782 /**
19783 * Creates an object for sending to a web worker modifying properties that are TypedArrays
19784 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
19785 *
19786 * @param {Object} message
19787 * Object of properties and values to send to the web worker
19788 * @return {Object}
19789 * Modified message with TypedArray values expanded
19790 * @function createTransferableMessage
19791 */
19792
19793
19794 var createTransferableMessage = function createTransferableMessage(message) {
19795 var transferable = {};
19796 Object.keys(message).forEach(function (key) {
19797 var value = message[key];
19798
19799 if (ArrayBuffer.isView(value)) {
19800 transferable[key] = {
19801 bytes: value.buffer,
19802 byteOffset: value.byteOffset,
19803 byteLength: value.byteLength
19804 };
19805 } else {
19806 transferable[key] = value;
19807 }
19808 });
19809 return transferable;
19810 };
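// For example (illustrative only): a message whose values include typed
// arrays is flattened so the underlying ArrayBuffers can be listed as
// transferables in postMessage, while plain values pass through untouched.
var exampleTransferableMessage_ = function exampleTransferableMessage_() {
  var message = createTransferableMessage({
    source: 'segment-0', // passed through as-is
    encrypted: new Uint8Array(16) // becomes { bytes, byteOffset, byteLength }
  });
  return message.encrypted.bytes instanceof ArrayBuffer; // true
};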
19811 /* global self */
19812
19813 /**
19814 * Our web worker interface so that things can talk to aes-decrypter
19815 * that will be running in a web worker. The scope is passed to this by
19816 * webworkify.
19817 */
19818
19819
19820 self.onmessage = function (event) {
19821 var data = event.data;
19822 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
19823 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
19824 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
19825 /* eslint-disable no-new, handle-callback-err */
19826
19827 new Decrypter(encrypted, key, iv, function (err, bytes) {
19828 self.postMessage(createTransferableMessage({
19829 source: data.source,
19830 decrypted: bytes
19831 }), [bytes.buffer]);
19832 });
19833 /* eslint-enable */
19834 };
19835}));
19836var Decrypter = factory(workerCode);
19837/* rollup-plugin-worker-factory end for worker!/Users/gkatsevman/p/http-streaming-release/src/decrypter-worker.js */
19838
19839/**
19840 * Convert the properties of an HLS track into an audioTrackKind.
19841 *
19842 * @private
19843 */
19844
19845var audioTrackKind_ = function audioTrackKind_(properties) {
19846 var kind = properties.default ? 'main' : 'alternative';
19847
19848 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
19849 kind = 'main-desc';
19850 }
19851
19852 return kind;
19853};
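// For example (illustrative only): a default track maps to 'main', a
// non-default track to 'alternative', and any track whose CHARACTERISTICS
// include 'public.accessibility.describes-video' maps to 'main-desc'.
var exampleAudioTrackKind_ = function exampleAudioTrackKind_() {
  return audioTrackKind_({
    default: true,
    characteristics: 'public.accessibility.describes-video'
  }); // 'main-desc'
};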
19854/**
19855 * Pause provided segment loader and playlist loader if active
19856 *
19857 * @param {SegmentLoader} segmentLoader
19858 * SegmentLoader to pause
19859 * @param {Object} mediaType
19860 * Active media type
19861 * @function stopLoaders
19862 */
19863
19864
19865var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
19866 segmentLoader.abort();
19867 segmentLoader.pause();
19868
19869 if (mediaType && mediaType.activePlaylistLoader) {
19870 mediaType.activePlaylistLoader.pause();
19871 mediaType.activePlaylistLoader = null;
19872 }
19873};
19874/**
19875 * Start loading provided segment loader and playlist loader
19876 *
19877 * @param {PlaylistLoader} playlistLoader
19878 * PlaylistLoader to start loading
19879 * @param {Object} mediaType
19880 * Active media type
19881 * @function startLoaders
19882 */
19883
19884var startLoaders = function startLoaders(playlistLoader, mediaType) {
19885 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
19886 // playlist loader
19887 mediaType.activePlaylistLoader = playlistLoader;
19888 playlistLoader.load();
19889};
19890/**
19891 * Returns a function to be called when the media group changes. It performs a
19892 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
19893 * change of group is merely a rendition switch of the same content at another encoding,
19894 * rather than a change of content, such as switching audio from English to Spanish.
19895 *
19896 * @param {string} type
19897 * MediaGroup type
19898 * @param {Object} settings
19899 * Object containing required information for media groups
19900 * @return {Function}
19901 * Handler for a non-destructive resync of SegmentLoader when the active media
19902 * group changes.
19903 * @function onGroupChanged
19904 */
19905
19906var onGroupChanged = function onGroupChanged(type, settings) {
19907 return function () {
19908 var _settings$segmentLoad = settings.segmentLoaders,
19909 segmentLoader = _settings$segmentLoad[type],
19910 mainSegmentLoader = _settings$segmentLoad.main,
19911 mediaType = settings.mediaTypes[type];
19912 var activeTrack = mediaType.activeTrack();
19913 var activeGroup = mediaType.getActiveGroup();
19914 var previousActiveLoader = mediaType.activePlaylistLoader;
19915 var lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
19916
19917 if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
19918 return;
19919 }
19920
19921 mediaType.lastGroup_ = activeGroup;
19922 mediaType.lastTrack_ = activeTrack;
19923 stopLoaders(segmentLoader, mediaType);
19924
19925 if (!activeGroup || activeGroup.isMasterPlaylist) {
19926 // there is no group active or active group is a main playlist and won't change
19927 return;
19928 }
19929
19930 if (!activeGroup.playlistLoader) {
19931 if (previousActiveLoader) {
19932 // The previous group had a playlist loader but the new active group does not
19933 // this means we are switching from demuxed to muxed audio. In this case we want to
19934 // do a destructive reset of the main segment loader and not restart the audio
19935 // loaders.
19936 mainSegmentLoader.resetEverything();
19937 }
19938
19939 return;
19940 } // Non-destructive resync
19941
19942
19943 segmentLoader.resyncLoader();
19944 startLoaders(activeGroup.playlistLoader, mediaType);
19945 };
19946};
19947var onGroupChanging = function onGroupChanging(type, settings) {
19948 return function () {
19949 var segmentLoader = settings.segmentLoaders[type],
19950 mediaType = settings.mediaTypes[type];
19951 mediaType.lastGroup_ = null;
19952 segmentLoader.abort();
19953 segmentLoader.pause();
19954 };
19955};
19956/**
19957 * Returns a function to be called when the media track changes. It performs a
19958 * destructive reset of the SegmentLoader to ensure we start loading as close to
19959 * currentTime as possible.
19960 *
19961 * @param {string} type
19962 * MediaGroup type
19963 * @param {Object} settings
19964 * Object containing required information for media groups
19965 * @return {Function}
19966 * Handler for a destructive reset of SegmentLoader when the active media
19967 * track changes.
19968 * @function onTrackChanged
19969 */
19970
19971var onTrackChanged = function onTrackChanged(type, settings) {
19972 return function () {
19973 var masterPlaylistLoader = settings.masterPlaylistLoader,
19974 _settings$segmentLoad2 = settings.segmentLoaders,
19975 segmentLoader = _settings$segmentLoad2[type],
19976 mainSegmentLoader = _settings$segmentLoad2.main,
19977 mediaType = settings.mediaTypes[type];
19978 var activeTrack = mediaType.activeTrack();
19979 var activeGroup = mediaType.getActiveGroup();
19980 var previousActiveLoader = mediaType.activePlaylistLoader;
19981 var lastTrack = mediaType.lastTrack_; // track did not change, do nothing
19982
19983 if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
19984 return;
19985 }
19986
19987 mediaType.lastGroup_ = activeGroup;
19988 mediaType.lastTrack_ = activeTrack;
19989 stopLoaders(segmentLoader, mediaType);
19990
19991 if (!activeGroup) {
19992 // there is no group active so we do not want to restart loaders
19993 return;
19994 }
19995
19996 if (activeGroup.isMasterPlaylist) {
19997 // track did not change, do nothing
19998 if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
19999 return;
20000 }
20001
20002 var mpc = settings.vhs.masterPlaylistController_;
20003 var newPlaylist = mpc.selectPlaylist(); // media will not change, do nothing
20004
20005 if (mpc.media() === newPlaylist) {
20006 return;
20007 }
20008
20009 mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
20010 masterPlaylistLoader.pause();
20011 mainSegmentLoader.resetEverything();
20012 mpc.fastQualityChange_(newPlaylist);
20013 return;
20014 }
20015
20016 if (type === 'AUDIO') {
20017 if (!activeGroup.playlistLoader) {
20018 // when switching from demuxed audio/video to muxed audio/video (noted by no
20019 // playlist loader for the audio group), we want to do a destructive reset of the
20020 // main segment loader and not restart the audio loaders
20021 mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
20022 // it should be stopped
20023
20024 mainSegmentLoader.resetEverything();
20025 return;
20026 } // although the segment loader is an audio segment loader, call the setAudio
20027 // function to ensure it is prepared to re-append the init segment (or handle other
20028 // config changes)
20029
20030
20031 segmentLoader.setAudio(true);
20032 mainSegmentLoader.setAudio(false);
20033 }
20034
20035 if (previousActiveLoader === activeGroup.playlistLoader) {
20036 // Nothing has actually changed. This can happen because track change events can fire
20037 // multiple times for a "single" change. One for enabling the new active track, and
20038 // one for disabling the track that was active
20039 startLoaders(activeGroup.playlistLoader, mediaType);
20040 return;
20041 }
20042
20043 if (segmentLoader.track) {
20044 // For WebVTT, set the new text track in the segmentloader
20045 segmentLoader.track(activeTrack);
20046 } // destructive reset
20047
20048
20049 segmentLoader.resetEverything();
20050 startLoaders(activeGroup.playlistLoader, mediaType);
20051 };
20052};
20053var onError = {
20054 /**
20055 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
20056 * an error.
20057 *
20058 * @param {string} type
20059 * MediaGroup type
20060 * @param {Object} settings
20061 * Object containing required information for media groups
20062 * @return {Function}
20063 * Error handler. Logs warning (or error if the playlist is blacklisted) to
20064 * console and switches back to default audio track.
20065 * @function onError.AUDIO
20066 */
20067 AUDIO: function AUDIO(type, settings) {
20068 return function () {
20069 var segmentLoader = settings.segmentLoaders[type],
20070 mediaType = settings.mediaTypes[type],
20071 blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
20072 stopLoaders(segmentLoader, mediaType); // switch back to default audio track
20073
20074 var activeTrack = mediaType.activeTrack();
20075 var activeGroup = mediaType.activeGroup();
20076 var id = (activeGroup.filter(function (group) {
20077 return group.default;
20078 })[0] || activeGroup[0]).id;
20079 var defaultTrack = mediaType.tracks[id];
20080
20081 if (activeTrack === defaultTrack) {
20082 // Default track encountered an error. All we can do now is blacklist the current
20083 // rendition and hope another will switch audio groups
20084 blacklistCurrentPlaylist({
20085 message: 'Problem encountered loading the default audio track.'
20086 });
20087 return;
20088 }
20089
20090 videojs__default['default'].log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
20091
20092 for (var trackId in mediaType.tracks) {
20093 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
20094 }
20095
20096 mediaType.onTrackChanged();
20097 };
20098 },
20099
20100 /**
20101 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
20102 * an error.
20103 *
20104 * @param {string} type
20105 * MediaGroup type
20106 * @param {Object} settings
20107 * Object containing required information for media groups
20108 * @return {Function}
20109 * Error handler. Logs warning to console and disables the active subtitle track
20110 * @function onError.SUBTITLES
20111 */
20112 SUBTITLES: function SUBTITLES(type, settings) {
20113 return function () {
20114 var segmentLoader = settings.segmentLoaders[type],
20115 mediaType = settings.mediaTypes[type];
20116 videojs__default['default'].log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
20117 stopLoaders(segmentLoader, mediaType);
20118 var track = mediaType.activeTrack();
20119
20120 if (track) {
20121 track.mode = 'disabled';
20122 }
20123
20124 mediaType.onTrackChanged();
20125 };
20126 }
20127};
20128var setupListeners = {
20129 /**
20130 * Setup event listeners for audio playlist loader
20131 *
20132 * @param {string} type
20133 * MediaGroup type
20134 * @param {PlaylistLoader|null} playlistLoader
20135 * PlaylistLoader to register listeners on
20136 * @param {Object} settings
20137 * Object containing required information for media groups
20138 * @function setupListeners.AUDIO
20139 */
20140 AUDIO: function AUDIO(type, playlistLoader, settings) {
20141 if (!playlistLoader) {
20142 // no playlist loader means audio will be muxed with the video
20143 return;
20144 }
20145
20146 var tech = settings.tech,
20147 requestOptions = settings.requestOptions,
20148 segmentLoader = settings.segmentLoaders[type];
20149 playlistLoader.on('loadedmetadata', function () {
20150 var media = playlistLoader.media();
20151 segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
20152 // permits, start downloading segments
20153
20154 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
20155 segmentLoader.load();
20156 }
20157 });
20158 playlistLoader.on('loadedplaylist', function () {
20159 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
20160
20161 if (!tech.paused()) {
20162 segmentLoader.load();
20163 }
20164 });
20165 playlistLoader.on('error', onError[type](type, settings));
20166 },
20167
20168 /**
20169 * Setup event listeners for subtitle playlist loader
20170 *
20171 * @param {string} type
20172 * MediaGroup type
20173 * @param {PlaylistLoader|null} playlistLoader
20174 * PlaylistLoader to register listeners on
20175 * @param {Object} settings
20176 * Object containing required information for media groups
20177 * @function setupListeners.SUBTITLES
20178 */
20179 SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
20180 var tech = settings.tech,
20181 requestOptions = settings.requestOptions,
20182 segmentLoader = settings.segmentLoaders[type],
20183 mediaType = settings.mediaTypes[type];
20184 playlistLoader.on('loadedmetadata', function () {
20185 var media = playlistLoader.media();
20186 segmentLoader.playlist(media, requestOptions);
20187 segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
20188 // permits, start downloading segments
20189
20190 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
20191 segmentLoader.load();
20192 }
20193 });
20194 playlistLoader.on('loadedplaylist', function () {
20195 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
20196
20197 if (!tech.paused()) {
20198 segmentLoader.load();
20199 }
20200 });
20201 playlistLoader.on('error', onError[type](type, settings));
20202 }
20203};
20204var initialize = {
20205 /**
20206 * Setup PlaylistLoaders and AudioTracks for the audio groups
20207 *
20208 * @param {string} type
20209 * MediaGroup type
20210 * @param {Object} settings
20211 * Object containing required information for media groups
20212 * @function initialize.AUDIO
20213 */
20214 'AUDIO': function AUDIO(type, settings) {
20215 var vhs = settings.vhs,
20216 sourceType = settings.sourceType,
20217 segmentLoader = settings.segmentLoaders[type],
20218 requestOptions = settings.requestOptions,
20219 mediaGroups = settings.master.mediaGroups,
20220 _settings$mediaTypes$ = settings.mediaTypes[type],
20221 groups = _settings$mediaTypes$.groups,
20222 tracks = _settings$mediaTypes$.tracks,
20223 logger_ = _settings$mediaTypes$.logger_,
20224 masterPlaylistLoader = settings.masterPlaylistLoader;
20225 var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none
20226
20227 if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
20228 mediaGroups[type] = {
20229 main: {
20230 default: {
20231 default: true
20232 }
20233 }
20234 };
20235 }
20236
20237 for (var groupId in mediaGroups[type]) {
20238 if (!groups[groupId]) {
20239 groups[groupId] = [];
20240 }
20241
20242 for (var variantLabel in mediaGroups[type][groupId]) {
20243 var properties = mediaGroups[type][groupId][variantLabel];
20244 var playlistLoader = void 0;
20245
20246 if (audioOnlyMaster) {
20247 logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
20248 properties.isMasterPlaylist = true;
20249 playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
20250 // use the resolved media playlist object
20251 } else if (sourceType === 'vhs-json' && properties.playlists) {
20252 playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
20253 } else if (properties.resolvedUri) {
20254 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
20255 } else if (properties.playlists && sourceType === 'dash') {
20256 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
20257 } else {
20258 // no resolvedUri means the audio is muxed with the video when using this
20259 // audio track
20260 playlistLoader = null;
20261 }
20262
20263 properties = videojs__default['default'].mergeOptions({
20264 id: variantLabel,
20265 playlistLoader: playlistLoader
20266 }, properties);
20267 setupListeners[type](type, properties.playlistLoader, settings);
20268 groups[groupId].push(properties);
20269
20270 if (typeof tracks[variantLabel] === 'undefined') {
20271 var track = new videojs__default['default'].AudioTrack({
20272 id: variantLabel,
20273 kind: audioTrackKind_(properties),
20274 enabled: false,
20275 language: properties.language,
20276 default: properties.default,
20277 label: variantLabel
20278 });
20279 tracks[variantLabel] = track;
20280 }
20281 }
20282 } // setup single error event handler for the segment loader
20283
20284
20285 segmentLoader.on('error', onError[type](type, settings));
20286 },
20287
20288 /**
20289 * Setup PlaylistLoaders and TextTracks for the subtitle groups
20290 *
20291 * @param {string} type
20292 * MediaGroup type
20293 * @param {Object} settings
20294 * Object containing required information for media groups
20295 * @function initialize.SUBTITLES
20296 */
20297 'SUBTITLES': function SUBTITLES(type, settings) {
20298 var tech = settings.tech,
20299 vhs = settings.vhs,
20300 sourceType = settings.sourceType,
20301 segmentLoader = settings.segmentLoaders[type],
20302 requestOptions = settings.requestOptions,
20303 mediaGroups = settings.master.mediaGroups,
20304 _settings$mediaTypes$2 = settings.mediaTypes[type],
20305 groups = _settings$mediaTypes$2.groups,
20306 tracks = _settings$mediaTypes$2.tracks,
20307 masterPlaylistLoader = settings.masterPlaylistLoader;
20308
20309 for (var groupId in mediaGroups[type]) {
20310 if (!groups[groupId]) {
20311 groups[groupId] = [];
20312 }
20313
20314 for (var variantLabel in mediaGroups[type][groupId]) {
20315 if (mediaGroups[type][groupId][variantLabel].forced) {
20316 // Subtitle playlists with the forced attribute are not selectable in Safari.
20317 // According to Apple's HLS Authoring Specification:
20318 // If content has forced subtitles and regular subtitles in a given language,
20319 // the regular subtitles track in that language MUST contain both the forced
20320 // subtitles and the regular subtitles for that language.
20321 // Because of this requirement and that Safari does not add forced subtitles,
20322 // forced subtitles are skipped here to maintain consistent experience across
20323 // all platforms
20324 continue;
20325 }
20326
20327 var properties = mediaGroups[type][groupId][variantLabel];
20328 var playlistLoader = void 0;
20329
20330 if (sourceType === 'hls') {
20331 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
20332 } else if (sourceType === 'dash') {
20333 var playlists = properties.playlists.filter(function (p) {
20334 return p.excludeUntil !== Infinity;
20335 });
20336
20337 if (!playlists.length) {
20338 return;
20339 }
20340
20341 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
20342 } else if (sourceType === 'vhs-json') {
20343 playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
20344 // as provided, otherwise use the resolved URI to load the playlist
20345 properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
20346 }
20347
20348 properties = videojs__default['default'].mergeOptions({
20349 id: variantLabel,
20350 playlistLoader: playlistLoader
20351 }, properties);
20352 setupListeners[type](type, properties.playlistLoader, settings);
20353 groups[groupId].push(properties);
20354
20355 if (typeof tracks[variantLabel] === 'undefined') {
20356 var track = tech.addRemoteTextTrack({
20357 id: variantLabel,
20358 kind: 'subtitles',
20359 default: properties.default && properties.autoselect,
20360 language: properties.language,
20361 label: variantLabel
20362 }, false).track;
20363 tracks[variantLabel] = track;
20364 }
20365 }
20366 } // setup single error event handler for the segment loader
20367
20368
20369 segmentLoader.on('error', onError[type](type, settings));
20370 },
20371
20372 /**
20373 * Setup TextTracks for the closed-caption groups
20374 *
20375 * @param {String} type
20376 * MediaGroup type
20377 * @param {Object} settings
20378 * Object containing required information for media groups
20379 * @function initialize['CLOSED-CAPTIONS']
20380 */
20381 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
20382 var tech = settings.tech,
20383 mediaGroups = settings.master.mediaGroups,
20384 _settings$mediaTypes$3 = settings.mediaTypes[type],
20385 groups = _settings$mediaTypes$3.groups,
20386 tracks = _settings$mediaTypes$3.tracks;
20387
20388 for (var groupId in mediaGroups[type]) {
20389 if (!groups[groupId]) {
20390 groups[groupId] = [];
20391 }
20392
20393 for (var variantLabel in mediaGroups[type][groupId]) {
20394 var properties = mediaGroups[type][groupId][variantLabel]; // We only support CEA608 captions for now, so ignore anything that
20395 // doesn't use a CCx INSTREAM-ID
20396
20397 if (!properties.instreamId.match(/CC\d/)) {
20398 continue;
20399 } // No PlaylistLoader is required for Closed-Captions because the captions are
20400 // embedded within the video stream
20401
20402
20403 groups[groupId].push(videojs__default['default'].mergeOptions({
20404 id: variantLabel
20405 }, properties));
20406
20407 if (typeof tracks[variantLabel] === 'undefined') {
20408 var track = tech.addRemoteTextTrack({
20409 id: properties.instreamId,
20410 kind: 'captions',
20411 default: properties.default && properties.autoselect,
20412 language: properties.language,
20413 label: variantLabel
20414 }, false).track;
20415 tracks[variantLabel] = track;
20416 }
20417 }
20418 }
20419 }
20420};
20421/**
20422 * Returns a function used to get the active group of the provided type
20423 *
20424 * @param {string} type
20425 * MediaGroup type
20426 * @param {Object} settings
20427 * Object containing required information for media groups
20428 * @return {Function}
20429 * Function that returns the active media group for the provided type. Takes an
20430 * optional parameter {TextTrack} track. If no track is provided, a list of all
20431 * variants in the group, otherwise the variant corresponding to the provided
20432 * track is returned.
20433 * @function activeGroup
20434 */
20435
20436var activeGroup = function activeGroup(type, settings) {
20437 return function (track) {
20438 var masterPlaylistLoader = settings.masterPlaylistLoader,
20439 groups = settings.mediaTypes[type].groups;
20440 var media = masterPlaylistLoader.media();
20441
20442 if (!media) {
20443 return null;
20444 }
20445
20446 var variants = null; // set variants to the main media active group
20447
20448 if (media.attributes[type]) {
20449 variants = groups[media.attributes[type]];
20450 }
20451
20452 var groupKeys = Object.keys(groups);
20453
20454 if (!variants) {
20455 // use the main group if it exists
20456 if (groups.main) {
20457 variants = groups.main; // only one group, use that one
20458 } else if (groupKeys.length === 1) {
20459 variants = groups[groupKeys[0]];
20460 }
20461 }
20462
20463 if (typeof track === 'undefined') {
20464 return variants;
20465 }
20466
20467 if (track === null || !variants) {
20468 // An active track was specified so a corresponding group is expected. track === null
20469 // means no track is currently active so there is no corresponding group
20470 return null;
20471 }
20472
20473 return variants.filter(function (props) {
20474 return props.id === track.id;
20475 })[0] || null;
20476 };
20477};
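// A minimal sketch (illustrative only) of the activeGroup getter: called with
// no argument it returns every variant in the active group; passing the
// active track narrows that to the single matching variant. `settings` here
// is the media-groups settings object described above, supplied by the caller.
var exampleActiveGroupUsage_ = function exampleActiveGroupUsage_(settings) {
  var getGroup = activeGroup('AUDIO', settings);
  var variants = getGroup(); // all variants of the active AUDIO group, or null
  var track = settings.mediaTypes.AUDIO.activeTrack();
  return track ? getGroup(track) : variants; // variant for the active track
};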
20478var activeTrack = {
20479 /**
20480 * Returns a function used to get the active track of type provided
20481 *
20482 * @param {string} type
20483 * MediaGroup type
20484 * @param {Object} settings
20485 * Object containing required information for media groups
20486 * @return {Function}
20487 * Function that returns the active media track for the provided type. Returns
20488 * null if no track is active
20489 * @function activeTrack.AUDIO
20490 */
20491 AUDIO: function AUDIO(type, settings) {
20492 return function () {
20493 var tracks = settings.mediaTypes[type].tracks;
20494
20495 for (var id in tracks) {
20496 if (tracks[id].enabled) {
20497 return tracks[id];
20498 }
20499 }
20500
20501 return null;
20502 };
20503 },
20504
20505 /**
20506 * Returns a function used to get the active track of type provided
20507 *
20508 * @param {string} type
20509 * MediaGroup type
20510 * @param {Object} settings
20511 * Object containing required information for media groups
20512 * @return {Function}
20513 * Function that returns the active media track for the provided type. Returns
20514 * null if no track is active
20515 * @function activeTrack.SUBTITLES
20516 */
20517 SUBTITLES: function SUBTITLES(type, settings) {
20518 return function () {
20519 var tracks = settings.mediaTypes[type].tracks;
20520
20521 for (var id in tracks) {
20522 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
20523 return tracks[id];
20524 }
20525 }
20526
20527 return null;
20528 };
20529 }
20530};
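// A minimal sketch (illustrative only): the activeTrack getters scan the
// stored tracks for the one currently switched on, `enabled` for audio and
// mode 'showing' or 'hidden' for subtitles, returning null otherwise.
var exampleActiveTrackUsage_ = function exampleActiveTrackUsage_(settings) {
  var getAudioTrack = activeTrack.AUDIO('AUDIO', settings);
  var getSubtitleTrack = activeTrack.SUBTITLES('SUBTITLES', settings);
  return {
    audio: getAudioTrack(), // the enabled AudioTrack, or null
    subtitles: getSubtitleTrack() // the showing/hidden TextTrack, or null
  };
};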
20531var getActiveGroup = function getActiveGroup(type, _ref) {
20532 var mediaTypes = _ref.mediaTypes;
20533 return function () {
20534 var activeTrack_ = mediaTypes[type].activeTrack();
20535
20536 if (!activeTrack_) {
20537 return null;
20538 }
20539
20540 return mediaTypes[type].activeGroup(activeTrack_);
20541 };
20542};
20543/**
20544 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
20545 * Closed-Captions) specified in the master manifest.
20546 *
20547 * @param {Object} settings
20548 * Object containing required information for setting up the media groups
20549 * @param {Tech} settings.tech
20550 * The tech of the player
20551 * @param {Object} settings.requestOptions
20552 * XHR request options used by the segment loaders
20553 * @param {PlaylistLoader} settings.masterPlaylistLoader
20554 * PlaylistLoader for the master source
20555 * @param {VhsHandler} settings.vhs
20556 * VHS SourceHandler
20557 * @param {Object} settings.master
20558 * The parsed master manifest
20559 * @param {Object} settings.mediaTypes
20560 * Object to store the loaders, tracks, and utility methods for each media type
20561 * @param {Function} settings.blacklistCurrentPlaylist
20562 * Blacklists the current rendition and forces a rendition switch.
20563 * @function setupMediaGroups
20564 */
20565
20566var setupMediaGroups = function setupMediaGroups(settings) {
20567 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
20568 initialize[type](type, settings);
20569 });
20570 var mediaTypes = settings.mediaTypes,
20571 masterPlaylistLoader = settings.masterPlaylistLoader,
20572 tech = settings.tech,
20573 vhs = settings.vhs; // setup active group and track getters and change event handlers
20574
20575 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
20576 mediaTypes[type].activeGroup = activeGroup(type, settings);
20577 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
20578 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
20579 mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
20580 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
20581 mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
20582 }); // DO NOT enable the default subtitle or caption track.
20583 // DO enable the default audio track
20584
20585 var audioGroup = mediaTypes.AUDIO.activeGroup();
20586
20587 if (audioGroup) {
20588 var groupId = (audioGroup.filter(function (group) {
20589 return group.default;
20590 })[0] || audioGroup[0]).id;
20591 mediaTypes.AUDIO.tracks[groupId].enabled = true;
20592 mediaTypes.AUDIO.onGroupChanged();
20593 mediaTypes.AUDIO.onTrackChanged();
20594 }
20595
20596 masterPlaylistLoader.on('mediachange', function () {
20597 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
20598 return mediaTypes[type].onGroupChanged();
20599 });
20600 });
20601 masterPlaylistLoader.on('mediachanging', function () {
20602 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
20603 return mediaTypes[type].onGroupChanging();
20604 });
20605 }); // custom audio track change event handler for usage event
20606
20607 var onAudioTrackChanged = function onAudioTrackChanged() {
20608 mediaTypes.AUDIO.onTrackChanged();
20609 tech.trigger({
20610 type: 'usage',
20611 name: 'vhs-audio-change'
20612 });
20613 tech.trigger({
20614 type: 'usage',
20615 name: 'hls-audio-change'
20616 });
20617 };
20618
20619 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
20620 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
20621 vhs.on('dispose', function () {
20622 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
20623 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
20624 }); // clear existing audio tracks and add the ones we just created
20625
20626 tech.clearTracks('audio');
20627
20628 for (var id in mediaTypes.AUDIO.tracks) {
20629 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
20630 }
20631};
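// A minimal sketch (illustrative only) of the settings shape consumed by
// setupMediaGroups, mirroring the documented fields above and the call the
// MasterPlaylistController makes on `loadedmetadata` below; `mpc` is a
// placeholder for such a controller instance.
var exampleSetupMediaGroups_ = function exampleSetupMediaGroups_(mpc) {
  setupMediaGroups({
    sourceType: 'hls',
    segmentLoaders: {
      AUDIO: mpc.audioSegmentLoader_,
      SUBTITLES: mpc.subtitleSegmentLoader_,
      main: mpc.mainSegmentLoader_
    },
    tech: mpc.tech_,
    requestOptions: mpc.requestOptions_,
    masterPlaylistLoader: mpc.masterPlaylistLoader_,
    vhs: mpc.vhs_,
    master: mpc.master(),
    mediaTypes: mpc.mediaTypes_,
    blacklistCurrentPlaylist: mpc.blacklistCurrentPlaylist.bind(mpc)
  });
};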
20632/**
20633 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
20634 * media type
20635 *
20636 * @return {Object}
20637 * Object to store the loaders, tracks, and utility methods for each media type
20638 * @function createMediaTypes
20639 */
20640
20641var createMediaTypes = function createMediaTypes() {
20642 var mediaTypes = {};
20643 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
20644 mediaTypes[type] = {
20645 groups: {},
20646 tracks: {},
20647 activePlaylistLoader: null,
20648 activeGroup: noop,
20649 activeTrack: noop,
20650 getActiveGroup: noop,
20651 onGroupChanged: noop,
20652 onTrackChanged: noop,
20653 lastTrack_: null,
20654 logger_: logger("MediaGroups[" + type + "]")
20655 };
20656 });
20657 return mediaTypes;
20658};
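// A quick check (illustrative only): each media type starts out with inert
// no-op getters and empty group/track maps until setupMediaGroups installs
// the real implementations.
var exampleMediaTypesShape_ = function exampleMediaTypesShape_() {
  var mediaTypes = createMediaTypes();
  return mediaTypes.AUDIO.activeGroup === noop; // true before setup
};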
20659
20660var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
20661var Vhs$1; // SegmentLoader stats that need to have each loader's
20662// values summed to calculate the final value
20663
20664var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred'];
20665
20666var sumLoaderStat = function sumLoaderStat(stat) {
20667 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
20668};
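// A minimal sketch (illustrative only): sumLoaderStat is bound to a
// MasterPlaylistController instance so a single getter can total the audio
// and main segment loaders, as the constructor below does for each name in
// loaderStats. `mpc` is a placeholder for such an instance.
var exampleSumLoaderStat_ = function exampleSumLoaderStat_(mpc) {
  var mediaRequests = sumLoaderStat.bind(mpc, 'mediaRequests');
  return mediaRequests(); // audioSegmentLoader_ + mainSegmentLoader_ total
};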
20669
20670var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
20671 var currentPlaylist = _ref.currentPlaylist,
20672 nextPlaylist = _ref.nextPlaylist,
20673 forwardBuffer = _ref.forwardBuffer,
20674 bufferLowWaterLine = _ref.bufferLowWaterLine,
20675 bufferHighWaterLine = _ref.bufferHighWaterLine,
20676 duration = _ref.duration,
20677 experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
20678 log = _ref.log;
20679
20680 // we have no other playlist to switch to
20681 if (!nextPlaylist) {
20682 videojs__default['default'].log.warn('We received no playlist to switch to. Please check your stream.');
20683 return false;
20684 }
20685
20686 var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id; // If the playlist is live, then we don't want to take the low water line into account.
20687 // This is because in LIVE, the player plays 3 segments from the end of the
20688 // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
20689 // in those segments, a viewer will never experience a rendition upswitch.
20690
20691 if (!currentPlaylist || !currentPlaylist.endList) {
20692 log(sharedLogLine + " as current playlist " + (!currentPlaylist ? 'is not set' : 'is live'));
20693 return true;
20694 } // no need to switch, the playlist is the same
20695
20696
20697 if (nextPlaylist.id === currentPlaylist.id) {
20698 return false;
20699 }
20700
20701 var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
20702 // duration is below the max potential low water line
20703
20704 if (duration < maxBufferLowWaterLine) {
20705 log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
20706 return true;
20707 }
20708
20709 var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
20710 var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
20711 // we can switch down
20712
20713 if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
20714 var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
20715
20716 if (experimentalBufferBasedABR) {
20717 logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
20718 }
20719
20720 log(logLine);
20721 return true;
20722 } // and if our buffer is higher than the low water line,
20723 // we can switch up
20724
20725
20726 if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
20727 var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
20728
20729 if (experimentalBufferBasedABR) {
20730 _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
20731 }
20732
20733 log(_logLine);
20734 return true;
20735 }
20736
20737 log("not " + sharedLogLine + " as no switching criteria met");
20738 return false;
20739};
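// A minimal sketch (illustrative only) of calling shouldSwitchToMedia with
// placeholder playlists and buffer values; it returns a boolean and logs its
// reasoning through the supplied log function. The playlist arguments are
// assumed to carry `id`, `endList`, and `attributes.BANDWIDTH`.
var exampleShouldSwitchToMedia_ = function exampleShouldSwitchToMedia_(currentPlaylist, nextPlaylist) {
  return shouldSwitchToMedia({
    currentPlaylist: currentPlaylist,
    nextPlaylist: nextPlaylist,
    forwardBuffer: 12, // seconds buffered ahead of currentTime (placeholder)
    bufferLowWaterLine: 0,
    bufferHighWaterLine: 16,
    duration: 600, // placeholder VOD duration in seconds
    experimentalBufferBasedABR: false,
    log: logger('example')
  });
};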
20740/**
20741 * The master playlist controller controls all interactions
20742 * between playlists and segment loaders. At this time this mainly
20743 * involves a master playlist and a series of audio playlists
20744 * if they are available.
20745 *
20746 * @class MasterPlaylistController
20747 * @extends videojs.EventTarget
20748 */
20749
20750
20751var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
20752 _inheritsLoose__default['default'](MasterPlaylistController, _videojs$EventTarget);
20753
20754 function MasterPlaylistController(options) {
20755 var _this;
20756
20757 _this = _videojs$EventTarget.call(this) || this;
20758 var src = options.src,
20759 handleManifestRedirects = options.handleManifestRedirects,
20760 withCredentials = options.withCredentials,
20761 tech = options.tech,
20762 bandwidth = options.bandwidth,
20763 externVhs = options.externVhs,
20764 useCueTags = options.useCueTags,
20765 blacklistDuration = options.blacklistDuration,
20766 enableLowInitialPlaylist = options.enableLowInitialPlaylist,
20767 sourceType = options.sourceType,
20768 cacheEncryptionKeys = options.cacheEncryptionKeys,
20769 handlePartialData = options.handlePartialData,
20770 experimentalBufferBasedABR = options.experimentalBufferBasedABR;
20771
20772 if (!src) {
20773 throw new Error('A non-empty playlist URL or JSON manifest string is required');
20774 }
20775
20776 Vhs$1 = externVhs;
20777 _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
20778 _this.withCredentials = withCredentials;
20779 _this.tech_ = tech;
20780 _this.vhs_ = tech.vhs;
20781 _this.sourceType_ = sourceType;
20782 _this.useCueTags_ = useCueTags;
20783 _this.blacklistDuration = blacklistDuration;
20784 _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
20785
20786 if (_this.useCueTags_) {
20787 _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
20788 _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
20789 }
20790
20791 _this.requestOptions_ = {
20792 withCredentials: withCredentials,
20793 handleManifestRedirects: handleManifestRedirects,
20794 timeout: null
20795 };
20796
20797 _this.on('error', _this.pauseLoading);
20798
20799 _this.mediaTypes_ = createMediaTypes();
20800 _this.mediaSource = new window__default['default'].MediaSource();
20801 _this.handleDurationChange_ = _this.handleDurationChange_.bind(_assertThisInitialized__default['default'](_this));
20802 _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(_assertThisInitialized__default['default'](_this));
20803 _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(_assertThisInitialized__default['default'](_this));
20804
20805 _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player
20806
20807
20808 _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);
20809
20810 _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
20811 // everything, and the MediaSource should not be detached without a proper disposal
20812
20813
20814 _this.seekable_ = videojs__default['default'].createTimeRanges();
20815 _this.hasPlayed_ = false;
20816 _this.syncController_ = new SyncController(options);
20817 _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
20818 kind: 'metadata',
20819 label: 'segment-metadata'
20820 }, false).track;
20821 _this.decrypter_ = new Decrypter();
20822 _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
20823 _this.inbandTextTracks_ = {};
20824 _this.timelineChangeController_ = new TimelineChangeController();
20825 var segmentLoaderSettings = {
20826 vhs: _this.vhs_,
20827 parse708captions: options.parse708captions,
20828 mediaSource: _this.mediaSource,
20829 currentTime: _this.tech_.currentTime.bind(_this.tech_),
20830 seekable: function seekable() {
20831 return _this.seekable();
20832 },
20833 seeking: function seeking() {
20834 return _this.tech_.seeking();
20835 },
20836 duration: function duration() {
20837 return _this.duration();
20838 },
20839 hasPlayed: function hasPlayed() {
20840 return _this.hasPlayed_;
20841 },
20842 goalBufferLength: function goalBufferLength() {
20843 return _this.goalBufferLength();
20844 },
20845 bandwidth: bandwidth,
20846 syncController: _this.syncController_,
20847 decrypter: _this.decrypter_,
20848 sourceType: _this.sourceType_,
20849 inbandTextTracks: _this.inbandTextTracks_,
20850 cacheEncryptionKeys: cacheEncryptionKeys,
20851 handlePartialData: handlePartialData,
20852 sourceUpdater: _this.sourceUpdater_,
20853 timelineChangeController: _this.timelineChangeController_
20854 }; // The source type check not only determines whether a special DASH playlist loader
20855 // should be used, but also covers the case where the provided src is a vhs-json
20856 // manifest object (instead of a URL). In the case of vhs-json, the default
20857 // PlaylistLoader should be used.
20858
20859 _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);
20860
20861 _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
20862 // combined audio/video or just video when alternate audio track is selected
20863
20864
20865 _this.mainSegmentLoader_ = new SegmentLoader(videojs__default['default'].mergeOptions(segmentLoaderSettings, {
20866 segmentMetadataTrack: _this.segmentMetadataTrack_,
20867 loaderType: 'main'
20868 }), options); // alternate audio track
20869
20870 _this.audioSegmentLoader_ = new SegmentLoader(videojs__default['default'].mergeOptions(segmentLoaderSettings, {
20871 loaderType: 'audio'
20872 }), options);
20873 _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs__default['default'].mergeOptions(segmentLoaderSettings, {
20874 loaderType: 'vtt',
20875 featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
20876 }), options);
20877
20878 _this.setupSegmentLoaderListeners_();
20879
20880 if (_this.experimentalBufferBasedABR) {
20881 _this.masterPlaylistLoader_.one('loadedplaylist', function () {
20882 return _this.startABRTimer_();
20883 });
20884
20885 _this.tech_.on('pause', function () {
20886 return _this.stopABRTimer_();
20887 });
20888
20889 _this.tech_.on('play', function () {
20890 return _this.startABRTimer_();
20891 });
20892 } // Create SegmentLoader stat-getters
20893 // mediaRequests_
20894 // mediaRequestsAborted_
20895 // mediaRequestsTimedout_
20896 // mediaRequestsErrored_
20897 // mediaTransferDuration_
20898 // mediaBytesTransferred_
20899
20900
20901 loaderStats.forEach(function (stat) {
20902 _this[stat + '_'] = sumLoaderStat.bind(_assertThisInitialized__default['default'](_this), stat);
20903 });
20904 _this.logger_ = logger('MPC');
20905 _this.triggeredFmp4Usage = false;
20906
20907 if (_this.tech_.preload() === 'none') {
20908 _this.loadOnPlay_ = function () {
20909 _this.loadOnPlay_ = null;
20910
20911 _this.masterPlaylistLoader_.load();
20912 };
20913
20914 _this.tech_.one('play', _this.loadOnPlay_);
20915 } else {
20916 _this.masterPlaylistLoader_.load();
20917 }
20918
20919 return _this;
20920 }
20921 /**
20922 * Run selectPlaylist and switch to the new playlist if we should
20923 *
20924 * @private
20925 *
20926 */
20927
20928
20929 var _proto = MasterPlaylistController.prototype;
20930
20931 _proto.checkABR_ = function checkABR_() {
20932 var nextPlaylist = this.selectPlaylist();
20933
20934 if (this.shouldSwitchToMedia_(nextPlaylist)) {
20935 this.switchMedia_(nextPlaylist, 'abr');
20936 }
20937 };
20938
20939 _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
20940 var oldMedia = this.media();
20941 var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
20942 var newId = playlist.id || playlist.uri;
20943
20944 if (oldId && oldId !== newId) {
20945 this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
20946 this.tech_.trigger({
20947 type: 'usage',
20948 name: "vhs-rendition-change-" + cause
20949 });
20950 }
20951
20952 this.masterPlaylistLoader_.media(playlist, delay);
20953 }
20954 /**
20955 * Start a timer that periodically calls checkABR_
20956 *
20957 * @private
20958 */
20959 ;
20960
20961 _proto.startABRTimer_ = function startABRTimer_() {
20962 var _this2 = this;
20963
20964 this.stopABRTimer_();
20965 this.abrTimer_ = window__default['default'].setInterval(function () {
20966 return _this2.checkABR_();
20967 }, 250);
20968 }
20969 /**
20970 * Stop the timer that periodically calls checkABR_
20971 *
20972 * @private
20973 */
20974 ;
20975
20976 _proto.stopABRTimer_ = function stopABRTimer_() {
20977 // if we're scrubbing, we don't need to pause.
20978 // This getter will be added to Video.js in version 7.11.
20979 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
20980 return;
20981 }
20982
20983 window__default['default'].clearInterval(this.abrTimer_);
20984 this.abrTimer_ = null;
20985 }
20986 /**
20987 * Get a list of playlists for the currently selected audio playlist
20988 *
20989 * @return {Array} the array of audio playlists
20990 */
20991 ;
20992
20993 _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
20994 var master = this.master(); // if we don't have any audio groups then we can only
20995 // assume that the audio tracks are contained in the master
20996 // playlists array; use that or an empty array.
20997
20998 if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
20999 return master && master.playlists || [];
21000 }
21001
21002 var AUDIO = master.mediaGroups.AUDIO;
21003 var groupKeys = Object.keys(AUDIO);
21004 var track; // get the current active track
21005
21006 if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
21007 track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't setup yet
21008 } else {
21009 // default group is `main` or just the first group.
21010 var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
21011
21012 for (var label in defaultGroup) {
21013 if (defaultGroup[label].default) {
21014 track = {
21015 label: label
21016 };
21017 break;
21018 }
21019 }
21020 } // no active track, no playlists.
21021
21022
21023 if (!track) {
21024 return [];
21025 }
21026
21027 var playlists = []; // get all of the playlists that are possible for the
21028 // active track.
21029
21030 for (var group in AUDIO) {
21031 if (AUDIO[group][track.label]) {
21032 var properties = AUDIO[group][track.label];
21033
21034 if (properties.playlists) {
21035 playlists.push.apply(playlists, properties.playlists);
21036 } else {
21037 playlists.push(properties);
21038 }
21039 }
21040 }
21041
21042 return playlists;
21043 }
21044 /**
21045 * Register event handlers on the master playlist loader. A helper
21046 * function for construction time.
21047 *
21048 * @private
21049 */
21050 ;
21051
21052 _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
21053 var _this3 = this;
21054
21055 this.masterPlaylistLoader_.on('loadedmetadata', function () {
21056 var media = _this3.masterPlaylistLoader_.media();
21057
21058 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
21059 // timeout the request.
21060
21061 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
21062 _this3.requestOptions_.timeout = 0;
21063 } else {
21064 _this3.requestOptions_.timeout = requestTimeout;
21065 } // if this isn't a live video and preload permits, start
21066 // downloading segments
21067
21068
21069 if (media.endList && _this3.tech_.preload() !== 'none') {
21070 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
21071
21072 _this3.mainSegmentLoader_.load();
21073 }
21074
21075 setupMediaGroups({
21076 sourceType: _this3.sourceType_,
21077 segmentLoaders: {
21078 AUDIO: _this3.audioSegmentLoader_,
21079 SUBTITLES: _this3.subtitleSegmentLoader_,
21080 main: _this3.mainSegmentLoader_
21081 },
21082 tech: _this3.tech_,
21083 requestOptions: _this3.requestOptions_,
21084 masterPlaylistLoader: _this3.masterPlaylistLoader_,
21085 vhs: _this3.vhs_,
21086 master: _this3.master(),
21087 mediaTypes: _this3.mediaTypes_,
21088 blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
21089 });
21090
21091 _this3.triggerPresenceUsage_(_this3.master(), media);
21092
21093 _this3.setupFirstPlay();
21094
21095 if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
21096 _this3.trigger('selectedinitialmedia');
21097 } else {
21098 // We must wait for the active audio playlist loader to
21099 // finish setting up before triggering this event so the
21100 // representations API and EME setup is correct
21101 _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
21102 _this3.trigger('selectedinitialmedia');
21103 });
21104 }
21105 });
21106 this.masterPlaylistLoader_.on('loadedplaylist', function () {
21107 if (_this3.loadOnPlay_) {
21108 _this3.tech_.off('play', _this3.loadOnPlay_);
21109 }
21110
21111 var updatedPlaylist = _this3.masterPlaylistLoader_.media();
21112
21113 if (!updatedPlaylist) {
21114 // exclude any variants that are not supported by the browser before selecting
21115 // an initial media as the playlist selectors do not consider browser support
21116 _this3.excludeUnsupportedVariants_();
21117
21118 var selectedMedia;
21119
21120 if (_this3.enableLowInitialPlaylist) {
21121 selectedMedia = _this3.selectInitialPlaylist();
21122 }
21123
21124 if (!selectedMedia) {
21125 selectedMedia = _this3.selectPlaylist();
21126 }
21127
21128 if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
21129 return;
21130 }
21131
21132 _this3.initialMedia_ = selectedMedia;
21133
21134 _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
21135 // fire again since the playlist will be requested. In the case of vhs-json
21136 // (where the manifest object is provided as the source), when the media
21137 // playlist's `segments` list is already available, a media playlist won't be
21138 // requested, and loadedplaylist won't fire again, so the playlist handler must be
21139 // called on its own here.
21140
21141
21142 var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
21143
21144 if (!haveJsonSource) {
21145 return;
21146 }
21147
21148 updatedPlaylist = _this3.initialMedia_;
21149 }
21150
21151 _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
21152 });
21153 this.masterPlaylistLoader_.on('error', function () {
21154 _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
21155 });
21156 this.masterPlaylistLoader_.on('mediachanging', function () {
21157 _this3.mainSegmentLoader_.abort();
21158
21159 _this3.mainSegmentLoader_.pause();
21160 });
21161 this.masterPlaylistLoader_.on('mediachange', function () {
21162 var media = _this3.masterPlaylistLoader_.media();
21163
21164 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
21165 // timeout the request.
21166
21167 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
21168 _this3.requestOptions_.timeout = 0;
21169 } else {
21170 _this3.requestOptions_.timeout = requestTimeout;
21171 } // TODO: Create a new event on the PlaylistLoader that signals
21172 // that the segments have changed in some way and use that to
21173 // update the SegmentLoader instead of doing it twice here and
21174 // on `loadedplaylist`
21175
21176
21177 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
21178
21179 _this3.mainSegmentLoader_.load();
21180
21181 _this3.tech_.trigger({
21182 type: 'mediachange',
21183 bubbles: true
21184 });
21185 });
21186 this.masterPlaylistLoader_.on('playlistunchanged', function () {
21187 var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
21188 // excluded for not-changing. We likely just have a really slowly updating
21189 // playlist.
21190
21191
21192 if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
21193 return;
21194 }
21195
21196 var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
21197
21198 if (playlistOutdated) {
21199 // Playlist has stopped updating and we're stuck at its end. Try to
21200 // blacklist it and switch to another playlist in the hope that that
21201 // one is updating (and give the player a chance to re-adjust to the
21202 // safe live point).
21203 _this3.blacklistCurrentPlaylist({
21204 message: 'Playlist no longer updating.',
21205 reason: 'playlist-unchanged'
21206 }); // useful for monitoring QoS
21207
21208
21209 _this3.tech_.trigger('playliststuck');
21210 }
21211 });
21212 this.masterPlaylistLoader_.on('renditiondisabled', function () {
21213 _this3.tech_.trigger({
21214 type: 'usage',
21215 name: 'vhs-rendition-disabled'
21216 });
21217
21218 _this3.tech_.trigger({
21219 type: 'usage',
21220 name: 'hls-rendition-disabled'
21221 });
21222 });
21223 this.masterPlaylistLoader_.on('renditionenabled', function () {
21224 _this3.tech_.trigger({
21225 type: 'usage',
21226 name: 'vhs-rendition-enabled'
21227 });
21228
21229 _this3.tech_.trigger({
21230 type: 'usage',
21231 name: 'hls-rendition-enabled'
21232 });
21233 });
21234 }
21235 /**
21236 * Given an updated media playlist (whether it was loaded for the first time, or
21237 * refreshed for live playlists), update any relevant properties and state to reflect
21238 * changes in the media that should be accounted for (e.g., cues and duration).
21239 *
21240 * @param {Object} updatedPlaylist the updated media playlist object
21241 *
21242 * @private
21243 */
21244 ;
21245
21246 _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
21247 if (this.useCueTags_) {
21248 this.updateAdCues_(updatedPlaylist);
21249 } // TODO: Create a new event on the PlaylistLoader that signals
21250 // that the segments have changed in some way and use that to
21251 // update the SegmentLoader instead of doing it twice here and
21252 // on `mediachange`
21253
21254
21255 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
21256 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
21257 // as it is possible that it was temporarily stopped while waiting for
21258 // a playlist (e.g., in case the playlist errored and we re-requested it).
21259
21260 if (!this.tech_.paused()) {
21261 this.mainSegmentLoader_.load();
21262
21263 if (this.audioSegmentLoader_) {
21264 this.audioSegmentLoader_.load();
21265 }
21266 }
21267 }
21268 /**
21269 * A helper function for triggering presence usage events once per source
21270 *
21271 * @private
21272 */
21273 ;
21274
21275 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
21276 var mediaGroups = master.mediaGroups || {};
21277 var defaultDemuxed = true;
21278 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
21279
21280 for (var mediaGroup in mediaGroups.AUDIO) {
21281 for (var label in mediaGroups.AUDIO[mediaGroup]) {
21282 var properties = mediaGroups.AUDIO[mediaGroup][label];
21283
21284 if (!properties.uri) {
21285 defaultDemuxed = false;
21286 }
21287 }
21288 }
21289
21290 if (defaultDemuxed) {
21291 this.tech_.trigger({
21292 type: 'usage',
21293 name: 'vhs-demuxed'
21294 });
21295 this.tech_.trigger({
21296 type: 'usage',
21297 name: 'hls-demuxed'
21298 });
21299 }
21300
21301 if (Object.keys(mediaGroups.SUBTITLES).length) {
21302 this.tech_.trigger({
21303 type: 'usage',
21304 name: 'vhs-webvtt'
21305 });
21306 this.tech_.trigger({
21307 type: 'usage',
21308 name: 'hls-webvtt'
21309 });
21310 }
21311
21312 if (Vhs$1.Playlist.isAes(media)) {
21313 this.tech_.trigger({
21314 type: 'usage',
21315 name: 'vhs-aes'
21316 });
21317 this.tech_.trigger({
21318 type: 'usage',
21319 name: 'hls-aes'
21320 });
21321 }
21322
21323 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
21324 this.tech_.trigger({
21325 type: 'usage',
21326 name: 'vhs-alternate-audio'
21327 });
21328 this.tech_.trigger({
21329 type: 'usage',
21330 name: 'hls-alternate-audio'
21331 });
21332 }
21333
21334 if (this.useCueTags_) {
21335 this.tech_.trigger({
21336 type: 'usage',
21337 name: 'vhs-playlist-cue-tags'
21338 });
21339 this.tech_.trigger({
21340 type: 'usage',
21341 name: 'hls-playlist-cue-tags'
21342 });
21343 }
21344 };
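/* Example (editorial sketch, not part of the bundle): the presence usage events
 * above fire on the tech as `usage` events. A consumer could observe them like
 * this, assuming a standard video.js player (the flag object silences the
 * tech() accessor warning):
 *
 *   var player = videojs('example-player');
 *   player.tech({ IWillNotUseThisInPlugins: true }).on('usage', function (event) {
 *     console.log('VHS usage:', event.name); // e.g. 'vhs-demuxed', 'vhs-webvtt'
 *   });
 */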
21345
21346 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
21347 var currentPlaylist = this.masterPlaylistLoader_.media();
21348 var buffered = this.tech_.buffered();
21349 var forwardBuffer = buffered.length ? buffered.end(buffered.length - 1) - this.tech_.currentTime() : 0;
21350 var bufferLowWaterLine = this.bufferLowWaterLine();
21351 var bufferHighWaterLine = this.bufferHighWaterLine();
21352 return shouldSwitchToMedia({
21353 currentPlaylist: currentPlaylist,
21354 nextPlaylist: nextPlaylist,
21355 forwardBuffer: forwardBuffer,
21356 bufferLowWaterLine: bufferLowWaterLine,
21357 bufferHighWaterLine: bufferHighWaterLine,
21358 duration: this.duration(),
21359 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
21360 log: this.logger_
21361 });
21362 }
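/* Example (editorial): shouldSwitchToMedia_ measures the forward buffer as the
 * distance from the playhead to the end of the last buffered range. In
 * isolation, given TimeRanges-like data:
 *
 *   // buffered: [[0, 10], [20, 45]], currentTime: 38
 *   var forwardBuffer = buffered.length ?
 *     buffered.end(buffered.length - 1) - currentTime : 0;
 *   // forwardBuffer === 45 - 38 === 7 seconds ahead of the playhead
 */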
21363 /**
21364 * Register event handlers on the segment loaders. A helper function
21365 * for construction time.
21366 *
21367 * @private
21368 */
21369 ;
21370
21371 _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
21372 var _this4 = this;
21373
21374 if (!this.experimentalBufferBasedABR) {
21375 this.mainSegmentLoader_.on('bandwidthupdate', function () {
21376 var nextPlaylist = _this4.selectPlaylist();
21377
21378 if (_this4.shouldSwitchToMedia_(nextPlaylist)) {
21379 _this4.switchMedia_(nextPlaylist, 'bandwidthupdate');
21380 }
21381
21382 _this4.tech_.trigger('bandwidthupdate');
21383 });
21384 this.mainSegmentLoader_.on('progress', function () {
21385 _this4.trigger('progress');
21386 });
21387 }
21388
21389 this.mainSegmentLoader_.on('error', function () {
21390 _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
21391 });
21392 this.mainSegmentLoader_.on('appenderror', function () {
21393 _this4.error = _this4.mainSegmentLoader_.error_;
21394
21395 _this4.trigger('error');
21396 });
21397 this.mainSegmentLoader_.on('syncinfoupdate', function () {
21398 _this4.onSyncInfoUpdate_();
21399 });
21400 this.mainSegmentLoader_.on('timestampoffset', function () {
21401 _this4.tech_.trigger({
21402 type: 'usage',
21403 name: 'vhs-timestamp-offset'
21404 });
21405
21406 _this4.tech_.trigger({
21407 type: 'usage',
21408 name: 'hls-timestamp-offset'
21409 });
21410 });
21411 this.audioSegmentLoader_.on('syncinfoupdate', function () {
21412 _this4.onSyncInfoUpdate_();
21413 });
21414 this.audioSegmentLoader_.on('appenderror', function () {
21415 _this4.error = _this4.audioSegmentLoader_.error_;
21416
21417 _this4.trigger('error');
21418 });
21419 this.mainSegmentLoader_.on('ended', function () {
21420 _this4.logger_('main segment loader ended');
21421
21422 _this4.onEndOfStream();
21423 });
21424 this.mainSegmentLoader_.on('earlyabort', function (event) {
21425 // never try to early abort with the new ABR algorithm
21426 if (_this4.experimentalBufferBasedABR) {
21427 return;
21428 }
21429
21430 _this4.delegateLoaders_('all', ['abort']);
21431
21432 _this4.blacklistCurrentPlaylist({
21433 message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
21434 }, ABORT_EARLY_BLACKLIST_SECONDS);
21435 });
21436
21437 var updateCodecs = function updateCodecs() {
21438 if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
21439 return _this4.tryToCreateSourceBuffers_();
21440 }
21441
21442 var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
21443
21444
21445 if (!codecs) {
21446 return;
21447 }
21448
21449 _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
21450 };
21451
21452 this.mainSegmentLoader_.on('trackinfo', updateCodecs);
21453 this.audioSegmentLoader_.on('trackinfo', updateCodecs);
21454 this.mainSegmentLoader_.on('fmp4', function () {
21455 if (!_this4.triggeredFmp4Usage) {
21456 _this4.tech_.trigger({
21457 type: 'usage',
21458 name: 'vhs-fmp4'
21459 });
21460
21461 _this4.tech_.trigger({
21462 type: 'usage',
21463 name: 'hls-fmp4'
21464 });
21465
21466 _this4.triggeredFmp4Usage = true;
21467 }
21468 });
21469 this.audioSegmentLoader_.on('fmp4', function () {
21470 if (!_this4.triggeredFmp4Usage) {
21471 _this4.tech_.trigger({
21472 type: 'usage',
21473 name: 'vhs-fmp4'
21474 });
21475
21476 _this4.tech_.trigger({
21477 type: 'usage',
21478 name: 'hls-fmp4'
21479 });
21480
21481 _this4.triggeredFmp4Usage = true;
21482 }
21483 });
21484 this.audioSegmentLoader_.on('ended', function () {
21485 _this4.logger_('audioSegmentLoader ended');
21486
21487 _this4.onEndOfStream();
21488 });
21489 };
21490
21491 _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
21492 return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded; // total media seconds loaded across the audio and main loaders
21493 }
21494 /**
21495 * Call load on our SegmentLoaders
21496 */
21497 ;
21498
21499 _proto.load = function load() {
21500 this.mainSegmentLoader_.load();
21501
21502 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
21503 this.audioSegmentLoader_.load();
21504 }
21505
21506 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
21507 this.subtitleSegmentLoader_.load();
21508 }
21509 }
21510 /**
21511 * Re-tune playback quality level for the current player
21512 * conditions without performing destructive actions, like
21513 * removing already buffered content
21514 *
21515 * @private
21516 */
21517 ;
21518
21519 _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
21520 if (media === void 0) {
21521 media = this.selectPlaylist();
21522 }
21523
21524 if (media === this.masterPlaylistLoader_.media()) {
21525 return;
21526 }
21527
21528 this.switchMedia_(media, 'smooth-quality');
21529 this.mainSegmentLoader_.resetLoader(); // don't need to reset audio as it is reset when media changes
21530 }
21531 /**
21532 * Re-tune playback quality level for the current player
21533 * conditions. This method will perform destructive actions like removing
21534 * already buffered content in order to readjust the currently active
21535 * playlist quickly. This is good for manual quality changes
21536 *
21537 * @private
21538 */
21539 ;
21540
21541 _proto.fastQualityChange_ = function fastQualityChange_(media) {
21542 var _this5 = this;
21543
21544 if (media === void 0) {
21545 media = this.selectPlaylist();
21546 }
21547
21548 if (media === this.masterPlaylistLoader_.media()) {
21549 this.logger_('skipping fastQualityChange because new media is same as old');
21550 return;
21551 }
21552
21553 this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
21554 // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
21555 // ahead is roughly the minimum that will accomplish this across a variety of content
21556 // in IE and Edge, but seeking in place is sufficient on all other browsers)
21557 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
21558 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
21559
21560 this.mainSegmentLoader_.resetEverything(function () {
21561 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
21562 // from the previously enabled rendition to load before the new playlist has finished loading
21563 if (videojs__default['default'].browser.IE_VERSION || videojs__default['default'].browser.IS_EDGE) {
21564 _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
21565 } else {
21566 _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
21567 }
21568 }); // don't need to reset audio as it is reset when media changes
21569 }
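/* Example (editorial): in isolation, the flush-and-nudge above amounts to:
 *
 *   var time = tech.currentTime();
 *   tech.setCurrentTime(needsNudge ? time + 0.04 : time);
 *
 * where `needsNudge` is a hypothetical stand-in for the
 * videojs.browser.IE_VERSION / videojs.browser.IS_EDGE checks used above.
 */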
21570 /**
21571 * Begin playback.
21572 */
21573 ;
21574
21575 _proto.play = function play() {
21576 if (this.setupFirstPlay()) {
21577 return;
21578 }
21579
21580 if (this.tech_.ended()) {
21581 this.tech_.setCurrentTime(0);
21582 }
21583
21584 if (this.hasPlayed_) {
21585 this.load();
21586 }
21587
21588 var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
21589 // seek forward to the live point
21590
21591 if (this.tech_.duration() === Infinity) {
21592 if (this.tech_.currentTime() < seekable.start(0)) {
21593 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
21594 }
21595 }
21596 }
21597 /**
21598 * Seek to the latest media position if this is a live video and the
21599 * player and video are loaded and initialized.
21600 */
21601 ;
21602
21603 _proto.setupFirstPlay = function setupFirstPlay() {
21604 var _this6 = this;
21605
21606 var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
21607 // If 1) there is no active media
21608 // 2) the player is paused
21609 // 3) the first play has already been setup
21610 // then exit early
21611
21612 if (!media || this.tech_.paused() || this.hasPlayed_) {
21613 return false;
21614 } // when the video is a live stream
21615
21616
21617 if (!media.endList) {
21618 var seekable = this.seekable();
21619
21620 if (!seekable.length) {
21621 // without a seekable range, the player cannot seek to begin buffering at the live
21622 // point
21623 return false;
21624 }
21625
21626 if (videojs__default['default'].browser.IE_VERSION && this.tech_.readyState() === 0) {
21627 // IE11 throws an InvalidStateError if you try to set currentTime while the
21628 // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
21629 this.tech_.one('loadedmetadata', function () {
21630 _this6.trigger('firstplay');
21631
21632 _this6.tech_.setCurrentTime(seekable.end(0));
21633
21634 _this6.hasPlayed_ = true;
21635 });
21636 return false;
21637 } // trigger firstplay to inform the source handler to ignore the next seek event
21638
21639
21640 this.trigger('firstplay'); // seek to the live point
21641
21642 this.tech_.setCurrentTime(seekable.end(0));
21643 }
21644
21645 this.hasPlayed_ = true; // we can begin loading now that everything is ready
21646
21647 this.load();
21648 return true;
21649 }
21650 /**
21651 * handle the sourceopen event on the MediaSource
21652 *
21653 * @private
21654 */
21655 ;
21656
21657 _proto.handleSourceOpen_ = function handleSourceOpen_() {
21658 // Only attempt to create the source buffer if none already exist.
21659 // handleSourceOpen is also called when we are "re-opening" a source buffer
21660 // after `endOfStream` has been called (in response to a seek for instance)
21661 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
21662 // code in video.js but is required because play() must be invoked
21663 // *after* the media source has opened.
21664
21665 if (this.tech_.autoplay()) {
21666 var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
21667 // on browsers which return a promise
21668
21669 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
21670 playPromise.then(null, function (e) {});
21671 }
21672 }
21673
21674 this.trigger('sourceopen');
21675 }
21676 /**
21677 * handle the sourceended event on the MediaSource
21678 *
21679 * @private
21680 */
21681 ;
21682
21683 _proto.handleSourceEnded_ = function handleSourceEnded_() {
21684 if (!this.inbandTextTracks_.metadataTrack_) {
21685 return;
21686 }
21687
21688 var cues = this.inbandTextTracks_.metadataTrack_.cues;
21689
21690 if (!cues || !cues.length) {
21691 return;
21692 }
21693
21694 var duration = this.duration();
21695 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
21696 }
21697 /**
21698 * handle the durationchange event on the MediaSource
21699 *
21700 * @private
21701 */
21702 ;
21703
21704 _proto.handleDurationChange_ = function handleDurationChange_() {
21705 this.tech_.trigger('durationchange');
21706 }
21707 /**
21708 * Calls endOfStream on the media source when all active stream types have called
21709 * endOfStream
21710 *
21713 * @private
21714 */
21715 ;
21716
21717 _proto.onEndOfStream = function onEndOfStream() {
21718 var isEndOfStream = this.mainSegmentLoader_.ended_;
21719
21720 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
21721 // if the audio playlist loader exists, then alternate audio is active
21722 if (!this.mainSegmentLoader_.currentMediaInfo_ || this.mainSegmentLoader_.currentMediaInfo_.hasVideo) {
21723 // if we do not know if the main segment loader contains video yet or if we
21724 // definitively know the main segment loader contains video, then we need to wait
21725 // for both main and audio segment loaders to call endOfStream
21726 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
21727 } else {
21728 // otherwise just rely on the audio loader
21729 isEndOfStream = this.audioSegmentLoader_.ended_;
21730 }
21731 }
21732
21733 if (!isEndOfStream) {
21734 return;
21735 }
21736
21737 this.stopABRTimer_();
21738 this.sourceUpdater_.endOfStream();
21739 }
21740 /**
21741 * Check if a playlist has stopped being updated
21742 *
21743 * @param {Object} playlist the media playlist object
21744 * @return {boolean} whether the playlist has stopped being updated or not
21745 */
21746 ;
21747
21748 _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
21749 var seekable = this.seekable();
21750
21751 if (!seekable.length) {
21752 // playlist doesn't have enough information to determine whether we are stuck
21753 return false;
21754 }
21755
21756 var expired = this.syncController_.getExpiredTime(playlist, this.duration());
21757
21758 if (expired === null) {
21759 return false;
21760 } // does not use the safe live end to calculate playlist end, since we
21761 // don't want to say we are stuck while there is still content
21762
21763
21764 var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
21765 var currentTime = this.tech_.currentTime();
21766 var buffered = this.tech_.buffered();
21767
21768 if (!buffered.length) {
21769 // return true if the playhead reached the absolute end of the playlist
21770 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
21771 }
21772
21773 var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
21774 // end of playlist
21775
21776 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
21777 }
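/* Worked example (editorial; SAFE_TIME_DELTA is assumed to be its small library
 * default, on the order of a tenth of a second): with absolutePlaylistEnd at
 * 120, currentTime at 100 and buffered ending at 119.95, bufferedEnd minus
 * currentTime is 19.95, so there is plenty of buffer left and the playlist is
 * not considered stuck even though the buffer nearly reaches the playlist end.
 * Only when both differences fall within the delta does this return true.
 */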
21778 /**
21779 * Blacklists a playlist for a set amount of time when an error occurs,
21780 * making it unavailable for selection by the rendition selection algorithm
21781 * and then forces a new playlist (rendition) selection.
21782 *
21783 * @param {Object=} error an optional error that may include the playlist
21784 * to blacklist
21785 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
21786 * playlist
21787 */
21788 ;
21789
21790 _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
21791 if (error === void 0) {
21792 error = {};
21793 }
21794
21795 // If the `error` was generated by the playlist loader, it will contain
21796 // the playlist we were trying to load (but failed) and that should be
21797 // blacklisted instead of the currently selected playlist which is likely
21798 // out-of-date in this scenario
21799 var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
21800 blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
21801 // trying to load the master OR while we were disposing of the tech
21802
21803 if (!currentPlaylist) {
21804 this.error = error;
21805
21806 if (this.mediaSource.readyState !== 'open') {
21807 this.trigger('error');
21808 } else {
21809 this.sourceUpdater_.endOfStream('network');
21810 }
21811
21812 return;
21813 }
21814
21815 var playlists = this.masterPlaylistLoader_.master.playlists;
21816 var enabledPlaylists = playlists.filter(isEnabled);
21817 var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
21818 // forever
21819
21820 if (playlists.length === 1 && blacklistDuration !== Infinity) {
21821 videojs__default['default'].log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
21822 this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
21823
21824 return this.masterPlaylistLoader_.load(isFinalRendition);
21825 }
21826
21827 if (isFinalRendition) {
21828 // Since we're on the final non-blacklisted playlist, and we're about to blacklist
21829 // it, instead of erring the player or retrying this playlist, clear out the current
21830 // blacklist. This allows other playlists to be attempted in case any have been
21831 // fixed.
21832 var reincluded = false;
21833 playlists.forEach(function (playlist) {
21834 // skip current playlist which is about to be blacklisted
21835 if (playlist === currentPlaylist) {
21836 return;
21837 }
21838
21839 var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
21840
21841 if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
21842 reincluded = true;
21843 delete playlist.excludeUntil;
21844 }
21845 });
21846
21847 if (reincluded) {
21848 videojs__default['default'].log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
21849 // playlist. This is needed for users relying on the retryplaylist event to catch a
21850 // case where the player might be stuck and looping through "dead" playlists.
21851
21852 this.tech_.trigger('retryplaylist');
21853 }
21854 } // Blacklist this playlist
21855
21856
21857 currentPlaylist.excludeUntil = Date.now() + blacklistDuration * 1000;
21858
21859 if (error.reason) {
21860 currentPlaylist.lastExcludeReason_ = error.reason;
21861 }
21862
21863 this.tech_.trigger('blacklistplaylist');
21864 this.tech_.trigger({
21865 type: 'usage',
21866 name: 'vhs-rendition-blacklisted'
21867 });
21868 this.tech_.trigger({
21869 type: 'usage',
21870 name: 'hls-rendition-blacklisted'
21871 }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
21872 // Would be something like media().id !=== currentPlaylist.id and we would need something
21873 // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
21874 // from loading a new playlist on any blacklist.
21875 // Select a new playlist
21876
21877 var nextPlaylist = this.selectPlaylist();
21878
21879 if (!nextPlaylist) {
21880 this.error = 'Playback cannot continue. No available working or supported playlists.';
21881 this.trigger('error');
21882 return;
21883 }
21884
21885 var logFn = error.internal ? this.logger_ : videojs__default['default'].log.warn;
21886 var errorMessage = error.message ? ' ' + error.message : '';
21887 logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders
21888
21889 if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
21890 this.delegateLoaders_('audio', ['abort', 'pause']);
21891 } // if subtitle group changed reset subtitle loaders
21892
21893
21894 if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
21895 this.delegateLoaders_('subtitle', ['abort', 'pause']);
21896 }
21897
21898 this.delegateLoaders_('main', ['abort', 'pause']);
21899 var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
21900 var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh happened within half a target duration
21901
21902 return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
21903 }
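/* Example (editorial sketch, not library code): the exclusion above is just a
 * timestamp. A playlist counts as blacklisted while `excludeUntil` lies in the
 * future, which a standalone check could express as:
 *
 *   var isExcluded = function (playlist) {
 *     return typeof playlist.excludeUntil === 'number' &&
 *       playlist.excludeUntil > Date.now();
 *   };
 *
 * The isEnabled helper used in this file additionally accounts for the manual
 * `disabled` flag set through the representations API.
 */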
21904 /**
21905 * Pause all segment/playlist loaders
21906 */
21907 ;
21908
21909 _proto.pauseLoading = function pauseLoading() {
21910 this.delegateLoaders_('all', ['abort', 'pause']);
21911 this.stopABRTimer_();
21912 }
21913 /**
21914 * Call a set of functions in order on playlist loaders, segment loaders,
21915 * or both types of loaders.
21916 *
21917 * @param {string} filter
21918 * Filter loaders that should call fnNames using a string. Can be:
21919 * * all - run on all loaders
21920 * * audio - run on all audio loaders
21921 * * subtitle - run on all subtitle loaders
21922 * * main - run on the main/master loaders
21923 *
21924 * @param {Array|string} fnNames
21925 * A string or array of function names to call.
21926 */
21927 ;
21928
21929 _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
21930 var _this7 = this;
21931
21932 var loaders = [];
21933 var dontFilterPlaylist = filter === 'all';
21934
21935 if (dontFilterPlaylist || filter === 'main') {
21936 loaders.push(this.masterPlaylistLoader_);
21937 }
21938
21939 var mediaTypes = [];
21940
21941 if (dontFilterPlaylist || filter === 'audio') {
21942 mediaTypes.push('AUDIO');
21943 }
21944
21945 if (dontFilterPlaylist || filter === 'subtitle') {
21946 mediaTypes.push('CLOSED-CAPTIONS');
21947 mediaTypes.push('SUBTITLES');
21948 }
21949
21950 mediaTypes.forEach(function (mediaType) {
21951 var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
21952
21953 if (loader) {
21954 loaders.push(loader);
21955 }
21956 });
21957 ['main', 'audio', 'subtitle'].forEach(function (name) {
21958 var loader = _this7[name + "SegmentLoader_"];
21959
21960 if (loader && (filter === name || filter === 'all')) {
21961 loaders.push(loader);
21962 }
21963 });
21964 loaders.forEach(function (loader) {
21965 return fnNames.forEach(function (fnName) {
21966 if (typeof loader[fnName] === 'function') {
21967 loader[fnName]();
21968 }
21969 });
21970 });
21971 }
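/* Example (calls visible elsewhere in this file): pausing all playback work
 * delegates to every loader at once:
 *
 *   this.delegateLoaders_('all', ['abort', 'pause']);
 *
 * while a switch that only affects alternate audio narrows the filter:
 *
 *   this.delegateLoaders_('audio', ['abort', 'pause']);
 */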
21972 /**
21973 * set the current time on all segment loaders
21974 *
21975 * @param {number} currentTime the current time to set
21976 * @return {number} the current time
21977 */
21978 ;
21979
21980 _proto.setCurrentTime = function setCurrentTime(currentTime) {
21981 var buffered = findRange(this.tech_.buffered(), currentTime);
21982
21983 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
21984 // return immediately if the metadata is not ready yet
21985 return 0;
21986 } // it's clearly an edge case, but don't throw an error if asked to
21987 // seek within an empty playlist
21988
21989
21990 if (!this.masterPlaylistLoader_.media().segments) {
21991 return 0;
21992 } // if the seek location is already buffered, continue buffering as usual
21993
21994
21995 if (buffered && buffered.length) {
21996 return currentTime;
21997 } // cancel outstanding requests so we begin buffering at the new
21998 // location
21999
22000
22001 this.mainSegmentLoader_.resetEverything();
22002 this.mainSegmentLoader_.abort();
22003
22004 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
22005 this.audioSegmentLoader_.resetEverything();
22006 this.audioSegmentLoader_.abort();
22007 }
22008
22009 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
22010 this.subtitleSegmentLoader_.resetEverything();
22011 this.subtitleSegmentLoader_.abort();
22012 } // start segment loader loading in case they are paused
22013
22014
22015 this.load();
22016 }
22017 /**
22018 * get the current duration
22019 *
22020 * @return {number} the duration
22021 */
22022 ;
22023
22024 _proto.duration = function duration() {
22025 if (!this.masterPlaylistLoader_) {
22026 return 0;
22027 }
22028
22029 var media = this.masterPlaylistLoader_.media();
22030
22031 if (!media) {
22032 // no playlists loaded yet, so can't determine a duration
22033 return 0;
22034 } // Don't rely on the media source for duration in the case of a live playlist since
22035 // setting the native MediaSource's duration to infinity ends up with consequences to
22036 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
22037 //
22038 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
22039 // however, few browsers have support for setLiveSeekableRange()
22040 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
22041 //
22042 // Until a time when the duration of the media source can be set to infinity, and a
22043 // seekable range specified across browsers, just return Infinity.
22044
22045
22046 if (!media.endList) {
22047 return Infinity;
22048 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
22049 // available). If it's not available, fall back to a playlist-calculated estimate.
22050
22051
22052 if (this.mediaSource) {
22053 return this.mediaSource.duration;
22054 }
22055
22056 return Vhs$1.Playlist.duration(media);
22057 }
22058 /**
22059 * check the seekable range
22060 *
22061 * @return {TimeRange} the seekable range
22062 */
22063 ;
22064
22065 _proto.seekable = function seekable() {
22066 return this.seekable_;
22067 };
22068
22069 _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
22070 var audioSeekable;
22071
22072 if (!this.masterPlaylistLoader_) {
22073 return;
22074 }
22075
22076 var media = this.masterPlaylistLoader_.media();
22077
22078 if (!media) {
22079 return;
22080 }
22081
22082 var expired = this.syncController_.getExpiredTime(media, this.duration());
22083
22084 if (expired === null) {
22085 // not enough information to update seekable
22086 return;
22087 }
22088
22089 var master = this.masterPlaylistLoader_.master;
22090 var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
22091
22092 if (mainSeekable.length === 0) {
22093 return;
22094 }
22095
22096 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
22097 media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
22098 expired = this.syncController_.getExpiredTime(media, this.duration());
22099
22100 if (expired === null) {
22101 return;
22102 }
22103
22104 audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
22105
22106 if (audioSeekable.length === 0) {
22107 return;
22108 }
22109 }
22110
22111 var oldEnd;
22112 var oldStart;
22113
22114 if (this.seekable_ && this.seekable_.length) {
22115 oldEnd = this.seekable_.end(0);
22116 oldStart = this.seekable_.start(0);
22117 }
22118
22119 if (!audioSeekable) {
22120 // seekable has been calculated based on buffering video data so it
22121 // can be returned directly
22122 this.seekable_ = mainSeekable;
22123 } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
22124 // seekables are pretty far off, rely on main
22125 this.seekable_ = mainSeekable;
22126 } else {
22127 this.seekable_ = videojs__default['default'].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
22128 } // seekable is the same as last time
22129
22130
22131 if (this.seekable_ && this.seekable_.length) {
22132 if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
22133 return;
22134 }
22135 }
22136
22137 this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
22138 this.tech_.trigger('seekablechanged');
22139 }
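/* Worked example (editorial): with mainSeekable [[10, 60]] and audioSeekable
 * [[12, 58]], the ranges overlap, so the final branch intersects them:
 * start = max(12, 10) = 12, end = min(58, 60) = 58, giving [[12, 58]]. Had
 * audioSeekable been [[65, 90]] (starting past main's end), the ranges would
 * be "pretty far off" and mainSeekable would be used as-is.
 */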
22140 /**
22141 * Update the player duration
22142 */
22143 ;
22144
22145 _proto.updateDuration = function updateDuration(isLive) {
22146 if (this.updateDuration_) {
22147 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
22148 this.updateDuration_ = null;
22149 }
22150
22151 if (this.mediaSource.readyState !== 'open') {
22152 this.updateDuration_ = this.updateDuration.bind(this, isLive);
22153 this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
22154 return;
22155 }
22156
22157 if (isLive) {
22158 var seekable = this.seekable();
22159
22160 if (!seekable.length) {
22161 return;
22162 } // Even in the case of a live playlist, the native MediaSource's duration should not
22163 // be set to Infinity (even though this would be expected for a live playlist), since
22164 // setting the native MediaSource's duration to infinity ends up with consequences to
22165 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
22166 //
22167 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
22168 // however, few browsers have support for setLiveSeekableRange()
22169 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
22170 //
22171 // Until a time when the duration of the media source can be set to infinity, and a
22172 // seekable range specified across browsers, the duration should be greater than or
22173 // equal to the last possible seekable value.
22174 // MediaSource duration starts as NaN
22175 // It is possible (and probable) that this case will never be reached for many
22176 // sources, since the MediaSource reports duration as the highest value without
22177 // accounting for timestamp offset. For example, if the timestamp offset is -100 and
22178 // we buffered times 0 to 100 with real times of 100 to 200, even though current
22179 // time will be between 0 and 100, the native media source may report the duration
22180 // as 200. However, since we report duration separate from the media source (as
22181 // Infinity), and as long as the native media source duration value is greater than
22182 // our reported seekable range, seeks will work as expected. The large number as
22183 // duration for live is actually a strategy used by some players to work around the
22184 // issue of live seekable ranges cited above.
22185
22186
22187 if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
22188 this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
22189 }
22190
22191 return;
22192 }
22193
22194 var buffered = this.tech_.buffered();
22195 var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());
22196
22197 if (buffered.length > 0) {
22198 duration = Math.max(duration, buffered.end(buffered.length - 1));
22199 }
22200
22201 if (this.mediaSource.duration !== duration) {
22202 this.sourceUpdater_.setDuration(duration);
22203 }
22204 }
22205 /**
22206 * dispose of the MasterPlaylistController and everything
22207 * that it controls
22208 */
22209 ;
22210
22211 _proto.dispose = function dispose() {
22212 var _this8 = this;
22213
22214 this.trigger('dispose');
22215 this.decrypter_.terminate();
22216 this.masterPlaylistLoader_.dispose();
22217 this.mainSegmentLoader_.dispose();
22218
22219 if (this.loadOnPlay_) {
22220 this.tech_.off('play', this.loadOnPlay_);
22221 }
22222
22223 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
22224 var groups = _this8.mediaTypes_[type].groups;
22225
22226 for (var id in groups) {
22227 groups[id].forEach(function (group) {
22228 if (group.playlistLoader) {
22229 group.playlistLoader.dispose();
22230 }
22231 });
22232 }
22233 });
22234 this.audioSegmentLoader_.dispose();
22235 this.subtitleSegmentLoader_.dispose();
22236 this.sourceUpdater_.dispose();
22237 this.timelineChangeController_.dispose();
22238 this.stopABRTimer_();
22239
22240 if (this.updateDuration_) {
22241 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
22242 }
22243
22244 this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_); // detach the remaining MediaSource listeners
22245
22246 this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
22247 this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
22248 this.off();
22249 }
22250 /**
22251 * return the master playlist object if we have one
22252 *
22253 * @return {Object} the master playlist object that we parsed
22254 */
22255 ;
22256
22257 _proto.master = function master() {
22258 return this.masterPlaylistLoader_.master;
22259 }
22260 /**
22261 * return the currently selected playlist
22262 *
22263 * @return {Object} the currently selected playlist object that we parsed
22264 */
22265 ;
22266
22267 _proto.media = function media() {
22268 // playlist loader will not return media if it has not been fully loaded
22269 return this.masterPlaylistLoader_.media() || this.initialMedia_;
22270 };
22271
22272 _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
22273 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader; // one or both loaders have not loaded sufficiently to get codecs
22274
22275 if (!this.mainSegmentLoader_.currentMediaInfo_ || usingAudioLoader && !this.audioSegmentLoader_.currentMediaInfo_) {
22276 return false;
22277 }
22278
22279 return true;
22280 };
22281
22282 _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
22283 var _this9 = this;
22284
22285 var media = {
22286 main: this.mainSegmentLoader_.currentMediaInfo_ || {},
22287 audio: this.audioSegmentLoader_.currentMediaInfo_ || {}
22288 }; // set "main" media equal to video
22289
22290 media.video = media.main;
22291 var playlistCodecs = codecsForPlaylist(this.master(), this.media());
22292 var codecs = {};
22293 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
22294
22295 if (media.main.hasVideo) {
22296 codecs.video = playlistCodecs.video || media.main.videoCodec || codecs_js.DEFAULT_VIDEO_CODEC;
22297 }
22298
22299 if (media.main.isMuxed) {
22300 codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || codecs_js.DEFAULT_AUDIO_CODEC);
22301 }
22302
22303 if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
22304 codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || codecs_js.DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
22305
22306 media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
22307 } // no codecs, no playback.
22308
22309
22310 if (!codecs.audio && !codecs.video) {
22311 this.blacklistCurrentPlaylist({
22312 playlist: this.media(),
22313 message: 'Could not determine codecs for playlist.',
22314 blacklistDuration: Infinity
22315 });
22316 return;
22317 } // fmp4 relies on browser support, while ts relies on muxer support
22318
22319
22320 var supportFunction = function supportFunction(isFmp4, codec) {
22321 return isFmp4 ? codecs_js.browserSupportsCodec(codec) : codecs_js.muxerSupportsCodec(codec);
22322 };
22323
22324 var unsupportedCodecs = {};
22325 var unsupportedAudio;
22326 ['video', 'audio'].forEach(function (type) {
22327 if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
22328 var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
22329 unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
22330 unsupportedCodecs[supporter].push(codecs[type]);
22331
22332 if (type === 'audio') {
22333 unsupportedAudio = supporter;
22334 }
22335 }
22336 });
22337
22338 if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
22339 var audioGroup = this.media().attributes.AUDIO;
22340 this.master().playlists.forEach(function (variant) {
22341 var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
22342
22343 if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
22344 variant.excludeUntil = Infinity;
22345 }
22346 });
22347 this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
22348 } // if we have any unsupported codecs blacklist this playlist.
22349
22350
22351 if (Object.keys(unsupportedCodecs).length) {
22352 var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
22353 if (acc) {
22354 acc += ', ';
22355 }
22356
22357 acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
22358 return acc;
22359 }, '') + '.';
22360 this.blacklistCurrentPlaylist({
22361 playlist: this.media(),
22362 internal: true,
22363 message: message,
22364 blacklistDuration: Infinity
22365 });
22366 return;
22367 } // check if codec switching is happening
22368
22369
22370 if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
22371 var switchMessages = [];
22372 ['video', 'audio'].forEach(function (type) {
22373 var newCodec = (codecs_js.parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
22374 var oldCodec = (codecs_js.parseCodecs(codecs[type] || '')[0] || {}).type;
22375
22376 if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
22377 switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
22378 }
22379 });
22380
22381 if (switchMessages.length) {
22382 this.blacklistCurrentPlaylist({
22383 playlist: this.media(),
22384 message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
22385 blacklistDuration: Infinity,
22386 internal: true
22387 });
22388 return;
22389 }
22390 } // TODO: when using the muxer shouldn't we just return
22391 // the codecs that the muxer outputs?
22392
22393
22394 return codecs;
22395 }
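/* Example (editorial sketch): the supportFunction above reflects that fmp4
 * playback depends on the browser while ts playback depends on the bundled
 * muxer. Using the same @videojs/vhs-utils helpers imported at the top of this
 * file:
 *
 *   var codec = 'avc1.4d400d,mp4a.40.2';
 *   var fmp4Playable = codecs_js.browserSupportsCodec(codec); // MSE support
 *   var tsPlayable = codecs_js.muxerSupportsCodec(codec); // mux.js support
 */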
22396 /**
22397 * Create source buffers and exclude any incompatible renditions.
22398 *
22399 * @private
22400 */
22401 ;
22402
22403 _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
22404 // media source is not ready yet or sourceBuffers are already
22405 // created.
22406 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
22407 return;
22408 }
22409
22410 if (!this.areMediaTypesKnown_()) {
22411 return;
22412 }
22413
22414 var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
22415
22416 if (!codecs) {
22417 return;
22418 }
22419
22420 this.sourceUpdater_.createSourceBuffers(codecs);
22421 var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
22422 this.excludeIncompatibleVariants_(codecString);
22423 }
22424 /**
22425 * Excludes playlists with codecs that are unsupported by the muxer and browser.
22426 */
22427 ;
22428
22429 _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
22430 var _this10 = this;
22431
22432 var playlists = this.master().playlists;
22433 var ids = []; // TODO: why don't we have a property to loop through all
22434 // playlists? Why did we ever mix indexes and keys?
22435
22436 Object.keys(playlists).forEach(function (key) {
22437 var variant = playlists[key]; // check if we already processed this playlist.
22438
22439 if (ids.indexOf(variant.id) !== -1) {
22440 return;
22441 }
22442
22443 ids.push(variant.id);
22444 var codecs = codecsForPlaylist(_this10.master(), variant);
22445 var unsupported = [];
22446
22447 if (codecs.audio && !codecs_js.muxerSupportsCodec(codecs.audio) && !codecs_js.browserSupportsCodec(codecs.audio)) {
22448 unsupported.push("audio codec " + codecs.audio);
22449 }
22450
22451 if (codecs.video && !codecs_js.muxerSupportsCodec(codecs.video) && !codecs_js.browserSupportsCodec(codecs.video)) {
22452 unsupported.push("video codec " + codecs.video);
22453 }
22454
22455 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
22456 unsupported.push("text codec " + codecs.text);
22457 }
22458
22459 if (unsupported.length) {
22460 variant.excludeUntil = Infinity;
22461
22462 _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
22463 }
22464 });
22465 }
22466 /**
22467 * Blacklist playlists that are known to be codec or
22468 * stream-incompatible with the SourceBuffer configuration. For
22469 * instance, Media Source Extensions would cause the video element to
22470 * stall waiting for video data if you switched from a variant with
22471 * video and audio to an audio-only one.
22472 *
22473 * @param {Object} media a media playlist compatible with the current
22474 * set of SourceBuffers. Variants in the current master playlist that
22475 * do not appear to have compatible codec or stream configurations
22476 * will be excluded from the default playlist selection algorithm
22477 * indefinitely.
22478 * @private
22479 */
22480 ;
22481
22482 _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
22483 var _this11 = this;
22484
22485 var ids = [];
22486 var playlists = this.master().playlists;
22487 var codecs = unwrapCodecList(codecs_js.parseCodecs(codecString));
22488 var codecCount_ = codecCount(codecs);
22489 var videoDetails = codecs.video && codecs_js.parseCodecs(codecs.video)[0] || null;
22490 var audioDetails = codecs.audio && codecs_js.parseCodecs(codecs.audio)[0] || null;
22491 Object.keys(playlists).forEach(function (key) {
22492 var variant = playlists[key]; // check if we already processed this playlist.
22493 // or if it is already excluded forever.
22494
22495 if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
22496 return;
22497 }
22498
22499 ids.push(variant.id);
22500 var blacklistReasons = []; // get codecs from the playlist for this variant
22501
22502 var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
22503 var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
22504 // variant is incompatible. Wait for mux.js to probe
22505
22506 if (!variantCodecs.audio && !variantCodecs.video) {
22507 return;
22508 } // TODO: we can support this by removing the
22509 // old media source and creating a new one, but it will take some work.
22510 // The number of streams cannot change
22511
22512
22513 if (variantCodecCount !== codecCount_) {
22514 blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
22515 } // only exclude playlists by codec change, if codecs cannot switch
22516 // during playback.
22517
22518
22519 if (!_this11.sourceUpdater_.canChangeType()) {
22520 var variantVideoDetails = variantCodecs.video && codecs_js.parseCodecs(variantCodecs.video)[0] || null;
22521 var variantAudioDetails = variantCodecs.audio && codecs_js.parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
22522
22523 if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
22524 blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
22525 } // the audio codec cannot change
22526
22527
22528 if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
22529 blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
22530 }
22531 }
22532
22533 if (blacklistReasons.length) {
22534 variant.excludeUntil = Infinity;
22535
22536 _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
22537 }
22538 });
22539 };
22540
22541 _proto.updateAdCues_ = function updateAdCues_(media) {
22542 var offset = 0;
22543 var seekable = this.seekable();
22544
22545 if (seekable.length) {
22546 offset = seekable.start(0);
22547 }
22548
22549 updateAdCues(media, this.cueTagsTrack_, offset);
22550 }
22551 /**
22552 * Calculates the desired forward buffer length based on current time
22553 *
22554 * @return {number} Desired forward buffer length in seconds
22555 */
22556 ;
22557
22558 _proto.goalBufferLength = function goalBufferLength() {
22559 var currentTime = this.tech_.currentTime();
22560 var initial = Config.GOAL_BUFFER_LENGTH;
22561 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
22562 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
22563 return Math.min(initial + currentTime * rate, max);
22564 }
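/* Worked example (editorial, assuming the documented defaults of
 * GOAL_BUFFER_LENGTH = 30, GOAL_BUFFER_LENGTH_RATE = 1 and
 * MAX_GOAL_BUFFER_LENGTH = 60): at currentTime 0 the goal is
 * min(30 + 0 * 1, 60) = 30 seconds; by currentTime 45 it has grown to
 * min(30 + 45 * 1, 60) = 60 and stays capped there for the rest of playback.
 */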
22565 /**
22566 * Calculates the desired buffer low water line based on current time
22567 *
22568 * @return {number} Desired buffer low water line in seconds
22569 */
22570 ;
22571
22572 _proto.bufferLowWaterLine = function bufferLowWaterLine() {
22573 var currentTime = this.tech_.currentTime();
22574 var initial = Config.BUFFER_LOW_WATER_LINE;
22575 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
22576 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
22577 var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
22578 return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
22579 };
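/* Worked example (editorial, assuming the documented defaults of
 * BUFFER_LOW_WATER_LINE = 0, BUFFER_LOW_WATER_LINE_RATE = 1 and
 * MAX_BUFFER_LOW_WATER_LINE = 30): at currentTime 10 the low water line is
 * min(0 + 10 * 1, 30) = 10 seconds. shouldSwitchToMedia_ above feeds this
 * value into the rendition-switch decision as one of its buffer thresholds.
 */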
22580
22581 _proto.bufferHighWaterLine = function bufferHighWaterLine() {
22582 return Config.BUFFER_HIGH_WATER_LINE;
22583 };
22584
22585 return MasterPlaylistController;
22586}(videojs__default['default'].EventTarget);
22587
22588/**
22589 * Returns a function that acts as the enable/disable function for a playlist.
22590 *
22591 * @param {PlaylistLoader} loader - The master playlist loader
22592 * @param {string} playlistID - id of the playlist
22593 * @param {Function} changePlaylistFn - A function to be called after a
22594 * playlist's enabled-state has been changed. Will NOT be called if a
22595 * playlist's enabled-state is unchanged
22596 * @param {boolean=} enable - Value to set the playlist enabled-state to
22597 * or if undefined returns the current enabled-state for the playlist
22598 * @return {Function} Function for setting/getting enabled
22599 */
22600
22601var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
22602 return function (enable) {
22603 var playlist = loader.master.playlists[playlistID];
22604 var incompatible = isIncompatible(playlist);
22605 var currentlyEnabled = isEnabled(playlist);
22606
22607 if (typeof enable === 'undefined') {
22608 return currentlyEnabled;
22609 }
22610
22611 if (enable) {
22612 delete playlist.disabled;
22613 } else {
22614 playlist.disabled = true;
22615 }
22616
22617 if (enable !== currentlyEnabled && !incompatible) {
22618 // Ensure the outside world knows about our changes
22619 changePlaylistFn();
22620
22621 if (enable) {
22622 loader.trigger('renditionenabled');
22623 } else {
22624 loader.trigger('renditiondisabled');
22625 }
22626 }
22627
22628 return enable;
22629 };
22630};
22631/**
22632 * The representation object encapsulates the publicly visible information
22633 * in a media playlist along with a setter/getter-type function (enabled)
22634 * for changing the enabled-state of a particular playlist entry
22635 *
22636 * @class Representation
22637 */
22638
22639
22640var Representation = function Representation(vhsHandler, playlist, id) {
22641 var mpc = vhsHandler.masterPlaylistController_,
22642 smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
22643
22644 var changeType = smoothQualityChange ? 'smooth' : 'fast';
22645 var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
22646
22647 if (playlist.attributes) {
22648 var resolution = playlist.attributes.RESOLUTION;
22649 this.width = resolution && resolution.width;
22650 this.height = resolution && resolution.height;
22651 this.bandwidth = playlist.attributes.BANDWIDTH;
22652 }
22653
22654 this.codecs = codecsForPlaylist(mpc.master(), playlist);
22655 this.playlist = playlist; // The id is simply the ordinality of the media playlist
22656 // within the master playlist
22657
22658 this.id = id; // Partially-apply the enableFunction to create a playlist-
22659 // specific variant
22660
22661 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
22662};
22663/**
22664 * A mixin function that adds the `representations` api to an instance
22665 * of the VhsHandler class
22666 *
22667 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
22668 * representation API into
22669 */
22670
22671
22672var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
22673 // Add a single API-specific function to the VhsHandler instance
22674 vhsHandler.representations = function () {
22675 var master = vhsHandler.masterPlaylistController_.master();
22676 var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
22677
22678 if (!playlists) {
22679 return [];
22680 }
22681
22682 return playlists.filter(function (media) {
22683 return !isIncompatible(media);
22684 }).map(function (playlist) {
22685 return new Representation(vhsHandler, playlist, playlist.id);
22686 });
22687 };
22688};
22689
22690/**
22691 * @file playback-watcher.js
22692 *
22693 * Playback starts, and now my watch begins. It shall not end until my death. I shall
22694 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
22695 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
22696 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
22697 * my life and honor to the Playback Watch, for this Player and all the Players to come.
22698 */
22699
22700var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
22701/**
22702 * Returns whether or not the current time should be considered close to buffered content,
22703 * taking into consideration whether there's enough buffered content for proper playback.
22704 *
22705 * @param {Object} options
22706 * Options object
22707 * @param {TimeRange} options.buffered
22708 * Current buffer
22709 * @param {number} options.targetDuration
22710 * The active playlist's target duration
22711 * @param {number} options.currentTime
22712 * The current time of the player
22713 * @return {boolean}
22714 * Whether the current time should be considered close to the buffer
22715 */
22716
22717var closeToBufferedContent = function closeToBufferedContent(_ref) {
22718 var buffered = _ref.buffered,
22719 targetDuration = _ref.targetDuration,
22720 currentTime = _ref.currentTime;
22721
22722 if (!buffered.length) {
22723 return false;
22724 } // At least two to three segments' worth of content should be buffered before there's a
22725 // full enough buffer to consider taking any actions.
22726
22727
22728 if (buffered.end(0) - buffered.start(0) < targetDuration * 2) {
22729 return false;
22730 } // It's possible that, on seek, a remove hasn't completed and the buffered range is
22731 // somewhere past the current time. In that event, don't consider the buffered content
22732 // close.
22733
22734
22735 if (currentTime > buffered.start(0)) {
22736 return false;
22737 } // Since target duration generally represents the max (or close to max) duration of a
22738 // segment, if the buffer is within a segment of the current time, the gap probably
22739 // won't be closed, and current time should be considered close to buffered content.
22740
22741
22742 return buffered.start(0) - currentTime < targetDuration;
22743};
22744/**
22745 * @class PlaybackWatcher
22746 */
22747
22748var PlaybackWatcher = /*#__PURE__*/function () {
22749 /**
22750 * Represents a PlaybackWatcher object.
22751 *
22752 * @class
22753 * @param {Object} options an object that includes the tech and settings
22754 */
22755 function PlaybackWatcher(options) {
22756 var _this = this;
22757
22758 this.masterPlaylistController_ = options.masterPlaylistController;
22759 this.tech_ = options.tech;
22760 this.seekable = options.seekable;
22761 this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
22762 this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
22763 this.media = options.media;
22764 this.consecutiveUpdates = 0;
22765 this.lastRecordedTime = null;
22766 this.timer_ = null;
22767 this.checkCurrentTimeTimeout_ = null;
22768 this.logger_ = logger('PlaybackWatcher');
22769 this.logger_('initialize');
22770
22771 var playHandler = function playHandler() {
22772 return _this.monitorCurrentTime_();
22773 };
22774
22775 var canPlayHandler = function canPlayHandler() {
22776 return _this.monitorCurrentTime_();
22777 };
22778
22779 var waitingHandler = function waitingHandler() {
22780 return _this.techWaiting_();
22781 };
22782
22783 var cancelTimerHandler = function cancelTimerHandler() {
22784 return _this.cancelTimer_();
22785 };
22786
22787 var fixesBadSeeksHandler = function fixesBadSeeksHandler() {
22788 return _this.fixesBadSeeks_();
22789 };
22790
22791 var mpc = this.masterPlaylistController_;
22792 var loaderTypes = ['main', 'subtitle', 'audio'];
22793 var loaderChecks = {};
22794 loaderTypes.forEach(function (type) {
22795 loaderChecks[type] = {
22796 reset: function reset() {
22797 return _this.resetSegmentDownloads_(type);
22798 },
22799 updateend: function updateend() {
22800 return _this.checkSegmentDownloads_(type);
22801 }
22802 };
22803 mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
22804 // isn't changing, we want to reset. We cannot assume that the new rendition
22805 // will also be stalled until we see new appends.
22806
22807 mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
22808 // This prevents one-segment playlists (single vtt or single segment content)
22809 // from being detected as stalling, since the buffer will not change in those
22810 // cases: it already spans the entire video duration.
22811
22812 _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
22813 });
22814 this.tech_.on('seekablechanged', fixesBadSeeksHandler);
22815 this.tech_.on('waiting', waitingHandler);
22816 this.tech_.on(timerCancelEvents, cancelTimerHandler);
22817 this.tech_.on('canplay', canPlayHandler);
22818 /*
22819 An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
22820 is surfaced in one of two ways:
22821 1) The `waiting` event is fired before the player has buffered content, making it impossible
22822 to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
22823 we can check if playback is stalled due to a gap, and skip the gap if necessary.
22824 2) A source with a gap at the beginning of the stream is loaded programmatically while the player
22825 is in a playing state. To catch this case, it's important that our one-time play listener is set up
22826 even if the player is in a playing state
22827 */
22828
22829 this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
22830
22831 this.dispose = function () {
22832 _this.logger_('dispose');
22833
22834 _this.tech_.off('seekablechanged', fixesBadSeeksHandler);
22835
22836 _this.tech_.off('waiting', waitingHandler);
22837
22838 _this.tech_.off(timerCancelEvents, cancelTimerHandler);
22839
22840 _this.tech_.off('canplay', canPlayHandler);
22841
22842 _this.tech_.off('play', playHandler);
22843
22844 loaderTypes.forEach(function (type) {
22845 mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
22846 mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);
22847
22848 _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
22849 });
22850
22851 if (_this.checkCurrentTimeTimeout_) {
22852 window__default['default'].clearTimeout(_this.checkCurrentTimeTimeout_);
22853 }
22854
22855 _this.cancelTimer_();
22856 };
22857 }
22858 /**
22859 * Periodically check current time to see if playback stopped
22860 *
22861 * @private
22862 */
22863
22864
22865 var _proto = PlaybackWatcher.prototype;
22866
22867 _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
22868 this.checkCurrentTime_();
22869
22870 if (this.checkCurrentTimeTimeout_) {
22871 window__default['default'].clearTimeout(this.checkCurrentTimeTimeout_);
22872 } // possible polling intervals: 42ms ≈ 24 fps, 250ms is what WebKit uses, Firefox uses 15ms
22873
22874
22875 this.checkCurrentTimeTimeout_ = window__default['default'].setTimeout(this.monitorCurrentTime_.bind(this), 250);
22876 }
22877 /**
22878 * Reset stalled download stats for a specific type of loader
22879 *
22880 * @param {string} type
22881 * The segment loader type to check.
22882 *
22883 * @listens SegmentLoader#playlistupdate
22884 * @listens Tech#seeking
22885 * @listens Tech#seeked
22886 */
22887 ;
22888
22889 _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
22890 var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
22891
22892 if (this[type + "StalledDownloads_"] > 0) {
22893 this.logger_("resetting possible stalled download count for " + type + " loader");
22894 }
22895
22896 this[type + "StalledDownloads_"] = 0;
22897 this[type + "Buffered_"] = loader.buffered_();
22898 }
22899 /**
22900 * Checks on every segment `appendsdone` to see
22901 * if segment appends are making progress. If they are not,
22902 * and we are still downloading bytes, we blacklist the playlist.
22903 *
22904 * @param {string} type
22905 * The segment loader type to check.
22906 *
22907 * @listens SegmentLoader#appendsdone
22908 */
22909 ;
22910
22911 _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
22912 var mpc = this.masterPlaylistController_;
22913 var loader = mpc[type + "SegmentLoader_"];
22914 var buffered = loader.buffered_();
22915 var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
22916 this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue, or
22917 // the buffered value for this loader changed,
22918 // then appends are working
22919
22920 if (isBufferedDifferent) {
22921 this.resetSegmentDownloads_(type);
22922 return;
22923 }
22924
22925 this[type + "StalledDownloads_"]++;
22926 this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
22927 playlistId: loader.playlist_ && loader.playlist_.id,
22928 buffered: timeRangesToArray(buffered)
22929 }); // after 10 possibly stalled appends with no reset, exclude
22930
22931 if (this[type + "StalledDownloads_"] < 10) {
22932 return;
22933 }
22934
22935 this.logger_(type + " loader stalled download exclusion");
22936 this.resetSegmentDownloads_(type);
22937 this.tech_.trigger({
22938 type: 'usage',
22939 name: "vhs-" + type + "-download-exclusion"
22940 });
22941
22942 if (type === 'subtitle') {
22943 return;
22944 } // TODO: should we exclude audio tracks rather than main tracks
22945 // when type is audio?
22946
22947
22948 mpc.blacklistCurrentPlaylist({
22949 message: "Excessive " + type + " segment downloading detected."
22950 }, Infinity);
22951 }
22952 /**
22953 * The purpose of this function is to emulate the "waiting" event on
22954 * browsers that do not emit it when they are waiting for more
22955 * data to continue playback
22956 *
22957 * @private
22958 */
22959 ;
22960
22961 _proto.checkCurrentTime_ = function checkCurrentTime_() {
22962 if (this.tech_.seeking() && this.fixesBadSeeks_()) {
22963 this.consecutiveUpdates = 0;
22964 this.lastRecordedTime = this.tech_.currentTime();
22965 return;
22966 }
22967
22968 if (this.tech_.paused() || this.tech_.seeking()) {
22969 return;
22970 }
22971
22972 var currentTime = this.tech_.currentTime();
22973 var buffered = this.tech_.buffered();
22974
22975 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
22976 // If current time is at the end of the final buffered region, then any playback
22977 // stall is most likely caused by buffering in a low bandwidth environment. The tech
22978 // should fire a `waiting` event in this scenario, but browser and tech
22979 // inconsistencies mean it may not. Calling `techWaiting_` here allows us to simulate
22980 // responding to a native `waiting` event when the tech fails to emit one.
22981 return this.techWaiting_();
22982 }
22983
22984 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
22985 this.consecutiveUpdates++;
22986 this.waiting_();
22987 } else if (currentTime === this.lastRecordedTime) {
22988 this.consecutiveUpdates++;
22989 } else {
22990 this.consecutiveUpdates = 0;
22991 this.lastRecordedTime = currentTime;
22992 }
22993 }
22994 /**
22995 * Cancels any pending timers and resets the 'timeupdate' mechanism
22996 * designed to detect that we are stalled
22997 *
22998 * @private
22999 */
23000 ;
23001
23002 _proto.cancelTimer_ = function cancelTimer_() {
23003 this.consecutiveUpdates = 0;
23004
23005 if (this.timer_) {
23006 this.logger_('cancelTimer_');
23007 clearTimeout(this.timer_);
23008 }
23009
23010 this.timer_ = null;
23011 }
23012 /**
23013 * Fixes situations where there's a bad seek
23014 *
23015 * @return {boolean} whether an action was taken to fix the seek
23016 * @private
23017 */
23018 ;
23019
23020 _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
23021 var seeking = this.tech_.seeking();
23022
23023 if (!seeking) {
23024 return false;
23025 }
23026
23027 var seekable = this.seekable();
23028 var currentTime = this.tech_.currentTime();
23029 var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
23030 var seekTo;
23031
23032 if (isAfterSeekableRange) {
23033 var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
23034
23035 seekTo = seekableEnd;
23036 }
23037
23038 if (this.beforeSeekableWindow_(seekable, currentTime)) {
23039 var seekableStart = seekable.start(0); // sync to the beginning of the live window
23040 // provide a buffer of .1 seconds to handle rounding/imprecise numbers
23041
23042 seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
23043 // happen in live with a 3 segment playlist), then don't use a time delta
23044 seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
23045 }
23046
23047 if (typeof seekTo !== 'undefined') {
23048 this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
23049 this.tech_.setCurrentTime(seekTo);
23050 return true;
23051 }
23052
23053 var buffered = this.tech_.buffered();
23054
23055 if (closeToBufferedContent({
23056 buffered: buffered,
23057 targetDuration: this.media().targetDuration,
23058 currentTime: currentTime
23059 })) {
23060 seekTo = buffered.start(0) + SAFE_TIME_DELTA;
23061 this.logger_("Buffered region starts (" + buffered.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
23062 this.tech_.setCurrentTime(seekTo);
23063 return true;
23064 }
23065
23066 return false;
23067 }
23068 /**
23069 * Handler for situations when we determine the player is waiting.
23070 *
23071 * @private
23072 */
23073 ;
23074
23075 _proto.waiting_ = function waiting_() {
23076 if (this.techWaiting_()) {
23077 return;
23078 } // All tech waiting checks failed. Use last resort correction
23079
23080
23081 var currentTime = this.tech_.currentTime();
23082 var buffered = this.tech_.buffered();
23083 var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
23084 // region with no indication that anything is amiss (seen in Firefox). Seeking to
23085 // currentTime is usually enough to kickstart the player. This checks that the player
23086 // is currently within a buffered region before attempting a corrective seek.
23087 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
23088 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
23089 // make sure there is ~3 seconds of forward buffer before taking any corrective action
23090 // to avoid triggering an `unknownwaiting` event when the network is slow.
23091
23092 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
23093 this.cancelTimer_();
23094 this.tech_.setCurrentTime(currentTime);
23095 this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
23096
23097 this.tech_.trigger({
23098 type: 'usage',
23099 name: 'vhs-unknown-waiting'
23100 });
23101 this.tech_.trigger({
23102 type: 'usage',
23103 name: 'hls-unknown-waiting'
23104 });
23105 return;
23106 }
23107 }
23108 /**
23109 * Handler for situations when the tech fires a `waiting` event
23110 *
23111 * @return {boolean}
23112 * True if an action was taken (or none was needed) to correct the waiting. False
23113 * if no checks passed
23114 * @private
23115 */
23116 ;
23117
23118 _proto.techWaiting_ = function techWaiting_() {
23119 var seekable = this.seekable();
23120 var currentTime = this.tech_.currentTime();
23121
23122 if (this.tech_.seeking() && this.fixesBadSeeks_()) {
23123 // Tech was seeking and a bad seek was fixed; no further action needed
23124 return true;
23125 }
23126
23127 if (this.tech_.seeking() || this.timer_ !== null) {
23128 // Tech is seeking or already waiting on another action, no action needed
23129 return true;
23130 }
23131
23132 if (this.beforeSeekableWindow_(seekable, currentTime)) {
23133 var livePoint = seekable.end(seekable.length - 1);
23134 this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
23135 this.cancelTimer_();
23136 this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
23137
23138 this.tech_.trigger({
23139 type: 'usage',
23140 name: 'vhs-live-resync'
23141 });
23142 this.tech_.trigger({
23143 type: 'usage',
23144 name: 'hls-live-resync'
23145 });
23146 return true;
23147 }
23148
23149 var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
23150 var buffered = this.tech_.buffered();
23151 var videoUnderflow = this.videoUnderflow_({
23152 audioBuffered: sourceUpdater.audioBuffered(),
23153 videoBuffered: sourceUpdater.videoBuffered(),
23154 currentTime: currentTime
23155 });
23156
23157 if (videoUnderflow) {
23158 // Even though the video underflowed and was stuck in a gap, the audio overplayed
23159 // the gap, leading currentTime into a buffered range. Seeking to currentTime
23160 // allows the video to catch up to the audio position without losing any audio
23161 // (only suffering ~3 seconds of frozen video and a pause in audio playback).
23162 this.cancelTimer_();
23163 this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
23164
23165 this.tech_.trigger({
23166 type: 'usage',
23167 name: 'vhs-video-underflow'
23168 });
23169 this.tech_.trigger({
23170 type: 'usage',
23171 name: 'hls-video-underflow'
23172 });
23173 return true;
23174 }
23175
23176 var nextRange = findNextRange(buffered, currentTime); // check for gap
23177
23178 if (nextRange.length > 0) {
23179 var difference = nextRange.start(0) - currentTime;
23180 this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
23181 this.cancelTimer_();
23182 this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
23183 return true;
23184 } // All checks failed. Returning false to indicate failure to correct waiting
23185
23186
23187 return false;
23188 };
23189
23190 _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
23191 if (allowSeeksWithinUnsafeLiveWindow === void 0) {
23192 allowSeeksWithinUnsafeLiveWindow = false;
23193 }
23194
23195 if (!seekable.length) {
23196 // we can't make a solid case if there's no seekable, default to false
23197 return false;
23198 }
23199
23200 var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
23201 var isLive = !playlist.endList;
23202
23203 if (isLive && allowSeeksWithinUnsafeLiveWindow) {
23204 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
23205 }
23206
23207 if (currentTime > allowedEnd) {
23208 return true;
23209 }
23210
23211 return false;
23212 };
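// Illustrative (a sketch, commented out; `watcher` stands for a PlaybackWatcher
// instance and the numbers are hypothetical). For a live playlist (no endList)
// whose seekable range ends at 100 with a 10s target duration:
//
//   var seekable = videojs.createTimeRanges([[40, 100]]);
//   watcher.afterSeekableWindow_(seekable, 105, { targetDuration: 10 }); // => true
//
//   // with allowSeeksWithinUnsafeLiveWindow, the allowed end stretches to
//   // seekable end + 3 * targetDuration = 130, so 105 is still acceptable:
//   watcher.afterSeekableWindow_(seekable, 105, { targetDuration: 10 }, true); // => false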
23213
23214 _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
23215 if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
23216 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
23217 return true;
23218 }
23219
23220 return false;
23221 };
23222
23223 _proto.videoUnderflow_ = function videoUnderflow_(_ref2) {
23224 var videoBuffered = _ref2.videoBuffered,
23225 audioBuffered = _ref2.audioBuffered,
23226 currentTime = _ref2.currentTime;
23227
23228 // audio only content will not have video underflow :)
23229 if (!videoBuffered) {
23230 return;
23231 }
23232
23233 var gap; // find a gap in demuxed content.
23234
23235 if (videoBuffered.length && audioBuffered.length) {
23236 // in Chrome audio will continue to play for ~3s when we run out of video
23237 // so we have to check that the video buffer did have some buffer in the
23238 // past.
23239 var lastVideoRange = findRange(videoBuffered, currentTime - 3);
23240 var videoRange = findRange(videoBuffered, currentTime);
23241 var audioRange = findRange(audioBuffered, currentTime);
23242
23243 if (audioRange.length && !videoRange.length && lastVideoRange.length) {
23244 gap = {
23245 start: lastVideoRange.end(0),
23246 end: audioRange.end(0)
23247 };
23248 } // find a gap in muxed content.
23249
23250 } else {
23251 var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
23252 // stuck in a gap due to video underflow.
23253
23254 if (!nextRange.length) {
23255 gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
23256 }
23257 }
23258
23259 if (gap) {
23260 this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
23261 return true;
23262 }
23263
23264 return false;
23265 }
23266 /**
23267 * Timer callback. If playback still has not proceeded, then we seek
23268 * to the start of the next buffered region.
23269 *
23270 * @private
23271 */
23272 ;
23273
23274 _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
23275 var buffered = this.tech_.buffered();
23276 var currentTime = this.tech_.currentTime();
23277 var nextRange = findNextRange(buffered, currentTime);
23278 this.cancelTimer_();
23279
23280 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
23281 return;
23282 }
23283
23284 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
23285
23286 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
23287 this.tech_.trigger({
23288 type: 'usage',
23289 name: 'vhs-gap-skip'
23290 });
23291 this.tech_.trigger({
23292 type: 'usage',
23293 name: 'hls-gap-skip'
23294 });
23295 };
23296
23297 _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
23298 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
23299 // playing for ~3 seconds after the video gap starts. This is done to account for
23300 // video buffer underflow/underrun (note that this is not done when there is audio
23301 // buffer underflow/underrun -- in that case the video will stop as soon as it
23302 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
23303 // video stalls). The player's time will reflect the playthrough of audio, so the
23304 // time will appear as if we are in a buffered region, even if we are stuck in a
23305 // "gap."
23306 //
23307 // Example:
23308 // video buffer: 0 => 10.1, 10.2 => 20
23309 // audio buffer: 0 => 20
23310 // overall buffer: 0 => 10.1, 10.2 => 20
23311 // current time: 13
23312 //
23313 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
23314 // however, the audio continued playing until it reached ~3 seconds past the gap
23315 // (13 seconds), at which point it stops as well. Since current time is past the
23316 // gap, findNextRange will return no ranges.
23317 //
23318 // To check for this issue, we see if there is a gap that starts somewhere within
23319 // a 3 second range (3 seconds +/- 1 second) back from our current time.
23320 var gaps = findGaps(buffered);
23321
23322 for (var i = 0; i < gaps.length; i++) {
23323 var start = gaps.start(i);
23324 var end = gaps.end(i); // gap starts no more than 4 seconds back (and no less than 2)
23325
23326 if (currentTime - start < 4 && currentTime - start > 2) {
23327 return {
23328 start: start,
23329 end: end
23330 };
23331 }
23332 }
23333
23334 return null;
23335 };
23336
23337 return PlaybackWatcher;
23338}();
23339
23340var defaultOptions = {
23341 errorInterval: 30,
23342 getSource: function getSource(next) {
23343 var tech = this.tech({
23344 IWillNotUseThisInPlugins: true
23345 });
23346 var sourceObj = tech.currentSource_ || this.currentSource();
23347 return next(sourceObj);
23348 }
23349};
23350/**
23351 * Main entry point for the plugin
23352 *
23353 * @param {Player} player a reference to a videojs Player instance
23354 * @param {Object} [options] an object with plugin options
23355 * @private
23356 */
23357
23358var initPlugin = function initPlugin(player, options) {
23359 var lastCalled = 0;
23360 var seekTo = 0;
23361 var localOptions = videojs__default['default'].mergeOptions(defaultOptions, options);
23362 player.ready(function () {
23363 player.trigger({
23364 type: 'usage',
23365 name: 'vhs-error-reload-initialized'
23366 });
23367 player.trigger({
23368 type: 'usage',
23369 name: 'hls-error-reload-initialized'
23370 });
23371 });
23372 /**
23373 * Player modifications to perform that must wait until `loadedmetadata`
23374 * has been triggered
23375 *
23376 * @private
23377 */
23378
23379 var loadedMetadataHandler = function loadedMetadataHandler() {
23380 if (seekTo) {
23381 player.currentTime(seekTo);
23382 }
23383 };
23384 /**
23385 * Set the source on the player element, play, and seek if necessary
23386 *
23387 * @param {Object} sourceObj An object specifying the source url and mime-type to play
23388 * @private
23389 */
23390
23391
23392 var setSource = function setSource(sourceObj) {
23393 if (sourceObj === null || sourceObj === undefined) {
23394 return;
23395 }
23396
23397 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
23398 player.one('loadedmetadata', loadedMetadataHandler);
23399 player.src(sourceObj);
23400 player.trigger({
23401 type: 'usage',
23402 name: 'vhs-error-reload'
23403 });
23404 player.trigger({
23405 type: 'usage',
23406 name: 'hls-error-reload'
23407 });
23408 player.play();
23409 };
23410 /**
23411 * Attempt to get a source from either the built-in getSource function
23412 * or a custom function provided via the options
23413 *
23414 * @private
23415 */
23416
23417
23418 var errorHandler = function errorHandler() {
23419 // Do not attempt to reload the source if a source-reload occurred before
23420 // 'errorInterval' time has elapsed since the last source-reload
23421 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
23422 player.trigger({
23423 type: 'usage',
23424 name: 'vhs-error-reload-canceled'
23425 });
23426 player.trigger({
23427 type: 'usage',
23428 name: 'hls-error-reload-canceled'
23429 });
23430 return;
23431 }
23432
23433 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
23434 videojs__default['default'].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
23435 return;
23436 }
23437
23438 lastCalled = Date.now();
23439 return localOptions.getSource.call(player, setSource);
23440 };
23441 /**
23442 * Unbind any event handlers that were bound by the plugin
23443 *
23444 * @private
23445 */
23446
23447
23448 var cleanupEvents = function cleanupEvents() {
23449 player.off('loadedmetadata', loadedMetadataHandler);
23450 player.off('error', errorHandler);
23451 player.off('dispose', cleanupEvents);
23452 };
23453 /**
23454 * Cleanup before re-initializing the plugin
23455 *
23456 * @param {Object} [newOptions] an object with plugin options
23457 * @private
23458 */
23459
23460
23461 var reinitPlugin = function reinitPlugin(newOptions) {
23462 cleanupEvents();
23463 initPlugin(player, newOptions);
23464 };
23465
23466 player.on('error', errorHandler);
23467 player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
23468 // initializing the plugin
23469
23470 player.reloadSourceOnError = reinitPlugin;
23471};
23472/**
23473 * Reload the source when an error is detected as long as there
23474 * wasn't an error previously within the last 30 seconds
23475 *
23476 * @param {Object} [options] an object with plugin options
23477 */
23478
23479
23480var reloadSourceOnError = function reloadSourceOnError(options) {
23481 initPlugin(this, options);
23482};
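// Example usage of the plugin above (a sketch, commented out; assumes this
// bundle registers `reloadSourceOnError` on videojs players and that an
// element with id 'example-player' exists):
//
//   var player = videojs('example-player');
//   player.reloadSourceOnError({
//     // only allow one reload per 10 seconds instead of the default 30
//     errorInterval: 10
//   });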
23483
23484var version$4 = "2.7.0";
23485
23486var version$3 = "5.11.0";
23487
23488var version$2 = "0.16.0";
23489
23490var version$1 = "4.6.0";
23491
23492var version = "3.1.2";
23493
23494var Vhs = {
23495 PlaylistLoader: PlaylistLoader,
23496 Playlist: Playlist,
23497 utils: utils,
23498 STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
23499 INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
23500 lastBandwidthSelector: lastBandwidthSelector,
23501 movingAverageBandwidthSelector: movingAverageBandwidthSelector,
23502 comparePlaylistBandwidth: comparePlaylistBandwidth,
23503 comparePlaylistResolution: comparePlaylistResolution,
23504 xhr: xhrFactory()
23505}; // Define getter/setters for config properties
23506
23507Object.keys(Config).forEach(function (prop) {
23508 Object.defineProperty(Vhs, prop, {
23509 get: function get() {
23510 videojs__default['default'].log.warn("using Vhs." + prop + " is UNSAFE; be sure you know what you are doing");
23511 return Config[prop];
23512 },
23513 set: function set(value) {
23514 videojs__default['default'].log.warn("using Vhs." + prop + " is UNSAFE; be sure you know what you are doing");
23515
23516 if (typeof value !== 'number' || value < 0) {
23517 videojs__default['default'].log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
23518 return;
23519 }
23520
23521 Config[prop] = value;
23522 }
23523 });
23524});
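// Illustrative (commented out): every Config key is mirrored onto Vhs with the
// warning getter/setter defined above. Assuming GOAL_BUFFER_LENGTH is one of
// those keys:
//
//   Vhs.GOAL_BUFFER_LENGTH;      // logs the UNSAFE warning, returns the value
//   Vhs.GOAL_BUFFER_LENGTH = 60; // logs the warning, then updates Config
//   Vhs.GOAL_BUFFER_LENGTH = -1; // rejected: value must be a number >= 0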
23525var LOCAL_STORAGE_KEY = 'videojs-vhs';
23526/**
23527 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
23528 *
23529 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
23530 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
23531 * @function handleVhsMediaChange
23532 */
23533
23534var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
23535 var newPlaylist = playlistLoader.media();
23536 var selectedIndex = -1;
23537
23538 for (var i = 0; i < qualityLevels.length; i++) {
23539 if (qualityLevels[i].id === newPlaylist.id) {
23540 selectedIndex = i;
23541 break;
23542 }
23543 }
23544
23545 qualityLevels.selectedIndex_ = selectedIndex;
23546 qualityLevels.trigger({
23547 selectedIndex: selectedIndex,
23548 type: 'change'
23549 });
23550};
23551/**
23552 * Adds quality levels to list once playlist metadata is available
23553 *
23554 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
23555 * @param {Object} vhs Vhs object to listen to for media events.
23556 * @function handleVhsLoadedMetadata
23557 */
23558
23559
23560var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
23561 vhs.representations().forEach(function (rep) {
23562 qualityLevels.addQualityLevel(rep);
23563 });
23564 handleVhsMediaChange(qualityLevels, vhs.playlists);
23565}; // HLS is a source handler, not a tech. Make sure attempts to use it
23566// as one do not cause exceptions.
23567
23568
23569Vhs.canPlaySource = function () {
23570 return videojs__default['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
23571};
23572
23573var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
23574 if (!keySystemOptions) {
23575 return keySystemOptions;
23576 }
23577
23578 var codecs = {};
23579
23580 if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
23581 codecs = unwrapCodecList(codecs_js.parseCodecs(mainPlaylist.attributes.CODECS));
23582 }
23583
23584 if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
23585 codecs.audio = audioPlaylist.attributes.CODECS;
23586 }
23587
23588 var videoContentType = codecs_js.getMimeForCodec(codecs.video);
23589 var audioContentType = codecs_js.getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
23590
23591 var keySystemContentTypes = {};
23592
23593 for (var keySystem in keySystemOptions) {
23594 keySystemContentTypes[keySystem] = {};
23595
23596 if (audioContentType) {
23597 keySystemContentTypes[keySystem].audioContentType = audioContentType;
23598 }
23599
23600 if (videoContentType) {
23601 keySystemContentTypes[keySystem].videoContentType = videoContentType;
23602 } // Default to using the video playlist's PSSH even though they may be different, as
23603 // videojs-contrib-eme will only accept one in the options.
23604 //
23605 // This shouldn't be an issue for most cases as early initialization will handle all
23606 // unique PSSH values, and if they aren't, then encrypted events should have the
23607 // specific information needed for the unique license.
23608
23609
23610 if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
23611 keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
23612 } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
23613 // so we need to prevent overwriting the URL entirely
23614
23615
23616 if (typeof keySystemOptions[keySystem] === 'string') {
23617 keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
23618 }
23619 }
23620
23621 return videojs__default['default'].mergeOptions(keySystemOptions, keySystemContentTypes);
23622};
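// Sketch of the transformation above with hypothetical inputs (commented out;
// the exact MIME strings come from getMimeForCodec and may differ slightly):
//
//   emeKeySystems(
//     { 'com.widevine.alpha': 'https://license.example.com' },
//     { attributes: { CODECS: 'avc1.4d401e,mp4a.40.2' } }
//   );
//   // => roughly:
//   // { 'com.widevine.alpha': {
//   //     url: 'https://license.example.com',
//   //     audioContentType: 'audio/mp4;codecs="mp4a.40.2"',
//   //     videoContentType: 'video/mp4;codecs="avc1.4d401e"' } }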
23623/**
23624 * @typedef {Object} KeySystems
23625 *
23626 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
23627 * Note: not all options are listed here.
23628 *
23629 * @property {Uint8Array} [pssh]
23630 * Protection System Specific Header
23631 */
23632
23633/**
23634 * Goes through all the playlists and collects an array of KeySystems options objects
23635 * containing each playlist's keySystems and their pssh values, if available.
23636 *
23637 * @param {Object[]} playlists
23638 * The playlists to look through
23639 * @param {string[]} keySystems
23640 * The keySystems to collect pssh values for
23641 *
23642 * @return {KeySystems[]}
23643 * An array of KeySystems objects containing available key systems and their
23644 * pssh values
23645 */
23646
23647
23648var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
23649 return playlists.reduce(function (keySystemsArr, playlist) {
23650 if (!playlist.contentProtection) {
23651 return keySystemsArr;
23652 }
23653
23654 var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
23655 var keySystemOptions = playlist.contentProtection[keySystem];
23656
23657 if (keySystemOptions && keySystemOptions.pssh) {
23658 keySystemsObj[keySystem] = {
23659 pssh: keySystemOptions.pssh
23660 };
23661 }
23662
23663 return keySystemsObj;
23664 }, {});
23665
23666 if (Object.keys(keySystemsOptions).length) {
23667 keySystemsArr.push(keySystemsOptions);
23668 }
23669
23670 return keySystemsArr;
23671 }, []);
23672};
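// Hypothetical shape (a sketch, commented out): given two playlists where only
// the first carries a pssh value for Widevine, only that playlist contributes:
//
//   getAllPsshKeySystemsOptions(
//     [
//       { contentProtection: { 'com.widevine.alpha': { pssh: new Uint8Array(0) } } },
//       { contentProtection: { 'com.widevine.alpha': {} } }
//     ],
//     ['com.widevine.alpha']
//   );
//   // => [{ 'com.widevine.alpha': { pssh: /* the Uint8Array */ } }]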
23673/**
23674 * Returns a promise that waits for the
23675 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
23676 *
23677 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
23678 * browsers.
23679 *
23680 * As per the above ticket, this is particularly important for Chrome, where, if
23681 * unencrypted content is appended before encrypted content and the key session has not
23682 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
23683 * during playback.
23684 *
23685 * @param {Object} player
23686 * The player instance
23687 * @param {Object[]} sourceKeySystems
23688 * The key systems options from the player source
23689 * @param {Object} [audioMedia]
23690 * The active audio media playlist (optional)
23691 * @param {Object[]} mainPlaylists
23692 * The playlists found on the master playlist object
23693 *
23694 * @return {Object}
23695 * Promise that resolves when the key session has been created
23696 */
23697
23698
23699var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
23700 var player = _ref.player,
23701 sourceKeySystems = _ref.sourceKeySystems,
23702 audioMedia = _ref.audioMedia,
23703 mainPlaylists = _ref.mainPlaylists;
23704
23705 if (!player.eme.initializeMediaKeys) {
23706 return Promise.resolve();
23707 } // TODO should all audio PSSH values be initialized for DRM?
23708 //
23709 // All unique video rendition pssh values are initialized for DRM, but here only
23710 // the initial audio playlist license is initialized. In theory, an encrypted
23711 // event should be fired if the user switches to an alternative audio playlist
23712 // where a license is required, but this case hasn't yet been tested. In addition, there
23713 // may be many alternate audio playlists unlikely to be used (e.g., multiple different
23714 // languages).
23715
23716
23717 var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
23718 var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
23719 var initializationFinishedPromises = [];
23720 var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
23721 // only place where it should not be deduped is for ms-prefixed APIs, but the early
23722 // return for IE11 above, and the existence of modern EME APIs in addition to
23723 // ms-prefixed APIs on Edge should prevent this from being a concern.
23724 // initializeMediaKeys also won't use the webkit-prefixed APIs.
23725
23726 keySystemsOptionsArr.forEach(function (keySystemsOptions) {
23727 keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
23728 player.tech_.one('keysessioncreated', resolve);
23729 }));
23730 initializationFinishedPromises.push(new Promise(function (resolve, reject) {
23731 player.eme.initializeMediaKeys({
23732 keySystems: keySystemsOptions
23733 }, function (err) {
23734 if (err) {
23735 reject(err);
23736 return;
23737 }
23738
23739 resolve();
23740 });
23741 }));
23742 }); // The reasons Promise.race is chosen over Promise.any:
23743 //
23744 // * Promise.any is only available in Safari 14+.
23745 // * None of these promises are expected to reject. If they do reject, it might be
23746 // better here for the race to surface the rejection, rather than mask it by using
23747 // Promise.any.
23748
23749 return Promise.race([// If a session was previously created, these will all finish resolving without
23750 // creating a new session, otherwise it will take until the end of all license
23751 // requests, which is why the key session check is used (to make setup much faster).
23752 Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
23753 Promise.race(keySessionCreatedPromises)]);
23754};
23755/**
23756 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
23757 * there are keySystems on the source, sets up source options to prepare the source for
23758 * eme.
23759 *
23760 * @param {Object} player
23761 * The player instance
23762 * @param {Object[]} sourceKeySystems
23763 * The key systems options from the player source
23764 * @param {Object} media
23765 * The active media playlist
23766 * @param {Object} [audioMedia]
23767 * The active audio media playlist (optional)
23768 *
23769 * @return {boolean}
23770 * Whether or not options were configured and EME is available
23771 */
23772
23773var setupEmeOptions = function setupEmeOptions(_ref2) {
23774 var player = _ref2.player,
23775 sourceKeySystems = _ref2.sourceKeySystems,
23776 media = _ref2.media,
23777 audioMedia = _ref2.audioMedia;
23778 var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
23779
23780 if (!sourceOptions) {
23781 return false;
23782 }
23783
23784 player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
23785 // do nothing.
23786
23787 if (sourceOptions && !player.eme) {
23788 videojs__default['default'].log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
23789 return false;
23790 }
23791
23792 return true;
23793};
23794
23795var getVhsLocalStorage = function getVhsLocalStorage() {
23796 if (!window__default['default'].localStorage) {
23797 return null;
23798 }
23799
23800 var storedObject = window__default['default'].localStorage.getItem(LOCAL_STORAGE_KEY);
23801
23802 if (!storedObject) {
23803 return null;
23804 }
23805
23806 try {
23807 return JSON.parse(storedObject);
23808 } catch (e) {
23809 // someone may have tampered with the value
23810 return null;
23811 }
23812};
23813
23814var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
23815 if (!window__default['default'].localStorage) {
23816 return false;
23817 }
23818
23819 var objectToStore = getVhsLocalStorage();
23820 objectToStore = objectToStore ? videojs__default['default'].mergeOptions(objectToStore, options) : options;
23821
23822 try {
23823 window__default['default'].localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
23824 } catch (e) {
23825 // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
23826 // storage is set to 0).
23827 // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
23828 // No need to perform any operation.
23829 return false;
23830 }
23831
23832 return objectToStore;
23833};
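// Illustrative round trip (a sketch, commented out). The stored shape matches
// what the 'bandwidthupdate' handler further down writes, keyed under
// 'videojs-vhs' in localStorage:
//
//   updateVhsLocalStorage({ bandwidth: 5000000, throughput: 11000000 });
//   getVhsLocalStorage(); // => { bandwidth: 5000000, throughput: 11000000 }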
23834/**
23835 * Parses VHS-supported media types from data URIs. See
23836 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
23837 * for information on data URIs.
23838 *
23839 * @param {string} dataUri
23840 * The data URI
23841 *
23842 * @return {string|Object}
23843 * The parsed object/string, or the original string if no supported media type
23844 * was found
23845 */
23846
23847
23848var expandDataUri = function expandDataUri(dataUri) {
23849 if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
23850 return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
23851 } // no known case for this data URI, return the string as-is
23852
23853
23854 return dataUri;
23855};
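// Example (commented out): a JSON manifest object can be passed inline via a
// data URI; any other string is returned untouched:
//
//   expandDataUri('data:application/vnd.videojs.vhs+json,{"mediaGroups":{}}');
//   // => { mediaGroups: {} }
//
//   expandDataUri('https://example.com/master.m3u8');
//   // => 'https://example.com/master.m3u8'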
23856/**
23857 * Whether the browser has built-in HLS support.
23858 */
23859
23860
23861Vhs.supportsNativeHls = function () {
23862 if (!document__default['default'] || !document__default['default'].createElement) {
23863 return false;
23864 }
23865
23866 var video = document__default['default'].createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
23867
23868 if (!videojs__default['default'].getTech('Html5').isSupported()) {
23869 return false;
23870 } // HLS manifests can go by many mime-types
23871
23872
23873 var canPlay = [// Apple sanctioned
23874 'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
23875 'audio/mpegurl', // Very common
23876 'audio/x-mpegurl', // Very common
23877 'application/x-mpegurl', // Included for completeness
23878 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
23879 return canPlay.some(function (canItPlay) {
23880 return /maybe|probably/i.test(video.canPlayType(canItPlay));
23881 });
23882}();
23883
23884Vhs.supportsNativeDash = function () {
23885 if (!document__default['default'] || !document__default['default'].createElement || !videojs__default['default'].getTech('Html5').isSupported()) {
23886 return false;
23887 }
23888
23889 return /maybe|probably/i.test(document__default['default'].createElement('video').canPlayType('application/dash+xml'));
23890}();
23891
23892Vhs.supportsTypeNatively = function (type) {
23893 if (type === 'hls') {
23894 return Vhs.supportsNativeHls;
23895 }
23896
23897 if (type === 'dash') {
23898 return Vhs.supportsNativeDash;
23899 }
23900
23901 return false;
23902};
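// Illustrative (commented out): native support is feature-detected once above,
// so these return cached booleans, e.g.:
//
//   Vhs.supportsTypeNatively('hls');  // true on Safari, typically false elsewhere
//   Vhs.supportsTypeNatively('dash'); // typically false in all major browsers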
23903/**
23904 * HLS is a source handler, not a tech. Make sure attempts to use it
23905 * as one do not cause exceptions.
23906 */
23907
23908
23909Vhs.isSupported = function () {
23910 return videojs__default['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
23911};
23912
23913var Component = videojs__default['default'].getComponent('Component');
23914/**
23915 * The Vhs Handler object, where we orchestrate all of the parts
23916 * of HLS to interact with video.js
23917 *
23918 * @class VhsHandler
23919 * @extends videojs.Component
23920 * @param {Object} source the source object
23921 * @param {Tech} tech the parent tech object
23922 * @param {Object} options optional and required options
23923 */
23924
23925var VhsHandler = /*#__PURE__*/function (_Component) {
23926 _inheritsLoose__default['default'](VhsHandler, _Component);
23927
23928 function VhsHandler(source, tech, options) {
23929 var _this;
23930
23931 _this = _Component.call(this, tech, videojs__default['default'].mergeOptions(options.hls, options.vhs)) || this;
23932
23933 if (options.hls && Object.keys(options.hls).length) {
23934 videojs__default['default'].log.warn('Using hls options is deprecated. Use vhs instead.');
23935 }
23936
23937 _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but setup a reference to HLS for
23938 // backwards-compatibility
23939
23940 if (tech.options_ && tech.options_.playerId) {
23941 var _player = videojs__default['default'](tech.options_.playerId);
23942
23943 if (!_player.hasOwnProperty('hls')) {
23944 Object.defineProperty(_player, 'hls', {
23945 get: function get() {
23946 videojs__default['default'].log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
23947 tech.trigger({
23948 type: 'usage',
23949 name: 'hls-player-access'
23950 });
23951 return _assertThisInitialized__default['default'](_this);
23952 },
23953 configurable: true
23954 });
23955 }
23956
23957 if (!_player.hasOwnProperty('vhs')) {
23958 Object.defineProperty(_player, 'vhs', {
23959 get: function get() {
23960 videojs__default['default'].log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
23961 tech.trigger({
23962 type: 'usage',
23963 name: 'vhs-player-access'
23964 });
23965 return _assertThisInitialized__default['default'](_this);
23966 },
23967 configurable: true
23968 });
23969 }
23970
23971 if (!_player.hasOwnProperty('dash')) {
23972 Object.defineProperty(_player, 'dash', {
23973 get: function get() {
23974 videojs__default['default'].log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
23975 return _assertThisInitialized__default['default'](_this);
23976 },
23977 configurable: true
23978 });
23979 }
23980
23981 _this.player_ = _player;
23982 }
23983
23984 _this.tech_ = tech;
23985 _this.source_ = source;
23986 _this.stats = {};
23987 _this.ignoreNextSeekingEvent_ = false;
23988
23989 _this.setOptions_();
23990
23991 if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
23992 tech.overrideNativeAudioTracks(true);
23993 tech.overrideNativeVideoTracks(true);
23994 } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
23995 // overriding native HLS only works if audio tracks have been emulated
23996 // error early if we're misconfigured
23997 throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
23998 } // listen for fullscreenchange events for this player so that we
23999 // can adjust our quality selection quickly
24000
24001
24002 _this.on(document__default['default'], ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
24003 var fullscreenElement = document__default['default'].fullscreenElement || document__default['default'].webkitFullscreenElement || document__default['default'].mozFullScreenElement || document__default['default'].msFullscreenElement;
24004
24005 if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
24006 _this.masterPlaylistController_.smoothQualityChange_();
24007 }
24008 });
24009
24010 _this.on(_this.tech_, 'seeking', function () {
24011 if (this.ignoreNextSeekingEvent_) {
24012 this.ignoreNextSeekingEvent_ = false;
24013 return;
24014 }
24015
24016 this.setCurrentTime(this.tech_.currentTime());
24017 });
24018
24019 _this.on(_this.tech_, 'error', function () {
24020 // verify that the error was real and we are loaded
24021 // enough to have mpc loaded.
24022 if (this.tech_.error() && this.masterPlaylistController_) {
24023 this.masterPlaylistController_.pauseLoading();
24024 }
24025 });
24026
24027 _this.on(_this.tech_, 'play', _this.play);
24028
24029 return _this;
24030 }
24031
24032 var _proto = VhsHandler.prototype;
24033
24034 _proto.setOptions_ = function setOptions_() {
24035 var _this2 = this;
24036
24037 // defaults
24038 this.options_.withCredentials = this.options_.withCredentials || false;
24039 this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
24040 this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
24041 this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
24042 this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
24043 this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
24044 this.options_.customTagParsers = this.options_.customTagParsers || [];
24045 this.options_.customTagMappers = this.options_.customTagMappers || [];
24046 this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
24047 this.options_.handlePartialData = this.options_.handlePartialData || false;
24048
24049 if (typeof this.options_.blacklistDuration !== 'number') {
24050 this.options_.blacklistDuration = 5 * 60;
24051 }
24052
24053 if (typeof this.options_.bandwidth !== 'number') {
24054 if (this.options_.useBandwidthFromLocalStorage) {
24055 var storedObject = getVhsLocalStorage();
24056
24057 if (storedObject && storedObject.bandwidth) {
24058 this.options_.bandwidth = storedObject.bandwidth;
24059 this.tech_.trigger({
24060 type: 'usage',
24061 name: 'vhs-bandwidth-from-local-storage'
24062 });
24063 this.tech_.trigger({
24064 type: 'usage',
24065 name: 'hls-bandwidth-from-local-storage'
24066 });
24067 }
24068
24069 if (storedObject && storedObject.throughput) {
24070 this.options_.throughput = storedObject.throughput;
24071 this.tech_.trigger({
24072 type: 'usage',
24073 name: 'vhs-throughput-from-local-storage'
24074 });
24075 this.tech_.trigger({
24076 type: 'usage',
24077 name: 'hls-throughput-from-local-storage'
24078 });
24079 }
24080 }
24081 } // if bandwidth was not set by options or pulled from local storage, start playlist
24082 // selection at a reasonable bandwidth
24083
24084
24085 if (typeof this.options_.bandwidth !== 'number') {
24086 this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
24087 } // If the bandwidth number is unchanged from the initial setting
24088 // then this takes precedence over the enableLowInitialPlaylist option
24089
24090
24091 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
24092
24093 ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'handlePartialData', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS'].forEach(function (option) {
24094 if (typeof _this2.source_[option] !== 'undefined') {
24095 _this2.options_[option] = _this2.source_[option];
24096 }
24097 });
24098 this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
24099 this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
24100 }
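// Illustrative player-side configuration (a sketch, commented out; URL and
// option values are hypothetical). The source-level options whitelisted above
// can be supplied alongside the src, e.g.:
//
//   player.src({
//     src: 'https://example.com/master.m3u8',
//     type: 'application/x-mpegURL',
//     withCredentials: true,
//     cacheEncryptionKeys: true
//   });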
24101 /**
24102 * called when player.src gets called, handle a new source
24103 *
24104 * @param {Object} src the source object to handle
24105 */
24106 ;
24107
24108 _proto.src = function src(_src, type) {
24109 var _this3 = this;
24110
24111 // do nothing if the src is falsey
24112 if (!_src) {
24113 return;
24114 }
24115
24116 this.setOptions_(); // add master playlist controller options
24117
24118 this.options_.src = expandDataUri(this.source_.src);
24119 this.options_.tech = this.tech_;
24120 this.options_.externVhs = Vhs;
24121 this.options_.sourceType = mediaTypes_js.simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
24122
24123 this.options_.seekTo = function (time) {
24124 _this3.tech_.setCurrentTime(time);
24125 };
24126
24127 this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
24128 var playbackWatcherOptions = videojs__default['default'].mergeOptions({
24129 liveRangeSafeTimeDelta: SAFE_TIME_DELTA
24130 }, this.options_, {
24131 seekable: function seekable() {
24132 return _this3.seekable();
24133 },
24134 media: function media() {
24135 return _this3.masterPlaylistController_.media();
24136 },
24137 masterPlaylistController: this.masterPlaylistController_
24138 });
24139 this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
24140 this.masterPlaylistController_.on('error', function () {
24141 var player = videojs__default['default'].players[_this3.tech_.options_.playerId];
24142 var error = _this3.masterPlaylistController_.error;
24143
24144 if (typeof error === 'object' && !error.code) {
24145 error.code = 3;
24146 } else if (typeof error === 'string') {
24147 error = {
24148 message: error,
24149 code: 3
24150 };
24151 }
24152
24153 player.error(error);
24154 });
24155 var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
24156 // compatibility with < v2
24157
24158 this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
24159 this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
24160
24161 this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
24162 this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
24163 // controller. Using a custom property for backwards compatibility
24164 // with < v2
24165
24166 Object.defineProperties(this, {
24167 selectPlaylist: {
24168 get: function get() {
24169 return this.masterPlaylistController_.selectPlaylist;
24170 },
24171 set: function set(selectPlaylist) {
24172 this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
24173 }
24174 },
24175 throughput: {
24176 get: function get() {
24177 return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
24178 },
24179 set: function set(throughput) {
24180 this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
24181 // for the cumulative average
24182
24183 this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
24184 }
24185 },
24186 bandwidth: {
24187 get: function get() {
24188 return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
24189 },
24190 set: function set(bandwidth) {
24191 this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
24192 // `count` is set to zero so that the current value of `rate` isn't included
24193 // in the cumulative average
24194
24195 this.masterPlaylistController_.mainSegmentLoader_.throughput = {
24196 rate: 0,
24197 count: 0
24198 };
24199 }
24200 },
24201
24202 /**
24203 * `systemBandwidth` is a combination of two serial processes' bit-rates. The first
24204 * is the network bitrate provided by `bandwidth` and the second is the bitrate of
24205 * the entire process after that - decryption, transmuxing, and appending - provided
24206 * by `throughput`.
24207 *
24208 * Since the two processes are serial, the overall system bandwidth is given by:
24209 * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
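 *
 * Worked example (illustrative values, not defaults): with
 * bandwidth = 4,000,000 bits/s and throughput = 12,000,000 bits/s,
 * systemBandwidth = 1 / (1/4e6 + 1/12e6) = 3,000,000 bits/s.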
24210 */
24211 systemBandwidth: {
24212 get: function get() {
24213 var invBandwidth = 1 / (this.bandwidth || 1);
24214 var invThroughput;
24215
24216 if (this.throughput > 0) {
24217 invThroughput = 1 / this.throughput;
24218 } else {
24219 invThroughput = 0;
24220 }
24221
24222 var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
24223 return systemBitrate;
24224 },
24225 set: function set() {
24226 videojs__default['default'].log.error('The "systemBandwidth" property is read-only');
24227 }
24228 }
24229 });

    if (this.options_.bandwidth) {
      this.bandwidth = this.options_.bandwidth;
    }

    if (this.options_.throughput) {
      this.throughput = this.options_.throughput;
    }

    Object.defineProperties(this.stats, {
      bandwidth: {
        get: function get() {
          return _this3.bandwidth || 0;
        },
        enumerable: true
      },
      mediaRequests: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequests_() || 0;
        },
        enumerable: true
      },
      mediaRequestsAborted: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
        },
        enumerable: true
      },
      mediaRequestsTimedout: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
        },
        enumerable: true
      },
      mediaRequestsErrored: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
        },
        enumerable: true
      },
      mediaTransferDuration: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
        },
        enumerable: true
      },
      mediaBytesTransferred: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
        },
        enumerable: true
      },
      mediaSecondsLoaded: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
        },
        enumerable: true
      },
      buffered: {
        get: function get() {
          return timeRangesToArray(_this3.tech_.buffered());
        },
        enumerable: true
      },
      currentTime: {
        get: function get() {
          return _this3.tech_.currentTime();
        },
        enumerable: true
      },
      currentSource: {
        get: function get() {
          return _this3.tech_.currentSource_;
        },
        enumerable: true
      },
      currentTech: {
        get: function get() {
          return _this3.tech_.name_;
        },
        enumerable: true
      },
      duration: {
        get: function get() {
          return _this3.tech_.duration();
        },
        enumerable: true
      },
      master: {
        get: function get() {
          return _this3.playlists.master;
        },
        enumerable: true
      },
      playerDimensions: {
        get: function get() {
          return _this3.tech_.currentDimensions();
        },
        enumerable: true
      },
      seekable: {
        get: function get() {
          return timeRangesToArray(_this3.tech_.seekable());
        },
        enumerable: true
      },
      timestamp: {
        get: function get() {
          return Date.now();
        },
        enumerable: true
      },
      videoPlaybackQuality: {
        get: function get() {
          return _this3.tech_.getVideoPlaybackQuality();
        },
        enumerable: true
      }
    });
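    // Example (sketch): once a source has loaded, these read-only stats can be
    // sampled from application code, e.g. for a debug overlay; `player` is an
    // assumed video.js player instance using this tech:
    //
    //   var stats = player.tech().vhs.stats;
    //   console.log(stats.bandwidth, stats.mediaBytesTransferred, stats.buffered);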
    this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
    this.tech_.on('bandwidthupdate', function () {
      if (_this3.options_.useBandwidthFromLocalStorage) {
        updateVhsLocalStorage({
          bandwidth: _this3.bandwidth,
          throughput: Math.round(_this3.throughput)
        });
      }
    });
    this.masterPlaylistController_.on('selectedinitialmedia', function () {
      // Add the manual rendition mix-in to VhsHandler
      renditionSelectionMixin(_this3);
    });
    this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
      _this3.setupEme_();
    }); // the bandwidth of the primary segment loader is our best
    // estimate of overall bandwidth

    this.on(this.masterPlaylistController_, 'progress', function () {
      this.tech_.trigger('progress');
    }); // In the live case, we need to ignore the very first `seeking` event since
    // that will be the result of the seek-to-live behavior

    this.on(this.masterPlaylistController_, 'firstplay', function () {
      this.ignoreNextSeekingEvent_ = true;
    });
    this.setupQualityLevels_(); // do nothing if the tech has been disposed already
    // this can occur if someone sets the src in player.ready(), for instance

    if (!this.tech_.el()) {
      return;
    }

    this.mediaSourceUrl_ = window__default['default'].URL.createObjectURL(this.masterPlaylistController_.mediaSource);
    this.tech_.src(this.mediaSourceUrl_);
  }
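  // Example (sketch): the `useBandwidthFromLocalStorage` branch above is
  // opt-in via the documented VHS option, set when creating the player;
  // `el` is an assumed <video> element:
  //
  //   videojs(el, { html5: { vhs: { useBandwidthFromLocalStorage: true } } });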
  /**
   * If necessary and EME is available, sets up EME options and waits for key session
   * creation.
   *
   * This function also updates the source updater so that it can be used, as for some
   * browsers, EME must be configured before content is appended (if appending unencrypted
   * content before encrypted content).
   */
  ;

  _proto.setupEme_ = function setupEme_() {
    var _this4 = this;

    var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
    var didSetupEmeOptions = setupEmeOptions({
      player: this.player_,
      sourceKeySystems: this.source_.keySystems,
      media: this.playlists.media(),
      audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
    }); // In IE11 this is too early to initialize media keys, and IE11 does not support
    // promises.

    if (videojs__default['default'].browser.IE_VERSION === 11 || !didSetupEmeOptions) {
      // If EME options were not set up, we've done all we could to initialize EME.
      this.masterPlaylistController_.sourceUpdater_.initializedEme();
      return;
    }

    this.logger_('waiting for EME key session creation');
    waitForKeySessionCreation({
      player: this.player_,
      sourceKeySystems: this.source_.keySystems,
      audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
      mainPlaylists: this.playlists.master.playlists
    }).then(function () {
      _this4.logger_('created EME key session');

      _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
    }).catch(function (err) {
      _this4.logger_('error while creating EME key session', err);

      _this4.player_.error({
        message: 'Failed to initialize media keys for EME',
        code: 3
      });
    });
  }
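  // Example (sketch): `this.source_.keySystems` above originates from the
  // `keySystems` property of the source object (as used with
  // videojs-contrib-eme); the URLs here are placeholders:
  //
  //   player.src({
  //     src: 'https://example.com/dash.mpd',
  //     type: 'application/dash+xml',
  //     keySystems: { 'com.widevine.alpha': 'https://license.example.com' }
  //   });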
  /**
   * Initializes the quality levels and sets listeners to update them.
   *
   * @method setupQualityLevels_
   * @private
   */
  ;

  _proto.setupQualityLevels_ = function setupQualityLevels_() {
    var _this5 = this;

    var player = videojs__default['default'].players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
    // or qualityLevels_ listeners have already been set up, do nothing.

    if (!player || !player.qualityLevels || this.qualityLevels_) {
      return;
    }

    this.qualityLevels_ = player.qualityLevels();
    this.masterPlaylistController_.on('selectedinitialmedia', function () {
      handleVhsLoadedMetadata(_this5.qualityLevels_, _this5);
    });
    this.playlists.on('mediachange', function () {
      handleVhsMediaChange(_this5.qualityLevels_, _this5.playlists);
    });
  }
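  // Example (sketch): with the videojs-contrib-quality-levels plugin loaded,
  // the levels wired up above can be inspected or toggled from application
  // code; the 480p cutoff is arbitrary:
  //
  //   player.qualityLevels().on('addqualitylevel', function (event) {
  //     event.qualityLevel.enabled = event.qualityLevel.height >= 480;
  //   });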
  /**
   * return the version
   */
  ;

  VhsHandler.version = function version$5() {
    return {
      '@videojs/http-streaming': version$4,
      'mux.js': version$3,
      'mpd-parser': version$2,
      'm3u8-parser': version$1,
      'aes-decrypter': version
    };
  }
  /**
   * return the version (instance wrapper around the static method)
   */
  ;

  _proto.version = function version() {
    return this.constructor.version();
  };
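  // Example (sketch): both the static and instance forms return the same map
  // of library names to version strings (values illustrative):
  //
  //   videojs.VhsHandler.version();
  //   // => { '@videojs/http-streaming': '2.7.0', 'mux.js': '...', ... }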

  _proto.canChangeType = function canChangeType() {
    return SourceUpdater.canChangeType();
  }
  /**
   * Begin playing the video.
   */
  ;

  _proto.play = function play() {
    this.masterPlaylistController_.play();
  }
  /**
   * a wrapper around the function in MasterPlaylistController
   */
  ;

  _proto.setCurrentTime = function setCurrentTime(currentTime) {
    this.masterPlaylistController_.setCurrentTime(currentTime);
  }
  /**
   * a wrapper around the function in MasterPlaylistController
   */
  ;

  _proto.duration = function duration() {
    return this.masterPlaylistController_.duration();
  }
  /**
   * a wrapper around the function in MasterPlaylistController
   */
  ;

  _proto.seekable = function seekable() {
    return this.masterPlaylistController_.seekable();
  }
  /**
   * Abort all outstanding work and clean up.
   */
  ;

  _proto.dispose = function dispose() {
    if (this.playbackWatcher_) {
      this.playbackWatcher_.dispose();
    }

    if (this.masterPlaylistController_) {
      this.masterPlaylistController_.dispose();
    }

    if (this.qualityLevels_) {
      this.qualityLevels_.dispose();
    }

    if (this.player_) {
      delete this.player_.vhs;
      delete this.player_.dash;
      delete this.player_.hls;
    }

    if (this.tech_ && this.tech_.vhs) {
      delete this.tech_.vhs;
    } // don't check this.tech_.hls as it will log a deprecated warning


    if (this.tech_) {
      delete this.tech_.hls;
    }

    if (this.mediaSourceUrl_ && window__default['default'].URL.revokeObjectURL) {
      window__default['default'].URL.revokeObjectURL(this.mediaSourceUrl_);
      this.mediaSourceUrl_ = null;
    }

    _Component.prototype.dispose.call(this);
  };

  _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
    return getProgramTime({
      playlist: this.masterPlaylistController_.media(),
      time: time,
      callback: callback
    });
  } // the player must be playing before calling this
  ;

  _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
    if (pauseAfterSeek === void 0) {
      pauseAfterSeek = true;
    }

    if (retryCount === void 0) {
      retryCount = 2;
    }

    return seekToProgramTime({
      programTime: programTime,
      playlist: this.masterPlaylistController_.media(),
      retryCount: retryCount,
      pauseAfterSeek: pauseAfterSeek,
      seekTo: this.options_.seekTo,
      tech: this.options_.tech,
      callback: callback
    });
  };
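  // Example (sketch): both helpers take node-style callbacks, and
  // seekToProgramTime expects playback to have started; the timestamp is
  // illustrative and must fall within the stream's program-date-time range:
  //
  //   player.tech().vhs.seekToProgramTime('2018-10-12T22:33:52.037+00:00', function (err, newTime) {
  //     if (!err) { console.log('seeked to', newTime); }
  //   });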

  return VhsHandler;
}(Component);
/**
 * The Source Handler object, which informs video.js what additional
 * MIME types are supported and sets up playback. It is registered
 * automatically to the appropriate tech based on the capabilities of
 * the browser it is running in. It is not necessary to use or modify
 * this object in normal usage.
 */


var VhsSourceHandler = {
  name: 'videojs-http-streaming',
  VERSION: version$4,
  canHandleSource: function canHandleSource(srcObj, options) {
    if (options === void 0) {
      options = {};
    }

    var localOptions = videojs__default['default'].mergeOptions(videojs__default['default'].options, options);
    return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
  },
  handleSource: function handleSource(source, tech, options) {
    if (options === void 0) {
      options = {};
    }

    var localOptions = videojs__default['default'].mergeOptions(videojs__default['default'].options, options);
    tech.vhs = new VhsHandler(source, tech, localOptions);

    if (!videojs__default['default'].hasOwnProperty('hls')) {
      Object.defineProperty(tech, 'hls', {
        get: function get() {
          videojs__default['default'].log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
          return tech.vhs;
        },
        configurable: true
      });
    }

    tech.vhs.xhr = xhrFactory();
    tech.vhs.src(source.src, source.type);
    return tech.vhs;
  },
  canPlayType: function canPlayType(type, options) {
    if (options === void 0) {
      options = {};
    }

    var _videojs$mergeOptions = videojs__default['default'].mergeOptions(videojs__default['default'].options, options),
        _videojs$mergeOptions2 = _videojs$mergeOptions.vhs.overrideNative,
        overrideNative = _videojs$mergeOptions2 === void 0 ? !videojs__default['default'].browser.IS_ANY_SAFARI : _videojs$mergeOptions2;

    var supportedType = mediaTypes_js.simpleTypeFromSourceType(type);
    var canUseMsePlayback = supportedType && (!Vhs.supportsTypeNatively(supportedType) || overrideNative);
    return canUseMsePlayback ? 'maybe' : '';
  }
};
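// Example (sketch): `overrideNative` above defaults to true except in Safari;
// it can be forced per player so MSE playback is used even where the browser
// plays HLS natively (native track options per the VHS docs; `el` is an
// assumed <video> element):
//
//   videojs(el, {
//     html5: {
//       vhs: { overrideNative: true },
//       nativeAudioTracks: false,
//       nativeVideoTracks: false
//     }
//   });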
/**
 * Check to see if the native MediaSource object exists and supports
 * an MP4 container with both H.264 video and AAC-LC audio.
 *
 * @return {boolean} whether native media sources are supported
 */

var supportsNativeMediaSources = function supportsNativeMediaSources() {
  return codecs_js.browserSupportsCodec('avc1.4d400d,mp4a.40.2');
}; // register source handlers with the appropriate techs


if (supportsNativeMediaSources()) {
  videojs__default['default'].getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
}

videojs__default['default'].VhsHandler = VhsHandler;
Object.defineProperty(videojs__default['default'], 'HlsHandler', {
  get: function get() {
    videojs__default['default'].log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
    return VhsHandler;
  },
  configurable: true
});
videojs__default['default'].VhsSourceHandler = VhsSourceHandler;
Object.defineProperty(videojs__default['default'], 'HlsSourceHandler', {
  get: function get() {
    videojs__default['default'].log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
    return VhsSourceHandler;
  },
  configurable: true
});
videojs__default['default'].Vhs = Vhs;
Object.defineProperty(videojs__default['default'], 'Hls', {
  get: function get() {
    videojs__default['default'].log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
    return Vhs;
  },
  configurable: true
});

if (!videojs__default['default'].use) {
  videojs__default['default'].registerComponent('Hls', Vhs);
  videojs__default['default'].registerComponent('Vhs', Vhs);
}

videojs__default['default'].options.vhs = videojs__default['default'].options.vhs || {};
videojs__default['default'].options.hls = videojs__default['default'].options.hls || {};

if (videojs__default['default'].registerPlugin) {
  videojs__default['default'].registerPlugin('reloadSourceOnError', reloadSourceOnError);
} else {
  videojs__default['default'].plugin('reloadSourceOnError', reloadSourceOnError);
}
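// Example (sketch): the plugin registered above is enabled per player;
// `errorInterval` (seconds between allowed reloads) is its documented option:
//
//   player.reloadSourceOnError({ errorInterval: 10 });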

Object.defineProperty(exports, 'simpleTypeFromSourceType', {
  enumerable: true,
  get: function () {
    return mediaTypes_js.simpleTypeFromSourceType;
  }
});
exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
exports.Vhs = Vhs;
exports.VhsHandler = VhsHandler;
exports.VhsSourceHandler = VhsSourceHandler;
exports.emeKeySystems = emeKeySystems;
exports.expandDataUri = expandDataUri;
exports.getAllPsshKeySystemsOptions = getAllPsshKeySystemsOptions;
exports.setupEmeOptions = setupEmeOptions;
exports.waitForKeySessionCreation = waitForKeySessionCreation;
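// Example (sketch): consuming the CommonJS exports above, assuming the
// published package name:
//
//   var vhs = require('@videojs/http-streaming');
//   console.log(vhs.VhsHandler.version());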